[
  {
    "path": ".gitignore",
    "content": "# Initially taken from Github's Python gitignore file\n\nrun2\nrun\npretrained\n\n# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\ncheckpoints\n*.ckpt\n# C extensions\n*.so\n\ncache\n\n# tests and logs\ntests/fixtures/cached_*_text.txt\nlogs/\nlightning_logs/\nlang_code_data/\ndocker\ndoc\n_typos.toml\ndata\ndocs\n\nldm_generated_image.png\n*.log\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n.python-version\n\n# celery beat schedule file\ncelerybeat-schedule\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n\n# vscode\n.vs\n.vscode\n\n# Pycharm\n.idea\n\n# TF code\ntensorflow_code\n\n# Models\nproc_data\n\n# examples\nruns\n/runs_old\n/wandb\n/examples/runs\n/examples/**/*.args\n/examples/rag/sweep\n\n# data\n/data\nserialization_dir\n\n# emacs\n*.*~\ndebug.env\n\n# vim\n.*.swp\n\n#ctags\ntags\n\n# pre-commit\n.pre-commit*\n\n# .lock\n*.lock\n\n# DS_Store (MacOS)\n.DS_Store\n# RL pipelines may produce mp4 outputs\n*.mp4\n\n# dependencies\n/transformers\n\n# ruff\n.ruff_cache\n\nwandb\n\n__pycache__"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. 
For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. 
Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [2023] [Gongfan Fang]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "# Diff-Pruning: Structural Pruning for Diffusion Models\n\n<div align=\"center\">\n<img src=\"assets/framework.png\" width=\"80%\"></img>\n</div>\n\n## Update\nCheck our latest work [DeepCache](https://horseee.github.io/Diffusion_DeepCache/), a **training-free and almost loessless** method for diffusion model acceleration. It can be viewed as a special pruning technique that dynamically drops deep layers and only runs shallow ones during inference.\n\n\n## Introduction\n> **Structural Pruning for Diffusion Models** [[arxiv]](https://arxiv.org/abs/2305.10924)  \n> *[Gongfan Fang](https://fangggf.github.io/), [Xinyin Ma](https://horseee.github.io/), [Xinchao Wang](https://sites.google.com/site/sitexinchaowang/)*    \n> *National University of Singapore*\n\nThis work presents *Diff-Pruning*, an efficient structrual pruning method for diffusion models. Our empirical assessment highlights two primary features:\n1) ``Efficiency``: It enables approximately a 50% reduction in FLOPs at a mere 10% to 20% of the original training expenditure; \n2) ``Consistency``: The pruned diffusion models inherently preserve generative behavior congruent with the pre-trained ones.\n\n<div align=\"center\">\n<img src=\"assets/LSUN.png\" width=\"80%\"></img>\n</div>\n\n### Supported Methods\n- [x] Magnitude Pruning\n- [x] Random Pruning\n- [x] Taylor Pruning\n- [x] Diff-Pruning (A taylor-based method proposed in our paper)   \n\n### TODO List\n- [ ] Support more diffusion models from Diffusers\n- [ ] Upload checkpoints of pruned models\n- [ ] Training scripts for CelebA-HQ, LSUN Church & LSUN Bedroom\n- [ ] Align the performance with the [DDIM Repo](https://github.com/ermongroup/ddim). \n\n## Our Exp Code (Unorganized)\n\n### Pruning with DDIM codebase\nThis example shows how to prune a DDPM model pre-trained on CIFAR-10 using the [DDIM codebase](https://github.com/ermongroup/ddim). Since that [Huggingface Diffusers](https://github.com/huggingface/diffusers) do not support [``skip_type='quad'``](https://github.com/ermongroup/ddim/issues/3) in DDIM, you may get slightly worse FID scores with Diffusers for both pre-trained models (FID=4.5) and pruned models (FID=5.6). We are working on this to implement the quad strategy for Diffusers. For reproducibility, we provide our original **but unorganized** exp code for the paper in [ddpm_exp](ddpm_exp). \n\n```bash\ncd ddpm_exp\n# Prune & Finetune\nbash scripts/simple_cifar_our.sh 0.05 # the pre-trained model and data will be automatically prepared\n# Sampling\nbash scripts/sample_cifar_ddpm_pruning.sh run/finetune_simple_v2/cifar10_ours_T=0.05.pth/logs/post_training/ckpt_100000.pth run/sample\n```\n\nFor FID, please refer to [this section](https://github.com/VainF/Diff-Pruning#4-fid-score).  \n\nOutput:\n```\nFound 49984 files.\n100%|██████████████████████████████████████████████████████████████████████████████████████████████████████| 391/391 [00:49<00:00,  7.97it/s]\nFID:  5.242662673752534\n```\n\n### Pruning with LDM codebase\n\nPlease check [ldm_exp/run.sh](ldm_exp/run.sh) for an example of pruning a pre-trained LDM model on ImageNet. This codebase is still unorganized. We will clean it up in the future.\n\n## Pruning with Huggingface Diffusers\n\nThe following pipeline prunes a pre-trained DDPM on CIFAR-10 with [Huggingface Diffusers](https://github.com/huggingface/diffusers).\n\n### 0. 
Requirements, Data and Pretrained Model\n\n* Requirements\n```bash\npip install -r requirements.txt\n```\n \n* Data\n  \nDownload and extract CIFAR-10 images to *data/cifar10_images* for training and evaluation.\n```bash\npython tools/extract_cifar10.py --output data\n```\n* Pretrained Models\n  \nThe following script will download an official DDPM model and convert it to the format of Huggingface Diffusers. You can find the converted model at *pretrained/ddpm_ema_cifar10*. It is an EMA version of [google/ddpm-cifar10-32](https://huggingface.co/google/ddpm-cifar10-32)\n```bash\nbash tools/convert_cifar10_ddpm_ema.sh\n```\n\n(Optional) You can also download a pre-converted model using wget\n```bash\nwget https://github.com/VainF/Diff-Pruning/releases/download/v0.0.1/ddpm_ema_cifar10.zip\n```\n\n### 1. Pruning\nCreate a pruned model at *run/pruned/ddpm_cifar10_pruned*\n```bash\nbash scripts/prune_ddpm_cifar10.sh 0.3  # pruning ratio = 30\\%\n```\n\n### 2. Finetuning (Post-Training)\nFinetune the model and save it at *run/finetuned/ddpm_cifar10_pruned_post_training*\n```bash\nbash scripts/finetune_ddpm_cifar10.sh\n```\n\n### 3. Sampling\n**Pruned:** Sample and save images to *run/sample/ddpm_cifar10_pruned*\n```bash\nbash scripts/sample_ddpm_cifar10_pruned.sh\n```\n\n**Pretrained:** Sample and save images to *run/sample/ddpm_cifar10_pretrained*\n```bash\nbash scripts/sample_ddpm_cifar10_pretrained.sh\n```\n\n### 4. FID Score\nThis script was modified from https://github.com/mseitzer/pytorch-fid. \n\n```bash\n# pre-compute the stats of CIFAR-10 dataset\npython fid_score.py --save-stats data/cifar10_images run/fid_stats_cifar10.npz --device cuda:0 --batch-size 256\n```\n\n```bash\n# Compute the FID score of sampled images\npython fid_score.py run/sample/ddpm_cifar10_pruned run/fid_stats_cifar10.npz --device cuda:0 --batch-size 256\n```\n\n### 5. (Optional) Distributed Training and Sampling with Accelerate\nThis project supports distributed training and sampling. 
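On newer PyTorch versions where ``torch.distributed.launch`` is deprecated, the same scripts can presumably be launched with ``torchrun`` instead (a minimal sketch, assuming the scripts read the rank from environment variables as implied by ``--use_env``; not verified against this repo):\n```bash\ntorchrun --nproc_per_node=8 --master_port 22222 <ddpm_sample.py|ddpm_train.py> ...\n```\nThe legacy launcher invocation is: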
\n```bash\npython -m torch.distributed.launch --nproc_per_node=8 --master_port 22222 --use_env <ddpm_sample.py|ddpm_train.py> ...\n```\nA multi-processing example can be found at [scripts/sample_ddpm_cifar10_pretrained_distributed.sh](scripts/sample_ddpm_cifar10_pretrained_distributed.sh).\n\n\n## Prune Pre-trained DPMs from [HuggingFace Diffusers](https://huggingface.co/models?library=diffusers)\n\n### :rocket: [Denoising Diffusion Probabilistic Models (DDPMs)](https://arxiv.org/abs/2006.11239)\nExample: [google/ddpm-ema-bedroom-256](https://huggingface.co/google/ddpm-ema-bedroom-256)\n```bash\npython ddpm_prune.py \\\n--dataset \"<path/to/imagefoler>\" \\  \n--model_path google/ddpm-ema-bedroom-256 \\\n--save_path run/pruned/ddpm_ema_bedroom_256_pruned \\\n--pruning_ratio 0.05 \\\n--pruner \"<random|magnitude|reinit|taylor|diff-pruning>\" \\\n--batch_size 4 \\\n--thr 0.05 \\\n--device cuda:0 \\\n```\nThe ``dataset`` and ``thr`` arguments only work for taylor & diff-pruning.\n\n\n### :rocket: [Latent Diffusion Models (LDMs)](https://arxiv.org/abs/2112.10752)\nExample: [CompVis/ldm-celebahq-256](https://huggingface.co/CompVis/ldm-celebahq-256)\n```bash\npython ldm_prune.py \\\n--model_path CompVis/ldm-celebahq-256 \\\n--save_path run/pruned/ldm_celeba_pruned \\\n--pruning_ratio 0.05 \\\n--pruner \"<random|magnitude|reinit>\" \\\n--device cuda:0 \\\n--batch_size 4 \\\n```\n\n## Results\n\n* **DDPM on Cifar-10, CelebA and LSUN**\n\n<div align=\"center\">\n<img src=\"assets/exp.png\" width=\"75%\"></img>\n<img src=\"https://github.com/VainF/Diff-Pruning/assets/18592211/39b3a7ad-2abb-4934-9ee0-07724029660b\" width=\"75%\"></img>\n</div>\n\n* **Conditional LDM on ImageNet-1K 256**\n\nWe also have some results on Conditional LDM for ImageNet-1K 256x256, where we finetune a pruned LDM for only 4 epochs. Will release the training script soon.\n\n<div align=\"center\">\n<img src=\"https://github.com/VainF/Diff-Pruning/assets/18592211/31dbf489-2ca2-4625-ba54-5a5ff4e4a626\" width=\"75%\"></img>\n<img src=\"https://github.com/VainF/Diff-Pruning/assets/18592211/20d546c5-9012-4ba9-80b2-96ed29da7d07\" width=\"85%\"></img>\n</div>\n\n\n## Acknowledgement\n\nThis project is heavily based on [Diffusers](https://github.com/huggingface/diffusers), [Torch-Pruning](https://github.com/VainF/Torch-Pruning), [pytorch-fid](https://github.com/mseitzer/pytorch-fid). Our experiments were conducted on [ddim](https://github.com/ermongroup/ddim) and [LDM](https://github.com/CompVis/latent-diffusion).\n\n## Citation\nIf you find this work helpful, please cite:\n```\n@inproceedings{fang2023structural,\n  title={Structural pruning for diffusion models},\n  author={Gongfan Fang and Xinyin Ma and Xinchao Wang},\n  booktitle={Advances in Neural Information Processing Systems},\n  year={2023},\n}\n```\n\n```\n@inproceedings{fang2023depgraph,\n  title={Depgraph: Towards any structural pruning},\n  author={Fang, Gongfan and Ma, Xinyin and Song, Mingli and Mi, Michael Bi and Wang, Xinchao},\n  booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},\n  pages={16091--16101},\n  year={2023}\n}\n```\n"
  },
  {
    "path": "ddpm_exp/.gitignore",
    "content": ".vscode\n__pycache__\n*.log\nrun\ndata\n*.png"
  },
  {
    "path": "ddpm_exp/LICENSE",
    "content": "MIT License\n\nCopyright (c) 2020 Jiaming Song\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "ddpm_exp/README.md",
    "content": "# Denoising Diffusion Implicit Models (DDIM)\n\n[Jiaming Song](http://tsong.me), [Chenlin Meng](http://cs.stanford.edu/~chenlin) and [Stefano Ermon](http://cs.stanford.edu/~ermon), Stanford\n\nImplements sampling from an implicit model that is trained with the same procedure as [Denoising Diffusion Probabilistic Model](https://hojonathanho.github.io/diffusion/), but costs much less time and compute if you want to sample from it (click image below for a video demo):\n\n<a href=\"http://www.youtube.com/watch?v=WCKzxoSduJQ\" target=\"_blank\">![](http://img.youtube.com/vi/WCKzxoSduJQ/0.jpg)</a>\n\n## **Integration with 🤗 Diffusers library**\n\nDDIM is now also available in 🧨 Diffusers and accesible via the [DDIMPipeline](https://huggingface.co/docs/diffusers/api/pipelines/ddim).\nDiffusers allows you to test DDIM in PyTorch in just a couple lines of code.\n\nYou can install diffusers as follows:\n\n```\npip install diffusers torch accelerate\n```\n\nAnd then try out the model with just a couple lines of code:\n\n```python\nfrom diffusers import DDIMPipeline\n\nmodel_id = \"google/ddpm-cifar10-32\"\n\n# load model and scheduler\nddim = DDIMPipeline.from_pretrained(model_id)\n\n# run pipeline in inference (sample random noise and denoise)\nimage = ddim(num_inference_steps=50).images[0]\n\n# save image\nimage.save(\"ddim_generated_image.png\")\n```\n\nMore DDPM/DDIM models compatible with hte DDIM pipeline can be found directly [on the Hub](https://huggingface.co/models?library=diffusers&sort=downloads&search=ddpm)\n\nTo better understand the DDIM scheduler, you can check out [this introductionary google colab](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/diffusers_intro.ipynb)\n\nThe DDIM scheduler can also be used with more powerful diffusion models such as [Stable Diffusion](https://huggingface.co/docs/diffusers/v0.7.0/en/api/pipelines/stable_diffusion#stable-diffusion-pipelines)\n\nYou simply need to [accept the license on the Hub](https://huggingface.co/runwayml/stable-diffusion-v1-5), login with `huggingface-cli login` and install transformers:\n\n```\npip install transformers\n```\n\nThen you can run:\n\n```python\nfrom diffusers import StableDiffusionPipeline, DDIMScheduler\n\nddim = DDIMScheduler.from_config(\"runwayml/stable-diffusion-v1-5\", subfolder=\"scheduler\")\npipeline = StableDiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\", scheduler=ddim)\n\nimage = pipeline(\"An astronaut riding a horse.\").images[0]\n\nimage.save(\"astronaut_riding_a_horse.png\")\n```\n\n## Running the Experiments\nThe code has been tested on PyTorch 1.6.\n\n### Train a model\nTraining is exactly the same as DDPM with the following:\n```\npython main.py --config {DATASET}.yml --exp {PROJECT_PATH} --doc {MODEL_NAME} --ni\n```\n\n### Sampling from the model\n\n#### Sampling from the generalized model for FID evaluation\n```\npython main.py --config {DATASET}.yml --exp {PROJECT_PATH} --doc {MODEL_NAME} --sample --fid --timesteps {STEPS} --eta {ETA} --ni\n```\nwhere \n- `ETA` controls the scale of the variance (0 is DDIM, and 1 is one type of DDPM).\n- `STEPS` controls how many timesteps used in the process.\n- `MODEL_NAME` finds the pre-trained checkpoint according to its inferred path.\n\nIf you want to use the DDPM pretrained model:\n```\npython main.py --config {DATASET}.yml --exp {PROJECT_PATH} --use_pretrained --sample --fid --timesteps {STEPS} --eta {ETA} --ni\n```\nthe `--use_pretrained` option will automatically load the model 
according to the dataset.\n\nWe provide a CelebA 64x64 model [here](https://drive.google.com/file/d/1R_H-fJYXSH79wfSKs9D-fuKQVan5L-GR/view?usp=sharing), and use the DDPM version for CIFAR10 and LSUN.\n\nIf you want to use the version with the larger variance in DDPM: use the `--sample_type ddpm_noisy` option.\n\n#### Sampling from the model for image inpainting \nUse `--interpolation` option instead of `--fid`.\n\n#### Sampling from the sequence of images that lead to the sample\nUse `--sequence` option instead.\n\nThe above two cases contain some hard-coded lines specific to producing the image, so modify them according to your needs.\n\n\n## References and Acknowledgements\n```\n@article{song2020denoising,\n  title={Denoising Diffusion Implicit Models},\n  author={Song, Jiaming and Meng, Chenlin and Ermon, Stefano},\n  journal={arXiv:2010.02502},\n  year={2020},\n  month={October},\n  abbr={Preprint},\n  url={https://arxiv.org/abs/2010.02502}\n}\n```\n\n\nThis implementation is based on / inspired by:\n\n- [https://github.com/hojonathanho/diffusion](https://github.com/hojonathanho/diffusion) (the DDPM TensorFlow repo), \n- [https://github.com/pesser/pytorch_diffusion](https://github.com/pesser/pytorch_diffusion) (PyTorch helper that loads the DDPM model), and\n- [https://github.com/ermongroup/ncsnv2](https://github.com/ermongroup/ncsnv2) (code structure).\n"
  },
  {
    "path": "ddpm_exp/calc_fid.py",
    "content": "from cleanfid import fid\nimport argparse\nparser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\nparser.add_argument('--path1', type=str, required=True, help='Path to the images')\nparser.add_argument('--path2', type=str, required=True, help='Path to the images')\nargs = parser.parse_args()\n\nif args.path2==\"cifar10\":\n    score = fid.compute_fid(args.dir, dataset_name=\"cifar10\", dataset_res=32, dataset_split=\"train\")\nelse:\n    score = fid.compute_fid(args.path1, args.path2)\nprint(\"FID: \", score)"
  },
  {
    "path": "ddpm_exp/compute_flops.py",
    "content": "import torch\nimport random, os\nimport argparse\nfrom PIL import Image\nimport torchvision\nimport numpy as np\nimport pytorch_msssim\nfrom utils import UnlabeledImageFolder\nfrom tqdm import tqdm \nimport torch_pruning as tp\nparser = argparse.ArgumentParser()\nparser.add_argument('--restore_from', type=str, required=True)\nargs = parser.parse_args()\n\nmodel = torch.load(args.restore_from, map_location='cpu')[0]\nexample_inputs = {'x': torch.randn(1, 3, 32, 32), 't': torch.ones(1)}\nmacs, params = tp.utils.count_ops_and_params(model, example_inputs)\nprint(\"model: {}, macs: {} G, params: {} M\".format(args.restore_from, macs/1e9, params/1e6))\n\n"
  },
  {
    "path": "ddpm_exp/compute_pruned_ssim_curve.py",
    "content": "import pytorch_msssim \nimport os\nimport torch\nfrom PIL import Image\nimport torchvision\n\nbase_folder_name = 'run/prune_ssim_2/0'\nfolder_name = [os.path.join('run/prune_ssim_2', '{}'.format(k)) for k in range(50, 1000+1, 50)]\nn_samples = 32\n# test ssim for each folder\nfolder_ssim = []\nfor f in folder_name:\n    ssim_list = []\n    for img_id in range(n_samples):\n        img1 = Image.open(os.path.join(base_folder_name, f'{img_id}.png'))\n        img2 = Image.open(os.path.join(f, f'{img_id}.png'))\n        img1_tensor = torchvision.transforms.ToTensor()(img1)\n        img2_tensor = torchvision.transforms.ToTensor()(img2)\n        img1_tensor = img1_tensor.unsqueeze(0)\n        img2_tensor = img2_tensor.unsqueeze(0)\n        ssim = pytorch_msssim.ssim(img1_tensor, img2_tensor, data_range=1.0, size_average=True)\n        ssim_list.append(ssim)\n    ssim = sum(ssim_list) / len(ssim_list)\n    folder_ssim.append(ssim.item())\nprint(folder_ssim)"
  },
  {
    "path": "ddpm_exp/compute_ssim.py",
    "content": "import torch\nimport random, os\nimport argparse\nfrom PIL import Image\nimport torchvision\nimport numpy as np\nimport pytorch_msssim\nfrom utils import UnlabeledImageFolder\nfrom tqdm import tqdm \nparser = argparse.ArgumentParser()\nparser.add_argument('--path', type=str, required=True, nargs='+')\nargs = parser.parse_args()\n\n# generate radom index\nnrow = 16\nimg_index = random.sample(list(range(50000)), nrow*nrow)\npath1 = args.path[0]\npath2 = args.path[1]\nprint(path1, path2)\nimg_dst1 = UnlabeledImageFolder(path1, transform=torchvision.transforms.ToTensor(), exts=[\"png\"])\nimg_dst2 = UnlabeledImageFolder(path2, transform=torchvision.transforms.ToTensor(), exts=[\"png\"])\nprint(len(img_dst1), len(img_dst2))\n\nloader1 = torch.utils.data.DataLoader(\n    img_dst1,\n    batch_size=100,\n    shuffle=False,\n    num_workers=4,\n    drop_last=False,\n)\nloader2 = torch.utils.data.DataLoader(\n    img_dst2,\n    batch_size=100,\n    shuffle=False,\n    num_workers=4,\n    drop_last=False,\n)\n\nwith torch.no_grad():\n    ssim_list = []\n    mse_list = []\n    for i, (img1, img2) in tqdm(enumerate(zip(loader1, loader2))):\n        ssim = pytorch_msssim.ssim(img1.cuda(), img2.cuda(), data_range=1.0, size_average=False)\n        ssim_list.append(ssim.cpu())\n        mse = torch.nn.functional.mse_loss(img1.cuda(), img2.cuda(), reduction='none').mean(dim=(1,2,3))\n        mse_list.append(mse.cpu())\n\n    ssim = torch.cat(ssim_list, dim=0)\n    mse = torch.cat(mse_list, dim=0)\n    ssim_avg = ssim.mean()\n    mse_avg = mse.mean()\n    print(\"path1: {}, path2: {}, ssim: {}, mse: {}\".format(path1, path2, ssim_avg, mse_avg))\n\n    "
  },
  {
    "path": "ddpm_exp/compute_ssim_vis.py",
    "content": "import torch\nimport random, os\nimport argparse\nfrom PIL import Image\nimport torchvision\nimport numpy as np\nimport pytorch_msssim\nfrom utils import UnlabeledImageFolder\nfrom tqdm import tqdm \nimg_ids = [159, 149, 144, 127, 86, 41]\nimage_folder1 = 'run/sample_v2/bedroom_250k/image_samples/images/0'\nimage_folder2 = 'run/sample_v2/bedroom_official/image_samples/images/0'\nbase_img_id = 0\nssim_list = []\nfor iid in img_ids:\n    img1 = Image.open(os.path.join(image_folder1, f'{iid}.png'))\n    img2 = Image.open(os.path.join(image_folder2, f'{iid}.png'))\n    img1_tensor = torchvision.transforms.ToTensor()(img1).unsqueeze(0)\n    img2_tensor = torchvision.transforms.ToTensor()(img2).unsqueeze(0)\n    ssim = pytorch_msssim.ssim(img1_tensor, img2_tensor, data_range=1.0, size_average=True)\n    ssim_list.append(ssim.item())\nprint(ssim_list)\n    "
  },
  {
    "path": "ddpm_exp/configs/bedroom.yml",
    "content": "data:\n    dataset: \"LSUN\"\n    category: \"bedroom\"\n    image_size: 256\n    channels: 3\n    logit_transform: false\n    uniform_dequantization: false\n    gaussian_dequantization: false\n    random_flip: true\n    rescaled: true\n    num_workers: 32\n\nmodel:\n    type: \"simple\"\n    in_channels: 3\n    out_ch: 3\n    ch: 128\n    ch_mult: [1, 1, 2, 2, 4, 4]\n    num_res_blocks: 2\n    attn_resolutions: [16, ]\n    dropout: 0.0\n    var_type: fixedsmall\n    ema_rate: 0.999\n    ema: True\n    resamp_with_conv: True\n\ndiffusion:\n    beta_schedule: linear\n    beta_start: 0.0001\n    beta_end: 0.02\n    num_diffusion_timesteps: 1000\n\ntraining:\n    batch_size: 8\n    n_epochs: 10000\n    n_iters: 5000000\n    snapshot_freq: 5000\n    validation_freq: 2000\n\nsampling:\n    batch_size: 16\n    last_only: True\n\noptim:\n    weight_decay: 0.000\n    optimizer: \"Adam\"\n    lr: 0.000002\n    beta1: 0.9\n    amsgrad: false\n    eps: 0.00000001\n"
  },
  {
    "path": "ddpm_exp/configs/celeba.yml",
    "content": "data:\n    dataset: \"CELEBA\"\n    image_size: 64\n    channels: 3\n    logit_transform: false\n    uniform_dequantization: false\n    gaussian_dequantization: false\n    random_flip: true\n    rescaled: true\n    num_workers: 4\n\nmodel:\n    type: \"simple\"\n    in_channels: 3\n    out_ch: 3\n    ch: 128\n    ch_mult: [1, 2, 2, 2, 4]\n    num_res_blocks: 2\n    attn_resolutions: [16, ]\n    dropout: 0.1\n    var_type: fixedlarge\n    ema_rate: 0.9999\n    ema: True\n    resamp_with_conv: True\n\ndiffusion:\n    beta_schedule: linear\n    beta_start: 0.0001\n    beta_end: 0.02\n    num_diffusion_timesteps: 1000\n\ntraining:\n    batch_size: 96 # 128\n    n_epochs: 10000\n    n_iters: 5000000\n    snapshot_freq: 5000\n    validation_freq: 20000\n\nsampling:\n    batch_size: 32\n    last_only: True\n\noptim:\n    weight_decay: 0.000\n    optimizer: \"Adam\"\n    lr: 0.0002\n    beta1: 0.9\n    amsgrad: false\n    eps: 0.00000001\n    grad_clip: 1.0\n"
  },
  {
    "path": "ddpm_exp/configs/church.yml",
    "content": "data:\n    dataset: \"LSUN\"\n    category: \"church_outdoor\"\n    image_size: 256\n    channels: 3\n    logit_transform: false\n    uniform_dequantization: false\n    gaussian_dequantization: false\n    random_flip: true\n    rescaled: true\n    num_workers: 32\n\nmodel:\n    type: \"simple\"\n    in_channels: 3\n    out_ch: 3\n    ch: 128\n    ch_mult: [1, 1, 2, 2, 4, 4]\n    num_res_blocks: 2\n    attn_resolutions: [16, ]\n    dropout: 0.0\n    var_type: fixedsmall\n    ema_rate: 0.999\n    ema: True\n    resamp_with_conv: True\n\ndiffusion:\n    beta_schedule: linear\n    beta_start: 0.0001\n    beta_end: 0.02\n    num_diffusion_timesteps: 1000\n\ntraining:\n    batch_size: 8 # 64\n    n_epochs: 10000\n    n_iters: 5000000\n    snapshot_freq: 5000\n    validation_freq: 2000\n\nsampling:\n    batch_size: 16\n    last_only: True\n\noptim:\n    weight_decay: 0.000\n    optimizer: \"Adam\"\n    lr: 0.00002\n    beta1: 0.9\n    amsgrad: false\n    eps: 0.00000001\n"
  },
  {
    "path": "ddpm_exp/configs/cifar10.yml",
    "content": "data:\n    dataset: \"CIFAR10\"\n    image_size: 32\n    channels: 3\n    logit_transform: false\n    uniform_dequantization: false\n    gaussian_dequantization: false\n    random_flip: true\n    rescaled: true\n    num_workers: 4\n\nmodel:\n    type: \"simple\"\n    in_channels: 3\n    out_ch: 3\n    ch: 128\n    ch_mult: [1, 2, 2, 2]\n    num_res_blocks: 2\n    attn_resolutions: [16, ]\n    dropout: 0.1\n    var_type: fixedlarge\n    ema_rate: 0.9999\n    ema: True\n    resamp_with_conv: True\n\ndiffusion:\n    beta_schedule: linear\n    beta_start: 0.0001\n    beta_end: 0.02\n    num_diffusion_timesteps: 1000\n\ntraining:\n    batch_size: 128\n    n_epochs: 10000\n    n_iters: 5000000\n    snapshot_freq: 5000\n    validation_freq: 2000\n\nsampling:\n    batch_size: 64\n    last_only: True\n\noptim:\n    weight_decay: 0.000\n    optimizer: \"Adam\"\n    lr: 0.0002\n    beta1: 0.9\n    amsgrad: false\n    eps: 0.00000001\n    grad_clip: 1.0\n"
  },
  {
    "path": "ddpm_exp/configs/cifar10_pruning.yml",
    "content": "data:\n    dataset: \"CIFAR10\"\n    image_size: 32\n    channels: 3\n    logit_transform: false\n    uniform_dequantization: false\n    gaussian_dequantization: false\n    random_flip: true\n    rescaled: true\n    num_workers: 4\n\nmodel:\n    type: \"simple\"\n    in_channels: 3\n    out_ch: 3\n    ch: 128\n    ch_mult: [1, 2, 2, 2]\n    num_res_blocks: 2\n    attn_resolutions: [16, ]\n    dropout: 0.1\n    var_type: fixedlarge\n    ema_rate: 0.9999\n    ema: True\n    resamp_with_conv: True\n\ndiffusion:\n    beta_schedule: linear\n    beta_start: 0.0001\n    beta_end: 0.02\n    num_diffusion_timesteps: 1000\n\ntraining:\n    batch_size: 128\n    n_epochs: 10000\n    n_iters: 5000000\n    snapshot_freq: 5000\n    validation_freq: 2000\n\nsampling:\n    batch_size: 64\n    last_only: True\n\noptim:\n    weight_decay: 0.000\n    optimizer: \"Adam\"\n    lr: 0.00002\n    beta1: 0.9\n    amsgrad: false\n    eps: 0.00000001\n    grad_clip: 1.0\n"
  },
  {
    "path": "ddpm_exp/datasets/__init__.py",
    "content": "import os\nimport torch\nimport numbers\nimport torchvision.transforms as transforms\nimport torchvision.transforms.functional as F\nfrom torchvision.datasets import CIFAR10\nfrom datasets.celeba import CelebA\nfrom datasets.ffhq import FFHQ\nfrom datasets.lsun import LSUN\nfrom torch.utils.data import Subset\nimport numpy as np\n\n\nclass Crop(object):\n    def __init__(self, x1, x2, y1, y2):\n        self.x1 = x1\n        self.x2 = x2\n        self.y1 = y1\n        self.y2 = y2\n\n    def __call__(self, img):\n        return F.crop(img, self.x1, self.y1, self.x2 - self.x1, self.y2 - self.y1)\n\n    def __repr__(self):\n        return self.__class__.__name__ + \"(x1={}, x2={}, y1={}, y2={})\".format(\n            self.x1, self.x2, self.y1, self.y2\n        )\n\n\ndef get_dataset(args, config):\n    if config.data.random_flip is False:\n        tran_transform = test_transform = transforms.Compose(\n            [transforms.Resize(config.data.image_size), transforms.ToTensor()]\n        )\n    else:\n        tran_transform = transforms.Compose(\n            [\n                transforms.Resize(config.data.image_size),\n                transforms.RandomHorizontalFlip(p=0.5),\n                transforms.ToTensor(),\n            ]\n        )\n        test_transform = transforms.Compose(\n            [transforms.Resize(config.data.image_size), transforms.ToTensor()]\n        )\n\n    if config.data.dataset == \"CIFAR10\":\n        dataset = CIFAR10(\n            os.path.join('data', \"cifar10\"),\n            train=True,\n            download=True,\n            transform=tran_transform,\n        )\n        test_dataset = CIFAR10(\n            os.path.join('data', \"cifar10\"),\n            train=False,\n            download=True,\n            transform=test_transform,\n        )\n\n    elif config.data.dataset == \"CELEBA\":\n        cx = 89\n        cy = 121\n        x1 = cy - 64\n        x2 = cy + 64\n        y1 = cx - 64\n        y2 = cx + 64\n        if config.data.random_flip:\n            dataset = CelebA(\n                root=os.path.join(\"data\", \"celeba\"),\n                split=\"train\",\n                transform=transforms.Compose(\n                    [\n                        Crop(x1, x2, y1, y2),\n                        transforms.Resize(config.data.image_size),\n                        transforms.RandomHorizontalFlip(),\n                        transforms.ToTensor(),\n                    ]\n                ),\n                download=False,\n            )\n        else:\n            dataset = CelebA(\n                root=os.path.join(\"data\", \"celeba\"),\n                split=\"train\",\n                transform=transforms.Compose(\n                    [\n                        Crop(x1, x2, y1, y2),\n                        transforms.Resize(config.data.image_size),\n                        transforms.ToTensor(),\n                    ]\n                ),\n                download=False,\n            )\n\n        test_dataset = CelebA(\n            root=os.path.join(\"data\", \"celeba\"),\n            split=\"test\",\n            transform=transforms.Compose(\n                [\n                    Crop(x1, x2, y1, y2),\n                    transforms.Resize(config.data.image_size),\n                    transforms.ToTensor(),\n                ]\n            ),\n            download=True,\n        )\n\n    elif config.data.dataset == \"LSUN\":\n        train_folder = \"{}_train\".format(config.data.category)\n        val_folder = 
\"{}_val\".format(config.data.category)\n        if config.data.random_flip:\n            dataset = LSUN(\n                root=os.path.join(\"data\", \"lsun\"),\n                classes=[train_folder],\n                transform=transforms.Compose(\n                    [\n                        transforms.Resize(config.data.image_size),\n                        transforms.CenterCrop(config.data.image_size),\n                        transforms.RandomHorizontalFlip(p=0.5),\n                        transforms.ToTensor(),\n                    ]\n                ),\n            )\n        else:\n            dataset = LSUN(\n                root=os.path.join(\"data\", \"lsun\"),\n                classes=[train_folder],\n                transform=transforms.Compose(\n                    [\n                        transforms.Resize(config.data.image_size),\n                        transforms.CenterCrop(config.data.image_size),\n                        transforms.ToTensor(),\n                    ]\n                ),\n            )\n\n        test_dataset = LSUN(\n            root=os.path.join(\"data\", \"lsun\"),\n            classes=[val_folder],\n            transform=transforms.Compose(\n                [\n                    transforms.Resize(config.data.image_size),\n                    transforms.CenterCrop(config.data.image_size),\n                    transforms.ToTensor(),\n                ]\n            ),\n        )\n\n    elif config.data.dataset == \"FFHQ\":\n        if config.data.random_flip:\n            dataset = FFHQ(\n                path=os.path.join(\"data\", \"FFHQ\"),\n                transform=transforms.Compose(\n                    [transforms.RandomHorizontalFlip(p=0.5), transforms.ToTensor()]\n                ),\n                resolution=config.data.image_size,\n            )\n        else:\n            dataset = FFHQ(\n                path=os.path.join(\"data\", \"FFHQ\"),\n                transform=transforms.ToTensor(),\n                resolution=config.data.image_size,\n            )\n\n        num_items = len(dataset)\n        indices = list(range(num_items))\n        random_state = np.random.get_state()\n        np.random.seed(2019)\n        np.random.shuffle(indices)\n        np.random.set_state(random_state)\n        train_indices, test_indices = (\n            indices[: int(num_items * 0.9)],\n            indices[int(num_items * 0.9) :],\n        )\n        test_dataset = Subset(dataset, test_indices)\n        dataset = Subset(dataset, train_indices)\n    else:\n        dataset, test_dataset = None, None\n\n    return dataset, test_dataset\n\n\ndef logit_transform(image, lam=1e-6):\n    image = lam + (1 - 2 * lam) * image\n    return torch.log(image) - torch.log1p(-image)\n\n\ndef data_transform(config, X):\n    if config.data.uniform_dequantization:\n        X = X / 256.0 * 255.0 + torch.rand_like(X) / 256.0\n    if config.data.gaussian_dequantization:\n        X = X + torch.randn_like(X) * 0.01\n\n    if config.data.rescaled:\n        X = 2 * X - 1.0\n    elif config.data.logit_transform:\n        X = logit_transform(X)\n\n    if hasattr(config, \"image_mean\"):\n        return X - config.image_mean.to(X.device)[None, ...]\n\n    return X\n\n\ndef inverse_data_transform(config, X):\n    if hasattr(config, \"image_mean\"):\n        X = X + config.image_mean.to(X.device)[None, ...]\n\n    if config.data.logit_transform:\n        X = torch.sigmoid(X)\n    elif config.data.rescaled:\n        X = (X + 1.0) / 2.0\n\n    return torch.clamp(X, 0.0, 1.0)\n"
  },
  {
    "path": "ddpm_exp/datasets/celeba.py",
    "content": "import torch\nimport os\nimport PIL\nfrom .vision import VisionDataset\nfrom .utils import download_file_from_google_drive, check_integrity\n\n\nclass CelebA(VisionDataset):\n    \"\"\"`Large-scale CelebFaces Attributes (CelebA) Dataset <http://mmlab.ie.cuhk.edu.hk/projects/CelebA.html>`_ Dataset.\n\n    Args:\n        root (string): Root directory where images are downloaded to.\n        split (string): One of {'train', 'valid', 'test'}.\n            Accordingly dataset is selected.\n        target_type (string or list, optional): Type of target to use, ``attr``, ``identity``, ``bbox``,\n            or ``landmarks``. Can also be a list to output a tuple with all specified target types.\n            The targets represent:\n                ``attr`` (np.array shape=(40,) dtype=int): binary (0, 1) labels for attributes\n                ``identity`` (int): label for each person (data points with the same identity are the same person)\n                ``bbox`` (np.array shape=(4,) dtype=int): bounding box (x, y, width, height)\n                ``landmarks`` (np.array shape=(10,) dtype=int): landmark points (lefteye_x, lefteye_y, righteye_x,\n                    righteye_y, nose_x, nose_y, leftmouth_x, leftmouth_y, rightmouth_x, rightmouth_y)\n            Defaults to ``attr``.\n        transform (callable, optional): A function/transform that  takes in an PIL image\n            and returns a transformed version. E.g, ``transforms.ToTensor``\n        target_transform (callable, optional): A function/transform that takes in the\n            target and transforms it.\n        download (bool, optional): If true, downloads the dataset from the internet and\n            puts it in root directory. If dataset is already downloaded, it is not\n            downloaded again.\n    \"\"\"\n\n    base_folder = \"Img\"\n    # There currently does not appear to be a easy way to extract 7z in python (without introducing additional\n    # dependencies). 
The \"in-the-wild\" (not aligned+cropped) images are only in 7z, so they are not available\n    # right now.\n    file_list = [\n        # File ID                         MD5 Hash                            Filename\n        (\"0B7EVK8r0v71pZjFTYXZWM3FlRnM\", \"00d2c5bc6d35e252742224ab0c1e8fcb\", \"img_align_celeba.zip\"),\n        # (\"0B7EVK8r0v71pbWNEUjJKdDQ3dGc\", \"b6cd7e93bc7a96c2dc33f819aa3ac651\", \"img_align_celeba_png.7z\"),\n        # (\"0B7EVK8r0v71peklHb0pGdDl6R28\", \"b6cd7e93bc7a96c2dc33f819aa3ac651\", \"img_celeba.7z\"),\n        (\"0B7EVK8r0v71pblRyaVFSWGxPY0U\", \"75e246fa4810816ffd6ee81facbd244c\", \"list_attr_celeba.txt\"),\n        (\"1_ee_0u7vcNLOfNLegJRHmolfH5ICW-XS\", \"32bd1bd63d3c78cd57e08160ec5ed1e2\", \"identity_CelebA.txt\"),\n        (\"0B7EVK8r0v71pbThiMVRxWXZ4dU0\", \"00566efa6fedff7a56946cd1c10f1c16\", \"list_bbox_celeba.txt\"),\n        (\"0B7EVK8r0v71pd0FJY3Blby1HUTQ\", \"cc24ecafdb5b50baae59b03474781f8c\", \"list_landmarks_align_celeba.txt\"),\n        # (\"0B7EVK8r0v71pTzJIdlJWdHczRlU\", \"063ee6ddb681f96bc9ca28c6febb9d1a\", \"list_landmarks_celeba.txt\"),\n        (\"0B7EVK8r0v71pY0NSMzRuSXJEVkk\", \"d32c9cbf5e040fd4025c592c306e6668\", \"list_eval_partition.txt\"),\n    ]\n\n    def __init__(self, root,\n                 split=\"train\",\n                 target_type=\"attr\",\n                 transform=None, target_transform=None,\n                 download=False):\n        import pandas\n        super(CelebA, self).__init__(root)\n        self.split = split\n        if isinstance(target_type, list):\n            self.target_type = target_type\n        else:\n            self.target_type = [target_type]\n        self.transform = transform\n        self.target_transform = target_transform\n\n        #if download:\n        #    self.download()\n\n        #if not self._check_integrity():\n        #    raise RuntimeError('Dataset not found or corrupted.' +\n        #                       ' You can use download=True to download it')\n\n        self.transform = transform\n        self.target_transform = target_transform\n\n        if split.lower() == \"train\":\n            split = 0\n        elif split.lower() == \"valid\":\n            split = 1\n        elif split.lower() == \"test\":\n            split = 2\n        else:\n            raise ValueError('Wrong split entered! 
Please use split=\"train\" '\n                             'or split=\"valid\" or split=\"test\"')\n\n        with open(os.path.join(self.root, 'Eval', \"list_eval_partition.txt\"), \"r\") as f:\n            splits = pandas.read_csv(f, delim_whitespace=True, header=None, index_col=0)\n\n        with open(os.path.join(self.root, 'Anno', \"identity_CelebA.txt\"), \"r\") as f:\n            self.identity = pandas.read_csv(f, delim_whitespace=True, header=None, index_col=0)\n\n        with open(os.path.join(self.root, 'Anno', \"list_bbox_celeba.txt\"), \"r\") as f:\n            self.bbox = pandas.read_csv(f, delim_whitespace=True, header=1, index_col=0)\n\n        with open(os.path.join(self.root, 'Anno', \"list_landmarks_align_celeba.txt\"), \"r\") as f:\n            self.landmarks_align = pandas.read_csv(f, delim_whitespace=True, header=1)\n\n        with open(os.path.join(self.root, 'Anno', \"list_attr_celeba.txt\"), \"r\") as f:\n            self.attr = pandas.read_csv(f, delim_whitespace=True, header=1)\n\n        mask = (splits[1] == split)\n        self.filename = splits[mask].index.values\n        self.identity = torch.as_tensor(self.identity[mask].values)\n        self.bbox = torch.as_tensor(self.bbox[mask].values)\n        self.landmarks_align = torch.as_tensor(self.landmarks_align[mask].values)\n        self.attr = torch.as_tensor(self.attr[mask].values)\n        self.attr = (self.attr + 1) // 2  # map from {-1, 1} to {0, 1}\n\n    def _check_integrity(self):\n        for (_, md5, filename) in self.file_list:\n            fpath = os.path.join(self.root, self.base_folder, filename)\n            _, ext = os.path.splitext(filename)\n            # Allow original archive to be deleted (zip and 7z)\n            # Only need the extracted images\n            if ext not in [\".zip\", \".7z\"] and not check_integrity(fpath, md5):\n                return False\n\n        # Should check a hash of the images\n        return os.path.isdir(os.path.join(self.root, self.base_folder, \"img_align_celeba\"))\n\n    def download(self):\n        import zipfile\n\n        if self._check_integrity():\n            print('Files already downloaded and verified')\n            return\n\n        for (file_id, md5, filename) in self.file_list:\n            download_file_from_google_drive(file_id, os.path.join(self.root, self.base_folder), filename, md5)\n\n        with zipfile.ZipFile(os.path.join(self.root, self.base_folder, \"img_align_celeba.zip\"), \"r\") as f:\n            f.extractall(os.path.join(self.root, self.base_folder))\n\n    def __getitem__(self, index):\n        X = PIL.Image.open(os.path.join(self.root, self.base_folder, \"img_align_celeba\", self.filename[index]))\n\n        target = []\n        for t in self.target_type:\n            if t == \"attr\":\n                target.append(self.attr[index, :])\n            elif t == \"identity\":\n                target.append(self.identity[index, 0])\n            elif t == \"bbox\":\n                target.append(self.bbox[index, :])\n            elif t == \"landmarks\":\n                target.append(self.landmarks_align[index, :])\n            else:\n                raise ValueError(\"Target type \\\"{}\\\" is not recognized.\".format(t))\n        target = tuple(target) if len(target) > 1 else target[0]\n\n        if self.transform is not None:\n            X = self.transform(X)\n\n        if self.target_transform is not None:\n            target = self.target_transform(target)\n\n        return X, target\n\n    def __len__(self):\n        return 
len(self.attr)\n\n    def extra_repr(self):\n        lines = [\"Target type: {target_type}\", \"Split: {split}\"]\n        return '\\n'.join(lines).format(**self.__dict__)\n"
  },
  {
    "path": "ddpm_exp/datasets/ffhq.py",
    "content": "from io import BytesIO\n\nimport lmdb\nfrom PIL import Image\nfrom torch.utils.data import Dataset\n\n\nclass FFHQ(Dataset):\n    def __init__(self, path, transform, resolution=8):\n        self.env = lmdb.open(\n            path,\n            max_readers=32,\n            readonly=True,\n            lock=False,\n            readahead=False,\n            meminit=False,\n        )\n\n        if not self.env:\n            raise IOError('Cannot open lmdb dataset', path)\n\n        with self.env.begin(write=False) as txn:\n            self.length = int(txn.get('length'.encode('utf-8')).decode('utf-8'))\n\n        self.resolution = resolution\n        self.transform = transform\n\n    def __len__(self):\n        return self.length\n\n    def __getitem__(self, index):\n        with self.env.begin(write=False) as txn:\n            key = f'{self.resolution}-{str(index).zfill(5)}'.encode('utf-8')\n            img_bytes = txn.get(key)\n\n        buffer = BytesIO(img_bytes)\n        img = Image.open(buffer)\n        img = self.transform(img)\n        target = 0\n\n        return img, target"
  },
  {
    "path": "ddpm_exp/datasets/lsun.py",
    "content": "from .vision import VisionDataset\nfrom PIL import Image\nimport os\nimport os.path\nimport io\nfrom collections.abc import Iterable\nimport pickle\nfrom torchvision.datasets.utils import verify_str_arg, iterable_to_str\n\n\nclass LSUNClass(VisionDataset):\n    def __init__(self, root, transform=None, target_transform=None):\n        import lmdb\n\n        super(LSUNClass, self).__init__(\n            root, transform=transform, target_transform=target_transform\n        )\n\n        self.env = lmdb.open(\n            root,\n            max_readers=1,\n            readonly=True,\n            lock=False,\n            readahead=False,\n            meminit=False,\n        )\n        with self.env.begin(write=False) as txn:\n            self.length = txn.stat()[\"entries\"]\n        root_split = root.split(\"/\")\n        cache_file = os.path.join(\"/\".join(root_split[:-1]), f\"_cache_{root_split[-1]}\")\n        if os.path.isfile(cache_file):\n            self.keys = pickle.load(open(cache_file, \"rb\"))\n        else:\n            with self.env.begin(write=False) as txn:\n                self.keys = [key for key, _ in txn.cursor()]\n            pickle.dump(self.keys, open(cache_file, \"wb\"))\n\n    def __getitem__(self, index):\n        img, target = None, None\n        env = self.env\n        with env.begin(write=False) as txn:\n            imgbuf = txn.get(self.keys[index])\n\n        buf = io.BytesIO()\n        buf.write(imgbuf)\n        buf.seek(0)\n        img = Image.open(buf).convert(\"RGB\")\n\n        if self.transform is not None:\n            img = self.transform(img)\n\n        if self.target_transform is not None:\n            target = self.target_transform(target)\n\n        return img, target\n\n    def __len__(self):\n        return self.length\n\n\nclass LSUN(VisionDataset):\n    \"\"\"\n    `LSUN <https://www.yf.io/p/lsun>`_ dataset.\n\n    Args:\n        root (string): Root directory for the database files.\n        classes (string or list): One of {'train', 'val', 'test'} or a list of\n            categories to load. e,g. ['bedroom_train', 'church_outdoor_train'].\n        transform (callable, optional): A function/transform that  takes in an PIL image\n            and returns a transformed version. 
E.g., ``transforms.RandomCrop``\n        target_transform (callable, optional): A function/transform that takes in the\n            target and transforms it.\n    \"\"\"\n\n    def __init__(self, root, classes=\"train\", transform=None, target_transform=None):\n        super(LSUN, self).__init__(\n            root, transform=transform, target_transform=target_transform\n        )\n        self.classes = self._verify_classes(classes)\n\n        # for each class, create an LSUNClass dataset\n        self.dbs = []\n        for c in self.classes:\n            self.dbs.append(\n                LSUNClass(root=root + \"/\" + c + \"_lmdb\", transform=transform)\n            )\n\n        self.indices = []\n        count = 0\n        for db in self.dbs:\n            count += len(db)\n            self.indices.append(count)\n\n        self.length = count\n\n    def _verify_classes(self, classes):\n        categories = [\n            \"bedroom\",\n            \"bridge\",\n            \"church_outdoor\",\n            \"classroom\",\n            \"conference_room\",\n            \"dining_room\",\n            \"kitchen\",\n            \"living_room\",\n            \"restaurant\",\n            \"tower\",\n        ]\n        dset_opts = [\"train\", \"val\", \"test\"]\n\n        try:\n            verify_str_arg(classes, \"classes\", dset_opts)\n            if classes == \"test\":\n                classes = [classes]\n            else:\n                classes = [c + \"_\" + classes for c in categories]\n        except ValueError:\n            if not isinstance(classes, Iterable):\n                msg = (\n                    \"Expected type str or Iterable for argument classes, \"\n                    \"but got type {}.\"\n                )\n                raise ValueError(msg.format(type(classes)))\n\n            classes = list(classes)\n            msg_fmtstr = (\n                \"Expected type str for elements in argument classes, \"\n                \"but got type {}.\"\n            )\n            for c in classes:\n                verify_str_arg(c, custom_msg=msg_fmtstr.format(type(c)))\n                c_short = c.split(\"_\")\n                category, dset_opt = \"_\".join(c_short[:-1]), c_short[-1]\n\n                msg_fmtstr = \"Unknown value '{}' for {}. Valid values are {{{}}}.\"\n                msg = msg_fmtstr.format(\n                    category, \"LSUN class\", iterable_to_str(categories)\n                )\n                verify_str_arg(category, valid_values=categories, custom_msg=msg)\n\n                msg = msg_fmtstr.format(dset_opt, \"postfix\", iterable_to_str(dset_opts))\n                verify_str_arg(dset_opt, valid_values=dset_opts, custom_msg=msg)\n\n        return classes\n\n    def __getitem__(self, index):\n        \"\"\"\n        Args:\n            index (int): Index\n\n        Returns:\n            tuple: Tuple (image, target) where target is the index of the target category.\n        \"\"\"\n        target = 0\n        sub = 0\n        for ind in self.indices:\n            if index < ind:\n                break\n            target += 1\n            sub = ind\n\n        db = self.dbs[target]\n        index = index - sub\n\n        if self.target_transform is not None:\n            target = self.target_transform(target)\n\n        img, _ = db[index]\n        return img, target\n\n    def __len__(self):\n        return self.length\n\n    def extra_repr(self):\n        return \"Classes: {classes}\".format(**self.__dict__)\n"
  },
  {
    "path": "ddpm_exp/datasets/utils.py",
    "content": "import os\nimport os.path\nimport hashlib\nimport errno\nfrom torch.utils.model_zoo import tqdm\n\n\ndef gen_bar_updater():\n    pbar = tqdm(total=None)\n\n    def bar_update(count, block_size, total_size):\n        if pbar.total is None and total_size:\n            pbar.total = total_size\n        progress_bytes = count * block_size\n        pbar.update(progress_bytes - pbar.n)\n\n    return bar_update\n\n\ndef check_integrity(fpath, md5=None):\n    if md5 is None:\n        return True\n    if not os.path.isfile(fpath):\n        return False\n    md5o = hashlib.md5()\n    with open(fpath, 'rb') as f:\n        # read in 1MB chunks\n        for chunk in iter(lambda: f.read(1024 * 1024), b''):\n            md5o.update(chunk)\n    md5c = md5o.hexdigest()\n    if md5c != md5:\n        return False\n    return True\n\n\ndef makedir_exist_ok(dirpath):\n    \"\"\"\n    Python2 support for os.makedirs(.., exist_ok=True)\n    \"\"\"\n    try:\n        os.makedirs(dirpath)\n    except OSError as e:\n        if e.errno == errno.EEXIST:\n            pass\n        else:\n            raise\n\n\ndef download_url(url, root, filename=None, md5=None):\n    \"\"\"Download a file from a url and place it in root.\n\n    Args:\n        url (str): URL to download file from\n        root (str): Directory to place downloaded file in\n        filename (str, optional): Name to save the file under. If None, use the basename of the URL\n        md5 (str, optional): MD5 checksum of the download. If None, do not check\n    \"\"\"\n    from six.moves import urllib\n\n    root = os.path.expanduser(root)\n    if not filename:\n        filename = os.path.basename(url)\n    fpath = os.path.join(root, filename)\n\n    makedir_exist_ok(root)\n\n    # downloads file\n    if os.path.isfile(fpath) and check_integrity(fpath, md5):\n        print('Using downloaded and verified file: ' + fpath)\n    else:\n        try:\n            print('Downloading ' + url + ' to ' + fpath)\n            urllib.request.urlretrieve(\n                url, fpath,\n                reporthook=gen_bar_updater()\n            )\n        except OSError:\n            if url[:5] == 'https':\n                url = url.replace('https:', 'http:')\n                print('Failed download. Trying https -> http instead.'\n                      ' Downloading ' + url + ' to ' + fpath)\n                urllib.request.urlretrieve(\n                    url, fpath,\n                    reporthook=gen_bar_updater()\n                )\n\n\ndef list_dir(root, prefix=False):\n    \"\"\"List all directories at a given root\n\n    Args:\n        root (str): Path to directory whose folders need to be listed\n        prefix (bool, optional): If true, prepends the path to each result, otherwise\n            only returns the name of the directories found\n    \"\"\"\n    root = os.path.expanduser(root)\n    directories = list(\n        filter(\n            lambda p: os.path.isdir(os.path.join(root, p)),\n            os.listdir(root)\n        )\n    )\n\n    if prefix is True:\n        directories = [os.path.join(root, d) for d in directories]\n\n    return directories\n\n\ndef list_files(root, suffix, prefix=False):\n    \"\"\"List all files ending with a suffix at a given root\n\n    Args:\n        root (str): Path to directory whose folders need to be listed\n        suffix (str or tuple): Suffix of the files to match, e.g. 
'.png' or ('.jpg', '.png').\n            It uses the Python \"str.endswith\" method and is passed directly\n        prefix (bool, optional): If true, prepends the path to each result, otherwise\n            only returns the name of the files found\n    \"\"\"\n    root = os.path.expanduser(root)\n    files = list(\n        filter(\n            lambda p: os.path.isfile(os.path.join(root, p)) and p.endswith(suffix),\n            os.listdir(root)\n        )\n    )\n\n    if prefix is True:\n        files = [os.path.join(root, d) for d in files]\n\n    return files\n\n\ndef download_file_from_google_drive(file_id, root, filename=None, md5=None):\n    \"\"\"Download a file from Google Drive and place it in root.\n\n    Args:\n        file_id (str): id of file to be downloaded\n        root (str): Directory to place downloaded file in\n        filename (str, optional): Name to save the file under. If None, use the id of the file.\n        md5 (str, optional): MD5 checksum of the download. If None, do not check\n    \"\"\"\n    # Based on https://stackoverflow.com/questions/38511444/python-download-files-from-google-drive-using-url\n    import requests\n    url = \"https://docs.google.com/uc?export=download\"\n\n    root = os.path.expanduser(root)\n    if not filename:\n        filename = file_id\n    fpath = os.path.join(root, filename)\n\n    makedir_exist_ok(root)\n\n    if os.path.isfile(fpath) and check_integrity(fpath, md5):\n        print('Using downloaded and verified file: ' + fpath)\n    else:\n        session = requests.Session()\n\n        response = session.get(url, params={'id': file_id}, stream=True)\n        token = _get_confirm_token(response)\n\n        if token:\n            params = {'id': file_id, 'confirm': token}\n            response = session.get(url, params=params, stream=True)\n\n        _save_response_content(response, fpath)\n\n\ndef _get_confirm_token(response):\n    for key, value in response.cookies.items():\n        if key.startswith('download_warning'):\n            return value\n\n    return None\n\n\ndef _save_response_content(response, destination, chunk_size=32768):\n    with open(destination, \"wb\") as f:\n        pbar = tqdm(total=None)\n        progress = 0\n        for chunk in response.iter_content(chunk_size):\n            if chunk:  # filter out keep-alive new chunks\n                f.write(chunk)\n                progress += len(chunk)\n                pbar.update(progress - pbar.n)\n        pbar.close()\n"
  },
  {
    "path": "ddpm_exp/datasets/vision.py",
    "content": "import os\nimport torch\nimport torch.utils.data as data\n\n\nclass VisionDataset(data.Dataset):\n    _repr_indent = 4\n\n    def __init__(self, root, transforms=None, transform=None, target_transform=None):\n        if isinstance(root, torch._six.string_classes):\n            root = os.path.expanduser(root)\n        self.root = root\n        \n        has_transforms = transforms is not None\n        has_separate_transform = transform is not None or target_transform is not None\n        if has_transforms and has_separate_transform:\n            raise ValueError(\"Only transforms or transform/target_transform can \"\n                             \"be passed as argument\")\n\n        # for backwards-compatibility\n        self.transform = transform\n        self.target_transform = target_transform\n\n        if has_separate_transform:\n            transforms = StandardTransform(transform, target_transform)\n        self.transforms = transforms\n\n    def __getitem__(self, index):\n        raise NotImplementedError\n\n    def __len__(self):\n        raise NotImplementedError\n\n    def __repr__(self):\n        head = \"Dataset \" + self.__class__.__name__\n        body = [\"Number of datapoints: {}\".format(self.__len__())]\n        if self.root is not None:\n            body.append(\"Root location: {}\".format(self.root))\n        body += self.extra_repr().splitlines()\n        if hasattr(self, 'transform') and self.transform is not None:\n            body += self._format_transform_repr(self.transform,\n                                                \"Transforms: \")\n        if hasattr(self, 'target_transform') and self.target_transform is not None:\n            body += self._format_transform_repr(self.target_transform,\n                                                \"Target transforms: \")\n        lines = [head] + [\" \" * self._repr_indent + line for line in body]\n        return '\\n'.join(lines)\n\n    def _format_transform_repr(self, transform, head):\n        lines = transform.__repr__().splitlines()\n        return ([\"{}{}\".format(head, lines[0])] +\n                [\"{}{}\".format(\" \" * len(head), line) for line in lines[1:]])\n\n    def extra_repr(self):\n        return \"\"\n\n\nclass StandardTransform(object):\n    def __init__(self, transform=None, target_transform=None):\n        self.transform = transform\n        self.target_transform = target_transform\n\n    def __call__(self, input, target):\n        if self.transform is not None:\n            input = self.transform(input)\n        if self.target_transform is not None:\n            target = self.target_transform(target)\n        return input, target\n\n    def _format_transform_repr(self, transform, head):\n        lines = transform.__repr__().splitlines()\n        return ([\"{}{}\".format(head, lines[0])] +\n                [\"{}{}\".format(\" \" * len(head), line) for line in lines[1:]])\n\n    def __repr__(self):\n        body = [self.__class__.__name__]\n        if self.transform is not None:\n            body += self._format_transform_repr(self.transform,\n                                                \"Transform: \")\n        if self.target_transform is not None:\n            body += self._format_transform_repr(self.target_transform,\n                                                \"Target transform: \")\n\n        return '\\n'.join(body)\n"
  },
  {
    "path": "ddpm_exp/draw_ssim_pruned_curve.py",
    "content": "import matplotlib.pyplot as plt\nimport numpy as np\n\nplt.style.use('seaborn-whitegrid')\n\nssim = [0.7881933450698853, 0.8069899082183838, 0.8119480609893799, 0.8162015080451965, 0.8389594554901123, 0.8415904641151428, 0.8398601412773132, 0.8351159691810608, 0.8382353186607361, 0.8380391001701355, 0.8358467221260071, 0.8335589170455933, 0.8339887857437134, 0.8341929316520691, 0.8322316408157349, 0.8351540565490723, 0.8365049958229065, 0.8395034074783325, 0.8369854092597961, 0.8361033797264099]\nloss = [1701.1046142578125, 1480.7890625, 1344.899658203125, 1244.06982421875, 1163.198486328125, 1095.482421875, 1037.3287353515625, 986.4912109375, 941.4730224609375, 901.2019653320312, 864.8944091796875, 831.9296875, 801.815185546875, 774.1593017578125, 748.635498046875, 724.9940795898438, 703.0166015625, 682.5120239257812, 663.32568359375, 645.3184204101562, 628.3749389648438, 612.4005126953125, 597.3052978515625, 583.0064697265625, 569.4385986328125, 556.5473022460938, 544.2760009765625, 532.5770874023438, 521.4034423828125, 510.709716796875, 500.46917724609375, 490.6474914550781, 481.222412109375, 472.17437744140625, 463.4827575683594, 455.11700439453125, 447.05352783203125, 439.2783203125, 431.7735595703125, 424.529541015625, 417.5376281738281, 410.7806701660156, 404.24041748046875, 397.9093017578125, 391.771728515625, 385.82086181640625, 380.05987548828125, 374.47900390625, 369.06243896484375, 363.7962646484375, 358.68304443359375, 353.7085266113281, 348.8709716796875, 344.1724853515625, 339.605224609375, 335.15997314453125, 330.8258972167969, 326.5983581542969, 322.47613525390625, 318.45684814453125, 314.5384826660156, 310.71697998046875, 306.98724365234375, 303.3455810546875, 299.7877197265625, 296.310791015625, 292.9114990234375, 289.58966064453125, 286.3433837890625, 283.16998291015625, 280.06707763671875, 277.03326416015625, 274.06695556640625, 271.161865234375, 268.31640625, 265.52734375, 262.79425048828125, 260.1163330078125, 257.49163818359375, 254.91751098632812, 252.3910369873047, 249.91122436523438, 247.47616577148438, 245.088623046875, 242.750732421875, 240.45819091796875, 238.20266723632812, 235.9857177734375, 233.80799865722656, 231.66690063476562, 229.56248474121094, 227.4961395263672, 225.4654541015625, 223.46958923339844, 221.50381469726562, 219.56704711914062, 217.66116333007812, 215.78857421875, 213.94786071777344, 212.13548278808594, 210.34634399414062, 208.5807342529297, 206.84222412109375, 205.13070678710938, 203.44268798828125, 201.78305053710938, 200.1453399658203, 198.532958984375, 196.9427490234375, 195.3751220703125, 193.82810974121094, 192.3029022216797, 190.79945373535156, 189.3192596435547, 187.86029052734375, 186.42015075683594, 184.99755859375, 183.5935516357422, 182.20855712890625, 180.8433380126953, 179.49468994140625, 178.16262817382812, 176.84832763671875, 175.5514678955078, 174.27105712890625, 173.00845336914062, 171.76220703125, 170.53294372558594, 169.3196563720703, 168.12110900878906, 166.9364471435547, 165.76898193359375, 164.61373901367188, 163.47064208984375, 162.33859252929688, 161.22225952148438, 160.12184143066406, 159.03469848632812, 157.95745849609375, 156.89071655273438, 155.836181640625, 154.79522705078125, 153.76693725585938, 152.75314331054688, 151.74832153320312, 150.75482177734375, 149.77114868164062, 148.79794311523438, 147.83566284179688, 146.88336181640625, 145.94140625, 145.0076141357422, 144.08383178710938, 143.16946411132812, 142.2662353515625, 141.37387084960938, 140.49444580078125, 139.6236572265625, 
138.75778198242188, 137.9005889892578, 137.05108642578125, 136.20909118652344, 135.3750457763672, 134.55081176757812, 133.73622131347656, 132.92990112304688, 132.1318359375, 131.3417510986328, 130.55929565429688, 129.7850341796875, 129.01817321777344, 128.2555694580078, 127.50141906738281, 126.75614929199219, 126.01831817626953, 125.28939819335938, 124.56617736816406, 123.84973907470703, 123.13714599609375, 122.43183135986328, 121.73556518554688, 121.05023956298828, 120.36729431152344, 119.6861801147461, 119.00979614257812, 118.34149932861328, 117.68106079101562, 117.0245361328125, 116.37326049804688, 115.72502899169922, 115.0794677734375, 114.43995666503906, 113.80744934082031, 113.185302734375, 112.56655883789062, 111.9501724243164, 111.33888244628906, 110.731201171875, 110.12779998779297, 109.52902221679688, 108.93549346923828, 108.3429183959961, 107.75627899169922, 107.17346954345703, 106.59703063964844, 106.02497863769531, 105.45870971679688, 104.89581298828125, 104.33368682861328, 103.77556610107422, 103.22441101074219, 102.67940521240234, 102.13814544677734, 101.595947265625, 101.05874633789062, 100.52738189697266, 99.99861145019531, 99.47378540039062, 98.95279693603516, 98.43711853027344, 97.927490234375, 97.41922760009766, 96.91082763671875, 96.40644836425781, 95.91044616699219, 95.42032623291016, 94.93365478515625, 94.44966125488281, 93.96620178222656, 93.48726654052734, 93.01268005371094, 92.54640197753906, 92.08195495605469, 91.62220764160156, 91.16517639160156, 90.71146392822266, 90.26289367675781, 89.81585693359375, 89.37476348876953, 88.93356323242188, 88.49430084228516, 88.05615234375, 87.62488555908203, 87.20310974121094, 86.78509521484375, 86.36715698242188, 85.94837188720703, 85.5322036743164, 85.119384765625, 84.70726013183594, 84.29851531982422, 83.8918685913086, 83.4896469116211, 83.08975219726562, 82.6915283203125, 82.2962417602539, 81.90049743652344, 81.50473022460938, 81.11053466796875, 80.71994018554688, 80.33277130126953, 79.95138549804688, 79.5705337524414, 79.19496154785156, 78.82186889648438, 78.44967651367188, 78.078125, 77.70683288574219, 77.33111572265625, 76.96113586425781, 76.59521484375, 76.23463439941406, 75.8738784790039, 75.5166015625, 75.15583038330078, 74.7983627319336, 74.4422378540039, 74.08897399902344, 73.74169921875, 73.39446258544922, 73.05160522460938, 72.7100830078125, 72.37609100341797, 72.03876495361328, 71.70317077636719, 71.36734008789062, 71.03208923339844, 70.6976318359375, 70.36759185791016, 70.03667449951172, 69.70942687988281, 69.38262176513672, 69.05623626708984, 68.73023223876953, 68.40640258789062, 68.08369445800781, 67.76331329345703, 67.44386291503906, 67.12708282470703, 66.80943298339844, 66.49469757080078, 66.18429565429688, 65.87628173828125, 65.56700897216797, 65.25809478759766, 64.94975280761719, 64.64031219482422, 64.33030700683594, 64.01728057861328, 63.7086181640625, 63.404884338378906, 63.10600662231445, 62.80165481567383, 62.49952697753906, 62.1977653503418, 61.896236419677734, 61.5944709777832, 61.29471969604492, 60.99738311767578, 60.701576232910156, 60.40677261352539, 60.11320877075195, 59.82258224487305, 59.53261947631836, 59.24340057373047, 58.95545959472656, 58.671363830566406, 58.38852310180664, 58.108306884765625, 57.82665252685547, 57.54595947265625, 57.266910552978516, 56.98750686645508, 56.709312438964844, 56.431488037109375, 56.154640197753906, 55.880706787109375, 55.60820007324219, 55.33389663696289, 55.061912536621094, 54.790802001953125, 54.52444076538086, 54.25933837890625, 53.99445724487305, 
53.730342864990234, 53.46611022949219, 53.19966125488281, 52.93572998046875, 52.67394256591797, 52.416324615478516, 52.15863037109375, 51.9014892578125, 51.64102554321289, 51.380699157714844, 51.123252868652344, 50.869956970214844, 50.619815826416016, 50.36848068237305, 50.117496490478516, 49.864952087402344, 49.614585876464844, 49.36606216430664, 49.11790084838867, 48.872718811035156, 48.6287841796875, 48.383766174316406, 48.13976287841797, 47.89812469482422, 47.65973663330078, 47.42310333251953, 47.187564849853516, 46.95360565185547, 46.718772888183594, 46.485862731933594, 46.25577926635742, 46.027374267578125, 45.79834747314453, 45.57139587402344, 45.344879150390625, 45.12104797363281, 44.89805603027344, 44.67631530761719, 44.45355987548828, 44.230438232421875, 44.00713348388672, 43.78421401977539, 43.56409454345703, 43.346736907958984, 43.13029098510742, 42.91291809082031, 42.694793701171875, 42.47801971435547, 42.26348876953125, 42.0494384765625, 41.83710479736328, 41.625648498535156, 41.414955139160156, 41.20560073852539, 40.99818420410156, 40.791481018066406, 40.585731506347656, 40.380332946777344, 40.17467498779297, 39.96947479248047, 39.76425552368164, 39.55950164794922, 39.355892181396484, 39.152591705322266, 38.949493408203125, 38.745704650878906, 38.54241180419922, 38.34136199951172, 38.14094924926758, 37.94009017944336, 37.73867416381836, 37.537044525146484, 37.336360931396484, 37.13530731201172, 36.934104919433594, 36.73320007324219, 36.53413391113281, 36.33588409423828, 36.13719940185547, 35.937522888183594, 35.739013671875, 35.542327880859375, 35.34554672241211, 35.149192810058594, 34.954002380371094, 34.75892639160156, 34.56444549560547, 34.37016296386719, 34.175926208496094, 33.980865478515625, 33.78504943847656, 33.58888244628906, 33.393436431884766, 33.19927215576172, 33.0058708190918, 32.81242752075195, 32.61833953857422, 32.423622131347656, 32.22987365722656, 32.03801345825195, 31.847660064697266, 31.658077239990234, 31.468374252319336, 31.278139114379883, 31.088520050048828, 30.89966583251953, 30.711360931396484, 30.523611068725586, 30.336217880249023, 30.14923095703125, 29.963003158569336, 29.778030395507812, 29.593975067138672, 29.410850524902344, 29.228641510009766, 29.04698371887207, 28.86606216430664, 28.686256408691406, 28.507423400878906, 28.328943252563477, 28.150875091552734, 27.97317123413086, 27.796123504638672, 27.619800567626953, 27.44475555419922, 27.27039337158203, 27.096721649169922, 26.923952102661133, 26.751808166503906, 26.58050537109375, 26.410259246826172, 26.241487503051758, 26.073375701904297, 25.90522003173828, 25.737794876098633, 25.571483612060547, 25.406145095825195, 25.241474151611328, 25.077468872070312, 24.914419174194336, 24.752695083618164, 24.592121124267578, 24.43234634399414, 24.273231506347656, 24.114547729492188, 23.955957412719727, 23.798091888427734, 23.641109466552734, 23.484966278076172, 23.329912185668945, 23.17569923400879, 23.02271842956543, 22.87088966369629, 22.720455169677734, 22.570812225341797, 22.421768188476562, 22.27305793762207, 22.12588119506836, 21.979734420776367, 21.83547592163086, 21.692113876342773, 21.54950714111328, 21.406421661376953, 21.263275146484375, 21.122180938720703, 20.982616424560547, 20.842681884765625, 20.70368766784668, 20.565185546875, 20.427291870117188, 20.289962768554688, 20.153470993041992, 20.017803192138672, 19.883617401123047, 19.750282287597656, 19.616554260253906, 19.483001708984375, 19.3499755859375, 19.217700958251953, 19.086288452148438, 18.956266403198242, 18.826961517333984, 
18.699195861816406, 18.57220458984375, 18.44577407836914, 18.320362091064453, 18.19457244873047, 18.069015502929688, 17.943695068359375, 17.81871795654297, 17.69390106201172, 17.569774627685547, 17.445335388183594, 17.32052230834961, 17.196054458618164, 17.072261810302734, 16.949085235595703, 16.826692581176758, 16.704326629638672, 16.581392288208008, 16.458843231201172, 16.336633682250977, 16.215139389038086, 16.094675064086914, 15.97494888305664, 15.855484962463379, 15.736421585083008, 15.61740493774414, 15.499762535095215, 15.383194923400879, 15.267399787902832, 15.1524658203125, 15.037047386169434, 14.921875, 14.807332992553711, 14.6937837600708, 14.581942558288574, 14.471169471740723, 14.359855651855469, 14.247969627380371, 14.136343002319336, 14.0252103805542, 13.91417121887207, 13.804189682006836, 13.695024490356445, 13.586652755737305, 13.478588104248047, 13.370796203613281, 13.263662338256836, 13.156829833984375, 13.050079345703125, 12.943574905395508, 12.8375244140625, 12.732397079467773, 12.627817153930664, 12.523782730102539, 12.420042037963867, 12.316635131835938, 12.213995933532715, 12.112537384033203, 12.011775970458984, 11.911060333251953, 11.81020736694336, 11.709442138671875, 11.609159469604492, 11.510124206542969, 11.41226577758789, 11.314695358276367, 11.217529296875, 11.120745658874512, 11.024354934692383, 10.928537368774414, 10.833683013916016, 10.739753723144531, 10.646350860595703, 10.553449630737305, 10.460601806640625, 10.368167877197266, 10.275918960571289, 10.184614181518555, 10.094273567199707, 10.00442123413086, 9.915353775024414, 9.826492309570312, 9.737730979919434, 9.649423599243164, 9.56181526184082, 9.474851608276367, 9.3887939453125, 9.303421020507812, 9.218555450439453, 9.134313583374023, 9.050726890563965, 8.967226028442383, 8.884288787841797, 8.80159854888916, 8.71987533569336, 8.638511657714844, 8.55799674987793, 8.477949142456055, 8.398301124572754, 8.319267272949219, 8.240785598754883, 8.16270637512207, 8.084941864013672, 8.00758171081543, 7.930901050567627, 7.854917526245117, 7.779313087463379, 7.70433235168457, 7.629762649536133, 7.555689811706543, 7.482339859008789, 7.409557342529297, 7.336893081665039, 7.26435661315918, 7.192422866821289, 7.1210832595825195, 7.050585746765137, 6.980339527130127, 6.910588264465332, 6.841214179992676, 6.772533893585205, 6.7045183181762695, 6.637387275695801, 6.570642948150635, 6.504342079162598, 6.438445568084717, 6.372898101806641, 6.307552337646484, 6.242652416229248, 6.1783342361450195, 6.114615440368652, 6.05164909362793, 5.989071369171143, 5.926817893981934, 5.864975929260254, 5.8035430908203125, 5.742734909057617, 5.6824235916137695, 5.622620105743408, 5.56337308883667, 5.504632472991943, 5.44635009765625, 5.388323783874512, 5.330789089202881, 5.273573398590088, 5.216763019561768, 5.160672187805176, 5.105111122131348, 5.0502824783325195, 4.995979309082031, 4.941986083984375, 4.887808799743652, 4.834039688110352, 4.780519485473633, 4.727843284606934, 4.675847053527832, 4.624444007873535, 4.573704719543457, 4.523074150085449, 4.472465515136719, 4.4224443435668945, 4.372983932495117, 4.324192047119141, 4.275915145874023, 4.227880954742432, 4.180074691772461, 4.132648944854736, 4.085781097412109, 4.03977632522583, 3.993844747543335, 3.948244094848633, 3.9029383659362793, 3.8578546047210693, 3.8133997917175293, 3.7693405151367188, 3.7258453369140625, 3.682644844055176, 3.6398329734802246, 3.5972371101379395, 3.555053234100342, 3.51322078704834, 3.47196102142334, 3.4310460090637207, 3.3906357288360596, 
3.350480079650879, 3.3109285831451416, 3.27160382270813, 3.2327582836151123, 3.1944308280944824, 3.156402826309204, 3.118509292602539, 3.081109046936035, 3.0437707901000977, 3.006779670715332, 2.970116138458252, 2.9337751865386963, 2.8978099822998047, 2.862208366394043, 2.827075958251953, 2.7925100326538086, 2.75809383392334, 2.7239840030670166, 2.690075159072876, 2.6565277576446533, 2.6233577728271484, 2.59077787399292, 2.5584311485290527, 2.52634859085083, 2.494575262069702, 2.4631528854370117, 2.4322350025177, 2.401658535003662, 2.3714113235473633, 2.341179370880127, 2.311309576034546, 2.281765937805176, 2.2527246475219727, 2.223891496658325, 2.19508695602417, 2.166395664215088, 2.1379075050354004, 2.109847068786621, 2.0825068950653076, 2.055701732635498, 2.0291335582733154, 2.0026559829711914, 1.9766024351119995, 1.9506968259811401, 1.9250354766845703, 1.8999783992767334, 1.8748676776885986, 1.8499674797058105, 1.825405478477478, 1.80095636844635, 1.7769408226013184, 1.7534890174865723, 1.7300043106079102, 1.7066116333007812, 1.6833750009536743, 1.6603283882141113, 1.637768030166626, 1.615763783454895, 1.5940053462982178, 1.5724085569381714, 1.5510432720184326, 1.5298420190811157, 1.5087008476257324, 1.4878756999969482, 1.46720552444458, 1.4469165802001953, 1.42703378200531, 1.4072201251983643, 1.3874483108520508, 1.368030309677124, 1.3487962484359741, 1.3297483921051025, 1.3111402988433838, 1.2925734519958496, 1.2742488384246826, 1.2563354969024658, 1.239056944847107, 1.221704125404358, 1.2041783332824707, 1.1868020296096802, 1.169940710067749, 1.1533498764038086, 1.137148380279541, 1.1206257343292236, 1.1041970252990723, 1.0881215333938599, 1.0724332332611084, 1.0570216178894043, 1.0418777465820312, 1.027139663696289, 1.01233971118927, 0.9975317716598511, 0.982831597328186, 0.9682676196098328, 0.9541558027267456, 0.9400227665901184, 0.9261940717697144, 0.912714421749115, 0.8991694450378418, 0.8859115839004517, 0.8725903630256653, 0.8595502376556396, 0.8467142581939697, 0.8339176774024963, 0.8214421272277832, 0.8089535236358643, 0.7969492673873901, 0.7850332856178284, 0.773577868938446, 0.7619916796684265, 0.750514566898346, 0.7390782833099365, 0.727920651435852, 0.7166476845741272, 0.7057903409004211, 0.6950562596321106, 0.6844556331634521, 0.6740690469741821, 0.6636630296707153, 0.6534810662269592, 0.6433228850364685, 0.633602499961853, 0.6237667202949524, 0.6142684817314148, 0.604796290397644, 0.5954349040985107, 0.5865603685379028, 0.5772913694381714, 0.5682709217071533, 0.5592767000198364, 0.5505585670471191, 0.542014479637146, 0.5336143374443054, 0.5253323316574097, 0.5171076059341431, 0.5091003179550171, 0.5012395977973938, 0.493182897567749, 0.48534587025642395, 0.47781723737716675, 0.4703660011291504, 0.46325814723968506, 0.4561285972595215, 0.4492647051811218, 0.44225677847862244, 0.4352635145187378, 0.4283895790576935, 0.42159581184387207, 0.4147633910179138, 0.40812188386917114, 0.4014328718185425, 0.39500337839126587, 0.3887244760990143, 0.3823932707309723, 0.3762394189834595, 0.37033089995384216, 0.3649292290210724, 0.35929298400878906, 0.3535959720611572, 0.34809982776641846, 0.342424601316452, 0.33691826462745667, 0.3312578797340393, 0.32602420449256897, 0.3210577368736267, 0.3160027861595154, 0.31118467450141907, 0.30630141496658325, 0.3016233444213867, 0.2967444062232971, 0.2918931841850281, 0.2870868444442749, 0.28218063712120056, 0.27778011560440063, 0.2733995318412781, 0.26912352442741394, 0.2651045024394989, 0.2612626552581787, 0.257208913564682, 
0.2527666687965393, 0.24844947457313538, 0.2442183792591095, 0.2404608428478241, 0.23674535751342773, 0.2332439422607422, 0.22945661842823029, 0.22556596994400024, 0.22160732746124268, 0.21786770224571228, 0.21428701281547546, 0.21095798909664154, 0.20757737755775452, 0.20441585779190063, 0.20126710832118988, 0.19809749722480774, 0.19500720500946045, 0.19189956784248352, 0.18880324065685272, 0.18582747876644135, 0.18278327584266663, 0.18014571070671082, 0.17744965851306915, 0.1748645305633545, 0.1720936894416809, 0.16946980357170105, 0.16655337810516357, 0.16428059339523315, 0.16178739070892334, 0.15912079811096191, 0.1564910113811493, 0.15409769117832184, 0.1519279032945633, 0.14959266781806946, 0.14729416370391846, 0.14519476890563965, 0.14297731220722198, 0.14067870378494263, 0.1384662240743637, 0.13619963824748993, 0.13372863829135895, 0.13164782524108887, 0.12953300774097443, 0.12771821022033691, 0.12615008652210236, 0.12415669858455658, 0.1224883422255516, 0.12066398561000824, 0.11871618032455444, 0.11666613817214966, 0.11477735638618469, 0.11323611438274384, 0.11176219582557678, 0.11029437184333801, 0.10888860374689102, 0.10724224895238876, 0.10557863116264343, 0.10384707897901535, 0.10236769914627075, 0.1006280779838562, 0.09903483092784882, 0.09761733561754227, 0.09611119329929352, 0.09474831074476242, 0.09324538707733154, 0.0919327437877655, 0.09046762436628342, 0.08917433023452759, 0.08783926069736481, 0.08677000552415848, 0.08565567433834076, 0.08473670482635498, 0.08394858241081238, 0.08294089138507843, 0.08191481232643127, 0.08059291541576385, 0.07923152297735214, 0.07800912857055664, 0.07714410126209259, 0.07613521814346313, 0.07499660551548004, 0.0738428384065628, 0.0728812888264656, 0.07185603678226471, 0.07075358927249908, 0.06998489797115326, 0.06904087215662003, 0.06817013025283813, 0.0675143226981163, 0.06662875413894653, 0.06585747003555298, 0.06498762965202332, 0.06416307389736176, 0.06331884860992432, 0.06252430379390717, 0.06164319068193436, 0.0608375146985054, 0.06004999577999115, 0.05958651006221771, 0.05899893864989281, 0.05812441185116768, 0.05736687779426575, 0.05654726177453995, 0.05582256615161896, 0.055398836731910706, 0.05473468452692032, 0.05418632924556732, 0.053625207394361496, 0.05313240364193916, 0.05263800173997879, 0.05228545516729355]\nloss = loss / np.max(loss)\nstage = list(range(50, 1000+1, 50))\n\n# Set the font size\nplt.rcParams['font.family'] = 'serif'\nplt.rcParams['font.size'] = 23\n\n# Define custom colors in a dark color scheme\ncolor_blue = '#002147'\ncolor_red = '#8C1515'\nbackground_color = '#F0F0F0'\n\n# Set the figure size\nplt.figure(figsize=(10, 4))\n\n# Set the background color\nplt.rcParams['axes.facecolor'] = background_color\n\n# Plot SSIM with markers\nax1 = plt.gca()\nax1.set_ylabel('SSIM')\nax1.plot(stage, ssim, marker='o', linestyle='-', label='SSIM', color=color_blue)\n\n# Plot Loss with markers\nax2 = plt.twinx()\nax2.set_ylabel('Relative Loss')\nax2.plot(np.arange(len(loss)), loss, label='Relative Loss', color=color_red)\n\n# Set x-axis label\nax1.set_xlabel('Steps')\n\n# Add vertical dashed line and text\nax1.axvline(x=300, color='#777777aa', linestyle='dashed')\nax2.text(300 + 10, 0.35, 'Threshold $\\mathcal{T}$', rotation=90, color='gray', fontsize=12)\n\n# Adjust the alignment of twin axes\nax1.spines['left'].set_color(color_blue)\nax1.spines['right'].set_visible(False)\nax1.yaxis.label.set_color(color_blue)\nax1.tick_params(axis='y', 
colors=color_blue)\nax2.spines['right'].set_color(color_red)\nax2.spines['left'].set_visible(False)\nax2.yaxis.label.set_color(color_red)\nax2.tick_params(axis='y', colors=color_red)\n\n# Set the y-axis limits for Loss\nax2.set_ylim([0, np.max(loss)])\n\n# Show legend in one box\nlines, labels = ax1.get_legend_handles_labels()\nlines2, labels2 = ax2.get_legend_handles_labels()\nax2.legend(lines + lines2, labels + labels2, loc='lower right')\n\n# Set the title\n\n# Add grid\nplt.grid(color='white', linestyle='-', linewidth=0.5)\n\n# Set the layout and padding\nplt.tight_layout(pad=1.0)\n\n# Save the figure\nplt.title(\"CIFAR-10\")\nplt.savefig('run/ssim_loss.pdf', dpi=300)\nplt.savefig('run/ssim_loss.png', dpi=300)\n\nplt.close()\n\n\n\n\n\n\n\n\n\n\nssim = [0.5799465179443359, 0.6136730909347534, 0.6535662412643433, 0.6940350532531738, 0.7176114916801453, 0.7297082543373108, 0.7317281365394592, 0.7415740489959717, 0.731893002986908, 0.7338791489601135, 0.7469093799591064, 0.7643880248069763, 0.7528172135353088, 0.7652474045753479, 0.7676505446434021, 0.7659737467765808, 0.7663692831993103, 0.7921077609062195, 0.7951106429100037, 0.7900822162628174]\nloss = [4539.2880859375, 3768.095947265625, 3357.234375, 3075.648681640625, 2858.8349609375, 2681.1640625, 2529.66748046875, 2396.486083984375, 2277.68798828125, 2170.572265625, 2073.0498046875, 1983.8221435546875, 1901.969482421875, 1826.51416015625, 1756.9361572265625, 1692.67529296875, 1632.9080810546875, 1577.036865234375, 1524.709228515625, 1475.60205078125, 1429.496826171875, 1386.111572265625, 1345.2410888671875, 1306.699462890625, 1270.2772216796875, 1235.816650390625, 1203.143310546875, 1172.09619140625, 1142.5673828125, 1114.4703369140625, 1087.7288818359375, 1062.2257080078125, 1037.8934326171875, 1014.63818359375, 992.4036865234375, 971.1495361328125, 950.8143310546875, 931.31298828125, 912.5927734375, 894.5926513671875, 877.2578125, 860.58203125, 844.5435180664062, 829.0625, 814.1162109375, 799.66796875, 785.71142578125, 772.231201171875, 759.2171630859375, 746.6537475585938, 734.4761962890625, 722.6871948242188, 711.263916015625, 700.1922607421875, 689.4476318359375, 679.0269775390625, 668.9332885742188, 659.1286010742188, 649.6156005859375, 640.3824462890625, 631.3946533203125, 622.6455078125, 614.136962890625, 605.8540649414062, 597.7898559570312, 589.9388427734375, 582.287109375, 574.8184814453125, 567.5428466796875, 560.46044921875, 553.5679931640625, 546.8153076171875, 540.208251953125, 533.760986328125, 527.4842529296875, 521.3709716796875, 515.41015625, 509.5645751953125, 503.8065490722656, 498.16217041015625, 492.656494140625, 487.27862548828125, 482.0224914550781, 476.8771667480469, 471.83258056640625, 466.8781433105469, 462.02825927734375, 457.2860107421875, 452.6307373046875, 448.0614013671875, 443.5903015136719, 439.2196044921875, 434.9393310546875, 430.72430419921875, 426.5743713378906, 422.4966735839844, 418.49322509765625, 414.55938720703125, 410.68951416015625, 406.8865966796875, 403.1524658203125, 399.4714050292969, 395.8497619628906, 392.2903137207031, 388.796630859375, 385.36053466796875, 381.9772033691406, 378.6322326660156, 375.3372497558594, 372.1002197265625, 368.9242858886719, 365.79498291015625, 362.7067565917969, 359.657470703125, 356.65362548828125, 353.6961669921875, 350.78558349609375, 347.9106750488281, 345.0701904296875, 342.27056884765625, 339.5118713378906, 336.7920227050781, 334.1104736328125, 331.4671630859375, 328.8617858886719, 326.29071044921875, 323.758056640625, 321.26202392578125, 
318.79345703125, 316.34747314453125, 313.92901611328125, 311.5457458496094, 309.1994934082031, 306.8837890625, 304.5982360839844, 302.3414611816406, 300.10614013671875, 297.8866271972656, 295.6922607421875, 293.5301513671875, 291.3970031738281, 289.29144287109375, 287.2158203125, 285.16796875, 283.1424560546875, 281.13409423828125, 279.1377258300781, 277.15594482421875, 275.20147705078125, 273.274658203125, 271.3721618652344, 269.4991455078125, 267.65350341796875, 265.8316650390625, 264.02655029296875, 262.23492431640625, 260.45770263671875, 258.69610595703125, 256.95526123046875, 255.234130859375, 253.54019165039062, 251.86520385742188, 250.21116638183594, 248.5689239501953, 246.93984985351562, 245.31585693359375, 243.70863342285156, 242.11697387695312, 240.54721069335938, 238.99844360351562, 237.47918701171875, 235.97952270507812, 234.4829559326172, 232.99127197265625, 231.49490356445312, 230.009033203125, 228.55003356933594, 227.11834716796875, 225.71218872070312, 224.3217010498047, 222.94598388671875, 221.5821990966797, 220.22264099121094, 218.86444091796875, 217.51229858398438, 216.1671600341797, 214.83935546875, 213.5290985107422, 212.23281860351562, 210.94717407226562, 209.6590118408203, 208.39019775390625, 207.14077758789062, 205.90513610839844, 204.6878662109375, 203.48330688476562, 202.28472900390625, 201.0923309326172, 199.90463256835938, 198.7206268310547, 197.548828125, 196.3909912109375, 195.24566650390625, 194.1101531982422, 192.98435974121094, 191.87210083007812, 190.7747344970703, 189.68112182617188, 188.5771484375, 187.48060607910156, 186.40255737304688, 185.33895874023438, 184.28480529785156, 183.2340545654297, 182.18869018554688, 181.15188598632812, 180.12258911132812, 179.10223388671875, 178.09197998046875, 177.0926513671875, 176.10470581054688, 175.13047790527344, 174.16094970703125, 173.19235229492188, 172.22509765625, 171.26443481445312, 170.31271362304688, 169.37289428710938, 168.43927001953125, 167.5128173828125, 166.6007080078125, 165.69393920898438, 164.78707885742188, 163.87860107421875, 162.9747772216797, 162.08349609375, 161.205810546875, 160.3376922607422, 159.48043823242188, 158.62545776367188, 157.76852416992188, 156.90916442871094, 156.06423950195312, 155.22775268554688, 154.40029907226562, 153.5887451171875, 152.77359008789062, 151.96192932128906, 151.1425018310547, 150.33209228515625, 149.52468872070312, 148.72679138183594, 147.93780517578125, 147.1558837890625, 146.38275146484375, 145.6148681640625, 144.84906005859375, 144.08462524414062, 143.3209991455078, 142.56076049804688, 141.80752563476562, 141.05966186523438, 140.32247924804688, 139.5969696044922, 138.8789520263672, 138.15431213378906, 137.4319305419922, 136.71456909179688, 136.001953125, 135.29641723632812, 134.59725952148438, 133.90882873535156, 133.21835327148438, 132.53102111816406, 131.8437042236328, 131.16131591796875, 130.48312377929688, 129.80667114257812, 129.1376953125, 128.47406005859375, 127.81607055664062, 127.16371154785156, 126.51216125488281, 125.86033630371094, 125.21190643310547, 124.56687927246094, 123.92790985107422, 123.29476165771484, 122.66639709472656, 122.04109954833984, 121.41598510742188, 120.79107666015625, 120.17152404785156, 119.55781555175781, 118.95069885253906, 118.35137176513672, 117.76033020019531, 117.16847229003906, 116.5713119506836, 115.96839904785156, 115.3705825805664, 114.77892303466797, 114.19495391845703, 113.61700439453125, 113.04344177246094, 112.47366333007812, 111.90603637695312, 111.33882141113281, 110.76946258544922, 110.20514678955078, 
109.64720916748047, 109.09403228759766, 108.5472183227539, 108.00634002685547, 107.47209167480469, 106.93167877197266, 106.3891830444336, 105.84819030761719, 105.31497192382812, 104.78697967529297, 104.2626953125, 103.74164581298828, 103.2225341796875, 102.70263671875, 102.18333435058594, 101.66871643066406, 101.15846252441406, 100.65023803710938, 100.14640808105469, 99.64842987060547, 99.1537094116211, 98.66120910644531, 98.17019653320312, 97.68133544921875, 97.19447326660156, 96.70794677734375, 96.22498321533203, 95.74699401855469, 95.27214050292969, 94.80259704589844, 94.33360290527344, 93.86566162109375, 93.39513397216797, 92.92683410644531, 92.46549224853516, 92.01234436035156, 91.56328582763672, 91.11687469482422, 90.66963195800781, 90.21475982666016, 89.76103210449219, 89.30477905273438, 88.85597229003906, 88.41807556152344, 87.98734283447266, 87.558837890625, 87.13265228271484, 86.70266723632812, 86.26994323730469, 85.84093475341797, 85.41881561279297, 85.00090026855469, 84.590087890625, 84.18545532226562, 83.78675842285156, 83.37939453125, 82.96934509277344, 82.55995178222656, 82.15340423583984, 81.7559814453125, 81.36235046386719, 80.9716796875, 80.58477020263672, 80.20170593261719, 79.81788635253906, 79.4328842163086, 79.04986572265625, 78.66758728027344, 78.28589630126953, 77.90595245361328, 77.52804565429688, 77.15101623535156, 76.77655792236328, 76.40097045898438, 76.02364349365234, 75.64501953125, 75.26689147949219, 74.8895263671875, 74.51441192626953, 74.14277648925781, 73.77396392822266, 73.40673828125, 73.03805541992188, 72.6688232421875, 72.29788208007812, 71.93028259277344, 71.56787109375, 71.20892333984375, 70.85118103027344, 70.49273681640625, 70.13478088378906, 69.77824401855469, 69.4261245727539, 69.07501220703125, 68.72384643554688, 68.37470245361328, 68.0278091430664, 67.68260192871094, 67.33564758300781, 66.98927307128906, 66.64806365966797, 66.31043243408203, 65.97486877441406, 65.63951110839844, 65.3048095703125, 64.97119140625, 64.63858795166016, 64.30635070800781, 63.974857330322266, 63.64590072631836, 63.31915283203125, 62.994956970214844, 62.67194747924805, 62.350990295410156, 62.02912521362305, 61.705020904541016, 61.37890625, 61.056907653808594, 60.73951721191406, 60.42740249633789, 60.118309020996094, 59.811317443847656, 59.502479553222656, 59.189208984375, 58.87731170654297, 58.56908416748047, 58.265113830566406, 57.964420318603516, 57.6660041809082, 57.368896484375, 57.0717658996582, 56.77558898925781, 56.478187561035156, 56.17683029174805, 55.87291717529297, 55.571327209472656, 55.27485656738281, 54.98467254638672, 54.699951171875, 54.41563034057617, 54.129852294921875, 53.84320068359375, 53.55724334716797, 53.274314880371094, 52.993202209472656, 52.71284484863281, 52.43351745605469, 52.15589904785156, 51.879058837890625, 51.6051025390625, 51.334014892578125, 51.0655403137207, 50.798011779785156, 50.53080749511719, 50.26268005371094, 49.99579620361328, 49.73133850097656, 49.46936798095703, 49.208396911621094, 48.94771957397461, 48.687599182128906, 48.42808532714844, 48.169952392578125, 47.91440200805664, 47.6610107421875, 47.409912109375, 47.1600341796875, 46.91320037841797, 46.66697692871094, 46.419395446777344, 46.16619873046875, 45.91246032714844, 45.66108322143555, 45.413063049316406, 45.16651153564453, 44.921051025390625, 44.67573547363281, 44.432029724121094, 44.189178466796875, 43.94789123535156, 43.70808410644531, 43.470550537109375, 43.234169006347656, 42.99854278564453, 42.762184143066406, 42.523956298828125, 42.2872314453125, 
42.054325103759766, 41.82463073730469, 41.59766387939453, 41.369163513183594, 41.13751983642578, 40.90327453613281, 40.668785095214844, 40.43696212768555, 40.209022521972656, 39.983795166015625, 39.75868225097656, 39.532135009765625, 39.304534912109375, 39.078147888183594, 38.85414123535156, 38.63133239746094, 38.40913391113281, 38.18644714355469, 37.964088439941406, 37.74298858642578, 37.524375915527344, 37.30720520019531, 37.09025955200195, 36.87240219116211, 36.65234375, 36.431217193603516, 36.21006774902344, 35.992942810058594, 35.77734375, 35.56315612792969, 35.34864044189453, 35.13450622558594, 34.920257568359375, 34.706077575683594, 34.49102783203125, 34.27655029296875, 34.06257247924805, 33.84934616088867, 33.63843536376953, 33.429710388183594, 33.22266387939453, 33.014068603515625, 32.805091857910156, 32.597267150878906, 32.39238739013672, 32.19000244140625, 31.989036560058594, 31.787109375, 31.58185577392578, 31.37429428100586, 31.16775894165039, 30.96398162841797, 30.763967514038086, 30.566076278686523, 30.369342803955078, 30.171037673950195, 29.970752716064453, 29.76913070678711, 29.568056106567383, 29.370765686035156, 29.175628662109375, 28.980947494506836, 28.78591537475586, 28.58932876586914, 28.39220428466797, 28.19733428955078, 28.00490951538086, 27.81519317626953, 27.626718521118164, 27.439435958862305, 27.252986907958984, 27.067100524902344, 26.88239288330078, 26.699377059936523, 26.517051696777344, 26.334407806396484, 26.151626586914062, 25.969593048095703, 25.789051055908203, 25.610992431640625, 25.433944702148438, 25.257705688476562, 25.081737518310547, 24.905364990234375, 24.730724334716797, 24.556011199951172, 24.38326644897461, 24.210472106933594, 24.03833770751953, 23.867185592651367, 23.69612693786621, 23.526260375976562, 23.35738754272461, 23.189224243164062, 23.020156860351562, 22.851537704467773, 22.68604850769043, 22.52411651611328, 22.364612579345703, 22.205974578857422, 22.04595375061035, 21.883800506591797, 21.722026824951172, 21.5626163482666, 21.407323837280273, 21.252456665039062, 21.09724235534668, 20.938791275024414, 20.779685974121094, 20.62162971496582, 20.4669189453125, 20.3142147064209, 20.162883758544922, 20.01047706604004, 19.85685157775879, 19.703975677490234, 19.553754806518555, 19.40742301940918, 19.264314651489258, 19.122966766357422, 18.97968101501465, 18.834999084472656, 18.690229415893555, 18.546415328979492, 18.402610778808594, 18.258975982666016, 18.11440658569336, 17.970054626464844, 17.827861785888672, 17.68832015991211, 17.550716400146484, 17.415355682373047, 17.280445098876953, 17.14678192138672, 17.014076232910156, 16.883808135986328, 16.753719329833984, 16.622791290283203, 16.488920211791992, 16.35369300842285, 16.22126007080078, 16.095569610595703, 15.974079132080078, 15.853219032287598, 15.72891616821289, 15.59947395324707, 15.469198226928711, 15.342164993286133, 15.219317436218262, 15.100605964660645, 14.980583190917969, 14.860252380371094, 14.739171028137207, 14.619288444519043, 14.502647399902344, 14.389965057373047, 14.27832317352295, 14.165441513061523, 14.051509857177734, 13.93765640258789, 13.826026916503906, 13.715468406677246, 13.607650756835938, 13.496648788452148, 13.383329391479492, 13.267261505126953, 13.151922225952148, 13.038779258728027, 12.92779541015625, 12.816251754760742, 12.701602935791016, 12.584444999694824, 12.468511581420898, 12.354952812194824, 12.244566917419434, 12.135894775390625, 12.029569625854492, 11.925186157226562, 11.823380470275879, 11.723055839538574, 11.622727394104004, 11.520914077758789, 
11.416922569274902, 11.312165260314941, 11.208732604980469, 11.10700798034668, 11.007889747619629, 10.909204483032227, 10.809144020080566, 10.707747459411621, 10.605745315551758, 10.506563186645508, 10.410165786743164, 10.314408302307129, 10.218300819396973, 10.121312141418457, 10.023993492126465, 9.928132057189941, 9.834404945373535, 9.744147300720215, 9.653356552124023, 9.561277389526367, 9.468231201171875, 9.374344825744629, 9.282003402709961, 9.192495346069336, 9.103446960449219, 9.015043258666992, 8.926200866699219, 8.838485717773438, 8.751630783081055, 8.665960311889648, 8.580838203430176, 8.494034767150879, 8.40644645690918, 8.319029808044434, 8.233461380004883, 8.149580001831055, 8.068504333496094, 7.987727165222168, 7.907271385192871, 7.826296806335449, 7.7446088790893555, 7.663700103759766, 7.583346843719482, 7.5041728019714355, 7.424949645996094, 7.34641170501709, 7.268475532531738, 7.190487861633301, 7.113183975219727, 7.035463333129883, 6.957459449768066, 6.880485534667969, 6.805814743041992, 6.733950138092041, 6.664864540100098, 6.596710205078125, 6.526113986968994, 6.453292369842529, 6.379787445068359, 6.308520317077637, 6.239605903625488, 6.171825408935547, 6.103394985198975, 6.033662796020508, 5.964019775390625, 5.89523458480835, 5.827916145324707, 5.761248588562012, 5.695002555847168, 5.628237724304199, 5.562036991119385, 5.497188568115234, 5.434276580810547, 5.373710632324219, 5.314513683319092, 5.255434036254883, 5.19560432434082, 5.135910987854004, 5.076885223388672, 5.018493175506592, 4.960127353668213, 4.900954246520996, 4.841784477233887, 4.784269332885742, 4.729909420013428, 4.67695951461792, 4.624540328979492, 4.571746349334717, 4.516648292541504, 4.46004056930542, 4.403772354125977, 4.347835063934326, 4.293648719787598, 4.2399163246154785, 4.187976837158203, 4.13656759262085, 4.086839199066162, 4.038580894470215, 3.991074323654175, 3.9420456886291504, 3.892064094543457, 3.8420252799987793, 3.793379306793213, 3.7477054595947266, 3.704021453857422, 3.6613032817840576, 3.6168198585510254, 3.5706210136413574, 3.5225014686584473, 3.4756243228912354, 3.4306745529174805, 3.387819528579712, 3.3469200134277344, 3.306817054748535, 3.266785144805908, 3.227121353149414, 3.188246726989746, 3.1500604152679443, 3.1109695434570312, 3.072086811065674, 3.0339338779449463, 2.9974265098571777, 2.9611244201660156, 2.9252705574035645, 2.887768268585205, 2.846853494644165, 2.8051815032958984, 2.765190601348877, 2.728276491165161, 2.6949098110198975, 2.663076639175415, 2.630977153778076, 2.5972986221313477, 2.5623912811279297, 2.5293757915496826, 2.4986140727996826, 2.4683361053466797, 2.4374566078186035, 2.4048168659210205, 2.371562957763672, 2.3394112586975098, 2.3097755908966064, 2.2830123901367188, 2.2568652629852295, 2.2302122116088867, 2.2025556564331055, 2.1735823154449463, 2.1443448066711426, 2.1145293712615967, 2.0841856002807617, 2.0533528327941895, 2.0237834453582764, 1.996132254600525, 1.9719631671905518, 1.9500808715820312, 1.9278086423873901, 1.9034643173217773, 1.8765110969543457, 1.8477627038955688, 1.8204681873321533, 1.7961366176605225, 1.7744765281677246, 1.754655361175537, 1.7349741458892822, 1.7141201496124268, 1.6927661895751953, 1.6689279079437256, 1.6446927785873413, 1.619302749633789, 1.5943386554718018, 1.5706288814544678, 1.5495566129684448, 1.5302032232284546, 1.5114333629608154, 1.491368293762207, 1.469764232635498, 1.446757435798645, 1.424503207206726, 1.4041261672973633, 1.3876874446868896, 1.37278151512146, 1.3566217422485352, 1.3387857675552368, 
1.3189970254898071, 1.2991602420806885, 1.2807953357696533, 1.2628533840179443, 1.2454438209533691, 1.2298157215118408, 1.215038776397705, 1.2016723155975342, 1.1890926361083984, 1.176241159439087, 1.1628756523132324, 1.1477210521697998, 1.1322383880615234, 1.118009328842163, 1.1052467823028564, 1.0926997661590576, 1.0799956321716309, 1.0650734901428223, 1.0481101274490356, 1.0310444831848145, 1.0148855447769165, 1.0002671480178833, 0.9865742921829224, 0.9726752042770386, 0.9584107398986816, 0.9439973831176758, 0.9304401278495789, 0.9194836616516113, 0.910431444644928, 0.901872992515564, 0.8922662138938904, 0.8820143938064575, 0.8693972229957581, 0.8558073043823242, 0.8431190252304077, 0.8318266868591309, 0.8213794827461243, 0.8131312727928162, 0.8069919347763062, 0.8026072978973389, 0.7977644801139832, 0.7888401746749878, 0.7746130228042603, 0.7577764391899109, 0.7395559549331665, 0.7252950072288513, 0.7153338193893433, 0.7092757225036621, 0.7042636275291443, 0.6987306475639343, 0.6903582215309143, 0.6798433065414429, 0.6693308353424072, 0.6607911586761475, 0.6553004384040833, 0.6516624689102173, 0.6483136415481567, 0.6432492136955261, 0.637688159942627, 0.6315328478813171, 0.6241682767868042, 0.6156589984893799, 0.606623649597168, 0.5980398654937744, 0.5924094319343567, 0.5872237682342529, 0.5824892520904541, 0.5762549638748169, 0.5678626298904419, 0.5592249631881714, 0.5521055459976196, 0.5471092462539673, 0.5430928468704224, 0.5402786731719971, 0.5361867547035217, 0.530924916267395, 0.5247969627380371, 0.5195049047470093, 0.5150901079177856, 0.5105984210968018, 0.5053007006645203, 0.49872955679893494, 0.49299514293670654, 0.48698410391807556, 0.48231664299964905, 0.4776288866996765, 0.4725630283355713, 0.46631789207458496, 0.4607853293418884, 0.4565424621105194, 0.4528442919254303, 0.4492824077606201, 0.44360923767089844, 0.436774343252182, 0.4306948781013489, 0.42571407556533813, 0.42529743909835815, 0.4260627329349518, 0.42652514576911926, 0.42327821254730225, 0.4168250560760498, 0.40701374411582947, 0.39881962537765503, 0.392905592918396, 0.38991519808769226, 0.388450026512146, 0.38770100474357605, 0.3859521150588989, 0.3823249340057373, 0.3760799765586853, 0.3682119846343994, 0.35995787382125854, 0.35440850257873535, 0.35170888900756836, 0.3540180027484894, 0.35911762714385986, 0.36555320024490356, 0.36951082944869995, 0.368580162525177, 0.3634859323501587, 0.3560991585254669, 0.3505817651748657, 0.34617334604263306, 0.34385979175567627, 0.341214120388031, 0.33820509910583496, 0.33600670099258423, 0.33432748913764954, 0.3346988558769226, 0.334031879901886, 0.33080244064331055, 0.32728511095046997, 0.3241499662399292, 0.3206929564476013, 0.3173835873603821, 0.31362858414649963, 0.3082450032234192, 0.3012043535709381]\n\nloss = loss / np.max(loss)\nstage = list(range(50, 1000+1, 50))\n\n# Define custom colors in a dark color scheme\ncolor_blue = '#002147'\ncolor_red = '#8C1515'\n\n# Set the figure size\nplt.figure(figsize=(10, 4))\n\n# Plot SSIM with markers\nax1 = plt.gca()\nax1.set_ylabel('SSIM')\nax1.plot(stage, ssim, marker='o', linestyle='-', label='SSIM', color=color_blue)\n\n# Plot Loss with markers\nax2 = plt.twinx()\nax2.set_ylabel('Relative Loss')\nax2.plot(np.arange(len(loss)), loss, label='Relative Loss', color=color_red)\n\n# Set x-axis label\nax1.set_xlabel('Steps')\n\nax1.axvline(x=950, color='#777777aa', linestyle='dashed')\nax2.text(950 + 10, 0.35, 'Threshold $\\mathcal{T}$', rotation=90, color='gray', fontsize=12)\n\nax2.set_ylim([0, np.max(loss)])\n\n# Show legend 
in one box\nlines, labels = ax1.get_legend_handles_labels()\nlines2, labels2 = ax2.get_legend_handles_labels()\nax2.legend(lines + lines2, labels + labels2, loc='lower right')\n\n\n# Adjust the alignment of twin axes\nax1.spines['left'].set_color(color_blue)\nax1.spines['right'].set_visible(False)\nax1.yaxis.label.set_color(color_blue)\nax1.tick_params(axis='y', colors=color_blue)\nax2.spines['right'].set_color(color_red)\nax2.spines['left'].set_visible(False)\nax2.yaxis.label.set_color(color_red)\nax2.tick_params(axis='y', colors=color_red)\n# Set the title\n\n# Add grid\nplt.grid(color='white', linestyle='-', linewidth=0.5)\n\n# Set the layout and padding\nplt.tight_layout(pad=1.0)\n\nplt.title(\"CelebA-HQ\")\n# Save the figure\nplt.savefig('run/ssim_loss2.pdf', dpi=300)\nplt.savefig('run/ssim_loss2.png', dpi=300)\n\nplt.close()\n\n"
  },
  {
    "path": "ddpm_exp/extract_cifar10.py",
    "content": "import os\nimport torchvision\nfrom torchvision.datasets import CIFAR10\nfrom tqdm import tqdm\n\n# Define the path to the folder where the images will be saved\nsave_path = 'data/cifar10/images'\n\n# Create the folder if it doesn't exist\nif not os.path.exists(save_path):\n    os.makedirs(save_path)\n\n# Load the CIFAR10 dataset\ndataset = CIFAR10(root='data/cifar10', train=True, download=True)\n\n# Loop through the dataset and save each image to the folder\nfor i in tqdm(range(len(dataset))):\n    image, label = dataset[i]\n    image_name = f'{i}.png'\n    image_path = os.path.join(save_path, image_name)\n    image.save(image_path)"
  },
  {
    "path": "ddpm_exp/fid_score.py",
    "content": "\"\"\"Calculates the Frechet Inception Distance (FID) to evalulate GANs\n\nThe FID metric calculates the distance between two distributions of images.\nTypically, we have summary statistics (mean & covariance matrix) of one\nof these distributions, while the 2nd distribution is given by a GAN.\n\nWhen run as a stand-alone program, it compares the distribution of\nimages that are stored as PNG/JPEG at a specified location with a\ndistribution given by summary statistics (in pickle format).\n\nThe FID is calculated by assuming that X_1 and X_2 are the activations of\nthe pool_3 layer of the inception net for generated samples and real world\nsamples respectively.\n\nSee --help to see further details.\n\nCode apapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead\nof Tensorflow\n\nCopyright 2018 Institute of Bioinformatics, JKU Linz\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n   http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\nimport os\nimport pathlib\nfrom argparse import ArgumentDefaultsHelpFormatter, ArgumentParser\n\nimport numpy as np\nimport torch\nimport torchvision.transforms as TF\nfrom PIL import Image\nfrom scipy import linalg\nfrom torch.nn.functional import adaptive_avg_pool2d\n\ntry:\n    from tqdm import tqdm\nexcept ImportError:\n    # If tqdm is not available, provide a mock version of it\n    def tqdm(x):\n        return x\n\nfrom inception import InceptionV3\n\nparser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)\nparser.add_argument('--batch-size', type=int, default=50,\n                    help='Batch size to use')\nparser.add_argument('--dataset_name', type=str, default=None)\nparser.add_argument('--num-workers', type=int,\n                    help=('Number of processes to use for data loading. '\n                          'Defaults to `min(8, num_cpus)`'))\nparser.add_argument('--device', type=str, default=None,\n                    help='Device to use. Like cuda, cuda:0 or cpu')\nparser.add_argument('--dims', type=int, default=2048,\n                    choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),\n                    help=('Dimensionality of Inception features to use. '\n                          'By default, uses pool3 features'))\nparser.add_argument('--num_samples', type=int, default=None,\n                    help=('Number of samples for FID estimation'))\nparser.add_argument('--res', type=int, default=None,\n                    help=('Resolutions of samples for FID estimation'))\nparser.add_argument('--save-stats', action='store_true',\n                    help=('Generate an npz archive from a directory of samples. 
'\n                          'The first path is used as input and the second as output.'))\n\nparser.add_argument('path', type=str, nargs=2,\n                    help=('Paths to the generated images or '\n                          'to .npz statistic files'))\n\n\nIMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',\n                    'tif', 'tiff', 'webp'}\n\n\nclass ImagePathDataset(torch.utils.data.Dataset):\n    def __init__(self, files, transforms=None):\n        self.files = files\n        self.transforms = transforms\n\n    def __len__(self):\n        return len(self.files)\n\n    def __getitem__(self, i):\n        path = self.files[i]\n        img = Image.open(path).convert('RGB')\n        if self.transforms is not None:\n            img = self.transforms(img)\n        return img\n\n\ndef get_activations(files, model, batch_size=50, dims=2048, device='cpu',\n                    num_workers=1, res=None, dataset_name=None):\n    \"\"\"Calculates the activations of the pool_3 layer for all images.\n\n    Params:\n    -- files       : List of image files paths\n    -- model       : Instance of inception model\n    -- batch_size  : Batch size of images for the model to process at once.\n                     Make sure that the number of samples is a multiple of\n                     the batch size, otherwise some samples are ignored. This\n                     behavior is retained to match the original FID score\n                     implementation.\n    -- dims        : Dimensionality of features returned by Inception\n    -- device      : Device to run calculations\n    -- num_workers : Number of parallel dataloader workers\n\n    Returns:\n    -- A numpy array of dimension (num images, dims) that contains the\n       activations of the given tensor when feeding inception with the\n       query tensor.\n    \"\"\"\n    model.eval()\n\n    if batch_size > len(files):\n        print(('Warning: batch size is bigger than the data size. 
'\n               'Setting batch size to data size'))\n        batch_size = len(files)\n\n    if res is None:\n        trans = TF.ToTensor()\n    else:\n        if dataset_name == 'celeba':\n            from datasets import Crop\n            cx = 89\n            cy = 121\n            x1 = cy - 64\n            x2 = cy + 64\n            y1 = cx - 64\n            y2 = cx + 64\n            trans = TF.Compose([\n                        Crop(x1, x2, y1, y2),\n                        TF.Resize(res),\n                        TF.ToTensor(),\n            ])\n        else:\n            trans = TF.Compose([\n                TF.Resize(res),\n                TF.CenterCrop(res),\n                TF.ToTensor()\n            ])\n    \n    dataset = ImagePathDataset(files, transforms=trans)\n    dataloader = torch.utils.data.DataLoader(dataset,\n                                             batch_size=batch_size,\n                                             shuffle=False,\n                                             drop_last=False,\n                                             num_workers=num_workers)\n\n    pred_arr = np.empty((len(files), dims))\n\n    start_idx = 0\n\n    for batch in tqdm(dataloader):\n        batch = batch.to(device)\n\n        with torch.no_grad():\n            pred = model(batch)[0]\n\n        # If model output is not scalar, apply global spatial average pooling.\n        # This happens if you choose a dimensionality not equal 2048.\n        if pred.size(2) != 1 or pred.size(3) != 1:\n            pred = adaptive_avg_pool2d(pred, output_size=(1, 1))\n\n        pred = pred.squeeze(3).squeeze(2).cpu().numpy()\n\n        pred_arr[start_idx:start_idx + pred.shape[0]] = pred\n\n        start_idx = start_idx + pred.shape[0]\n\n    return pred_arr\n\n\ndef calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):\n    \"\"\"Numpy implementation of the Frechet Distance.\n    The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)\n    and X_2 ~ N(mu_2, C_2) is\n            d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).\n\n    Stable version by Dougal J. 
Sutherland.\n\n    Params:\n    -- mu1   : Numpy array containing the activations of a layer of the\n               inception net (like returned by the function 'get_predictions')\n               for generated samples.\n    -- mu2   : The sample mean over activations, precalculated on an\n               representative data set.\n    -- sigma1: The covariance matrix over activations for generated samples.\n    -- sigma2: The covariance matrix over activations, precalculated on an\n               representative data set.\n\n    Returns:\n    --   : The Frechet Distance.\n    \"\"\"\n\n    mu1 = np.atleast_1d(mu1)\n    mu2 = np.atleast_1d(mu2)\n\n    sigma1 = np.atleast_2d(sigma1)\n    sigma2 = np.atleast_2d(sigma2)\n\n    assert mu1.shape == mu2.shape, \\\n        'Training and test mean vectors have different lengths'\n    assert sigma1.shape == sigma2.shape, \\\n        'Training and test covariances have different dimensions'\n\n    diff = mu1 - mu2\n\n    # Product might be almost singular\n    covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)\n    if not np.isfinite(covmean).all():\n        msg = ('fid calculation produces singular product; '\n               'adding %s to diagonal of cov estimates') % eps\n        print(msg)\n        offset = np.eye(sigma1.shape[0]) * eps\n        covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))\n\n    # Numerical error might give slight imaginary component\n    if np.iscomplexobj(covmean):\n        if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):\n            m = np.max(np.abs(covmean.imag))\n            raise ValueError('Imaginary component {}'.format(m))\n        covmean = covmean.real\n\n    tr_covmean = np.trace(covmean)\n\n    return (diff.dot(diff) + np.trace(sigma1)\n            + np.trace(sigma2) - 2 * tr_covmean)\n\n\ndef calculate_activation_statistics(files, model, batch_size=50, dims=2048,\n                                    device='cpu', num_workers=1, res=None, dataset_name=None):\n    \"\"\"Calculation of the statistics used by the FID.\n    Params:\n    -- files       : List of image files paths\n    -- model       : Instance of inception model\n    -- batch_size  : The images numpy array is split into batches with\n                     batch size batch_size. 
A reasonable batch size\n                     depends on the hardware.\n    -- dims        : Dimensionality of features returned by Inception\n    -- device      : Device to run calculations\n    -- num_workers : Number of parallel dataloader workers\n\n    Returns:\n    -- mu    : The mean over samples of the activations of the pool_3 layer of\n               the inception model.\n    -- sigma : The covariance matrix of the activations of the pool_3 layer of\n               the inception model.\n    \"\"\"\n    act = get_activations(files, model, batch_size, dims, device, num_workers, res=res, dataset_name=dataset_name)\n    mu = np.mean(act, axis=0)\n    sigma = np.cov(act, rowvar=False)\n    return mu, sigma\n\n\ndef compute_statistics_of_path(path, model, batch_size, dims, device,\n                               num_workers=1, num_samples=None, res=None, dataset_name=None):\n    if path.endswith('.npz'):\n        with np.load(path) as f:\n            m, s = f['mu'][:], f['sigma'][:]\n    else:\n        path = pathlib.Path(path)\n\n        files = sorted([file for ext in IMAGE_EXTENSIONS\n                       for file in path.glob('**/*.{}'.format(ext))])\n        if num_samples is not None:\n            #import random\n            #files = random.sample(files, num_samples)\n            files = files[:num_samples]\n        print(\"Found %d files.\" % len(files))\n        m, s = calculate_activation_statistics(files, model, batch_size,\n                                               dims, device, num_workers, res=res, dataset_name=dataset_name)\n\n    return m, s\n\n\ndef calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1, num_samples=None, res=None, dataset_name=None):\n    \"\"\"Calculates the FID of two paths\"\"\"\n    for p in paths:\n        if not os.path.exists(p):\n            raise RuntimeError('Invalid path: %s' % p)\n\n    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]\n\n    model = InceptionV3([block_idx]).to(device)\n\n    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n    m2, s2 = compute_statistics_of_path(paths[1], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n    fid_value = calculate_frechet_distance(m1, s1, m2, s2)\n\n    return fid_value\n\n\ndef save_fid_stats(paths, batch_size, device, dims, num_workers=1, num_samples=None, res=None, dataset_name=None):\n    \"\"\"Calculates the FID of two paths\"\"\"\n    if not os.path.exists(paths[0]):\n        raise RuntimeError('Invalid path: %s' % paths[0])\n\n    if os.path.exists(paths[1]):\n        raise RuntimeError('Existing output file: %s' % paths[1])\n\n    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]\n\n    model = InceptionV3([block_idx]).to(device)\n\n    print(f\"Saving statistics for {paths[0]}\")\n\n    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n\n    np.savez_compressed(paths[1], mu=m1, sigma=s1)\n\n\ndef main():\n    args = parser.parse_args()\n\n    if args.device is None:\n        device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')\n    else:\n        device = torch.device(args.device)\n\n    if args.num_workers is None:\n        try:\n            num_cpus = 
len(os.sched_getaffinity(0))\n        except AttributeError:\n            # os.sched_getaffinity is not available under Windows, use\n            # os.cpu_count instead (which may not return the *available* number\n            # of CPUs).\n            num_cpus = os.cpu_count()\n\n        num_workers = min(num_cpus, 8) if num_cpus is not None else 0\n    else:\n        num_workers = args.num_workers\n\n    if args.save_stats:\n        save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers, num_samples=args.num_samples, res=args.res, dataset_name=args.dataset_name)\n        return\n\n    fid_value = calculate_fid_given_paths(args.path,\n                                          args.batch_size,\n                                          device,\n                                          args.dims,\n                                          num_workers,\n                                          num_samples=args.num_samples,\n                                          res = args.res, dataset_name=args.dataset_name)\n    print('FID: ', fid_value)\n\n\nif __name__ == '__main__':\n    main()\n"
  },
  {
    "path": "ddpm_exp/finetune.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\nfrom tqdm import tqdm\nfrom runners.diffusion import Diffusion\nfrom torchvision import transforms\nimport torchvision\nfrom datasets import get_dataset, data_transform, inverse_data_transform\nfrom utils import UnlabeledImageFolder\nfrom accelerate import Accelerator\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config(accelerator):\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=2333, help=\"Random seed\")\n    parser.add_argument(\"--taylor_batch_size\", type=int, default=128, help=\"batch size for taylor expansion\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--kd\",\n        action=\"store_true\",\n        default=False,\n        help=\"skip according to (uniform or quadratic)\",\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n\n    parser.add_argument(\n        \"--load_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--save_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"No interaction. 
Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n\n    parser.add_argument(\n        \"--pruner\",\n        type=str,\n        default=\"taylor\",\n        choices=[\"taylor\", \"random\", \"magnitude\", \"reinit\", \"first_order_taylor\", \"second_order_taylor\"],\n    )\n\n    parser.add_argument(\n        \"--restore_from\",\n        type=str,\n        default=None,\n        help=\"Restore from user a checkpoint\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--pruning_ratio\",\n        type=float,\n        default=0.0,\n        help=\"pruning ratio\",\n    )\n    \n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n    \n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n        #tb_path = os.path.join(args.exp, \"tensorboard\", args.doc)\n    if accelerator.is_main_process:\n        if not args.test and not args.sample:\n            if not args.resume_training:\n                if os.path.exists(args.log_path):\n                    overwrite = False\n                    if args.ni:\n                        overwrite = True\n                    else:\n                        response = input(\"Folder already exists. Overwrite? (Y/N)\")\n                        if response.upper() == \"Y\":\n                            overwrite = True\n\n                    if overwrite:\n                        shutil.rmtree(args.log_path)\n                        #shutil.rmtree(tb_path)\n                        os.makedirs(args.log_path)\n                        #if os.path.exists(tb_path):\n                        #    shutil.rmtree(tb_path)\n                    else:\n                        print(\"Folder exists. 
Program halted.\")\n                        sys.exit(0)\n                else:\n                    os.makedirs(args.log_path)\n\n                with open(os.path.join(args.log_path, \"config.yml\"), \"w\") as f:\n                    yaml.dump(new_config, f, default_flow_style=False)\n            os.makedirs(os.path.join(args.log_path, 'vis'), exist_ok=True)\n            #new_config.tb_logger = tb.SummaryWriter(log_dir=tb_path)\n            # setup logger\n            level = getattr(logging, args.verbose.upper(), None)\n            if not isinstance(level, int):\n                raise ValueError(\"level {} not supported\".format(args.verbose))\n\n            handler1 = logging.StreamHandler()\n            handler2 = logging.FileHandler(os.path.join(args.log_path, \"stdout.txt\"))\n            formatter = logging.Formatter(\n                \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n            )\n            handler1.setFormatter(formatter)\n            handler2.setFormatter(formatter)\n            logger = logging.getLogger()\n            logger.addHandler(handler1)\n            logger.addHandler(handler2)\n            logger.setLevel(level)\n        else:\n            level = getattr(logging, args.verbose.upper(), None)\n            if not isinstance(level, int):\n                raise ValueError(\"level {} not supported\".format(args.verbose))\n\n            handler1 = logging.StreamHandler()\n            formatter = logging.Formatter(\n                \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n            )\n            handler1.setFormatter(formatter)\n            logger = logging.getLogger()\n            logger.addHandler(handler1)\n            logger.setLevel(level)\n\n    if args.sample:\n        os.makedirs(os.path.join(args.exp, \"image_samples\", args.image_folder, str(accelerator.process_index)), exist_ok=True)\n        args.image_folder = os.path.join(\n            args.exp, \"image_samples\", args.image_folder, str(accelerator.process_index)\n        )\n        if not os.path.exists(args.image_folder):\n            os.makedirs(args.image_folder)\n        else:\n            if not (args.fid or args.interpolation):\n                overwrite = False\n                if args.ni:\n                    overwrite = True\n                else:\n                    response = input(\n                        f\"Image folder {args.image_folder} already exists. Overwrite? (Y/N)\"\n                    )\n                    if response.upper() == \"Y\":\n                        overwrite = True\n\n                if overwrite:\n                    shutil.rmtree(args.image_folder)\n                    os.makedirs(args.image_folder)\n                else:\n                    print(\"Output image folder exists. 
Program halted.\")\n                    sys.exit(0)\n\n    # add device\n    #device = #torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    #logging.info(\"Using device: {}\".format(device))\n    #new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n\n    torch.backends.cudnn.benchmark = True\n\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    accelerator = Accelerator()\n    args, config = parse_args_and_config(accelerator)\n    logging.info(\"Writing log file to {}\".format(args.log_path))\n    logging.info(\"Exp instance id = {}\".format(os.getpid()))\n    logging.info(\"Exp comment = {}\".format(args.comment))\n\n    try:\n        runner = Diffusion(args, config)\n        runner.accelerator = accelerator\n        if args.sample:\n            runner.sample()\n        elif args.test:\n            runner.test()\n        else:\n            runner.train(kd=args.kd)\n    except Exception:\n        logging.error(traceback.format_exc())\n\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
  {
    "path": "ddpm_exp/finetune_simple.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\nfrom tqdm import tqdm\nfrom runners.diffusion_simple import Diffusion\nfrom torchvision import transforms\nimport torchvision\nfrom datasets import get_dataset, data_transform, inverse_data_transform\n\nfrom utils import UnlabeledImageFolder\n\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config():\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=2333, help=\"Random seed\")\n    parser.add_argument(\"--taylor_batch_size\", type=int, default=128, help=\"batch size for taylor expansion\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n\n    parser.add_argument(\n        \"--load_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--save_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"No interaction. 
Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n\n    parser.add_argument(\n        \"--pruner\",\n        type=str,\n        default=\"taylor\",\n        choices=[\"taylor\", \"random\", \"magnitude\", \"reinit\", \"first_order_taylor\", \"second_order_taylor\", \"ours\"],\n    )\n\n    parser.add_argument(\n        \"--restore_from\",\n        type=str,\n        default=None,\n        help=\"Restore from a user checkpoint\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--thr\",\n        type=float,\n        default=0.0,\n        help=\"loss threshold (relative to the max loss) for stopping Taylor gradient accumulation\",\n    )\n    parser.add_argument(\n        \"--pruning_ratio\",\n        type=float,\n        default=0.0,\n        help=\"pruning ratio\",\n    )\n    \n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n\n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n    #tb_path = os.path.join(args.exp, \"tensorboard\", args.doc)\n\n    if not args.test and not args.sample:\n        if not args.resume_training:\n            if os.path.exists(args.log_path):\n                overwrite = False\n                if args.ni:\n                    overwrite = True\n                else:\n                    response = input(\"Folder already exists. Overwrite? (Y/N)\")\n                    if response.upper() == \"Y\":\n                        overwrite = True\n\n                if overwrite:\n                    shutil.rmtree(args.log_path)\n                    #shutil.rmtree(tb_path)\n                    os.makedirs(args.log_path)\n                    #if os.path.exists(tb_path):\n                    #    shutil.rmtree(tb_path)\n                else:\n                    print(\"Folder exists. 
Program halted.\")\n                    sys.exit(0)\n            else:\n                os.makedirs(args.log_path)\n\n            with open(os.path.join(args.log_path, \"config.yml\"), \"w\") as f:\n                yaml.dump(new_config, f, default_flow_style=False)\n        os.makedirs(os.path.join(args.log_path, 'vis'), exist_ok=True)\n        #new_config.tb_logger = tb.SummaryWriter(log_dir=tb_path)\n        # setup logger\n        level = getattr(logging, args.verbose.upper(), None)\n        if not isinstance(level, int):\n            raise ValueError(\"level {} not supported\".format(args.verbose))\n\n        handler1 = logging.StreamHandler()\n        handler2 = logging.FileHandler(os.path.join(args.log_path, \"stdout.txt\"))\n        formatter = logging.Formatter(\n            \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n        )\n        handler1.setFormatter(formatter)\n        handler2.setFormatter(formatter)\n        logger = logging.getLogger()\n        logger.addHandler(handler1)\n        logger.addHandler(handler2)\n        logger.setLevel(level)\n\n    else:\n        level = getattr(logging, args.verbose.upper(), None)\n        if not isinstance(level, int):\n            raise ValueError(\"level {} not supported\".format(args.verbose))\n\n        handler1 = logging.StreamHandler()\n        formatter = logging.Formatter(\n            \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n        )\n        handler1.setFormatter(formatter)\n        logger = logging.getLogger()\n        logger.addHandler(handler1)\n        logger.setLevel(level)\n\n        if args.sample:\n            os.makedirs(os.path.join(args.exp, \"image_samples\"), exist_ok=True)\n            args.image_folder = os.path.join(\n                args.exp, \"image_samples\", args.image_folder\n            )\n            if not os.path.exists(args.image_folder):\n                os.makedirs(args.image_folder)\n            else:\n                if not (args.fid or args.interpolation):\n                    overwrite = False\n                    if args.ni:\n                        overwrite = True\n                    else:\n                        response = input(\n                            f\"Image folder {args.image_folder} already exists. Overwrite? (Y/N)\"\n                        )\n                        if response.upper() == \"Y\":\n                            overwrite = True\n\n                    if overwrite:\n                        shutil.rmtree(args.image_folder)\n                        os.makedirs(args.image_folder)\n                    else:\n                        print(\"Output image folder exists. 
Program halted.\")\n                        sys.exit(0)\n\n    # add device\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    logging.info(\"Using device: {}\".format(device))\n    new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n\n    torch.backends.cudnn.benchmark = True\n\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    args, config = parse_args_and_config()\n    logging.info(\"Writing log file to {}\".format(args.log_path))\n    logging.info(\"Exp instance id = {}\".format(os.getpid()))\n    logging.info(\"Exp comment = {}\".format(args.comment))\n\n    try:\n        runner = Diffusion(args, config)\n        if args.pruning_ratio > 0 and args.load_pruned_model is None:\n            # Dataset \n            print(config)\n            dataset, _ = get_dataset(args, config)\n            print(f\"Dataset size: {len(dataset)}\")\n            train_dataloader = torch.utils.data.DataLoader(\n                dataset, batch_size=args.taylor_batch_size, shuffle=True, num_workers=4, drop_last=True\n            )\n\n            from models.diffusion import AttnBlock\n            import torch_pruning as tp\n            print(\"Pruning ...\")\n            model = runner.model\n            model.to(runner.device)\n           \n            example_inputs = {'x': torch.randn(1, 3, config.data.image_size, config.data.image_size).to(runner.device), 't': torch.ones(1).to(runner.device)}\n\n            if args.pruner == 'taylor':\n                imp = tp.importance.TaylorImportance()\n            elif args.pruner == 'first_order_taylor':\n                imp = tp.importance.FullTaylorImportance(order=1)\n            elif args.pruner == 'second_order_taylor':\n                imp = tp.importance.FullTaylorImportance(order=2)\n            elif args.pruner == 'random' or args.pruner == 'reinit':\n                imp = tp.importance.RandomImportance()\n            elif args.pruner == 'magnitude':\n                imp = tp.importance.MagnitudeImportance()\n            elif args.pruner == 'ours':\n                imp = tp.importance.TaylorImportance()\n\n            ignored_layers = [model.conv_out]\n            channel_groups = {}\n            iterative_steps = 1\n            pruner = tp.pruner.MagnitudePruner(\n                model,\n                example_inputs,\n                importance=imp,\n                iterative_steps=iterative_steps,\n                channel_groups =channel_groups,\n                ch_sparsity=args.pruning_ratio, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n                ignored_layers=ignored_layers,\n                root_module_types=[torch.nn.Conv2d, torch.nn.Linear]\n            )\n            base_macs, base_nparams = tp.utils.count_ops_and_params(model, example_inputs)\n\n            if 'taylor' in args.pruner or args.pruner=='ours':\n                x = next(iter(train_dataloader))\n                if isinstance(x, (list, tuple)):\n                    x = x[0]\n                x = x.to(runner.device)\n                x = 
data_transform(config, x)\n                n = x.size(0)\n                e = torch.randn_like(x)\n                b = runner.betas\n                #t = torch.randint(\n                #        low=0, high=runner.num_timesteps, size=(n // 2 + 1,)\n                #).to(runner.device)\n                #t = torch.cat([t, runner.num_timesteps - t - 1], dim=0)[:n]\n                from functions.losses import loss_registry\n                \n                model.zero_grad()\n                max_loss = 0\n                for step_k in tqdm(range(1000)):\n                    t = torch.ones(n, dtype=torch.long).to(runner.device)*step_k\n                    loss = loss_registry[config.model.type](model, x, t, e, b)\n                    if args.pruner == 'ours':\n                        if loss>max_loss:\n                            max_loss = loss\n                        if loss<max_loss*args.thr:\n                            break\n                        #print(loss, max_loss)\n                    loss.backward()\n\n            print(\"============ Before Pruning ============\")\n            print(model)\n            for g in pruner.step(interactive=True):\n                g.prune()\n            \n            if args.pruner == 'reinit':\n                def reset_parameters(model):\n                    for m in model.modules():\n                        if hasattr(m, 'reset_parameters'):\n                            m.reset_parameters()\n                model.apply(reset_parameters)\n            \n            macs, nparams = tp.utils.count_ops_and_params(model, example_inputs)\n            print(\"============ After Pruning ============\")\n            print(model)\n            print(\"#Params: {:.4f} M => {:.4f} M\".format(base_nparams/1e6, nparams/1e6))\n            print(\"#MACs: {:.4f} G => {:.4f} G\".format(base_macs/1e9, macs/1e9))\n            del pruner\n            # Save pruned model\n            print(\"Saving pruned model as {}\".format(os.path.join(args.log_path, \"pruned_model.pth\")))\n            torch.save(\n                model,\n                os.path.join(args.log_path, \"pruned_model.pth\"),\n            )\n        \n        if args.load_pruned_model is not None:\n            print(\"Loading pruned model from {}\".format(args.load_pruned_model))\n            model = torch.load(args.load_pruned_model, map_location='cpu')\n            runner.model = model\n            \n        if args.sample:\n            runner.sample()\n        elif args.test:\n            runner.test()\n        else:\n            runner.train()\n    except Exception:\n        logging.error(traceback.format_exc())\n\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())"
  },
  {
    "path": "ddpm_exp/functions/__init__.py",
    "content": "import torch.optim as optim\n\n\ndef get_optimizer(config, parameters):\n    if config.optim.optimizer == 'Adam':\n        return optim.Adam(parameters, lr=config.optim.lr, weight_decay=config.optim.weight_decay,\n                          betas=(config.optim.beta1, 0.999), amsgrad=config.optim.amsgrad,\n                          eps=config.optim.eps)\n    elif config.optim.optimizer == 'RMSProp':\n        return optim.RMSprop(parameters, lr=config.optim.lr, weight_decay=config.optim.weight_decay)\n    elif config.optim.optimizer == 'SGD':\n        return optim.SGD(parameters, lr=config.optim.lr, momentum=0.9)\n    else:\n        raise NotImplementedError(\n            'Optimizer {} not understood.'.format(config.optim.optimizer))\n"
  },
  {
    "path": "ddpm_exp/functions/ckpt_util.py",
    "content": "import os, hashlib\nimport requests\nfrom tqdm import tqdm\n\nURL_MAP = {\n    \"cifar10\": \"https://heibox.uni-heidelberg.de/f/869980b53bf5416c8a28/?dl=1\",\n    \"ema_cifar10\": \"https://heibox.uni-heidelberg.de/f/2e4f01e2d9ee49bab1d5/?dl=1\",\n    \"lsun_bedroom\": \"https://heibox.uni-heidelberg.de/f/f179d4f21ebc4d43bbfe/?dl=1\",\n    \"ema_lsun_bedroom\": \"https://heibox.uni-heidelberg.de/f/b95206528f384185889b/?dl=1\",\n    \"lsun_cat\": \"https://heibox.uni-heidelberg.de/f/fac870bd988348eab88e/?dl=1\",\n    \"ema_lsun_cat\": \"https://heibox.uni-heidelberg.de/f/0701aac3aa69457bbe34/?dl=1\",\n    \"lsun_church\": \"https://heibox.uni-heidelberg.de/f/2711a6f712e34b06b9d8/?dl=1\",\n    \"ema_lsun_church\": \"https://heibox.uni-heidelberg.de/f/44ccb50ef3c6436db52e/?dl=1\",\n}\nCKPT_MAP = {\n    \"cifar10\": \"diffusion_cifar10_model/model-790000.ckpt\",\n    \"ema_cifar10\": \"ema_diffusion_cifar10_model/model-790000.ckpt\",\n    \"lsun_bedroom\": \"diffusion_lsun_bedroom_model/model-2388000.ckpt\",\n    \"ema_lsun_bedroom\": \"ema_diffusion_lsun_bedroom_model/model-2388000.ckpt\",\n    \"lsun_cat\": \"diffusion_lsun_cat_model/model-1761000.ckpt\",\n    \"ema_lsun_cat\": \"ema_diffusion_lsun_cat_model/model-1761000.ckpt\",\n    \"lsun_church\": \"diffusion_lsun_church_model/model-4432000.ckpt\",\n    \"ema_lsun_church\": \"ema_diffusion_lsun_church_model/model-4432000.ckpt\",\n    \"celeba\": \"ema_diffusion_celeba_model/model.ckpt\",\n    \"ema_celeba\": \"ema_diffusion_celeba_model/model.ckpt\",\n}\n\nMD5_MAP = {\n    \"cifar10\": \"82ed3067fd1002f5cf4c339fb80c4669\",\n    \"ema_cifar10\": \"1fa350b952534ae442b1d5235cce5cd3\",\n    \"lsun_bedroom\": \"f70280ac0e08b8e696f42cb8e948ff1c\",\n    \"ema_lsun_bedroom\": \"1921fa46b66a3665e450e42f36c2720f\",\n    \"lsun_cat\": \"bbee0e7c3d7abfb6e2539eaf2fb9987b\",\n    \"ema_lsun_cat\": \"646f23f4821f2459b8bafc57fd824558\",\n    \"lsun_church\": \"eb619b8a5ab95ef80f94ce8a5488dae3\",\n    \"ema_lsun_church\": \"fdc68a23938c2397caba4a260bc2445f\",\n}\n\n\ndef download(url, local_path, chunk_size=1024):\n    os.makedirs(os.path.split(local_path)[0], exist_ok=True)\n    with requests.get(url, stream=True) as r:\n        total_size = int(r.headers.get(\"content-length\", 0))\n        with tqdm(total=total_size, unit=\"B\", unit_scale=True) as pbar:\n            with open(local_path, \"wb\") as f:\n                for data in r.iter_content(chunk_size=chunk_size):\n                    if data:\n                        f.write(data)\n                        pbar.update(chunk_size)\n\n\ndef md5_hash(path):\n    with open(path, \"rb\") as f:\n        content = f.read()\n    return hashlib.md5(content).hexdigest()\n\n\ndef get_ckpt_path(name, root=None, check=False):\n    if 'church_outdoor' in name:\n        name = name.replace('church_outdoor', 'church')\n    #assert name in URL_MAP\n    # Modify the path when necessary\n    cachedir = os.environ.get(\"XDG_CACHE_HOME\", os.path.expanduser(\"./run/cache\"))\n    root = (\n        root\n        if root is not None\n        else os.path.join(cachedir, \"diffusion_models_converted\")\n    )\n    path = os.path.join(root, CKPT_MAP[name])\n    if not os.path.exists(path) or (check and not md5_hash(path) == MD5_MAP[name]):\n        print(\"Downloading {} model from {} to {}\".format(name, URL_MAP[name], path))\n        download(URL_MAP[name], path)\n        md5 = md5_hash(path)\n        assert md5 == MD5_MAP[name], md5\n    return path\n"
  },
  {
    "path": "ddpm_exp/functions/denoising.py",
    "content": "import torch\n\n\ndef compute_alpha(beta, t):\n    beta = torch.cat([torch.zeros(1).to(beta.device), beta], dim=0)\n    a = (1 - beta).cumprod(dim=0).index_select(0, t + 1).view(-1, 1, 1, 1)\n    return a\n\n\ndef generalized_steps(x, seq, model, b, **kwargs):\n    with torch.no_grad():\n        n = x.size(0)\n        seq_next = [-1] + list(seq[:-1])\n        x0_preds = []\n        xs = [x]\n        for i, j in zip(reversed(seq), reversed(seq_next)):\n            t = (torch.ones(n) * i).to(x.device)\n            next_t = (torch.ones(n) * j).to(x.device)\n            at = compute_alpha(b, t.long())\n            at_next = compute_alpha(b, next_t.long())\n            xt = xs[-1].to('cuda')\n            et = model(xt, t)\n            x0_t = (xt - et * (1 - at).sqrt()) / at.sqrt()\n            x0_preds.append(x0_t.to('cpu'))\n            c1 = (\n                kwargs.get(\"eta\", 0) * ((1 - at / at_next) * (1 - at_next) / (1 - at)).sqrt()\n            )\n            c2 = ((1 - at_next) - c1 ** 2).sqrt()\n            xt_next = at_next.sqrt() * x0_t + c1 * torch.randn_like(x) + c2 * et\n            xs.append(xt_next.to('cpu'))\n\n    return xs, x0_preds\n\n\ndef ddpm_steps(x, seq, model, b, **kwargs):\n    with torch.no_grad():\n        n = x.size(0)\n        seq_next = [-1] + list(seq[:-1])\n        xs = [x]\n        x0_preds = []\n        betas = b\n        for i, j in zip(reversed(seq), reversed(seq_next)):\n            t = (torch.ones(n) * i).to(x.device)\n            next_t = (torch.ones(n) * j).to(x.device)\n            at = compute_alpha(betas, t.long())\n            atm1 = compute_alpha(betas, next_t.long())\n            beta_t = 1 - at / atm1\n            x = xs[-1].to('cuda')\n\n            output = model(x, t.float())\n            e = output\n\n            x0_from_e = (1.0 / at).sqrt() * x - (1.0 / at - 1).sqrt() * e\n            x0_from_e = torch.clamp(x0_from_e, -1, 1)\n            x0_preds.append(x0_from_e.to('cpu'))\n            mean_eps = (\n                (atm1.sqrt() * beta_t) * x0_from_e + ((1 - beta_t).sqrt() * (1 - atm1)) * x\n            ) / (1.0 - at)\n\n            mean = mean_eps\n            noise = torch.randn_like(x)\n            mask = 1 - (t == 0).float()\n            mask = mask.view(-1, 1, 1, 1)\n            logvar = beta_t.log()\n            sample = mean + mask * torch.exp(0.5 * logvar) * noise\n            xs.append(sample.to('cpu'))\n    return xs, x0_preds\n"
  },
  {
    "path": "ddpm_exp/functions/losses.py",
    "content": "import torch\n\n\ndef noise_estimation_loss(model,\n                          x0: torch.Tensor,\n                          t: torch.LongTensor,\n                          e: torch.Tensor,\n                          b: torch.Tensor, keepdim=False):\n    a = (1-b).cumprod(dim=0).index_select(0, t).view(-1, 1, 1, 1)\n    x = x0 * a.sqrt() + e * (1.0 - a).sqrt()\n    output = model(x, t.float())\n    if keepdim:\n        return (e - output).square().sum(dim=(1, 2, 3))\n    else:\n        return (e - output).square().sum(dim=(1, 2, 3)).mean(dim=0)\n\ndef noise_estimation_kd_loss(model,\n                             teacher,\n                          x0: torch.Tensor,\n                          t: torch.LongTensor,\n                          e: torch.Tensor,\n                          b: torch.Tensor, keepdim=False):\n    a = (1-b).cumprod(dim=0).index_select(0, t).view(-1, 1, 1, 1)\n    x = x0 * a.sqrt() + e * (1.0 - a).sqrt()\n    output = model(x, t.float())\n    with torch.no_grad():\n        teacher_output = teacher(x, t.float())\n    if keepdim:\n        return 0.7*(teacher_output - output).square().sum(dim=(1, 2, 3)) + 0.3 * (e - output).square().sum(dim=(1, 2, 3))\n    else:\n        return 0.7*(teacher_output - output).square().sum(dim=(1, 2, 3)).mean(dim=0) + 0.3 * (e - output).square().sum(dim=(1, 2, 3)).mean(dim=0)\n\n\nloss_registry = {\n    'simple': noise_estimation_loss,\n}\n"
  },
  {
    "path": "ddpm_exp/inception.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision\n\ntry:\n    from torchvision.models.utils import load_state_dict_from_url\nexcept ImportError:\n    from torch.utils.model_zoo import load_url as load_state_dict_from_url\n\n# Inception weights ported to Pytorch from\n# http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz\nFID_WEIGHTS_URL = 'https://github.com/mseitzer/pytorch-fid/releases/download/fid_weights/pt_inception-2015-12-05-6726825d.pth'  # noqa: E501\n\n\nclass InceptionV3(nn.Module):\n    \"\"\"Pretrained InceptionV3 network returning feature maps\"\"\"\n\n    # Index of default block of inception to return,\n    # corresponds to output of final average pooling\n    DEFAULT_BLOCK_INDEX = 3\n\n    # Maps feature dimensionality to their output blocks indices\n    BLOCK_INDEX_BY_DIM = {\n        64: 0,   # First max pooling features\n        192: 1,  # Second max pooling featurs\n        768: 2,  # Pre-aux classifier features\n        2048: 3  # Final average pooling features\n    }\n\n    def __init__(self,\n                 output_blocks=(DEFAULT_BLOCK_INDEX,),\n                 resize_input=True,\n                 normalize_input=True,\n                 requires_grad=False,\n                 use_fid_inception=True):\n        \"\"\"Build pretrained InceptionV3\n\n        Parameters\n        ----------\n        output_blocks : list of int\n            Indices of blocks to return features of. Possible values are:\n                - 0: corresponds to output of first max pooling\n                - 1: corresponds to output of second max pooling\n                - 2: corresponds to output which is fed to aux classifier\n                - 3: corresponds to output of final average pooling\n        resize_input : bool\n            If true, bilinearly resizes input to width and height 299 before\n            feeding input to model. As the network without fully connected\n            layers is fully convolutional, it should be able to handle inputs\n            of arbitrary size, so resizing might not be strictly needed\n        normalize_input : bool\n            If true, scales the input from range (0, 1) to the range the\n            pretrained Inception network expects, namely (-1, 1)\n        requires_grad : bool\n            If true, parameters of the model require gradients. Possibly useful\n            for finetuning the network\n        use_fid_inception : bool\n            If true, uses the pretrained Inception model used in Tensorflow's\n            FID implementation. If false, uses the pretrained Inception model\n            available in torchvision. The FID Inception model has different\n            weights and a slightly different structure from torchvision's\n            Inception model. 
If you want to compute FID scores, you are\n            strongly advised to set this parameter to true to get comparable\n            results.\n        \"\"\"\n        super(InceptionV3, self).__init__()\n\n        self.resize_input = resize_input\n        self.normalize_input = normalize_input\n        self.output_blocks = sorted(output_blocks)\n        self.last_needed_block = max(output_blocks)\n\n        assert self.last_needed_block <= 3, \\\n            'Last possible output block index is 3'\n\n        self.blocks = nn.ModuleList()\n\n        if use_fid_inception:\n            inception = fid_inception_v3()\n        else:\n            inception = _inception_v3(weights='DEFAULT')\n\n        # Block 0: input to maxpool1\n        block0 = [\n            inception.Conv2d_1a_3x3,\n            inception.Conv2d_2a_3x3,\n            inception.Conv2d_2b_3x3,\n            nn.MaxPool2d(kernel_size=3, stride=2)\n        ]\n        self.blocks.append(nn.Sequential(*block0))\n\n        # Block 1: maxpool1 to maxpool2\n        if self.last_needed_block >= 1:\n            block1 = [\n                inception.Conv2d_3b_1x1,\n                inception.Conv2d_4a_3x3,\n                nn.MaxPool2d(kernel_size=3, stride=2)\n            ]\n            self.blocks.append(nn.Sequential(*block1))\n\n        # Block 2: maxpool2 to aux classifier\n        if self.last_needed_block >= 2:\n            block2 = [\n                inception.Mixed_5b,\n                inception.Mixed_5c,\n                inception.Mixed_5d,\n                inception.Mixed_6a,\n                inception.Mixed_6b,\n                inception.Mixed_6c,\n                inception.Mixed_6d,\n                inception.Mixed_6e,\n            ]\n            self.blocks.append(nn.Sequential(*block2))\n\n        # Block 3: aux classifier to final avgpool\n        if self.last_needed_block >= 3:\n            block3 = [\n                inception.Mixed_7a,\n                inception.Mixed_7b,\n                inception.Mixed_7c,\n                nn.AdaptiveAvgPool2d(output_size=(1, 1))\n            ]\n            self.blocks.append(nn.Sequential(*block3))\n\n        for param in self.parameters():\n            param.requires_grad = requires_grad\n\n    def forward(self, inp):\n        \"\"\"Get Inception feature maps\n\n        Parameters\n        ----------\n        inp : torch.autograd.Variable\n            Input tensor of shape Bx3xHxW. 
Values are expected to be in\n            range (0, 1)\n\n        Returns\n        -------\n        List of torch.autograd.Variable, corresponding to the selected output\n        block, sorted ascending by index\n        \"\"\"\n        outp = []\n        x = inp\n\n        if self.resize_input:\n            x = F.interpolate(x,\n                              size=(299, 299),\n                              mode='bilinear',\n                              align_corners=False)\n\n        if self.normalize_input:\n            x = 2 * x - 1  # Scale from range (0, 1) to range (-1, 1)\n\n        for idx, block in enumerate(self.blocks):\n            x = block(x)\n            if idx in self.output_blocks:\n                outp.append(x)\n\n            if idx == self.last_needed_block:\n                break\n\n        return outp\n\n\ndef _inception_v3(*args, **kwargs):\n    \"\"\"Wraps `torchvision.models.inception_v3`\"\"\"\n    try:\n        version = tuple(map(int, torchvision.__version__.split('.')[:2]))\n    except ValueError:\n        # Just a caution against weird version strings\n        version = (0,)\n\n    # Skips default weight inititialization if supported by torchvision\n    # version. See https://github.com/mseitzer/pytorch-fid/issues/28.\n    if version >= (0, 6):\n        kwargs['init_weights'] = False\n\n    # Backwards compatibility: `weights` argument was handled by `pretrained`\n    # argument prior to version 0.13.\n    if version < (0, 13) and 'weights' in kwargs:\n        if kwargs['weights'] == 'DEFAULT':\n            kwargs['pretrained'] = True\n        elif kwargs['weights'] is None:\n            kwargs['pretrained'] = False\n        else:\n            raise ValueError(\n                'weights=={} not supported in torchvision {}'.format(\n                    kwargs['weights'], torchvision.__version__\n                )\n            )\n        del kwargs['weights']\n\n    return torchvision.models.inception_v3(*args, **kwargs)\n\n\ndef fid_inception_v3():\n    \"\"\"Build pretrained Inception model for FID computation\n\n    The Inception model for FID computation uses a different set of weights\n    and has a slightly different structure than torchvision's Inception.\n\n    This method first constructs torchvision's Inception and then patches the\n    necessary parts that are different in the FID Inception model.\n    \"\"\"\n    inception = _inception_v3(num_classes=1008,\n                              aux_logits=False,\n                              weights=None)\n    inception.Mixed_5b = FIDInceptionA(192, pool_features=32)\n    inception.Mixed_5c = FIDInceptionA(256, pool_features=64)\n    inception.Mixed_5d = FIDInceptionA(288, pool_features=64)\n    inception.Mixed_6b = FIDInceptionC(768, channels_7x7=128)\n    inception.Mixed_6c = FIDInceptionC(768, channels_7x7=160)\n    inception.Mixed_6d = FIDInceptionC(768, channels_7x7=160)\n    inception.Mixed_6e = FIDInceptionC(768, channels_7x7=192)\n    inception.Mixed_7b = FIDInceptionE_1(1280)\n    inception.Mixed_7c = FIDInceptionE_2(2048)\n\n    state_dict = load_state_dict_from_url(FID_WEIGHTS_URL, progress=True)\n    inception.load_state_dict(state_dict)\n    return inception\n\n\nclass FIDInceptionA(torchvision.models.inception.InceptionA):\n    \"\"\"InceptionA block patched for FID computation\"\"\"\n    def __init__(self, in_channels, pool_features):\n        super(FIDInceptionA, self).__init__(in_channels, pool_features)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        
branch5x5 = self.branch5x5_1(x)\n        branch5x5 = self.branch5x5_2(branch5x5)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionC(torchvision.models.inception.InceptionC):\n    \"\"\"InceptionC block patched for FID computation\"\"\"\n    def __init__(self, in_channels, channels_7x7):\n        super(FIDInceptionC, self).__init__(in_channels, channels_7x7)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch7x7 = self.branch7x7_1(x)\n        branch7x7 = self.branch7x7_2(branch7x7)\n        branch7x7 = self.branch7x7_3(branch7x7)\n\n        branch7x7dbl = self.branch7x7dbl_1(x)\n        branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionE_1(torchvision.models.inception.InceptionE):\n    \"\"\"First InceptionE block patched for FID computation\"\"\"\n    def __init__(self, in_channels):\n        super(FIDInceptionE_1, self).__init__(in_channels)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionE_2(torchvision.models.inception.InceptionE):\n    \"\"\"Second InceptionE block patched for FID computation\"\"\"\n    def __init__(self, in_channels):\n        super(FIDInceptionE_2, self).__init__(in_channels)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        
branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        # Patch: The FID Inception model uses max pooling instead of average\n        # pooling. This is likely an error in this specific Inception\n        # implementation, as other Inception models use average pooling here\n        # (which matches the description in the paper).\n        branch_pool = F.max_pool2d(x, kernel_size=3, stride=1, padding=1)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n"
  },
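The "Patch" comments in the file above refer to the difference between PyTorch's default average pooling and TensorFlow's: with zero padding, TensorFlow divides only by the number of valid (non-padded) elements, which in PyTorch corresponds to count_include_pad=False. A minimal toy comparison (not part of the repository):

# Toy check of the pooling behaviour the FID Inception patches rely on.
import torch
import torch.nn.functional as F

x = torch.ones(1, 1, 4, 4)
# Default: padded zeros are counted in the average, so corner value = 4/9.
default = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
# TF-like: only valid elements are counted, so corner value = 4/4 = 1.0.
tf_like = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1, count_include_pad=False)
print(default[0, 0, 0, 0].item(), tf_like[0, 0, 0, 0].item())  # 0.444..., 1.0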
  {
    "path": "ddpm_exp/main.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\n\nfrom runners.diffusion import Diffusion\n\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config():\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=1234, help=\"Random seed\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n\n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n    tb_path = os.path.join(args.exp, \"tensorboard\", args.doc)\n\n    if not args.test and not args.sample:\n        if not args.resume_training:\n            if os.path.exists(args.log_path):\n                overwrite = False\n                if args.ni:\n                    overwrite = True\n                else:\n                    response = input(\"Folder already exists. Overwrite? 
(Y/N)\")\n                    if response.upper() == \"Y\":\n                        overwrite = True\n\n                if overwrite:\n                    shutil.rmtree(args.log_path)\n                    shutil.rmtree(tb_path)\n                    os.makedirs(args.log_path)\n                    if os.path.exists(tb_path):\n                        shutil.rmtree(tb_path)\n                else:\n                    print(\"Folder exists. Program halted.\")\n                    sys.exit(0)\n            else:\n                os.makedirs(args.log_path)\n\n            with open(os.path.join(args.log_path, \"config.yml\"), \"w\") as f:\n                yaml.dump(new_config, f, default_flow_style=False)\n\n        new_config.tb_logger = tb.SummaryWriter(log_dir=tb_path)\n        # setup logger\n        level = getattr(logging, args.verbose.upper(), None)\n        if not isinstance(level, int):\n            raise ValueError(\"level {} not supported\".format(args.verbose))\n\n        handler1 = logging.StreamHandler()\n        handler2 = logging.FileHandler(os.path.join(args.log_path, \"stdout.txt\"))\n        formatter = logging.Formatter(\n            \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n        )\n        handler1.setFormatter(formatter)\n        handler2.setFormatter(formatter)\n        logger = logging.getLogger()\n        logger.addHandler(handler1)\n        logger.addHandler(handler2)\n        logger.setLevel(level)\n\n    else:\n        level = getattr(logging, args.verbose.upper(), None)\n        if not isinstance(level, int):\n            raise ValueError(\"level {} not supported\".format(args.verbose))\n\n        handler1 = logging.StreamHandler()\n        formatter = logging.Formatter(\n            \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n        )\n        handler1.setFormatter(formatter)\n        logger = logging.getLogger()\n        logger.addHandler(handler1)\n        logger.setLevel(level)\n\n        if args.sample:\n            os.makedirs(os.path.join(args.exp, \"image_samples\"), exist_ok=True)\n            args.image_folder = os.path.join(\n                args.exp, \"image_samples\", args.image_folder\n            )\n            if not os.path.exists(args.image_folder):\n                os.makedirs(args.image_folder)\n            else:\n                if not (args.fid or args.interpolation):\n                    overwrite = False\n                    if args.ni:\n                        overwrite = True\n                    else:\n                        response = input(\n                            f\"Image folder {args.image_folder} already exists. Overwrite? (Y/N)\"\n                        )\n                        if response.upper() == \"Y\":\n                            overwrite = True\n\n                    if overwrite:\n                        shutil.rmtree(args.image_folder)\n                        os.makedirs(args.image_folder)\n                    else:\n                        print(\"Output image folder exists. 
Program halted.\")\n                        sys.exit(0)\n\n    # add device\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    logging.info(\"Using device: {}\".format(device))\n    new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n\n    torch.backends.cudnn.benchmark = True\n\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    args, config = parse_args_and_config()\n    logging.info(\"Writing log file to {}\".format(args.log_path))\n    logging.info(\"Exp instance id = {}\".format(os.getpid()))\n    logging.info(\"Exp comment = {}\".format(args.comment))\n\n    try:\n        runner = Diffusion(args, config)\n        if args.sample:\n            runner.sample()\n        elif args.test:\n            runner.test()\n        else:\n            runner.train()\n    except Exception:\n        logging.error(traceback.format_exc())\n\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
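main.py reads the YAML config and converts it to nested namespaces with dict2namespace so that values can be accessed as attributes (e.g. config.data.image_size). A small standalone sketch of that conversion; the config keys below are illustrative stand-ins for the real files under configs/:

# Minimal sketch of the dict2namespace conversion used in main.py.
import argparse
import yaml

def dict2namespace(config):
    namespace = argparse.Namespace()
    for key, value in config.items():
        setattr(namespace, key, dict2namespace(value) if isinstance(value, dict) else value)
    return namespace

cfg = yaml.safe_load("""
data:
  image_size: 32
model:
  ch: 128
""")
ns = dict2namespace(cfg)
assert ns.data.image_size == 32 and ns.model.ch == 128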
  {
    "path": "ddpm_exp/models/diffusion.py",
    "content": "import math\nimport torch\nimport torch.nn as nn\n\n\ndef get_timestep_embedding(timesteps, embedding_dim):\n    \"\"\"\n    This matches the implementation in Denoising Diffusion Probabilistic Models:\n    From Fairseq.\n    Build sinusoidal embeddings.\n    This matches the implementation in tensor2tensor, but differs slightly\n    from the description in Section 3.5 of \"Attention Is All You Need\".\n    \"\"\"\n    assert len(timesteps.shape) == 1\n\n    half_dim = embedding_dim // 2\n    emb = math.log(10000) / (half_dim - 1)\n    emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb)\n    emb = emb.to(device=timesteps.device)\n    emb = timesteps.float()[:, None] * emb[None, :]\n    emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)\n    if embedding_dim % 2 == 1:  # zero pad\n        emb = torch.nn.functional.pad(emb, (0, 1, 0, 0))\n    return emb\n\n\ndef nonlinearity(x):\n    # swish\n    return x*torch.sigmoid(x)\n\n\ndef Normalize(in_channels):\n    return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True)\n\n\nclass Upsample(nn.Module):\n    def __init__(self, in_channels, with_conv):\n        super().__init__()\n        self.with_conv = with_conv\n        if self.with_conv:\n            self.conv = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x):\n        x = torch.nn.functional.interpolate(\n            x, scale_factor=2.0, mode=\"nearest\")\n        if self.with_conv:\n            x = self.conv(x)\n        return x\n\n\nclass Downsample(nn.Module):\n    def __init__(self, in_channels, with_conv):\n        super().__init__()\n        self.with_conv = with_conv\n        if self.with_conv:\n            # no asymmetric padding in torch conv, must do it ourselves\n            self.conv = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=3,\n                                        stride=2,\n                                        padding=0)\n\n    def forward(self, x):\n        if self.with_conv:\n            pad = (0, 1, 0, 1)\n            x = torch.nn.functional.pad(x, pad, mode=\"constant\", value=0)\n            x = self.conv(x)\n        else:\n            x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2)\n        return x\n\n\nclass ResnetBlock(nn.Module):\n    def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False,\n                 dropout, temb_channels=512):\n        super().__init__()\n        self.in_channels = in_channels\n        out_channels = in_channels if out_channels is None else out_channels\n        self.out_channels = out_channels\n        self.use_conv_shortcut = conv_shortcut\n\n        self.norm1 = Normalize(in_channels)\n        self.conv1 = torch.nn.Conv2d(in_channels,\n                                     out_channels,\n                                     kernel_size=3,\n                                     stride=1,\n                                     padding=1)\n        self.temb_proj = torch.nn.Linear(temb_channels,\n                                         out_channels)\n        self.norm2 = Normalize(out_channels)\n        self.dropout = torch.nn.Dropout(dropout)\n        self.conv2 = torch.nn.Conv2d(out_channels,\n                                
     out_channels,\n                                     kernel_size=3,\n                                     stride=1,\n                                     padding=1)\n        if self.in_channels != self.out_channels:\n            if self.use_conv_shortcut:\n                self.conv_shortcut = torch.nn.Conv2d(in_channels,\n                                                     out_channels,\n                                                     kernel_size=3,\n                                                     stride=1,\n                                                     padding=1)\n            else:\n                self.nin_shortcut = torch.nn.Conv2d(in_channels,\n                                                    out_channels,\n                                                    kernel_size=1,\n                                                    stride=1,\n                                                    padding=0)\n\n    def forward(self, x, temb):\n        h = x\n        h = self.norm1(h)\n        h = nonlinearity(h)\n        h = self.conv1(h)\n\n        h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None]\n\n        h = self.norm2(h)\n        h = nonlinearity(h)\n        h = self.dropout(h)\n        h = self.conv2(h)\n\n        if self.in_channels != self.out_channels:\n            if self.use_conv_shortcut:\n                x = self.conv_shortcut(x)\n            else:\n                x = self.nin_shortcut(x)\n\n        return x+h\n\n\nclass AttnBlock(nn.Module):\n    def __init__(self, in_channels):\n        super().__init__()\n        self.in_channels = in_channels\n\n        self.norm = Normalize(in_channels)\n        self.q = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.k = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.v = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.proj_out = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=1,\n                                        stride=1,\n                                        padding=0)\n\n    def forward(self, x):\n        h_ = x\n        h_ = self.norm(h_)\n        q = self.q(h_)\n        k = self.k(h_)\n        v = self.v(h_)\n\n        # compute attention\n        b, c, h, w = q.shape\n        q = q.reshape(b, c, h*w)\n        q = q.permute(0, 2, 1)   # b,hw,c\n        k = k.reshape(b, c, h*w)  # b,c,hw\n        w_ = torch.bmm(q, k)     # b,hw,hw    w[b,i,j]=sum_c q[b,i,c]k[b,c,j]\n        w_ = w_ * (int(c)**(-0.5))\n        w_ = torch.nn.functional.softmax(w_, dim=2)\n\n        # attend to values\n        v = v.reshape(b, c, h*w)\n        w_ = w_.permute(0, 2, 1)   # b,hw,hw (first hw of k, second of q)\n        # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j]\n        h_ = torch.bmm(v, w_)\n        h_ = h_.reshape(b, c, h, w)\n\n        h_ = self.proj_out(h_)\n\n        return x+h_\n\n\nclass Model(nn.Module):\n    def __init__(self, config):\n        super().__init__()\n        
self.config = config\n        ch, out_ch, ch_mult = config.model.ch, config.model.out_ch, tuple(config.model.ch_mult)\n        num_res_blocks = config.model.num_res_blocks\n        attn_resolutions = config.model.attn_resolutions\n        dropout = config.model.dropout\n        in_channels = config.model.in_channels\n        resolution = config.data.image_size\n        resamp_with_conv = config.model.resamp_with_conv\n        num_timesteps = config.diffusion.num_diffusion_timesteps\n        \n        if config.model.type == 'bayesian':\n            self.logvar = nn.Parameter(torch.zeros(num_timesteps))\n        \n        self.ch = ch\n        self.temb_ch = self.ch*4\n        self.num_resolutions = len(ch_mult)\n        self.num_res_blocks = num_res_blocks\n        self.resolution = resolution\n        self.in_channels = in_channels\n\n        # timestep embedding\n        self.temb = nn.Module()\n        self.temb.dense = nn.ModuleList([\n            torch.nn.Linear(self.ch,\n                            self.temb_ch),\n            torch.nn.Linear(self.temb_ch,\n                            self.temb_ch),\n        ])\n\n        # downsampling\n        self.conv_in = torch.nn.Conv2d(in_channels,\n                                       self.ch,\n                                       kernel_size=3,\n                                       stride=1,\n                                       padding=1)\n\n        curr_res = resolution\n        in_ch_mult = (1,)+ch_mult\n        self.down = nn.ModuleList()\n        block_in = None\n        for i_level in range(self.num_resolutions):\n            block = nn.ModuleList()\n            attn = nn.ModuleList()\n            block_in = ch*in_ch_mult[i_level]\n            block_out = ch*ch_mult[i_level]\n            for i_block in range(self.num_res_blocks):\n                block.append(ResnetBlock(in_channels=block_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n                if curr_res in attn_resolutions:\n                    attn.append(AttnBlock(block_in))\n            down = nn.Module()\n            down.block = block\n            down.attn = attn\n            if i_level != self.num_resolutions-1:\n                down.downsample = Downsample(block_in, resamp_with_conv)\n                curr_res = curr_res // 2\n            self.down.append(down)\n\n        # middle\n        self.mid = nn.Module()\n        self.mid.block_1 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n        self.mid.attn_1 = AttnBlock(block_in)\n        self.mid.block_2 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n\n        # upsampling\n        self.up = nn.ModuleList()\n        for i_level in reversed(range(self.num_resolutions)):\n            block = nn.ModuleList()\n            attn = nn.ModuleList()\n            block_out = ch*ch_mult[i_level]\n            skip_in = ch*ch_mult[i_level]\n            for i_block in range(self.num_res_blocks+1):\n                if i_block == self.num_res_blocks:\n                    skip_in = 
ch*in_ch_mult[i_level]\n                block.append(ResnetBlock(in_channels=block_in+skip_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n                if curr_res in attn_resolutions:\n                    attn.append(AttnBlock(block_in))\n            up = nn.Module()\n            up.block = block\n            up.attn = attn\n            if i_level != 0:\n                up.upsample = Upsample(block_in, resamp_with_conv)\n                curr_res = curr_res * 2\n            self.up.insert(0, up)  # prepend to get consistent order\n\n        # end\n        self.norm_out = Normalize(block_in)\n        self.conv_out = torch.nn.Conv2d(block_in,\n                                        out_ch,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x, t):\n        assert x.shape[2] == x.shape[3] == self.resolution\n\n        # timestep embedding\n        temb = get_timestep_embedding(t, self.ch)\n        temb = self.temb.dense[0](temb)\n        temb = nonlinearity(temb)\n        temb = self.temb.dense[1](temb)\n\n        # downsampling\n        hs = [self.conv_in(x)]\n        for i_level in range(self.num_resolutions):\n            for i_block in range(self.num_res_blocks):\n                h = self.down[i_level].block[i_block](hs[-1], temb)\n                if len(self.down[i_level].attn) > 0:\n                    h = self.down[i_level].attn[i_block](h)\n                hs.append(h)\n            if i_level != self.num_resolutions-1:\n                hs.append(self.down[i_level].downsample(hs[-1]))\n\n        # middle\n        h = hs[-1]\n        h = self.mid.block_1(h, temb)\n        h = self.mid.attn_1(h)\n        h = self.mid.block_2(h, temb)\n\n        # upsampling\n        for i_level in reversed(range(self.num_resolutions)):\n            for i_block in range(self.num_res_blocks+1):\n                h = self.up[i_level].block[i_block](\n                    torch.cat([h, hs.pop()], dim=1), temb)\n                if len(self.up[i_level].attn) > 0:\n                    h = self.up[i_level].attn[i_block](h)\n            if i_level != 0:\n                h = self.up[i_level].upsample(h)\n\n        # end\n        h = self.norm_out(h)\n        h = nonlinearity(h)\n        h = self.conv_out(h)\n        return h\n"
  },
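The model conditions on the diffusion step through the sinusoidal embedding built in get_timestep_embedding. A self-contained sketch that mirrors that function (same constants, even embedding_dim) and checks the output shape:

# Standalone sketch of the sinusoidal timestep embedding in models/diffusion.py.
import math
import torch

def timestep_embedding(timesteps, embedding_dim):
    half_dim = embedding_dim // 2
    freqs = torch.exp(torch.arange(half_dim, dtype=torch.float32)
                      * -(math.log(10000) / (half_dim - 1)))
    args = timesteps.float()[:, None] * freqs[None, :]
    return torch.cat([torch.sin(args), torch.cos(args)], dim=1)

emb = timestep_embedding(torch.arange(8), 128)
print(emb.shape)  # torch.Size([8, 128])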
  {
    "path": "ddpm_exp/models/ema.py",
    "content": "import torch.nn as nn\n\n\nclass EMAHelper(object):\n    def __init__(self, mu=0.999):\n        self.mu = mu\n        self.shadow = []\n\n    def to(self, device=None) -> None:\n        self.shadow = [\n            p.to(device=device)\n            for p in self.shadow\n        ]\n\n    def copy_to(self, parameters) -> None:\n        parameters = list(parameters)\n        for s_param, param in zip(self.shadow, parameters):\n            param.data.copy_(s_param.to(param.device).data)\n\n    def store(self, parameters) -> None:\n        r\"\"\"\n        Args:\n        Save the current parameters for restoring later.\n            parameters: Iterable of `torch.nn.Parameter`; the parameters to be\n                temporarily stored.\n        \"\"\"\n        self.temp_stored_params = [param.detach().cpu().clone() for param in parameters]\n\n    def restore(self, parameters) -> None:\n        if self.temp_stored_params is None:\n            raise RuntimeError(\"This ExponentialMovingAverage has no `store()`ed weights \" \"to `restore()`\")\n        for c_param, param in zip(self.temp_stored_params, parameters):\n            param.data.copy_(c_param.data)\n        self.temp_stored_params = None\n\n    def register(self, module):\n        for param in module.parameters():\n            if param.requires_grad:\n                self.shadow.append(param.data.clone())\n\n    def update(self, module):\n        for i, (shadow_param, param) in enumerate(zip(self.shadow, module.parameters())):\n            if param.requires_grad:\n                shadow_param.data = (\n                    1. - self.mu) * param.data + self.mu * shadow_param.data\n                #if i==0:\n                #    print(shadow_param.flatten()[0])\n                    \n    def ema(self, module):\n        for shadow_param, param in zip(self.shadow, module.parameters()):\n            if param.requires_grad:\n                param.data.copy_(shadow_param.data)\n\n    #def ema_copy(self, module):\n    #    if isinstance(module, nn.parallel.DistributedDataParallel):\n    #        from copy import deepcopy\n    #        inner_module = module.module\n    #        module_copy = deepcopy(inner_module).to(inner_module.config.device)\n    ##        module_copy.load_state_dict(inner_module.state_dict())\n    #        module_copy = nn.DistributedDataParallel(module_copy)\n    #    else:\n    #        module_copy = deepcopy(inner_module).to(module.config.device)\n    #        module_copy.load_state_dict(module.state_dict())\n    #    # module_copy = copy.deepcopy(module)\n    #    self.ema(module_copy)\n    #    return module_copy\n\n    def state_dict(self):\n        return self.shadow\n\n    def load_state_dict(self, state_dict):\n        if isinstance(state_dict, list):\n            self.shadow = state_dict\n        else:\n            self.shadow = state_dict.values()"
  },
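A sketch of how a training loop is expected to drive EMAHelper (register once, update after every optimizer step, copy the shadow weights back before evaluation). The toy linear model and hyperparameters are illustrative, not from the repository; the import assumes the script is run from the ddpm_exp directory:

# Usage sketch for EMAHelper (illustrative model and loop).
import torch
from models.ema import EMAHelper

model = torch.nn.Linear(4, 4)
ema_helper = EMAHelper(mu=0.999)
ema_helper.register(model)            # snapshot current weights as the shadow copy

optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
for _ in range(10):
    loss = model(torch.randn(2, 4)).pow(2).mean()
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    ema_helper.update(model)          # shadow <- mu * shadow + (1 - mu) * weights

ema_helper.ema(model)                 # overwrite model weights with the EMA copy
torch.save(ema_helper.state_dict(), "ema.pth")  # shadow is saved as a list of tensors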
  {
    "path": "ddpm_exp/prune.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\nfrom tqdm import tqdm\nfrom runners.diffusion import Diffusion\nfrom torchvision import transforms\nimport torchvision\nfrom datasets import get_dataset, data_transform, inverse_data_transform\nimport torchvision.utils as tvu\nfrom utils import UnlabeledImageFolder\n\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config():\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=2333, help=\"Random seed\")\n    parser.add_argument(\"--taylor_batch_size\", type=int, default=128, help=\"batch size for taylor expansion\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n\n    parser.add_argument(\n        \"--load_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--save_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_generated_samples\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"No interaction. 
Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n\n    parser.add_argument(\n        \"--pruner\",\n        type=str,\n        default=\"taylor\",\n        choices=[\"taylor\", \"random\", \"magnitude\", \"reinit\", \"first_order_taylor\", \"second_order_taylor\", 'abs_taylor', 'fisher', 'ours'],\n    )\n\n    parser.add_argument(\n        \"--restore_from\",\n        type=str,\n        default=None,\n        help=\"Restore from user a checkpoint\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--thr\",\n        type=float,\n        default=0.01,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--pruning_ratio\",\n        type=float,\n        default=0.0,\n        help=\"pruning ratio\",\n    )\n    \n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n\n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n    # add device\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    logging.info(\"Using device: {}\".format(device))\n    new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n    torch.backends.cudnn.benchmark = True\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    args, config = parse_args_and_config()\n    \n    runner = Diffusion(args, config)\n    if args.pruning_ratio > 0 and args.load_pruned_model is None:\n        # Dataset \n        print(config)\n        dataset, _ = get_dataset(args, config)\n        print(f\"Dataset size: {len(dataset)}\")\n        train_dataloader = torch.utils.data.DataLoader(\n            dataset, batch_size=args.taylor_batch_size, shuffle=True, num_workers=4, drop_last=True\n        )\n        from models.diffusion import AttnBlock\n        import torch_pruning as tp\n        print(\"Pruning ...\")\n        model = runner.model.eval()\n        model.to(runner.device)\n        example_inputs = {'x': torch.randn(1, 3, config.data.image_size, config.data.image_size).to(runner.device), 't': torch.ones(1).to(runner.device)}\n        if args.pruner == 'taylor':\n            imp = tp.importance.TaylorImportance()\n        elif args.pruner == 'first_order_taylor':\n            imp = 
tp.importance.FullTaylorImportance(order=1)\n        elif args.pruner == 'second_order_taylor':\n            imp = tp.importance.FullTaylorImportance(order=2)\n        elif args.pruner == 'random' or args.pruner == 'reinit':\n            imp = tp.importance.RandomImportance()\n        elif args.pruner == 'magnitude':\n            imp = tp.importance.MagnitudeImportance()\n        elif args.pruner == 'abs_taylor':\n            imp = tp.importance.AbsTaylorImportance()\n        elif args.pruner == 'fisher':\n            imp = tp.importance.FisherImportance()\n        elif args.pruner == 'ours':\n            imp = tp.importance.TaylorImportance()\n\n        ignored_layers = [model.conv_out]\n        channel_groups = {}\n        iterative_steps = 1\n        pruner = tp.pruner.MagnitudePruner(\n            model,\n            example_inputs,\n            importance=imp,\n            iterative_steps=iterative_steps,\n            channel_groups =channel_groups,\n            ch_sparsity=args.pruning_ratio, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n            ignored_layers=ignored_layers,\n            root_module_types=[torch.nn.Conv2d, torch.nn.Linear]\n        )\n        \n        #torch.manual_seed(10)\n        base_macs, base_nparams = tp.utils.count_ops_and_params(model, example_inputs)\n        image_path = args.save_pruned_model.replace('.pth', '')\n        n = config.sampling.batch_size\n        noise = torch.randn(\n            n,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=runner.device,\n        )\n        \n        if 'taylor' in args.pruner or 'fisher' in args.pruner or 'ours' in args.pruner:\n            x = iter(train_dataloader).next()\n            if isinstance(x, (list, tuple)):\n                x = x[0]\n            x = x.to(runner.device)\n            x = data_transform(config, x)\n            x = x.to(runner.device)\n            n = x.size(0)\n            e = torch.randn_like(x)\n            b = runner.betas\n            from functions.losses import noise_estimation_loss\n            model.zero_grad()\n            max_loss = 0\n            for step_k in tqdm(range(0, 1000)):\n                t = torch.ones(n, dtype=torch.long).to(runner.device) * step_k\n                loss = noise_estimation_loss(model, x, t, e, b)\n                if args.pruner == 'ours':\n                    if loss>max_loss:\n                        max_loss = loss\n                    if loss<max_loss*args.thr:\n                        break\n                    print(loss, max_loss)\n                loss.backward()\n        \n        print(\"============ Before Pruning ============\")\n        print(model)\n        for g in pruner.step(interactive=True):\n            g.prune()\n        \n        if args.pruner == 'reinit':\n            def reset_parameters(model):\n                for m in model.modules():\n                    if hasattr(m, 'reset_parameters'):\n                        m.reset_parameters()\n            model.apply(reset_parameters)\n        \n        macs, nparams = tp.utils.count_ops_and_params(model, example_inputs)\n        print(\"============ After Pruning ============\")\n        print(model)\n        print(\"#Params: {:.4f} M => {:.4f} M\".format(base_nparams/1e6, nparams/1e6))\n        print(\"#MACs: {:.4f} G => {:.4f} G\".format(base_macs/1e9, macs/1e9))\n        del pruner\n        # Save pruned model\n        
os.makedirs(os.path.dirname(args.save_pruned_model), exist_ok=True)\n        print(\"Saving pruned model as {}\".format(args.save_pruned_model))\n        torch.save(\n            model,\n            args.save_pruned_model\n        )\n\n        with torch.no_grad():\n            n = config.sampling.batch_size\n            x = runner.sample_image(noise, model)\n            x = inverse_data_transform(config, x)\n            grid = tvu.make_grid(x)\n            tvu.save_image(grid, image_path+'-pruned.png')\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
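prune.py builds a torch_pruning pruner around the diffusion U-Net, accumulates gradients for the Taylor-style criteria, prunes, then reports MACs/params. A condensed sketch of just the pruning calls, applied to a toy CNN so they are easy to follow; the model, sparsity, and ignored layer are illustrative, and the keyword arguments mirror those used in prune.py (so they assume the same torch_pruning version the repository targets):

# Condensed sketch of the pruning recipe in prune.py on a toy network.
import torch
import torch_pruning as tp

model = torch.nn.Sequential(
    torch.nn.Conv2d(3, 32, 3, padding=1),
    torch.nn.ReLU(),
    torch.nn.Conv2d(32, 3, 3, padding=1),
)
example_inputs = torch.randn(1, 3, 32, 32)

pruner = tp.pruner.MagnitudePruner(
    model,
    example_inputs,
    importance=tp.importance.MagnitudeImportance(),  # gradient-free criterion
    iterative_steps=1,
    ch_sparsity=0.5,                       # remove roughly half of the channels
    ignored_layers=[model[2]],             # keep the output conv intact, as prune.py does with conv_out
    root_module_types=[torch.nn.Conv2d, torch.nn.Linear],
)

base_macs, base_params = tp.utils.count_ops_and_params(model, example_inputs)
for group in pruner.step(interactive=True):
    group.prune()
macs, params = tp.utils.count_ops_and_params(model, example_inputs)
print("#Params: {:.4f} M => {:.4f} M".format(base_params / 1e6, params / 1e6))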
  {
    "path": "ddpm_exp/prune_kd.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\nfrom tqdm import tqdm\nfrom runners.diffusion_kd import Diffusion\nfrom torchvision import transforms\nimport torchvision\n\nfrom utils import UnlabeledImageFolder\n\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config():\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=2333, help=\"Random seed\")\n    parser.add_argument(\"--taylor_batch_size\", type=int, default=128, help=\"batch size for taylor expansion\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n\n    parser.add_argument(\n        \"--load_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--save_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"No interaction. 
Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n\n    parser.add_argument(\n        \"--pruner\",\n        type=str,\n        default=\"taylor\",\n        choices=[\"taylor\", \"random\", \"magnitude\", \"reinit\", \"first_order_taylor\", \"second_order_taylor\"],\n    )\n\n    parser.add_argument(\n        \"--restore_from\",\n        type=str,\n        default=None,\n        help=\"Restore from user a checkpoint\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--pruning_ratio\",\n        type=float,\n        default=0.0,\n        help=\"pruning ratio\",\n    )\n    \n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n\n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n    #tb_path = os.path.join(args.exp, \"tensorboard\", args.doc)\n\n    if not args.test and not args.sample:\n        if not args.resume_training:\n            if os.path.exists(args.log_path):\n                overwrite = False\n                if args.ni:\n                    overwrite = True\n                else:\n                    response = input(\"Folder already exists. Overwrite? (Y/N)\")\n                    if response.upper() == \"Y\":\n                        overwrite = True\n\n                if overwrite:\n                    shutil.rmtree(args.log_path)\n                    #shutil.rmtree(tb_path)\n                    os.makedirs(args.log_path)\n                    #if os.path.exists(tb_path):\n                    #    shutil.rmtree(tb_path)\n                else:\n                    print(\"Folder exists. 
Program halted.\")\n                    sys.exit(0)\n            else:\n                os.makedirs(args.log_path)\n\n            with open(os.path.join(args.log_path, \"config.yml\"), \"w\") as f:\n                yaml.dump(new_config, f, default_flow_style=False)\n        os.makedirs(os.path.join(args.log_path, 'vis'), exist_ok=True)\n        #new_config.tb_logger = tb.SummaryWriter(log_dir=tb_path)\n        # setup logger\n        level = getattr(logging, args.verbose.upper(), None)\n        if not isinstance(level, int):\n            raise ValueError(\"level {} not supported\".format(args.verbose))\n\n        handler1 = logging.StreamHandler()\n        handler2 = logging.FileHandler(os.path.join(args.log_path, \"stdout.txt\"))\n        formatter = logging.Formatter(\n            \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n        )\n        handler1.setFormatter(formatter)\n        handler2.setFormatter(formatter)\n        logger = logging.getLogger()\n        logger.addHandler(handler1)\n        logger.addHandler(handler2)\n        logger.setLevel(level)\n\n    else:\n        level = getattr(logging, args.verbose.upper(), None)\n        if not isinstance(level, int):\n            raise ValueError(\"level {} not supported\".format(args.verbose))\n\n        handler1 = logging.StreamHandler()\n        formatter = logging.Formatter(\n            \"%(levelname)s - %(filename)s - %(asctime)s - %(message)s\"\n        )\n        handler1.setFormatter(formatter)\n        logger = logging.getLogger()\n        logger.addHandler(handler1)\n        logger.setLevel(level)\n\n        if args.sample:\n            os.makedirs(os.path.join(args.exp, \"image_samples\"), exist_ok=True)\n            args.image_folder = os.path.join(\n                args.exp, \"image_samples\", args.image_folder\n            )\n            if not os.path.exists(args.image_folder):\n                os.makedirs(args.image_folder)\n            else:\n                if not (args.fid or args.interpolation):\n                    overwrite = False\n                    if args.ni:\n                        overwrite = True\n                    else:\n                        response = input(\n                            f\"Image folder {args.image_folder} already exists. Overwrite? (Y/N)\"\n                        )\n                        if response.upper() == \"Y\":\n                            overwrite = True\n\n                    if overwrite:\n                        shutil.rmtree(args.image_folder)\n                        os.makedirs(args.image_folder)\n                    else:\n                        print(\"Output image folder exists. 
Program halted.\")\n                        sys.exit(0)\n\n    # add device\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    logging.info(\"Using device: {}\".format(device))\n    new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n\n    torch.backends.cudnn.benchmark = True\n\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    args, config = parse_args_and_config()\n    logging.info(\"Writing log file to {}\".format(args.log_path))\n    logging.info(\"Exp instance id = {}\".format(os.getpid()))\n    logging.info(\"Exp comment = {}\".format(args.comment))\n\n    try:\n        runner = Diffusion(args, config)\n        if args.pruning_ratio > 0 and args.load_pruned_model is None:\n            # Dataset \n            print(config)\n            if config.data.dataset.lower() == 'cifar10':\n                augmentations = transforms.Compose(\n                    [\n                        transforms.RandomHorizontalFlip(),\n                        transforms.ToTensor(),\n                        transforms.Normalize([0.5], [0.5]),\n                    ]\n                )\n                dataset = torchvision.datasets.CIFAR10(root='./data/cifar10', train=True, download=False, transform=augmentations)\n            else:\n                augmentations = transforms.Compose(\n                    [\n                        transforms.Resize(256, interpolation=transforms.InterpolationMode.BILINEAR),\n                        transforms.CenterCrop(256),\n                        transforms.RandomHorizontalFlip(),\n                        transforms.ToTensor(),\n                        transforms.Normalize([0.5], [0.5]),\n                    ]\n                )\n                if 'celeba' in config.data.dataset.lower():\n                    dataset = UnlabeledImageFolder(args.dataset, transform=augmentations, exts=['png'])\n                else:\n                    dataset = UnlabeledImageFolder(args.dataset, transform=augmentations, exts=['webp'])\n            print(f\"Dataset size: {len(dataset)}\")\n\n            train_dataloader = torch.utils.data.DataLoader(\n                dataset, batch_size=args.taylor_batch_size, shuffle=True, num_workers=4, drop_last=True\n            )\n\n            from models.diffusion import AttnBlock\n            import torch_pruning as tp\n            print(\"Pruning ...\")\n            model = runner.model\n            model.to(runner.device)\n            example_inputs = {'x': torch.randn(1, 3, 32, 32).to(runner.device), 't': torch.ones(1).to(runner.device)}\n\n            if args.pruner == 'taylor':\n                imp = tp.importance.TaylorImportance()\n            elif args.pruner == 'first_order_taylor':\n                imp = tp.importance.FullTaylorImportance(order=1)\n            elif args.pruner == 'second_order_taylor':\n                imp = tp.importance.FullTaylorImportance(order=2)\n            elif args.pruner == 'random' or args.pruner == 'reinit':\n                imp = tp.importance.RandomImportance()\n            elif args.pruner == 'magnitude':\n        
        imp = tp.importance.MagnitudeImportance()\n            ignored_layers = [model.conv_out]\n            channel_groups = {}\n            iterative_steps = 1\n            pruner = tp.pruner.MagnitudePruner(\n                model,\n                example_inputs,\n                importance=imp,\n                iterative_steps=iterative_steps,\n                channel_groups =channel_groups,\n                ch_sparsity=args.pruning_ratio, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n                ignored_layers=ignored_layers,\n                root_module_types=[torch.nn.Conv2d, torch.nn.Linear]\n            )\n            base_macs, base_nparams = tp.utils.count_ops_and_params(model, example_inputs)\n\n            if 'taylor' in args.pruner:\n                x = iter(train_dataloader).next()\n                if isinstance(x, (list, tuple)):\n                    x = x[0]\n                x = x.to(runner.device)\n                n = x.size(0)\n                e = torch.randn_like(x)\n                b = runner.betas\n                t = torch.randint(\n                        low=0, high=runner.num_timesteps, size=(n // 2 + 1,)\n                ).to(runner.device)\n                \n                t = torch.cat([t, runner.num_timesteps - t - 1], dim=0)[:n]\n                from functions.losses import loss_registry\n                \n                model.zero_grad()\n                for step_k in tqdm(range(1000)):\n                    loss = loss_registry[config.model.type](model, x, t, e, b)\n                    loss.backward()\n\n            print(\"============ Before Pruning ============\")\n            print(model)\n            for g in pruner.step(interactive=True):\n                g.prune()\n            \n            if args.pruner == 'reinit':\n                def reset_parameters(model):\n                    for m in model.modules():\n                        if hasattr(m, 'reset_parameters'):\n                            m.reset_parameters()\n                model.apply(reset_parameters)\n            \n            macs, nparams = tp.utils.count_ops_and_params(model, example_inputs)\n            print(\"============ After Pruning ============\")\n            print(model)\n            print(\"#Params: {:.4f} M => {:.4f} M\".format(base_nparams/1e6, nparams/1e6))\n            print(\"#MACs: {:.4f} G => {:.4f} G\".format(base_macs/1e9, macs/1e9))\n            del pruner\n            # Save pruned model\n            print(\"Saving pruned model as {}\".format(os.path.join(args.log_path, \"pruned_model.pth\")))\n            torch.save(\n                model,\n                os.path.join(args.log_path, \"pruned_model.pth\"),\n            )\n        \n        if args.load_pruned_model is not None:\n            print(\"Loading pruned model from {}\".format(args.load_pruned_model))\n            model = torch.load(args.load_pruned_model, map_location='cpu')\n            runner.model = model\n        \n        if args.sample:\n            runner.sample()\n        elif args.test:\n            runner.test()\n        else:\n            runner.train()\n    except Exception:\n        logging.error(traceback.format_exc())\n\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
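For the Taylor-importance gradient pass, prune_kd.py draws diffusion timesteps with the mirrored (antithetic) sampling used by the DDPM training code: half the batch is sampled uniformly and each draw is paired with num_timesteps - t - 1. A standalone snippet of just that step, with illustrative sizes:

# Antithetic timestep sampling as used in prune_kd.py's gradient pass.
import torch

n, num_timesteps = 8, 1000
t = torch.randint(low=0, high=num_timesteps, size=(n // 2 + 1,))
t = torch.cat([t, num_timesteps - t - 1], dim=0)[:n]   # each step paired with its mirror
print(t)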
  {
    "path": "ddpm_exp/prune_ssim.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\nfrom tqdm import tqdm\nfrom runners.diffusion import Diffusion\nfrom torchvision import transforms\nimport torchvision\nfrom datasets import get_dataset, data_transform, inverse_data_transform\nimport torchvision.utils as tvu\nfrom utils import UnlabeledImageFolder\n\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config():\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=2333, help=\"Random seed\")\n    parser.add_argument(\"--taylor_batch_size\", type=int, default=128, help=\"batch size for taylor expansion\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n\n    parser.add_argument(\n        \"--load_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--save_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_generated_samples\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"No interaction. 
Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n\n    parser.add_argument(\n        \"--pruner\",\n        type=str,\n        default=\"taylor\",\n        choices=[\"taylor\", \"random\", \"magnitude\", \"reinit\", \"first_order_taylor\", \"second_order_taylor\", 'abs_taylor', 'fisher', 'ours'],\n    )\n\n    parser.add_argument(\n        \"--restore_from\",\n        type=str,\n        default=None,\n        help=\"Restore from user a checkpoint\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--pruning_ratio\",\n        type=float,\n        default=0.0,\n        help=\"pruning ratio\",\n    )\n\n    parser.add_argument(\n        \"--stage\",\n        type=int,\n        default=0,\n        help=\"pruning ratio\",\n    )\n    \n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n\n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n    # add device\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    logging.info(\"Using device: {}\".format(device))\n    new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n    torch.backends.cudnn.benchmark = True\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    args, config = parse_args_and_config()\n    \n    runner = Diffusion(args, config)\n\n    # Dataset \n    print(config)\n    dataset, _ = get_dataset(args, config)\n    print(f\"Dataset size: {len(dataset)}\")\n    train_dataloader = torch.utils.data.DataLoader(\n        dataset, batch_size=args.taylor_batch_size, shuffle=True, num_workers=4, drop_last=True\n    )\n    from models.diffusion import AttnBlock\n    import torch_pruning as tp\n    print(\"Pruning ...\")\n    model = runner.model.eval()\n    model.to(runner.device)\n    example_inputs = {'x': torch.randn(1, 3, config.data.image_size, config.data.image_size).to(runner.device), 't': torch.ones(1).to(runner.device)}\n    if args.pruner == 'taylor':\n        imp = tp.importance.TaylorImportance()\n    elif args.pruner == 'first_order_taylor':\n        imp = tp.importance.FullTaylorImportance(order=1)\n    elif args.pruner == 'second_order_taylor':\n        imp = tp.importance.FullTaylorImportance(order=2)\n    elif args.pruner == 'random' or args.pruner 
== 'reinit':\n        imp = tp.importance.RandomImportance()\n    elif args.pruner == 'magnitude':\n        imp = tp.importance.MagnitudeImportance()\n    elif args.pruner == 'abs_taylor':\n        imp = tp.importance.AbsTaylorImportance()\n    elif args.pruner == 'fisher':\n        imp = tp.importance.FisherImportance()\n    elif args.pruner == 'ours':\n        imp = tp.importance.TaylorImportance()\n\n    ignored_layers = [model.conv_out]\n    channel_groups = {}\n    iterative_steps = 1\n    pruner = tp.pruner.MagnitudePruner(\n        model,\n        example_inputs,\n        importance=imp,\n        iterative_steps=iterative_steps,\n        channel_groups =channel_groups,\n        ch_sparsity=args.pruning_ratio, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n        ignored_layers=ignored_layers,\n        root_module_types=[torch.nn.Conv2d, torch.nn.Linear]\n    )\n    #torch.manual_seed(10)\n    base_macs, base_nparams = tp.utils.count_ops_and_params(model, example_inputs)\n    image_path = args.save_pruned_model.replace('.pth', '')\n    n = config.sampling.batch_size\n    noise = torch.randn(\n        n,\n        config.data.channels,\n        config.data.image_size,\n        config.data.image_size,\n        device=runner.device,\n    )\n    loss_list = []\n    if args.pruning_ratio > 0 and args.stage>0 and args.load_pruned_model is None:\n        if 'taylor' in args.pruner or 'fisher' in args.pruner or 'ours' in args.pruner:\n            x = iter(train_dataloader).next()\n            if isinstance(x, (list, tuple)):\n                x = x[0]\n            x = x.to(runner.device)\n            x = data_transform(config, x)\n            x = x.to(runner.device)\n            n = x.size(0)\n            e = torch.randn_like(x)\n            b = runner.betas\n            alphas = 1.0 - b\n            alphas_cumprod = alphas.cumprod(dim=0)\n            weights = ((1 - alphas) / (torch.sqrt( alphas_cumprod * (1 - alphas_cumprod) ) + 1e-8)).clamp(min=1)\n            print(weights)\n            from functions.losses import loss_registry\n            model.zero_grad()\n            if args.pruner=='abs_taylor':\n                imp.set_model(model)\n            max_loss = 0\n\n            for step_k in tqdm(range(0, args.stage)):\n                t = torch.ones(n, dtype=torch.long).to(runner.device) * step_k\n                loss = loss_registry[config.model.type](model, x, t, e, b)\n                #if loss>max_loss:\n                #    max_loss = loss\n                #if loss<max_loss*0.00:\n                #    break\n                #if args.pruner == 'ours':\n                #    loss = weights[step_k] * loss\n                loss_list.append(loss.item())\n                loss.backward()\n        \n        print(\"============ Before Pruning ============\")\n        print(model)\n        for g in pruner.step(interactive=True):\n            g.prune()\n        \n        if args.pruner == 'reinit':\n            def reset_parameters(model):\n                for m in model.modules():\n                    if hasattr(m, 'reset_parameters'):\n                        m.reset_parameters()\n            model.apply(reset_parameters)\n        \n        macs, nparams = tp.utils.count_ops_and_params(model, example_inputs)\n        print(\"============ After Pruning ============\")\n        print(model)\n        print(\"#Params: {:.4f} M => {:.4f} M\".format(base_nparams/1e6, nparams/1e6))\n        print(\"#MACs: {:.4f} G => {:.4f} G\".format(base_macs/1e9, 
macs/1e9))\n        del pruner\n        # Save pruned model\n        #os.makedirs(os.path.dirname(args.save_pruned_model), exist_ok=True)\n        #print(\"Saving pruned model as {}\".format(args.save_pruned_model))\n        #torch.save(\n        #    model,\n        #    args.save_pruned_model\n        #)\n\n    os.makedirs('run/prune_ssim_2/{}'.format(args.stage), exist_ok=True)\n    os.makedirs('run/vis_2', exist_ok=True)\n    with torch.no_grad():\n        n = config.sampling.batch_size\n        x = runner.sample_image(noise, model)\n        x = inverse_data_transform(config, x)\n        for i, xi in enumerate(x):\n            tvu.save_image(xi, 'run/prune_ssim_2/{}/{}.png'.format(args.stage, i))\n        grid = tvu.make_grid(x)\n        tvu.save_image(grid, 'run/vis_2/pruned-{}.png'.format(args.stage))\n    print(loss_list)\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
  {
    "path": "ddpm_exp/prune_test.py",
    "content": "import argparse\nimport traceback\nimport shutil\nimport logging\nimport yaml\nimport sys\nimport os\nimport torch\nimport numpy as np\nimport torch.utils.tensorboard as tb\nfrom tqdm import tqdm\nfrom runners.diffusion import Diffusion\nfrom torchvision import transforms\nimport torchvision\nfrom datasets import get_dataset, data_transform, inverse_data_transform\nimport torchvision.utils as tvu\nfrom utils import UnlabeledImageFolder\n\ntorch.set_printoptions(sci_mode=False)\n\n\ndef parse_args_and_config():\n    parser = argparse.ArgumentParser(description=globals()[\"__doc__\"])\n\n    parser.add_argument(\n        \"--config\", type=str, required=True, help=\"Path to the config file\"\n    )\n    parser.add_argument(\"--seed\", type=int, default=2333, help=\"Random seed\")\n    parser.add_argument(\"--taylor_batch_size\", type=int, default=128, help=\"batch size for taylor expansion\")\n    parser.add_argument(\n        \"--exp\", type=str, default=\"exp\", help=\"Path for saving running related data.\"\n    )\n    parser.add_argument(\n        \"--doc\",\n        type=str,\n        required=True,\n        help=\"A string for documentation purpose. \"\n        \"Will be the name of the log folder.\",\n    )\n    parser.add_argument(\n        \"--comment\", type=str, default=\"\", help=\"A string for experiment comment\"\n    )\n\n    parser.add_argument(\n        \"--load_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--save_pruned_model\", type=str, default=None, help=\"load pruned models\"\n    )\n\n    parser.add_argument(\n        \"--verbose\",\n        type=str,\n        default=\"info\",\n        help=\"Verbose level: info | debug | warning | critical\",\n    )\n    parser.add_argument(\"--test\", action=\"store_true\", help=\"Whether to test the model\")\n    parser.add_argument(\n        \"--sample\",\n        action=\"store_true\",\n        help=\"Whether to produce samples from the model\",\n    )\n    parser.add_argument(\"--fid\", action=\"store_true\")\n    parser.add_argument(\"--interpolation\", action=\"store_true\")\n    parser.add_argument(\n        \"--resume_training\", action=\"store_true\", help=\"Whether to resume training\"\n    )\n    parser.add_argument(\n        \"-i\",\n        \"--image_folder\",\n        type=str,\n        default=\"images\",\n        help=\"The folder name of samples\",\n    )\n    parser.add_argument(\n        \"--ni\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_generated_samples\",\n        action=\"store_true\",\n        help=\"No interaction. Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"No interaction. 
Suitable for Slurm Job launcher\",\n    )\n    parser.add_argument(\"--use_pretrained\", action=\"store_true\")\n    parser.add_argument(\n        \"--sample_type\",\n        type=str,\n        default=\"generalized\",\n        help=\"sampling approach (generalized or ddpm_noisy)\",\n    )\n    parser.add_argument(\n        \"--skip_type\",\n        type=str,\n        default=\"uniform\",\n        help=\"skip according to (uniform or quadratic)\",\n    )\n\n    parser.add_argument(\n        \"--pruner\",\n        type=str,\n        default=\"taylor\",\n        choices=[\"taylor\", \"random\", \"magnitude\", \"reinit\", \"first_order_taylor\", \"second_order_taylor\", 'abs_taylor', 'fisher', 'ours'],\n    )\n\n    parser.add_argument(\n        \"--restore_from\",\n        type=str,\n        default=None,\n        help=\"Restore from user a checkpoint\",\n    )\n    parser.add_argument(\n        \"--timesteps\", type=int, default=1000, help=\"number of steps involved\"\n    )\n    parser.add_argument(\n        \"--eta\",\n        type=float,\n        default=0.0,\n        help=\"eta used to control the variances of sigma\",\n    )\n    parser.add_argument(\n        \"--pruning_ratio\",\n        type=float,\n        default=0.0,\n        help=\"pruning ratio\",\n    )\n    \n    parser.add_argument(\"--sequence\", action=\"store_true\")\n\n    args = parser.parse_args()\n    args.log_path = os.path.join(args.exp, \"logs\", args.doc)\n\n    # parse config file\n    with open(os.path.join(\"configs\", args.config), \"r\") as f:\n        config = yaml.safe_load(f)\n    new_config = dict2namespace(config)\n\n    # add device\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    logging.info(\"Using device: {}\".format(device))\n    new_config.device = device\n\n    # set random seed\n    torch.manual_seed(args.seed)\n    np.random.seed(args.seed)\n    if torch.cuda.is_available():\n        torch.cuda.manual_seed_all(args.seed)\n    torch.backends.cudnn.benchmark = True\n    return args, new_config\n\n\ndef dict2namespace(config):\n    namespace = argparse.Namespace()\n    for key, value in config.items():\n        if isinstance(value, dict):\n            new_value = dict2namespace(value)\n        else:\n            new_value = value\n        setattr(namespace, key, new_value)\n    return namespace\n\n\ndef main():\n    args, config = parse_args_and_config()\n    \n    runner = Diffusion(args, config)\n    torch.manual_seed(10)\n    if True or args.pruning_ratio > 0 and args.load_pruned_model is None:\n        # Dataset \n        print(config)\n        dataset, _ = get_dataset(args, config)\n        print(f\"Dataset size: {len(dataset)}\")\n        train_dataloader = torch.utils.data.DataLoader(\n            dataset, batch_size=args.taylor_batch_size, shuffle=True, num_workers=4, drop_last=True\n        )\n        from models.diffusion import AttnBlock\n        import torch_pruning as tp\n        print(\"Pruning ...\")\n        model = runner.model.eval()\n        model.to(runner.device)\n        example_inputs = {'x': torch.randn(1, 3, config.data.image_size, config.data.image_size).to(runner.device), 't': torch.ones(1).to(runner.device)}\n        if args.pruner == 'taylor':\n            imp = tp.importance.TaylorImportance()\n        elif args.pruner == 'first_order_taylor':\n            imp = tp.importance.FullTaylorImportance(order=1)\n        elif args.pruner == 'second_order_taylor':\n            imp = tp.importance.FullTaylorImportance(order=2)\n  
      elif args.pruner == 'random' or args.pruner == 'reinit':\n            imp = tp.importance.RandomImportance()\n        elif args.pruner == 'magnitude':\n            imp = tp.importance.MagnitudeImportance()\n        elif args.pruner == 'abs_taylor':\n            imp = tp.importance.AbsTaylorImportance()\n        elif args.pruner == 'fisher':\n            imp = tp.importance.FisherImportance()\n        elif args.pruner == 'ours':\n            imp = tp.importance.TaylorImportance()\n\n        ignored_layers = [model.conv_out, model.temb]\n        channel_groups = {}\n        iterative_steps = 1\n        pruner = tp.pruner.MagnitudePruner(\n            model,\n            example_inputs,\n            importance=imp,\n            iterative_steps=iterative_steps,\n            channel_groups=channel_groups,\n            ch_sparsity=args.pruning_ratio, # fraction of channels to remove from each prunable layer\n            ignored_layers=ignored_layers,\n            root_module_types=[torch.nn.Conv2d, torch.nn.Linear]\n        )\n        #torch.manual_seed(10)\n        base_macs, base_nparams = tp.utils.count_ops_and_params(model, example_inputs)\n        image_path = args.save_pruned_model.replace('.pth', '')\n        n = config.sampling.batch_size\n        noise = torch.randn(\n            n,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=runner.device,\n        )\n        \n        if 'taylor' in args.pruner or 'fisher' in args.pruner or 'ours' in args.pruner:\n            if args.use_generated_samples:\n                with torch.no_grad():\n                    x = runner.sample_image(noise, model)\n                    x = inverse_data_transform(config, x)\n                    grid = tvu.make_grid(x)\n                    tvu.save_image(grid, image_path+'-generated.png')\n            else:\n                x = next(iter(train_dataloader))\n                if isinstance(x, (list, tuple)):\n                    x = x[0]\n                x = x.to(runner.device)\n                x = data_transform(config, x)\n            x = x.to(runner.device)\n            n = x.size(0)\n            e = torch.randn_like(x)\n            b = runner.betas\n            alphas = 1.0 - b\n            alphas_cumprod = alphas.cumprod(dim=0)\n            #weights = torch.ones(1000) # torch.linspace(100, 1, 1000) #((1 - alphas) / (torch.sqrt( alphas_cumprod * (1 - alphas_cumprod) ) + 1e-8)).clamp(min=1)\n            #print(weights)\n            #t = torch.randint(\n            #        low=0, high=runner.num_timesteps, size=(n // 2 + 1,)\n            #).to(runner.device)\n            #t = torch.cat([t, runner.num_timesteps - t - 1], dim=0)[:n]\n            from functions.losses import loss_registry\n            model.zero_grad()\n            if args.pruner=='abs_taylor':\n                imp.set_model(model)\n\n            for step_k in tqdm(range(0, 400)):\n                t = torch.ones(n, dtype=torch.long).to(runner.device) * step_k\n                loss = loss_registry[config.model.type](model, x, t, e, b)\n                #if args.pruner == 'ours':\n                #    loss = weights[step_k] * loss\n                loss.backward()\n        \n        print(\"============ Before Pruning ============\")\n        print(model)\n        for g in pruner.step(interactive=True):\n            g.prune()\n        \n        if args.pruner == 'reinit':\n            def reset_parameters(model):\n                for m in model.modules():\n                    if hasattr(m, 'reset_parameters'):\n                        m.reset_parameters()\n            model.apply(reset_parameters)\n        \n        macs, nparams = tp.utils.count_ops_and_params(model, example_inputs)\n        print(\"============ After Pruning ============\")\n        print(model)\n        print(\"#Params: {:.4f} M => {:.4f} M\".format(base_nparams/1e6, nparams/1e6))\n        print(\"#MACs: {:.4f} G => {:.4f} G\".format(base_macs/1e9, macs/1e9))\n        del pruner\n        # Save pruned model\n        os.makedirs(os.path.dirname(args.save_pruned_model), exist_ok=True)\n        print(\"Saving pruned model as {}\".format(args.save_pruned_model))\n        torch.save(\n            model,\n            args.save_pruned_model\n        )\n\n        with torch.no_grad():\n            \n            n = config.sampling.batch_size\n            #x = torch.randn(\n            #    n,\n            #    config.data.channels,\n            #    config.data.image_size,\n            #    config.data.image_size,\n            #    device=runner.device,\n            #)\n            x = runner.sample_image(noise, model)\n            x = inverse_data_transform(config, x)\n            grid = tvu.make_grid(x)\n            tvu.save_image(grid, image_path+'-pruned.png')\n    return 0\n\n\nif __name__ == \"__main__\":\n    sys.exit(main())\n"
  },
  {
    "path": "ddpm_exp/runners/__init__.py",
    "content": ""
  },
  {
    "path": "ddpm_exp/runners/diffusion.py",
    "content": "import os\nimport logging\nimport time\nimport glob\n\nimport numpy as np\nimport tqdm\nimport torch\nimport torch.utils.data as data\n\nfrom models.diffusion import Model\nfrom models.ema import EMAHelper\nfrom functions import get_optimizer\nfrom functions.losses import noise_estimation_loss, noise_estimation_kd_loss\nfrom datasets import get_dataset, data_transform, inverse_data_transform\nfrom functions.ckpt_util import get_ckpt_path\nimport torchvision.utils as tvu\nfrom accelerate import Accelerator\n\n\ndef torch2hwcuint8(x, clip=False):\n    if clip:\n        x = torch.clamp(x, -1, 1)\n    x = (x + 1.0) / 2.0\n    return x\n\n\ndef get_beta_schedule(beta_schedule, *, beta_start, beta_end, num_diffusion_timesteps):\n    def sigmoid(x):\n        return 1 / (np.exp(-x) + 1)\n\n    if beta_schedule == \"quad\":\n        betas = (\n            np.linspace(\n                beta_start ** 0.5,\n                beta_end ** 0.5,\n                num_diffusion_timesteps,\n                dtype=np.float64,\n            )\n            ** 2\n        )\n    elif beta_schedule == \"linear\":\n        betas = np.linspace(\n            beta_start, beta_end, num_diffusion_timesteps, dtype=np.float64\n        )\n    elif beta_schedule == \"const\":\n        betas = beta_end * np.ones(num_diffusion_timesteps, dtype=np.float64)\n    elif beta_schedule == \"jsd\":  # 1/T, 1/(T-1), 1/(T-2), ..., 1\n        betas = 1.0 / np.linspace(\n            num_diffusion_timesteps, 1, num_diffusion_timesteps, dtype=np.float64\n        )\n    elif beta_schedule == \"sigmoid\":\n        betas = np.linspace(-6, 6, num_diffusion_timesteps)\n        betas = sigmoid(betas) * (beta_end - beta_start) + beta_start\n    else:\n        raise NotImplementedError(beta_schedule)\n    assert betas.shape == (num_diffusion_timesteps,)\n    return betas\n\n\nclass Diffusion(object):\n    def __init__(self, args, config, device=None):\n        self.args = args\n        self.config = config\n        if device is None:\n            device = (\n                torch.device(\"cuda\")\n                if torch.cuda.is_available()\n                else torch.device(\"cpu\")\n            )\n        self.device = device\n\n        self.model_var_type = config.model.var_type\n        betas = get_beta_schedule(\n            beta_schedule=config.diffusion.beta_schedule,\n            beta_start=config.diffusion.beta_start,\n            beta_end=config.diffusion.beta_end,\n            num_diffusion_timesteps=config.diffusion.num_diffusion_timesteps,\n        )\n        betas = self.betas = torch.from_numpy(betas).float().to(self.device)\n        self.num_timesteps = betas.shape[0]\n\n        alphas = 1.0 - betas\n        alphas_cumprod = alphas.cumprod(dim=0)\n        alphas_cumprod_prev = torch.cat(\n            [torch.ones(1).to(device), alphas_cumprod[:-1]], dim=0\n        )\n        posterior_variance = (\n            betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n        )\n        if self.model_var_type == \"fixedlarge\":\n            self.logvar = betas.log()\n            # torch.cat(\n            # [posterior_variance[1:2], betas[1:]], dim=0).log()\n        elif self.model_var_type == \"fixedsmall\":\n            self.logvar = posterior_variance.clamp(min=1e-20).log()\n\n        self.build_model()\n\n    def build_model(self):\n        args, config = self.args, self.config\n        model = Model(config)\n        if args.load_pruned_model is not None:\n            print(\"Loading pruned model from 
{}\".format(args.load_pruned_model))\n            states = torch.load(args.load_pruned_model, map_location='cpu')\n\n            if isinstance(states, torch.nn.Module): # a simple pruned model \n                model = torch.load(args.load_pruned_model, map_location='cpu')\n            elif isinstance(states, list): # pruned model and training states\n                model = states[0]\n                if args.use_ema and self.config.model.ema:\n                    print(\"Loading EMA\")\n                    ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n                    ema_helper.register(model)\n                    ema_helper.load_state_dict(states[-1])\n                    ema_helper.ema(model)\n                    self.ema_helper = ema_helper\n                else:\n                    self.ema_helper = None\n            else:\n                raise NotImplementedError\n            self.model = model\n        elif args.restore_from is not None and os.path.isfile(args.restore_from):\n            ckpt = args.restore_from\n            print(\"Loading checkpoint {}\".format(ckpt))\n            states = torch.load(\n                ckpt,\n                map_location='cpu',\n            )\n            if isinstance(states[0], torch.nn.Module):\n                model = states[0]\n                model = model.to(self.device)\n            else:\n                model = model.to(self.device)\n                model.load_state_dict(states[0], strict=True) \n\n            if args.use_ema and self.config.model.ema:\n                print(\"Loading EMA\")\n                ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n                ema_helper.register(model)\n                ema_helper.load_state_dict(states[-1])\n                ema_helper.ema(model)\n                self.ema_helper = ema_helper\n            else:\n                self.ema_helper = None\n        \n        elif not self.args.use_pretrained:\n            if getattr(self.config.sampling, \"ckpt_id\", None) is None:\n                ckpt = os.path.join(self.args.log_path, \"ckpt.pth\")\n                states = torch.load(\n                    ckpt,\n                    map_location=self.config.device,\n                )\n            else:\n                ckpt = os.path.join(\n                        self.args.log_path, f\"ckpt_{self.config.sampling.ckpt_id}.pth\"\n                    )\n                states = torch.load(\n                    ckpt,\n                    map_location=self.config.device,\n                )\n            print(\"Loading checkpoint {}\".format(ckpt))\n\n            if isinstance(states[0], torch.nn.Module):\n                model = states[0]\n                model = model.to(self.device)\n            else:\n                model = model.to(self.device)\n                model.load_state_dict(states[0], strict=True)\n\n            if args.use_ema and self.config.model.ema:\n                ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n                ema_helper.register(model)\n                ema_helper.load_state_dict(states[-1])\n                ema_helper.ema(model)\n                self.ema_helper = ema_helper\n            else:\n                self.ema_helper = None\n        else:\n            # This used the pretrained DDPM model, see https://github.com/pesser/pytorch_diffusion\n            if self.config.data.dataset == \"CIFAR10\":\n                name = \"cifar10\"\n            elif self.config.data.dataset == \"LSUN\":\n                name = 
f\"lsun_{self.config.data.category}\"\n            elif self.config.data.dataset == \"CELEBA\":\n                name = 'celeba'\n            ckpt = get_ckpt_path(f\"ema_{name}\")\n            print(\"Loading checkpoint {}\".format(ckpt))\n            states = torch.load(ckpt, map_location=self.device)\n            if isinstance(states, (list,tuple)):\n                model.load_state_dict(states[0])\n            else:\n                model.load_state_dict(states)\n            model.to(self.device)\n        self.model = model\n\n    def train(self, kd=False):\n        accelerator = self.accelerator\n        device = accelerator.device\n\n        if kd:\n            teacher = Model(self.config)\n            if self.config.data.dataset == \"CIFAR10\":\n                name = \"cifar10\"\n            elif self.config.data.dataset == \"LSUN\":\n                name = f\"lsun_{self.config.data.category}\"\n            elif self.config.data.dataset == 'CELEBA':\n                name = \"celeba\"\n            ckpt = get_ckpt_path(f\"ema_{name}\")\n            print(\"Loading teacher from {}\".format(ckpt))\n            states = torch.load(ckpt, map_location='cpu')\n            if isinstance(states, list):\n                teacher.load_state_dict(states[0])\n            else:\n                teacher.load_state_dict()\n            teacher.to(accelerator.device)\n            self.teacher = teacher.eval()\n            \n        args, config = self.args, self.config\n        dataset, test_dataset = get_dataset(args, config)\n        train_loader = data.DataLoader(\n            dataset,\n            batch_size=config.training.batch_size,\n            shuffle=True,\n            num_workers=config.data.num_workers,\n        )\n        model = self.model\n        optimizer = get_optimizer(self.config, model.parameters())\n        if args.use_ema and self.config.model.ema:\n            ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n            ema_helper.register(model)\n        else:\n            ema_helper = None\n\n        start_epoch, step = 0, 0\n        if self.args.resume_training:\n            states = torch.load(os.path.join(self.args.log_path, \"ckpt.pth\"))\n            if isinstance(states[0], torch.nn.Module):\n                self.model = model = states[0]\n                optimizer = get_optimizer(self.config, model.parameters()) # rebuild the optimizer\n            else:\n                model.load_state_dict(states[0])\n            states[1][\"param_groups\"][0][\"eps\"] = self.config.optim.eps\n            optimizer.load_state_dict(states[1])\n            start_epoch = states[2]\n            step = states[3]\n            if self.config.model.ema:\n                ema_helper.load_state_dict(states[4])\n\n        model, optimizer, train_loader = accelerator.prepare(model, optimizer, train_loader)\n        self.model = model\n\n        if ema_helper is not None:\n            ema_helper.to(accelerator.device)\n\n        if accelerator.is_main_process:\n            unwrapped_model = accelerator.unwrap_model(model)\n            unwrapped_model.eval()\n            if ema_helper is not None:\n                ema_helper.store(unwrapped_model.parameters())\n                ema_helper.copy_to(unwrapped_model.parameters())\n\n            with torch.no_grad():\n                n = config.sampling.batch_size\n                x = torch.randn(\n                    n,\n                    config.data.channels,\n                    config.data.image_size,\n                    
config.data.image_size,\n                    device=accelerator.device,\n                )\n                x = self.sample_image(x, unwrapped_model)\n                x = inverse_data_transform(config, x)\n                grid = tvu.make_grid(x)\n                tvu.save_image(grid, os.path.join(args.log_path, 'vis', 'Init.png'))\n                #tb_logger.add_image('Before Training', grid, global_step=0)\n            \n            if args.use_ema:\n                ema_helper.restore(unwrapped_model.parameters())\n\n        for epoch in range(start_epoch, self.config.training.n_epochs):\n            data_start = time.time()\n            data_time = 0    \n            for i, (x, y) in enumerate(train_loader):\n                n = x.size(0)\n                data_time += time.time() - data_start\n                model.train()\n                step += 1\n\n                x = x.to(self.device)\n                x = data_transform(self.config, x)\n                e = torch.randn_like(x)\n                b = self.betas\n\n                # antithetic sampling\n                t = torch.randint(\n                    low=0, high=self.num_timesteps, size=(n // 2 + 1,)\n                ).to(self.device)\n                t = torch.cat([t, self.num_timesteps - t - 1], dim=0)[:n]\n\n                if kd:\n                    loss = noise_estimation_kd_loss(model, teacher, x, t, e, b)\n                else:\n                    loss = noise_estimation_loss(model, x, t, e, b)\n\n                #tb_logger.add_scalar(\"loss\", loss, global_step=step)\n                if step % 100 == 0:\n                    logging.info(\n                        f\"step: {step} (Ep={epoch}/{self.config.training.n_epochs}, Iter={i}/{len(train_loader)}), loss: {loss.item()}, data time: {data_time / (i+1)}\"\n                    )\n\n                optimizer.zero_grad()\n                accelerator.backward(loss)\n                try:\n                    if accelerator.sync_gradients:\n                        accelerator.clip_grad_norm_(model.parameters(), config.optim.grad_clip)\n                except Exception:\n                    pass\n                optimizer.step()\n\n                if args.use_ema and self.config.model.ema:\n                    ema_helper.update(model)\n                \n                accelerator.wait_for_everyone()\n                if accelerator.is_main_process:\n                    if step % self.config.training.snapshot_freq == 0 or step == 1:\n                        unwrapped_model = accelerator.unwrap_model(model)\n                        unwrapped_model.eval()\n\n                        unwrapped_model.zero_grad()\n                        states = [\n                            unwrapped_model,\n                            optimizer.state_dict(),\n                            epoch,\n                            step,\n                        ]\n                        if args.use_ema and self.config.model.ema:\n                            states.append(ema_helper.state_dict())\n\n                        torch.save(\n                            states,\n                            os.path.join(self.args.log_path, \"ckpt_{}.pth\".format(step)),\n                        )\n                        torch.save(states, os.path.join(self.args.log_path, \"ckpt.pth\"))\n                    \n                data_start = time.time()\n\n            accelerator.wait_for_everyone()\n            # Sampling for visualization\n            if accelerator.is_main_process:\n                unwrapped_model 
= accelerator.unwrap_model(model)\n                unwrapped_model.eval()\n                if ema_helper is not None:\n                    ema_helper.store(unwrapped_model.parameters())\n                    ema_helper.copy_to(unwrapped_model.parameters())\n\n                with torch.no_grad():\n                    n = config.sampling.batch_size\n                    x = torch.randn(\n                        n,\n                        config.data.channels,\n                        config.data.image_size,\n                        config.data.image_size,\n                        device=accelerator.device,\n                    )\n                    x = self.sample_image(x, model)\n                    x = inverse_data_transform(config, x)\n                    grid = tvu.make_grid(x)\n                    tvu.save_image(grid, os.path.join(args.log_path, 'vis', 'epoch-{}.png'.format(epoch)))\n                \n                if args.use_ema:\n                    ema_helper.restore(unwrapped_model.parameters())\n        accelerator.end_training()\n            \n    def sample(self):\n        accelerator = self.accelerator\n        model = self.model \n        model.to(accelerator.device)\n        model.eval()\n        \n        if self.args.fid:\n            self.sample_fid(model)\n        elif self.args.interpolation:\n            self.sample_interpolation(model)\n        elif self.args.sequence:\n            self.sample_sequence(model)\n        else:\n            raise NotImplementedError(\"Sample procedeure not defined\")\n\n    def sample_fid(self, model):\n        args = self.args\n        accelerator = self.accelerator\n        import torch\n        torch.manual_seed(accelerator.process_index+args.seed)\n        import random\n        random.seed(accelerator.process_index+args.seed)\n        import numpy as np\n        np.random.seed(accelerator.process_index+args.seed)\n\n        config = self.config\n        img_id = len(glob.glob(f\"{self.args.image_folder}/*\"))\n        print(f\"starting from image {img_id}\")\n        total_n_samples = 50000\n        n_rounds = (total_n_samples - img_id) // config.sampling.batch_size\n        \n        #os.makedirs(os.path.join(self.args.image_folder, '{}'.format(accelerator.process_index)), exist_ok=True)\n        with torch.no_grad():\n            for _ in tqdm.tqdm(\n                range(n_rounds), desc=\"Generating image samples for FID evaluation.\"\n            ):\n                n = config.sampling.batch_size\n                x = torch.randn(\n                    n,\n                    config.data.channels,\n                    config.data.image_size,\n                    config.data.image_size,\n                    device=accelerator.device,\n                )\n\n                x = self.sample_image(x, model)\n                x = inverse_data_transform(config, x)\n\n                for i in range(n):\n                    tvu.save_image(\n                        x[i], os.path.join(self.args.image_folder, f\"{img_id}.png\")\n                    )\n                    img_id += 1\n\n    def sample_sequence(self, model):\n        config = self.config\n\n        x = torch.randn(\n            8,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=self.device,\n        )\n\n        # NOTE: This means that we are producing each predicted x0, not x_{t-1} at timestep t.\n        with torch.no_grad():\n            _, x = self.sample_image(x, model, last=False)\n\n        x 
= [inverse_data_transform(config, y) for y in x]\n\n        for i in range(len(x)):\n            for j in range(x[i].size(0)):\n                tvu.save_image(\n                    x[i][j], os.path.join(self.args.image_folder, f\"{j}_{i}.png\")\n                )\n\n    def sample_interpolation(self, model):\n        config = self.config\n\n        def slerp(z1, z2, alpha):\n            theta = torch.acos(torch.sum(z1 * z2) / (torch.norm(z1) * torch.norm(z2)))\n            return (\n                torch.sin((1 - alpha) * theta) / torch.sin(theta) * z1\n                + torch.sin(alpha * theta) / torch.sin(theta) * z2\n            )\n\n        z1 = torch.randn(\n            1,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=self.device,\n        )\n        z2 = torch.randn(\n            1,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=self.device,\n        )\n        alpha = torch.arange(0.0, 1.01, 0.1).to(z1.device)\n        z_ = []\n        for i in range(alpha.size(0)):\n            z_.append(slerp(z1, z2, alpha[i]))\n\n        x = torch.cat(z_, dim=0)\n        xs = []\n\n        # Hard coded here, modify to your preferences\n        with torch.no_grad():\n            for i in range(0, x.size(0), 8):\n                xs.append(self.sample_image(x[i : i + 8], model))\n        x = inverse_data_transform(config, torch.cat(xs, dim=0))\n        for i in range(x.size(0)):\n            tvu.save_image(x[i], os.path.join(self.args.image_folder, f\"{i}.png\"))\n\n    def sample_image(self, x, model, last=True):\n        try:\n            skip = self.args.skip\n        except Exception:\n            skip = 1\n\n        if self.args.sample_type == \"generalized\":\n            if self.args.skip_type == \"uniform\":\n                skip = self.num_timesteps // self.args.timesteps\n                seq = range(0, self.num_timesteps, skip)\n            elif self.args.skip_type == \"quad\":\n                seq = (\n                    np.linspace(\n                        0, np.sqrt(self.num_timesteps * 0.8), self.args.timesteps\n                    )\n                    ** 2\n                )\n                seq = [int(s) for s in list(seq)]\n            else:\n                raise NotImplementedError\n            from functions.denoising import generalized_steps\n\n            xs = generalized_steps(x, seq, model, self.betas, eta=self.args.eta)\n            x = xs\n        elif self.args.sample_type == \"ddpm_noisy\":\n            if self.args.skip_type == \"uniform\":\n                skip = self.num_timesteps // self.args.timesteps\n                seq = range(0, self.num_timesteps, skip)\n            elif self.args.skip_type == \"quad\":\n                seq = (\n                    np.linspace(\n                        0, np.sqrt(self.num_timesteps * 0.8), self.args.timesteps\n                    )\n                    ** 2\n                )\n                seq = [int(s) for s in list(seq)]\n            else:\n                raise NotImplementedError\n            from functions.denoising import ddpm_steps\n\n            x = ddpm_steps(x, seq, model, self.betas)\n        else:\n            raise NotImplementedError\n        if last:\n            x = x[0][-1]\n        return x\n\n    def test(self):\n        pass\n"
  },
  {
    "path": "ddpm_exp/runners/diffusion_simple.py",
    "content": "import os\nimport logging\nimport time\nimport glob\n\nimport numpy as np\nimport tqdm\nimport torch\nimport torch.utils.data as data\n\nfrom models.diffusion import Model\nfrom models.ema import EMAHelper\nfrom functions import get_optimizer\nfrom functions.losses import loss_registry\nfrom datasets import get_dataset, data_transform, inverse_data_transform\nfrom functions.ckpt_util import get_ckpt_path\nimport torchvision.utils as tvu\n\ndef torch2hwcuint8(x, clip=False):\n    if clip:\n        x = torch.clamp(x, -1, 1)\n    x = (x + 1.0) / 2.0\n    return x\n\n\ndef get_beta_schedule(beta_schedule, *, beta_start, beta_end, num_diffusion_timesteps):\n    def sigmoid(x):\n        return 1 / (np.exp(-x) + 1)\n\n    if beta_schedule == \"quad\":\n        betas = (\n            np.linspace(\n                beta_start ** 0.5,\n                beta_end ** 0.5,\n                num_diffusion_timesteps,\n                dtype=np.float64,\n            )\n            ** 2\n        )\n    elif beta_schedule == \"linear\":\n        betas = np.linspace(\n            beta_start, beta_end, num_diffusion_timesteps, dtype=np.float64\n        )\n    elif beta_schedule == \"const\":\n        betas = beta_end * np.ones(num_diffusion_timesteps, dtype=np.float64)\n    elif beta_schedule == \"jsd\":  # 1/T, 1/(T-1), 1/(T-2), ..., 1\n        betas = 1.0 / np.linspace(\n            num_diffusion_timesteps, 1, num_diffusion_timesteps, dtype=np.float64\n        )\n    elif beta_schedule == \"sigmoid\":\n        betas = np.linspace(-6, 6, num_diffusion_timesteps)\n        betas = sigmoid(betas) * (beta_end - beta_start) + beta_start\n    else:\n        raise NotImplementedError(beta_schedule)\n    assert betas.shape == (num_diffusion_timesteps,)\n    return betas\n\n\nclass Diffusion(object):\n    def __init__(self, args, config, device=None):\n        self.args = args\n        self.config = config\n        if device is None:\n            device = (\n                torch.device(\"cuda\")\n                if torch.cuda.is_available()\n                else torch.device(\"cpu\")\n            )\n        self.device = device\n\n        self.model_var_type = config.model.var_type\n        betas = get_beta_schedule(\n            beta_schedule=config.diffusion.beta_schedule,\n            beta_start=config.diffusion.beta_start,\n            beta_end=config.diffusion.beta_end,\n            num_diffusion_timesteps=config.diffusion.num_diffusion_timesteps,\n        )\n        betas = self.betas = torch.from_numpy(betas).float().to(self.device)\n        self.num_timesteps = betas.shape[0]\n\n        alphas = 1.0 - betas\n        alphas_cumprod = alphas.cumprod(dim=0)\n        alphas_cumprod_prev = torch.cat(\n            [torch.ones(1).to(device), alphas_cumprod[:-1]], dim=0\n        )\n        posterior_variance = (\n            betas * (1.0 - alphas_cumprod_prev) / (1.0 - alphas_cumprod)\n        )\n        if self.model_var_type == \"fixedlarge\":\n            self.logvar = betas.log()\n            # torch.cat(\n            # [posterior_variance[1:2], betas[1:]], dim=0).log()\n        elif self.model_var_type == \"fixedsmall\":\n            self.logvar = posterior_variance.clamp(min=1e-20).log()\n\n        self.build_model()\n\n    def build_model(self):\n        args, config = self.args, self.config\n        model = Model(config)\n        \n        if args.restore_from is not None and os.path.isfile(args.restore_from):\n            ckpt = args.restore_from\n            print(\"Loading checkpoint 
{}\".format(ckpt))\n            states = torch.load(\n                ckpt,\n                map_location=self.config.device,\n            )\n            if isinstance(states[0], torch.nn.Module):\n                model = states[0]\n                model = model.to(self.device)\n            else:\n                model = model.to(self.device)\n                model.load_state_dict(states[0], strict=True) \n\n            if args.use_ema and self.config.model.ema:\n                print(\"Loading EMA\")\n                ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n                ema_helper.register(model)\n                ema_helper.load_state_dict(states[-1])\n                ema_helper.ema(model)\n                self.ema_helper = ema_helper\n            else:\n                self.ema_helper = None\n        \n        elif not self.args.use_pretrained:\n            if getattr(self.config.sampling, \"ckpt_id\", None) is None:\n                ckpt = os.path.join(self.args.log_path, \"ckpt.pth\")\n                states = torch.load(\n                    ckpt,\n                    map_location=self.config.device,\n                )\n            else:\n                ckpt = os.path.join(\n                        self.args.log_path, f\"ckpt_{self.config.sampling.ckpt_id}.pth\"\n                    )\n                states = torch.load(\n                    ckpt,\n                    map_location=self.config.device,\n                )\n            print(\"Loading checkpoint {}\".format(ckpt))\n\n            if isinstance(states[0], torch.nn.Module):\n                model = states[0]\n                model = model.to(self.device)\n            else:\n                model = model.to(self.device)\n                model.load_state_dict(states[0], strict=True)\n\n            if args.use_ema and self.config.model.ema:\n                ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n                ema_helper.register(model)\n                ema_helper.load_state_dict(states[-1])\n                ema_helper.ema(model)\n                self.ema_helper = ema_helper\n            else:\n                self.ema_helper = None\n        else:\n            # This used the pretrained DDPM model, see https://github.com/pesser/pytorch_diffusion\n            if self.config.data.dataset == \"CIFAR10\":\n                name = \"cifar10\"\n            elif self.config.data.dataset == \"LSUN\":\n                name = f\"lsun_{self.config.data.category}\"\n            elif self.config.data.dataset == \"CELEBA\":\n                name = 'celeba'\n            ckpt = get_ckpt_path(f\"ema_{name}\")\n            print(\"Loading checkpoint {}\".format(ckpt))\n            states = torch.load(ckpt, map_location=self.device)\n            if isinstance(states, (list,tuple)):\n                model.load_state_dict(states[0])\n            else:\n                model.load_state_dict(states)\n            model.to(self.device)\n        self.model = model\n\n    def train(self):\n        args, config = self.args, self.config\n        dataset, test_dataset = get_dataset(args, config)\n        train_loader = data.DataLoader(\n            dataset,\n            batch_size=config.training.batch_size,\n            shuffle=True,\n            num_workers=config.data.num_workers,\n        )\n        model = self.model\n        model = model.to(self.device)\n        #model = torch.nn.DataParallel(model)\n\n        optimizer = get_optimizer(self.config, model.parameters())\n\n        if args.use_ema and 
self.config.model.ema:\n            ema_helper = EMAHelper(mu=self.config.model.ema_rate)\n            ema_helper.register(model)\n        else:\n            ema_helper = None\n        #ema_helper = self.ema_helper\n\n        start_epoch, step = 0, 0\n        if self.args.resume_training:\n            states = torch.load(os.path.join(self.args.log_path, \"ckpt.pth\"))\n            model.load_state_dict(states[0])\n\n            states[1][\"param_groups\"][0][\"eps\"] = self.config.optim.eps\n            optimizer.load_state_dict(states[1])\n            start_epoch = states[2]\n            step = states[3]\n            if self.config.model.ema:\n                ema_helper.load_state_dict(states[4])\n\n        model.eval()\n        with torch.no_grad():\n            n = config.sampling.batch_size\n            x = torch.randn(\n                n,\n                config.data.channels,\n                config.data.image_size,\n                config.data.image_size,\n                device=self.device,\n            )\n            x = self.sample_image(x, model)\n            x = inverse_data_transform(config, x)\n            grid = tvu.make_grid(x)\n            tvu.save_image(grid, os.path.join(args.log_path, 'vis', 'Init.png'))\n            #tb_logger.add_image('Before Training', grid, global_step=0)\n\n        for epoch in range(start_epoch, self.config.training.n_epochs):\n            data_start = time.time()\n            data_time = 0    \n            for i, (x, y) in enumerate(train_loader):\n                n = x.size(0)\n                data_time += time.time() - data_start\n                model.train()\n                step += 1\n\n                x = x.to(self.device)\n                x = data_transform(self.config, x)\n                e = torch.randn_like(x)\n                b = self.betas\n\n                # antithetic sampling\n                t = torch.randint(\n                    low=0, high=self.num_timesteps, size=(n // 2 + 1,)\n                ).to(self.device)\n                t = torch.cat([t, self.num_timesteps - t - 1], dim=0)[:n]\n                loss = loss_registry[config.model.type](model, x, t, e, b)\n\n                #tb_logger.add_scalar(\"loss\", loss, global_step=step)\n     \n                if step % 100 == 0:\n                    logging.info(\n                        f\"step: {step} (Ep={epoch}/{self.config.training.n_epochs}, Iter={i}/{len(train_loader)}), loss: {loss.item()}, data time: {data_time / (i+1)}\"\n                    )\n\n                optimizer.zero_grad()\n                loss.backward()\n\n                try:\n                    torch.nn.utils.clip_grad_norm_(\n                        model.parameters(), config.optim.grad_clip\n                    )\n                except Exception:\n                    pass\n                optimizer.step()\n\n                if args.use_ema and self.config.model.ema:\n                    ema_helper.update(model)\n                \n                if step % self.config.training.snapshot_freq == 0 or step == 1:\n                    model.zero_grad()\n                    states = [\n                        model,\n                        optimizer.state_dict(),\n                        epoch,\n                        step,\n                    ]\n                    if args.use_ema and self.config.model.ema:\n                        states.append(ema_helper.state_dict())\n\n                    torch.save(\n                        states,\n                        os.path.join(self.args.log_path, 
\"ckpt_{}.pth\".format(step)),\n                    )\n                    torch.save(states, os.path.join(self.args.log_path, \"ckpt.pth\"))\n                    \n                data_start = time.time()\n\n            # Sampling for visualization\n            model.eval()\n            with torch.no_grad():\n                n = config.sampling.batch_size\n                x = torch.randn(\n                    n,\n                    config.data.channels,\n                    config.data.image_size,\n                    config.data.image_size,\n                    device=self.device,\n                )\n                x = self.sample_image(x, model)\n                x = inverse_data_transform(config, x)\n                grid = tvu.make_grid(x)\n                tvu.save_image(grid, os.path.join(args.log_path, 'vis', 'epoch-{}.png'.format(epoch)))\n\n    def sample(self):\n        model = self.model #Model(self.config)\n        model.to(self.device)\n        #model = torch.nn.DataParallel(model)\n        model.eval()\n\n        if self.args.fid:\n            self.sample_fid(model)\n        elif self.args.interpolation:\n            self.sample_interpolation(model)\n        elif self.args.sequence:\n            self.sample_sequence(model)\n        else:\n            raise NotImplementedError(\"Sample procedeure not defined\")\n\n    def sample_fid(self, model):\n        import torch\n        torch.manual_seed(0)\n        import random\n        random.seed(0)\n        import numpy as np\n        np.random.seed(0)\n\n        config = self.config\n        img_id = len(glob.glob(f\"{self.args.image_folder}/*\"))\n        print(f\"starting from image {img_id}\")\n        total_n_samples = 50000\n        n_rounds = (total_n_samples - img_id) // config.sampling.batch_size\n\n        with torch.no_grad():\n            for _ in tqdm.tqdm(\n                range(n_rounds), desc=\"Generating image samples for FID evaluation.\"\n            ):\n                n = config.sampling.batch_size\n                x = torch.randn(\n                    n,\n                    config.data.channels,\n                    config.data.image_size,\n                    config.data.image_size,\n                    device=self.device,\n                )\n\n                x = self.sample_image(x, model)\n                x = inverse_data_transform(config, x)\n\n                for i in range(n):\n                    tvu.save_image(\n                        x[i], os.path.join(self.args.image_folder, f\"{img_id}.png\")\n                    )\n                    img_id += 1\n\n    def sample_sequence(self, model):\n        config = self.config\n\n        x = torch.randn(\n            8,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=self.device,\n        )\n\n        # NOTE: This means that we are producing each predicted x0, not x_{t-1} at timestep t.\n        with torch.no_grad():\n            _, x = self.sample_image(x, model, last=False)\n\n        x = [inverse_data_transform(config, y) for y in x]\n\n        for i in range(len(x)):\n            for j in range(x[i].size(0)):\n                tvu.save_image(\n                    x[i][j], os.path.join(self.args.image_folder, f\"{j}_{i}.png\")\n                )\n\n    def sample_interpolation(self, model):\n        config = self.config\n\n        def slerp(z1, z2, alpha):\n            theta = torch.acos(torch.sum(z1 * z2) / (torch.norm(z1) * torch.norm(z2)))\n            return (\n          
      torch.sin((1 - alpha) * theta) / torch.sin(theta) * z1\n                + torch.sin(alpha * theta) / torch.sin(theta) * z2\n            )\n\n        z1 = torch.randn(\n            1,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=self.device,\n        )\n        z2 = torch.randn(\n            1,\n            config.data.channels,\n            config.data.image_size,\n            config.data.image_size,\n            device=self.device,\n        )\n        alpha = torch.arange(0.0, 1.01, 0.1).to(z1.device)\n        z_ = []\n        for i in range(alpha.size(0)):\n            z_.append(slerp(z1, z2, alpha[i]))\n\n        x = torch.cat(z_, dim=0)\n        xs = []\n\n        # Hard coded here, modify to your preferences\n        with torch.no_grad():\n            for i in range(0, x.size(0), 8):\n                xs.append(self.sample_image(x[i : i + 8], model))\n        x = inverse_data_transform(config, torch.cat(xs, dim=0))\n        for i in range(x.size(0)):\n            tvu.save_image(x[i], os.path.join(self.args.image_folder, f\"{i}.png\"))\n\n    def sample_image(self, x, model, last=True):\n        try:\n            skip = self.args.skip\n        except Exception:\n            skip = 1\n\n        if self.args.sample_type == \"generalized\":\n            if self.args.skip_type == \"uniform\":\n                skip = self.num_timesteps // self.args.timesteps\n                seq = range(0, self.num_timesteps, skip)\n            elif self.args.skip_type == \"quad\":\n                seq = (\n                    np.linspace(\n                        0, np.sqrt(self.num_timesteps * 0.8), self.args.timesteps\n                    )\n                    ** 2\n                )\n                seq = [int(s) for s in list(seq)]\n            else:\n                raise NotImplementedError\n            from functions.denoising import generalized_steps\n\n            xs = generalized_steps(x, seq, model, self.betas, eta=self.args.eta)\n            x = xs\n        elif self.args.sample_type == \"ddpm_noisy\":\n            if self.args.skip_type == \"uniform\":\n                skip = self.num_timesteps // self.args.timesteps\n                seq = range(0, self.num_timesteps, skip)\n            elif self.args.skip_type == \"quad\":\n                seq = (\n                    np.linspace(\n                        0, np.sqrt(self.num_timesteps * 0.8), self.args.timesteps\n                    )\n                    ** 2\n                )\n                seq = [int(s) for s in list(seq)]\n            else:\n                raise NotImplementedError\n            from functions.denoising import ddpm_steps\n\n            x = ddpm_steps(x, seq, model, self.betas)\n        else:\n            raise NotImplementedError\n        if last:\n            x = x[0][-1]\n        return x\n\n    def test(self):\n        pass\n"
  },
  {
    "path": "ddpm_exp/scripts/finetune_bedroom_ddpm.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=6 --master_port 22223 --use_env finetune.py \\\n--config bedroom.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune/bedroom_ddpm_$1_0.3_finetuned-continue-v4-2e-5 \\\n--doc post_training \\\n--skip_type uniform  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--load_pruned_model run/finetune/bedroom_ddpm_taylor_0.3_finetuned-continue-v3-2e-6/logs/post_training/ckpt_65000.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_celeba_ddpm.sh",
    "content": "python finetune.py \\\n--config celeba.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_final/celeba_T=$1_finetuned \\\n--doc post_training \\\n--skip_type uniform  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--load_pruned_model \"run/pruned_final/celeba_T=$1.pth\" \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_celeba_ddpm_kd.sh",
    "content": "python finetune.py \\\n--config celeba.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_v2/celeba_ddpm_$1_0.3_finetuned_kd \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--kd \\\n--load_pruned_model run/pruned/celeba_ddpm_$1_0.3.pth  \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_church_ddpm.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=4 --master_port 22223 --use_env finetune.py \\\n--config church.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune/church_ddpm_$1_0.3_finetuned \\\n--doc post_training \\\n--skip_type uniform  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--load_pruned_model run/pruned/church_ddpm_$1_0.3.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_cifar_ddpm.sh",
    "content": "python finetune.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_v3/cifar10_ddpm_$1_finetuned_0.05T.pth \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--load_pruned_model run/pruned_v5/cifar10_pruned_$1.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_cifar_ddpm_kd.sh",
    "content": "python finetune.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_v2/cifar10_ddpm_$1_0.3_finetuned_kd \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--kd \\\n--load_pruned_model run/pruned/cifar10_pruned_$1_0.3.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_cifar_ddpm_random.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=2 --master_port 22223 --use_env finetune.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune/cifar10_pruned_random_0.3_finetuned\\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner random \\\n--load_pruned_model run/pruned/cifar10_pruned_random_0.3.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/finetune_cifar_ddpm_taylor.sh",
    "content": "python finetune.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune/cifar10_pruned_taylor_0.3_real_x_finetuned \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner taylor \\\n--load_pruned_model run/pruned/cifar10_pruned_taylor_0.3_real_x.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_bedroom_sample_pratrained.sh",
    "content": "python prune.py \\\n--config bedroom.yml \\\n--exp run/ddim_bedroom_official \\\n--sample \\\n--use_pretrained \\\n--timesteps 50 \\\n--eta 0 \\\n--ni \\\n--doc 50steps_quad \\\n--skip_type quad  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema"
  },
  {
    "path": "ddpm_exp/scripts/old/run_celeba_pruning_scratch.sh",
    "content": "python prune.py \\\n--config celeba.yml \\\n--exp run/ddim_celeba_pruning_reinit \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner reinit \\\n--taylor_batch_size 96 \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_celeba_pruning_taylor.sh",
    "content": "python prune.py \\\n--config celeba.yml \\\n--exp run/ddim_celeba_pruning_taylor \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner taylor \\\n--taylor_batch_size 96 \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_celeba_sample_pratrained.sh",
    "content": "python prune.py \\\n--config celeba.yml \\\n--exp run/ddim_celeba_official \\\n--sample \\\n--use_pretrained \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc 100steps_quad \\\n--skip_type quad  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from run/cache/diffusion_models_converted/celeba/ckpt.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_church_pruning_taylor.sh",
    "content": "python prune.py \\\n--config church.yml \\\n--exp run/ddim_church_pruning_taylor \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner random \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_first_order_taylor.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_first_order_taylor \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner first_order_taylor \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_magnitude.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_magnitude \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner magnitude \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_random.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_random \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner random \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_random_kd.sh",
    "content": "python prune_kd.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_random \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner random \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_scratch.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_reinit \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner reinit \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_second_order_taylor.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_second_order_taylor \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner second_order_taylor \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_taylor.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_taylor \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner taylor \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_pruning_taylor_kd.sh",
    "content": "python prune_kd.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_pruning_taylor_kd \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner taylor \\"
  },
  {
    "path": "ddpm_exp/scripts/old/run_cifar_train.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/ddim_cifar10_train_v2 \\\n--use_pretrained \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training_with_0.2_pruning_ratio_v2 \\\n--skip_type quad  \\\n--pruning_ratio 0.2 \\\n--use_ema \\"
  },
  {
    "path": "ddpm_exp/scripts/prune_bedroom_ddpm.sh",
    "content": "#!/bin/bash\n\n# Execute the Python script with the provided arguments\npython prune.py \\\n--config \"bedroom.yml\" \\\n--timesteps \"100\" \\\n--eta \"0\" \\\n--ni \\\n--doc \"post_training\" \\\n--skip_type \"quad\" \\\n--pruning_ratio \"0.3\" \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"$1\" \\\n--save_pruned_model \"run/pruned/bedroom_ddpm_$1_0.3.pth\" \\\n--taylor_batch_size \"4\""
  },
  {
    "path": "ddpm_exp/scripts/prune_bedroom_ddpm_test.sh",
    "content": "#!/bin/bash\n\n# Execute the Python script with the provided arguments\npython prune_test.py \\\n--config \"bedroom.yml\" \\\n--timesteps \"100\" \\\n--eta \"0\" \\\n--ni \\\n--doc \"post_training\" \\\n--skip_type \"quad\" \\\n--pruning_ratio \"0.05\" \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"$1\" \\\n--save_pruned_model \"run/pruned_test/bedroom_ddpm_$1.pth\" \\\n--taylor_batch_size \"4\""
  },
  {
    "path": "ddpm_exp/scripts/prune_celeba_ddpm.sh",
    "content": "#!/bin/bash\n\n# Execute the Python script with the provided arguments\npython prune.py \\\n--config \"celeba.yml\" \\\n--timesteps \"100\" \\\n--eta \"0\" \\\n--ni \\\n--doc \"post_training\" \\\n--skip_type \"quad\" \\\n--pruning_ratio \"0.3\" \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"ours\" \\\n--save_pruned_model \"run/pruned_final/celeba_T=$1.pth\" \\\n--taylor_batch_size \"64\" \\\n--thr \"$1\""
  },
  {
    "path": "ddpm_exp/scripts/prune_celeba_ddpm_ssim.sh",
    "content": "python prune_ssim.py \\\n--config celeba.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.15 \\\n--use_ema \\\n--use_pretrained \\\n--stage $1 \\\n--pruner \"ours\" \\\n--save_pruned_model run/pruned_v4/celeba_pruned.pth \\\n--taylor_batch_size 64"
  },
  {
    "path": "ddpm_exp/scripts/prune_church_ddpm.sh",
    "content": "#!/bin/bash\n\n# Execute the Python script with the provided arguments\npython prune.py \\\n--config \"church.yml\" \\\n--timesteps \"100\" \\\n--eta \"0\" \\\n--ni \\\n--doc \"post_training\" \\\n--skip_type \"quad\" \\\n--pruning_ratio \"0.3\" \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"$1\" \\\n--save_pruned_model \"run/pruned/church_ddpm_$1_0.3.pth\" \\\n--taylor_batch_size \"2\""
  },
  {
    "path": "ddpm_exp/scripts/prune_church_ddpm_test.sh",
    "content": "#!/bin/bash\n\n# Execute the Python script with the provided arguments\npython prune_test.py \\\n--config \"church.yml\" \\\n--timesteps \"100\" \\\n--eta \"0\" \\\n--ni \\\n--doc \"post_training\" \\\n--skip_type \"quad\" \\\n--pruning_ratio \"0.05\" \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"$1\" \\\n--save_pruned_model \"run/pruned_test/church_ddpm_$1.pth\" \\\n--taylor_batch_size \"4\""
  },
  {
    "path": "ddpm_exp/scripts/prune_cifar_ddpm.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"$1\" \\\n--save_pruned_model run/pruned_v5/cifar10_pruned_$1_$2.pth \\\n--thr $2 \\"
  },
  {
    "path": "ddpm_exp/scripts/prune_cifar_ddpm_ssim.sh",
    "content": "python prune_ssim.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.2 \\\n--use_ema \\\n--use_pretrained \\\n--stage $1 \\\n--pruner \"ours\" \\\n--save_pruned_model run/pruned_v4/cifar10_pruned.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/prune_cifar_ddpm_test.sh",
    "content": "python prune_test.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"$1\" \\\n--save_pruned_model run/pruned_test/cifar10_pruned_$1_0.2.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/run_celeba.sh",
    "content": "#!/bin/bash\n\n# Execute the Python script with the provided arguments\npython prune.py \\\n--config \"celeba.yml\" \\\n--timesteps \"100\" \\\n--eta \"0\" \\\n--ni \\\n--doc \"post_training\" \\\n--skip_type \"quad\" \\\n--pruning_ratio \"0.3\" \\\n--use_ema \\\n--use_pretrained \\\n--pruner \"ours\" \\\n--save_pruned_model \"run/pruned_final/celeba_T=$1.pth\" \\\n--taylor_batch_size \"64\" \\\n--thr \"$1\"\n\npython finetune.py \\\n--config celeba.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_final/celeba_T=$1_finetuned \\\n--doc post_training \\\n--skip_type uniform  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--load_pruned_model \"run/pruned_final/celeba_T=$1.pth\" \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_bedroom_ddpm_pretrained.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=1 --master_port 22200 --use_env finetune.py \\\n--config bedroom.yml \\\n--exp $1 \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--use_pretrained \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_bedroom_ddpm_pruning.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=4 --master_port 22223 --use_env finetune.py \\\n--config bedroom.yml \\\n--exp $2 \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from $1 \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_celeba_ddpm_pruning.sh",
    "content": "python finetune.py \\\n--config celeba.yml \\\n--exp $2 \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from $1 \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_celeba_pretrained.sh",
    "content": "python prune.py \\\n--config celeba.yml \\\n--exp run/sample/ddim_celeba_official \\\n--sample \\\n--use_pretrained \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc official \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema"
  },
  {
    "path": "ddpm_exp/scripts/sample_church_ddpm_pruning.sh",
    "content": "python finetune.py \\\n--config church.yml \\\n--exp run/sample/church_ddpm_350k \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from run/finetune_v2/church_pruned_taylor_0.3_finetuned/logs/post_training/ckpt_350000.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_church_ddpm_pruning_old.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=4 --master_port 22221 --use_env finetune.py \\\n--config church.yml \\\n--exp $2 \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from $1 \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_church_ddpm_test.sh",
    "content": "python finetune.py \\\n--config church.yml \\\n--exp run/sample/church_ddpm_350k \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from run/finetune_v2/church_pruned_taylor_0.3_finetuned/logs/post_training/ckpt_350000.pth \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_church_pretrained.sh",
    "content": "python prune.py \\\n--config church.yml \\\n--exp run/sample/ddim_church_official \\\n--sample \\\n--use_pretrained \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc official \\\n--skip_type uniform  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema"
  },
  {
    "path": "ddpm_exp/scripts/sample_cifar_ddpm_pruning.sh",
    "content": "python finetune.py \\\n--config cifar10.yml \\\n--exp \"$2\" \\\n--sample \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample \\\n--skip_type quad  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema \\\n--restore_from \"$1\" \\"
  },
  {
    "path": "ddpm_exp/scripts/sample_cifar_pretrained.sh",
    "content": "python prune.py \\\n--config cifar10.yml \\\n--exp run/sample/ddim_cifar10_official \\\n--sample \\\n--use_pretrained \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--doc sample_100k \\\n--skip_type quad  \\\n--pruning_ratio 0.0 \\\n--fid \\\n--use_ema"
  },
  {
    "path": "ddpm_exp/scripts/simple_celeba_our.sh",
    "content": "python finetune_simple.py \\\n--config celeba.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_simple/celeba_ours_T=$1.pth \\\n--doc post_training \\\n--skip_type uniform  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--thr $1 \\\n--pruner ours \\\n--taylor_batch_size 64"
  },
  {
    "path": "ddpm_exp/scripts/simple_cifar_our.sh",
    "content": "python finetune_simple.py \\\n--config cifar10.yml \\\n--timesteps 100 \\\n--eta 0 \\\n--ni \\\n--exp run/finetune_simple_v2/cifar10_ours_T=$1.pth \\\n--doc post_training \\\n--skip_type quad  \\\n--pruning_ratio 0.3 \\\n--use_ema \\\n--use_pretrained \\\n--thr $1 \\\n--pruner ours"
  },
  {
    "path": "ddpm_exp/tools/extract_cifar10.py",
    "content": "import os\nimport torchvision\nfrom torchvision.datasets import CIFAR10\nfrom tqdm import tqdm\n\n# Define the path to the folder where the images will be saved\nsave_path = 'data/cifar10/images'\n\n# Create the folder if it doesn't exist\nif not os.path.exists(save_path):\n    os.makedirs(save_path)\n\n# Load the CIFAR10 dataset\ndataset = CIFAR10(root='data/cifar10', train=True, download=True)\n\n# Loop through the dataset and save each image to the folder\nfor i in tqdm(range(len(dataset))):\n    image, label = dataset[i]\n    image_name = f'{i}.png'\n    image_path = os.path.join(save_path, image_name)\n    image.save(image_path)"
  },
  {
    "path": "ddpm_exp/tools/transform_weights.py",
    "content": "import torch\n\nstate = torch.load(\"model.ckpt.old\")\nold_dict = state[0]\nprint(state[0].keys())\nstate[0] = {pname.replace(\"module.\", ''): pval for pname, pval in old_dict.items()}\nprint(state[0].keys())\ntorch.save(state, \"model.ckpt\")"
  },
  {
    "path": "ddpm_exp/torch_pruning/__init__.py",
    "content": "from .dependency import *\nfrom .pruner import *\nfrom . import _helpers, utils, importance"
  },
  {
    "path": "ddpm_exp/torch_pruning/_helpers.py",
    "content": "import torch.nn as nn\nimport numpy as np\nimport torch\nfrom operator import add\nfrom numbers import Number\n\n\ndef is_scalar(x):\n    if isinstance(x, torch.Tensor):\n        return len(x.shape) == 0\n    elif isinstance(x, Number):\n        return True\n    elif isinstance(x, (list, tuple)):\n        return False\n    return False\n\n\nclass _FlattenIndexMapping(object):\n    def __init__(self, stride=1, reverse=False):\n        self._stride = stride\n        self.reverse = reverse\n\n    def __call__(self, idxs):\n        new_idxs = []\n        if self.reverse == True:\n            for i in idxs:\n                new_idxs.append(i // self._stride)\n                new_idxs = list(set(new_idxs))\n        else:\n            for i in idxs:\n                new_idxs.extend(\n                    list(range(i * self._stride, (i + 1) * self._stride)))\n        return new_idxs\n\n\nclass _ConcatIndexMapping(object):\n    def __init__(self, offset, reverse=False):\n        self.offset = offset\n        self.reverse = reverse\n\n    def __call__(self, idxs):\n\n        if self.reverse == True:\n            new_idxs = [\n                i - self.offset[0]\n                for i in idxs\n                if (i >= self.offset[0] and i < self.offset[1])\n            ]\n        else:\n            new_idxs = [i + self.offset[0] for i in idxs]\n        return new_idxs\n\n\nclass _SplitIndexMapping(object):\n    def __init__(self, offset, reverse=False):\n        self.offset = offset\n        self.reverse = reverse\n\n    def __call__(self, idxs):\n        if self.reverse == True:\n            new_idxs = [i + self.offset[0] for i in idxs]\n        else:\n            new_idxs = [\n                i - self.offset[0]\n                for i in idxs\n                if (i >= self.offset[0] and i < self.offset[1])\n            ]\n        return new_idxs\n\n\nclass _GroupConvIndexMapping(object):\n    def __init__(self, in_channels, out_channels, groups, reverse=False):\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.groups = groups\n        self.reverse = reverse\n\n    def __call__(self, idxs):\n        if self.reverse == True:\n            new_idxs = [i + self.offset[0] for i in idxs]\n        else:\n            group_histgram = np.histogram(\n                idxs, bins=self.groups, range=(0, self.out_channels)\n            )\n            max_group_size = int(group_histgram.max())\n        return new_idxs\n\n\nclass ScalarSum:\n    def __init__(self):\n        self._results = {}\n\n    def update(self, metric_name, metric_value):\n        if metric_name not in self._results:\n            self._results[metric_name] = 0\n        self._results[metric_name] += metric_value\n\n    def results(self):\n        return self._results\n\n    def reset(self):\n        self._results = {}\n\n\nclass VectorSum:\n    def __init__(self):\n        self._results = {}\n\n    def update(self, metric_name, metric_value):\n        if metric_name not in self._results:\n            self._results[metric_name] = metric_value\n        if isinstance(metric_value, torch.Tensor):\n            self._results[metric_name] += metric_value\n        elif isinstance(metric_value, list):\n            self._results[metric_name] = list(\n                map(add, self._results[metric_name], metric_value)\n            )\n\n    def results(self):\n        return self._results\n\n    def reset(self):\n        self._results = {}\n"
  },
  {
    "path": "ddpm_exp/torch_pruning/dependency.py",
    "content": "import typing\nimport warnings\nfrom numbers import Number\nfrom collections import namedtuple\n\nimport torch\nimport torch.nn as nn\n\nfrom .pruner import function\nfrom . import _helpers, utils, ops\n\n__all__ = [\"Dependency\", \"Group\", \"DependencyGraph\"]\n\n\nclass Node(object):\n    \"\"\" Nodes of DepGraph\n    \"\"\"\n\n    def __init__(self, module: nn.Module, grad_fn, name: str = None):\n        # For Computational Graph (Tracing)\n        self.inputs = []\n        self.outputs = []\n        self.module = module\n        self.grad_fn = grad_fn\n        self._name = name\n        self.type = ops.module2type(module)\n        self.class_type = module.__class__\n\n        # For Dependency Graph\n        self.dependencies = []  # Adjacency List\n        self.enable_index_mapping = True\n        self.pruning_dim = -1\n\n    @property\n    def name(self):\n        if self._name is None:\n            return str(self.module)\n        else:\n            fmt = self._name\n            if self.type != ops.OPTYPE.PARAMETER:\n                fmt += \" ({})\".format(str(self.module))\n            return fmt\n\n    def add_input(self, node, allow_dumplicated=False):\n        #if node not in self.inputs:\n        if allow_dumplicated is True:\n            self.inputs.append(node)\n        else:\n            if node not in self.inputs:\n                self.inputs.append(node)\n\n    def add_output(self, node, allow_dumplicated=False):\n        if allow_dumplicated is True:\n            self.outputs.append(node)\n        else:\n            if node not in self.outputs:\n                self.outputs.append(node)\n\n    def __repr__(self):\n        return \"<Node: ({})>\".format(self.name)\n\n    def __str__(self):\n        return \"<Node: ({})>\".format(self.name)\n\n    def details(self):\n        fmt = \"-\" * 32 + \"\\n\"\n        fmt += \"<Node: ({})>\\n\".format(self.name)\n        fmt += \" \" * 4 + \"IN:\\n\"\n        for in_node in self.inputs:\n            fmt += \" \" * 8 + \"{}\\n\".format(in_node)\n        fmt += \" \" * 4 + \"OUT:\\n\"\n        for out_node in self.outputs:\n            fmt += \" \" * 8 + \"{}\\n\".format(out_node)\n        fmt += \" \" * 4 + \"DEP:\\n\"\n        for dep in self.dependencies:\n            fmt += \" \" * 8 + \"{}\\n\".format(dep)\n        fmt += \"\\tEnable_index_mapping={}\\n\".format(\n            self.enable_index_mapping)\n        fmt = \"-\" * 32 + \"\\n\"\n        return fmt\n\n\nclass Edge():  # for readability\n    pass\n\n\nclass Dependency(Edge):\n    def __init__(\n        self,\n        trigger: typing.Callable,\n        handler: typing.Callable,\n        source: Node,\n        target: Node,\n    ):\n        \"\"\"Layer dependency (Edge of DepGraph) in structral neural network pruning. 
\n        Args:\n            trigger (Callable): a pruning function that triggers this dependency\n            handler (Callable): a pruning function that can fix the broken dependency\n            source (Node): the source node pruned by the trigger function\n            target (Node): the target node pruned by the handler function\n            index_mapping (Callable): a callable function for index mapping\n        \"\"\"\n        self.trigger = trigger\n        self.handler = handler\n        self.source = source\n        self.target = target\n        self.index_mapping = [None, None]\n\n    def __call__(self, idxs: list):\n        self.handler.__self__.pruning_dim = self.target.pruning_dim\n        result = self.handler(\n            self.target.module,\n            idxs,\n        )\n        return result\n\n    def __repr__(self):\n        return str(self)\n\n    def __str__(self):\n        return \"{} on {} => {} on {}\".format(\n            \"None\" if self.trigger is None else self.trigger.__name__,\n            self.source.name,\n            self.handler.__name__,\n            self.target.name,\n        )\n\n    def is_triggered_by(self, pruning_fn):\n        return pruning_fn == self.trigger\n\n    def __eq__(self, other):\n        return (\n            self.source == other.source\n            and self.trigger == other.trigger\n            and self.handler == other.handler\n            and self.target == other.target\n        )\n\n    def __hash__(self):\n        return hash((self.source, self.target, self.trigger, self.handler))\n\n\nGroupItem = namedtuple('GroupItem', ['dep', 'idxs'])\n\n\nclass Group(object):\n    \"\"\"A group that contains dependencies and pruning indices.   \n    Each element is defined as a namedtuple('GroupItem', ['dep', 'idxs']).\n    A group is a iterable list \n    [ [Dep1, Indices1], [Dep2, Indices2], ..., [DepK, IndicesK] ]\n    \"\"\"\n\n    def __init__(self):\n        self._group = list()\n        self._DG = None # for group.prune(idxs=NEW_IDXS)\n\n    def prune(self, idxs=None, record_history=True):\n        \"\"\"Prune all coupled layers in the group\n        \"\"\"\n        if idxs is not None:\n            module = self._group[0].dep.target.module\n            pruning_fn = self._group[0].dep.handler\n            new_group = self._DG.get_pruning_group(module, pruning_fn, idxs)\n            new_group.prune()\n        else:\n            for dep, idxs in self._group:\n                if dep.target.type == ops.OPTYPE.PARAMETER:\n                    old_parameter = dep.target.module\n                    name = self._DG._param_to_name[old_parameter]\n                    self._DG._param_to_name.pop(old_parameter)\n                    pruned_parameter = dep(idxs)\n                    path = name.split('.')\n                    module = self._DG.model\n                    for p in path[:-1]:\n                        module = getattr(module, p)\n                    setattr(module, path[-1], pruned_parameter)\n                    self._DG._param_to_name[pruned_parameter] = name\n                    self._DG.module2node[pruned_parameter] = self._DG.module2node.pop(old_parameter)\n                    self._DG.module2node[pruned_parameter].module = pruned_parameter           \n                else:\n                    dep(idxs)\n        if record_history:\n            root_module, pruning_fn, root_pruning_idx = self[0][0].target.module, self[0][0].trigger, self[0][1]\n            root_module_name = self._DG._module2name[root_module]\n            
self._DG._pruning_history.append([root_module_name, self._DG.is_out_channel_pruning_fn(pruning_fn), root_pruning_idx])\n\n    def add_dep(self, dep, idxs):\n        self._group.append(GroupItem(dep=dep, idxs=idxs))\n\n    def __getitem__(self, k):\n        return self._group[k]\n\n    @property\n    def items(self):\n        return self._group\n\n    def has_dep(self, dep):\n        for _dep, _ in self._group:\n            if dep == _dep:\n                return True\n        return False\n\n    def has_pruning_op(self, dep, idxs):\n        for _dep, _idxs in self._group:\n            if (\n                _dep.target == dep.target\n                and _dep.handler == dep.handler\n                and _idxs == idxs\n            ):\n                return True\n        return False\n\n    def __len__(self):\n        return len(self._group)\n\n    def add_and_merge(self, dep, idxs):\n        for i, (_dep, _idxs) in enumerate(self._group):\n            if _dep.target == dep.target and _dep.handler == dep.handler:\n                self._group[i] = (_dep, list(set(_idxs + idxs)))\n                return\n        self.add_dep(dep, idxs)\n\n    def __str__(self):\n        fmt = \"\"\n        fmt += \"\\n\" + \"-\" * 32 + \"\\n\"\n        fmt += \" \" * 10 + \"Pruning Group\"\n        fmt += \"\\n\" + \"-\" * 32 + \"\\n\"\n        for i, (dep, idxs) in enumerate(self._group):\n            fmt += \"[{}] {}, #idxs={}\\n\".format(i, dep, len(idxs))\n        fmt += \"-\" * 32 + \"\\n\"\n        return fmt\n\n    def details(self):\n        fmt = \"\"\n        fmt += \"\\n\" + \"-\" * 32 + \"\\n\"\n        fmt += \" \" * 10 + \"Pruning Group\"\n        fmt += \"\\n\" + \"-\" * 32 + \"\\n\"\n        for i, (dep, idxs) in enumerate(self._group):\n            if i==0: \n                fmt += \"[{}] {}, idxs={} (Pruning Root)\\n\".format(i, dep, idxs)\n            else:\n                fmt += \"[{}] {}, idxs={}\\n\".format(i, dep, idxs)\n        fmt += \"-\" * 32 + \"\\n\"\n        return fmt\n\n    def exec(self):\n        \"\"\"old interface, replaced by group.prune()\"\"\"\n        self.prune()\n\n    def __call__(self):\n        return self.prune()\n\nUnwrappedParameters = namedtuple('UnwrappedParameters', ['parameters', 'pruning_dim'])\n\nclass DependencyGraph(object):\n\n    def __init__(self):\n        _dummy_pruners = {\n            ops.OPTYPE.CONCAT: ops.ConcatPruner(),\n            ops.OPTYPE.SPLIT: ops.SplitPruner(),\n            ops.OPTYPE.ELEMENTWISE: ops.ElementWisePruner(),\n            ops.OPTYPE.RESHAPE: ops.ReshapePruner(),\n            ops.OPTYPE.CUSTOMIZED: None,\n        }\n        self.REGISTERED_PRUNERS = function.PrunerBox.copy()  # shallow copy\n        self.REGISTERED_PRUNERS.update(_dummy_pruners)\n        self.CUSTOMIZED_PRUNERS = {}\n        self.IGNORED_LAYERS = []\n\n        # cache\n        self._in_channel_pruning_fn = set([p.prune_in_channels for p in self.REGISTERED_PRUNERS.values() if p is not None] + [p.prune_in_channels for p in self.CUSTOMIZED_PRUNERS.values() if p is not None])\n        self._out_channel_pruning_fn = set([p.prune_out_channels for p in self.REGISTERED_PRUNERS.values() if p is not None] + [p.prune_out_channels for p in self.CUSTOMIZED_PRUNERS.values() if p is not None])\n        self._op_id = 0\n\n        # Pruning History\n        self._pruning_history = []\n\n    def pruning_history(self):\n        return self._pruning_history\n\n    def load_pruning_history(self, pruning_history):\n        self._pruning_history = pruning_history\n        for 
module_name, is_out_channel_pruning, pruning_idx in self._pruning_history:\n            module = self.model\n            for n in module_name.split('.'):\n                module = getattr(module, n)\n            pruner = self.get_pruner_of_module(module)\n            if is_out_channel_pruning:\n                pruning_fn = pruner.prune_out_channels\n            else:\n                pruning_fn = pruner.prune_in_channels\n            group = self.get_pruning_group(module, pruning_fn, pruning_idx)\n            group.prune(record_history=False)\n            \n    def build_dependency(\n        self,\n        model: torch.nn.Module,\n        example_inputs: typing.Union[torch.Tensor, typing.Sequence],\n        forward_fn: typing.Callable[[\n            torch.nn.Module, typing.Union[torch.Tensor, typing.Sequence]], torch.Tensor] = None,\n        output_transform: typing.Callable = None,\n        unwrapped_parameters: typing.Dict[nn.Parameter, int] = None,\n        customized_pruners: typing.Dict[typing.Any,\n                                        function.BasePruningFunc] = None,\n        verbose: bool = True,\n    ):\n        \"\"\"Build a dependency graph through tracing.\n        Args:\n            model (class): the model to be pruned.\n            example_inputs (torch.Tensor or List): dummy inputs for tracing.\n            forward_fn (Callable): a function to run the model with example_inputs, which should return a reduced tensor for backpropagation.\n            output_transform (Callable): a function to transform network outputs.\n            unwrapped_parameters (List): unwrapped nn.parameters defined by parameters.\n            customized_pruners (typing.Dict[typing.Any, function.BasePruningFunc]): pruners for customized layers.\n            verbose (bool): verbose mode.\n        \"\"\"\n\n        self.verbose = verbose\n        self.model = model\n        self._module2name = {module: name for (\n            name, module) in model.named_modules()}\n\n        # Register customized pruners\n        if customized_pruners is not None:\n            for customized_module, customized_pruner in customized_pruners.items():\n                self.register_customized_layer(\n                    customized_module, customized_pruner)\n\n        # Ignore all sub-modules of customized layers\n        for layer_type in self.CUSTOMIZED_PRUNERS.keys():\n            for m in self.model.modules():\n                if isinstance(m, layer_type):\n                    for sub_module in m.modules():\n                        if sub_module != m:\n                            self.IGNORED_LAYERS.append(sub_module)\n\n        # Detect unwrapped nn.parameters\n        wrapped_parameters = []\n        prunable_module_types = self.REGISTERED_PRUNERS.keys()\n        for m in self.model.modules():\n            op_type = ops.module2type(m)\n            if ( op_type in prunable_module_types and op_type!=ops.OPTYPE.ELEMENTWISE ) or m.__class__ in self.CUSTOMIZED_PRUNERS.keys():\n                wrapped_parameters.extend(list(m.parameters()))\n        unwrapped_detected = []\n        _param_to_name = {}\n        for name, p in self.model.named_parameters():\n            is_wrapped = False\n            for p_wrapped in wrapped_parameters:\n                if p is p_wrapped:\n                    is_wrapped = True\n                    break\n            if not is_wrapped:\n                unwrapped_detected.append(p)\n                _param_to_name[p] = name\n        if unwrapped_parameters is None:\n            
unwrapped_parameters = []\n        self._param_to_name = _param_to_name\n        unwrapped_detected = list( set(unwrapped_detected) - set([p for (p, _) in unwrapped_parameters]) )\n        if len(unwrapped_detected)>0 and self.verbose:\n            warnings.warn(\"Unwrapped parameters detected: {}.\\n Torch-Pruning will prune the last non-singleton dimension of a parameter. If you wish to customize this behavior, please provide an unwrapped_parameters argument.\".format([_param_to_name[p] for p in unwrapped_detected]))\n        for p in unwrapped_detected:\n            # get the last dimension that >1\n            def last_non_singleton_dim(tensor):\n                non_singleton_dims = [i for i, s in enumerate(tensor.shape) if s > 1]\n                return non_singleton_dims[-1] if non_singleton_dims else None\n            pruning_dim = last_non_singleton_dim(p)\n            if pruning_dim is not None:\n                unwrapped_parameters.append( UnwrappedParameters(parameters=p, pruning_dim=pruning_dim) ) # prune the last non-singleton dim by daufault\n        self.unwrapped_parameters = unwrapped_parameters\n        # Build computational graph by tracing.\n        self.module2node = self._trace(\n            model, example_inputs, forward_fn, output_transform=output_transform\n        )\n\n        # Build dependency graph\n        self._build_dependency(self.module2node)\n        \n        # Init Shape information\n        self._init_shape_information()\n\n        # Update index mapping for torch.cat/split/chunck/...\n        self.update_index_mapping()\n        return self\n\n    def register_customized_layer(\n        self,\n        layer_type: typing.Type,\n        layer_pruner: function.BasePruningFunc,\n    ):\n        \"\"\"Register a customized pruner\n        Args:\n            layer_type (class): the type of target layer\n            pruner (tp.pruner.BasePruningFunc): a pruner for the specified layer type.\n        \"\"\"\n        self.CUSTOMIZED_PRUNERS[layer_type] = layer_pruner\n        # Update cache\n        self._in_channel_pruning_fn = set([p.prune_in_channels for p in self.REGISTERED_PRUNERS.values() if p is not None] + [p.prune_in_channels for p in self.CUSTOMIZED_PRUNERS.values() if p is not None])\n        self._out_channel_pruning_fn = set([p.prune_out_channels for p in self.REGISTERED_PRUNERS.values() if p is not None] + [p.prune_out_channels for p in self.CUSTOMIZED_PRUNERS.values() if p is not None])\n\n    def check_pruning_group(self, group: Group) -> bool:\n        \"\"\"check the group to avoid over-pruning. 
Return True if there are sufficient prunable elements.\n        Args:\n            group (Group): a depenedency group\n        \"\"\"\n\n        for dep, idxs in group:\n            if self.is_out_channel_pruning_fn(dep.handler):\n                prunable_chs = self.get_out_channels(\n                    dep.target.module)\n                if prunable_chs is None: continue\n                if prunable_chs <= len(idxs):\n                    return False\n\n            if self.is_in_channel_pruning_fn(dep.handler):\n                prunable_in_chs = self.get_in_channels(\n                    dep.target.module)\n                if prunable_in_chs is None: continue\n                if prunable_in_chs <= len(idxs):\n                    return False\n        return True\n\n    def is_out_channel_pruning_fn(self, fn: typing.Callable) -> bool:\n        return (fn in self._out_channel_pruning_fn)\n    \n    def is_in_channel_pruning_fn(self, fn: typing.Callable) -> bool:\n        return (fn in self._in_channel_pruning_fn)\n\n    def get_pruning_plan(self, module: nn.Module, pruning_fn: typing.Callable, idxs: typing.Union[list, tuple]) -> Group:\n        \"\"\" An alias of DependencyGraph.get_pruning_group for compatibility.\n        \"\"\"\n        return self.get_pruning_group(module, pruning_fn, idxs)\n\n    def get_pruning_group(\n        self,\n        module: nn.Module,\n        pruning_fn: typing.Callable,\n        idxs: typing.Union[list, tuple],\n    ) -> Group:\n        \"\"\"Get the pruning group of pruning_fn.\n        Args:\n            module (nn.Module): the to-be-pruned module/layer.\n            pruning_fn (Callable): the pruning function.\n            idxs (list or tuple): the indices of channels/dimensions.\n        \"\"\"\n        if isinstance(module, ops.TORCH_CONV) and module.groups == module.out_channels:\n            pruning_fn = function.prune_depthwise_conv_out_channels\n        if isinstance(idxs, Number):\n            idxs = [idxs]\n\n        self.update_index_mapping()\n        group = Group()\n        #  the user pruning operation\n        root_node = self.module2node[module]\n        group.add_dep(\n            Dependency(pruning_fn, pruning_fn,\n                       source=root_node, target=root_node), idxs\n        )\n        visited_node = set()\n\n        def _fix_dependency_graph_non_recursive(dep, idxs):\n            processing_stack = [(dep, idxs)]\n            while len(processing_stack) > 0:\n                dep, idxs = processing_stack.pop(-1)\n                node, fn = dep.target, dep.handler\n                visited_node.add(node)\n                #print(dep)\n                #print(node.dependencies)\n                for new_dep in node.dependencies:\n                    if new_dep.is_triggered_by(fn):\n                        new_indices = idxs\n                        for mapping in new_dep.index_mapping:\n                            if mapping is not None:\n                                new_indices = mapping(new_indices)\n                                #print(new_dep, new_dep.index_mapping)\n                                #print(len(new_indices), new_indices)\n                        #print()\n                        if len(new_indices) == 0:\n                            continue\n                        if (new_dep.target in visited_node) and group.has_pruning_op(\n                            new_dep, new_indices\n                        ):\n                            continue\n                        else:\n                            
group.add_dep(new_dep, new_indices)\n                            processing_stack.append(\n                                (new_dep, new_indices)\n                            )\n\n        _fix_dependency_graph_non_recursive(*group[0])\n\n        # merge pruning ops\n        merged_group = Group()\n        for dep, idxs in group.items:\n            merged_group.add_and_merge(dep, idxs)\n        merged_group._DG = self\n        return merged_group\n\n    def get_all_groups(self, ignored_layers=[], root_module_types=(ops.TORCH_CONV, ops.TORCH_LINEAR)):\n        visited_layers = []\n        ignored_layers = ignored_layers+self.IGNORED_LAYERS\n        for m in list(self.module2node.keys()):\n            if m in ignored_layers:\n                continue\n\n            if not isinstance(m, tuple(root_module_types)):\n                continue\n\n            pruner = self.get_pruner_of_module(m)\n            if pruner is None or pruner.get_out_channels(m) is None:\n                continue\n\n            if m in visited_layers:\n                continue\n\n            layer_channels = pruner.get_out_channels(m)\n            group = self.get_pruning_group(\n                m, pruner.prune_out_channels, list(range(layer_channels)))\n            prunable_group = True\n            for dep, _ in group:\n                module = dep.target.module\n                pruning_fn = dep.handler\n                if self.is_out_channel_pruning_fn(pruning_fn):\n                    visited_layers.append(module)\n                    if module in ignored_layers:\n                        prunable_group = False\n            if prunable_group:\n                yield group\n\n    def get_pruner_of_module(self, module):\n        p = self.CUSTOMIZED_PRUNERS.get(module.__class__, None)\n        if p is None:\n            p = self.REGISTERED_PRUNERS.get(ops.module2type(module), None)\n        return p\n\n    def get_out_channels(self, module_or_node):\n        if isinstance(module_or_node, Node):\n            module = module_or_node.module\n            pruning_dim = module_or_node.pruning_dim\n        else:\n            module = module_or_node\n            pruning_dim = self.module2node[module].pruning_dim\n        p = self.get_pruner_of_module(module)\n        p.pruning_dim = pruning_dim\n        if p is None:\n            return None\n        return p.get_out_channels(module)\n\n    def get_in_channels(self, module_or_node):\n        if isinstance(module_or_node, Node):\n            module = module_or_node.module\n            pruning_dim = module_or_node.pruning_dim\n        else:\n            module = module_or_node\n            pruning_dim = self.module2node[module].pruning_dim\n        p = self.get_pruner_of_module(module)\n        p.pruning_dim = pruning_dim\n        if p is None:\n            return None\n        return p.get_in_channels(module)\n\n    def _infer_out_channels_recursively(self, node: Node):\n        \"\"\" infer the number of output channels recursively\n        \"\"\"     \n        ch = self.get_out_channels(node)\n        if ch is None:\n            ch = 0\n            for in_node in node.inputs:\n                if node.type == ops.OPTYPE.CONCAT:\n                    sub_ch = self._infer_out_channels_recursively(in_node)\n                    if sub_ch is None:\n                        return None\n                    ch += sub_ch\n                else:\n                    if in_node.type == ops.OPTYPE.SPLIT and in_node.module.split_sizes is not None:\n                        for i, split_out_node 
in enumerate(in_node.outputs):\n                            if split_out_node == node:\n                                ch = in_node.module.split_sizes[i]\n                    else:\n                        ch = self._infer_out_channels_recursively(in_node)\n            if ch == 0:\n                return None\n        return ch\n\n    def _infer_in_channels_recursively(self, node: Node):\n        \"\"\" infer the number of input channels recursively\n        \"\"\"         \n        ch = self.get_in_channels(node)\n        if ch is None:\n            ch = 0\n            for out_node in node.outputs:\n                if node.type == ops.OPTYPE.SPLIT:\n                    sub_ch = self._infer_in_channels_recursively(out_node)\n                    if sub_ch is None:\n                        return None\n                    ch += sub_ch\n                else:\n                    ch = self._infer_in_channels_recursively(out_node)\n            if ch == 0:\n                return None\n        return ch\n\n    def _build_dependency(self, module2node):\n\n        for _, node in module2node.items():\n            ###########################################\n            # Rule 1) - Inter-layer Dependency\n            ###########################################\n            for in_node in node.inputs:\n                handler = self.get_pruner_of_module(in_node.module).prune_out_channels\n                trigger = self.get_pruner_of_module(node.module).prune_in_channels\n                dep = Dependency(\n                    trigger=trigger, handler=handler, source=node, target=in_node\n                )\n                node.dependencies.append(dep)\n\n            for out_node in node.outputs:\n                trigger = self.get_pruner_of_module(node.module).prune_out_channels\n                handler = self.get_pruner_of_module(out_node.module).prune_in_channels\n                dep = Dependency(\n                    trigger=trigger, handler=handler, source=node, target=out_node\n                )\n                node.dependencies.append(dep)\n\n            ###########################################\n            # Rule 2) - Intra-layer Dependency\n            ###########################################\n\n            # This is implictly implemented by assigning\n            # prune_out_channels=prune_in_channels in tp.pruner.function.BasePruningFunc\n\n    def _trace(self, model, example_inputs, forward_fn, output_transform):\n        \"\"\" Tracing the model as a graph\n        \"\"\"\n        model.eval()\n        gradfn2module = {}\n        visited = {}\n        self._2d_4d = True # only for pytorch<=1.8\n        def _record_grad_fn(module, inputs, outputs):\n            if module not in visited:\n                visited[module] = 1\n            else:\n                visited[module] += 1\n            \n            if isinstance(module, nn.Linear) and len(outputs.shape)==3:\n                self._2d_4d=False\n\n            if isinstance(outputs, tuple):\n                outputs = outputs[0]\n            if isinstance(outputs, torch.nn.utils.rnn.PackedSequence):\n                outputs = outputs.data\n            gradfn2module[outputs.grad_fn] = module\n\n        registered_types = tuple(ops.type2class(\n            t) for t in self.REGISTERED_PRUNERS.keys()) + tuple(self.CUSTOMIZED_PRUNERS.keys())\n        hooks = [\n            m.register_forward_hook(_record_grad_fn)\n            for m in model.modules()\n            if (isinstance(m, registered_types) and m not in self.IGNORED_LAYERS)\n   
     ]\n\n        # Feed forward and record gradient functions of prunable modules\n        if forward_fn is not None:\n            out = forward_fn(model, example_inputs)\n        elif isinstance(example_inputs, dict):\n            out = model(**example_inputs)\n        else:\n            try:\n                out = model(*example_inputs)\n            except:\n                out = model(example_inputs)\n\n        for hook in hooks:\n            hook.remove()\n        # for recursive models or layers\n        reused = [m for (m, count) in visited.items() if count > 1]\n\n        # build graph\n        if output_transform is not None:\n            out = output_transform(out)\n\n        module2node = {}\n        for o in utils.flatten_as_list(out):\n            self._trace_computational_graph(\n                module2node, o.grad_fn, gradfn2module, reused)\n\n        # TODO: Improving ViT pruning\n        # This is a corner case for pruning ViT,\n        # where concatination of pos_emb and cls_emv is not applied on the feature dim.\n        # Notably, this is not a good practice and will be fixed in the future version\n        if len(self.unwrapped_parameters) > 0:\n            for node in module2node.values():\n                if node.type in (ops.OPTYPE.CONCAT, ops.OPTYPE.SPLIT):\n                    stack = [node]\n                    visited = set()\n                    while len(stack) > 0:\n                        n = stack.pop(-1)\n                        visited.add(n)\n                        if n.type == ops.OPTYPE.PARAMETER and len(n.module.shape) == 3:\n                            node.enable_index_mapping = False\n                            break\n                        else:\n                            for ni in n.inputs:\n                                if ni not in visited:\n                                    stack.append(ni)\n        return module2node\n\n    def _trace_computational_graph(self, module2node, grad_fn_root, gradfn2module, reused):\n\n        def create_node_if_not_exists(grad_fn):\n            module = gradfn2module.get(grad_fn, None)\n            if module is not None \\\n                and module in module2node \\\n                    and module not in reused:\n                return module2node[module]\n\n            # 1. 
link grad_fns and modules\n            if module is None:  # a new module\n                if not hasattr(grad_fn, \"name\"):\n                    # we treat all unknwon modules as element-wise operations by default,\n                    # which does not modify the #dimension/#channel of features.\n                    # If you have some customized layers, please register it with DependencyGraph.register_customized_layer\n                    module = ops._ElementWiseOp(self._op_id ,\"Unknown\")\n                    self._op_id+=1\n                    if self.verbose:\n                        warnings.warn(\n                            \"[Warning] Unknown operation {} encountered, which will be handled as an element-wise op\".format(\n                                str(grad_fn))\n                        )\n                elif \"catbackward\" in grad_fn.name().lower():\n                    module = ops._ConcatOp(self._op_id)\n                    self._op_id+=1\n                elif \"split\" in grad_fn.name().lower():\n                    module = ops._SplitOp(self._op_id)\n                    self._op_id+=1\n                elif \"view\" in grad_fn.name().lower() or 'reshape' in grad_fn.name().lower():\n                    module = ops._ReshapeOp(self._op_id)\n                    self._op_id+=1\n                else:\n                    # treate other ops as element-wise ones, like Add, Sub, Div, Mul.\n                    module = ops._ElementWiseOp(self._op_id, grad_fn.name())\n                    self._op_id+=1\n                gradfn2module[grad_fn] = module\n\n            # 2. link modules and nodes\n            if module not in module2node:\n                node = Node(\n                    module=module,\n                    grad_fn=grad_fn,\n                    name=self._module2name.get(module, None),\n                )\n                if (\n                    type(module) in self.CUSTOMIZED_PRUNERS\n                ):  # mark it as a customized layer\n                    node.type = ops.OPTYPE.CUSTOMIZED\n                module2node[module] = node\n            else:\n                node = module2node[module]\n            return node\n\n        # non-recursive construction of computational graph\n        processing_stack = [grad_fn_root]\n        visited = set()\n        visited_as_output_node = set()\n        while len(processing_stack) > 0:\n            grad_fn = processing_stack.pop(-1)\n            if grad_fn in visited:\n                continue\n            \n            node = create_node_if_not_exists(grad_fn=grad_fn)\n            if hasattr(grad_fn, \"next_functions\"):\n                for f in grad_fn.next_functions:\n                    if f[0] is not None:\n                        if (\n                            hasattr(f[0], \"name\")\n                            and \"accumulategrad\" in f[0].name().lower()\n                        ):  # a leaf variable.\n                            is_unwrapped_param = False\n                            for (j, (p, dim)) in enumerate(self.unwrapped_parameters):\n                                if f[0].variable is p:\n                                    is_unwrapped_param = True\n                                    gradfn2module[f[0]] = p\n                                    self._module2name[p] = \"UnwrappedParameter_{} ({})\".format(j, p.shape)\n                            if not is_unwrapped_param:\n                                continue\n                        input_node = create_node_if_not_exists(f[0])\n\n              
          #allow_dumplicated = False\n\n                        # TODO: support duplicated concat/split like torch.cat([x, x], dim=1)\n                        # The following implementation is can achieve this but will introduce some bugs. \n                        # will be fixed in the future version\n                        #if node.type == ops.OPTYPE.CONCAT:\n                        #    allow_dumplicated = (node not in visited_as_output_node)\n                        #    node.add_input(input_node, allow_dumplicated=allow_dumplicated)\n                        #    input_node.add_output(node, allow_dumplicated=allow_dumplicated)\n                        #    print(node, node.inputs)\n                        #elif input_node.type == ops.OPTYPE.SPLIT:\n                        #    allow_dumplicated = (node not in visited_as_output_node)\n                        #    node.add_input(input_node, allow_dumplicated=allow_dumplicated)\n                        #    input_node.add_output(node, allow_dumplicated=allow_dumplicated)\n                        #else:\n                        node.add_input(input_node, allow_dumplicated=False)\n                        input_node.add_output(node, allow_dumplicated=False)\n\n                        processing_stack.append(f[0])\n            visited.add(grad_fn)\n            visited_as_output_node.add(node)\n        \n        for (param, dim) in self.unwrapped_parameters:\n            module2node[param].pruning_dim = dim\n        return module2node\n\n    def update_index_mapping(self):\n        \"\"\" update all index mapping after pruning\n        \"\"\"       \n        # update index mapping\n        for module, node in self.module2node.items():\n            if node.type == ops.OPTYPE.CONCAT:\n                self._update_concat_index_mapping(node)\n            if node.type == ops.OPTYPE.SPLIT:\n                self._update_split_index_mapping(node)\n            if node.type == ops.OPTYPE.RESHAPE:\n                self._update_reshape_index_mapping(node)\n\n    def _init_shape_information(self):\n        for module, node in self.module2node.items():\n            \n            if node.type == ops.OPTYPE.SPLIT:\n                grad_fn = node.grad_fn\n                if hasattr(grad_fn, '_saved_self_sizes'):\n                    if hasattr(grad_fn, '_saved_split_sizes') and hasattr(grad_fn, '_saved_dim') :\n                        if grad_fn._saved_dim != 1:\n                            continue\n                        chs = list(grad_fn._saved_split_sizes)\n                        node.module.split_sizes = chs\n                    elif hasattr(grad_fn, '_saved_split_size') and hasattr(grad_fn, '_saved_dim'):\n                        if grad_fn._saved_dim != 1:\n                            continue\n                        chs = [grad_fn._saved_split_size for _ in range(len(node.outputs))]\n                        node.module.split_sizes = chs\n                    offsets = [0]\n                    for i in range(len(chs)):\n                        offsets.append(offsets[i] + chs[i])\n                    node.module.offsets = offsets\n                else: # legency version\n                    chs = []\n                    for n in node.outputs:\n                        chs.append(self._infer_in_channels_recursively(n))\n                    offsets = [0]\n                    for ch in chs:\n                        if ch is None: continue\n                        offsets.append(offsets[-1] + ch)\n                    node.module.split_sizes = chs\n       
             node.module.offsets = offsets\n                                                \n    def _update_flatten_index_mapping(self, fc_node: Node):\n        if fc_node.type != ops.OPTYPE.LINEAR:\n            return\n        fc_in_features = fc_node.module.in_features\n        feature_channels = 0\n        for n in fc_node.inputs:\n            feature_channels = self._infer_out_channels_recursively(n)\n            if feature_channels is not None:  # =0 if there is a residual connection to model inputs\n                break\n        if (\n            feature_channels is None\n        ):  # the first layer: https://github.com/VainF/Torch-Pruning/issues/21\n            return\n        stride = fc_in_features // feature_channels\n        if stride > 1 and fc_in_features % feature_channels == 0:\n            for in_node in fc_node.inputs:\n                for dep in fc_node.dependencies:\n                    if dep.target == in_node:\n                        dep.index_mapping[0] = _helpers._FlattenIndexMapping(\n                            stride=stride, reverse=True\n                        )\n                for dep in in_node.dependencies:\n                    if dep.target == fc_node:\n                        dep.index_mapping[0] = _helpers._FlattenIndexMapping(\n                            stride=stride, reverse=False\n                        )\n\n    def _update_reshape_index_mapping(self, reshape_node: Node):\n        \n        # Only Supports 2D/4D tensors\n        # TODO: Better support for reshape/view/flatten\n        if hasattr(reshape_node.grad_fn, '_saved_self_sizes'): \n            size = reshape_node.grad_fn._saved_self_sizes\n            if (len(size)!=1 and len(size)!=4):\n                return\n        else: # old pytorch versions\n            if not self._2d_4d:\n                return \n\n        out_channels = None\n        for n in reshape_node.outputs:\n            out_channels = self._infer_in_channels_recursively(n)\n            if out_channels is not None:  # =0 if there is a residual connection to model inputs\n                break\n        \n        in_channels = None\n        for n in reshape_node.inputs:\n            in_channels = self._infer_out_channels_recursively(n)\n            if in_channels is not None:  # =0 if there is a residual connection to model inputs\n                break\n        \n        if out_channels is None or in_channels is None: return\n        if out_channels==in_channels: return\n        \n        if hasattr(reshape_node.grad_fn, '_saved_self_sizes'):\n            if len(size)==4 and size[1]*size[2]*size[3]!=out_channels:\n                return\n        \n        # Flatten\n        #print(reshape_node.grad_fn._saved_self_sizes, in_channels, out_channels)\n        if out_channels > in_channels:\n             for in_node in reshape_node.inputs:\n                for dep in reshape_node.dependencies:\n                    if dep.target == in_node:\n                        dep.index_mapping[0] = _helpers._FlattenIndexMapping(\n                            stride=out_channels // in_channels, reverse=True\n                        )\n\n                for dep in in_node.dependencies:\n                    if dep.target == reshape_node:\n                        dep.index_mapping[0] = _helpers._FlattenIndexMapping(\n                            stride=out_channels // in_channels, reverse=False\n                        )\n        else: # 1D -> 2D\n            for out_node in reshape_node.outputs:\n                for dep in 
reshape_node.dependencies:\n                    if dep.target == out_node:\n                        dep.index_mapping[0] = _helpers._FlattenIndexMapping(\n                            stride=in_channels // out_channels, reverse=True\n                        )\n\n                for dep in out_node.dependencies:\n                    if dep.target == reshape_node:\n                        dep.index_mapping[0] = _helpers._FlattenIndexMapping(\n                            stride=in_channels // out_channels, reverse=False\n                        )\n        #print(in_channels, out_channels)\n        #print(reshape_node.grad_fn._saved_self_sizes)\n        #print('------')\n        \n    def _update_concat_index_mapping(self, cat_node: Node):\n        if cat_node.type != ops.OPTYPE.CONCAT:\n            return\n        \n        if cat_node.module.concat_sizes is not None:\n            chs = cat_node.module.concat_sizes\n        else:\n            chs = []\n            for n in cat_node.inputs:\n                chs.append(self.infer_channels(n, cat_node))\n            cat_node.module.concat_sizes = chs\n            \n        offsets = [0]\n        for ch in chs:\n            if ch is None: \n                #warnings.warn(\"Fails to trace the concat operation. It may lead to unexpected results.\")\n                return\n            offsets.append(offsets[-1] + ch)\n        cat_node.module.offsets = offsets\n\n        # no transform if the concat dim is different from the feature dim\n        # TODO: make the messy for loop more efficient\n        addressed_dep = []\n        for i, in_node in enumerate(cat_node.inputs):\n            for dep in cat_node.dependencies:\n                if any((dep is d) for d in addressed_dep): continue\n                if dep.target == in_node:\n                    if cat_node.enable_index_mapping:\n                        dep.index_mapping[1] = _helpers._ConcatIndexMapping(\n                            offset=offsets[i: i + 2], reverse=True\n                        )\n                        addressed_dep.append(dep)\n                        break\n                        \n        addressed_dep = []\n        for i, in_node in enumerate(cat_node.inputs):\n            for dep in in_node.dependencies:\n                if any((dep is d) for d in addressed_dep): continue\n                if dep.target == cat_node:\n                    if cat_node.enable_index_mapping:\n                        dep.index_mapping[1] = _helpers._ConcatIndexMapping(\n                            offset=offsets[i: i + 2], reverse=False\n                        )\n                        addressed_dep.append(dep)\n                        break\n    \n        \n    def _update_split_index_mapping(self, split_node: Node):\n        if split_node.type != ops.OPTYPE.SPLIT:\n            return\n\n        offsets = split_node.module.offsets\n        if offsets is None:\n            return\n        addressed_dep = []\n        for i, out_node in enumerate(split_node.outputs):\n            for dep in split_node.dependencies:\n                if any((dep is d) for d in addressed_dep): continue\n                if dep.target == out_node:\n                    if split_node.enable_index_mapping:\n                        dep.index_mapping[0] = (_helpers._SplitIndexMapping(\n                            offset=offsets[i: i + 2], reverse=False\n                        ))\n                        addressed_dep.append(dep)\n                        break\n        \n        addressed_dep = []\n        for i, 
out_node in enumerate(split_node.outputs):\n            for dep in out_node.dependencies:\n                if dep.target == split_node:\n                    if any((dep is d) for d in addressed_dep): continue\n                    if split_node.enable_index_mapping:\n                        dep.index_mapping[0] = (_helpers._SplitIndexMapping(\n                            offset=offsets[i: i + 2], reverse=True\n                        ))\n                        addressed_dep.append(dep)\n                        break\n\n    def infer_channels(self, node_1, node_2):\n        if node_1.type == ops.OPTYPE.SPLIT:\n            for i, n in enumerate(node_1.outputs):\n                if n == node_2:\n                    return node_1.module.split_sizes[i]\n        return self._infer_out_channels_recursively(node_1)\n\n        \n\n\n\n        "
  },
  {
    "path": "ddpm_exp/torch_pruning/importance.py",
    "content": "import abc\nimport torch\nimport torch.nn as nn\n\nimport typing\nfrom .pruner import function\nfrom ._helpers import _FlattenIndexMapping\nfrom . import ops\nimport math\n\nclass Importance(abc.ABC):\n    \"\"\" estimate the importance of a Pruning Group, and return an 1-D per-channel importance score.\n    \"\"\"\n    @abc.abstractclassmethod\n    def __call__(self, group)-> torch.Tensor:\n        raise NotImplementedError\n\nclass MagnitudeImportance(Importance):\n    def __init__(self, p=2, group_reduction=\"mean\", normalizer='mean'):\n        self.p = p\n        self.group_reduction = group_reduction\n        self.normalizer = normalizer\n\n    def _normalize(self, group_importance, normalizer):\n        if normalizer is None: \n            return group_importance\n        elif isinstance(normalizer, typing.Callable):\n            return normalizer(group_importance)\n        elif normalizer == \"sum\":\n            return group_importance / group_importance.sum()\n        elif normalizer == \"standarization\":\n            return (group_importance - group_importance.min()) / (group_importance.max() - group_importance.min()+1e-8)\n        elif normalizer == \"mean\":\n            return group_importance / group_importance.mean()\n        elif normalizer == \"max\":\n            return group_importance / group_importance.max()\n        elif normalizer == 'gaussian':\n            return (group_importance - group_importance.mean()) / (group_importance.std()+1e-8)\n        else:\n            raise NotImplementedError\n        \n    def _reduce(self, group_imp):\n        if self.group_reduction == \"sum\":\n            group_imp = group_imp.sum(dim=0)\n        elif self.group_reduction == \"mean\":\n            group_imp = group_imp.mean(dim=0)\n        elif self.group_reduction == \"max\":\n            group_imp = group_imp.max(dim=0)[0]\n        elif self.group_reduction == \"prod\":\n            group_imp = torch.prod(group_imp, dim=0)\n        elif self.group_reduction=='first':\n            group_imp = group_imp[0]\n        elif self.group_reduction is None:\n            group_imp = group_imp\n        else: \n            raise NotImplementedError\n        return group_imp\n\n    @torch.no_grad()\n    def __call__(self, group, ch_groups=1):\n        group_imp = []\n        #Get group norm\n        #print(group.details())\n        for dep, idxs in group:\n            idxs.sort()\n            layer = dep.target.module\n            prune_fn = dep.handler\n            # Conv out_channels\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                else:\n                    w = layer.weight.data[idxs].flatten(1)\n                local_norm = w.abs().pow(self.p).sum(1)\n                #if ch_groups>1:\n                #    local_norm = local_norm.view(ch_groups, -1).sum(0)\n                #    local_norm = local_norm.repeat(ch_groups)\n                group_imp.append(local_norm)\n\n            # Conv in_channels\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                function.prune_linear_in_channels,\n            ]:\n                is_conv_flatten_linear = False\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = 
(layer.weight).flatten(1)\n                else:\n                    w = (layer.weight).transpose(0, 1).flatten(1)     \n                \n                #if ch_groups>1 and prune_fn==function.prune_conv_in_channels and layer.groups==1:\n                    # standard convs with ch_groups>1\n                    #print(w.shape)\n                #    w = w.view(ch_groups, w.shape[0] // ch_groups, w.shape[1]).flatten(1)       \n                local_norm = w.abs().pow(self.p).sum(1)\n                #if ch_groups>1:\n                    #if len(local_norm)==len(group_imp[0]):\n                    #local_norm = local_norm.view(ch_groups, -1).sum(0)\n                #    local_norm = local_norm.repeat(ch_groups)\n                local_norm = local_norm[idxs]\n                group_imp.append(local_norm)\n            # BN\n            elif prune_fn == function.prune_batchnorm_out_channels:\n                # regularize BN\n                if layer.affine:\n                    w = layer.weight.data[idxs]\n                    local_norm = w.abs().pow(self.p)\n                    #if ch_groups>1:\n                    #    local_norm = local_norm.view(ch_groups, -1).sum(0)\n                    #    local_norm = local_norm.repeat(ch_groups)\n                    #print(local_norm.shape)\n                    group_imp.append(local_norm)\n        if len(group_imp)==0:\n            return None\n        imp_size = len(group_imp[0])\n        aligned_group_imp = []\n        for imp in group_imp:\n            if len(imp)==imp_size:\n                aligned_group_imp.append(imp)\n        group_imp = torch.stack(aligned_group_imp, dim=0)\n        group_imp = self._reduce(group_imp)\n        group_imp = self._normalize(group_imp, self.normalizer)\n        return group_imp\n\n\nclass BNScaleImportance(MagnitudeImportance):\n    \"\"\"Learning Efficient Convolutional Networks through Network Slimming, \n    https://arxiv.org/abs/1708.06519\n    \"\"\"\n    def __init__(self, group_reduction='mean', normalizer='mean'):\n        super().__init__(p=1, group_reduction=group_reduction, normalizer=normalizer)\n    \n    def __call__(self, group, ch_groups=1):\n        group_imp = []\n        for dep, _ in group:\n            module = dep.target.module\n            if isinstance(module, (ops.TORCH_BATCHNORM)) and module.affine:\n                local_imp = torch.abs(module.weight.data)\n                if ch_groups>1:\n                    local_imp = local_imp.view(ch_groups, -1).mean(0)\n                    local_imp = local_imp.repeat(ch_groups)\n                group_imp.append(local_imp)\n        if len(group_imp)==0:\n            return None\n        group_imp = torch.stack(group_imp, dim=0)\n        group_imp = self._reduce(group_imp)\n        group_imp = self._normalize(group_imp, self.normalizer)\n        return group_imp\n\n\nclass LAMPImportance(MagnitudeImportance):\n    \"\"\"Layer-adaptive Sparsity for the Magnitude-based Pruning,\n    https://arxiv.org/abs/2010.07611\n    \"\"\"\n    def __init__(self, p=2, group_reduction=\"mean\", normalizer='mean'):\n        super().__init__(p=p, group_reduction=group_reduction, normalizer=normalizer)\n\n    @torch.no_grad()\n    def __call__(self, group, **kwargs):\n        group_imp = []\n        for dep, idxs in group:\n            layer = dep.target.module\n            prune_fn = dep.handler\n\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:\n                if 
hasattr(layer, \"transposed\") and layer.transposed:\n                    w = (layer.weight)[:, idxs].transpose(0, 1)\n                else:\n                    w = (layer.weight)[idxs]\n                local_imp = torch.norm(\n                    torch.flatten(w, 1), dim=1, p=self.p)\n                group_imp.append(local_imp)\n\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                function.prune_linear_in_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = (layer.weight)[idxs].flatten(1)\n                else:\n                    w = (layer.weight)[:, idxs].transpose(0, 1).flatten(1)\n                if (\n                    w.shape[0] != group_imp[0].shape[0]\n                ):  # for conv-flatten-linear without global pooling\n                    w = w.view(\n                        group_imp[0].shape[0],\n                        w.shape[0] // group_imp[0].shape[0],\n                        w.shape[1],\n                    ).flatten(1)\n                local_imp = torch.norm(w, dim=1, p=self.p)\n                group_imp.append(local_imp)\n\n            elif prune_fn == function.prune_batchnorm_out_channels:\n                if layer.affine is not None:\n                    w = (layer.weight)[idxs].view(-1, 1)\n                    local_imp = torch.norm(w, dim=1, p=self.p)\n                    group_imp.append(local_imp)\n        if len(group_imp)==0:\n            return None\n        group_imp = torch.stack(group_imp, dim=0)\n        group_imp = self._reduce(group_imp)\n        group_imp = self._normalize(group_imp, self.normalizer)\n        return self.lamp(group_imp)\n\n    def lamp(self, imp):\n        argsort_idx = torch.argsort(imp, dim=0, descending=True).tolist()\n        sorted_imp = imp[argsort_idx]\n        cumsum_imp = torch.cumsum(sorted_imp, dim=0)\n        sorted_imp = sorted_imp / cumsum_imp\n        inversed_idx = torch.arange(len(sorted_imp))[\n            argsort_idx\n        ].tolist()  # [0, 1, 2, 3, ..., ]\n        return sorted_imp[inversed_idx]\n\nclass RandomImportance(Importance):\n    @torch.no_grad()\n    def __call__(self, group, **kwargs):\n        _, idxs = group[0]\n        return torch.rand(len(idxs))\n\nclass GroupNormImportance(MagnitudeImportance):\n    def __init__(self, p=2, normalizer='max'):\n        super().__init__(p=p, group_reduction=None, normalizer=normalizer)\n        self.p = p\n        self.normalizer = normalizer\n        \n    @torch.no_grad()\n    def __call__(self, group, ch_groups=1):\n        group_norm = 0\n\n        #Get group norm\n        for dep, idxs in group:\n            idxs.sort()\n            layer = dep.target.module\n            prune_fn = dep.handler\n\n            # Conv out_channels\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:  \n                if hasattr(layer, 'transposed') and layer.transposed:\n                    w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                else:\n                    w = layer.weight.data[idxs].flatten(1)\n                local_norm = w.abs().pow(self.p).sum(1)\n                #print(local_norm.shape, layer, idxs, ch_groups)\n                if ch_groups>1:\n                    local_norm = local_norm.view(ch_groups, -1).sum(0)\n                    local_norm = local_norm.repeat(ch_groups)\n                group_norm+=local_norm\n              
  #if layer.bias is not None:\n                #    group_norm += layer.bias.data[idxs].pow(2)\n            # Conv in_channels\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                function.prune_linear_in_channels,\n            ]:\n                is_conv_flatten_linear = False\n                if hasattr(layer, 'transposed') and layer.transposed:\n                    w = (layer.weight).flatten(1)  \n                else:\n                    w = (layer.weight).transpose(0, 1).flatten(1)             \n                if (w.shape[0] != group_norm.shape[0]):  \n                    if (hasattr(dep, 'index_mapping') and isinstance(dep.index_mapping, _FlattenIndexMapping)):\n                        #conv-flatten\n                        w = w[idxs].view(\n                            group_norm.shape[0],\n                            w.shape[0] // group_norm.shape[0],\n                            w.shape[1],\n                        ).flatten(1)\n                        is_conv_flatten_linear = True\n                    elif ch_groups>1 and prune_fn==function.prune_conv_in_channels and layer.groups==1:\n                        # non-grouped conv with group convs\n                        w = w.view(w.shape[0] // group_norm.shape[0],\n                                group_norm.shape[0], w.shape[1]).transpose(0, 1).flatten(1)           \n                local_norm = w.abs().pow(self.p).sum(1)\n                if ch_groups>1:\n                    if len(local_norm)==len(group_norm):\n                        local_norm = local_norm.view(ch_groups, -1).sum(0)\n                    local_norm = local_norm.repeat(ch_groups)\n                if not is_conv_flatten_linear:\n                    local_norm = local_norm[idxs]\n                group_norm += local_norm\n            # BN\n            elif prune_fn == function.prune_batchnorm_out_channels:\n                # regularize BN\n                if layer.affine:\n                    w = layer.weight.data[idxs]\n                    local_norm = w.abs().pow(self.p)\n                    if ch_groups>1:\n                        local_norm = local_norm.view(ch_groups, -1).sum(0)\n                        local_norm = local_norm.repeat(ch_groups)\n                    group_norm += local_norm\n\n            elif prune_fn == function.prune_lstm_out_channels:\n                _idxs = torch.tensor(idxs)\n                local_norm = 0\n                local_norm_reverse = 0\n                num_layers = layer.num_layers\n                expanded_idxs = torch.cat([ _idxs+i*layer.hidden_size for i in range(4) ], dim=0)\n                if layer.bidirectional:\n                    postfix = ['', '_reverse']\n                else:\n                    postfix = ['']\n\n                local_norm+=getattr(layer, 'weight_hh_l0')[expanded_idxs].abs().pow(self.p).sum(1).view(4, -1).sum(0)\n                local_norm+=getattr(layer, 'weight_hh_l0')[:, _idxs].abs().pow(self.p).sum(0)\n                local_norm+=getattr(layer, 'weight_ih_l0')[expanded_idxs].abs().pow(self.p).sum(1).view(4, -1).sum(0)\n                if layer.bidirectional:\n                    local_norm_reverse+=getattr(layer, 'weight_hh_l0')[expanded_idxs].abs().pow(self.p).sum(1).view(4, -1).sum(0)\n                    local_norm_reverse+=getattr(layer, 'weight_hh_l0')[:, _idxs].abs().pow(self.p).sum(0)\n                    local_norm_reverse+=getattr(layer, 'weight_ih_l0')[expanded_idxs].abs().pow(self.p).sum(1).view(4, -1).sum(0)\n                    
local_norm = torch.cat([local_norm, local_norm_reverse], dim=0)\n                group_norm += local_norm\n            elif prune_fn == function.prune_lstm_in_channels:\n                local_norm=getattr(layer, 'weight_ih_l0')[:, idxs].abs().pow(self.p).sum(0)\n                if layer.bidirectional:\n                    local_norm_reverse+=getattr(layer, 'weight_ih_l0_reverse')[:, idxs].abs().pow(self.p).sum(0)\n                    local_norm = torch.cat([local_norm, local_norm_reverse], dim=0)\n                group_norm+=local_norm\n        group_imp = group_norm**(1/self.p)\n        group_imp = self._normalize(group_imp, self.normalizer)\n        return group_imp \n\nclass TaylorImportance(Importance):\n    def __init__(self, group_reduction=\"mean\", normalizer='mean'):\n        self.group_reduction = group_reduction\n        self.normalizer = normalizer\n    \n    def set_model(self, model):\n        self.model = model\n\n    def _normalize(self, group_importance, normalizer):\n        if normalizer is None: \n            return group_importance\n        elif isinstance(normalizer, typing.Callable):\n            return normalizer(group_importance)\n        elif normalizer == \"sum\":\n            return group_importance / group_importance.sum()\n        elif normalizer == \"standarization\":\n            return (group_importance - group_importance.min()) / (group_importance.max() - group_importance.min()+1e-8)\n        elif normalizer == \"mean\":\n            return group_importance / group_importance.mean()\n        elif normalizer == \"max\":\n            return group_importance / group_importance.max()\n        elif normalizer == 'gaussian':\n            return (group_importance - group_importance.mean()) / (group_importance.std()+1e-8)\n        else:\n            raise NotImplementedError\n        \n    def _reduce(self, group_imp):\n        if self.group_reduction == \"sum\":\n            group_imp = group_imp.sum(dim=0)\n        elif self.group_reduction == \"mean\":\n            group_imp = group_imp.mean(dim=0)\n        elif self.group_reduction == \"max\":\n            group_imp = group_imp.max(dim=0)[0]\n        elif self.group_reduction == \"prod\":\n            group_imp = torch.prod(group_imp, dim=0)\n        elif self.group_reduction=='first':\n            group_imp = group_imp[0]\n        elif self.group_reduction is None:\n            group_imp = group_imp\n        else: \n            raise NotImplementedError\n        return group_imp\n\n    @torch.no_grad()\n    def __call__(self, group, ch_groups=1):\n        group_imp = []\n        for dep, idxs in group:\n            idxs.sort()\n            layer = dep.target.module\n            prune_fn = dep.handler\n            # Conv out_channels\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                    dw= layer.weight.grad.data.transpose(1, 0)[idxs].flatten(1)\n                else:\n                    w = layer.weight.data[idxs].flatten(1)\n                    dw= layer.weight.grad.data[idxs].flatten(1)\n                wdw = (w*dw).abs().pow(2).sum(1)\n                local_norm = wdw\n                group_imp.append(local_norm)\n            # Conv in_channels\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                
function.prune_linear_in_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = (layer.weight).flatten(1)[idxs]\n                    dw= (layer.weight.grad).flatten(1)[idxs]\n                else:\n                    w = (layer.weight).transpose(0, 1).flatten(1)[idxs]     \n                    dw= (layer.weight.grad).transpose(0, 1).flatten(1)[idxs]     \n                wdw = (w*dw).abs().pow(2).sum(1)\n                local_norm = wdw\n                group_imp.append(local_norm)\n            # BN\n            elif prune_fn == function.prune_groupnorm_out_channels:\n                # regularize BN\n                if layer.affine:\n                    w = layer.weight.data[idxs]\n                    dw= layer.weight.grad.data[idxs]\n                    wdw = (w*dw).abs()\n                    local_norm = wdw\n                    group_imp.append(local_norm)\n\n        if len(group_imp)==0:\n            return None\n        imp_size = len(group_imp[0])\n        aligned_group_imp = []\n        for imp in group_imp:\n            if len(imp)==imp_size:\n                aligned_group_imp.append(imp)\n        group_imp = torch.stack(aligned_group_imp, dim=0)\n        group_imp = group_imp.sum(0)  #self._reduce(group_imp)\n        #if ch_groups>1:\n        #    group_imp = group_imp.view(ch_groups, -1).mean(0)\n        #    group_imp = group_imp.repeat(ch_groups)\n        #group_imp = group_imp.abs()\n        #group_imp = self._normalize(group_imp, self.normalizer)\n        return group_imp\n\n\n\nclass FullTaylorImportance(Importance):\n    def __init__(self, order=1, group_reduction=\"mean\", normalizer='mean'):\n        self.group_reduction = group_reduction\n        self.normalizer = normalizer\n        self.order=order\n\n    def set_model(self, model):\n        self.model = model\n\n    def _normalize(self, group_importance, normalizer):\n        if normalizer is None: \n            return group_importance\n        elif isinstance(normalizer, typing.Callable):\n            return normalizer(group_importance)\n        elif normalizer == \"sum\":\n            return group_importance / group_importance.sum()\n        elif normalizer == \"standarization\":\n            return (group_importance - group_importance.min()) / (group_importance.max() - group_importance.min()+1e-8)\n        elif normalizer == \"mean\":\n            return group_importance / group_importance.mean()\n        elif normalizer == \"max\":\n            return group_importance / group_importance.max()\n        elif normalizer == 'gaussian':\n            return (group_importance - group_importance.mean()) / (group_importance.std()+1e-8)\n        else:\n            raise NotImplementedError\n        \n    def _reduce(self, group_imp):\n        if self.group_reduction == \"sum\":\n            group_imp = group_imp.sum(dim=0)\n        elif self.group_reduction == \"mean\":\n            group_imp = group_imp.mean(dim=0)\n        elif self.group_reduction == \"max\":\n            group_imp = group_imp.max(dim=0)[0]\n        elif self.group_reduction == \"prod\":\n            group_imp = torch.prod(group_imp, dim=0)\n        elif self.group_reduction=='first':\n            group_imp = group_imp[0]\n        elif self.group_reduction is None:\n            group_imp = group_imp\n        else: \n            raise NotImplementedError\n        return group_imp\n\n    @torch.no_grad()\n    def __call__(self, group, ch_groups=1):\n        group_imp = []\n        for 
dep, idxs in group:\n            idxs.sort()\n            layer = dep.target.module\n            prune_fn = dep.handler\n            # Conv out_channels\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                    dw= layer.weight.grad.data.transpose(1, 0)[idxs].flatten(1)\n                else:\n                    w = layer.weight.data[idxs].flatten(1)\n                    dw= layer.weight.grad.data[idxs].flatten(1)\n    \n                if self.order == 1:\n                    wdw = (w*dw).sum(1)\n                elif self.order == 2:\n                    wdw = (w*dw).sum(1) + (w*dw).pow(2).sum(1)\n                \n                local_norm = wdw\n                group_imp.append(local_norm)\n            # Conv in_channels\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                function.prune_linear_in_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = (layer.weight).flatten(1)[idxs]\n                    dw= (layer.weight.grad).flatten(1)[idxs]\n                else:\n                    w = (layer.weight).transpose(0, 1).flatten(1)[idxs]     \n                    dw= (layer.weight.grad).transpose(0, 1).flatten(1)[idxs]     \n                if self.order == 1:\n                    wdw = (w*dw).sum(1)\n                elif self.order == 2:\n                    wdw = (w*dw).sum(1) + (w*dw).pow(2).sum(1)\n                \n                local_norm = wdw\n                group_imp.append(local_norm)\n            # BN\n            elif prune_fn == function.prune_groupnorm_out_channels:\n                # regularize BN\n                if layer.affine:\n                    w = layer.weight.data[idxs]\n                    dw= layer.weight.grad.data[idxs]\n                    if self.order == 1:\n                        wdw = (w*dw)\n                    elif self.order == 2:\n                        wdw = (w*dw) + (w*dw).pow(2)\n                    local_norm = wdw\n                    group_imp.append(local_norm)\n\n        if len(group_imp)==0:\n            return None\n        imp_size = len(group_imp[0])\n        aligned_group_imp = []\n        for imp in group_imp:\n            if len(imp)==imp_size:\n                aligned_group_imp.append(imp)\n        group_imp = torch.stack(aligned_group_imp, dim=0)\n        group_imp = group_imp.sum(0).abs()  #self._reduce(group_imp)\n        return group_imp\n\n\n\n\nclass AbsTaylorImportance(Importance):\n    def __init__(self, order=1, group_reduction=\"mean\", normalizer='mean'):\n        self.group_reduction = group_reduction\n        self.normalizer = normalizer\n        self.order=order\n        self._accum_abs_grad = {}\n        \n    def set_model(self, model):\n        self.model = model\n\n    def _normalize(self, group_importance, normalizer):\n        if normalizer is None: \n            return group_importance\n        elif isinstance(normalizer, typing.Callable):\n            return normalizer(group_importance)\n        elif normalizer == \"sum\":\n            return group_importance / group_importance.sum()\n        elif normalizer == \"standarization\":\n            return (group_importance - group_importance.min()) / (group_importance.max() - 
group_importance.min()+1e-8)\n        elif normalizer == \"mean\":\n            return group_importance / group_importance.mean()\n        elif normalizer == \"max\":\n            return group_importance / group_importance.max()\n        elif normalizer == 'gaussian':\n            return (group_importance - group_importance.mean()) / (group_importance.std()+1e-8)\n        else:\n            raise NotImplementedError\n        \n    def _reduce(self, group_imp):\n        if self.group_reduction == \"sum\":\n            group_imp = group_imp.sum(dim=0)\n        elif self.group_reduction == \"mean\":\n            group_imp = group_imp.mean(dim=0)\n        elif self.group_reduction == \"max\":\n            group_imp = group_imp.max(dim=0)[0]\n        elif self.group_reduction == \"prod\":\n            group_imp = torch.prod(group_imp, dim=0)\n        elif self.group_reduction=='first':\n            group_imp = group_imp[0]\n        elif self.group_reduction is None:\n            group_imp = group_imp\n        else: \n            raise NotImplementedError\n        return group_imp\n\n    def accum_abs_grad(self, model):\n        for name, p in model.named_parameters():\n            if p.requires_grad:\n                if p not in self._accum_abs_grad:\n                    self._accum_abs_grad[name]=(p.grad).abs()\n                else:\n                    self._accum_abs_grad[name]+=(p.grad).abs()    \n    \n    def assign_abs_grad(self, model):\n        for name, p in model.named_parameters():\n            if name in self._accum_abs_grad:\n                p.grad = self._accum_abs_grad[name]\n\n    @torch.no_grad()\n    def __call__(self, group, ch_groups=1):\n        group_imp = []\n        for dep, idxs in group:\n            idxs.sort()\n            layer = dep.target.module\n            prune_fn = dep.handler\n            # Conv out_channels\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                    dw= layer.weight.grad.data.transpose(1, 0)[idxs].flatten(1)\n                else:\n                    w = layer.weight.data[idxs].flatten(1)\n                    dw= layer.weight.grad.data[idxs].flatten(1)\n                wdw = (w*dw).abs().sum(1)\n                local_norm = wdw\n                group_imp.append(local_norm)\n            # Conv in_channels\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                function.prune_linear_in_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = (layer.weight).flatten(1)[idxs]\n                    dw= (layer.weight.grad).flatten(1)[idxs]\n                else:\n                    w = (layer.weight).transpose(0, 1).flatten(1)[idxs]     \n                    dw= (layer.weight.grad).transpose(0, 1).flatten(1)[idxs]     \n                wdw = (w * dw).abs().sum(1)\n                local_norm = wdw\n                group_imp.append(local_norm)\n            # BN\n            elif prune_fn == function.prune_groupnorm_out_channels:\n                # regularize BN\n                if layer.affine:\n                    w = layer.weight.data[idxs]\n                    dw= layer.weight.grad.data[idxs]\n                    wdw = (w * dw).abs()\n                    local_norm = wdw\n          
          group_imp.append(local_norm)\n\n        if len(group_imp)==0:\n            return None\n        imp_size = len(group_imp[0])\n        aligned_group_imp = []\n        for imp in group_imp:\n            if len(imp)==imp_size:\n                aligned_group_imp.append(imp)\n        group_imp = torch.stack(aligned_group_imp, dim=0)\n        group_imp = group_imp.sum(0)  #self._reduce(group_imp)\n        #if ch_groups>1:\n        #    group_imp = group_imp.view(ch_groups, -1).mean(0)\n        #    group_imp = group_imp.repeat(ch_groups)\n        #group_imp = group_imp.abs()\n        #group_imp = self._normalize(group_imp, self.normalizer)\n        return group_imp\n\nclass FisherImportance(Importance):\n    def __init__(self, group_reduction=\"mean\", normalizer='mean'):\n        self.group_reduction = group_reduction\n        self.normalizer = normalizer\n    \n    def set_model(self, model):\n        self.model = model\n\n    def _normalize(self, group_importance, normalizer):\n        if normalizer is None: \n            return group_importance\n        elif isinstance(normalizer, typing.Callable):\n            return normalizer(group_importance)\n        elif normalizer == \"sum\":\n            return group_importance / group_importance.sum()\n        elif normalizer == \"standarization\":\n            return (group_importance - group_importance.min()) / (group_importance.max() - group_importance.min()+1e-8)\n        elif normalizer == \"mean\":\n            return group_importance / group_importance.mean()\n        elif normalizer == \"max\":\n            return group_importance / group_importance.max()\n        elif normalizer == 'gaussian':\n            return (group_importance - group_importance.mean()) / (group_importance.std()+1e-8)\n        else:\n            raise NotImplementedError\n        \n    def _reduce(self, group_imp):\n        if self.group_reduction == \"sum\":\n            group_imp = group_imp.sum(dim=0)\n        elif self.group_reduction == \"mean\":\n            group_imp = group_imp.mean(dim=0)\n        elif self.group_reduction == \"max\":\n            group_imp = group_imp.max(dim=0)[0]\n        elif self.group_reduction == \"prod\":\n            group_imp = torch.prod(group_imp, dim=0)\n        elif self.group_reduction=='first':\n            group_imp = group_imp[0]\n        elif self.group_reduction is None:\n            group_imp = group_imp\n        else: \n            raise NotImplementedError\n        return group_imp\n\n    @torch.no_grad()\n    def __call__(self, group, ch_groups=1):\n        group_imp = []\n        for dep, idxs in group:\n            idxs.sort()\n            layer = dep.target.module\n            prune_fn = dep.handler\n            # Conv out_channels\n            if prune_fn in [\n                function.prune_conv_out_channels,\n                function.prune_linear_out_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                    dw= layer.weight.grad.data.transpose(1, 0)[idxs].flatten(1)\n                else:\n                    w = layer.weight.data[idxs].flatten(1)\n                    dw= layer.weight.grad.data[idxs].flatten(1)\n                #wdw = dw\n                local_norm = dw.pow(2).sum(1)\n                #if ch_groups>1:\n                #    local_norm = local_norm.view(ch_groups, -1).mean(0)\n                #    local_norm = local_norm.repeat(ch_groups)\n                
group_imp.append(local_norm)\n            # Conv in_channels\n            elif prune_fn in [\n                function.prune_conv_in_channels,\n                function.prune_linear_in_channels,\n            ]:\n                if hasattr(layer, \"transposed\") and layer.transposed:\n                    w = (layer.weight).flatten(1)[idxs]\n                    dw= (layer.weight.grad).flatten(1)[idxs]\n                else:\n                    w = (layer.weight).transpose(0, 1).flatten(1)[idxs]     \n                    dw= (layer.weight.grad).transpose(0, 1).flatten(1)[idxs]     \n\n                local_norm = dw.pow(2).sum(1)\n                #if ch_groups>1:\n                #    local_norm = local_norm.view(ch_groups, -1).mean(0) \n                #    local_norm = local_norm.repeat(ch_groups)    \n                group_imp.append(local_norm)\n            # BN\n            elif prune_fn == function.prune_groupnorm_out_channels:\n                # regularize BN\n                if layer.affine:\n                    w = layer.weight.data[idxs]\n                    dw= layer.weight.grad.data[idxs]\n                    wdw = w*dw\n                    local_norm = wdw.pow(2)\n                    #if ch_groups>1:\n                    #    local_norm = local_norm.view(ch_groups, -1).mean(0)\n                    #    local_norm = local_norm.repeat(ch_groups)\n                    group_imp.append(local_norm)\n        if len(group_imp)==0:\n            return None\n        imp_size = len(group_imp[0])\n        aligned_group_imp = []\n        for imp in group_imp:\n            if len(imp)==imp_size:\n                aligned_group_imp.append(imp)\n        group_imp = torch.stack(aligned_group_imp, dim=0)\n        group_imp = group_imp.sum(0)  #self._reduce(group_imp)\n        #if ch_groups>1:\n        #    group_imp = group_imp.view(ch_groups, -1).mean(0)\n        #    group_imp = group_imp.repeat(ch_groups)\n        #group_imp = group_imp.abs()\n        #group_imp = self._normalize(group_imp, self.normalizer)\n        return group_imp"
  },
  {
    "path": "ddpm_exp/torch_pruning/ops.py",
    "content": "import torch.nn as nn\nfrom enum import IntEnum\n\n\nclass DummyMHA(nn.Module):\n    def __init__(self):\n        super(DummyMHA, self).__init__()\n\n\nclass _CustomizedOp(nn.Module):\n    def __init__(self, op_class):\n        self.op_cls = op_class\n\n    def __repr__(self):\n        return \"CustomizedOp({})\".format(str(self.op_cls))\n\n\nclass _ConcatOp(nn.Module):\n    def __init__(self, id):\n        super(_ConcatOp, self).__init__()\n        self.offsets = None\n        self.concat_sizes = None\n        self.id = id\n\n    def __repr__(self):\n        return \"_ConcatOp_{}({})\".format(self.id, self.offsets)\n\n\nclass _SplitOp(nn.Module):\n    def __init__(self, id):\n        super(_SplitOp, self).__init__()\n        self.offsets = None\n        self.split_sizes = None  \n        self.id = id\n\n    def __repr__(self):\n        return \"_SplitOp_{}({})\".format(self.id,self.offsets)\n\nclass _ReshapeOp(nn.Module):\n    def __init__(self, id):\n        super(_ReshapeOp, self).__init__()\n        self.id = id\n    def __repr__(self):\n        return \"_Reshape_{}()\".format(self.id)\n\n\nclass _ElementWiseOp(nn.Module):\n    def __init__(self, id, grad_fn):\n        super(_ElementWiseOp, self).__init__()\n        self._grad_fn = grad_fn\n        self.id = id\n    def __repr__(self):\n        return \"_ElementWiseOp_{}({})\".format(self.id, self._grad_fn)\n\n\n######################################################\n# Dummy Pruners\nclass DummyPruner(object):\n    def __call__(self, layer, *args, **kargs):\n        return layer\n\n    def prune_out_channels(self, layer, idxs):\n        return layer\n\n    prune_in_channels = prune_out_channels\n\n    def get_out_channels(self, layer):\n        return None\n\n    def get_in_channels(self, layer):\n        return None\n\n\nclass ConcatPruner(DummyPruner):\n    def prune_out_channels(self, layer, idxs):\n        if layer.concat_sizes is None:\n            return\n        new_concat_sizes = layer.concat_sizes.copy()\n        concat_sizes = layer.concat_sizes\n        offsets = [0]\n        for i in range(len(concat_sizes)):\n            offsets.append(offsets[i] + concat_sizes[i])\n        for idx in idxs: # find the ID of the concat\n            for i in range(len(offsets)-1):\n                if idx >= offsets[i] and idx < offsets[i+1]:\n                    concat_sizes[i] -= 1\n                    break\n            new_concat_sizes[i]-=1\n        layer.concat_sizes = new_concat_sizes\n        offsets = [0]\n        for i in range(len(new_concat_sizes)):\n            offsets.append(offsets[i] + new_concat_sizes[i])\n        self.offsets = offsets\n\n    prune_in_channels = prune_out_channels\n\n\nclass SplitPruner(DummyPruner):\n    def prune_out_channels(self, layer, idxs):\n        if layer.split_sizes is None:\n            return\n        new_split_sizes = layer.split_sizes.copy()\n        split_sizes = layer.split_sizes\n        #offsets = layer.offsets\n        # accumulate split_sizes\n        offsets = [0]\n        for i in range(len(split_sizes)):\n            offsets.append(offsets[i] + split_sizes[i])\n        for idx in idxs: # find the ID of the split\n            for i in range(len(offsets)-1):\n                if idx >= offsets[i] and idx < offsets[i+1]:\n                    split_sizes[i] -= 1\n                    break\n            new_split_sizes[i]-=1\n        layer.split_sizes = new_split_sizes\n        offsets = [0]\n        for i in range(len(new_split_sizes)):\n            offsets.append(offsets[i] 
+ new_split_sizes[i])\n        self.offsets = offsets\n\n    prune_in_channels = prune_out_channels\n        \n    \n\nclass ReshapePruner(DummyPruner):\n    pass\n\nclass ElementWisePruner(DummyPruner):\n    pass\n\n\n# Standard Modules\nTORCH_CONV = nn.modules.conv._ConvNd\nTORCH_BATCHNORM = nn.modules.batchnorm._BatchNorm\nTORCH_LAYERNORM = nn.modules.normalization.LayerNorm\nTORCH_GROUPNORM = nn.GroupNorm\nTORCH_INSTANCENORM = nn.modules.instancenorm._InstanceNorm\nTORCH_PRELU = nn.PReLU\nTORCH_LINEAR = nn.Linear\nTORCH_EMBED = nn.Embedding\nTORCH_PARAMETER = nn.Parameter\nTORCH_LSTM = nn.LSTM\ntry:\n    TORCH_MHA = nn.MultiheadAttention\nexcept:\n    TORCH_MHA = DummyMHA  # for pytorch w/o MultiHeadAttention\nTORCH_OTHERS = None\n\n\nclass OPTYPE(IntEnum):\n    CONV = 0\n    BN = 1\n    LINEAR = 2\n    PRELU = 3\n    DEPTHWISE_CONV = 4\n    CONCAT = 5  # torch.cat\n    SPLIT = 6  # torch.split\n    CUSTOMIZED = 7  # customized module\n    ELEMENTWISE = 8  # element-wise add, sub, etc.\n    LN = 9  # nn.LayerNorm\n    EMBED = 10  # nn.Embedding\n    PARAMETER = 11  # nn.Parameter\n    MHA = 12\n    LSTM = 13\n    RESHAPE = 14\n    GN = 15  # nn.GroupNorm\n    IN = 16  # nn.InstanceNorm\n\n\ndef module2type(module):\n    if isinstance(module, TORCH_CONV):\n        if module.groups == module.out_channels:\n            return OPTYPE.DEPTHWISE_CONV\n        else:\n            return OPTYPE.CONV\n    elif isinstance(module, TORCH_BATCHNORM):\n        return OPTYPE.BN\n    elif isinstance(module, TORCH_PRELU):\n        return OPTYPE.PRELU\n    elif isinstance(module, TORCH_LINEAR):\n        return OPTYPE.LINEAR\n    elif isinstance(module, _ConcatOp):\n        return OPTYPE.CONCAT\n    elif isinstance(module, _SplitOp):\n        return OPTYPE.SPLIT\n    elif isinstance(module, TORCH_LAYERNORM):\n        return OPTYPE.LN\n    elif isinstance(module, TORCH_EMBED):\n        return OPTYPE.EMBED\n    elif isinstance(module, _CustomizedOp):\n        return OPTYPE.CUSTOMIZED\n    elif isinstance(module, nn.Parameter):\n        return OPTYPE.PARAMETER\n    elif isinstance(module, TORCH_MHA):\n        return OPTYPE.MHA\n    elif isinstance(module, TORCH_LSTM):\n        return OPTYPE.LSTM\n    elif isinstance(module, TORCH_GROUPNORM):\n        return OPTYPE.GN\n    elif isinstance(module, TORCH_INSTANCENORM):\n        return OPTYPE.IN\n    elif isinstance(module, _ReshapeOp):\n        return OPTYPE.RESHAPE\n    else:\n        return OPTYPE.ELEMENTWISE\n\n\ndef type2class(op_type):\n    if op_type == OPTYPE.CONV or op_type==OPTYPE.DEPTHWISE_CONV:\n        return TORCH_CONV\n    elif op_type == OPTYPE.BN:\n        return TORCH_BATCHNORM\n    elif op_type == OPTYPE.PRELU:\n        return TORCH_PRELU\n    elif op_type == OPTYPE.LINEAR:\n        return TORCH_LINEAR\n    elif op_type == OPTYPE.CONCAT:\n        return _ConcatOp\n    elif op_type == OPTYPE.SPLIT:\n        return _SplitOp\n    elif op_type == OPTYPE.LN:\n        return TORCH_LAYERNORM\n    elif op_type == OPTYPE.EMBED:\n        return TORCH_EMBED\n    elif op_type == OPTYPE.CUSTOMIZED:\n        return _CustomizedOp\n    elif op_type == OPTYPE.PARAMETER:\n        return TORCH_PARAMETER\n    elif op_type == OPTYPE.MHA:\n        return TORCH_MHA\n    elif op_type == OPTYPE.GN:\n        return TORCH_GROUPNORM\n    elif op_type == OPTYPE.IN:\n        return TORCH_INSTANCENORM\n    elif op_type == OPTYPE.LSTM:\n        return TORCH_LSTM\n    elif OPTYPE == OPTYPE.RESHAPE:\n        return _ReshapeOp\n    else:\n        return _ElementWiseOp\n\n"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/__init__.py",
    "content": "from .function import *\nfrom .algorithms import *"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/__init__.py",
    "content": "from .metapruner import MetaPruner\nfrom .magnitude_based_pruner import MagnitudePruner\nfrom .batchnorm_scale_pruner import BNScalePruner\nfrom .group_norm_pruner import GroupNormPruner\nfrom .scaling_factor_pruner import ScalingFactorPruner \nfrom .taylor_pruner import TaylorPruner"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/batchnorm_scale_pruner.py",
    "content": "from numbers import Number\nfrom typing import Callable\nfrom .metapruner import MetaPruner\nfrom .scheduler import linear_scheduler\nimport torch\nimport torch.nn as nn\n\nclass BNScalePruner(MetaPruner):\n    def __init__(\n        self,\n        model,\n        example_inputs,\n        importance,\n        reg=1e-5,\n        iterative_steps=1,\n        iterative_sparsity_scheduler: Callable = linear_scheduler,\n        ch_sparsity=0.5,\n        ch_sparsity_dict=None,\n        global_pruning=False,\n        max_ch_sparsity=1.0,\n        round_to=None,\n        ignored_layers=None,\n        customized_pruners=None,\n        unwrapped_parameters=None,\n        output_transform=None,\n    ):\n        super(BNScalePruner, self).__init__(\n            model=model,\n            example_inputs=example_inputs,\n            importance=importance,\n            iterative_steps=iterative_steps,\n            iterative_sparsity_scheduler=iterative_sparsity_scheduler,\n            ch_sparsity=ch_sparsity,\n            ch_sparsity_dict=ch_sparsity_dict,\n            global_pruning=global_pruning,\n            max_ch_sparsity=max_ch_sparsity,\n            round_to=round_to,\n            ignored_layers=ignored_layers,\n            customized_pruners=customized_pruners,\n            unwrapped_parameters=unwrapped_parameters,\n            output_transform=output_transform,\n        )\n        self.reg = reg\n\n    def regularize(self, model):\n        for m in model.modules():\n            if isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d)) and m.affine==True:\n                m.weight.grad.data.add_(self.reg*torch.sign(m.weight.data))\n"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/group_norm_pruner.py",
    "content": "import torch\nimport math\nfrom .metapruner import MetaPruner\nfrom .scheduler import linear_scheduler\nfrom .. import function\nfrom ..._helpers import _FlattenIndexMapping\n\n\nclass GroupNormPruner(MetaPruner):\n    def __init__(\n        self,\n        model,\n        example_inputs,\n        importance,\n        reg=1e-4,\n        alpha=4,\n        iterative_steps=1,\n        iterative_sparsity_scheduler=linear_scheduler,\n        ch_sparsity=0.5,\n        global_pruning=False,\n        channel_groups=dict(),\n        max_ch_sparsity=1.0,\n        soft_keeping_ratio=0.0,\n        ch_sparsity_dict=None,\n        round_to=None,\n        ignored_layers=None,\n        customized_pruners=None,\n        unwrapped_parameters=None,\n        output_transform=None,\n    ):\n        super(GroupNormPruner, self).__init__(\n            model=model,\n            example_inputs=example_inputs,\n            importance=importance,\n            iterative_steps=iterative_steps,\n            iterative_sparsity_scheduler=iterative_sparsity_scheduler,\n            ch_sparsity=ch_sparsity,\n            ch_sparsity_dict=ch_sparsity_dict,\n            global_pruning=global_pruning,\n            channel_groups=channel_groups,\n            max_ch_sparsity=max_ch_sparsity,\n            round_to=round_to,\n            ignored_layers=ignored_layers,\n            customized_pruners=customized_pruners,\n            unwrapped_parameters=unwrapped_parameters,\n            output_transform=output_transform,\n        )\n        self.reg = reg\n        self.alpha = alpha\n        self.groups = list(self.DG.get_all_groups())\n        self.soft_keeping_ratio = soft_keeping_ratio\n        self.cnt = 0\n\n    @torch.no_grad()\n    def regularize(self, model, base=16):\n        for i, group in enumerate(self.groups):\n            ch_groups = self.get_channel_groups(group)\n            group_norm = 0\n\n            # Get group norm\n            #print(group)\n            for dep, idxs in group:\n                idxs.sort()\n                layer = dep.target.module\n                prune_fn = dep.handler\n\n                # Conv out_channels\n                if prune_fn in [\n                    function.prune_conv_out_channels,\n                    function.prune_linear_out_channels,\n                ]:\n                    w = layer.weight.data[idxs].flatten(1)\n                    local_norm = w.pow(2).sum(1)\n                    #print(local_norm.shape, layer, idxs, ch_groups)\n                    if ch_groups>1:\n                        local_norm = local_norm.view(ch_groups, -1).sum(0)\n                        local_norm = local_norm.repeat(ch_groups)\n                    group_norm+=local_norm\n                    #if layer.bias is not None:\n                    #    group_norm += layer.bias.data[idxs].pow(2)\n                # Conv in_channels\n                elif prune_fn in [\n                    function.prune_conv_in_channels,\n                    function.prune_linear_in_channels,\n                ]:\n                    w = (layer.weight).transpose(0, 1).flatten(1)\n                    if (\n                        w.shape[0] != group_norm.shape[0]\n                    ):  \n                        if hasattr(dep, 'index_mapping') and isinstance(dep.index_mapping, _FlattenIndexMapping):\n                            # conv - latten\n                            w = w.view(\n                                group_norm.shape[0],\n                                w.shape[0] // group_norm.shape[0],\n    
                            w.shape[1],\n                            ).flatten(1)\n                        elif ch_groups>1 and prune_fn==function.prune_conv_in_channels and layer.groups==1:\n                            # group conv\n                            w = w.view(w.shape[0] // group_norm.shape[0],\n                                    group_norm.shape[0], w.shape[1]).transpose(0, 1).flatten(1)               \n                    local_norm = w.pow(2).sum(1)\n                    if ch_groups>1:\n                        if len(local_norm)==len(group_norm):\n                            local_norm = local_norm.view(ch_groups, -1).sum(0)\n                        local_norm = local_norm.repeat(ch_groups)\n                    group_norm += local_norm[idxs]\n                # BN\n                elif prune_fn == function.prune_batchnorm_out_channels:\n                    # regularize BN\n                    if layer.affine:\n                        w = layer.weight.data[idxs]\n                        local_norm = w.pow(2)\n                        if ch_groups>1:\n                            local_norm = local_norm.view(ch_groups, -1).sum(0)\n                            local_norm = local_norm.repeat(ch_groups)\n                        group_norm += local_norm\n\n                        #b = layer.bias.data[idxs]\n                        #local_norm = b.pow(2)\n                        #if ch_groups>1:\n                        #    local_norm = local_norm.view(ch_groups, -1).sum(0)\n                        #    local_norm = local_norm.repeat(ch_groups)\n                        #group_norm += local_norm\n\n\n            current_channels = len(group_norm)\n            if ch_groups>1:\n                group_norm = group_norm.view(ch_groups, -1).sum(0)\n                group_stride = current_channels//ch_groups\n                group_norm = torch.cat([group_norm+group_stride*i for i in range(ch_groups)], 0)\n            group_norm = group_norm.sqrt()\n            base = 16\n            scale = base**((group_norm.max() - group_norm) / (group_norm.max() - group_norm.min()))\n            #if self.cnt%1000==0:\n            #    print(\"=\"*15)\n            #    print(group)\n            #    print(\"Group {}\".format(i))\n            #    print(group_norm)\n            #    print(scale)\n            \n            # Update Gradient\n            for dep, idxs in group:\n                layer = dep.target.module\n                prune_fn = dep.handler\n                if prune_fn in [\n                    function.prune_conv_out_channels,\n                    function.prune_linear_out_channels,\n                ]:\n                    w = layer.weight.data[idxs]\n                    g = w * scale.view( -1, *([1]*(len(w.shape)-1)) ) #/ group_norm.view( -1, *([1]*(len(w.shape)-1)) ) * group_size #group_size #* scale.view( -1, *([1]*(len(w.shape)-1)) )\n                    layer.weight.grad.data[idxs]+=self.reg * g \n                    #if layer.bias is not None:\n                    #    b = layer.bias.data[idxs]\n                    #    g = b * scale\n                    #    layer.bias.grad.data[idxs]+=self.reg * g \n                elif prune_fn in [\n                    function.prune_conv_in_channels,\n                    function.prune_linear_in_channels,\n                ]:\n                    gn = group_norm\n                    if hasattr(dep.target, 'index_transform') and isinstance(dep.target.index_transform, _FlattenIndexTransform):\n                        gn = 
group_norm.repeat_interleave(w.shape[1]//group_norm.shape[0])\n                    # regularize input channels\n                    if prune_fn==function.prune_conv_in_channels and layer.groups>1:\n                        scale = scale[:len(idxs)//ch_groups]\n                        idxs = idxs[:len(idxs)//ch_groups]\n                    w = layer.weight.data[:, idxs]\n                    g = w * scale.view( 1, -1, *([1]*(len(w.shape)-2))  ) #/ gn.view( 1, -1, *([1]*(len(w.shape)-2)) ) * group_size #* scale.view( 1, -1, *([1]*(len(w.shape)-2))  )\n                    layer.weight.grad.data[:, idxs]+=self.reg * g\n                elif prune_fn == function.prune_batchnorm_out_channels:\n                    # regularize BN\n                    if layer.affine is not None:\n                        w = layer.weight.data[idxs]\n                        g = w * scale #/ group_norm * group_size\n                        layer.weight.grad.data[idxs]+=self.reg * g \n\n                        #b = layer.bias.data[idxs]\n                        #g = b * scale #/ group_norm * group_size\n                        #layer.bias.grad.data[idxs]+=self.reg * g \n        self.cnt+=1"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/magnitude_based_pruner.py",
    "content": "from .metapruner import MetaPruner\n\nclass MagnitudePruner(MetaPruner):\n    pass\n    "
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/metapruner.py",
    "content": "import torch\nimport torch.nn as nn\nimport typing\n\nfrom .scheduler import linear_scheduler\nfrom ..import function\nfrom ... import ops, dependency\nimport os\nimport matplotlib.pyplot as plt\n\nclass MetaPruner:\n    \"\"\"\n        Meta Pruner for structural pruning.\n\n        Args:\n            model (nn.Module): A to-be-pruned model\n            example_inputs (torch.Tensor or List): dummy inputs for graph tracing.\n            importance (Callable): importance estimator.\n            global_pruning (bool): enable global pruning. \n            ch_sparsity (float): global channel sparisty.\n            ch_sparsity_dict (Dict[nn.Module, float]): layer-specific sparsity.\n            iterative_steps (int): number of steps for iterative pruning.\n            iterative_sparsity_scheduler (Callable): scheduler for iterative pruning.\n            max_ch_sparsity (float): maximum channel sparsity.\n            ignored_layers (List[nn.Module]): ignored modules.\n\n            round_to (int): channel rounding.\n            customized_pruners (dict): a dict containing module-pruner pairs.\n            unwrapped_parameters (list): nn.Parameter that does not belong to any supported layerss.\n            root_module_types (list): types of prunable modules.\n            output_transform (Callable): A function to transform network outputs.\n        \"\"\"\n\n    def __init__(\n        self,\n        # Basic\n        model: nn.Module,\n        example_inputs: torch.Tensor,\n        importance: typing.Callable,\n        # https://pytorch.org/tutorials/intermediate/pruning_tutorial.html#global-pruning.\n        global_pruning: bool = False,\n        ch_sparsity: float = 0.5,  # channel/dim sparsity\n        ch_sparsity_dict: typing.Dict[nn.Module, float] = None,\n        max_ch_sparsity: float = 1.0,\n        iterative_steps: int = 1,  # for iterative pruning\n        iterative_sparsity_scheduler: typing.Callable = linear_scheduler,\n        ignored_layers: typing.List[nn.Module] = None,\n\n        # Advanced\n        round_to: int = None,  # round channels to 8x, 16x, ...\n        # for grouped channels.\n        channel_groups: typing.Dict[nn.Module, int] = dict(),\n        # pruners for customized layers\n        customized_pruners: typing.Dict[typing.Any,\n                                        function.BasePruningFunc] = None,\n        # unwrapped nn.Parameters like ViT.pos_emb\n        unwrapped_parameters: typing.List[nn.Parameter] = None,\n        root_module_types: typing.List = [\n            ops.TORCH_CONV, ops.TORCH_LINEAR, ops.TORCH_LSTM],  # root module for each group\n        output_transform: typing.Callable = None,\n    ):\n        self.model = model\n        self.importance = importance\n        self.ch_sparsity = ch_sparsity\n        self.ch_sparsity_dict = ch_sparsity_dict if ch_sparsity_dict is not None else {}\n        self.max_ch_sparsity = max_ch_sparsity\n        self.global_pruning = global_pruning\n\n        self.channel_groups = channel_groups\n        self.root_module_types = root_module_types\n        self.round_to = round_to\n\n        # Build dependency graph\n        self.DG = dependency.DependencyGraph().build_dependency(\n            model,\n            example_inputs=example_inputs,\n            output_transform=output_transform,\n            unwrapped_parameters=unwrapped_parameters,\n            customized_pruners=customized_pruners,\n        )\n\n        self.ignored_layers = []\n        if ignored_layers:\n            for layer in 
ignored_layers:\n                self.ignored_layers.extend(list(layer.modules()))\n\n        self.iterative_steps = iterative_steps\n        self.iterative_sparsity_scheduler = iterative_sparsity_scheduler\n        self.current_step = 0\n\n        # Record initial status\n        self.layer_init_out_ch = {}\n        self.layer_init_in_ch = {}\n        for m in self.DG.module2node.keys():\n            if ops.module2type(m) in self.DG.REGISTERED_PRUNERS:\n                self.layer_init_out_ch[m] = self.DG.get_out_channels(m)\n                self.layer_init_in_ch[m] = self.DG.get_in_channels(m)\n\n        # global channel sparsity for each iterative step\n        self.per_step_ch_sparsity = self.iterative_sparsity_scheduler(\n            self.ch_sparsity, self.iterative_steps\n        )\n\n        # The customized channel sparsity for different layers\n        self.ch_sparsity_dict = {}\n        if ch_sparsity_dict is not None:\n            for module in ch_sparsity_dict:\n                sparsity = ch_sparsity_dict[module]\n                for submodule in module.modules():\n                    prunable_types = tuple([ops.type2class(\n                        prunable_type) for prunable_type in self.DG.REGISTERED_PRUNERS.keys()])\n                    if isinstance(submodule, prunable_types):\n                        self.ch_sparsity_dict[submodule] = self.iterative_sparsity_scheduler(\n                            sparsity, self.iterative_steps\n                        )\n\n        # detect group convs & group norms\n        for m in self.model.modules():\n            if isinstance(m, ops.TORCH_CONV) \\\n                and m.groups > 1 \\\n                    and m.groups != m.out_channels:\n                self.channel_groups[m] = m.groups\n            if isinstance(m, ops.TORCH_GROUPNORM):\n                self.channel_groups[m] = m.num_groups\n        \n        if self.global_pruning:\n            initial_total_channels = 0\n            for group in self.DG.get_all_groups(ignored_layers=self.ignored_layers, root_module_types=self.root_module_types):\n                ch_groups = self.get_channel_groups(group)\n                # utils.count_prunable_out_channels( group[0][0].target.module )\n                initial_total_channels += (self.DG.get_out_channels(\n                    group[0][0].target.module) // ch_groups)\n            self.initial_total_channels = initial_total_channels\n    \n    def pruning_history(self):\n        return self.DG.pruning_history()\n\n    def load_pruning_history(self, pruning_history):\n        self.DG.load_pruning_history(pruning_history)\n\n    def get_target_sparsity(self, module):\n        s = self.ch_sparsity_dict.get(module, self.per_step_ch_sparsity)[\n            self.current_step]\n        return min(s, self.max_ch_sparsity)\n\n    def reset(self):\n        self.current_step = 0\n\n    def regularize(self, model, loss):\n        \"\"\" Model regularizor\n        \"\"\"\n        pass\n\n    def step(self, interactive=False):\n        self.current_step += 1\n        if self.global_pruning:\n            if interactive:\n                return self.prune_global()\n            else:\n                for group in self.prune_global():\n                    group.prune()\n        else:\n            if interactive:\n                return self.prune_local()\n            else:\n                for group in self.prune_local():\n                    group.prune()\n\n    def estimate_importance(self, group, ch_groups=1):\n        return self.importance(group, 
ch_groups=ch_groups)\n\n    def _check_sparsity(self, group):\n        for dep, _ in group:\n            module = dep.target.module\n            pruning_fn = dep.handler\n            if dep.target.type == ops.OPTYPE.PARAMETER:\n                continue\n            if self.DG.is_out_channel_pruning_fn(pruning_fn):\n                target_sparsity = self.get_target_sparsity(module)\n                layer_out_ch = self.DG.get_out_channels(module)\n                if layer_out_ch is None: continue\n                if layer_out_ch < self.layer_init_out_ch[module] * (\n                    1 - self.max_ch_sparsity\n                ) or layer_out_ch == 1:\n                    return False\n\n            elif self.DG.is_in_channel_pruning_fn(pruning_fn):\n                layer_in_ch = self.DG.get_in_channels(module)\n                if layer_in_ch is None: continue\n                if layer_in_ch < self.layer_init_in_ch[module] * (\n                    1 - self.max_ch_sparsity\n                ) or layer_in_ch == 1:\n                    return False\n        return True\n\n    def get_channel_groups(self, group):\n        if isinstance(self.channel_groups, int):\n            return self.channel_groups\n        for dep, _ in group:\n            module = dep.target.module\n            if module in self.channel_groups:\n                return self.channel_groups[module]\n        return 1  # no channel grouping\n\n    def prune_local(self):\n        if self.current_step > self.iterative_steps:\n            return\n        for gi, group in enumerate(self.DG.get_all_groups(ignored_layers=self.ignored_layers, root_module_types=self.root_module_types)):\n            # check pruning rate\n            if self._check_sparsity(group):\n                module = group[0][0].target.module\n                pruning_fn = group[0][0].handler\n\n                ch_groups = self.get_channel_groups(group)\n                imp = self.estimate_importance(group, ch_groups=ch_groups)\n                if imp is None: continue\n                \n                os.makedirs('run/pruning_logs', exist_ok=True)\n                # draw bar for imp\n                plt.figure()\n                plt.bar(range(len(imp)), imp.cpu().numpy())\n                plt.savefig(f'run/pruning_logs/imp_{gi}.png')\n                plt.close()\n\n                current_channels = self.DG.get_out_channels(module)\n                target_sparsity = self.get_target_sparsity(module)\n                n_pruned = current_channels - int(\n                    self.layer_init_out_ch[module] *\n                    (1 - target_sparsity)\n                )\n\n                if self.round_to:\n                    n_pruned = n_pruned - (n_pruned % self.round_to)\n    \n                if n_pruned <= 0:\n                    continue\n                if ch_groups > 1:\n                    group_size = current_channels // ch_groups\n                    pruning_idxs = []\n                    n_pruned_per_group = n_pruned // ch_groups # max(1, n_pruned // ch_groups)\n                    for chg in range(ch_groups):\n                        sub_group_imp = imp[chg*group_size: (chg+1)*group_size]\n                        sub_imp_argsort = torch.argsort(sub_group_imp)\n                        sub_pruning_idxs = sub_imp_argsort[:n_pruned_per_group]+chg*group_size\n                        pruning_idxs.append(sub_pruning_idxs)\n                    pruning_idxs = torch.cat(pruning_idxs, 0)\n                else:\n                    imp_argsort = torch.argsort(imp)\n     
               pruning_idxs = imp_argsort[:(n_pruned//ch_groups)]\n                    \n                group = self.DG.get_pruning_group(\n                    module, pruning_fn, pruning_idxs.tolist())\n                if self.DG.check_pruning_group(group):\n                    yield group\n\n    def prune_global(self):\n        if self.current_step > self.iterative_steps:\n            return\n        global_importance = []\n        for group in self.DG.get_all_groups(ignored_layers=self.ignored_layers, root_module_types=self.root_module_types):\n            if self._check_sparsity(group):\n                ch_groups = self.get_channel_groups(group)\n                imp = self.estimate_importance(group, ch_groups=ch_groups)\n                if imp is None: continue\n                if ch_groups > 1:\n                    imp = imp[:len(imp)//ch_groups]\n                global_importance.append((group, ch_groups, imp))\n\n        imp = torch.cat([local_imp[-1]\n                        for local_imp in global_importance], dim=0)\n        target_sparsity = self.per_step_ch_sparsity[self.current_step]\n        n_pruned = len(imp) - int(\n            self.initial_total_channels *\n            (1 - target_sparsity)\n        )\n        if n_pruned <= 0:\n            return\n        topk_imp, _ = torch.topk(imp, k=n_pruned, largest=False)\n        \n        # global pruning through thresholding\n        thres = topk_imp[-1]\n        for group, ch_groups, imp in global_importance:\n            module = group[0][0].target.module\n            pruning_fn = group[0][0].handler\n            pruning_indices = (imp <= thres).nonzero().view(-1)\n            if ch_groups > 1:\n                group_size = self.DG.get_out_channels(module)//ch_groups\n                pruning_indices = torch.cat(\n                    [pruning_indices+group_size*i for i in range(ch_groups)], 0)\n            if self.round_to:\n                n_pruned = len(pruning_indices)\n                n_pruned = n_pruned - (n_pruned % self.round_to)\n                pruning_indices = pruning_indices[:n_pruned]\n            group = self.DG.get_pruning_group(\n                module, pruning_fn, pruning_indices.tolist())\n            if self.DG.check_pruning_group(group):\n                yield group\n"
  },
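  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/metapruner_usage_sketch.py",
    "content": "# Illustrative usage sketch added for documentation purposes; not part of the original repository.\n# It shows how the MetaPruner defined in metapruner.py is typically driven: build it with dummy\n# inputs and an importance criterion, then call step() once per pruning iteration. resnet18 is a\n# stand-in model, and the sketch assumes the bundled package re-exports MetaPruner and\n# MagnitudeImportance the same way the upstream Torch-Pruning library does.\nimport torch\nimport torchvision.models as models\nimport torch_pruning as tp\n\nmodel = models.resnet18(weights=None)\nexample_inputs = torch.randn(1, 3, 224, 224)\n\npruner = tp.pruner.MetaPruner(\n    model,\n    example_inputs,\n    importance=tp.importance.MagnitudeImportance(),\n    ch_sparsity=0.5,            # remove ~50% of the channels overall\n    iterative_steps=5,          # spread the sparsity over 5 calls to step()\n    ignored_layers=[model.fc],  # keep the classifier head intact\n)\n\nfor _ in range(5):\n    pruner.step()               # prune one scheduler step\n    # ... fine-tune the model here before the next step ...\n\nprint(model)"
  },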
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/scaling_factor_pruner.py",
    "content": "from numbers import Number\nfrom typing import Callable\nfrom .metapruner import MetaPruner\nfrom .scheduler import linear_scheduler\nimport torch\nimport torch.nn as nn\nfrom .scheduler import linear_scheduler\nfrom ..import function\nfrom ... import ops, dependency\n\nclass ScalingFactorPruner(MetaPruner):\n    def __init__(\n        self,\n        model,\n        example_inputs,\n        importance,\n        reg=1e-5,\n        iterative_steps=1,\n        iterative_sparsity_scheduler: Callable = linear_scheduler,\n        ch_sparsity=0.5,\n        ch_sparsity_dict=None,\n        global_pruning=False,\n        max_ch_sparsity=1.0,\n        round_to=None,\n        channel_groups=None,\n        ignored_layers=None,\n        customized_pruners=None,\n        unwrapped_parameters=None,\n        output_transform=None,\n    ):\n        super(ScalingFactorPruner, self).__init__(\n            model=model,\n            example_inputs=example_inputs,\n            importance=importance,\n            iterative_steps=iterative_steps,\n            iterative_sparsity_scheduler=iterative_sparsity_scheduler,\n            ch_sparsity=ch_sparsity,\n            ch_sparsity_dict=ch_sparsity_dict,\n            global_pruning=global_pruning,\n            max_ch_sparsity=max_ch_sparsity,\n            round_to=round_to,\n            ignored_layers=ignored_layers,\n            customized_pruners=customized_pruners,\n            unwrapped_parameters=unwrapped_parameters,\n            output_transform=output_transform,\n            channel_groups=channel_groups,\n        )\n        self.reg = reg\n        self.groups = list(self.DG.get_all_groups())\n\n    def regularize(self, model):\n\n        for i, group in enumerate(self.groups):\n            ch_groups = self.get_channel_groups(group)\n            # Get group norm\n            #print(group)\n            group_norm = []\n            for dep, idxs in group:\n                idxs.sort()\n                layer = dep.target.module\n                prune_fn = dep.handler\n                if prune_fn == function.prune_groupnorm_out_channels:\n                    # regularize BN\n                    if layer.affine:\n                        w = layer.weight.data[idxs]\n                        local_norm = w.pow(2)\n                        group_norm.append(local_norm)\n            if len(group_norm)==0:\n                continue\n            group_norm = [gn for gn in group_norm if gn.shape[0]==group_norm[0].shape[0]]\n            group_norm = torch.stack(group_norm, 0).sum(0)\n            group_norm = group_norm.sqrt()\n            base = 16\n            scale = base**((group_norm.max() - group_norm) / (group_norm.max() - group_norm.min()))\n            \n            # Update Gradient\n            for dep, idxs in group:\n                layer = dep.target.module\n                prune_fn = dep.handler\n                if prune_fn == function.prune_groupnorm_out_channels and len(idxs)==group_norm.shape[0]:\n                    # regularize BN\n                    if layer.affine is not None:\n                        w = layer.weight.data[idxs]\n                        g = w * scale #/ group_norm * group_size\n                        layer.weight.grad.data[idxs]+=self.reg * g \n\n                        #b = layer.bias.data[idxs]\n                        #g = b * scale #/ group_norm * group_size\n                        #layer.bias.grad.data[idxs]+=self.reg * g \n"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/scheduler.py",
    "content": "\ndef linear_scheduler(ch_sparsity_dict, steps):\n    return [((i) / float(steps)) * ch_sparsity_dict for i in range(steps+1)]"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/algorithms/taylor_pruner.py",
    "content": "import torch\nimport math\nfrom .metapruner import MetaPruner\nfrom .scheduler import linear_scheduler\nfrom .. import function\nfrom ..._helpers import _FlattenIndexMapping\n\n\nclass TaylorPruner(MetaPruner):\n    def __init__(\n        self,\n        model,\n        example_inputs,\n        importance,\n        reg=1e-4,\n        alpha=4,\n        iterative_steps=1,\n        iterative_sparsity_scheduler=linear_scheduler,\n        ch_sparsity=0.5,\n        global_pruning=False,\n        channel_groups=dict(),\n        max_ch_sparsity=1.0,\n        soft_keeping_ratio=0.0,\n        ch_sparsity_dict=None,\n        round_to=None,\n        ignored_layers=None,\n        customized_pruners=None,\n        unwrapped_parameters=None,\n        output_transform=None,\n    ):\n        super(TaylorPruner, self).__init__(\n            model=model,\n            example_inputs=example_inputs,\n            importance=importance,\n            iterative_steps=iterative_steps,\n            iterative_sparsity_scheduler=iterative_sparsity_scheduler,\n            ch_sparsity=ch_sparsity,\n            ch_sparsity_dict=ch_sparsity_dict,\n            global_pruning=global_pruning,\n            channel_groups=channel_groups,\n            max_ch_sparsity=max_ch_sparsity,\n            round_to=round_to,\n            ignored_layers=ignored_layers,\n            customized_pruners=customized_pruners,\n            unwrapped_parameters=unwrapped_parameters,\n            output_transform=output_transform,\n        )\n        self.reg = reg\n        self.alpha = alpha\n        self.groups = list(self.DG.get_all_groups())\n        self.soft_keeping_ratio = soft_keeping_ratio\n        self.cnt = 0\n\n    @torch.no_grad()\n    def regularize(self, model, base=16):\n        min_avg = 0\n        cnt = 0\n        for i, group in enumerate(self.groups):\n            ch_groups = self.get_channel_groups(group)\n            group_imp = []\n\n            # Get group norm\n            for dep, idxs in group:\n                idxs.sort()\n                layer = dep.target.module\n                prune_fn = dep.handler\n\n                # Conv out_channels\n                if prune_fn in [\n                    function.prune_conv_out_channels,\n                    function.prune_linear_out_channels,\n                ]:\n                    if hasattr(layer, \"transposed\") and layer.transposed:\n                        w = layer.weight.data.transpose(1, 0)[idxs].flatten(1)\n                        dw= layer.weight.grad.data.transpose(1, 0)[idxs].flatten(1)\n                    else:\n                        w = layer.weight.data[idxs].flatten(1)\n                        dw= layer.weight.grad.data[idxs].flatten(1)\n                    wdw = w * dw \n                    local_norm = wdw.abs().sum(1)\n                    group_imp.append(local_norm)\n                \n                # Conv in_channels\n                elif prune_fn in [\n                    function.prune_conv_in_channels,\n                    function.prune_linear_in_channels,\n                ]:\n                    if hasattr(layer, \"transposed\") and layer.transposed:\n                        w = (layer.weight).flatten(1)[idxs]\n                        dw= (layer.weight.grad).flatten(1)[idxs]\n                    else:\n                        w = (layer.weight).transpose(0, 1).flatten(1)[idxs]     \n                        dw= (layer.weight.grad).transpose(0, 1).flatten(1)[idxs]     \n                    wdw = w * dw\n                    local_norm 
= wdw.abs().sum(1)\n                    group_imp.append(local_norm)\n                # BN\n                elif prune_fn == function.prune_groupnorm_out_channels:\n                    # regularize BN\n                    if layer.affine:\n                        w = layer.weight.data[idxs]\n                        dw= layer.weight.grad.data[idxs]\n                        wdw = w * dw\n                        local_norm = wdw.abs()\n                        group_imp.append(local_norm)\n\n            if len(group_imp)==0:\n                return None\n            imp_size = len(group_imp[0])\n            aligned_group_imp = []\n            for imp in group_imp:\n                if len(imp)==imp_size:\n                    aligned_group_imp.append(imp)\n            group_imp = torch.stack(aligned_group_imp, dim=0)\n            group_imp = group_imp.sum(0).abs()\n            min_avg+=float(group_imp.min())\n            cnt+=1\n            base = 16\n            scale = base**((group_imp.max() - group_imp) / (group_imp.max() - group_imp.min()))\n            \n            # Update Gradient\n            for dep, idxs in group:\n                layer = dep.target.module\n                prune_fn = dep.handler\n                if prune_fn in [\n                    function.prune_conv_out_channels,\n                    function.prune_linear_out_channels,\n                ]:\n                    w = layer.weight.data[idxs]\n                    g = w * scale.view( -1, *([1]*(len(w.shape)-1)) ) #/ group_norm.view( -1, *([1]*(len(w.shape)-1)) ) * group_size #group_size #* scale.view( -1, *([1]*(len(w.shape)-1)) )\n                    layer.weight.grad.data[idxs]+=self.reg * g \n                elif prune_fn in [\n                    function.prune_conv_in_channels,\n                    function.prune_linear_in_channels,\n                ]:\n                    w = layer.weight.data[:, idxs]\n                    g = w * scale.view( 1, -1, *([1]*(len(w.shape)-2))  ) #/ gn.view( 1, -1, *([1]*(len(w.shape)-2)) ) * group_size #* scale.view( 1, -1, *([1]*(len(w.shape)-2))  )\n                    layer.weight.grad.data[:, idxs]+=self.reg * g\n                elif prune_fn == function.prune_groupnorm_out_channels:\n                    # regularize BN\n                    if layer.affine is not None:\n                        w = layer.weight.data[idxs]\n                        g = w * scale #/ group_norm * group_size\n                        layer.weight.grad.data[idxs]+=self.reg * g \n        return min_avg / cnt"
  },
  {
    "path": "ddpm_exp/torch_pruning/pruner/function.py",
    "content": "import torch\nimport torch.nn as nn\n\nfrom .. import ops\n\nfrom copy import deepcopy\nfrom functools import reduce\nfrom operator import mul\n\nfrom abc import ABC, abstractclassmethod, abstractmethod, abstractstaticmethod\nfrom typing import Callable, Sequence, Tuple, Dict\n\n__all__=[\n    'BasePruningFunc',\n    'PrunerBox',\n\n    'prune_conv_out_channels',\n    'prune_conv_in_channels',\n    'prune_depthwise_conv_out_channels',\n    'prune_depthwise_conv_in_channels',\n    'prune_batchnorm_out_channels',\n    'prune_batchnorm_in_channels',\n    'prune_linear_out_channels',\n    'prune_linear_in_channels',\n    'prune_prelu_out_channels',\n    'prune_prelu_in_channels',\n    'prune_layernorm_out_channels',\n    'prune_layernorm_in_channels',\n    'prune_embedding_out_channels',\n    'prune_embedding_in_channels',\n    'prune_parameter_out_channels',\n    'prune_parameter_in_channels',\n    'prune_multihead_attention_out_channels',\n    'prune_multihead_attention_in_channels',\n    'prune_groupnorm_out_channels',\n    'prune_groupnorm_in_channels',\n    'prune_instancenorm_out_channels',\n    'prune_instancenorm_in_channels',\n]\n\nclass BasePruningFunc(ABC):\n    TARGET_MODULES = ops.TORCH_OTHERS  # None\n\n    def __init__(self, pruning_dim=1):\n        self.pruning_dim = pruning_dim\n\n    @abstractclassmethod\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]):\n        raise NotImplementedError\n\n    @abstractclassmethod\n    def prune_in_channels(self, layer: nn.Module, idxs: Sequence[int]):\n        raise NotImplementedError\n\n    @abstractclassmethod\n    def get_out_channels(self, layer: nn.Module):\n        raise NotImplementedError\n\n    @abstractclassmethod\n    def get_in_channels(self, layer: nn.Module):\n        raise NotImplementedError\n\n    def check(self, layer, idxs, to_output):\n        if self.TARGET_MODULES is not None:\n            assert isinstance(layer, self.TARGET_MODULES), 'Mismatched pruner {} and module {}'.format(\n                self.__str__, layer)\n        if to_output:\n            prunable_channels = self.get_out_channels(layer)\n        else:\n            prunable_channels = self.get_in_channels(layer)\n        if prunable_channels is not None:\n            assert all(idx < prunable_channels and idx >=\n                       0 for idx in idxs), \"All pruning indices should fall into [{}, {})\".format(0, prunable_channels)\n\n    def __call__(self, layer: nn.Module, idxs: Sequence[int], to_output: bool = True, inplace: bool = True, dry_run: bool = False) -> Tuple[nn.Module, int]:\n        idxs.sort()\n        self.check(layer, idxs, to_output)\n        pruning_fn = self.prune_out_channels if to_output else self.prune_in_channels\n        if not inplace:\n            layer = deepcopy(layer)\n        layer = pruning_fn(layer, idxs)\n        return layer\n\n\nclass ConvPruner(BasePruningFunc):\n    TARGET_MODULE = ops.TORCH_CONV\n\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.out_channels)) - set(idxs))\n        keep_idxs.sort()\n        layer.out_channels = layer.out_channels-len(idxs)\n        if not layer.transposed:\n            if layer.weight.grad is not None:\n                grad = layer.weight.grad.data[keep_idxs]\n            else:\n                grad = None\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[keep_idxs])\n            layer.weight.grad = grad\n        else:\n            
if layer.weight.grad is not None:\n                grad = layer.weight.grad.data[:, keep_idxs]\n            else:\n                grad = None\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[:, keep_idxs])\n            layer.weight.grad = grad\n        if layer.bias is not None:\n            if layer.bias.grad is not None:\n                grad = layer.bias.grad.data[keep_idxs]\n            else:\n                grad = None\n            layer.bias = torch.nn.Parameter(layer.bias.data[keep_idxs])\n            layer.bias.grad = grad\n        return layer\n\n    def prune_in_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.in_channels)) - set(idxs))\n        keep_idxs.sort()\n        layer.in_channels = layer.in_channels - len(idxs)\n        if layer.groups>1:\n            keep_idxs = keep_idxs[:len(keep_idxs)//layer.groups]\n        if not layer.transposed:\n            if layer.weight.grad is not None:\n                grad = layer.weight.grad.data[:, keep_idxs]\n            else:\n                grad = None\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[:, keep_idxs])\n            layer.weight.grad = grad\n        else:\n            if layer.weight.grad is not None:\n                grad = layer.weight.grad.data[keep_idxs]\n            else:\n                grad = None\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[keep_idxs])\n            layer.weight.grad = grad\n        # no bias pruning because it does not change the output channels\n        return layer\n\n    def get_out_channels(self, layer):\n        return layer.out_channels\n\n    def get_in_channels(self, layer):\n        return layer.in_channels\n\n\nclass DepthwiseConvPruner(ConvPruner):\n    TARGET_MODULE = ops.TORCH_CONV\n\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.out_channels)) - set(idxs))\n        keep_idxs.sort()\n        layer.out_channels = layer.out_channels-len(idxs)\n        layer.in_channels = layer.in_channels-len(idxs)\n        layer.groups = layer.groups-len(idxs)\n        layer.weight = torch.nn.Parameter(layer.weight.data[keep_idxs])\n        if layer.bias is not None:\n            layer.bias = torch.nn.Parameter(layer.bias.data[keep_idxs])\n        return layer\n\n    prune_in_channels = prune_out_channels\n    # def prune_input(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n    #    return self.prune_output(layer, idxs)\n\n\nclass LinearPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_LINEAR\n\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.out_features)) - set(idxs))\n        keep_idxs.sort()\n        layer.out_features = layer.out_features-len(idxs)\n        if layer.weight.grad is not None:\n            grad = layer.weight.grad.data[keep_idxs]\n        else:\n            grad = None\n        layer.weight = torch.nn.Parameter(layer.weight.data[keep_idxs])\n        layer.weight.grad = grad\n        if layer.bias is not None:\n            if layer.bias.grad is not None:\n                grad = layer.bias.grad.data[keep_idxs]\n            else:\n                grad = None\n            layer.bias = torch.nn.Parameter(layer.bias.data[keep_idxs])\n            layer.bias.grad = grad\n        return layer\n\n    def prune_in_channels(self, layer: nn.Module, 
idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.in_features)) - set(idxs))\n        keep_idxs.sort()\n        layer.in_features = layer.in_features-len(idxs)\n        if layer.weight.grad is not None:\n            grad = layer.weight.grad.data[:, keep_idxs]\n        else:\n            grad = None\n        layer.weight = torch.nn.Parameter(\n            layer.weight.data[:, keep_idxs])\n        layer.weight.grad = grad\n        return layer\n\n    def get_out_channels(self, layer):\n        return layer.out_features\n\n    def get_in_channels(self, layer):\n        return layer.in_features\n\n\nclass BatchnormPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_BATCHNORM\n\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.num_features)) - set(idxs))\n        keep_idxs.sort()\n        layer.num_features = layer.num_features-len(idxs)\n        layer.running_mean = layer.running_mean.data[keep_idxs]\n        layer.running_var = layer.running_var.data[keep_idxs]\n        if layer.affine:\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[keep_idxs])\n            layer.bias = torch.nn.Parameter(layer.bias.data[keep_idxs])\n        return layer\n\n    prune_in_channels = prune_out_channels\n    # def prune_in_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n    #    return self.prune_out_channels(layer=layer, idxs=idxs)\n\n    def get_out_channels(self, layer):\n        return layer.num_features\n\n    def get_in_channels(self, layer):\n        return layer.num_features\n\n\nclass LayernormPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_LAYERNORM\n\n    def __init__(self, metrcis=None, pruning_dim=-1):\n        super().__init__(metrcis)\n        self.pruning_dim = pruning_dim\n\n    def check(self, layer, idxs):\n        layer.dim = self.pruning_dim\n\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        pruning_dim = self.pruning_dim\n        if len(layer.normalized_shape) < -pruning_dim:\n            return layer\n        num_features = layer.normalized_shape[pruning_dim]\n        keep_idxs = torch.tensor(list(set(range(num_features)) - set(idxs)))\n        keep_idxs.sort()\n        if layer.elementwise_affine:\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data.index_select(pruning_dim, keep_idxs))\n            layer.bias = torch.nn.Parameter(\n                layer.bias.data.index_select(pruning_dim, keep_idxs))\n        if pruning_dim != -1:\n            layer.normalized_shape = layer.normalized_shape[:pruning_dim] + (\n                keep_idxs.size(0), ) + layer.normalized_shape[pruning_dim+1:]\n        else:\n            layer.normalized_shape = layer.normalized_shape[:pruning_dim] + (\n                keep_idxs.size(0), )\n        return layer\n\n    prune_in_channels = prune_out_channels\n\n    def get_out_channels(self, layer):\n        return layer.normalized_shape[self.pruning_dim]\n\n    def get_in_channels(self, layer):\n        return layer.normalized_shape[self.pruning_dim]\n\nclass GroupNormPruner(BasePruningFunc):\n    def prune_out_channels(self, layer: nn.GroupNorm, idxs: list) -> nn.Module:\n        keep_idxs = list(set(range(layer.num_channels)) - set(idxs))\n        keep_idxs.sort()\n        layer.num_channels = layer.num_channels-len(idxs)\n        if layer.affine:\n            if layer.weight.grad is not None:\n                
grad = layer.weight.grad.data[keep_idxs]\n            else:\n                grad = None\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[keep_idxs])\n            layer.weight.grad = grad\n            \n            if layer.bias.grad is not None:\n                grad = layer.bias.grad.data[keep_idxs]\n            else:\n                grad = None\n            layer.bias = torch.nn.Parameter(layer.bias.data[keep_idxs])\n            layer.bias.grad = grad\n        return layer\n    \n    prune_in_channels = prune_out_channels\n\n    def get_out_channels(self, layer):\n        return layer.num_channels\n\n    def get_in_channels(self, layer):\n        return layer.num_channels\n\nclass InstanceNormPruner(BasePruningFunc):\n    def prune_out_channels(self, layer: nn.Module, idxs: Sequence[int]) -> nn.Module:\n        keep_idxs = list(set(range(layer.num_features)) - set(idxs))\n        keep_idxs.sort()\n        layer.num_features = layer.num_features-len(idxs)\n        if layer.affine:\n            layer.weight = torch.nn.Parameter(\n                layer.weight.data[keep_idxs])\n            layer.bias = torch.nn.Parameter(layer.bias.data[keep_idxs])\n        return layer\n\n    prune_in_channels = prune_out_channels\n\n    def get_out_channels(self, layer):\n        return layer.num_features\n\n    def get_in_channels(self, layer):\n        return layer.num_features\n\n\nclass PReLUPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_PRELU\n\n    def prune_out_channels(self, layer: nn.PReLU, idxs: list) -> nn.Module:\n        if layer.num_parameters == 1:\n            return layer\n        keep_idxs = list(set(range(layer.num_parameters)) - set(idxs))\n        keep_idxs.sort()\n        layer.num_parameters = layer.num_parameters-len(idxs)\n        layer.weight = torch.nn.Parameter(layer.weight.data[keep_idxs])\n        return layer\n\n    prune_in_channels = prune_out_channels\n\n    # def prune_in_channels(self, layer:nn.Module, idxs: Sequence[int]) -> nn.Module:\n    #    return self.prune_out_channels(layer=layer, idxs=idxs)\n\n    def get_out_channels(self, layer):\n        if layer.num_parameters == 1:\n            return None\n        else:\n            return layer.num_parameters\n\n    def get_in_channels(self, layer):\n        return self.get_out_channels(layer=layer)\n\nclass EmbeddingPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_EMBED\n\n    def prune_out_channels(self, layer: nn.Embedding, idxs: list) -> nn.Module:\n        num_features = layer.embedding_dim\n        keep_idxs = list(set(range(num_features)) - set(idxs))\n        keep_idxs.sort()\n        layer.weight = torch.nn.Parameter(\n            layer.weight.data[:, keep_idxs])\n        layer.embedding_dim = len(keep_idxs)\n        return layer\n\n    prune_in_channels = prune_out_channels\n\n    # def prune_in_channels(self, layer: nn.Embedding, idxs: list)-> nn.Module:\n    #    return self.prune_out_channels(layer=layer, idxs=idxs)\n\n    def get_out_channels(self, layer):\n        return layer.embedding_dim\n\n    def get_in_channels(self, layer):\n        return self.get_out_channels(layer=layer)\n\nclass LSTMPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_LSTM\n\n    def prune_out_channels(self, layer: nn.LSTM, idxs: list) -> nn.Module:\n        assert layer.num_layers==1\n        num_layers = layer.num_layers\n        num_features = layer.hidden_size\n        keep_idxs = list(set(range(num_features)) - set(idxs))\n        keep_idxs.sort()\n        keep_idxs 
= torch.tensor(keep_idxs)\n        expanded_keep_idxs = torch.cat([ keep_idxs+i*num_features for i in range(4) ], dim=0)\n        if layer.bidirectional:\n            postfix = ['', '_reverse']\n        else:\n            postfix = ['']\n        #for l in range(num_layers):\n        for pf in postfix:\n            setattr(layer, 'weight_hh_l0'+pf, torch.nn.Parameter(\n                getattr(layer, 'weight_hh_l0'+pf).data[expanded_keep_idxs]))\n            if layer.bias:\n                setattr(layer, 'bias_hh_l0'+pf, torch.nn.Parameter(\n                    getattr(layer, 'bias_hh_l0'+pf).data[expanded_keep_idxs]))\n            setattr(layer, 'weight_hh_l0'+pf, torch.nn.Parameter(\n                getattr(layer, 'weight_hh_l0'+pf).data[:, keep_idxs]))\n\n            setattr(layer, 'weight_ih_l0'+pf, torch.nn.Parameter(\n                getattr(layer, 'weight_ih_l0'+pf).data[expanded_keep_idxs]))\n            if layer.bias:\n                setattr(layer, 'bias_ih_l0'+pf, torch.nn.Parameter(\n                    getattr(layer, 'bias_ih_l0'+pf).data[expanded_keep_idxs]))\n        layer.hidden_size = len(keep_idxs)\n\n    def prune_in_channels(self, layer: nn.LSTM, idxs: list):\n        num_features = layer.input_size\n        keep_idxs = list(set(range(num_features)) - set(idxs))\n        keep_idxs.sort()\n        setattr(layer, 'weight_ih_l0', torch.nn.Parameter(\n                    getattr(layer, 'weight_ih_l0').data[:, keep_idxs]))\n        if layer.bidirectional:\n            setattr(layer, 'weight_ih_l0_reverse', torch.nn.Parameter(\n                    getattr(layer, 'weight_ih_l0_reverse').data[:, keep_idxs]))\n        layer.input_size = len(keep_idxs)\n\n    def get_out_channels(self, layer):\n        return layer.hidden_size\n        \n    def get_in_channels(self, layer):\n        return layer.input_size\n    \n\nclass ParameterPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_PARAMETER\n    def __init__(self, pruning_dim=-1):\n        super().__init__(pruning_dim=pruning_dim)\n        \n    def prune_out_channels(self, tensor, idxs: list) -> nn.Module:\n        keep_idxs = list(set(range(tensor.data.shape[self.pruning_dim])) - set(idxs))\n        keep_idxs.sort()\n        pruned_parameter = nn.Parameter(torch.index_select(\n            tensor.data, self.pruning_dim, torch.LongTensor(keep_idxs).to(tensor.device)))\n        return pruned_parameter\n\n    prune_in_channels = prune_out_channels\n\n    def get_out_channels(self, parameter):\n        return parameter.shape[self.pruning_dim]\n\n    def get_in_channels(self, parameter):\n        return parameter.shape[self.pruning_dim]\n\n\nclass MultiheadAttentionPruner(BasePruningFunc):\n    TARGET_MODULES = ops.TORCH_MHA\n\n    def check(self, layer, idxs, to_output):\n        super().check(layer, idxs, to_output)\n        assert (layer.embed_dim - len(idxs)) % layer.num_heads == 0, \"embed_dim (%d) of MultiheadAttention after pruning must divide evenly by `num_heads` (%d)\" % (layer.embed_dim, layer.num_heads)\n\n    def prune_out_channels(self, layer, idxs: list) -> nn.Module:\n        keep_idxs = list(set(range(layer.embed_dim)) - set(idxs))\n        keep_idxs.sort()\n\n\n        if layer.q_proj_weight is not None:\n            layer.q_proj_weight = nn.Parameter(torch.index_select(\n                layer.q_proj_weight.data, 0, torch.LongTensor(keep_idxs)))\n        if layer.k_proj_weight is not None:\n            layer.q_proj_weight = nn.Parameter(torch.index_select(\n                layer.q_proj_weight.data, 0, 
torch.LongTensor(keep_idxs)))\n        if layer.v_proj_weight is not None:\n            layer.v_proj_weight = nn.Parameter(torch.index_select(\n                layer.v_proj_weight.data, 0, torch.LongTensor(keep_idxs)))\n\n\n        pruning_idxs_repeated = idxs + \\\n            [i+layer.embed_dim for i in idxs] + \\\n            [i+2*layer.embed_dim for i in idxs]\n        keep_idxs_3x_repeated = list(\n            set(range(3*layer.embed_dim)) - set(pruning_idxs_repeated))\n        keep_idxs_3x_repeated.sort()\n        if layer.in_proj_weight is not None:\n            layer.in_proj_weight = nn.Parameter(torch.index_select(\n                layer.in_proj_weight.data, 0, torch.LongTensor(keep_idxs_3x_repeated)))\n            layer.in_proj_weight = nn.Parameter(torch.index_select(\n                layer.in_proj_weight.data, 1, torch.LongTensor(keep_idxs)))\n        if layer.in_proj_bias is not None:\n            layer.in_proj_bias = nn.Parameter(torch.index_select(\n                layer.in_proj_bias.data, 0, torch.LongTensor(keep_idxs_3x_repeated)))\n\n\n        if layer.bias_k is not None:\n            layer.bias_k = nn.Parameter(torch.index_select(\n                layer.bias_k.data, 2, torch.LongTensor(keep_idxs)))\n        if layer.bias_v is not None:\n            layer.bias_v = nn.Parameter(torch.index_select(\n                layer.bias_v.data, 2, torch.LongTensor(keep_idxs)))\n\n        linear = layer.out_proj\n        keep_idxs = list(set(range(linear.out_features)) - set(idxs))\n        keep_idxs.sort()\n        linear.out_features = linear.out_features-len(idxs)\n        linear.weight = torch.nn.Parameter(\n            linear.weight.data[keep_idxs])\n        if linear.bias is not None:\n            linear.bias = torch.nn.Parameter(\n                linear.bias.data[keep_idxs])\n        keep_idxs = list(set(range(linear.in_features)) - set(idxs))\n        keep_idxs.sort()\n        linear.in_features = linear.in_features-len(idxs)\n        linear.weight = torch.nn.Parameter(\n            linear.weight.data[:, keep_idxs])\n\n        layer.embed_dim = layer.embed_dim - len(idxs)\n        layer.head_dim = layer.embed_dim // layer.num_heads\n        layer.kdim = layer.embed_dim\n        layer.vdim = layer.embed_dim\n        return layer\n\n    prune_in_channels = prune_out_channels\n\n    def get_out_channels(self, layer):\n        return layer.embed_dim\n\n    def get_in_channels(self, layer):\n        return self.get_out_channels(layer)\n\nPrunerBox = {\n    ops.OPTYPE.CONV: ConvPruner(),\n    ops.OPTYPE.LINEAR: LinearPruner(),\n    ops.OPTYPE.BN: BatchnormPruner(),\n    ops.OPTYPE.DEPTHWISE_CONV: DepthwiseConvPruner(),\n    ops.OPTYPE.PRELU: PReLUPruner(),\n    ops.OPTYPE.LN: LayernormPruner(),\n    ops.OPTYPE.EMBED: EmbeddingPruner(),\n    ops.OPTYPE.PARAMETER: ParameterPruner(),\n    ops.OPTYPE.MHA: MultiheadAttentionPruner(),\n    ops.OPTYPE.LSTM: LSTMPruner(),\n    ops.OPTYPE.GN: GroupNormPruner(),\n    ops.OPTYPE.IN: InstanceNormPruner(),\n}\n\n# Alias\nprune_conv_out_channels = PrunerBox[ops.OPTYPE.CONV].prune_out_channels\nprune_conv_in_channels = PrunerBox[ops.OPTYPE.CONV].prune_in_channels\n\nprune_depthwise_conv_out_channels = PrunerBox[ops.OPTYPE.DEPTHWISE_CONV].prune_out_channels\nprune_depthwise_conv_in_channels = PrunerBox[ops.OPTYPE.DEPTHWISE_CONV].prune_in_channels\n\nprune_batchnorm_out_channels = PrunerBox[ops.OPTYPE.BN].prune_out_channels\nprune_batchnorm_in_channels = PrunerBox[ops.OPTYPE.BN].prune_in_channels\n\nprune_linear_out_channels = 
PrunerBox[ops.OPTYPE.LINEAR].prune_out_channels\nprune_linear_in_channels = PrunerBox[ops.OPTYPE.LINEAR].prune_in_channels\n\nprune_prelu_out_channels = PrunerBox[ops.OPTYPE.PRELU].prune_out_channels\nprune_prelu_in_channels = PrunerBox[ops.OPTYPE.PRELU].prune_in_channels\n\nprune_layernorm_out_channels = PrunerBox[ops.OPTYPE.LN].prune_out_channels\nprune_layernorm_in_channels = PrunerBox[ops.OPTYPE.LN].prune_in_channels\n\nprune_embedding_out_channels = PrunerBox[ops.OPTYPE.EMBED].prune_out_channels\nprune_embedding_in_channels = PrunerBox[ops.OPTYPE.EMBED].prune_in_channels\n\nprune_parameter_out_channels = PrunerBox[ops.OPTYPE.PARAMETER].prune_out_channels\nprune_parameter_in_channels = PrunerBox[ops.OPTYPE.PARAMETER].prune_in_channels\n\nprune_multihead_attention_out_channels = PrunerBox[ops.OPTYPE.MHA].prune_out_channels\nprune_multihead_attention_in_channels = PrunerBox[ops.OPTYPE.MHA].prune_in_channels\n\nprune_lstm_out_channels = PrunerBox[ops.OPTYPE.LSTM].prune_out_channels\nprune_lstm_in_channels = PrunerBox[ops.OPTYPE.LSTM].prune_in_channels\n\nprune_groupnorm_out_channels = PrunerBox[ops.OPTYPE.GN].prune_out_channels\nprune_groupnorm_in_channels = PrunerBox[ops.OPTYPE.GN].prune_in_channels\n\nprune_instancenorm_out_channels = PrunerBox[ops.OPTYPE.IN].prune_out_channels\nprune_instancenorm_in_channels = PrunerBox[ops.OPTYPE.IN].prune_in_channels"
  },
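  {
    "path": "ddpm_exp/torch_pruning/pruner/function_usage_sketch.py",
    "content": "# Illustrative sketch added for documentation purposes; not part of the original repository.\n# Demonstrates the low-level pruning functions exported by function.py: each alias such as\n# prune_conv_out_channels removes the given channel indices in place and keeps the paired\n# parameters (bias, grads, running stats) consistent. The import path assumes the bundled\n# package is importable as `torch_pruning`.\nimport torch\nimport torch.nn as nn\nfrom torch_pruning.pruner import function\n\nconv = nn.Conv2d(16, 32, kernel_size=3, padding=1)\nbn = nn.BatchNorm2d(32)\n\n# Drop output channels 0, 5 and 9 from the conv, then remove the same\n# indices from the BatchNorm that consumes its output.\nidxs = [0, 5, 9]\nfunction.prune_conv_out_channels(conv, idxs)\nfunction.prune_batchnorm_out_channels(bn, idxs)\n\nx = torch.randn(2, 16, 8, 8)\nout = bn(conv(x))\nprint(out.shape)  # expected: torch.Size([2, 29, 8, 8])"
  },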
  {
    "path": "ddpm_exp/torch_pruning/utils/__init__.py",
    "content": "from .utils import *\nfrom .op_counter import count_ops_and_params"
  },
  {
    "path": "ddpm_exp/torch_pruning/utils/op_counter.py",
    "content": "'''\nThis opcounter is adapted from https://github.com/sovrasov/flops-counter.pytorch\n\nCopyright (C) 2021 Sovrasov V. - All Rights Reserved\n * You may use, distribute and modify this code under the\n * terms of the MIT license.\n * You should have received a copy of the MIT license with\n * this file. If not visit https://opensource.org/licenses/MIT\n'''\n\nimport numpy as np\nimport torch.nn as nn\nimport torch\n\n@torch.no_grad()\ndef count_ops_and_params(model, example_inputs):\n    global CUSTOM_MODULES_MAPPING\n    model = copy.deepcopy(model)\n    flops_model = add_flops_counting_methods(model)\n    flops_model.eval()\n    flops_model.start_flops_count(ost=sys.stdout, verbose=False,\n                                  ignore_list=[])\n    if isinstance(example_inputs, dict):\n        _ = flops_model(**example_inputs)\n    elif isinstance(example_inputs, (tuple, list)):\n        _ = flops_model(*example_inputs)\n    else:\n        _ = flops_model(example_inputs)\n    flops_count, params_count = flops_model.compute_average_flops_cost()\n    flops_model.stop_flops_count()\n    CUSTOM_MODULES_MAPPING = {}\n    return flops_count, params_count\n\n\ndef empty_flops_counter_hook(module, input, output):\n    module.__flops__ += 0\n\n\ndef upsample_flops_counter_hook(module, input, output):\n    output_size = output[0]\n    batch_size = output_size.shape[0]\n    output_elements_count = batch_size\n    for val in output_size.shape[1:]:\n        output_elements_count *= val\n    module.__flops__ += int(output_elements_count)\n\n\ndef relu_flops_counter_hook(module, input, output):\n    active_elements_count = output.numel()\n    module.__flops__ += int(active_elements_count)\n\n\ndef linear_flops_counter_hook(module, input, output):\n    input = input[0]\n    # pytorch checks dimensions, so here we don't care much\n    output_last_dim = output.shape[-1]\n    bias_flops = output_last_dim if module.bias is not None else 0\n    module.__flops__ += int(np.prod(input.shape) * output_last_dim + bias_flops)\n\n\ndef pool_flops_counter_hook(module, input, output):\n    input = input[0]\n    module.__flops__ += int(np.prod(input.shape))\n\n\ndef bn_flops_counter_hook(module, input, output):\n    input = input[0]\n\n    batch_flops = np.prod(input.shape)\n    if module.affine:\n        batch_flops *= 2\n    module.__flops__ += int(batch_flops)\n\n\ndef conv_flops_counter_hook(conv_module, input, output):\n    # Can have multiple inputs, getting the first one\n    input = input[0]\n\n    batch_size = input.shape[0]\n    output_dims = list(output.shape[2:])\n\n    kernel_dims = list(conv_module.kernel_size)\n    in_channels = conv_module.in_channels\n    out_channels = conv_module.out_channels\n    groups = conv_module.groups\n\n    filters_per_channel = out_channels // groups\n    conv_per_position_flops = int(np.prod(kernel_dims)) * \\\n        in_channels * filters_per_channel\n\n    active_elements_count = batch_size * int(np.prod(output_dims))\n\n    overall_conv_flops = conv_per_position_flops * active_elements_count\n\n    bias_flops = 0\n\n    if conv_module.bias is not None:\n\n        bias_flops = out_channels * active_elements_count\n\n    overall_flops = overall_conv_flops + bias_flops\n\n    conv_module.__flops__ += int(overall_flops)\n\n\ndef rnn_flops(flops, rnn_module, w_ih, w_hh, input_size):\n    # matrix matrix mult ih state and internal state\n    flops += w_ih.shape[0]*w_ih.shape[1]\n    # matrix matrix mult hh state and internal state\n    flops += 
w_hh.shape[0]*w_hh.shape[1]\n    if isinstance(rnn_module, (nn.RNN, nn.RNNCell)):\n        # add both operations\n        flops += rnn_module.hidden_size\n    elif isinstance(rnn_module, (nn.GRU, nn.GRUCell)):\n        # hadamard of r\n        flops += rnn_module.hidden_size\n        # adding operations from both states\n        flops += rnn_module.hidden_size*3\n        # last two hadamard product and add\n        flops += rnn_module.hidden_size*3\n    elif isinstance(rnn_module, (nn.LSTM, nn.LSTMCell)):\n        # adding operations from both states\n        flops += rnn_module.hidden_size*4\n        # two hadamard product and add for C state\n        flops += rnn_module.hidden_size + rnn_module.hidden_size + rnn_module.hidden_size\n        # final hadamard\n        flops += rnn_module.hidden_size + rnn_module.hidden_size + rnn_module.hidden_size\n    return flops\n\n\ndef rnn_flops_counter_hook(rnn_module, input, output):\n    \"\"\"\n    Takes into account batch goes at first position, contrary\n    to pytorch common rule (but actually it doesn't matter).\n    If sigmoid and tanh are hard, only a comparison FLOPS should be accurate\n    \"\"\"\n    flops = 0\n    # input is a tuple containing a sequence to process and (optionally) hidden state\n    inp = input[0]\n    batch_size = inp[0].shape[0]\n    seq_length = inp[0].shape[1]\n    num_layers = rnn_module.num_layers\n\n    for i in range(num_layers):\n        w_ih = rnn_module.__getattr__('weight_ih_l' + str(i))\n        w_hh = rnn_module.__getattr__('weight_hh_l' + str(i))\n        if i == 0:\n            input_size = rnn_module.input_size\n        else:\n            input_size = rnn_module.hidden_size\n        flops = rnn_flops(flops, rnn_module, w_ih, w_hh, input_size)\n        if rnn_module.bias:\n            b_ih = rnn_module.__getattr__('bias_ih_l' + str(i))\n            b_hh = rnn_module.__getattr__('bias_hh_l' + str(i))\n            flops += b_ih.shape[0] + b_hh.shape[0]\n\n    flops *= batch_size\n    flops *= seq_length\n    if rnn_module.bidirectional:\n        flops *= 2\n    rnn_module.__flops__ += int(flops)\n\n\ndef rnn_cell_flops_counter_hook(rnn_cell_module, input, output):\n    flops = 0\n    inp = input[0]\n    batch_size = inp.shape[0]\n    w_ih = rnn_cell_module.__getattr__('weight_ih')\n    w_hh = rnn_cell_module.__getattr__('weight_hh')\n    input_size = inp.shape[1]\n    flops = rnn_flops(flops, rnn_cell_module, w_ih, w_hh, input_size)\n    if rnn_cell_module.bias:\n        b_ih = rnn_cell_module.__getattr__('bias_ih')\n        b_hh = rnn_cell_module.__getattr__('bias_hh')\n        flops += b_ih.shape[0] + b_hh.shape[0]\n\n    flops *= batch_size\n    rnn_cell_module.__flops__ += int(flops)\n\n\ndef multihead_attention_counter_hook(multihead_attention_module, input, output):\n    flops = 0\n    q, k, v = input\n\n    batch_first = multihead_attention_module.batch_first \\\n        if hasattr(multihead_attention_module, 'batch_first') else False\n    if batch_first:\n        batch_size = q.shape[0]\n        len_idx = 1\n    else:\n        batch_size = q.shape[1]\n        len_idx = 0\n\n    dim_idx = 2\n\n    qdim = q.shape[dim_idx]\n    kdim = k.shape[dim_idx]\n    vdim = v.shape[dim_idx]\n\n    qlen = q.shape[len_idx]\n    klen = k.shape[len_idx]\n    vlen = v.shape[len_idx]\n\n    num_heads = multihead_attention_module.num_heads\n    assert qdim == multihead_attention_module.embed_dim\n\n    if multihead_attention_module.kdim is None:\n        assert kdim == qdim\n    if multihead_attention_module.vdim is 
None:\n        assert vdim == qdim\n\n    flops = 0\n\n    # Q scaling\n    flops += qlen * qdim\n\n    # Initial projections\n    flops += (\n        (qlen * qdim * qdim)  # QW\n        + (klen * kdim * kdim)  # KW\n        + (vlen * vdim * vdim)  # VW\n    )\n\n    if multihead_attention_module.in_proj_bias is not None:\n        flops += (qlen + klen + vlen) * qdim\n\n    # attention heads: scale, matmul, softmax, matmul\n    qk_head_dim = qdim // num_heads\n    v_head_dim = vdim // num_heads\n\n    head_flops = (\n        (qlen * klen * qk_head_dim)  # QK^T\n        + (qlen * klen)  # softmax\n        + (qlen * klen * v_head_dim)  # AV\n    )\n\n    flops += num_heads * head_flops\n\n    # final projection, bias is always enabled\n    flops += qlen * vdim * (vdim + 1)\n\n    flops *= batch_size\n    multihead_attention_module.__flops__ += int(flops)\n\n\nCUSTOM_MODULES_MAPPING = {}\n\nMODULES_MAPPING = {\n    # convolutions\n    nn.Conv1d: conv_flops_counter_hook,\n    nn.Conv2d: conv_flops_counter_hook,\n    nn.Conv3d: conv_flops_counter_hook,\n    # activations\n    nn.ReLU: relu_flops_counter_hook,\n    nn.PReLU: relu_flops_counter_hook,\n    nn.ELU: relu_flops_counter_hook,\n    nn.LeakyReLU: relu_flops_counter_hook,\n    nn.ReLU6: relu_flops_counter_hook,\n    # poolings\n    nn.MaxPool1d: pool_flops_counter_hook,\n    nn.AvgPool1d: pool_flops_counter_hook,\n    nn.AvgPool2d: pool_flops_counter_hook,\n    nn.MaxPool2d: pool_flops_counter_hook,\n    nn.MaxPool3d: pool_flops_counter_hook,\n    nn.AvgPool3d: pool_flops_counter_hook,\n    nn.AdaptiveMaxPool1d: pool_flops_counter_hook,\n    nn.AdaptiveAvgPool1d: pool_flops_counter_hook,\n    nn.AdaptiveMaxPool2d: pool_flops_counter_hook,\n    nn.AdaptiveAvgPool2d: pool_flops_counter_hook,\n    nn.AdaptiveMaxPool3d: pool_flops_counter_hook,\n    nn.AdaptiveAvgPool3d: pool_flops_counter_hook,\n    # BNs\n    nn.BatchNorm1d: bn_flops_counter_hook,\n    nn.BatchNorm2d: bn_flops_counter_hook,\n    nn.BatchNorm3d: bn_flops_counter_hook,\n\n    nn.InstanceNorm1d: bn_flops_counter_hook,\n    nn.InstanceNorm2d: bn_flops_counter_hook,\n    nn.InstanceNorm3d: bn_flops_counter_hook,\n    nn.GroupNorm: bn_flops_counter_hook,\n    # FC\n    nn.Linear: linear_flops_counter_hook,\n    # Upscale\n    nn.Upsample: upsample_flops_counter_hook,\n    # Deconvolution\n    nn.ConvTranspose1d: conv_flops_counter_hook,\n    nn.ConvTranspose2d: conv_flops_counter_hook,\n    nn.ConvTranspose3d: conv_flops_counter_hook,\n    # RNN\n    nn.RNN: rnn_flops_counter_hook,\n    nn.GRU: rnn_flops_counter_hook,\n    nn.LSTM: rnn_flops_counter_hook,\n    nn.RNNCell: rnn_cell_flops_counter_hook,\n    nn.LSTMCell: rnn_cell_flops_counter_hook,\n    nn.GRUCell: rnn_cell_flops_counter_hook,\n    nn.MultiheadAttention: multihead_attention_counter_hook\n}\n\nif hasattr(nn, 'GELU'):\n    MODULES_MAPPING[nn.GELU] = relu_flops_counter_hook\n\n\nimport sys\nfrom functools import partial\nimport torch.nn as nn\nimport copy\n\ndef accumulate_flops(self):\n    if is_supported_instance(self):\n        return self.__flops__\n    else:\n        sum = 0\n        for m in self.children():\n            sum += m.accumulate_flops()\n        return sum\n\n\ndef get_model_parameters_number(model):\n    params_num = sum(p.numel() for p in model.parameters() if p.requires_grad)\n    return params_num\n\n\ndef add_flops_counting_methods(net_main_module):\n    # adding additional methods to the existing module object,\n    # this is done this way so that each function has access to self object\n    
net_main_module.start_flops_count = start_flops_count.__get__(net_main_module)\n    net_main_module.stop_flops_count = stop_flops_count.__get__(net_main_module)\n    net_main_module.reset_flops_count = reset_flops_count.__get__(net_main_module)\n    net_main_module.compute_average_flops_cost = compute_average_flops_cost.__get__(\n                                                    net_main_module)\n\n    net_main_module.reset_flops_count()\n\n    return net_main_module\n\n\ndef compute_average_flops_cost(self):\n    \"\"\"\n    A method that will be available after add_flops_counting_methods() is called\n    on a desired net object.\n    Returns current mean flops consumption per image.\n    \"\"\"\n\n    for m in self.modules():\n        m.accumulate_flops = accumulate_flops.__get__(m)\n\n    flops_sum = self.accumulate_flops()\n\n    for m in self.modules():\n        if hasattr(m, 'accumulate_flops'):\n            del m.accumulate_flops\n\n    params_sum = get_model_parameters_number(self)\n    return flops_sum / self.__batch_counter__, params_sum\n\n\ndef start_flops_count(self, **kwargs):\n    \"\"\"\n    A method that will be available after add_flops_counting_methods() is called\n    on a desired net object.\n    Activates the computation of mean flops consumption per image.\n    Call it before you run the network.\n    \"\"\"\n    add_batch_counter_hook_function(self)\n\n    seen_types = set()\n\n    def add_flops_counter_hook_function(module, ost, verbose, ignore_list):\n        if type(module) in ignore_list:\n            seen_types.add(type(module))\n            if is_supported_instance(module):\n                module.__params__ = 0\n        elif is_supported_instance(module):\n            if hasattr(module, '__flops_handle__'):\n                return\n            if type(module) in CUSTOM_MODULES_MAPPING:\n                handle = module.register_forward_hook(\n                                        CUSTOM_MODULES_MAPPING[type(module)])\n            else:\n                handle = module.register_forward_hook(MODULES_MAPPING[type(module)])\n            module.__flops_handle__ = handle\n            seen_types.add(type(module))\n        else:\n            if verbose and not type(module) in (nn.Sequential, nn.ModuleList) and \\\n               not type(module) in seen_types:\n                print('Warning: module ' + type(module).__name__ +\n                      ' is treated as a zero-op.', file=ost)\n            seen_types.add(type(module))\n\n    self.apply(partial(add_flops_counter_hook_function, **kwargs))\n\n\ndef stop_flops_count(self):\n    \"\"\"\n    A method that will be available after add_flops_counting_methods() is called\n    on a desired net object.\n    Stops computing the mean flops consumption per image.\n    Call whenever you want to pause the computation.\n    \"\"\"\n    remove_batch_counter_hook_function(self)\n    self.apply(remove_flops_counter_hook_function)\n    self.apply(remove_flops_counter_variables)\n\n\ndef reset_flops_count(self):\n    \"\"\"\n    A method that will be available after add_flops_counting_methods() is called\n    on a desired net object.\n    Resets statistics computed so far.\n    \"\"\"\n    add_batch_counter_variables_or_reset(self)\n    self.apply(add_flops_counter_variable_or_reset)\n\n\n# ---- Internal functions\ndef batch_counter_hook(module, input, output):\n    batch_size = 1\n    if len(input) > 0:\n        # Can have multiple inputs, getting the first one\n        input = input[0]\n        batch_size = len(input)\n    
else:\n        pass\n        print('Warning! No positional inputs found for a module,'\n              ' assuming batch size is 1.')\n    module.__batch_counter__ += batch_size\n\n\ndef add_batch_counter_variables_or_reset(module):\n\n    module.__batch_counter__ = 0\n\n\ndef add_batch_counter_hook_function(module):\n    if hasattr(module, '__batch_counter_handle__'):\n        return\n\n    handle = module.register_forward_hook(batch_counter_hook)\n    module.__batch_counter_handle__ = handle\n\n\ndef remove_batch_counter_hook_function(module):\n    if hasattr(module, '__batch_counter_handle__'):\n        module.__batch_counter_handle__.remove()\n        del module.__batch_counter_handle__\n\n\ndef add_flops_counter_variable_or_reset(module):\n    if is_supported_instance(module):\n        if hasattr(module, '__flops__') or hasattr(module, '__params__'):\n            print('Warning: variables __flops__ or __params__ are already '\n                  'defined for the module' + type(module).__name__ +\n                  ' ptflops can affect your code!')\n            module.__ptflops_backup_flops__ = module.__flops__\n            module.__ptflops_backup_params__ = module.__params__\n        module.__flops__ = 0\n        module.__params__ = get_model_parameters_number(module)\n\n\ndef is_supported_instance(module):\n    if type(module) in MODULES_MAPPING or type(module) in CUSTOM_MODULES_MAPPING:\n        return True\n    return False\n\n\ndef remove_flops_counter_hook_function(module):\n    if is_supported_instance(module):\n        if hasattr(module, '__flops_handle__'):\n            module.__flops_handle__.remove()\n            del module.__flops_handle__\n\n\ndef remove_flops_counter_variables(module):\n    if is_supported_instance(module):\n        if hasattr(module, '__flops__'):\n            del module.__flops__\n            if hasattr(module, '__ptflops_backup_flops__'):\n                module.__flops__ = module.__ptflops_backup_flops__\n        if hasattr(module, '__params__'):\n            del module.__params__\n            if hasattr(module, '__ptflops_backup_params__'):\n                module.__params__ = module.__ptflops_backup_params__"
  },
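  {
    "path": "ddpm_exp/torch_pruning/utils/op_counter_usage_sketch.py",
    "content": "# Illustrative sketch added for documentation purposes; not part of the original repository.\n# Shows how the adapted op counter in op_counter.py is meant to be used: it deep-copies the\n# model, registers forward hooks on supported layers, runs the example input once, and returns\n# (ops, params). resnet18 is a stand-in model; the import path assumes the bundled package is\n# importable as `torch_pruning`.\nimport torch\nimport torchvision.models as models\nfrom torch_pruning.utils import count_ops_and_params\n\nmodel = models.resnet18(weights=None).eval()\nexample_inputs = torch.randn(1, 3, 224, 224)\n\nops, params = count_ops_and_params(model, example_inputs)\nprint(f\"ops: {ops / 1e9:.2f} G, params: {params / 1e6:.2f} M\")"
  },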
  {
    "path": "ddpm_exp/torch_pruning/utils/utils.py",
    "content": "from ..ops import TORCH_CONV, TORCH_BATCHNORM, TORCH_PRELU, TORCH_LINEAR\nfrom ..ops import module2type\nimport torch\nfrom .op_counter import count_ops_and_params\nimport torch.nn as nn\n\n@torch.no_grad()\ndef count_params(module):\n    return sum([p.numel() for p in module.parameters()])\n\ndef flatten_as_list(obj):\n    if isinstance(obj, torch.Tensor):\n        return [obj]\n    elif isinstance(obj, (list, tuple)):\n        flattened_list = []\n        for sub_obj in obj:\n            flattened_list.extend(flatten_as_list(sub_obj))\n        return flattened_list\n    elif isinstance(obj, dict):\n        flattened_list = []\n        for sub_obj in obj.values():\n            flattened_list.extend(flatten_as_list(sub_obj))\n        return flattened_list\n    else:\n        return obj\n\ndef draw_computational_graph(DG, save_as, title='Computational Graph', figsize=(16, 16), dpi=200, cmap=None):\n    import numpy as np\n    import matplotlib.pyplot as plt\n    plt.style.use('bmh')\n    n_nodes = len(DG.module2node)\n    module2idx = {m: i for (i, m) in enumerate(DG.module2node.keys())}\n    G = np.zeros((n_nodes, n_nodes))\n    fill_value = 1\n    for module, node in DG.module2node.items():\n        for input_node in node.inputs:\n            G[module2idx[input_node.module], module2idx[node.module]] = fill_value\n            G[module2idx[node.module], module2idx[input_node.module]] = fill_value\n        for out_node in node.outputs:\n            G[module2idx[out_node.module], module2idx[node.module]] = fill_value\n            G[module2idx[node.module], module2idx[out_node.module]] = fill_value\n        pruner = DG.get_pruner_of_module(module)\n    fig, ax = plt.subplots(figsize=(figsize))\n    ax.imshow(G, cmap=cmap if cmap is not None else plt.get_cmap('Blues'))\n    # plt.hlines(y=np.arange(0, n_nodes)+0.5, xmin=np.full(n_nodes, 0)-0.5, xmax=np.full(n_nodes, n_nodes)-0.5, color=\"#444444\", linewidth=0.1)\n    # plt.vlines(x=np.arange(0, n_nodes)+0.5, ymin=np.full(n_nodes, 0)-0.5, ymax=np.full(n_nodes, n_nodes)-0.5, color=\"#444444\", linewidth=0.1)\n    if title is not None:\n        ax.set_title(title)\n    fig.tight_layout()\n    plt.savefig(save_as, dpi=dpi)\n    return fig, ax\n\n\ndef draw_groups(DG, save_as, title='Group', figsize=(16, 16), dpi=200, cmap=None):\n    import numpy as np\n    import matplotlib.pyplot as plt\n    plt.style.use('bmh')\n    n_nodes = 2*len(DG.module2node)\n    node2idx = {m: i for (i, m) in enumerate(DG.module2node.values())}\n    G = np.zeros((n_nodes, n_nodes))\n    fill_value = 10\n    for i, (module, node) in enumerate(DG.module2node.items()):\n        pruning_fn = DG.get_pruner_of_module(module).prune_out_channels\n        prunable_ch = DG.get_out_channels(module)\n        if prunable_ch is None: continue\n        group = DG.get_pruning_group(module, pruning_fn, list(range(prunable_ch)))\n        grouped_idxs = []\n        for dep, _ in group:\n            source, target, trigger, handler = dep.source, dep.target, dep.trigger, dep.handler\n            if DG.is_out_channel_pruning_fn(trigger):\n                grouped_idxs.append(node2idx[source]*2+1)\n            else:\n                grouped_idxs.append(node2idx[source]*2)\n\n            if DG.is_out_channel_pruning_fn(handler):\n                grouped_idxs.append(node2idx[target]*2+1)\n            else:\n                grouped_idxs.append(node2idx[target]*2)\n        grouped_idxs = list(set(grouped_idxs))\n        for k1 in grouped_idxs:\n            for k2 in grouped_idxs:\n  
              G[k1, k2] = fill_value\n\n    fig, ax = plt.subplots(figsize=(figsize))\n    ax.imshow(G, cmap=cmap if cmap is not None else plt.get_cmap('Blues'))\n    # plt.hlines(y=np.arange(0, n_nodes)+0.5, xmin=np.full(n_nodes, 0)-0.5, xmax=np.full(n_nodes, n_nodes)-0.5, color=\"#999999\", linewidth=0.1)\n    # plt.vlines(x=np.arange(0, n_nodes)+0.5, ymin=np.full(n_nodes, 0)-0.5, ymax=np.full(n_nodes, n_nodes)-0.5, color=\"#999999\", linewidth=0.1)\n    if title is not None:\n        ax.set_title(title)\n    fig.tight_layout()\n    plt.savefig(save_as, dpi=dpi)\n    return fig, ax\n\n\ndef draw_dependency_graph(DG, save_as, title='Group', figsize=(16, 16), dpi=200, cmap=None):\n    import numpy as np\n    import matplotlib.pyplot as plt\n    plt.style.use('bmh')\n    n_nodes = len(DG.module2node)\n    node2idx = {node: i for (i, node) in enumerate(DG.module2node.values())}\n    G = np.zeros((2*n_nodes, 2*n_nodes))\n    fill_value = 10\n    for module, node in DG.module2node.items():\n        for dep in node.dependencies:\n            trigger = dep.trigger\n            handler = dep.handler\n            source = dep.source\n            target = dep.target\n\n            if DG.is_out_channel_pruning_fn(trigger):\n                G[2*node2idx[source]+1, 2*node2idx[target]] = fill_value\n            else:\n                G[2*node2idx[source], 2*node2idx[target]+1] = fill_value\n\n        pruner = DG.get_pruner_of_module(module)\n        if pruner.prune_out_channels == pruner.prune_in_channels:\n            G[2*node2idx[node], 2*node2idx[node]+1] = fill_value\n\n    fig, ax = plt.subplots(figsize=(figsize))\n    ax.imshow(G, cmap=cmap if cmap is not None else plt.get_cmap('Blues'))\n    # plt.hlines(y=np.arange(0, 2*n_nodes)+0.5, xmin=np.full(2*n_nodes, 0)-0.5, xmax=np.full(2*n_nodes, 2*n_nodes)-0.5, color=\"#999999\", linewidth=0.05)\n    # plt.vlines(x=np.arange(0, 2*n_nodes)+0.5, ymin=np.full(2*n_nodes, 0)-0.5, ymax=np.full(2*n_nodes, 2*n_nodes)-0.5, color=\"#999999\", linewidth=0.05)\n    if title is not None:\n        ax.set_title(title)\n    fig.tight_layout()\n    plt.savefig(save_as, dpi=dpi)\n    return fig, ax"
  },
  {
    "path": "ddpm_exp/utils.py",
    "content": "import torch, os\nfrom glob import glob\nfrom PIL import Image\n\nclass UnlabeledImageFolder(torch.utils.data.Dataset):\n    def __init__(self, root, transform=None, exts=[\"*.jpg\", \"*.png\", \"*.jpeg\", \"*.webp\"]):\n        self.root = root\n        self.files = []\n        self.transform = transform\n        for ext in exts:\n            self.files.extend(glob(os.path.join(root, '**/*.{}'.format(ext)), recursive=True))\n\n    def __len__(self):\n        return len(self.files)\n\n    def __getitem__(self, idx):\n        path = self.files[idx]\n        img = Image.open(path).convert(\"RGB\")\n        if self.transform is not None:\n            img = self.transform(img)\n        return img\n\nimport torch\n\ndef set_dropout(model, p):\n    for m in model.modules():\n        if isinstance(m, torch.nn.Dropout):\n            m.p = p\n        "
  },
  {
    "path": "ddpm_prune.py",
    "content": "from diffusers import DiffusionPipeline, DDPMPipeline, DDIMPipeline, DDIMScheduler, DDPMScheduler\nfrom diffusers.models import UNet2DModel\nimport torch_pruning as tp\nimport torch\nimport torchvision\nfrom torchvision import transforms\nimport torchvision\nfrom tqdm import tqdm\nimport os\nfrom glob import glob\nfrom PIL import Image\nimport accelerate\nimport utils\n\nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--dataset\", type=str,  default=None, help=\"path to an image folder\")\nparser.add_argument(\"--model_path\", type=str, required=True)\nparser.add_argument(\"--save_path\", type=str, required=True)\nparser.add_argument(\"--pruning_ratio\", type=float, default=0.3)\nparser.add_argument(\"--batch_size\", type=int, default=128)\nparser.add_argument(\"--device\", type=str, default='cpu')\nparser.add_argument(\"--pruner\", type=str, default='taylor', choices=['taylor', 'random', 'magnitude', 'reinit', 'diff-pruning'])\n\nparser.add_argument(\"--thr\", type=float, default=0.05, help=\"threshold for diff-pruning\")\n\nargs = parser.parse_args()\n\nbatch_size = args.batch_size\ndataset = args.dataset\n\nif __name__=='__main__':\n    \n    # loading images for gradient-based pruning\n    if args.pruner in ['taylor', 'diff-pruning']:\n        dataset = utils.get_dataset(args.dataset)\n        print(f\"Dataset size: {len(dataset)}\")\n        train_dataloader = torch.utils.data.DataLoader(\n            dataset, batch_size=args.batch_size, shuffle=True, num_workers=4, drop_last=True\n        )\n        import torch_pruning as tp\n        clean_images = next(iter(train_dataloader))\n        if isinstance(clean_images, (list, tuple)):\n            clean_images = clean_images[0]\n        clean_images = clean_images.to(args.device)\n        noise = torch.randn(clean_images.shape).to(clean_images.device)\n\n    # Loading pretrained model\n    print(\"Loading pretrained model from {}\".format(args.model_path))\n    pipeline = DDPMPipeline.from_pretrained(args.model_path).to(args.device)\n    scheduler = pipeline.scheduler\n    model = pipeline.unet.eval()\n    if 'cifar' in args.model_path:\n        example_inputs = {'sample': torch.randn(1, 3, 32, 32).to(args.device), 'timestep': torch.ones((1,)).long().to(args.device)}\n    else:\n        example_inputs = {'sample': torch.randn(1, 3, 256, 256).to(args.device), 'timestep': torch.ones((1,)).long().to(args.device)}\n\n    if args.pruning_ratio>0:\n        if args.pruner == 'taylor':\n            imp = tp.importance.TaylorImportance(multivariable=True) # standard first-order taylor expansion\n        elif args.pruner == 'random' or args.pruner=='reinit':\n            imp = tp.importance.RandomImportance()\n        elif args.pruner == 'magnitude':\n            imp = tp.importance.MagnitudeImportance()\n        elif args.pruner == 'diff-pruning':\n            imp = tp.importance.TaylorImportance(multivariable=False) # a modified version, estimating the accumulated error of weight removal\n        else:\n            raise NotImplementedError\n\n        ignored_layers = [model.conv_out]\n        channel_groups = {}\n        #from diffusers.models.attention import \n        #for m in model.modules():\n        #    if isinstance(m, AttentionBlock):\n        #        channel_groups[m.query] = m.num_heads\n        #        channel_groups[m.key] = m.num_heads\n        #        channel_groups[m.value] = m.num_heads\n        \n        pruner = tp.pruner.MagnitudePruner(\n            model,\n            
example_inputs,\n            importance=imp,\n            iterative_steps=1,\n            channel_groups=channel_groups,\n            ch_sparsity=args.pruning_ratio,\n            ignored_layers=ignored_layers,\n        )\n\n        base_macs, base_params = tp.utils.count_ops_and_params(model, example_inputs)\n        model.zero_grad()\n        model.eval()\n        import random\n\n        if args.pruner in ['taylor', 'diff-pruning']:\n            loss_max = 0\n            print(\"Accumulating gradients for pruning...\")\n            for step_k in tqdm(range(1000)):\n                timesteps = (step_k*torch.ones((args.batch_size,), device=clean_images.device)).long()\n                noisy_images = scheduler.add_noise(clean_images, noise, timesteps)\n                model_output = model(noisy_images, timesteps).sample\n                loss = torch.nn.functional.mse_loss(model_output, noise) \n                loss.backward() \n                \n                if args.pruner=='diff-pruning':\n                    if loss>loss_max: loss_max = loss\n                    if loss<loss_max * args.thr: break # taylor expansion over pruned timesteps ( L_t / L_max > thr )\n\n        for g in pruner.step(interactive=True):\n            g.prune()\n\n        # Update static attributes\n        from diffusers.models.resnet import Upsample2D, Downsample2D\n        for m in model.modules():\n            if isinstance(m, (Upsample2D, Downsample2D)):\n                m.channels = m.conv.in_channels\n                m.out_channels = m.conv.out_channels\n\n        macs, params = tp.utils.count_ops_and_params(model, example_inputs)\n        print(model)\n        print(\"#Params: {:.4f} M => {:.4f} M\".format(base_params/1e6, params/1e6))\n        print(\"#MACS: {:.4f} G => {:.4f} G\".format(base_macs/1e9, macs/1e9))\n        model.zero_grad()\n        del pruner\n\n        if args.pruner=='reinit':\n            def reset_parameters(model):\n                for m in model.modules():\n                    if hasattr(m, 'reset_parameters'):\n                        m.reset_parameters()\n            reset_parameters(model)\n\n    pipeline.save_pretrained(args.save_path)\n    if args.pruning_ratio>0:\n        os.makedirs(os.path.join(args.save_path, \"pruned\"), exist_ok=True)\n        torch.save(model, os.path.join(args.save_path, \"pruned\", \"unet_pruned.pth\"))\n\n    # Sampling images from the pruned model\n    pipeline = DDIMPipeline(\n        unet = model,\n        scheduler = DDIMScheduler.from_pretrained(args.save_path, subfolder=\"scheduler\")\n    )\n    with torch.no_grad():\n        generator = torch.Generator(device=pipeline.device).manual_seed(0)\n        pipeline.to(\"cuda\")\n        images = pipeline(num_inference_steps=100, batch_size=args.batch_size, generator=generator, output_type=\"numpy\").images\n        os.makedirs(os.path.join(args.save_path, 'vis'), exist_ok=True)\n        torchvision.utils.save_image(torch.from_numpy(images).permute([0, 3, 1, 2]), \"{}/vis/after_pruning.png\".format(args.save_path))\n        \n"
  },
  {
    "path": "ddpm_sample.py",
    "content": "from diffusers import DDIMPipeline, DDIMScheduler, UNet2DModel\nimport argparse, os, torch\nfrom tqdm import tqdm\nimport torch_pruning as tp\nimport accelerate\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--total_samples\", type=int, default=50000)\nparser.add_argument(\"--batch_size\", type=int, default=100)\nparser.add_argument(\"--output_dir\", type=str, default=\"samples\")\nparser.add_argument(\"--model_path\", type=str, default=\"samples\")\nparser.add_argument(\"--ddim_steps\", type=int, default=100)\nparser.add_argument(\"--pruned_model_ckpt\", type=str, default=None)\nparser.add_argument(\"--seed\", type=int, default=0)\nparser.add_argument(\"--skip_type\", type=str, default=\"uniform\")\n\nargs = parser.parse_args()\n\nif __name__ == \"__main__\":\n    os.makedirs(args.output_dir, exist_ok=True)\n    # pruned model\n    accelerator = accelerate.Accelerator()\n\n    if os.path.isdir(args.model_path):\n        if args.pruned_model_ckpt is not None:\n            print(\"Loading pruned model from {}\".format(args.pruned_model_ckpt))\n            unet = torch.load(args.pruned_model_ckpt).eval()\n        else:\n            print(\"Loading model from {}\".format(args.model_path))\n            subfolder = 'unet' if os.path.isdir(os.path.join(args.model_path, 'unet')) else None\n            unet = UNet2DModel.from_pretrained(args.model_path, subfolder=subfolder).eval()\n        pipeline = DDIMPipeline(\n            unet=unet,\n            scheduler=DDIMScheduler.from_pretrained(args.model_path, subfolder=\"scheduler\")\n        )\n    # standard model\n    else:  \n        print(\"Loading pretrained model from {}\".format(args.model_path))\n        pipeline = DDIMPipeline.from_pretrained(\n            args.model_path,\n        )\n\n    pipeline.scheduler.skip_type = args.skip_type\n    # Test Flops\n    pipeline.to(accelerator.device)\n    if accelerator.is_main_process:\n        if 'cifar' in args.model_path:\n            example_inputs = {'sample': torch.randn(1, 3, 32, 32).to(accelerator.device), 'timestep': torch.ones((1,)).long().to(accelerator.device)}\n        else:\n            example_inputs = {'sample': torch.randn(1, 3, 256, 256).to(accelerator.device), 'timestep': torch.ones((1,)).long().to(accelerator.device)}\n        macs, params = tp.utils.count_ops_and_params(pipeline.unet, example_inputs)\n        print(f\"MACS: {macs/1e9} G, Params: {params/1e6} M\")\n\n    # Create subfolders for each process\n    save_sub_dir = os.path.join(args.output_dir, 'process_{}'.format(accelerator.process_index))\n    os.makedirs(save_sub_dir, exist_ok=True)\n    generator = torch.Generator(device=pipeline.device).manual_seed(args.seed+accelerator.process_index)\n\n    # Set up progress bar\n    if not accelerator.is_main_process:\n        pipeline.set_progress_bar_config(disable=True)\n    \n    # Sampling\n    accelerator.wait_for_everyone()\n    with torch.no_grad():\n        # num_batches of each process\n        num_batches = (args.total_samples) // (args.batch_size * accelerator.num_processes)\n        if accelerator.is_main_process:\n            print(\"Samping {}x{}={} images with {} process(es)\".format(num_batches*args.batch_size, accelerator.num_processes, num_batches*accelerator.num_processes*args.batch_size, accelerator.num_processes))\n        for i in tqdm(range(num_batches), disable=not accelerator.is_main_process):\n            images = pipeline(batch_size=args.batch_size, num_inference_steps=args.ddim_steps, generator=generator).images\n       
     for j, image in enumerate(images):\n                filename = os.path.join(save_sub_dir, f\"{i * args.batch_size + j}.png\")\n                image.save(filename)\n\n    # Finished\n    accelerator.wait_for_everyone()\n    if accelerator.is_main_process:\n        accelerator.print(f\"Saved {num_batches*accelerator.num_processes*args.batch_size} samples to {args.output_dir}\")\n    #accelerator.end_training()\n    \n"
  },
  {
    "path": "ddpm_train.py",
    "content": "# Modifed from https://github.com/huggingface/diffusers/tree/main/examples/unconditional_image_generation\n\nimport argparse\nimport inspect\nimport logging\nimport math\nimport os, sys\n\nimport accelerate\nimport torch\nimport torch.nn.functional as F\nfrom accelerate import Accelerator\nfrom accelerate.logging import get_logger\nfrom accelerate.utils import ProjectConfiguration\nfrom packaging import version\nfrom torchvision import transforms\nimport torchvision\nfrom tqdm.auto import tqdm\nimport diffusers\nfrom diffusers import DDPMPipeline, DDPMScheduler, UNet2DModel, DDIMPipeline, DDIMScheduler\nfrom diffusers.optimization import get_scheduler\nfrom diffusers.training_utils import EMAModel\nfrom diffusers.utils import is_accelerate_version, is_tensorboard_available, is_wandb_available\n\nimport utils\n\nlogger = get_logger(__name__, log_level=\"INFO\")\n\ndef parse_args():\n    parser = argparse.ArgumentParser(description=\"Simple example of a training script.\")\n    parser.add_argument(\"--pruned_model_ckpt\", type=str, default=None)\n\n    parser.add_argument(\n        \"--dataset\",\n        type=str,\n        default=None,\n        help=(\n            \"The name of the Dataset (from the HuggingFace hub) to train on (could be your own, possibly private,\"\n            \" dataset). It can also be a path pointing to a local copy of a dataset in your filesystem,\"\n            \" or to a folder containing files that HF Datasets can understand.\"\n        ),\n    )\n    parser.add_argument(\n        \"--dataset_config_name\",\n        type=str,\n        default=None,\n        help=\"The config of the Dataset, leave as None if there's only one config.\",\n    )\n    parser.add_argument(\n        \"--model_path\",\n        type=str,\n        default=None,\n        help=\"The config of the UNet model to train, leave as None to use standard DDPM configuration.\",\n    )\n    parser.add_argument(\n        \"--train_data_dir\",\n        type=str,\n        default=None,\n        help=(\n            \"A folder containing the training data. Folder contents must follow the structure described in\"\n            \" https://huggingface.co/docs/datasets/image_dataset#imagefolder. In particular, a `metadata.jsonl` file\"\n            \" must exist to provide the captions for the images. Ignored if `dataset` is specified.\"\n        ),\n    )\n    parser.add_argument(\n        \"--output_dir\",\n        type=str,\n        default=\"ddpm-model-64\",\n        help=\"The output directory where the checkpoints will be written.\",\n    )\n    parser.add_argument(\"--overwrite_output_dir\", action=\"store_true\")\n    parser.add_argument(\n        \"--cache_dir\",\n        type=str,\n        default='./cache',\n        help=\"The directory where the downloaded models and datasets will be stored.\",\n    )\n    parser.add_argument(\n        \"--resolution\",\n        type=int,\n        default=64,\n        help=(\n            \"The resolution for input images, all the images in the train/validation dataset will be resized to this\"\n            \" resolution\"\n        ),\n    )\n    parser.add_argument(\n        \"--dropout\",\n        type=float,\n        default=0.0,\n    )\n    parser.add_argument(\n        \"--center_crop\",\n        default=False,\n        action=\"store_true\",\n        help=(\n            \"Whether to center crop the input images to the resolution. If not set, the images will be randomly\"\n            \" cropped. 
The images will be resized to the resolution first before cropping.\"\n        ),\n    )\n    parser.add_argument(\n        \"--train_batch_size\", type=int, default=16, help=\"Batch size (per device) for the training dataloader.\"\n    )\n    parser.add_argument(\n        \"--eval_batch_size\", type=int, default=16, help=\"The number of images to generate for evaluation.\"\n    )\n    parser.add_argument(\n        \"--dataloader_num_workers\",\n        type=int,\n        default=0,\n        help=(\n            \"The number of subprocesses to use for data loading. 0 means that the data will be loaded in the main\"\n            \" process.\"\n        ),\n    )\n    parser.add_argument(\n        \"--checkpoint_id\",\n        type=int,\n        default=None,\n    )\n    parser.add_argument(\n        \"--load_ema\",\n        action=\"store_true\",\n        default=False,\n    )\n    parser.add_argument(\"--num_iters\", type=int, default=10000)\n    parser.add_argument(\n        \"--save_model_steps\", type=int, default=1000, help=\"How often to save the model during training.\"\n    )\n    parser.add_argument(\n        \"--gradient_accumulation_steps\",\n        type=int,\n        default=1,\n        help=\"Number of updates steps to accumulate before performing a backward/update pass.\",\n    )\n    parser.add_argument(\n        \"--learning_rate\",\n        type=float,\n        default=1e-4,\n        help=\"Initial learning rate (after the potential warmup period) to use.\",\n    )\n    parser.add_argument(\n        \"--lr_scheduler\",\n        type=str,\n        default=\"constant\",\n        help=(\n            'The scheduler type to use. Choose between [\"linear\", \"cosine\", \"cosine_with_restarts\", \"polynomial\",'\n            ' \"constant\", \"constant_with_warmup\"]'\n        ),\n    )\n    parser.add_argument(\n        \"--lr_warmup_steps\", type=int, default=500, help=\"Number of steps for the warmup in the lr scheduler.\"\n    )\n    parser.add_argument(\"--adam_beta1\", type=float, default=0.9, help=\"The beta1 parameter for the Adam optimizer.\")\n    parser.add_argument(\"--adam_beta2\", type=float, default=0.999, help=\"The beta2 parameter for the Adam optimizer.\")\n    parser.add_argument(\n        \"--adam_weight_decay\", type=float, default=0.0, help=\"Weight decay magnitude for the Adam optimizer.\"\n    )\n    parser.add_argument(\"--adam_epsilon\", type=float, default=1e-08, help=\"Epsilon value for the Adam optimizer.\")\n    parser.add_argument(\n        \"--use_ema\",\n        action=\"store_true\",\n        help=\"Whether to use Exponential Moving Average for the final model weights.\",\n    )\n    parser.add_argument(\"--ema_inv_gamma\", type=float, default=1.0, help=\"The inverse gamma value for the EMA decay.\")\n    parser.add_argument(\"--ema_power\", type=float, default=3 / 4, help=\"The power value for the EMA decay.\")\n    parser.add_argument(\"--ema_max_decay\", type=float, default=0.999, help=\"The maximum decay magnitude for EMA.\")\n    parser.add_argument(\"--push_to_hub\", action=\"store_true\", help=\"Whether or not to push the model to the Hub.\")\n    parser.add_argument(\"--hub_token\", type=str, default=None, help=\"The token to use to push to the Model Hub.\")\n    parser.add_argument(\n        \"--hub_model_id\",\n        type=str,\n        default=None,\n        help=\"The name of the repository to keep in sync with the local `output_dir`.\",\n    )\n    parser.add_argument(\n        \"--hub_private_repo\", action=\"store_true\", 
help=\"Whether or not to create a private repository.\"\n    )\n    parser.add_argument(\n        \"--logger\",\n        type=str,\n        default=\"tensorboard\",\n        choices=[\"tensorboard\", \"wandb\"],\n        help=(\n            \"Whether to use [tensorboard](https://www.tensorflow.org/tensorboard) or [wandb](https://www.wandb.ai)\"\n            \" for experiment tracking and logging of model metrics and model checkpoints\"\n        ),\n    )\n    parser.add_argument(\n        \"--logging_dir\",\n        type=str,\n        default=\"logs\",\n        help=(\n            \"[TensorBoard](https://www.tensorflow.org/tensorboard) log directory. Will default to\"\n            \" *output_dir/runs/**CURRENT_DATETIME_HOSTNAME***.\"\n        ),\n    )\n    parser.add_argument(\"--local_rank\", type=int, default=-1, help=\"For distributed training: local_rank\")\n    parser.add_argument(\n        \"--mixed_precision\",\n        type=str,\n        default=\"no\",\n        choices=[\"no\", \"fp16\", \"bf16\"],\n        help=(\n            \"Whether to use mixed precision. Choose\"\n            \"between fp16 and bf16 (bfloat16). Bf16 requires PyTorch >= 1.10.\"\n            \"and an Nvidia Ampere GPU.\"\n        ),\n    )\n    parser.add_argument(\n        \"--prediction_type\",\n        type=str,\n        default=\"epsilon\",\n        choices=[\"epsilon\", \"sample\"],\n        help=\"Whether the model should predict the 'epsilon'/noise error or directly the reconstructed image 'x0'.\",\n    )\n    parser.add_argument(\"--ddpm_num_steps\", type=int, default=1000)\n    parser.add_argument(\"--ddim_num_inference_steps\", type=int, default=100)\n    parser.add_argument(\"--ddpm_beta_schedule\", type=str, default=\"linear\")\n    parser.add_argument(\n        \"--checkpointing_steps\",\n        type=int,\n        default=500,\n        help=(\n            \"Save a checkpoint of the training state every X updates. These checkpoints are only suitable for resuming\"\n            \" training using `--resume_from_checkpoint`.\"\n        ),\n    )\n    parser.add_argument(\n        \"--resume_from_checkpoint\",\n        type=str,\n        default=None,\n        help=(\n            \"Whether training should be resumed from a previous checkpoint. 
Use a path saved by\"\n            ' `--checkpointing_steps`, or `\"latest\"` to automatically select the last available checkpoint.'\n        ),\n    )\n    parser.add_argument(\n        \"--enable_xformers_memory_efficient_attention\", action=\"store_true\", help=\"Whether or not to use xformers.\"\n    )\n\n    args = parser.parse_args()\n    env_local_rank = int(os.environ.get(\"LOCAL_RANK\", -1))\n    if env_local_rank != -1 and env_local_rank != args.local_rank:\n        args.local_rank = env_local_rank\n\n    if args.dataset is None and args.train_data_dir is None:\n        raise ValueError(\"You must specify either a dataset name from the hub or a train data directory.\")\n\n    return args\n\ndef main(args):\n    logging_dir = os.path.join(args.output_dir, args.logging_dir)\n    accelerator_project_config = ProjectConfiguration()\n    accelerator = Accelerator(\n        gradient_accumulation_steps=args.gradient_accumulation_steps,\n        mixed_precision=args.mixed_precision,\n        log_with=args.logger,\n        project_dir=logging_dir,\n        project_config=accelerator_project_config,\n    )\n\n    if args.logger == \"tensorboard\":\n        if not is_tensorboard_available():\n            raise ImportError(\"Make sure to install tensorboard if you want to use it for logging during training.\")\n    elif args.logger == \"wandb\":\n        if not is_wandb_available():\n            raise ImportError(\"Make sure to install wandb if you want to use it for logging during training.\")\n        import wandb\n\n    # Make one log on every process with the configuration for debugging.\n    logging.basicConfig(\n        format=\"%(asctime)s - %(levelname)s - %(name)s - %(message)s\",\n        datefmt=\"%m/%d/%Y %H:%M:%S\",\n        level=logging.INFO,\n    )\n    logger.info(accelerator.state, main_process_only=False)\n    if accelerator.is_local_main_process:\n        diffusers.utils.logging.set_verbosity_info()\n    else:\n        diffusers.utils.logging.set_verbosity_error()\n\n    # Handle the repository creation\n    if accelerator.is_main_process:\n        if args.output_dir is not None:\n            os.makedirs(args.output_dir, exist_ok=True)\n\n    # Loading pruned model\n    if os.path.isdir(args.model_path):\n        if args.pruned_model_ckpt is not None:\n            print(\"Loading pruned model from {}\".format(args.pruned_model_ckpt))\n            unet = torch.load(args.pruned_model_ckpt, map_location='cpu').eval()\n        else:\n            print(\"Loading model from {}\".format(args.model_path))\n            subfolder = 'unet' if os.path.isdir(os.path.join(args.model_path, 'unet')) else None\n            unet = UNet2DModel.from_pretrained(args.model_path, subfolder=subfolder).eval()\n        pipeline = DDPMPipeline(\n            unet=unet,\n            scheduler=DDPMScheduler.from_pretrained(args.model_path, subfolder=\"scheduler\")\n        )\n    # Loading standard model\n    else:  \n        print(\"Loading pretrained model from {}\".format(args.model_path))\n        pipeline = DDPMPipeline.from_pretrained(\n            args.model_path,\n        )\n    model = pipeline.unet\n    noise_scheduler = pipeline.scheduler\n\n    # Get the datasets: you can either provide your own training and evaluation files (see below)\n    dataset = utils.get_dataset(args.dataset)\n    logger.info(f\"Dataset size: {len(dataset)}\")\n    train_dataloader = torch.utils.data.DataLoader(\n        dataset, batch_size=args.train_batch_size, shuffle=True, 
num_workers=args.dataloader_num_workers\n    )\n    num_epochs = math.ceil(args.num_iters / len(train_dataloader))\n\n    # Create EMA for the model.\n    if args.use_ema:\n        ema_model = EMAModel(\n            model.parameters(),\n            decay=args.ema_max_decay,\n            use_ema_warmup=False,\n            inv_gamma=args.ema_inv_gamma,\n            power=args.ema_power,\n            model_cls=UNet2DModel,\n            model_config=model.config,\n        )\n\n    # Initialize the optimizer\n    optimizer = torch.optim.Adam(\n        model.parameters(),\n        lr=args.learning_rate,\n        betas=(args.adam_beta1, args.adam_beta2),\n        weight_decay=args.adam_weight_decay,\n        eps=args.adam_epsilon,\n    )\n\n    # Initialize the learning rate scheduler\n    lr_scheduler = get_scheduler(\n        args.lr_scheduler,\n        optimizer=optimizer,\n        num_warmup_steps=args.lr_warmup_steps * args.gradient_accumulation_steps,\n        num_training_steps=(len(train_dataloader) * num_epochs),\n    )\n\n    # Prepare everything with our `accelerator`.\n    model, optimizer, train_dataloader, lr_scheduler = accelerator.prepare(\n        model, optimizer, train_dataloader, lr_scheduler\n    )\n\n    if args.use_ema:\n        ema_model.to(accelerator.device)\n\n    # We need to initialize the trackers we use, and also store our configuration.\n    # The trackers initializes automatically on the main process.\n    if accelerator.is_main_process:\n        run = os.path.split(__file__)[-1].split(\".\")[0]\n        accelerator.init_trackers(run)\n\n    total_batch_size = args.train_batch_size * accelerator.num_processes * args.gradient_accumulation_steps\n    num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps)\n\n    logger.info(\"***** Running training *****\")\n    logger.info(f\"  Num examples = {len(dataset)}\")\n    logger.info(f\"  Instantaneous batch size per device = {args.train_batch_size}\")\n    logger.info(f\"  Total train batch size (w. 
parallel, distributed & accumulation) = {total_batch_size}\")\n    logger.info(f\"  Gradient Accumulation steps = {args.gradient_accumulation_steps}\")\n    logger.info(f\"  Num Epochs = {num_epochs}\")\n    logger.info(f\"  Total optimization steps = {args.num_iters}\")\n\n    global_step = 0\n    first_epoch = 0\n\n    # save the shell command\n    if accelerator.is_main_process:\n        with open(os.path.join(args.output_dir, 'run.sh'), 'w') as f:\n            f.write('python ' + ' '.join(sys.argv))\n\n    # setup dropout\n    if args.dropout>0:\n        utils.set_dropout(model, args.dropout)\n\n    # generate images before training\n    if accelerator.is_main_process:\n        unet = accelerator.unwrap_model(model).eval()\n        if args.use_ema:\n            ema_model.store(unet.parameters())\n            ema_model.copy_to(unet.parameters())\n        pipeline = DDIMPipeline(\n            unet=unet,\n            scheduler=DDIMScheduler(num_train_timesteps=args.ddpm_num_steps)\n        )\n        pipeline.scheduler.set_timesteps(args.ddim_num_inference_steps)\n        images = pipeline(\n            batch_size=args.eval_batch_size,\n            num_inference_steps=args.ddim_num_inference_steps,\n            output_type=\"numpy\",\n        ).images\n        if args.use_ema:\n            ema_model.restore(unet.parameters())\n        os.makedirs(os.path.join(args.output_dir, 'vis'), exist_ok=True)\n        torchvision.utils.save_image(torch.from_numpy(images).permute([0, 3, 1, 2]), os.path.join(args.output_dir, 'vis', 'before_training.png'))\n        images_processed = (images * 255).round().astype(\"uint8\")\n        if args.logger == \"tensorboard\":\n            if is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n                tracker = accelerator.get_tracker(\"tensorboard\", unwrap=True)\n            else:\n                tracker = accelerator.get_tracker(\"tensorboard\")\n            tracker.add_images(\"After Pruning\", images_processed.transpose(0, 3, 1, 2), 0)\n        elif args.logger == \"wandb\":\n            # Upcoming `log_images` helper coming in https://github.com/huggingface/accelerate/pull/962/files\n            accelerator.get_tracker(\"wandb\").log(\n                {\"After Pruning\": [wandb.Image(img) for img in images_processed], \"epoch\": 0},\n                step=global_step,\n            )\n        del unet\n        del pipeline\n\n    accelerator.wait_for_everyone()    \n    # Train!\n    os.makedirs(os.path.join(args.output_dir, 'vis'), exist_ok=True)\n    for epoch in range(first_epoch, num_epochs):\n        progress_bar = tqdm(total=num_update_steps_per_epoch, disable=not accelerator.is_local_main_process)\n        progress_bar.set_description(f\"Epoch {epoch}\")\n        for step, batch in enumerate(train_dataloader):\n            model.train()\n            # Skip steps until we reach the resumed step\n            if args.resume_from_checkpoint and epoch == first_epoch and step < resume_step:\n                if step % args.gradient_accumulation_steps == 0:\n                    progress_bar.update(1)\n                continue\n            if isinstance(batch, (list, tuple)):\n                clean_images = batch[0]\n            else:\n                clean_images = batch\n            noise = torch.randn(clean_images.shape).to(clean_images.device)\n            bsz = clean_images.shape[0]\n\n            # The standard training procedure in diffusers\n            #timesteps = torch.randint(\n            #    0, noise_scheduler.config.num_train_timesteps, 
(bsz,), device=clean_images.device\n            #).long()\n\n            # Our experiements were conduct on https://github.com/ermongroup/ddim/blob/main/runners/diffusion.py\n            timesteps = torch.randint(\n                low=0, high=noise_scheduler.config.num_train_timesteps, size=(bsz // 2 + 1,)\n            ).to(clean_images.device)\n            timesteps = torch.cat([timesteps, noise_scheduler.config.num_train_timesteps - timesteps - 1], dim=0)[:bsz]\n\n            # Add noise to the clean images according to the noise magnitude at each timestep\n            # (this is the forward diffusion process)\n            noisy_images = noise_scheduler.add_noise(clean_images, noise, timesteps)\n\n            with accelerator.accumulate(model):\n                optimizer.zero_grad()\n                # Predict the noise residual\n                model_output = model(noisy_images, timesteps).sample\n                loss = (noise - model_output).square().sum(dim=(1, 2, 3)).mean(dim=0) \n                accelerator.backward(loss)\n                if accelerator.sync_gradients:\n                    accelerator.clip_grad_norm_(model.parameters(), 1.0)\n                optimizer.step()\n                lr_scheduler.step()\n                \n            # Checks if the accelerator has performed an optimization step behind the scenes\n            if accelerator.sync_gradients:\n                if args.use_ema:\n                    ema_model.step(model.parameters())\n                progress_bar.update(1)\n                global_step += 1\n\n            logs = {\"loss\": loss.detach().item(), \"lr\": lr_scheduler.get_last_lr()[0], \"step\": global_step}\n            if args.use_ema:\n                logs[\"ema_decay\"] = ema_model.cur_decay_value\n            progress_bar.set_postfix(**logs)\n            accelerator.log(logs, step=global_step)\n\n            # Save the model & generate sample images \n            if global_step % args.save_model_steps == 0:\n                accelerator.wait_for_everyone()\n                if accelerator.is_main_process:\n                    # save the model\n                    unet = accelerator.unwrap_model(model).eval()\n                    unet.zero_grad()\n                    os.makedirs(os.path.join(args.output_dir, 'pruned'), exist_ok=True)\n                    torch.save(unet, os.path.join(args.output_dir, 'pruned', 'unet_pruned.pth'.format(global_step)))\n                    torch.save(unet, os.path.join(args.output_dir, 'pruned', 'unet_pruned-{}.pth'.format(global_step)))\n                    if args.use_ema:\n                        ema_model.store(unet.parameters())\n                        ema_model.copy_to(unet.parameters())\n                        torch.save(unet, os.path.join(args.output_dir, 'pruned', 'unet_ema_pruned.pth'.format(global_step)))\n                        torch.save(unet, os.path.join(args.output_dir, 'pruned', 'unet_ema_pruned-{}.pth'.format(global_step)))\n                    pipeline = DDPMPipeline(\n                        unet=unet,\n                        scheduler=noise_scheduler,\n                    )\n                    pipeline.save_pretrained(args.output_dir) \n\n                    # generate images\n                    logger.info(\"Sampling images...\")\n                    pipeline = DDIMPipeline(\n                        unet=unet,\n                        scheduler=DDIMScheduler(num_train_timesteps=args.ddpm_num_steps)\n                    )\n                    
pipeline.scheduler.set_timesteps(args.ddim_num_inference_steps)\n                    images = pipeline(\n                        batch_size=args.eval_batch_size,\n                        num_inference_steps=args.ddim_num_inference_steps,\n                        output_type=\"numpy\",\n                    ).images\n\n                    if args.use_ema:\n                        ema_model.restore(unet.parameters())\n                    torchvision.utils.save_image(torch.from_numpy(images).permute([0, 3, 1, 2]), os.path.join(args.output_dir, 'vis', 'iter-{}.png'.format(global_step)))\n                    # denormalize the images and save to tensorboard\n                    images_processed = (images * 255).round().astype(\"uint8\")\n\n                    if args.logger == \"tensorboard\":\n                        if is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n                            tracker = accelerator.get_tracker(\"tensorboard\", unwrap=True)\n                        else:\n                            tracker = accelerator.get_tracker(\"tensorboard\")\n                        tracker.add_images(\"test_samples\", images_processed.transpose(0, 3, 1, 2), global_step)\n                    elif args.logger == \"wandb\":\n                        # Upcoming `log_images` helper coming in https://github.com/huggingface/accelerate/pull/962/files\n                        accelerator.get_tracker(\"wandb\").log(\n                            {\"test_samples\": [wandb.Image(img) for img in images_processed], \"steps\": global_step},\n                            step=global_step,\n                        )\n                    del unet\n                    del pipeline\n            if global_step>args.num_iters:\n                progress_bar.close()\n                accelerator.wait_for_everyone()\n                accelerator.end_training()\n                return\n        progress_bar.close()\n        accelerator.wait_for_everyone()\n    accelerator.end_training()\n\n\nif __name__ == \"__main__\":\n    args = parse_args()\n    main(args)\n"
  },
  {
    "path": "diffusers/__init__.py",
    "content": "__version__ = \"0.17.0.dev0\"\n\nfrom .configuration_utils import ConfigMixin\nfrom .utils import (\n    OptionalDependencyNotAvailable,\n    is_flax_available,\n    is_inflect_available,\n    is_k_diffusion_available,\n    is_k_diffusion_version,\n    is_librosa_available,\n    is_note_seq_available,\n    is_onnx_available,\n    is_scipy_available,\n    is_torch_available,\n    is_torchsde_available,\n    is_transformers_available,\n    is_transformers_version,\n    is_unidecode_available,\n    logging,\n)\n\n\ntry:\n    if not is_onnx_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_onnx_objects import *  # noqa F403\nelse:\n    from .pipelines import OnnxRuntimeModel\n\ntry:\n    if not is_torch_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_pt_objects import *  # noqa F403\nelse:\n    from .models import (\n        AutoencoderKL,\n        ControlNetModel,\n        ModelMixin,\n        PriorTransformer,\n        T5FilmDecoder,\n        Transformer2DModel,\n        UNet1DModel,\n        UNet2DConditionModel,\n        UNet2DModel,\n        UNet3DConditionModel,\n        VQModel,\n    )\n    from .optimization import (\n        get_constant_schedule,\n        get_constant_schedule_with_warmup,\n        get_cosine_schedule_with_warmup,\n        get_cosine_with_hard_restarts_schedule_with_warmup,\n        get_linear_schedule_with_warmup,\n        get_polynomial_decay_schedule_with_warmup,\n        get_scheduler,\n    )\n    from .pipelines import (\n        AudioPipelineOutput,\n        DanceDiffusionPipeline,\n        DDIMPipeline,\n        DDPMPipeline,\n        DiffusionPipeline,\n        DiTPipeline,\n        ImagePipelineOutput,\n        KarrasVePipeline,\n        LDMPipeline,\n        LDMSuperResolutionPipeline,\n        PNDMPipeline,\n        RePaintPipeline,\n        ScoreSdeVePipeline,\n    )\n    from .schedulers import (\n        DDIMInverseScheduler,\n        DDIMScheduler,\n        DDPMScheduler,\n        DEISMultistepScheduler,\n        DPMSolverMultistepInverseScheduler,\n        DPMSolverMultistepScheduler,\n        DPMSolverSinglestepScheduler,\n        EulerAncestralDiscreteScheduler,\n        EulerDiscreteScheduler,\n        HeunDiscreteScheduler,\n        IPNDMScheduler,\n        KarrasVeScheduler,\n        KDPM2AncestralDiscreteScheduler,\n        KDPM2DiscreteScheduler,\n        PNDMScheduler,\n        RePaintScheduler,\n        SchedulerMixin,\n        ScoreSdeVeScheduler,\n        UnCLIPScheduler,\n        UniPCMultistepScheduler,\n        VQDiffusionScheduler,\n    )\n    from .training_utils import EMAModel\n\ntry:\n    if not (is_torch_available() and is_scipy_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_torch_and_scipy_objects import *  # noqa F403\nelse:\n    from .schedulers import LMSDiscreteScheduler\n\ntry:\n    if not (is_torch_available() and is_torchsde_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_torch_and_torchsde_objects import *  # noqa F403\nelse:\n    from .schedulers import DPMSolverSDEScheduler\n\ntry:\n    if not (is_torch_available() and is_transformers_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_torch_and_transformers_objects import *  # noqa 
F403\nelse:\n    from .pipelines import (\n        AltDiffusionImg2ImgPipeline,\n        AltDiffusionPipeline,\n        AudioLDMPipeline,\n        CycleDiffusionPipeline,\n        IFImg2ImgPipeline,\n        IFImg2ImgSuperResolutionPipeline,\n        IFInpaintingPipeline,\n        IFInpaintingSuperResolutionPipeline,\n        IFPipeline,\n        IFSuperResolutionPipeline,\n        LDMTextToImagePipeline,\n        PaintByExamplePipeline,\n        SemanticStableDiffusionPipeline,\n        StableDiffusionAttendAndExcitePipeline,\n        StableDiffusionControlNetImg2ImgPipeline,\n        StableDiffusionControlNetInpaintPipeline,\n        StableDiffusionControlNetPipeline,\n        StableDiffusionDepth2ImgPipeline,\n        StableDiffusionDiffEditPipeline,\n        StableDiffusionImageVariationPipeline,\n        StableDiffusionImg2ImgPipeline,\n        StableDiffusionInpaintPipeline,\n        StableDiffusionInpaintPipelineLegacy,\n        StableDiffusionInstructPix2PixPipeline,\n        StableDiffusionLatentUpscalePipeline,\n        StableDiffusionModelEditingPipeline,\n        StableDiffusionPanoramaPipeline,\n        StableDiffusionPipeline,\n        StableDiffusionPipelineSafe,\n        StableDiffusionPix2PixZeroPipeline,\n        StableDiffusionSAGPipeline,\n        StableDiffusionUpscalePipeline,\n        StableUnCLIPImg2ImgPipeline,\n        StableUnCLIPPipeline,\n        TextToVideoSDPipeline,\n        TextToVideoZeroPipeline,\n        UnCLIPImageVariationPipeline,\n        UnCLIPPipeline,\n        VersatileDiffusionDualGuidedPipeline,\n        VersatileDiffusionImageVariationPipeline,\n        VersatileDiffusionPipeline,\n        VersatileDiffusionTextToImagePipeline,\n        VQDiffusionPipeline,\n    )\n\ntry:\n    if not (is_torch_available() and is_transformers_available() and is_k_diffusion_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_torch_and_transformers_and_k_diffusion_objects import *  # noqa F403\nelse:\n    from .pipelines import StableDiffusionKDiffusionPipeline\n\ntry:\n    if not (is_torch_available() and is_transformers_available() and is_onnx_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_torch_and_transformers_and_onnx_objects import *  # noqa F403\nelse:\n    from .pipelines import (\n        OnnxStableDiffusionImg2ImgPipeline,\n        OnnxStableDiffusionInpaintPipeline,\n        OnnxStableDiffusionInpaintPipelineLegacy,\n        OnnxStableDiffusionPipeline,\n        OnnxStableDiffusionUpscalePipeline,\n        StableDiffusionOnnxPipeline,\n    )\n\ntry:\n    if not (is_torch_available() and is_librosa_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_torch_and_librosa_objects import *  # noqa F403\nelse:\n    from .pipelines import AudioDiffusionPipeline, Mel\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_note_seq_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_transformers_and_torch_and_note_seq_objects import *  # noqa F403\nelse:\n    from .pipelines import SpectrogramDiffusionPipeline\n\ntry:\n    if not is_flax_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_flax_objects import *  # noqa F403\nelse:\n    from .models.controlnet_flax import 
FlaxControlNetModel\n    from .models.modeling_flax_utils import FlaxModelMixin\n    from .models.unet_2d_condition_flax import FlaxUNet2DConditionModel\n    from .models.vae_flax import FlaxAutoencoderKL\n    from .pipelines import FlaxDiffusionPipeline\n    from .schedulers import (\n        FlaxDDIMScheduler,\n        FlaxDDPMScheduler,\n        FlaxDPMSolverMultistepScheduler,\n        FlaxKarrasVeScheduler,\n        FlaxLMSDiscreteScheduler,\n        FlaxPNDMScheduler,\n        FlaxSchedulerMixin,\n        FlaxScoreSdeVeScheduler,\n    )\n\n\ntry:\n    if not (is_flax_available() and is_transformers_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_flax_and_transformers_objects import *  # noqa F403\nelse:\n    from .pipelines import (\n        FlaxStableDiffusionControlNetPipeline,\n        FlaxStableDiffusionImg2ImgPipeline,\n        FlaxStableDiffusionInpaintPipeline,\n        FlaxStableDiffusionPipeline,\n    )\n\ntry:\n    if not (is_note_seq_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from .utils.dummy_note_seq_objects import *  # noqa F403\nelse:\n    from .pipelines import MidiProcessor\n"
  },
  {
    "path": "diffusers/commands/__init__.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom abc import ABC, abstractmethod\nfrom argparse import ArgumentParser\n\n\nclass BaseDiffusersCLICommand(ABC):\n    @staticmethod\n    @abstractmethod\n    def register_subcommand(parser: ArgumentParser):\n        raise NotImplementedError()\n\n    @abstractmethod\n    def run(self):\n        raise NotImplementedError()\n"
  },
  {
    "path": "diffusers/commands/diffusers_cli.py",
    "content": "#!/usr/bin/env python\n# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom argparse import ArgumentParser\n\nfrom .env import EnvironmentCommand\n\n\ndef main():\n    parser = ArgumentParser(\"Diffusers CLI tool\", usage=\"diffusers-cli <command> [<args>]\")\n    commands_parser = parser.add_subparsers(help=\"diffusers-cli command helpers\")\n\n    # Register commands\n    EnvironmentCommand.register_subcommand(commands_parser)\n\n    # Let's go\n    args = parser.parse_args()\n\n    if not hasattr(args, \"func\"):\n        parser.print_help()\n        exit(1)\n\n    # Run\n    service = args.func(args)\n    service.run()\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },
  {
    "path": "diffusers/commands/env.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport platform\nfrom argparse import ArgumentParser\n\nimport huggingface_hub\n\nfrom .. import __version__ as version\nfrom ..utils import is_accelerate_available, is_torch_available, is_transformers_available, is_xformers_available\nfrom . import BaseDiffusersCLICommand\n\n\ndef info_command_factory(_):\n    return EnvironmentCommand()\n\n\nclass EnvironmentCommand(BaseDiffusersCLICommand):\n    @staticmethod\n    def register_subcommand(parser: ArgumentParser):\n        download_parser = parser.add_parser(\"env\")\n        download_parser.set_defaults(func=info_command_factory)\n\n    def run(self):\n        hub_version = huggingface_hub.__version__\n\n        pt_version = \"not installed\"\n        pt_cuda_available = \"NA\"\n        if is_torch_available():\n            import torch\n\n            pt_version = torch.__version__\n            pt_cuda_available = torch.cuda.is_available()\n\n        transformers_version = \"not installed\"\n        if is_transformers_available():\n            import transformers\n\n            transformers_version = transformers.__version__\n\n        accelerate_version = \"not installed\"\n        if is_accelerate_available():\n            import accelerate\n\n            accelerate_version = accelerate.__version__\n\n        xformers_version = \"not installed\"\n        if is_xformers_available():\n            import xformers\n\n            xformers_version = xformers.__version__\n\n        info = {\n            \"`diffusers` version\": version,\n            \"Platform\": platform.platform(),\n            \"Python version\": platform.python_version(),\n            \"PyTorch version (GPU?)\": f\"{pt_version} ({pt_cuda_available})\",\n            \"Huggingface_hub version\": hub_version,\n            \"Transformers version\": transformers_version,\n            \"Accelerate version\": accelerate_version,\n            \"xFormers version\": xformers_version,\n            \"Using GPU in script?\": \"<fill in>\",\n            \"Using distributed or parallel set-up in script?\": \"<fill in>\",\n        }\n\n        print(\"\\nCopy-and-paste the text below in your GitHub issue and FILL OUT the two last points.\\n\")\n        print(self.format_dict(info))\n\n        return info\n\n    @staticmethod\n    def format_dict(d):\n        return \"\\n\".join([f\"- {prop}: {val}\" for prop, val in d.items()]) + \"\\n\"\n"
  },
  {
    "path": "diffusers/configuration_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" ConfigMixin base class and utilities.\"\"\"\nimport dataclasses\nimport functools\nimport importlib\nimport inspect\nimport json\nimport os\nimport re\nfrom collections import OrderedDict\nfrom pathlib import PosixPath\nfrom typing import Any, Dict, Tuple, Union\n\nimport numpy as np\nfrom huggingface_hub import hf_hub_download\nfrom huggingface_hub.utils import EntryNotFoundError, RepositoryNotFoundError, RevisionNotFoundError\nfrom requests import HTTPError\n\nfrom . import __version__\nfrom .utils import (\n    DIFFUSERS_CACHE,\n    HUGGINGFACE_CO_RESOLVE_ENDPOINT,\n    DummyObject,\n    deprecate,\n    extract_commit_hash,\n    http_user_agent,\n    logging,\n)\n\n\nlogger = logging.get_logger(__name__)\n\n_re_configuration_file = re.compile(r\"config\\.(.*)\\.json\")\n\n\nclass FrozenDict(OrderedDict):\n    def __init__(self, *args, **kwargs):\n        super().__init__(*args, **kwargs)\n\n        for key, value in self.items():\n            setattr(self, key, value)\n\n        self.__frozen = True\n\n    def __delitem__(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``__delitem__`` on a {self.__class__.__name__} instance.\")\n\n    def setdefault(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``setdefault`` on a {self.__class__.__name__} instance.\")\n\n    def pop(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``pop`` on a {self.__class__.__name__} instance.\")\n\n    def update(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``update`` on a {self.__class__.__name__} instance.\")\n\n    def __setattr__(self, name, value):\n        if hasattr(self, \"__frozen\") and self.__frozen:\n            raise Exception(f\"You cannot use ``__setattr__`` on a {self.__class__.__name__} instance.\")\n        super().__setattr__(name, value)\n\n    def __setitem__(self, name, value):\n        if hasattr(self, \"__frozen\") and self.__frozen:\n            raise Exception(f\"You cannot use ``__setattr__`` on a {self.__class__.__name__} instance.\")\n        super().__setitem__(name, value)\n\n\nclass ConfigMixin:\n    r\"\"\"\n    Base class for all configuration classes. 
Stores all configuration parameters under `self.config` Also handles all\n    methods for loading/downloading/saving classes inheriting from [`ConfigMixin`] with\n        - [`~ConfigMixin.from_config`]\n        - [`~ConfigMixin.save_config`]\n\n    Class attributes:\n        - **config_name** (`str`) -- A filename under which the config should stored when calling\n          [`~ConfigMixin.save_config`] (should be overridden by parent class).\n        - **ignore_for_config** (`List[str]`) -- A list of attributes that should not be saved in the config (should be\n          overridden by subclass).\n        - **has_compatibles** (`bool`) -- Whether the class has compatible classes (should be overridden by subclass).\n        - **_deprecated_kwargs** (`List[str]`) -- Keyword arguments that are deprecated. Note that the init function\n          should only have a `kwargs` argument if at least one argument is deprecated (should be overridden by\n          subclass).\n    \"\"\"\n    config_name = None\n    ignore_for_config = []\n    has_compatibles = False\n\n    _deprecated_kwargs = []\n\n    def register_to_config(self, **kwargs):\n        if self.config_name is None:\n            raise NotImplementedError(f\"Make sure that {self.__class__} has defined a class name `config_name`\")\n        # Special case for `kwargs` used in deprecation warning added to schedulers\n        # TODO: remove this when we remove the deprecation warning, and the `kwargs` argument,\n        # or solve in a more general way.\n        kwargs.pop(\"kwargs\", None)\n\n        if not hasattr(self, \"_internal_dict\"):\n            internal_dict = kwargs\n        else:\n            previous_dict = dict(self._internal_dict)\n            internal_dict = {**self._internal_dict, **kwargs}\n            logger.debug(f\"Updating config from {previous_dict} to {internal_dict}\")\n\n        self._internal_dict = FrozenDict(internal_dict)\n\n    def __getattr__(self, name: str) -> Any:\n        \"\"\"The only reason we overwrite `getattr` here is to gracefully deprecate accessing\n        config attributes directly. See https://github.com/huggingface/diffusers/pull/3129\n\n        Tihs funtion is mostly copied from PyTorch's __getattr__ overwrite:\n        https://pytorch.org/docs/stable/_modules/torch/nn/modules/module.html#Module\n        \"\"\"\n\n        is_in_config = \"_internal_dict\" in self.__dict__ and hasattr(self.__dict__[\"_internal_dict\"], name)\n        is_attribute = name in self.__dict__\n\n        if is_in_config and not is_attribute:\n            deprecation_message = f\"Accessing config attribute `{name}` directly via '{type(self).__name__}' object attribute is deprecated. Please access '{name}' over '{type(self).__name__}'s config object instead, e.g. 
'scheduler.config.{name}'.\"\n            deprecate(\"direct config name access\", \"1.0.0\", deprecation_message, standard_warn=False)\n            return self._internal_dict[name]\n\n        raise AttributeError(f\"'{type(self).__name__}' object has no attribute '{name}'\")\n\n    def save_config(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):\n        \"\"\"\n        Save a configuration object to the directory `save_directory`, so that it can be re-loaded using the\n        [`~ConfigMixin.from_config`] class method.\n\n        Args:\n            save_directory (`str` or `os.PathLike`):\n                Directory where the configuration JSON file will be saved (will be created if it does not exist).\n        \"\"\"\n        if os.path.isfile(save_directory):\n            raise AssertionError(f\"Provided path ({save_directory}) should be a directory, not a file\")\n\n        os.makedirs(save_directory, exist_ok=True)\n\n        # If we save using the predefined names, we can load using `from_config`\n        output_config_file = os.path.join(save_directory, self.config_name)\n\n        self.to_json_file(output_config_file)\n        logger.info(f\"Configuration saved in {output_config_file}\")\n\n    @classmethod\n    def from_config(cls, config: Union[FrozenDict, Dict[str, Any]] = None, return_unused_kwargs=False, **kwargs):\n        r\"\"\"\n        Instantiate a Python class from a config dictionary\n\n        Parameters:\n            config (`Dict[str, Any]`):\n                A config dictionary from which the Python class will be instantiated. Make sure to only load\n                configuration files of compatible classes.\n            return_unused_kwargs (`bool`, *optional*, defaults to `False`):\n                Whether kwargs that are not consumed by the Python class should be returned or not.\n\n            kwargs (remaining dictionary of keyword arguments, *optional*):\n                Can be used to update the configuration object (after it being loaded) and initiate the Python class.\n                `**kwargs` will be directly passed to the underlying scheduler/model's `__init__` method and eventually\n                overwrite same named arguments of `config`.\n\n        Examples:\n\n        ```python\n        >>> from diffusers import DDPMScheduler, DDIMScheduler, PNDMScheduler\n\n        >>> # Download scheduler from huggingface.co and cache.\n        >>> scheduler = DDPMScheduler.from_pretrained(\"google/ddpm-cifar10-32\")\n\n        >>> # Instantiate DDIM scheduler class with same config as DDPM\n        >>> scheduler = DDIMScheduler.from_config(scheduler.config)\n\n        >>> # Instantiate PNDM scheduler class with same config as DDPM\n        >>> scheduler = PNDMScheduler.from_config(scheduler.config)\n        ```\n        \"\"\"\n        # <===== TO BE REMOVED WITH DEPRECATION\n        # TODO(Patrick) - make sure to remove the following lines when config==\"model_path\" is deprecated\n        if \"pretrained_model_name_or_path\" in kwargs:\n            config = kwargs.pop(\"pretrained_model_name_or_path\")\n\n        if config is None:\n            raise ValueError(\"Please make sure to provide a config as the first positional argument.\")\n        # ======>\n\n        if not isinstance(config, dict):\n            deprecation_message = \"It is deprecated to pass a pretrained model name or path to `from_config`.\"\n            if \"Scheduler\" in cls.__name__:\n                deprecation_message += (\n                    f\"If 
you were trying to load a scheduler, please use {cls}.from_pretrained(...) instead.\"\n                    \" Otherwise, please make sure to pass a configuration dictionary instead. This functionality will\"\n                    \" be removed in v1.0.0.\"\n                )\n            elif \"Model\" in cls.__name__:\n                deprecation_message += (\n                    f\"If you were trying to load a model, please use {cls}.load_config(...) followed by\"\n                    f\" {cls}.from_config(...) instead. Otherwise, please make sure to pass a configuration dictionary\"\n                    \" instead. This functionality will be removed in v1.0.0.\"\n                )\n            deprecate(\"config-passed-as-path\", \"1.0.0\", deprecation_message, standard_warn=False)\n            config, kwargs = cls.load_config(pretrained_model_name_or_path=config, return_unused_kwargs=True, **kwargs)\n\n        init_dict, unused_kwargs, hidden_dict = cls.extract_init_dict(config, **kwargs)\n\n        # Allow dtype to be specified on initialization\n        if \"dtype\" in unused_kwargs:\n            init_dict[\"dtype\"] = unused_kwargs.pop(\"dtype\")\n\n        # add possible deprecated kwargs\n        for deprecated_kwarg in cls._deprecated_kwargs:\n            if deprecated_kwarg in unused_kwargs:\n                init_dict[deprecated_kwarg] = unused_kwargs.pop(deprecated_kwarg)\n\n        # Return model and optionally state and/or unused_kwargs\n        model = cls(**init_dict)\n\n        # make sure to also save config parameters that might be used for compatible classes\n        model.register_to_config(**hidden_dict)\n\n        # add hidden kwargs of compatible classes to unused_kwargs\n        unused_kwargs = {**unused_kwargs, **hidden_dict}\n\n        if return_unused_kwargs:\n            return (model, unused_kwargs)\n        else:\n            return model\n\n    @classmethod\n    def get_config_dict(cls, *args, **kwargs):\n        deprecation_message = (\n            f\" The function get_config_dict is deprecated. Please use {cls}.load_config instead. This function will be\"\n            \" removed in version v1.0.0\"\n        )\n        deprecate(\"get_config_dict\", \"1.0.0\", deprecation_message, standard_warn=False)\n        return cls.load_config(*args, **kwargs)\n\n    @classmethod\n    def load_config(\n        cls,\n        pretrained_model_name_or_path: Union[str, os.PathLike],\n        return_unused_kwargs=False,\n        return_commit_hash=False,\n        **kwargs,\n    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:\n        r\"\"\"\n        Instantiate a Python class from a config dictionary\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n\n                    - A string, the *model id* of a model repo on huggingface.co. 
Valid model ids should have an\n                      organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing model weights saved using [`~ConfigMixin.save_config`], e.g.,\n                      `./my_model_directory/`.\n\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `transformers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            subfolder (`str`, *optional*, defaults to `\"\"`):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n            return_unused_kwargs (`bool`, *optional*, defaults to `False):\n                Whether unused keyword arguments of the config shall be returned.\n            return_commit_hash (`bool`, *optional*, defaults to `False):\n                Whether the commit_hash of the loaded configuration shall be returned.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n\n        <Tip>\n\n        Activate the special [\"offline-mode\"](https://huggingface.co/transformers/installation.html#offline-mode) to\n        use this method in a firewalled environment.\n\n        </Tip>\n        \"\"\"\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        force_download = kwargs.pop(\"force_download\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", False)\n        revision = kwargs.pop(\"revision\", None)\n        _ = kwargs.pop(\"mirror\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n        user_agent = kwargs.pop(\"user_agent\", {})\n\n        user_agent = {**user_agent, \"file_type\": \"config\"}\n        user_agent = http_user_agent(user_agent)\n\n        pretrained_model_name_or_path = str(pretrained_model_name_or_path)\n\n        if cls.config_name is None:\n            raise ValueError(\n                \"`self.config_name` is not defined. Note that one should not load a config from \"\n                \"`ConfigMixin`. 
Please make sure to define `config_name` in a class inheriting from `ConfigMixin`\"\n            )\n\n        if os.path.isfile(pretrained_model_name_or_path):\n            config_file = pretrained_model_name_or_path\n        elif os.path.isdir(pretrained_model_name_or_path):\n            if os.path.isfile(os.path.join(pretrained_model_name_or_path, cls.config_name)):\n                # Load from a PyTorch checkpoint\n                config_file = os.path.join(pretrained_model_name_or_path, cls.config_name)\n            elif subfolder is not None and os.path.isfile(\n                os.path.join(pretrained_model_name_or_path, subfolder, cls.config_name)\n            ):\n                config_file = os.path.join(pretrained_model_name_or_path, subfolder, cls.config_name)\n            else:\n                raise EnvironmentError(\n                    f\"Error no file named {cls.config_name} found in directory {pretrained_model_name_or_path}.\"\n                )\n        else:\n            try:\n                # Load from URL or cache if already cached\n                config_file = hf_hub_download(\n                    pretrained_model_name_or_path,\n                    filename=cls.config_name,\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    proxies=proxies,\n                    resume_download=resume_download,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    user_agent=user_agent,\n                    subfolder=subfolder,\n                    revision=revision,\n                )\n            except RepositoryNotFoundError:\n                raise EnvironmentError(\n                    f\"{pretrained_model_name_or_path} is not a local folder and is not a valid model identifier\"\n                    \" listed on 'https://huggingface.co/models'\\nIf this is a private repository, make sure to pass a\"\n                    \" token having permission to this repo with `use_auth_token` or log in with `huggingface-cli\"\n                    \" login`.\"\n                )\n            except RevisionNotFoundError:\n                raise EnvironmentError(\n                    f\"{revision} is not a valid git identifier (branch name, tag name or commit id) that exists for\"\n                    \" this model name. 
Check the model page at\"\n                    f\" 'https://huggingface.co/{pretrained_model_name_or_path}' for available revisions.\"\n                )\n            except EntryNotFoundError:\n                raise EnvironmentError(\n                    f\"{pretrained_model_name_or_path} does not appear to have a file named {cls.config_name}.\"\n                )\n            except HTTPError as err:\n                raise EnvironmentError(\n                    \"There was a specific connection error when trying to load\"\n                    f\" {pretrained_model_name_or_path}:\\n{err}\"\n                )\n            except ValueError:\n                raise EnvironmentError(\n                    f\"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this model, couldn't find it\"\n                    f\" in the cached files and it looks like {pretrained_model_name_or_path} is not the path to a\"\n                    f\" directory containing a {cls.config_name} file.\\nCheckout your internet connection or see how to\"\n                    \" run the library in offline mode at\"\n                    \" 'https://huggingface.co/docs/diffusers/installation#offline-mode'.\"\n                )\n            except EnvironmentError:\n                raise EnvironmentError(\n                    f\"Can't load config for '{pretrained_model_name_or_path}'. If you were trying to load it from \"\n                    \"'https://huggingface.co/models', make sure you don't have a local directory with the same name. \"\n                    f\"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory \"\n                    f\"containing a {cls.config_name} file\"\n                )\n\n        try:\n            # Load config dict\n            config_dict = cls._dict_from_json_file(config_file)\n\n            commit_hash = extract_commit_hash(config_file)\n        except (json.JSONDecodeError, UnicodeDecodeError):\n            raise EnvironmentError(f\"It looks like the config file at '{config_file}' is not a valid JSON file.\")\n\n        if not (return_unused_kwargs or return_commit_hash):\n            return config_dict\n\n        outputs = (config_dict,)\n\n        if return_unused_kwargs:\n            outputs += (kwargs,)\n\n        if return_commit_hash:\n            outputs += (commit_hash,)\n\n        return outputs\n\n    @staticmethod\n    def _get_init_keys(cls):\n        return set(dict(inspect.signature(cls.__init__).parameters).keys())\n\n    @classmethod\n    def extract_init_dict(cls, config_dict, **kwargs):\n        # 0. Copy origin config dict\n        original_dict = dict(config_dict.items())\n\n        # 1. Retrieve expected config attributes from __init__ signature\n        expected_keys = cls._get_init_keys(cls)\n        expected_keys.remove(\"self\")\n        # remove general kwargs if present in dict\n        if \"kwargs\" in expected_keys:\n            expected_keys.remove(\"kwargs\")\n        # remove flax internal keys\n        if hasattr(cls, \"_flax_internal_args\"):\n            for arg in cls._flax_internal_args:\n                expected_keys.remove(arg)\n\n        # 2. 
Remove attributes that cannot be expected from expected config attributes\n        # remove keys to be ignored\n        if len(cls.ignore_for_config) > 0:\n            expected_keys = expected_keys - set(cls.ignore_for_config)\n\n        # load diffusers library to import compatible and original scheduler\n        diffusers_library = importlib.import_module(__name__.split(\".\")[0])\n\n        if cls.has_compatibles:\n            compatible_classes = [c for c in cls._get_compatibles() if not isinstance(c, DummyObject)]\n        else:\n            compatible_classes = []\n\n        expected_keys_comp_cls = set()\n        for c in compatible_classes:\n            expected_keys_c = cls._get_init_keys(c)\n            expected_keys_comp_cls = expected_keys_comp_cls.union(expected_keys_c)\n        expected_keys_comp_cls = expected_keys_comp_cls - cls._get_init_keys(cls)\n        config_dict = {k: v for k, v in config_dict.items() if k not in expected_keys_comp_cls}\n\n        # remove attributes from orig class that cannot be expected\n        orig_cls_name = config_dict.pop(\"_class_name\", cls.__name__)\n        if orig_cls_name != cls.__name__ and hasattr(diffusers_library, orig_cls_name):\n            orig_cls = getattr(diffusers_library, orig_cls_name)\n            unexpected_keys_from_orig = cls._get_init_keys(orig_cls) - expected_keys\n            config_dict = {k: v for k, v in config_dict.items() if k not in unexpected_keys_from_orig}\n\n        # remove private attributes\n        config_dict = {k: v for k, v in config_dict.items() if not k.startswith(\"_\")}\n\n        # 3. Create keyword arguments that will be passed to __init__ from expected keyword arguments\n        init_dict = {}\n        for key in expected_keys:\n            # if config param is passed to kwarg and is present in config dict\n            # it should overwrite existing config dict key\n            if key in kwargs and key in config_dict:\n                config_dict[key] = kwargs.pop(key)\n\n            if key in kwargs:\n                # overwrite key\n                init_dict[key] = kwargs.pop(key)\n            elif key in config_dict:\n                # use value from config dict\n                init_dict[key] = config_dict.pop(key)\n\n        # 4. Give nice warning if unexpected values have been passed\n        if len(config_dict) > 0:\n            logger.warning(\n                f\"The config attributes {config_dict} were passed to {cls.__name__}, \"\n                \"but are not expected and will be ignored. Please verify your \"\n                f\"{cls.config_name} configuration file.\"\n            )\n\n        # 5. Give nice info if config attributes are initiliazed to default because they have not been passed\n        passed_keys = set(init_dict.keys())\n        if len(expected_keys - passed_keys) > 0:\n            logger.info(\n                f\"{expected_keys - passed_keys} was not found in config. Values will be initialized to default values.\"\n            )\n\n        # 6. Define unused keyword arguments\n        unused_kwargs = {**config_dict, **kwargs}\n\n        # 7. 
Define \"hidden\" config parameters that were saved for compatible classes\n        hidden_config_dict = {k: v for k, v in original_dict.items() if k not in init_dict}\n\n        return init_dict, unused_kwargs, hidden_config_dict\n\n    @classmethod\n    def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):\n        with open(json_file, \"r\", encoding=\"utf-8\") as reader:\n            text = reader.read()\n        return json.loads(text)\n\n    def __repr__(self):\n        return f\"{self.__class__.__name__} {self.to_json_string()}\"\n\n    @property\n    def config(self) -> Dict[str, Any]:\n        \"\"\"\n        Returns the config of the class as a frozen dictionary\n\n        Returns:\n            `Dict[str, Any]`: Config of the class.\n        \"\"\"\n        return self._internal_dict\n\n    def to_json_string(self) -> str:\n        \"\"\"\n        Serializes this instance to a JSON string.\n\n        Returns:\n            `str`: String containing all the attributes that make up this configuration instance in JSON format.\n        \"\"\"\n        config_dict = self._internal_dict if hasattr(self, \"_internal_dict\") else {}\n        config_dict[\"_class_name\"] = self.__class__.__name__\n        config_dict[\"_diffusers_version\"] = __version__\n\n        def to_json_saveable(value):\n            if isinstance(value, np.ndarray):\n                value = value.tolist()\n            elif isinstance(value, PosixPath):\n                value = str(value)\n            return value\n\n        config_dict = {k: to_json_saveable(v) for k, v in config_dict.items()}\n        # Don't save \"_ignore_files\"\n        config_dict.pop(\"_ignore_files\", None)\n\n        return json.dumps(config_dict, indent=2, sort_keys=True) + \"\\n\"\n\n    def to_json_file(self, json_file_path: Union[str, os.PathLike]):\n        \"\"\"\n        Save this instance to a JSON file.\n\n        Args:\n            json_file_path (`str` or `os.PathLike`):\n                Path to the JSON file in which this configuration instance's parameters will be saved.\n        \"\"\"\n        with open(json_file_path, \"w\", encoding=\"utf-8\") as writer:\n            writer.write(self.to_json_string())\n\n\ndef register_to_config(init):\n    r\"\"\"\n    Decorator to apply on the init of classes inheriting from [`ConfigMixin`] so that all the arguments are\n    automatically sent to `self.register_for_config`. 
To ignore a specific argument accepted by the init but that\n    shouldn't be registered in the config, use the `ignore_for_config` class variable\n\n    Warning: Once decorated, all private arguments (beginning with an underscore) are trashed and not sent to the init!\n    \"\"\"\n\n    @functools.wraps(init)\n    def inner_init(self, *args, **kwargs):\n        # Ignore private kwargs in the init.\n        init_kwargs = {k: v for k, v in kwargs.items() if not k.startswith(\"_\")}\n        config_init_kwargs = {k: v for k, v in kwargs.items() if k.startswith(\"_\")}\n        if not isinstance(self, ConfigMixin):\n            raise RuntimeError(\n                f\"`@register_for_config` was applied to {self.__class__.__name__} init method, but this class does \"\n                \"not inherit from `ConfigMixin`.\"\n            )\n\n        ignore = getattr(self, \"ignore_for_config\", [])\n        # Get positional arguments aligned with kwargs\n        new_kwargs = {}\n        signature = inspect.signature(init)\n        parameters = {\n            name: p.default for i, (name, p) in enumerate(signature.parameters.items()) if i > 0 and name not in ignore\n        }\n        for arg, name in zip(args, parameters.keys()):\n            new_kwargs[name] = arg\n\n        # Then add all kwargs\n        new_kwargs.update(\n            {\n                k: init_kwargs.get(k, default)\n                for k, default in parameters.items()\n                if k not in ignore and k not in new_kwargs\n            }\n        )\n        new_kwargs = {**config_init_kwargs, **new_kwargs}\n        getattr(self, \"register_to_config\")(**new_kwargs)\n        init(self, *args, **init_kwargs)\n\n    return inner_init\n\n\ndef flax_register_to_config(cls):\n    original_init = cls.__init__\n\n    @functools.wraps(original_init)\n    def init(self, *args, **kwargs):\n        if not isinstance(self, ConfigMixin):\n            raise RuntimeError(\n                f\"`@register_for_config` was applied to {self.__class__.__name__} init method, but this class does \"\n                \"not inherit from `ConfigMixin`.\"\n            )\n\n        # Ignore private kwargs in the init. Retrieve all passed attributes\n        init_kwargs = dict(kwargs.items())\n\n        # Retrieve default values\n        fields = dataclasses.fields(self)\n        default_kwargs = {}\n        for field in fields:\n            # ignore flax specific attributes\n            if field.name in self._flax_internal_args:\n                continue\n            if type(field.default) == dataclasses._MISSING_TYPE:\n                default_kwargs[field.name] = None\n            else:\n                default_kwargs[field.name] = getattr(self, field.name)\n\n        # Make sure init_kwargs override default kwargs\n        new_kwargs = {**default_kwargs, **init_kwargs}\n        # dtype should be part of `init_kwargs`, but not `new_kwargs`\n        if \"dtype\" in new_kwargs:\n            new_kwargs.pop(\"dtype\")\n\n        # Get positional arguments aligned with kwargs\n        for i, arg in enumerate(args):\n            name = fields[i].name\n            new_kwargs[name] = arg\n\n        getattr(self, \"register_to_config\")(**new_kwargs)\n        original_init(self, *args, **kwargs)\n\n    cls.__init__ = init\n    return cls\n"
  },
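The file above defines the whole config workflow (`@register_to_config`, `save_config`, `load_config`, `from_config`). A minimal usage sketch follows; `MySimpleScheduler`, its arguments, and the `my_scheduler` directory are illustrative placeholders, not part of the library.

```python
# Minimal sketch of the ConfigMixin workflow defined above.
from diffusers.configuration_utils import ConfigMixin, register_to_config


class MySimpleScheduler(ConfigMixin):  # hypothetical subclass for illustration
    config_name = "scheduler_config.json"  # file name used by save_config/load_config

    @register_to_config
    def __init__(self, num_train_timesteps: int = 1000, beta_start: float = 0.0001):
        # the decorator captures these arguments into self.config (a FrozenDict)
        pass


scheduler = MySimpleScheduler(num_train_timesteps=500)
print(scheduler.config.num_train_timesteps)  # 500

scheduler.save_config("my_scheduler")                    # writes my_scheduler/scheduler_config.json
config = MySimpleScheduler.load_config("my_scheduler")   # returns a plain dict
restored = MySimpleScheduler.from_config(config)         # re-instantiates with the same parameters
```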
  {
    "path": "diffusers/dependency_versions_check.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport sys\n\nfrom .dependency_versions_table import deps\nfrom .utils.versions import require_version, require_version_core\n\n\n# define which module versions we always want to check at run time\n# (usually the ones defined in `install_requires` in setup.py)\n#\n# order specific notes:\n# - tqdm must be checked before tokenizers\n\npkgs_to_check_at_runtime = \"python tqdm regex requests packaging filelock numpy tokenizers\".split()\nif sys.version_info < (3, 7):\n    pkgs_to_check_at_runtime.append(\"dataclasses\")\nif sys.version_info < (3, 8):\n    pkgs_to_check_at_runtime.append(\"importlib_metadata\")\n\nfor pkg in pkgs_to_check_at_runtime:\n    if pkg in deps:\n        if pkg == \"tokenizers\":\n            # must be loaded here, or else tqdm check may fail\n            from .utils import is_tokenizers_available\n\n            if not is_tokenizers_available():\n                continue  # not required, check version only if installed\n\n        require_version_core(deps[pkg])\n    else:\n        raise ValueError(f\"can't find {pkg} in {deps.keys()}, check dependency_versions_table.py\")\n\n\ndef dep_version_check(pkg, hint=None):\n    require_version(deps[pkg], hint)\n"
  },
  {
    "path": "diffusers/dependency_versions_table.py",
    "content": "# THIS FILE HAS BEEN AUTOGENERATED. To update:\n# 1. modify the `_deps` dict in setup.py\n# 2. run `make deps_table_update``\ndeps = {\n    \"Pillow\": \"Pillow\",\n    \"accelerate\": \"accelerate>=0.11.0\",\n    \"compel\": \"compel==0.1.8\",\n    \"black\": \"black~=23.1\",\n    \"datasets\": \"datasets\",\n    \"filelock\": \"filelock\",\n    \"flax\": \"flax>=0.4.1\",\n    \"hf-doc-builder\": \"hf-doc-builder>=0.3.0\",\n    \"huggingface-hub\": \"huggingface-hub>=0.13.2\",\n    \"requests-mock\": \"requests-mock==1.10.0\",\n    \"importlib_metadata\": \"importlib_metadata\",\n    \"isort\": \"isort>=5.5.4\",\n    \"jax\": \"jax>=0.2.8,!=0.3.2\",\n    \"jaxlib\": \"jaxlib>=0.1.65\",\n    \"Jinja2\": \"Jinja2\",\n    \"k-diffusion\": \"k-diffusion>=0.0.12\",\n    \"librosa\": \"librosa\",\n    \"numpy\": \"numpy\",\n    \"omegaconf\": \"omegaconf\",\n    \"parameterized\": \"parameterized\",\n    \"protobuf\": \"protobuf>=3.20.3,<4\",\n    \"pytest\": \"pytest\",\n    \"pytest-timeout\": \"pytest-timeout\",\n    \"pytest-xdist\": \"pytest-xdist\",\n    \"ruff\": \"ruff>=0.0.241\",\n    \"safetensors\": \"safetensors\",\n    \"sentencepiece\": \"sentencepiece>=0.1.91,!=0.1.92\",\n    \"scipy\": \"scipy\",\n    \"regex\": \"regex!=2019.12.17\",\n    \"requests\": \"requests\",\n    \"tensorboard\": \"tensorboard\",\n    \"torch\": \"torch>=1.4\",\n    \"torchvision\": \"torchvision\",\n    \"transformers\": \"transformers>=4.25.1\",\n    \"urllib3\": \"urllib3<=2.0.0\",\n}\n"
  },
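Together, the two files above pin dependency ranges and check them when the package is imported. A small sketch of how the exported helper might be called; the `"torch"` and `"transformers"` keys and their pins come from the table above, and the hint string is only an example.

```python
# Illustrative use of the helper defined in dependency_versions_check.py:
# verify an installed package against the range pinned in dependency_versions_table.deps.
from diffusers.dependency_versions_check import dep_version_check

# raises if the installed torch does not satisfy the pinned spec ("torch>=1.4")
dep_version_check("torch")

# an optional hint is forwarded to require_version for a friendlier error message
dep_version_check("transformers", hint="pip install -U transformers")
```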
  {
    "path": "diffusers/experimental/README.md",
    "content": "# 🧨 Diffusers Experimental\n\nWe are adding experimental code to support novel applications and usages of the Diffusers library.\nCurrently, the following experiments are supported:\n* Reinforcement learning via an implementation of the [Diffuser](https://arxiv.org/abs/2205.09991) model."
  },
  {
    "path": "diffusers/experimental/__init__.py",
    "content": "from .rl import ValueGuidedRLPipeline\n"
  },
  {
    "path": "diffusers/experimental/rl/__init__.py",
    "content": "from .value_guided_sampling import ValueGuidedRLPipeline\n"
  },
  {
    "path": "diffusers/experimental/rl/value_guided_sampling.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport torch\nimport tqdm\n\nfrom ...models.unet_1d import UNet1DModel\nfrom ...pipelines import DiffusionPipeline\nfrom ...utils import randn_tensor\nfrom ...utils.dummy_pt_objects import DDPMScheduler\n\n\nclass ValueGuidedRLPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n    Pipeline for sampling actions from a diffusion model trained to predict sequences of states.\n\n    Original implementation inspired by this repository: https://github.com/jannerm/diffuser.\n\n    Parameters:\n        value_function ([`UNet1DModel`]): A specialized UNet for fine-tuning trajectories base on reward.\n        unet ([`UNet1DModel`]): U-Net architecture to denoise the encoded trajectories.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded trajectories. Default for this\n            application is [`DDPMScheduler`].\n        env: An environment following the OpenAI gym API to act in. 
For now only Hopper has pretrained models.\n    \"\"\"\n\n    def __init__(\n        self,\n        value_function: UNet1DModel,\n        unet: UNet1DModel,\n        scheduler: DDPMScheduler,\n        env,\n    ):\n        super().__init__()\n        self.value_function = value_function\n        self.unet = unet\n        self.scheduler = scheduler\n        self.env = env\n        self.data = env.get_dataset()\n        self.means = {}\n        for key in self.data.keys():\n            try:\n                self.means[key] = self.data[key].mean()\n            except:  # noqa: E722\n                pass\n        self.stds = {}\n        for key in self.data.keys():\n            try:\n                self.stds[key] = self.data[key].std()\n            except:  # noqa: E722\n                pass\n        self.state_dim = env.observation_space.shape[0]\n        self.action_dim = env.action_space.shape[0]\n\n    def normalize(self, x_in, key):\n        return (x_in - self.means[key]) / self.stds[key]\n\n    def de_normalize(self, x_in, key):\n        return x_in * self.stds[key] + self.means[key]\n\n    def to_torch(self, x_in):\n        if type(x_in) is dict:\n            return {k: self.to_torch(v) for k, v in x_in.items()}\n        elif torch.is_tensor(x_in):\n            return x_in.to(self.unet.device)\n        return torch.tensor(x_in, device=self.unet.device)\n\n    def reset_x0(self, x_in, cond, act_dim):\n        for key, val in cond.items():\n            x_in[:, key, act_dim:] = val.clone()\n        return x_in\n\n    def run_diffusion(self, x, conditions, n_guide_steps, scale):\n        batch_size = x.shape[0]\n        y = None\n        for i in tqdm.tqdm(self.scheduler.timesteps):\n            # create batch of timesteps to pass into model\n            timesteps = torch.full((batch_size,), i, device=self.unet.device, dtype=torch.long)\n            for _ in range(n_guide_steps):\n                with torch.enable_grad():\n                    x.requires_grad_()\n\n                    # permute to match dimension for pre-trained models\n                    y = self.value_function(x.permute(0, 2, 1), timesteps).sample\n                    grad = torch.autograd.grad([y.sum()], [x])[0]\n\n                    posterior_variance = self.scheduler._get_variance(i)\n                    model_std = torch.exp(0.5 * posterior_variance)\n                    grad = model_std * grad\n\n                grad[timesteps < 2] = 0\n                x = x.detach()\n                x = x + scale * grad\n                x = self.reset_x0(x, conditions, self.action_dim)\n\n            prev_x = self.unet(x.permute(0, 2, 1), timesteps).sample.permute(0, 2, 1)\n\n            # TODO: verify deprecation of this kwarg\n            x = self.scheduler.step(prev_x, i, x, predict_epsilon=False)[\"prev_sample\"]\n\n            # apply conditions to the trajectory (set the initial state)\n            x = self.reset_x0(x, conditions, self.action_dim)\n            x = self.to_torch(x)\n        return x, y\n\n    def __call__(self, obs, batch_size=64, planning_horizon=32, n_guide_steps=2, scale=0.1):\n        # normalize the observations and create  batch dimension\n        obs = self.normalize(obs, \"observations\")\n        obs = obs[None].repeat(batch_size, axis=0)\n\n        conditions = {0: self.to_torch(obs)}\n        shape = (batch_size, planning_horizon, self.state_dim + self.action_dim)\n\n        # generate initial noise and apply our conditions (to make the trajectories start at current state)\n        x1 = 
randn_tensor(shape, device=self.unet.device)\n        x = self.reset_x0(x1, conditions, self.action_dim)\n        x = self.to_torch(x)\n\n        # run the diffusion process\n        x, y = self.run_diffusion(x, conditions, n_guide_steps, scale)\n\n        # sort output trajectories by value\n        sorted_idx = y.argsort(0, descending=True).squeeze()\n        sorted_values = x[sorted_idx]\n        actions = sorted_values[:, :, : self.action_dim]\n        actions = actions.detach().cpu().numpy()\n        denorm_actions = self.de_normalize(actions, key=\"actions\")\n\n        # select the action with the highest value\n        if y is not None:\n            selected_index = 0\n        else:\n            # if we didn't run value guiding, select a random action\n            selected_index = np.random.randint(0, batch_size)\n\n        denorm_actions = denorm_actions[selected_index, 0]\n        return denorm_actions\n"
  },
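A hedged usage sketch for the pipeline above. The environment id, checkpoint names, and scheduler settings are illustrative assumptions (the docstring only notes that Hopper has pretrained models), and a classic Gym/D4RL-style API exposing `get_dataset()` is assumed.

```python
# Illustrative only: the repo ids below are placeholders, and the env must expose
# get_dataset(), observation_space, and action_space (D4RL-style).
import d4rl  # noqa: F401  # assumed installed; registers the hopper-medium-v2 env
import gym

from diffusers import DDPMScheduler, UNet1DModel
from diffusers.experimental import ValueGuidedRLPipeline

env = gym.make("hopper-medium-v2")  # assumed environment id

value_function = UNet1DModel.from_pretrained("my-org/hopper-value-function")  # placeholder repo id
unet = UNet1DModel.from_pretrained("my-org/hopper-planning-unet")             # placeholder repo id
scheduler = DDPMScheduler(num_train_timesteps=100)

pipeline = ValueGuidedRLPipeline(value_function=value_function, unet=unet, scheduler=scheduler, env=env)

obs = env.reset()  # classic gym API assumed (reset returns only the observation)
action = pipeline(obs, batch_size=64, planning_horizon=32, n_guide_steps=2, scale=0.1)
obs, reward, done, info = env.step(action)
```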
  {
    "path": "diffusers/image_processor.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom PIL import Image\n\nfrom .configuration_utils import ConfigMixin, register_to_config\nfrom .utils import CONFIG_NAME, PIL_INTERPOLATION, deprecate\n\n\nclass VaeImageProcessor(ConfigMixin):\n    \"\"\"\n    Image Processor for VAE\n\n    Args:\n        do_resize (`bool`, *optional*, defaults to `True`):\n            Whether to downscale the image's (height, width) dimensions to multiples of `vae_scale_factor`.\n        vae_scale_factor (`int`, *optional*, defaults to `8`):\n            VAE scale factor. If `do_resize` is True, the image will be automatically resized to multiples of this\n            factor.\n        resample (`str`, *optional*, defaults to `lanczos`):\n            Resampling filter to use when resizing the image.\n        do_normalize (`bool`, *optional*, defaults to `True`):\n            Whether to normalize the image to [-1,1]\n    \"\"\"\n\n    config_name = CONFIG_NAME\n\n    @register_to_config\n    def __init__(\n        self,\n        do_resize: bool = True,\n        vae_scale_factor: int = 8,\n        resample: str = \"lanczos\",\n        do_normalize: bool = True,\n    ):\n        super().__init__()\n\n    @staticmethod\n    def numpy_to_pil(images):\n        \"\"\"\n        Convert a numpy image or a batch of images to a PIL image.\n        \"\"\"\n        if images.ndim == 3:\n            images = images[None, ...]\n        images = (images * 255).round().astype(\"uint8\")\n        if images.shape[-1] == 1:\n            # special case for grayscale (single channel) images\n            pil_images = [Image.fromarray(image.squeeze(), mode=\"L\") for image in images]\n        else:\n            pil_images = [Image.fromarray(image) for image in images]\n\n        return pil_images\n\n    @staticmethod\n    def numpy_to_pt(images):\n        \"\"\"\n        Convert a numpy image to a pytorch tensor\n        \"\"\"\n        if images.ndim == 3:\n            images = images[..., None]\n\n        images = torch.from_numpy(images.transpose(0, 3, 1, 2))\n        return images\n\n    @staticmethod\n    def pt_to_numpy(images):\n        \"\"\"\n        Convert a pytorch tensor to a numpy image\n        \"\"\"\n        images = images.cpu().permute(0, 2, 3, 1).float().numpy()\n        return images\n\n    @staticmethod\n    def normalize(images):\n        \"\"\"\n        Normalize an image array to [-1,1]\n        \"\"\"\n        return 2.0 * images - 1.0\n\n    @staticmethod\n    def denormalize(images):\n        \"\"\"\n        Denormalize an image array to [0,1]\n        \"\"\"\n        return (images / 2 + 0.5).clamp(0, 1)\n\n    def resize(self, images: PIL.Image.Image) -> PIL.Image.Image:\n        \"\"\"\n        Resize a PIL image. 
Both height and width will be downscaled to the next integer multiple of `vae_scale_factor`\n        \"\"\"\n        w, h = images.size\n        w, h = (x - x % self.config.vae_scale_factor for x in (w, h))  # resize to integer multiple of vae_scale_factor\n        images = images.resize((w, h), resample=PIL_INTERPOLATION[self.config.resample])\n        return images\n\n    def preprocess(\n        self,\n        image: Union[torch.FloatTensor, PIL.Image.Image, np.ndarray],\n    ) -> torch.Tensor:\n        \"\"\"\n        Preprocess the image input, accepted formats are PIL images, numpy arrays or pytorch tensors\"\n        \"\"\"\n        supported_formats = (PIL.Image.Image, np.ndarray, torch.Tensor)\n        if isinstance(image, supported_formats):\n            image = [image]\n        elif not (isinstance(image, list) and all(isinstance(i, supported_formats) for i in image)):\n            raise ValueError(\n                f\"Input is in incorrect format: {[type(i) for i in image]}. Currently, we only support {', '.join(supported_formats)}\"\n            )\n\n        if isinstance(image[0], PIL.Image.Image):\n            if self.config.do_resize:\n                image = [self.resize(i) for i in image]\n            image = [np.array(i).astype(np.float32) / 255.0 for i in image]\n            image = np.stack(image, axis=0)  # to np\n            image = self.numpy_to_pt(image)  # to pt\n\n        elif isinstance(image[0], np.ndarray):\n            image = np.concatenate(image, axis=0) if image[0].ndim == 4 else np.stack(image, axis=0)\n            image = self.numpy_to_pt(image)\n            _, _, height, width = image.shape\n            if self.config.do_resize and (\n                height % self.config.vae_scale_factor != 0 or width % self.config.vae_scale_factor != 0\n            ):\n                raise ValueError(\n                    f\"Currently we only support resizing for PIL image - please resize your numpy array to be divisible by {self.config.vae_scale_factor}\"\n                    f\"currently the sizes are {height} and {width}. You can also pass a PIL image instead to use resize option in VAEImageProcessor\"\n                )\n\n        elif isinstance(image[0], torch.Tensor):\n            image = torch.cat(image, axis=0) if image[0].ndim == 4 else torch.stack(image, axis=0)\n            _, _, height, width = image.shape\n            if self.config.do_resize and (\n                height % self.config.vae_scale_factor != 0 or width % self.config.vae_scale_factor != 0\n            ):\n                raise ValueError(\n                    f\"Currently we only support resizing for PIL image - please resize your pytorch tensor to be divisible by {self.config.vae_scale_factor}\"\n                    f\"currently the sizes are {height} and {width}. You can also pass a PIL image instead to use resize option in VAEImageProcessor\"\n                )\n\n        # expected range [0,1], normalize to [-1,1]\n        do_normalize = self.config.do_normalize\n        if image.min() < 0:\n            warnings.warn(\n                \"Passing `image` as torch tensor with value range in [-1,1] is deprecated. The expected value range for image tensor is [0,1] \"\n                f\"when passing as pytorch tensor or numpy Array. 
You passed `image` with value range [{image.min()},{image.max()}]\",\n                FutureWarning,\n            )\n            do_normalize = False\n\n        if do_normalize:\n            image = self.normalize(image)\n\n        return image\n\n    def postprocess(\n        self,\n        image: torch.FloatTensor,\n        output_type: str = \"pil\",\n        do_denormalize: Optional[List[bool]] = None,\n    ):\n        if not isinstance(image, torch.Tensor):\n            raise ValueError(\n                f\"Input for postprocessing is in incorrect format: {type(image)}. We only support pytorch tensor\"\n            )\n        if output_type not in [\"latent\", \"pt\", \"np\", \"pil\"]:\n            deprecation_message = (\n                f\"the output_type {output_type} is outdated and has been set to `np`. Please make sure to set it to one of these instead: \"\n                \"`pil`, `np`, `pt`, `latent`\"\n            )\n            deprecate(\"Unsupported output_type\", \"1.0.0\", deprecation_message, standard_warn=False)\n            output_type = \"np\"\n\n        if output_type == \"latent\":\n            return image\n\n        if do_denormalize is None:\n            do_denormalize = [self.config.do_normalize] * image.shape[0]\n\n        image = torch.stack(\n            [self.denormalize(image[i]) if do_denormalize[i] else image[i] for i in range(image.shape[0])]\n        )\n\n        if output_type == \"pt\":\n            return image\n\n        image = self.pt_to_numpy(image)\n\n        if output_type == \"np\":\n            return image\n\n        if output_type == \"pil\":\n            return self.numpy_to_pil(image)\n"
  },
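A short round-trip sketch for the processor above, assuming a local `input.png` (the file names are only examples): `preprocess` resizes PIL inputs to a multiple of `vae_scale_factor` and maps pixel values from [0, 1] to [-1, 1], and `postprocess` denormalizes back and converts to the requested output type.

```python
# Round-trip sketch for VaeImageProcessor as defined above; "input.png" is a placeholder path.
from PIL import Image

from diffusers.image_processor import VaeImageProcessor

processor = VaeImageProcessor(do_resize=True, vae_scale_factor=8)

pil_image = Image.open("input.png").convert("RGB")
tensor = processor.preprocess(pil_image)   # torch.FloatTensor of shape (1, 3, H, W), values in [-1, 1]

images = processor.postprocess(tensor, output_type="pil")  # list of PIL images, mapped back to [0, 1]
images[0].save("roundtrip.png")
```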
  {
    "path": "diffusers/loaders.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport os\nimport warnings\nfrom collections import defaultdict\nfrom pathlib import Path\nfrom typing import Callable, Dict, List, Optional, Union\n\nimport torch\nfrom huggingface_hub import hf_hub_download\n\nfrom .models.attention_processor import (\n    AttnAddedKVProcessor,\n    AttnAddedKVProcessor2_0,\n    CustomDiffusionAttnProcessor,\n    CustomDiffusionXFormersAttnProcessor,\n    LoRAAttnAddedKVProcessor,\n    LoRAAttnProcessor,\n    SlicedAttnAddedKVProcessor,\n)\nfrom .utils import (\n    DIFFUSERS_CACHE,\n    HF_HUB_OFFLINE,\n    TEXT_ENCODER_TARGET_MODULES,\n    _get_model_file,\n    deprecate,\n    is_safetensors_available,\n    is_transformers_available,\n    logging,\n)\n\n\nif is_safetensors_available():\n    import safetensors\n\nif is_transformers_available():\n    from transformers import PreTrainedModel, PreTrainedTokenizer\n\n\nlogger = logging.get_logger(__name__)\n\nTEXT_ENCODER_NAME = \"text_encoder\"\nUNET_NAME = \"unet\"\n\nLORA_WEIGHT_NAME = \"pytorch_lora_weights.bin\"\nLORA_WEIGHT_NAME_SAFE = \"pytorch_lora_weights.safetensors\"\n\nTEXT_INVERSION_NAME = \"learned_embeds.bin\"\nTEXT_INVERSION_NAME_SAFE = \"learned_embeds.safetensors\"\n\nCUSTOM_DIFFUSION_WEIGHT_NAME = \"pytorch_custom_diffusion_weights.bin\"\nCUSTOM_DIFFUSION_WEIGHT_NAME_SAFE = \"pytorch_custom_diffusion_weights.safetensors\"\n\n\nclass AttnProcsLayers(torch.nn.Module):\n    def __init__(self, state_dict: Dict[str, torch.Tensor]):\n        super().__init__()\n        self.layers = torch.nn.ModuleList(state_dict.values())\n        self.mapping = dict(enumerate(state_dict.keys()))\n        self.rev_mapping = {v: k for k, v in enumerate(state_dict.keys())}\n\n        # .processor for unet, .k_proj, \".q_proj\", \".v_proj\", and \".out_proj\" for text encoder\n        self.split_keys = [\".processor\", \".k_proj\", \".q_proj\", \".v_proj\", \".out_proj\"]\n\n        # we add a hook to state_dict() and load_state_dict() so that the\n        # naming fits with `unet.attn_processors`\n        def map_to(module, state_dict, *args, **kwargs):\n            new_state_dict = {}\n            for key, value in state_dict.items():\n                num = int(key.split(\".\")[1])  # 0 is always \"layers\"\n                new_key = key.replace(f\"layers.{num}\", module.mapping[num])\n                new_state_dict[new_key] = value\n\n            return new_state_dict\n\n        def remap_key(key, state_dict):\n            for k in self.split_keys:\n                if k in key:\n                    return key.split(k)[0] + k\n\n            raise ValueError(\n                f\"There seems to be a problem with the state_dict: {set(state_dict.keys())}. 
{key} has to have one of {self.split_keys}.\"\n            )\n\n        def map_from(module, state_dict, *args, **kwargs):\n            all_keys = list(state_dict.keys())\n            for key in all_keys:\n                replace_key = remap_key(key, state_dict)\n                new_key = key.replace(replace_key, f\"layers.{module.rev_mapping[replace_key]}\")\n                state_dict[new_key] = state_dict[key]\n                del state_dict[key]\n\n        self._register_state_dict_hook(map_to)\n        self._register_load_state_dict_pre_hook(map_from, with_module=True)\n\n\nclass UNet2DConditionLoadersMixin:\n    text_encoder_name = TEXT_ENCODER_NAME\n    unet_name = UNET_NAME\n\n    def load_attn_procs(self, pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]], **kwargs):\n        r\"\"\"\n        Load pretrained attention processor layers into `UNet2DConditionModel`. Attention processor layers have to be\n        defined in\n        [`cross_attention.py`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py)\n        and be a `torch.nn.Module` class.\n\n        <Tip warning={true}>\n\n        This function is experimental and might change in the future.\n\n        </Tip>\n\n        Parameters:\n            pretrained_model_name_or_path_or_dict (`str` or `os.PathLike` or `dict`):\n                Can be either:\n\n                    - A string, the *model id* of a pretrained model hosted inside a model repo on huggingface.co.\n                      Valid model ids should have an organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing model weights saved using [`~ModelMixin.save_config`], e.g.,\n                      `./my_model_directory/`.\n                    - A [torch state\n                      dict](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict).\n\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `diffusers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            subfolder (`str`, *optional*, defaults to `\"\"`):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information.\n\n        <Tip>\n\n        It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n        models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n        \"\"\"\n\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        force_download = kwargs.pop(\"force_download\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n        weight_name = kwargs.pop(\"weight_name\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None)\n\n        if use_safetensors and not is_safetensors_available():\n            raise ValueError(\n                \"`use_safetensors`=True but safetensors is not installed. 
Please install safetensors with `pip install safetenstors\"\n            )\n\n        allow_pickle = False\n        if use_safetensors is None:\n            use_safetensors = is_safetensors_available()\n            allow_pickle = True\n\n        user_agent = {\n            \"file_type\": \"attn_procs_weights\",\n            \"framework\": \"pytorch\",\n        }\n\n        model_file = None\n        if not isinstance(pretrained_model_name_or_path_or_dict, dict):\n            # Let's first try to load .safetensors weights\n            if (use_safetensors and weight_name is None) or (\n                weight_name is not None and weight_name.endswith(\".safetensors\")\n            ):\n                try:\n                    model_file = _get_model_file(\n                        pretrained_model_name_or_path_or_dict,\n                        weights_name=weight_name or LORA_WEIGHT_NAME_SAFE,\n                        cache_dir=cache_dir,\n                        force_download=force_download,\n                        resume_download=resume_download,\n                        proxies=proxies,\n                        local_files_only=local_files_only,\n                        use_auth_token=use_auth_token,\n                        revision=revision,\n                        subfolder=subfolder,\n                        user_agent=user_agent,\n                    )\n                    state_dict = safetensors.torch.load_file(model_file, device=\"cpu\")\n                except IOError as e:\n                    if not allow_pickle:\n                        raise e\n                    # try loading non-safetensors weights\n                    pass\n            if model_file is None:\n                model_file = _get_model_file(\n                    pretrained_model_name_or_path_or_dict,\n                    weights_name=weight_name or LORA_WEIGHT_NAME,\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    resume_download=resume_download,\n                    proxies=proxies,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    revision=revision,\n                    subfolder=subfolder,\n                    user_agent=user_agent,\n                )\n                state_dict = torch.load(model_file, map_location=\"cpu\")\n        else:\n            state_dict = pretrained_model_name_or_path_or_dict\n\n        # fill attn processors\n        attn_processors = {}\n\n        is_lora = all(\"lora\" in k for k in state_dict.keys())\n        is_custom_diffusion = any(\"custom_diffusion\" in k for k in state_dict.keys())\n\n        if is_lora:\n            is_new_lora_format = all(\n                key.startswith(self.unet_name) or key.startswith(self.text_encoder_name) for key in state_dict.keys()\n            )\n            if is_new_lora_format:\n                # Strip the `\"unet\"` prefix.\n                is_text_encoder_present = any(key.startswith(self.text_encoder_name) for key in state_dict.keys())\n                if is_text_encoder_present:\n                    warn_message = \"The state_dict contains LoRA params corresponding to the text encoder which are not being used here. 
To use both UNet and text encoder related LoRA params, use [`pipe.load_lora_weights()`](https://huggingface.co/docs/diffusers/main/en/api/loaders#diffusers.loaders.LoraLoaderMixin.load_lora_weights).\"\n                    warnings.warn(warn_message)\n                unet_keys = [k for k in state_dict.keys() if k.startswith(self.unet_name)]\n                state_dict = {k.replace(f\"{self.unet_name}.\", \"\"): v for k, v in state_dict.items() if k in unet_keys}\n\n            lora_grouped_dict = defaultdict(dict)\n            for key, value in state_dict.items():\n                attn_processor_key, sub_key = \".\".join(key.split(\".\")[:-3]), \".\".join(key.split(\".\")[-3:])\n                lora_grouped_dict[attn_processor_key][sub_key] = value\n\n            for key, value_dict in lora_grouped_dict.items():\n                rank = value_dict[\"to_k_lora.down.weight\"].shape[0]\n                hidden_size = value_dict[\"to_k_lora.up.weight\"].shape[0]\n\n                attn_processor = self\n                for sub_key in key.split(\".\"):\n                    attn_processor = getattr(attn_processor, sub_key)\n\n                if isinstance(\n                    attn_processor, (AttnAddedKVProcessor, SlicedAttnAddedKVProcessor, AttnAddedKVProcessor2_0)\n                ):\n                    cross_attention_dim = value_dict[\"add_k_proj_lora.down.weight\"].shape[1]\n                    attn_processor_class = LoRAAttnAddedKVProcessor\n                else:\n                    cross_attention_dim = value_dict[\"to_k_lora.down.weight\"].shape[1]\n                    attn_processor_class = LoRAAttnProcessor\n\n                attn_processors[key] = attn_processor_class(\n                    hidden_size=hidden_size, cross_attention_dim=cross_attention_dim, rank=rank\n                )\n                attn_processors[key].load_state_dict(value_dict)\n        elif is_custom_diffusion:\n            custom_diffusion_grouped_dict = defaultdict(dict)\n            for key, value in state_dict.items():\n                if len(value) == 0:\n                    custom_diffusion_grouped_dict[key] = {}\n                else:\n                    if \"to_out\" in key:\n                        attn_processor_key, sub_key = \".\".join(key.split(\".\")[:-3]), \".\".join(key.split(\".\")[-3:])\n                    else:\n                        attn_processor_key, sub_key = \".\".join(key.split(\".\")[:-2]), \".\".join(key.split(\".\")[-2:])\n                    custom_diffusion_grouped_dict[attn_processor_key][sub_key] = value\n\n            for key, value_dict in custom_diffusion_grouped_dict.items():\n                if len(value_dict) == 0:\n                    attn_processors[key] = CustomDiffusionAttnProcessor(\n                        train_kv=False, train_q_out=False, hidden_size=None, cross_attention_dim=None\n                    )\n                else:\n                    cross_attention_dim = value_dict[\"to_k_custom_diffusion.weight\"].shape[1]\n                    hidden_size = value_dict[\"to_k_custom_diffusion.weight\"].shape[0]\n                    train_q_out = True if \"to_q_custom_diffusion.weight\" in value_dict else False\n                    attn_processors[key] = CustomDiffusionAttnProcessor(\n                        train_kv=True,\n                        train_q_out=train_q_out,\n                        hidden_size=hidden_size,\n                        cross_attention_dim=cross_attention_dim,\n                    )\n                    
attn_processors[key].load_state_dict(value_dict)\n        else:\n            raise ValueError(\n                f\"{model_file} does not seem to be in the correct format expected by LoRA or Custom Diffusion training.\"\n            )\n\n        # set correct dtype & device\n        attn_processors = {k: v.to(device=self.device, dtype=self.dtype) for k, v in attn_processors.items()}\n\n        # set layers\n        self.set_attn_processor(attn_processors)\n\n    def save_attn_procs(\n        self,\n        save_directory: Union[str, os.PathLike],\n        is_main_process: bool = True,\n        weight_name: str = None,\n        save_function: Callable = None,\n        safe_serialization: bool = False,\n        **kwargs,\n    ):\n        r\"\"\"\n        Save an attention processor to a directory, so that it can be re-loaded using the\n        [`~loaders.UNet2DConditionLoadersMixin.load_attn_procs`] method.\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. Will be created if it doesn't exist.\n            is_main_process (`bool`, *optional*, defaults to `True`):\n                Whether the process calling this is the main process or not. Useful when in distributed training like\n                TPUs and need to call this function on all processes. In this case, set `is_main_process=True` only on\n                the main process to avoid race conditions.\n            save_function (`Callable`):\n                The function to use to save the state dictionary. Useful on distributed training like TPUs when one\n                need to replace `torch.save` by another method. Can be configured with the environment variable\n                `DIFFUSERS_SAVE_MODE`.\n        \"\"\"\n        weight_name = weight_name or deprecate(\n            \"weights_name\",\n            \"0.18.0\",\n            \"`weights_name` is deprecated, please use `weight_name` instead.\",\n            take_from=kwargs,\n        )\n        if os.path.isfile(save_directory):\n            logger.error(f\"Provided path ({save_directory}) should be a directory, not a file\")\n            return\n\n        if save_function is None:\n            if safe_serialization:\n\n                def save_function(weights, filename):\n                    return safetensors.torch.save_file(weights, filename, metadata={\"format\": \"pt\"})\n\n            else:\n                save_function = torch.save\n\n        os.makedirs(save_directory, exist_ok=True)\n\n        is_custom_diffusion = any(\n            isinstance(x, (CustomDiffusionAttnProcessor, CustomDiffusionXFormersAttnProcessor))\n            for (_, x) in self.attn_processors.items()\n        )\n        if is_custom_diffusion:\n            model_to_save = AttnProcsLayers(\n                {\n                    y: x\n                    for (y, x) in self.attn_processors.items()\n                    if isinstance(x, (CustomDiffusionAttnProcessor, CustomDiffusionXFormersAttnProcessor))\n                }\n            )\n            state_dict = model_to_save.state_dict()\n            for name, attn in self.attn_processors.items():\n                if len(attn.state_dict()) == 0:\n                    state_dict[name] = {}\n        else:\n            model_to_save = AttnProcsLayers(self.attn_processors)\n            state_dict = model_to_save.state_dict()\n\n        if weight_name is None:\n            if safe_serialization:\n                weight_name = CUSTOM_DIFFUSION_WEIGHT_NAME_SAFE if is_custom_diffusion 
else LORA_WEIGHT_NAME_SAFE\n            else:\n                weight_name = CUSTOM_DIFFUSION_WEIGHT_NAME if is_custom_diffusion else LORA_WEIGHT_NAME\n\n        # Save the model\n        save_function(state_dict, os.path.join(save_directory, weight_name))\n        logger.info(f\"Model weights saved in {os.path.join(save_directory, weight_name)}\")\n\n\nclass TextualInversionLoaderMixin:\n    r\"\"\"\n    Mixin class for loading textual inversion tokens and embeddings to the tokenizer and text encoder.\n    \"\"\"\n\n    def maybe_convert_prompt(self, prompt: Union[str, List[str]], tokenizer: \"PreTrainedTokenizer\"):\n        r\"\"\"\n        Maybe convert a prompt into a \"multi vector\"-compatible prompt. If the prompt includes a token that corresponds\n        to a multi-vector textual inversion embedding, this function will process the prompt so that the special token\n        is replaced with multiple special tokens each corresponding to one of the vectors. If the prompt has no textual\n        inversion token or a textual inversion token that is a single vector, the input prompt is simply returned.\n\n        Parameters:\n            prompt (`str` or list of `str`):\n                The prompt or prompts to guide the image generation.\n            tokenizer (`PreTrainedTokenizer`):\n                The tokenizer responsible for encoding the prompt into input tokens.\n\n        Returns:\n            `str` or list of `str`: The converted prompt\n        \"\"\"\n        if not isinstance(prompt, List):\n            prompts = [prompt]\n        else:\n            prompts = prompt\n\n        prompts = [self._maybe_convert_prompt(p, tokenizer) for p in prompts]\n\n        if not isinstance(prompt, List):\n            return prompts[0]\n\n        return prompts\n\n    def _maybe_convert_prompt(self, prompt: str, tokenizer: \"PreTrainedTokenizer\"):\n        r\"\"\"\n        Maybe convert a prompt into a \"multi vector\"-compatible prompt. If the prompt includes a token that corresponds\n        to a multi-vector textual inversion embedding, this function will process the prompt so that the special token\n        is replaced with multiple special tokens each corresponding to one of the vectors. If the prompt has no textual\n        inversion token or a textual inversion token that is a single vector, the input prompt is simply returned.\n\n        Parameters:\n            prompt (`str`):\n                The prompt to guide the image generation.\n            tokenizer (`PreTrainedTokenizer`):\n                The tokenizer responsible for encoding the prompt into input tokens.\n\n        Returns:\n            `str`: The converted prompt\n        \"\"\"\n        tokens = tokenizer.tokenize(prompt)\n        for token in tokens:\n            if token in tokenizer.added_tokens_encoder:\n                replacement = token\n                i = 1\n                while f\"{token}_{i}\" in tokenizer.added_tokens_encoder:\n                    replacement += f\" {token}_{i}\"\n                    i += 1\n\n                prompt = prompt.replace(token, replacement)\n\n        return prompt\n\n    def load_textual_inversion(\n        self,\n        pretrained_model_name_or_path: Union[str, List[str]],\n        token: Optional[Union[str, List[str]]] = None,\n        **kwargs,\n    ):\n        r\"\"\"\n        Load textual inversion embeddings into the text encoder of stable diffusion pipelines. 
Both `diffusers` and\n        `Automatic1111` formats are supported (see example below).\n\n        <Tip warning={true}>\n\n        This function is experimental and might change in the future.\n\n        </Tip>\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike` or `List[str or os.PathLike]`):\n                Can be either:\n\n                    - A string, the *model id* of a pretrained model hosted inside a model repo on huggingface.co.\n                      Valid model ids should have an organization name, like\n                      `\"sd-concepts-library/low-poly-hd-logos-icons\"`.\n                    - A path to a *directory* containing textual inversion weights, e.g.\n                      `./my_text_inversion_directory/`.\n                    - A path to a *file* containing textual inversion weights, e.g. `./my_text_inversions.pt`.\n\n                Or a list of those elements.\n            token (`str` or `List[str]`, *optional*):\n                Override the token to use for the textual inversion weights. If `pretrained_model_name_or_path` is a\n                list, then `token` must also be a list of equal length.\n            weight_name (`str`, *optional*):\n                Name of a custom weight file. This should be used in two cases:\n\n                    - The saved textual inversion file is in `diffusers` format, but was saved under a specific weight\n                      name, such as `text_inv.bin`.\n                    - The saved textual inversion file is in the \"Automatic1111\" form.\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `diffusers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            subfolder (`str`, *optional*, defaults to `\"\"`):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n\n        Example:\n\n        To load a textual inversion embedding vector in `diffusers` format:\n\n        ```py\n        from diffusers import StableDiffusionPipeline\n        import torch\n\n        model_id = \"runwayml/stable-diffusion-v1-5\"\n        pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16).to(\"cuda\")\n\n        pipe.load_textual_inversion(\"sd-concepts-library/cat-toy\")\n\n        prompt = \"A <cat-toy> backpack\"\n\n        image = pipe(prompt, num_inference_steps=50).images[0]\n        image.save(\"cat-backpack.png\")\n        ```\n\n        To load a textual inversion embedding vector in Automatic1111 format, make sure to first download the vector,\n        e.g. 
from [civitAI](https://civitai.com/models/3036?modelVersionId=9857) and then load the vector locally:\n\n        ```py\n        from diffusers import StableDiffusionPipeline\n        import torch\n\n        model_id = \"runwayml/stable-diffusion-v1-5\"\n        pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16).to(\"cuda\")\n\n        pipe.load_textual_inversion(\"./charturnerv2.pt\", token=\"charturnerv2\")\n\n        prompt = \"charturnerv2, multiple views of the same character in the same outfit, a character turnaround of a woman wearing a black jacket and red shirt, best quality, intricate details.\"\n\n        image = pipe(prompt, num_inference_steps=50).images[0]\n        image.save(\"character.png\")\n        ```\n\n        \"\"\"\n        if not hasattr(self, \"tokenizer\") or not isinstance(self.tokenizer, PreTrainedTokenizer):\n            raise ValueError(\n                f\"{self.__class__.__name__} requires `self.tokenizer` of type `PreTrainedTokenizer` for calling\"\n                f\" `{self.load_textual_inversion.__name__}`\"\n            )\n\n        if not hasattr(self, \"text_encoder\") or not isinstance(self.text_encoder, PreTrainedModel):\n            raise ValueError(\n                f\"{self.__class__.__name__} requires `self.text_encoder` of type `PreTrainedModel` for calling\"\n                f\" `{self.load_textual_inversion.__name__}`\"\n            )\n\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        force_download = kwargs.pop(\"force_download\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n        weight_name = kwargs.pop(\"weight_name\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None)\n\n        if use_safetensors and not is_safetensors_available():\n            raise ValueError(\n                \"`use_safetensors`=True but safetensors is not installed. 
Please install safetensors with `pip install safetensors`\"\n            )\n\n        allow_pickle = False\n        if use_safetensors is None:\n            use_safetensors = is_safetensors_available()\n            allow_pickle = True\n\n        user_agent = {\n            \"file_type\": \"text_inversion\",\n            \"framework\": \"pytorch\",\n        }\n\n        if isinstance(pretrained_model_name_or_path, str):\n            pretrained_model_name_or_paths = [pretrained_model_name_or_path]\n        else:\n            pretrained_model_name_or_paths = pretrained_model_name_or_path\n\n        if isinstance(token, str):\n            tokens = [token]\n        elif token is None:\n            tokens = [None] * len(pretrained_model_name_or_paths)\n        else:\n            tokens = token\n\n        if len(pretrained_model_name_or_paths) != len(tokens):\n            raise ValueError(\n                f\"You have passed a list of models of length {len(pretrained_model_name_or_paths)}, and list of tokens of length {len(tokens)}. \"\n                f\"Make sure both lists have the same length.\"\n            )\n\n        valid_tokens = [t for t in tokens if t is not None]\n        if len(set(valid_tokens)) < len(valid_tokens):\n            raise ValueError(f\"You have passed a list of tokens that contains duplicates: {tokens}\")\n\n        token_ids_and_embeddings = []\n\n        for pretrained_model_name_or_path, token in zip(pretrained_model_name_or_paths, tokens):\n            # 1. Load textual inversion file\n            model_file = None\n            # Let's first try to load .safetensors weights\n            if (use_safetensors and weight_name is None) or (\n                weight_name is not None and weight_name.endswith(\".safetensors\")\n            ):\n                try:\n                    model_file = _get_model_file(\n                        pretrained_model_name_or_path,\n                        weights_name=weight_name or TEXT_INVERSION_NAME_SAFE,\n                        cache_dir=cache_dir,\n                        force_download=force_download,\n                        resume_download=resume_download,\n                        proxies=proxies,\n                        local_files_only=local_files_only,\n                        use_auth_token=use_auth_token,\n                        revision=revision,\n                        subfolder=subfolder,\n                        user_agent=user_agent,\n                    )\n                    state_dict = safetensors.torch.load_file(model_file, device=\"cpu\")\n                except Exception as e:\n                    if not allow_pickle:\n                        raise e\n\n                    model_file = None\n\n            if model_file is None:\n                model_file = _get_model_file(\n                    pretrained_model_name_or_path,\n                    weights_name=weight_name or TEXT_INVERSION_NAME,\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    resume_download=resume_download,\n                    proxies=proxies,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    revision=revision,\n                    subfolder=subfolder,\n                    user_agent=user_agent,\n                )\n                state_dict = torch.load(model_file, map_location=\"cpu\")\n\n            # 2. 
Load token and embedding correctly from file\n            if isinstance(state_dict, torch.Tensor):\n                if token is None:\n                    raise ValueError(\n                        \"You are trying to load a textual inversion embedding that has been saved as a PyTorch tensor. Make sure to pass the name of the corresponding token in this case: `token=...`.\"\n                    )\n                embedding = state_dict\n            elif len(state_dict) == 1:\n                # diffusers\n                loaded_token, embedding = next(iter(state_dict.items()))\n            elif \"string_to_param\" in state_dict:\n                # A1111\n                loaded_token = state_dict[\"name\"]\n                embedding = state_dict[\"string_to_param\"][\"*\"]\n\n            if token is not None and loaded_token != token:\n                logger.info(f\"The loaded token: {loaded_token} is overwritten by the passed token {token}.\")\n            else:\n                token = loaded_token\n\n            embedding = embedding.to(dtype=self.text_encoder.dtype, device=self.text_encoder.device)\n\n            # 3. Make sure we don't mess up the tokenizer or text encoder\n            vocab = self.tokenizer.get_vocab()\n            if token in vocab:\n                raise ValueError(\n                    f\"Token {token} already in tokenizer vocabulary. Please choose a different token name or remove {token} and embedding from the tokenizer and text encoder.\"\n                )\n            elif f\"{token}_1\" in vocab:\n                multi_vector_tokens = [token]\n                i = 1\n                while f\"{token}_{i}\" in self.tokenizer.added_tokens_encoder:\n                    multi_vector_tokens.append(f\"{token}_{i}\")\n                    i += 1\n\n                raise ValueError(\n                    f\"Multi-vector Token {multi_vector_tokens} already in tokenizer vocabulary. 
Please choose a different token name or remove the {multi_vector_tokens} and embedding from the tokenizer and text encoder.\"\n                )\n\n            is_multi_vector = len(embedding.shape) > 1 and embedding.shape[0] > 1\n\n            if is_multi_vector:\n                tokens = [token] + [f\"{token}_{i}\" for i in range(1, embedding.shape[0])]\n                embeddings = [e for e in embedding]  # noqa: C416\n            else:\n                tokens = [token]\n                embeddings = [embedding[0]] if len(embedding.shape) > 1 else [embedding]\n\n            # add tokens and get ids\n            self.tokenizer.add_tokens(tokens)\n            token_ids = self.tokenizer.convert_tokens_to_ids(tokens)\n            token_ids_and_embeddings += zip(token_ids, embeddings)\n\n            logger.info(f\"Loaded textual inversion embedding for {token}.\")\n\n        # resize token embeddings and set all new embeddings\n        self.text_encoder.resize_token_embeddings(len(self.tokenizer))\n        for token_id, embedding in token_ids_and_embeddings:\n            self.text_encoder.get_input_embeddings().weight.data[token_id] = embedding\n\n\nclass LoraLoaderMixin:\n    r\"\"\"\n    Utility class for handling the loading of LoRA layers into the UNet (of class [`UNet2DConditionModel`]) and Text Encoder\n    (of class [`CLIPTextModel`](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel)).\n\n    <Tip warning={true}>\n\n    This class is experimental and might change in the future.\n\n    </Tip>\n    \"\"\"\n    text_encoder_name = TEXT_ENCODER_NAME\n    unet_name = UNET_NAME\n\n    def load_lora_weights(self, pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]], **kwargs):\n        r\"\"\"\n        Load pretrained attention processor layers (such as LoRA) into [`UNet2DConditionModel`] and\n        [`CLIPTextModel`](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel).\n\n        <Tip warning={true}>\n\n        This function is experimental and might change in the future.\n\n        </Tip>\n\n        Parameters:\n            pretrained_model_name_or_path_or_dict (`str` or `os.PathLike` or `dict`):\n                Can be either:\n\n                    - A string, the *model id* of a pretrained model hosted inside a model repo on huggingface.co.\n                      Valid model ids should have an organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing model weights saved using [`~ModelMixin.save_config`], e.g.,\n                      `./my_model_directory/`.\n                    - A [torch state\n                      dict](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict).\n\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. 
Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `diffusers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            subfolder (`str`, *optional*, defaults to `\"\"`):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information.\n\n        <Tip>\n\n        It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n        models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n        \"\"\"\n        # Load the main state dict first which has the LoRA layers for either of\n        # UNet and text encoder or both.\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        force_download = kwargs.pop(\"force_download\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n        weight_name = kwargs.pop(\"weight_name\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None)\n\n        if use_safetensors and not is_safetensors_available():\n            raise ValueError(\n                \"`use_safetensors`=True but safetensors is not installed. 
Please install safetensors with `pip install safetensors`\"\n            )\n\n        allow_pickle = False\n        if use_safetensors is None:\n            use_safetensors = is_safetensors_available()\n            allow_pickle = True\n\n        user_agent = {\n            \"file_type\": \"attn_procs_weights\",\n            \"framework\": \"pytorch\",\n        }\n\n        model_file = None\n        if not isinstance(pretrained_model_name_or_path_or_dict, dict):\n            # Let's first try to load .safetensors weights\n            if (use_safetensors and weight_name is None) or (\n                weight_name is not None and weight_name.endswith(\".safetensors\")\n            ):\n                try:\n                    model_file = _get_model_file(\n                        pretrained_model_name_or_path_or_dict,\n                        weights_name=weight_name or LORA_WEIGHT_NAME_SAFE,\n                        cache_dir=cache_dir,\n                        force_download=force_download,\n                        resume_download=resume_download,\n                        proxies=proxies,\n                        local_files_only=local_files_only,\n                        use_auth_token=use_auth_token,\n                        revision=revision,\n                        subfolder=subfolder,\n                        user_agent=user_agent,\n                    )\n                    state_dict = safetensors.torch.load_file(model_file, device=\"cpu\")\n                except IOError as e:\n                    if not allow_pickle:\n                        raise e\n                    # try loading non-safetensors weights\n                    pass\n            if model_file is None:\n                model_file = _get_model_file(\n                    pretrained_model_name_or_path_or_dict,\n                    weights_name=weight_name or LORA_WEIGHT_NAME,\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    resume_download=resume_download,\n                    proxies=proxies,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    revision=revision,\n                    subfolder=subfolder,\n                    user_agent=user_agent,\n                )\n                state_dict = torch.load(model_file, map_location=\"cpu\")\n        else:\n            state_dict = pretrained_model_name_or_path_or_dict\n\n        # If the serialization format is new (introduced in https://github.com/huggingface/diffusers/pull/2918),\n        # then the `state_dict` keys should have `self.unet_name` and/or `self.text_encoder_name` as\n        # their prefixes.\n        keys = list(state_dict.keys())\n        if all(key.startswith(self.unet_name) or key.startswith(self.text_encoder_name) for key in keys):\n            # Load the layers corresponding to UNet.\n            unet_keys = [k for k in keys if k.startswith(self.unet_name)]\n            logger.info(f\"Loading {self.unet_name}.\")\n            unet_lora_state_dict = {\n                k.replace(f\"{self.unet_name}.\", \"\"): v for k, v in state_dict.items() if k in unet_keys\n            }\n            self.unet.load_attn_procs(unet_lora_state_dict)\n\n            # Load the layers corresponding to text encoder and make necessary adjustments.\n            text_encoder_keys = [k for k in keys if k.startswith(self.text_encoder_name)]\n            logger.info(f\"Loading {self.text_encoder_name}.\")\n            
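# Strip the `text_encoder.` prefix so the keys match the text encoder's module names.\n            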
text_encoder_lora_state_dict = {\n                k.replace(f\"{self.text_encoder_name}.\", \"\"): v for k, v in state_dict.items() if k in text_encoder_keys\n            }\n            if len(text_encoder_lora_state_dict) > 0:\n                attn_procs_text_encoder = self._load_text_encoder_attn_procs(text_encoder_lora_state_dict)\n                self._modify_text_encoder(attn_procs_text_encoder)\n\n                # save lora attn procs of text encoder so that it can be easily retrieved\n                self._text_encoder_lora_attn_procs = attn_procs_text_encoder\n\n        # Otherwise, we're dealing with the old format. This means the `state_dict` should only\n        # contain the module names of the `unet` as its keys WITHOUT any prefix.\n        elif not all(\n            key.startswith(self.unet_name) or key.startswith(self.text_encoder_name) for key in state_dict.keys()\n        ):\n            self.unet.load_attn_procs(state_dict)\n            warn_message = \"You have saved the LoRA weights using the old format. To convert the old LoRA weights to the new format, you can first load them in a dictionary and then create a new dictionary like the following: `new_state_dict = {f'unet.{module_name}': params for module_name, params in old_state_dict.items()}`.\"\n            warnings.warn(warn_message)\n\n    @property\n    def text_encoder_lora_attn_procs(self):\n        if hasattr(self, \"_text_encoder_lora_attn_procs\"):\n            return self._text_encoder_lora_attn_procs\n        return\n\n    def _modify_text_encoder(self, attn_processors: Dict[str, LoRAAttnProcessor]):\n        r\"\"\"\n        Monkey-patches the forward passes of attention modules of the text encoder.\n\n        Parameters:\n            attn_processors: Dict[str, `LoRAAttnProcessor`]:\n                A dictionary mapping the module names and their corresponding [`~LoRAAttnProcessor`].\n        \"\"\"\n        # Loop over the original attention modules.\n        for name, _ in self.text_encoder.named_modules():\n            if any(x in name for x in TEXT_ENCODER_TARGET_MODULES):\n                # Retrieve the module and its corresponding LoRA processor.\n                module = self.text_encoder.get_submodule(name)\n                # Construct a new function that performs the LoRA merging. 
We will monkey patch\n                # this forward pass.\n                lora_layer = getattr(attn_processors[name], self._get_lora_layer_attribute(name))\n                old_forward = module.forward\n\n                # Bind the current `old_forward` and `lora_layer` as default arguments so that each\n                # patched module keeps its own references instead of the loop's last values.\n                def new_forward(x, old_forward=old_forward, lora_layer=lora_layer):\n                    return old_forward(x) + lora_layer(x)\n\n                # Monkey-patch.\n                module.forward = new_forward\n\n    def _get_lora_layer_attribute(self, name: str) -> str:\n        if \"q_proj\" in name:\n            return \"to_q_lora\"\n        elif \"v_proj\" in name:\n            return \"to_v_lora\"\n        elif \"k_proj\" in name:\n            return \"to_k_lora\"\n        else:\n            return \"to_out_lora\"\n\n    def _load_text_encoder_attn_procs(\n        self, pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]], **kwargs\n    ):\n        r\"\"\"\n        Load pretrained attention processor layers for\n        [`CLIPTextModel`](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel).\n\n        <Tip warning={true}>\n\n        This function is experimental and might change in the future.\n\n        </Tip>\n\n        Parameters:\n            pretrained_model_name_or_path_or_dict (`str` or `os.PathLike` or `dict`):\n                Can be either:\n\n                    - A string, the *model id* of a pretrained model hosted inside a model repo on huggingface.co.\n                      Valid model ids should have an organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing model weights saved using [`~ModelMixin.save_config`], e.g.,\n                      `./my_model_directory/`.\n                    - A [torch state\n                      dict](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict).\n\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `diffusers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            subfolder (`str`, *optional*, defaults to `\"\"`):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information.\n\n        Returns:\n            `Dict[name, LoRAAttnProcessor]`: Mapping between the module names and their corresponding\n            [`LoRAAttnProcessor`].\n\n        <Tip>\n\n        It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n        models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n        \"\"\"\n\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        force_download = kwargs.pop(\"force_download\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n        weight_name = kwargs.pop(\"weight_name\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None)\n\n        if use_safetensors and not is_safetensors_available():\n            raise ValueError(\n                \"`use_safetensors`=True but safetensors is not installed. 
Please install safetensors with `pip install safetensors`\"\n            )\n\n        allow_pickle = False\n        if use_safetensors is None:\n            use_safetensors = is_safetensors_available()\n            allow_pickle = True\n\n        user_agent = {\n            \"file_type\": \"attn_procs_weights\",\n            \"framework\": \"pytorch\",\n        }\n\n        model_file = None\n        if not isinstance(pretrained_model_name_or_path_or_dict, dict):\n            # Let's first try to load .safetensors weights\n            if (use_safetensors and weight_name is None) or (\n                weight_name is not None and weight_name.endswith(\".safetensors\")\n            ):\n                try:\n                    model_file = _get_model_file(\n                        pretrained_model_name_or_path_or_dict,\n                        weights_name=weight_name or LORA_WEIGHT_NAME_SAFE,\n                        cache_dir=cache_dir,\n                        force_download=force_download,\n                        resume_download=resume_download,\n                        proxies=proxies,\n                        local_files_only=local_files_only,\n                        use_auth_token=use_auth_token,\n                        revision=revision,\n                        subfolder=subfolder,\n                        user_agent=user_agent,\n                    )\n                    state_dict = safetensors.torch.load_file(model_file, device=\"cpu\")\n                except IOError as e:\n                    if not allow_pickle:\n                        raise e\n                    # try loading non-safetensors weights\n                    pass\n            if model_file is None:\n                model_file = _get_model_file(\n                    pretrained_model_name_or_path_or_dict,\n                    weights_name=weight_name or LORA_WEIGHT_NAME,\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    resume_download=resume_download,\n                    proxies=proxies,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    revision=revision,\n                    subfolder=subfolder,\n                    user_agent=user_agent,\n                )\n                state_dict = torch.load(model_file, map_location=\"cpu\")\n        else:\n            state_dict = pretrained_model_name_or_path_or_dict\n\n        # fill attn processors\n        attn_processors = {}\n\n        is_lora = all(\"lora\" in k for k in state_dict.keys())\n\n        if is_lora:\n            lora_grouped_dict = defaultdict(dict)\n            for key, value in state_dict.items():\n                attn_processor_key, sub_key = \".\".join(key.split(\".\")[:-3]), \".\".join(key.split(\".\")[-3:])\n                lora_grouped_dict[attn_processor_key][sub_key] = value\n\n            for key, value_dict in lora_grouped_dict.items():\n                rank = value_dict[\"to_k_lora.down.weight\"].shape[0]\n                cross_attention_dim = value_dict[\"to_k_lora.down.weight\"].shape[1]\n                hidden_size = value_dict[\"to_k_lora.up.weight\"].shape[0]\n\n                attn_processors[key] = LoRAAttnProcessor(\n                    hidden_size=hidden_size, cross_attention_dim=cross_attention_dim, rank=rank\n                )\n                attn_processors[key].load_state_dict(value_dict)\n\n        else:\n            raise ValueError(f\"{model_file} does not seem to be in the 
correct format expected by LoRA training.\")\n\n        # set correct dtype & device\n        attn_processors = {\n            k: v.to(device=self.device, dtype=self.text_encoder.dtype) for k, v in attn_processors.items()\n        }\n        return attn_processors\n\n    @classmethod\n    def save_lora_weights(\n        self,\n        save_directory: Union[str, os.PathLike],\n        unet_lora_layers: Dict[str, Union[torch.nn.Module, torch.Tensor]] = None,\n        text_encoder_lora_layers: Dict[str, torch.nn.Module] = None,\n        is_main_process: bool = True,\n        weight_name: str = None,\n        save_function: Callable = None,\n        safe_serialization: bool = False,\n    ):\n        r\"\"\"\n        Save the LoRA parameters corresponding to the UNet and the text encoder.\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. Will be created if it doesn't exist.\n            unet_lora_layers (`Dict[str, torch.nn.Module]` or `Dict[str, torch.Tensor]`):\n                State dict of the LoRA layers corresponding to the UNet. Specifying this helps to make the\n                serialization process easier and cleaner. Values can be both LoRA torch.nn.Modules layers or torch\n                weights.\n            text_encoder_lora_layers (`Dict[str, torch.nn.Module] or `Dict[str, torch.Tensor]`):\n                State dict of the LoRA layers corresponding to the `text_encoder`. Since the `text_encoder` comes from\n                `transformers`, we cannot rejig it. That is why we have to explicitly pass the text encoder LoRA state\n                dict. Values can be both LoRA torch.nn.Modules layers or torch weights.\n            is_main_process (`bool`, *optional*, defaults to `True`):\n                Whether the process calling this is the main process or not. Useful when in distributed training like\n                TPUs and need to call this function on all processes. In this case, set `is_main_process=True` only on\n                the main process to avoid race conditions.\n            save_function (`Callable`):\n                The function to use to save the state dictionary. Useful on distributed training like TPUs when one\n                need to replace `torch.save` by another method. 
Can be configured with the environment variable\n                `DIFFUSERS_SAVE_MODE`.\n        \"\"\"\n        if os.path.isfile(save_directory):\n            logger.error(f\"Provided path ({save_directory}) should be a directory, not a file\")\n            return\n\n        if save_function is None:\n            if safe_serialization:\n\n                def save_function(weights, filename):\n                    return safetensors.torch.save_file(weights, filename, metadata={\"format\": \"pt\"})\n\n            else:\n                save_function = torch.save\n\n        os.makedirs(save_directory, exist_ok=True)\n\n        # Create a flat dictionary.\n        state_dict = {}\n        if unet_lora_layers is not None:\n            weights = (\n                unet_lora_layers.state_dict() if isinstance(unet_lora_layers, torch.nn.Module) else unet_lora_layers\n            )\n\n            unet_lora_state_dict = {f\"{self.unet_name}.{module_name}\": param for module_name, param in weights.items()}\n            state_dict.update(unet_lora_state_dict)\n\n        if text_encoder_lora_layers is not None:\n            weights = (\n                text_encoder_lora_layers.state_dict()\n                if isinstance(text_encoder_lora_layers, torch.nn.Module)\n                else text_encoder_lora_layers\n            )\n\n            text_encoder_lora_state_dict = {\n                f\"{self.text_encoder_name}.{module_name}\": param for module_name, param in weights.items()\n            }\n            state_dict.update(text_encoder_lora_state_dict)\n\n        # Save the model\n        if weight_name is None:\n            if safe_serialization:\n                weight_name = LORA_WEIGHT_NAME_SAFE\n            else:\n                weight_name = LORA_WEIGHT_NAME\n\n        save_function(state_dict, os.path.join(save_directory, weight_name))\n        logger.info(f\"Model weights saved in {os.path.join(save_directory, weight_name)}\")\n\n\nclass FromCkptMixin:\n    \"\"\"This helper class allows loading Stable Diffusion checkpoints saved in the original .ckpt format\n    directly into the respective pipeline classes.\"\"\"\n\n    @classmethod\n    def from_ckpt(cls, pretrained_model_link_or_path, **kwargs):\n        r\"\"\"\n        Instantiate a PyTorch diffusion pipeline from pre-trained pipeline weights saved in the original .ckpt format.\n\n        The pipeline is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated).\n\n        Parameters:\n            pretrained_model_link_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n                    - A link to the .ckpt file on the Hub. Should be in the format\n                      `\"https://huggingface.co/<repo_id>/blob/main/<path_to_file>\"`\n                    - A path to a *file* containing all pipeline weights.\n            torch_dtype (`str` or `torch.dtype`, *optional*):\n                Override the default `torch.dtype` and load the model under this dtype. 
If `\"auto\"` is passed the dtype\n                will be automatically derived from the model's weights.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            local_files_only (`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `huggingface-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            use_safetensors (`bool`, *optional*, defaults to `None`):\n                If set to `None`, the pipeline will load the `safetensors` weights if they're available **and** if the\n                `safetensors` library is installed. If set to `True`, the pipeline will forcibly load the models from\n                `safetensors` weights. If set to `False` the pipeline will *not* use `safetensors`.\n            extract_ema (`bool`, *optional*, defaults to `False`): Only relevant for\n                checkpoints that have both EMA and non-EMA weights. Whether to extract the EMA weights or not. Defaults\n                to `False`. Pass `True` to extract the EMA weights. EMA weights usually yield higher quality images for\n                inference. Non-EMA weights are usually better to continue fine-tuning.\n            upcast_attention (`bool`, *optional*, defaults to `None`):\n                Whether the attention computation should always be upcasted. This is necessary when running stable\n            image_size (`int`, *optional*, defaults to 512):\n                The image size that the model was trained on. Use 512 for Stable Diffusion v1.X and Stable Diffusion v2\n                Base. Use 768 for Stable Diffusion v2.\n            prediction_type (`str`, *optional*):\n                The prediction type that the model was trained on. Use `'epsilon'` for Stable Diffusion v1.X and Stable\n                Diffusion v2 Base. Use `'v_prediction'` for Stable Diffusion v2.\n            num_in_channels (`int`, *optional*, defaults to None):\n                The number of input channels. 
If `None`, it will be automatically inferred.\n            scheduler_type (`str`, *optional*, defaults to 'pndm'):\n                Type of scheduler to use. Should be one of `[\"pndm\", \"lms\", \"heun\", \"euler\", \"euler-ancestral\", \"dpm\",\n                \"ddim\"]`.\n            load_safety_checker (`bool`, *optional*, defaults to `True`):\n                Whether to load the safety checker or not. Defaults to `True`.\n            kwargs (remaining dictionary of keyword arguments, *optional*):\n                Can be used to overwrite load- and saveable variables (*i.e.* the pipeline components) of the\n                specific pipeline class. The overwritten components are then directly passed to the pipeline's\n                `__init__` method. See example below for more information.\n\n        Examples:\n\n        ```py\n        >>> import torch\n        >>> from diffusers import StableDiffusionPipeline\n\n        >>> # Download pipeline from huggingface.co and cache.\n        >>> pipeline = StableDiffusionPipeline.from_ckpt(\n        ...     \"https://huggingface.co/WarriorMama777/OrangeMixs/blob/main/Models/AbyssOrangeMix/AbyssOrangeMix.safetensors\"\n        ... )\n\n        >>> # Load pipeline from a local file\n        >>> # (assuming the checkpoint was downloaded to ./v1-5-pruned-emaonly.ckpt)\n        >>> pipeline = StableDiffusionPipeline.from_ckpt(\"./v1-5-pruned-emaonly.ckpt\")\n\n        >>> # Enable float16 and move to GPU\n        >>> pipeline = StableDiffusionPipeline.from_ckpt(\n        ...     \"https://huggingface.co/runwayml/stable-diffusion-v1-5/blob/main/v1-5-pruned-emaonly.ckpt\",\n        ...     torch_dtype=torch.float16,\n        ... )\n        >>> pipeline.to(\"cuda\")\n        ```\n        \"\"\"\n        # import here to avoid circular dependency\n        from .pipelines.stable_diffusion.convert_from_ckpt import download_from_original_stable_diffusion_ckpt\n\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        force_download = kwargs.pop(\"force_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        extract_ema = kwargs.pop(\"extract_ema\", False)\n        image_size = kwargs.pop(\"image_size\", 512)\n        scheduler_type = kwargs.pop(\"scheduler_type\", \"pndm\")\n        num_in_channels = kwargs.pop(\"num_in_channels\", None)\n        upcast_attention = kwargs.pop(\"upcast_attention\", None)\n        load_safety_checker = kwargs.pop(\"load_safety_checker\", True)\n        prediction_type = kwargs.pop(\"prediction_type\", None)\n\n        torch_dtype = kwargs.pop(\"torch_dtype\", None)\n\n        use_safetensors = kwargs.pop(\"use_safetensors\", None if is_safetensors_available() else False)\n\n        pipeline_name = cls.__name__\n        file_extension = pretrained_model_link_or_path.rsplit(\".\", 1)[-1]\n        from_safetensors = file_extension == \"safetensors\"\n\n        if from_safetensors and use_safetensors is False:\n            raise ValueError(\"Make sure to install `safetensors` with `pip install safetensors`.\")\n\n        # TODO: For now we only support stable diffusion\n        stable_unclip = None\n        controlnet = False\n\n        if pipeline_name == \"StableDiffusionControlNetPipeline\":\n            model_type = \"FrozenCLIPEmbedder\"\n            controlnet = True\n        
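# Any other Stable Diffusion pipeline also uses the CLIP text encoder (FrozenCLIPEmbedder).\n        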
elif \"StableDiffusion\" in pipeline_name:\n            model_type = \"FrozenCLIPEmbedder\"\n        elif pipeline_name == \"StableUnCLIPPipeline\":\n            model_type == \"FrozenOpenCLIPEmbedder\"\n            stable_unclip = \"txt2img\"\n        elif pipeline_name == \"StableUnCLIPImg2ImgPipeline\":\n            model_type == \"FrozenOpenCLIPEmbedder\"\n            stable_unclip = \"img2img\"\n        elif pipeline_name == \"PaintByExamplePipeline\":\n            model_type == \"PaintByExample\"\n        elif pipeline_name == \"LDMTextToImagePipeline\":\n            model_type == \"LDMTextToImage\"\n        else:\n            raise ValueError(f\"Unhandled pipeline class: {pipeline_name}\")\n\n        # remove huggingface url\n        for prefix in [\"https://huggingface.co/\", \"huggingface.co/\", \"hf.co/\", \"https://hf.co/\"]:\n            if pretrained_model_link_or_path.startswith(prefix):\n                pretrained_model_link_or_path = pretrained_model_link_or_path[len(prefix) :]\n\n        # Code based on diffusers.pipelines.pipeline_utils.DiffusionPipeline.from_pretrained\n        ckpt_path = Path(pretrained_model_link_or_path)\n        if not ckpt_path.is_file():\n            # get repo_id and (potentially nested) file path of ckpt in repo\n            repo_id = str(Path().joinpath(*ckpt_path.parts[:2]))\n            file_path = str(Path().joinpath(*ckpt_path.parts[2:]))\n\n            if file_path.startswith(\"blob/\"):\n                file_path = file_path[len(\"blob/\") :]\n\n            if file_path.startswith(\"main/\"):\n                file_path = file_path[len(\"main/\") :]\n\n            pretrained_model_link_or_path = hf_hub_download(\n                repo_id,\n                filename=file_path,\n                cache_dir=cache_dir,\n                resume_download=resume_download,\n                proxies=proxies,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n                revision=revision,\n                force_download=force_download,\n            )\n\n        pipe = download_from_original_stable_diffusion_ckpt(\n            pretrained_model_link_or_path,\n            pipeline_class=cls,\n            model_type=model_type,\n            stable_unclip=stable_unclip,\n            controlnet=controlnet,\n            from_safetensors=from_safetensors,\n            extract_ema=extract_ema,\n            image_size=image_size,\n            scheduler_type=scheduler_type,\n            num_in_channels=num_in_channels,\n            upcast_attention=upcast_attention,\n            load_safety_checker=load_safety_checker,\n            prediction_type=prediction_type,\n        )\n\n        if torch_dtype is not None:\n            pipe.to(torch_dtype=torch_dtype)\n\n        return pipe\n"
  },
  {
    "path": "diffusers/models/README.md",
    "content": "# Models\n\nFor more detail on the models, please refer to the [docs](https://huggingface.co/docs/diffusers/api/models)."
  },
  {
    "path": "diffusers/models/__init__.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom ..utils import is_flax_available, is_torch_available\n\n\nif is_torch_available():\n    from .autoencoder_kl import AutoencoderKL\n    from .controlnet import ControlNetModel\n    from .dual_transformer_2d import DualTransformer2DModel\n    from .modeling_utils import ModelMixin\n    from .prior_transformer import PriorTransformer\n    from .t5_film_transformer import T5FilmDecoder\n    from .transformer_2d import Transformer2DModel\n    from .unet_1d import UNet1DModel\n    from .unet_2d import UNet2DModel\n    from .unet_2d_condition import UNet2DConditionModel\n    from .unet_3d_condition import UNet3DConditionModel\n    from .vq_model import VQModel\n\nif is_flax_available():\n    from .controlnet_flax import FlaxControlNetModel\n    from .unet_2d_condition_flax import FlaxUNet2DConditionModel\n    from .vae_flax import FlaxAutoencoderKL\n"
  },
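  {
    "path": "examples/hypothetical_sketches/conditional_model_imports.py",
    "content": "# Hypothetical sketch, NOT part of the original repository: diffusers/models/__init__.py\n# above only re-exports the PyTorch model classes when torch is installed and the Flax\n# classes when flax is installed, so downstream imports should be guarded the same way.\nfrom diffusers.utils import is_flax_available, is_torch_available\n\nif is_torch_available():\n    from diffusers.models import UNet2DConditionModel  # noqa: F401\n\nif is_flax_available():\n    from diffusers.models import FlaxUNet2DConditionModel  # noqa: F401\n"
  },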
  {
    "path": "diffusers/models/attention.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Optional\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom ..utils import maybe_allow_in_graph\nfrom .attention_processor import Attention\nfrom .embeddings import CombinedTimestepLabelEmbeddings\n\n\n@maybe_allow_in_graph\nclass BasicTransformerBlock(nn.Module):\n    r\"\"\"\n    A basic Transformer block.\n\n    Parameters:\n        dim (`int`): The number of channels in the input and output.\n        num_attention_heads (`int`): The number of heads to use for multi-head attention.\n        attention_head_dim (`int`): The number of channels in each head.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n        cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention.\n        only_cross_attention (`bool`, *optional*):\n            Whether to use only cross-attention layers. In this case two cross attention layers are used.\n        double_self_attention (`bool`, *optional*):\n            Whether to use two self-attention layers. In this case no cross attention layers are used.\n        activation_fn (`str`, *optional*, defaults to `\"geglu\"`): Activation function to be used in feed-forward.\n        num_embeds_ada_norm (:\n            obj: `int`, *optional*): The number of diffusion steps used during training. See `Transformer2DModel`.\n        attention_bias (:\n            obj: `bool`, *optional*, defaults to `False`): Configure if the attentions should contain a bias parameter.\n    \"\"\"\n\n    def __init__(\n        self,\n        dim: int,\n        num_attention_heads: int,\n        attention_head_dim: int,\n        dropout=0.0,\n        cross_attention_dim: Optional[int] = None,\n        activation_fn: str = \"geglu\",\n        num_embeds_ada_norm: Optional[int] = None,\n        attention_bias: bool = False,\n        only_cross_attention: bool = False,\n        double_self_attention: bool = False,\n        upcast_attention: bool = False,\n        norm_elementwise_affine: bool = True,\n        norm_type: str = \"layer_norm\",\n        final_dropout: bool = False,\n    ):\n        super().__init__()\n        self.only_cross_attention = only_cross_attention\n\n        self.use_ada_layer_norm_zero = (num_embeds_ada_norm is not None) and norm_type == \"ada_norm_zero\"\n        self.use_ada_layer_norm = (num_embeds_ada_norm is not None) and norm_type == \"ada_norm\"\n\n        if norm_type in (\"ada_norm\", \"ada_norm_zero\") and num_embeds_ada_norm is None:\n            raise ValueError(\n                f\"`norm_type` is set to {norm_type}, but `num_embeds_ada_norm` is not defined. Please make sure to\"\n                f\" define `num_embeds_ada_norm` if setting `norm_type` to {norm_type}.\"\n            )\n\n        # Define 3 blocks. Each block has its own normalization layer.\n        # 1. 
Self-Attn\n        if self.use_ada_layer_norm:\n            self.norm1 = AdaLayerNorm(dim, num_embeds_ada_norm)\n        elif self.use_ada_layer_norm_zero:\n            self.norm1 = AdaLayerNormZero(dim, num_embeds_ada_norm)\n        else:\n            self.norm1 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine)\n        self.attn1 = Attention(\n            query_dim=dim,\n            heads=num_attention_heads,\n            dim_head=attention_head_dim,\n            dropout=dropout,\n            bias=attention_bias,\n            cross_attention_dim=cross_attention_dim if only_cross_attention else None,\n            upcast_attention=upcast_attention,\n        )\n\n        # 2. Cross-Attn\n        if cross_attention_dim is not None or double_self_attention:\n            # We currently only use AdaLayerNormZero for self attention where there will only be one attention block.\n            # I.e. the number of returned modulation chunks from AdaLayerZero would not make sense if returned during\n            # the second cross attention block.\n            self.norm2 = (\n                AdaLayerNorm(dim, num_embeds_ada_norm)\n                if self.use_ada_layer_norm\n                else nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine)\n            )\n            self.attn2 = Attention(\n                query_dim=dim,\n                cross_attention_dim=cross_attention_dim if not double_self_attention else None,\n                heads=num_attention_heads,\n                dim_head=attention_head_dim,\n                dropout=dropout,\n                bias=attention_bias,\n                upcast_attention=upcast_attention,\n            )  # is self-attn if encoder_hidden_states is none\n        else:\n            self.norm2 = None\n            self.attn2 = None\n\n        # 3. Feed-forward\n        self.norm3 = nn.LayerNorm(dim, elementwise_affine=norm_elementwise_affine)\n        self.ff = FeedForward(dim, dropout=dropout, activation_fn=activation_fn, final_dropout=final_dropout)\n\n    def forward(\n        self,\n        hidden_states,\n        attention_mask=None,\n        encoder_hidden_states=None,\n        encoder_attention_mask=None,\n        timestep=None,\n        cross_attention_kwargs=None,\n        class_labels=None,\n    ):\n        # Notice that normalization is always applied before the real computation in the following blocks.\n        # 1. Self-Attention\n        if self.use_ada_layer_norm:\n            norm_hidden_states = self.norm1(hidden_states, timestep)\n        elif self.use_ada_layer_norm_zero:\n            norm_hidden_states, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.norm1(\n                hidden_states, timestep, class_labels, hidden_dtype=hidden_states.dtype\n            )\n        else:\n            norm_hidden_states = self.norm1(hidden_states)\n\n        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}\n        attn_output = self.attn1(\n            norm_hidden_states,\n            encoder_hidden_states=encoder_hidden_states if self.only_cross_attention else None,\n            attention_mask=attention_mask,\n            **cross_attention_kwargs,\n        )\n        if self.use_ada_layer_norm_zero:\n            attn_output = gate_msa.unsqueeze(1) * attn_output\n        hidden_states = attn_output + hidden_states\n\n        # 2. 
Cross-Attention\n        if self.attn2 is not None:\n            norm_hidden_states = (\n                self.norm2(hidden_states, timestep) if self.use_ada_layer_norm else self.norm2(hidden_states)\n            )\n            # TODO (Birch-San): Here we should prepare the encoder_attention mask correctly\n            # prepare attention mask here\n\n            attn_output = self.attn2(\n                norm_hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=encoder_attention_mask,\n                **cross_attention_kwargs,\n            )\n            hidden_states = attn_output + hidden_states\n\n        # 3. Feed-forward\n        norm_hidden_states = self.norm3(hidden_states)\n\n        if self.use_ada_layer_norm_zero:\n            norm_hidden_states = norm_hidden_states * (1 + scale_mlp[:, None]) + shift_mlp[:, None]\n\n        ff_output = self.ff(norm_hidden_states)\n\n        if self.use_ada_layer_norm_zero:\n            ff_output = gate_mlp.unsqueeze(1) * ff_output\n\n        hidden_states = ff_output + hidden_states\n\n        return hidden_states\n\n\nclass FeedForward(nn.Module):\n    r\"\"\"\n    A feed-forward layer.\n\n    Parameters:\n        dim (`int`): The number of channels in the input.\n        dim_out (`int`, *optional*): The number of channels in the output. If not given, defaults to `dim`.\n        mult (`int`, *optional*, defaults to 4): The multiplier to use for the hidden dimension.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n        activation_fn (`str`, *optional*, defaults to `\"geglu\"`): Activation function to be used in feed-forward.\n        final_dropout (`bool` *optional*, defaults to False): Apply a final dropout.\n    \"\"\"\n\n    def __init__(\n        self,\n        dim: int,\n        dim_out: Optional[int] = None,\n        mult: int = 4,\n        dropout: float = 0.0,\n        activation_fn: str = \"geglu\",\n        final_dropout: bool = False,\n    ):\n        super().__init__()\n        inner_dim = int(dim * mult)\n        dim_out = dim_out if dim_out is not None else dim\n\n        if activation_fn == \"gelu\":\n            act_fn = GELU(dim, inner_dim)\n        if activation_fn == \"gelu-approximate\":\n            act_fn = GELU(dim, inner_dim, approximate=\"tanh\")\n        elif activation_fn == \"geglu\":\n            act_fn = GEGLU(dim, inner_dim)\n        elif activation_fn == \"geglu-approximate\":\n            act_fn = ApproximateGELU(dim, inner_dim)\n\n        self.net = nn.ModuleList([])\n        # project in\n        self.net.append(act_fn)\n        # project dropout\n        self.net.append(nn.Dropout(dropout))\n        # project out\n        self.net.append(nn.Linear(inner_dim, dim_out))\n        # FF as used in Vision Transformer, MLP-Mixer, etc. 
have a final dropout\n        if final_dropout:\n            self.net.append(nn.Dropout(dropout))\n\n    def forward(self, hidden_states):\n        for module in self.net:\n            hidden_states = module(hidden_states)\n        return hidden_states\n\n\nclass GELU(nn.Module):\n    r\"\"\"\n    GELU activation function with tanh approximation support with `approximate=\"tanh\"`.\n    \"\"\"\n\n    def __init__(self, dim_in: int, dim_out: int, approximate: str = \"none\"):\n        super().__init__()\n        self.proj = nn.Linear(dim_in, dim_out)\n        self.approximate = approximate\n\n    def gelu(self, gate):\n        if gate.device.type != \"mps\":\n            return F.gelu(gate, approximate=self.approximate)\n        # mps: gelu is not implemented for float16\n        return F.gelu(gate.to(dtype=torch.float32), approximate=self.approximate).to(dtype=gate.dtype)\n\n    def forward(self, hidden_states):\n        hidden_states = self.proj(hidden_states)\n        hidden_states = self.gelu(hidden_states)\n        return hidden_states\n\n\nclass GEGLU(nn.Module):\n    r\"\"\"\n    A variant of the gated linear unit activation function from https://arxiv.org/abs/2002.05202.\n\n    Parameters:\n        dim_in (`int`): The number of channels in the input.\n        dim_out (`int`): The number of channels in the output.\n    \"\"\"\n\n    def __init__(self, dim_in: int, dim_out: int):\n        super().__init__()\n        self.proj = nn.Linear(dim_in, dim_out * 2)\n\n    def gelu(self, gate):\n        if gate.device.type != \"mps\":\n            return F.gelu(gate)\n        # mps: gelu is not implemented for float16\n        return F.gelu(gate.to(dtype=torch.float32)).to(dtype=gate.dtype)\n\n    def forward(self, hidden_states):\n        hidden_states, gate = self.proj(hidden_states).chunk(2, dim=-1)\n        return hidden_states * self.gelu(gate)\n\n\nclass ApproximateGELU(nn.Module):\n    \"\"\"\n    The approximate form of Gaussian Error Linear Unit (GELU)\n\n    For more details, see section 2: https://arxiv.org/abs/1606.08415\n    \"\"\"\n\n    def __init__(self, dim_in: int, dim_out: int):\n        super().__init__()\n        self.proj = nn.Linear(dim_in, dim_out)\n\n    def forward(self, x):\n        x = self.proj(x)\n        return x * torch.sigmoid(1.702 * x)\n\n\nclass AdaLayerNorm(nn.Module):\n    \"\"\"\n    Norm layer modified to incorporate timestep embeddings.\n    \"\"\"\n\n    def __init__(self, embedding_dim, num_embeddings):\n        super().__init__()\n        self.emb = nn.Embedding(num_embeddings, embedding_dim)\n        self.silu = nn.SiLU()\n        self.linear = nn.Linear(embedding_dim, embedding_dim * 2)\n        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False)\n\n    def forward(self, x, timestep):\n        emb = self.linear(self.silu(self.emb(timestep)))\n        scale, shift = torch.chunk(emb, 2)\n        x = self.norm(x) * (1 + scale) + shift\n        return x\n\n\nclass AdaLayerNormZero(nn.Module):\n    \"\"\"\n    Norm layer adaptive layer norm zero (adaLN-Zero).\n    \"\"\"\n\n    def __init__(self, embedding_dim, num_embeddings):\n        super().__init__()\n\n        self.emb = CombinedTimestepLabelEmbeddings(num_embeddings, embedding_dim)\n\n        self.silu = nn.SiLU()\n        self.linear = nn.Linear(embedding_dim, 6 * embedding_dim, bias=True)\n        self.norm = nn.LayerNorm(embedding_dim, elementwise_affine=False, eps=1e-6)\n\n    def forward(self, x, timestep, class_labels, hidden_dtype=None):\n        emb = 
self.linear(self.silu(self.emb(timestep, class_labels, hidden_dtype=hidden_dtype)))\n        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = emb.chunk(6, dim=1)\n        x = self.norm(x) * (1 + scale_msa[:, None]) + shift_msa[:, None]\n        return x, gate_msa, shift_mlp, scale_mlp, gate_mlp\n\n\nclass AdaGroupNorm(nn.Module):\n    \"\"\"\n    GroupNorm layer modified to incorporate timestep embeddings.\n    \"\"\"\n\n    def __init__(\n        self, embedding_dim: int, out_dim: int, num_groups: int, act_fn: Optional[str] = None, eps: float = 1e-5\n    ):\n        super().__init__()\n        self.num_groups = num_groups\n        self.eps = eps\n        self.act = None\n        if act_fn == \"swish\":\n            self.act = lambda x: F.silu(x)\n        elif act_fn == \"mish\":\n            self.act = nn.Mish()\n        elif act_fn == \"silu\":\n            self.act = nn.SiLU()\n        elif act_fn == \"gelu\":\n            self.act = nn.GELU()\n\n        self.linear = nn.Linear(embedding_dim, out_dim * 2)\n\n    def forward(self, x, emb):\n        if self.act:\n            emb = self.act(emb)\n        emb = self.linear(emb)\n        emb = emb[:, :, None, None]\n        scale, shift = emb.chunk(2, dim=1)\n\n        x = F.group_norm(x, self.num_groups, eps=self.eps)\n        x = x * (1 + scale) + shift\n        return x\n"
  },
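  {
    "path": "examples/hypothetical_sketches/basic_transformer_block_smoke_test.py",
    "content": "# Hypothetical sketch, NOT part of the original repository: a small smoke test of the\n# BasicTransformerBlock and FeedForward modules defined in diffusers/models/attention.py,\n# relying only on the constructor and forward signatures shown there. Shapes are arbitrary.\nimport torch\n\nfrom diffusers.models.attention import BasicTransformerBlock, FeedForward\n\n# a block with self-attention followed by cross-attention\nblock = BasicTransformerBlock(\n    dim=320,\n    num_attention_heads=8,\n    attention_head_dim=40,\n    cross_attention_dim=768,\n)\n\nhidden_states = torch.randn(2, 64, 320)  # (batch, tokens, dim)\nencoder_hidden_states = torch.randn(2, 77, 768)  # (batch, text tokens, cross_attention_dim)\n\nout = block(hidden_states, encoder_hidden_states=encoder_hidden_states)\nprint(out.shape)  # torch.Size([2, 64, 320])\n\n# the feed-forward sub-module can also be exercised on its own\nff = FeedForward(dim=320, activation_fn=\"geglu\")\nprint(ff(hidden_states).shape)  # torch.Size([2, 64, 320])\n"
  },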
  {
    "path": "diffusers/models/attention_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport functools\nimport math\n\nimport flax.linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\ndef _query_chunk_attention(query, key, value, precision, key_chunk_size: int = 4096):\n    \"\"\"Multi-head dot product attention with a limited number of queries.\"\"\"\n    num_kv, num_heads, k_features = key.shape[-3:]\n    v_features = value.shape[-1]\n    key_chunk_size = min(key_chunk_size, num_kv)\n    query = query / jnp.sqrt(k_features)\n\n    @functools.partial(jax.checkpoint, prevent_cse=False)\n    def summarize_chunk(query, key, value):\n        attn_weights = jnp.einsum(\"...qhd,...khd->...qhk\", query, key, precision=precision)\n\n        max_score = jnp.max(attn_weights, axis=-1, keepdims=True)\n        max_score = jax.lax.stop_gradient(max_score)\n        exp_weights = jnp.exp(attn_weights - max_score)\n\n        exp_values = jnp.einsum(\"...vhf,...qhv->...qhf\", value, exp_weights, precision=precision)\n        max_score = jnp.einsum(\"...qhk->...qh\", max_score)\n\n        return (exp_values, exp_weights.sum(axis=-1), max_score)\n\n    def chunk_scanner(chunk_idx):\n        # julienne key array\n        key_chunk = jax.lax.dynamic_slice(\n            operand=key,\n            start_indices=[0] * (key.ndim - 3) + [chunk_idx, 0, 0],  # [...,k,h,d]\n            slice_sizes=list(key.shape[:-3]) + [key_chunk_size, num_heads, k_features],  # [...,k,h,d]\n        )\n\n        # julienne value array\n        value_chunk = jax.lax.dynamic_slice(\n            operand=value,\n            start_indices=[0] * (value.ndim - 3) + [chunk_idx, 0, 0],  # [...,v,h,d]\n            slice_sizes=list(value.shape[:-3]) + [key_chunk_size, num_heads, v_features],  # [...,v,h,d]\n        )\n\n        return summarize_chunk(query, key_chunk, value_chunk)\n\n    chunk_values, chunk_weights, chunk_max = jax.lax.map(f=chunk_scanner, xs=jnp.arange(0, num_kv, key_chunk_size))\n\n    global_max = jnp.max(chunk_max, axis=0, keepdims=True)\n    max_diffs = jnp.exp(chunk_max - global_max)\n\n    chunk_values *= jnp.expand_dims(max_diffs, axis=-1)\n    chunk_weights *= max_diffs\n\n    all_values = chunk_values.sum(axis=0)\n    all_weights = jnp.expand_dims(chunk_weights, -1).sum(axis=0)\n\n    return all_values / all_weights\n\n\ndef jax_memory_efficient_attention(\n    query, key, value, precision=jax.lax.Precision.HIGHEST, query_chunk_size: int = 1024, key_chunk_size: int = 4096\n):\n    r\"\"\"\n    Flax Memory-efficient multi-head dot product attention. 
https://arxiv.org/abs/2112.05682v2\n    https://github.com/AminRezaei0x443/memory-efficient-attention\n\n    Args:\n        query (`jnp.ndarray`): (batch..., query_length, head, query_key_depth_per_head)\n        key (`jnp.ndarray`): (batch..., key_value_length, head, query_key_depth_per_head)\n        value (`jnp.ndarray`): (batch..., key_value_length, head, value_depth_per_head)\n        precision (`jax.lax.Precision`, *optional*, defaults to `jax.lax.Precision.HIGHEST`):\n            numerical precision for computation\n        query_chunk_size (`int`, *optional*, defaults to 1024):\n            chunk size to divide query array value must divide query_length equally without remainder\n        key_chunk_size (`int`, *optional*, defaults to 4096):\n            chunk size to divide key and value array value must divide key_value_length equally without remainder\n\n    Returns:\n        (`jnp.ndarray`) with shape of (batch..., query_length, head, value_depth_per_head)\n    \"\"\"\n    num_q, num_heads, q_features = query.shape[-3:]\n\n    def chunk_scanner(chunk_idx, _):\n        # julienne query array\n        query_chunk = jax.lax.dynamic_slice(\n            operand=query,\n            start_indices=([0] * (query.ndim - 3)) + [chunk_idx, 0, 0],  # [...,q,h,d]\n            slice_sizes=list(query.shape[:-3]) + [min(query_chunk_size, num_q), num_heads, q_features],  # [...,q,h,d]\n        )\n\n        return (\n            chunk_idx + query_chunk_size,  # unused ignore it\n            _query_chunk_attention(\n                query=query_chunk, key=key, value=value, precision=precision, key_chunk_size=key_chunk_size\n            ),\n        )\n\n    _, res = jax.lax.scan(\n        f=chunk_scanner, init=0, xs=None, length=math.ceil(num_q / query_chunk_size)  # start counter  # stop counter\n    )\n\n    return jnp.concatenate(res, axis=-3)  # fuse the chunked result back\n\n\nclass FlaxAttention(nn.Module):\n    r\"\"\"\n    A Flax multi-head attention module as described in: https://arxiv.org/abs/1706.03762\n\n    Parameters:\n        query_dim (:obj:`int`):\n            Input hidden states dimension\n        heads (:obj:`int`, *optional*, defaults to 8):\n            Number of heads\n        dim_head (:obj:`int`, *optional*, defaults to 64):\n            Hidden states dimension inside each head\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n\n    \"\"\"\n    query_dim: int\n    heads: int = 8\n    dim_head: int = 64\n    dropout: float = 0.0\n    use_memory_efficient_attention: bool = False\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        inner_dim = self.dim_head * self.heads\n        self.scale = self.dim_head**-0.5\n\n        # Weights were exported with old names {to_q, to_k, to_v, to_out}\n        self.query = nn.Dense(inner_dim, use_bias=False, dtype=self.dtype, name=\"to_q\")\n        self.key = nn.Dense(inner_dim, use_bias=False, dtype=self.dtype, name=\"to_k\")\n        self.value = nn.Dense(inner_dim, use_bias=False, dtype=self.dtype, name=\"to_v\")\n\n        self.proj_attn = nn.Dense(self.query_dim, dtype=self.dtype, name=\"to_out_0\")\n\n    def reshape_heads_to_batch_dim(self, tensor):\n        batch_size, seq_len, dim = tensor.shape\n        head_size = 
self.heads\n        tensor = tensor.reshape(batch_size, seq_len, head_size, dim // head_size)\n        tensor = jnp.transpose(tensor, (0, 2, 1, 3))\n        tensor = tensor.reshape(batch_size * head_size, seq_len, dim // head_size)\n        return tensor\n\n    def reshape_batch_dim_to_heads(self, tensor):\n        batch_size, seq_len, dim = tensor.shape\n        head_size = self.heads\n        tensor = tensor.reshape(batch_size // head_size, head_size, seq_len, dim)\n        tensor = jnp.transpose(tensor, (0, 2, 1, 3))\n        tensor = tensor.reshape(batch_size // head_size, seq_len, dim * head_size)\n        return tensor\n\n    def __call__(self, hidden_states, context=None, deterministic=True):\n        context = hidden_states if context is None else context\n\n        query_proj = self.query(hidden_states)\n        key_proj = self.key(context)\n        value_proj = self.value(context)\n\n        query_states = self.reshape_heads_to_batch_dim(query_proj)\n        key_states = self.reshape_heads_to_batch_dim(key_proj)\n        value_states = self.reshape_heads_to_batch_dim(value_proj)\n\n        if self.use_memory_efficient_attention:\n            query_states = query_states.transpose(1, 0, 2)\n            key_states = key_states.transpose(1, 0, 2)\n            value_states = value_states.transpose(1, 0, 2)\n\n            # this if statement create a chunk size for each layer of the unet\n            # the chunk size is equal to the query_length dimension of the deepest layer of the unet\n\n            flatten_latent_dim = query_states.shape[-3]\n            if flatten_latent_dim % 64 == 0:\n                query_chunk_size = int(flatten_latent_dim / 64)\n            elif flatten_latent_dim % 16 == 0:\n                query_chunk_size = int(flatten_latent_dim / 16)\n            elif flatten_latent_dim % 4 == 0:\n                query_chunk_size = int(flatten_latent_dim / 4)\n            else:\n                query_chunk_size = int(flatten_latent_dim)\n\n            hidden_states = jax_memory_efficient_attention(\n                query_states, key_states, value_states, query_chunk_size=query_chunk_size, key_chunk_size=4096 * 4\n            )\n\n            hidden_states = hidden_states.transpose(1, 0, 2)\n        else:\n            # compute attentions\n            attention_scores = jnp.einsum(\"b i d, b j d->b i j\", query_states, key_states)\n            attention_scores = attention_scores * self.scale\n            attention_probs = nn.softmax(attention_scores, axis=2)\n\n            # attend to values\n            hidden_states = jnp.einsum(\"b i j, b j d -> b i d\", attention_probs, value_states)\n\n        hidden_states = self.reshape_batch_dim_to_heads(hidden_states)\n        hidden_states = self.proj_attn(hidden_states)\n        return hidden_states\n\n\nclass FlaxBasicTransformerBlock(nn.Module):\n    r\"\"\"\n    A Flax transformer block layer with `GLU` (Gated Linear Unit) activation function as described in:\n    https://arxiv.org/abs/1706.03762\n\n\n    Parameters:\n        dim (:obj:`int`):\n            Inner hidden states dimension\n        n_heads (:obj:`int`):\n            Number of heads\n        d_head (:obj:`int`):\n            Hidden states dimension inside each head\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        only_cross_attention (`bool`, defaults to `False`):\n            Whether to only apply cross attention.\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters 
`dtype`\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n    \"\"\"\n    dim: int\n    n_heads: int\n    d_head: int\n    dropout: float = 0.0\n    only_cross_attention: bool = False\n    dtype: jnp.dtype = jnp.float32\n    use_memory_efficient_attention: bool = False\n\n    def setup(self):\n        # self attention (or cross_attention if only_cross_attention is True)\n        self.attn1 = FlaxAttention(\n            self.dim, self.n_heads, self.d_head, self.dropout, self.use_memory_efficient_attention, dtype=self.dtype\n        )\n        # cross attention\n        self.attn2 = FlaxAttention(\n            self.dim, self.n_heads, self.d_head, self.dropout, self.use_memory_efficient_attention, dtype=self.dtype\n        )\n        self.ff = FlaxFeedForward(dim=self.dim, dropout=self.dropout, dtype=self.dtype)\n        self.norm1 = nn.LayerNorm(epsilon=1e-5, dtype=self.dtype)\n        self.norm2 = nn.LayerNorm(epsilon=1e-5, dtype=self.dtype)\n        self.norm3 = nn.LayerNorm(epsilon=1e-5, dtype=self.dtype)\n\n    def __call__(self, hidden_states, context, deterministic=True):\n        # self attention\n        residual = hidden_states\n        if self.only_cross_attention:\n            hidden_states = self.attn1(self.norm1(hidden_states), context, deterministic=deterministic)\n        else:\n            hidden_states = self.attn1(self.norm1(hidden_states), deterministic=deterministic)\n        hidden_states = hidden_states + residual\n\n        # cross attention\n        residual = hidden_states\n        hidden_states = self.attn2(self.norm2(hidden_states), context, deterministic=deterministic)\n        hidden_states = hidden_states + residual\n\n        # feed forward\n        residual = hidden_states\n        hidden_states = self.ff(self.norm3(hidden_states), deterministic=deterministic)\n        hidden_states = hidden_states + residual\n\n        return hidden_states\n\n\nclass FlaxTransformer2DModel(nn.Module):\n    r\"\"\"\n    A Spatial Transformer layer with Gated Linear Unit (GLU) activation function as described in:\n    https://arxiv.org/pdf/1506.02025.pdf\n\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input number of channels\n        n_heads (:obj:`int`):\n            Number of heads\n        d_head (:obj:`int`):\n            Hidden states dimension inside each head\n        depth (:obj:`int`, *optional*, defaults to 1):\n            Number of transformers block\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        use_linear_projection (`bool`, defaults to `False`): tbd\n        only_cross_attention (`bool`, defaults to `False`): tbd\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n    \"\"\"\n    in_channels: int\n    n_heads: int\n    d_head: int\n    depth: int = 1\n    dropout: float = 0.0\n    use_linear_projection: bool = False\n    only_cross_attention: bool = False\n    dtype: jnp.dtype = jnp.float32\n    use_memory_efficient_attention: bool = False\n\n    def setup(self):\n        self.norm = nn.GroupNorm(num_groups=32, epsilon=1e-5)\n\n        inner_dim = self.n_heads * self.d_head\n        if self.use_linear_projection:\n            self.proj_in = nn.Dense(inner_dim, 
dtype=self.dtype)\n        else:\n            self.proj_in = nn.Conv(\n                inner_dim,\n                kernel_size=(1, 1),\n                strides=(1, 1),\n                padding=\"VALID\",\n                dtype=self.dtype,\n            )\n\n        self.transformer_blocks = [\n            FlaxBasicTransformerBlock(\n                inner_dim,\n                self.n_heads,\n                self.d_head,\n                dropout=self.dropout,\n                only_cross_attention=self.only_cross_attention,\n                dtype=self.dtype,\n                use_memory_efficient_attention=self.use_memory_efficient_attention,\n            )\n            for _ in range(self.depth)\n        ]\n\n        if self.use_linear_projection:\n            self.proj_out = nn.Dense(inner_dim, dtype=self.dtype)\n        else:\n            self.proj_out = nn.Conv(\n                inner_dim,\n                kernel_size=(1, 1),\n                strides=(1, 1),\n                padding=\"VALID\",\n                dtype=self.dtype,\n            )\n\n    def __call__(self, hidden_states, context, deterministic=True):\n        batch, height, width, channels = hidden_states.shape\n        residual = hidden_states\n        hidden_states = self.norm(hidden_states)\n        if self.use_linear_projection:\n            hidden_states = hidden_states.reshape(batch, height * width, channels)\n            hidden_states = self.proj_in(hidden_states)\n        else:\n            hidden_states = self.proj_in(hidden_states)\n            hidden_states = hidden_states.reshape(batch, height * width, channels)\n\n        for transformer_block in self.transformer_blocks:\n            hidden_states = transformer_block(hidden_states, context, deterministic=deterministic)\n\n        if self.use_linear_projection:\n            hidden_states = self.proj_out(hidden_states)\n            hidden_states = hidden_states.reshape(batch, height, width, channels)\n        else:\n            hidden_states = hidden_states.reshape(batch, height, width, channels)\n            hidden_states = self.proj_out(hidden_states)\n\n        hidden_states = hidden_states + residual\n        return hidden_states\n\n\nclass FlaxFeedForward(nn.Module):\n    r\"\"\"\n    Flax module that encapsulates two Linear layers separated by a non-linearity. 
It is the counterpart of PyTorch's\n    [`FeedForward`] class, with the following simplifications:\n    - The activation function is currently hardcoded to a gated linear unit from:\n    https://arxiv.org/abs/2002.05202\n    - `dim_out` is equal to `dim`.\n    - The number of hidden dimensions is hardcoded to `dim * 4` in [`FlaxGELU`].\n\n    Parameters:\n        dim (:obj:`int`):\n            Inner hidden states dimension\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    dim: int\n    dropout: float = 0.0\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        # The second linear layer needs to be called\n        # net_2 for now to match the index of the Sequential layer\n        self.net_0 = FlaxGEGLU(self.dim, self.dropout, self.dtype)\n        self.net_2 = nn.Dense(self.dim, dtype=self.dtype)\n\n    def __call__(self, hidden_states, deterministic=True):\n        hidden_states = self.net_0(hidden_states)\n        hidden_states = self.net_2(hidden_states)\n        return hidden_states\n\n\nclass FlaxGEGLU(nn.Module):\n    r\"\"\"\n    Flax implementation of a Linear layer followed by the variant of the gated linear unit activation function from\n    https://arxiv.org/abs/2002.05202.\n\n    Parameters:\n        dim (:obj:`int`):\n            Input hidden states dimension\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    dim: int\n    dropout: float = 0.0\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        inner_dim = self.dim * 4\n        self.proj = nn.Dense(inner_dim * 2, dtype=self.dtype)\n\n    def __call__(self, hidden_states, deterministic=True):\n        hidden_states = self.proj(hidden_states)\n        hidden_linear, hidden_gelu = jnp.split(hidden_states, 2, axis=2)\n        return hidden_linear * nn.gelu(hidden_gelu)\n"
  },
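  {
    "path": "examples/hypothetical_sketches/flax_attention_smoke_test.py",
    "content": "# Hypothetical sketch, NOT part of the original repository: exercises\n# jax_memory_efficient_attention and FlaxAttention from diffusers/models/attention_flax.py\n# with small, arbitrary shapes. Per the docstring, query_chunk_size and key_chunk_size\n# must divide the query / key lengths without remainder.\nimport jax\nimport jax.numpy as jnp\n\nfrom diffusers.models.attention_flax import FlaxAttention, jax_memory_efficient_attention\n\n# chunked attention over (batch, length, heads, head_dim) arrays\nquery = jnp.ones((1, 1024, 8, 64))\nkey = jnp.ones((1, 4096, 8, 64))\nvalue = jnp.ones((1, 4096, 8, 64))\nout = jax_memory_efficient_attention(query, key, value, query_chunk_size=256, key_chunk_size=1024)\nprint(out.shape)  # (1, 1024, 8, 64)\n\n# the module wrapper follows the usual flax.linen init/apply pattern\nattn = FlaxAttention(query_dim=320, heads=8, dim_head=40)\nhidden_states = jnp.ones((2, 64, 320))\nparams = attn.init(jax.random.PRNGKey(0), hidden_states)\nprint(attn.apply(params, hidden_states).shape)  # (2, 64, 320)\n"
  },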
  {
    "path": "diffusers/models/attention_processor.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport warnings\nfrom typing import Callable, Optional, Union\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom ..utils import deprecate, logging, maybe_allow_in_graph\nfrom ..utils.import_utils import is_xformers_available\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nif is_xformers_available():\n    import xformers\n    import xformers.ops\nelse:\n    xformers = None\n\n\n@maybe_allow_in_graph\nclass Attention(nn.Module):\n    r\"\"\"\n    A cross attention layer.\n\n    Parameters:\n        query_dim (`int`): The number of channels in the query.\n        cross_attention_dim (`int`, *optional*):\n            The number of channels in the encoder_hidden_states. If not given, defaults to `query_dim`.\n        heads (`int`,  *optional*, defaults to 8): The number of heads to use for multi-head attention.\n        dim_head (`int`,  *optional*, defaults to 64): The number of channels in each head.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n        bias (`bool`, *optional*, defaults to False):\n            Set to `True` for the query, key, and value linear layers to contain a bias parameter.\n    \"\"\"\n\n    def __init__(\n        self,\n        query_dim: int,\n        cross_attention_dim: Optional[int] = None,\n        heads: int = 8,\n        dim_head: int = 64,\n        dropout: float = 0.0,\n        bias=False,\n        upcast_attention: bool = False,\n        upcast_softmax: bool = False,\n        cross_attention_norm: Optional[str] = None,\n        cross_attention_norm_num_groups: int = 32,\n        added_kv_proj_dim: Optional[int] = None,\n        norm_num_groups: Optional[int] = None,\n        out_bias: bool = True,\n        scale_qk: bool = True,\n        only_cross_attention: bool = False,\n        eps: float = 1e-5,\n        rescale_output_factor: float = 1.0,\n        residual_connection: bool = False,\n        _from_deprecated_attn_block=False,\n        processor: Optional[\"AttnProcessor\"] = None,\n    ):\n        super().__init__()\n        inner_dim = dim_head * heads\n        cross_attention_dim = cross_attention_dim if cross_attention_dim is not None else query_dim\n        self.upcast_attention = upcast_attention\n        self.upcast_softmax = upcast_softmax\n        self.rescale_output_factor = rescale_output_factor\n        self.residual_connection = residual_connection\n\n        # we make use of this private variable to know whether this class is loaded\n        # with an deprecated state dict so that we can convert it on the fly\n        self._from_deprecated_attn_block = _from_deprecated_attn_block\n\n        self.scale_qk = scale_qk\n        self.scale = dim_head**-0.5 if self.scale_qk else 1.0\n\n        self.heads = heads\n        # for slice_size > 0 the attention score computation\n        # is split across the batch axis to 
save memory\n        # You can set slice_size with `set_attention_slice`\n        self.sliceable_head_dim = heads\n\n        self.added_kv_proj_dim = added_kv_proj_dim\n        self.only_cross_attention = only_cross_attention\n\n        if self.added_kv_proj_dim is None and self.only_cross_attention:\n            raise ValueError(\n                \"`only_cross_attention` can only be set to True if `added_kv_proj_dim` is not None. Make sure to set either `only_cross_attention=False` or define `added_kv_proj_dim`.\"\n            )\n\n        if norm_num_groups is not None:\n            self.group_norm = nn.GroupNorm(num_channels=query_dim, num_groups=norm_num_groups, eps=eps, affine=True)\n        else:\n            self.group_norm = None\n\n        if cross_attention_norm is None:\n            self.norm_cross = None\n        elif cross_attention_norm == \"layer_norm\":\n            self.norm_cross = nn.LayerNorm(cross_attention_dim)\n        elif cross_attention_norm == \"group_norm\":\n            if self.added_kv_proj_dim is not None:\n                # The given `encoder_hidden_states` are initially of shape\n                # (batch_size, seq_len, added_kv_proj_dim) before being projected\n                # to (batch_size, seq_len, cross_attention_dim). The norm is applied\n                # before the projection, so we need to use `added_kv_proj_dim` as\n                # the number of channels for the group norm.\n                norm_cross_num_channels = added_kv_proj_dim\n            else:\n                norm_cross_num_channels = cross_attention_dim\n\n            self.norm_cross = nn.GroupNorm(\n                num_channels=norm_cross_num_channels, num_groups=cross_attention_norm_num_groups, eps=1e-5, affine=True\n            )\n        else:\n            raise ValueError(\n                f\"unknown cross_attention_norm: {cross_attention_norm}. Should be None, 'layer_norm' or 'group_norm'\"\n            )\n\n        self.to_q = nn.Linear(query_dim, inner_dim, bias=bias)\n\n        if not self.only_cross_attention:\n            # only relevant for the `AddedKVProcessor` classes\n            self.to_k = nn.Linear(cross_attention_dim, inner_dim, bias=bias)\n            self.to_v = nn.Linear(cross_attention_dim, inner_dim, bias=bias)\n        else:\n            self.to_k = None\n            self.to_v = None\n\n        if self.added_kv_proj_dim is not None:\n            self.add_k_proj = nn.Linear(added_kv_proj_dim, inner_dim)\n            self.add_v_proj = nn.Linear(added_kv_proj_dim, inner_dim)\n\n        self.to_out = nn.ModuleList([])\n        self.to_out.append(nn.Linear(inner_dim, query_dim, bias=out_bias))\n        self.to_out.append(nn.Dropout(dropout))\n\n        # set attention processor\n        # We use the AttnProcessor2_0 by default when torch 2.x is used which uses\n        # torch.nn.functional.scaled_dot_product_attention for native Flash/memory_efficient_attention\n        # but only if it has the default `scale` argument. 
TODO remove scale_qk check when we move to torch 2.1\n        if processor is None:\n            processor = (\n                AttnProcessor2_0() if hasattr(F, \"scaled_dot_product_attention\") and self.scale_qk else AttnProcessor()\n            )\n        self.set_processor(processor)\n\n    def set_use_memory_efficient_attention_xformers(\n        self, use_memory_efficient_attention_xformers: bool, attention_op: Optional[Callable] = None\n    ):\n        is_lora = hasattr(self, \"processor\") and isinstance(\n            self.processor, (LoRAAttnProcessor, LoRAXFormersAttnProcessor)\n        )\n        is_custom_diffusion = hasattr(self, \"processor\") and isinstance(\n            self.processor, (CustomDiffusionAttnProcessor, CustomDiffusionXFormersAttnProcessor)\n        )\n\n        if use_memory_efficient_attention_xformers:\n            if self.added_kv_proj_dim is not None:\n                # TODO(Anton, Patrick, Suraj, William) - currently xformers doesn't work for UnCLIP\n                # which uses this type of cross attention ONLY because the attention mask of format\n                # [0, ..., -10.000, ..., 0, ...,] is not supported\n                raise NotImplementedError(\n                    \"Memory efficient attention with `xformers` is currently not supported when\"\n                    \" `self.added_kv_proj_dim` is defined.\"\n                )\n            elif not is_xformers_available():\n                raise ModuleNotFoundError(\n                    (\n                        \"Refer to https://github.com/facebookresearch/xformers for more information on how to install\"\n                        \" xformers\"\n                    ),\n                    name=\"xformers\",\n                )\n            elif not torch.cuda.is_available():\n                raise ValueError(\n                    \"torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is\"\n                    \" only available for GPU \"\n                )\n            elif hasattr(F, \"scaled_dot_product_attention\") and self.scale_qk:\n                warnings.warn(\n                    \"You have specified using flash attention using xFormers but you have PyTorch 2.0 already installed. 
\"\n                    \"We will default to PyTorch's native efficient flash attention implementation provided by PyTorch 2.0.\"\n                )\n            else:\n                try:\n                    # Make sure we can run the memory efficient attention\n                    _ = xformers.ops.memory_efficient_attention(\n                        torch.randn((1, 2, 40), device=\"cuda\"),\n                        torch.randn((1, 2, 40), device=\"cuda\"),\n                        torch.randn((1, 2, 40), device=\"cuda\"),\n                    )\n                except Exception as e:\n                    raise e\n\n            if is_lora:\n                processor = LoRAXFormersAttnProcessor(\n                    hidden_size=self.processor.hidden_size,\n                    cross_attention_dim=self.processor.cross_attention_dim,\n                    rank=self.processor.rank,\n                    attention_op=attention_op,\n                )\n                processor.load_state_dict(self.processor.state_dict())\n                processor.to(self.processor.to_q_lora.up.weight.device)\n            elif is_custom_diffusion:\n                processor = CustomDiffusionXFormersAttnProcessor(\n                    train_kv=self.processor.train_kv,\n                    train_q_out=self.processor.train_q_out,\n                    hidden_size=self.processor.hidden_size,\n                    cross_attention_dim=self.processor.cross_attention_dim,\n                    attention_op=attention_op,\n                )\n                processor.load_state_dict(self.processor.state_dict())\n                if hasattr(self.processor, \"to_k_custom_diffusion\"):\n                    processor.to(self.processor.to_k_custom_diffusion.weight.device)\n            else:\n                processor = XFormersAttnProcessor(attention_op=attention_op)\n        else:\n            if is_lora:\n                processor = LoRAAttnProcessor(\n                    hidden_size=self.processor.hidden_size,\n                    cross_attention_dim=self.processor.cross_attention_dim,\n                    rank=self.processor.rank,\n                )\n                processor.load_state_dict(self.processor.state_dict())\n                processor.to(self.processor.to_q_lora.up.weight.device)\n            elif is_custom_diffusion:\n                processor = CustomDiffusionAttnProcessor(\n                    train_kv=self.processor.train_kv,\n                    train_q_out=self.processor.train_q_out,\n                    hidden_size=self.processor.hidden_size,\n                    cross_attention_dim=self.processor.cross_attention_dim,\n                )\n                processor.load_state_dict(self.processor.state_dict())\n                if hasattr(self.processor, \"to_k_custom_diffusion\"):\n                    processor.to(self.processor.to_k_custom_diffusion.weight.device)\n            else:\n                # set attention processor\n                # We use the AttnProcessor2_0 by default when torch 2.x is used which uses\n                # torch.nn.functional.scaled_dot_product_attention for native Flash/memory_efficient_attention\n                # but only if it has the default `scale` argument. 
TODO remove scale_qk check when we move to torch 2.1\n                processor = (\n                    AttnProcessor2_0()\n                    if hasattr(F, \"scaled_dot_product_attention\") and self.scale_qk\n                    else AttnProcessor()\n                )\n\n        self.set_processor(processor)\n\n    def set_attention_slice(self, slice_size):\n        if slice_size is not None and slice_size > self.sliceable_head_dim:\n            raise ValueError(f\"slice_size {slice_size} has to be smaller or equal to {self.sliceable_head_dim}.\")\n\n        if slice_size is not None and self.added_kv_proj_dim is not None:\n            processor = SlicedAttnAddedKVProcessor(slice_size)\n        elif slice_size is not None:\n            processor = SlicedAttnProcessor(slice_size)\n        elif self.added_kv_proj_dim is not None:\n            processor = AttnAddedKVProcessor()\n        else:\n            # set attention processor\n            # We use the AttnProcessor2_0 by default when torch 2.x is used which uses\n            # torch.nn.functional.scaled_dot_product_attention for native Flash/memory_efficient_attention\n            # but only if it has the default `scale` argument. TODO remove scale_qk check when we move to torch 2.1\n            processor = (\n                AttnProcessor2_0() if hasattr(F, \"scaled_dot_product_attention\") and self.scale_qk else AttnProcessor()\n            )\n\n        self.set_processor(processor)\n\n    def set_processor(self, processor: \"AttnProcessor\"):\n        # if current processor is in `self._modules` and if passed `processor` is not, we need to\n        # pop `processor` from `self._modules`\n        if (\n            hasattr(self, \"processor\")\n            and isinstance(self.processor, torch.nn.Module)\n            and not isinstance(processor, torch.nn.Module)\n        ):\n            logger.info(f\"You are removing possibly trained weights of {self.processor} with {processor}\")\n            self._modules.pop(\"processor\")\n\n        self.processor = processor\n\n    def forward(self, hidden_states, encoder_hidden_states=None, attention_mask=None, **cross_attention_kwargs):\n        # The `Attention` class can call different attention processors / attention functions\n        # here we simply pass along all tensors to the selected processor class\n        # For standard processors that are defined here, `**cross_attention_kwargs` is empty\n        return self.processor(\n            self,\n            hidden_states,\n            encoder_hidden_states=encoder_hidden_states,\n            attention_mask=attention_mask,\n            **cross_attention_kwargs,\n        )\n\n    def batch_to_head_dim(self, tensor):\n        head_size = self.heads\n        batch_size, seq_len, dim = tensor.shape\n        tensor = tensor.reshape(batch_size // head_size, head_size, seq_len, dim)\n        tensor = tensor.permute(0, 2, 1, 3).reshape(batch_size // head_size, seq_len, dim * head_size)\n        return tensor\n\n    def head_to_batch_dim(self, tensor, out_dim=3):\n        head_size = self.heads\n        batch_size, seq_len, dim = tensor.shape\n        tensor = tensor.reshape(batch_size, seq_len, head_size, dim // head_size)\n        tensor = tensor.permute(0, 2, 1, 3)\n\n        if out_dim == 3:\n            tensor = tensor.reshape(batch_size * head_size, seq_len, dim // head_size)\n\n        return tensor\n\n    def get_attention_scores(self, query, key, attention_mask=None):\n        dtype = query.dtype\n        if self.upcast_attention:\n     
       query = query.float()\n            key = key.float()\n\n        if attention_mask is None:\n            baddbmm_input = torch.empty(\n                query.shape[0], query.shape[1], key.shape[1], dtype=query.dtype, device=query.device\n            )\n            beta = 0\n        else:\n            baddbmm_input = attention_mask\n            beta = 1\n\n        attention_scores = torch.baddbmm(\n            baddbmm_input,\n            query,\n            key.transpose(-1, -2),\n            beta=beta,\n            alpha=self.scale,\n        )\n        del baddbmm_input\n\n        if self.upcast_softmax:\n            attention_scores = attention_scores.float()\n\n        attention_probs = attention_scores.softmax(dim=-1)\n        del attention_scores\n\n        attention_probs = attention_probs.to(dtype)\n\n        return attention_probs\n\n    def prepare_attention_mask(self, attention_mask, target_length, batch_size=None, out_dim=3):\n        if batch_size is None:\n            deprecate(\n                \"batch_size=None\",\n                \"0.0.15\",\n                (\n                    \"Not passing the `batch_size` parameter to `prepare_attention_mask` can lead to incorrect\"\n                    \" attention mask preparation and is deprecated behavior. Please make sure to pass `batch_size` to\"\n                    \" `prepare_attention_mask` when preparing the attention_mask.\"\n                ),\n            )\n            batch_size = 1\n\n        head_size = self.heads\n        if attention_mask is None:\n            return attention_mask\n\n        if attention_mask.shape[-1] != target_length:\n            if attention_mask.device.type == \"mps\":\n                # HACK: MPS: Does not support padding by greater than dimension of input tensor.\n                # Instead, we can manually construct the padding tensor.\n                padding_shape = (attention_mask.shape[0], attention_mask.shape[1], target_length)\n                padding = torch.zeros(padding_shape, dtype=attention_mask.dtype, device=attention_mask.device)\n                attention_mask = torch.cat([attention_mask, padding], dim=2)\n            else:\n                attention_mask = F.pad(attention_mask, (0, target_length), value=0.0)\n\n        if out_dim == 3:\n            if attention_mask.shape[0] < batch_size * head_size:\n                attention_mask = attention_mask.repeat_interleave(head_size, dim=0)\n        elif out_dim == 4:\n            attention_mask = attention_mask.unsqueeze(1)\n            attention_mask = attention_mask.repeat_interleave(head_size, dim=1)\n\n        return attention_mask\n\n    def norm_encoder_hidden_states(self, encoder_hidden_states):\n        assert self.norm_cross is not None, \"self.norm_cross must be defined to call self.norm_encoder_hidden_states\"\n\n        if isinstance(self.norm_cross, nn.LayerNorm):\n            encoder_hidden_states = self.norm_cross(encoder_hidden_states)\n        elif isinstance(self.norm_cross, nn.GroupNorm):\n            # Group norm norms along the channels dimension and expects\n            # input to be in the shape of (N, C, *). 
In this case, we want\n            # to norm along the hidden dimension, so we need to move\n            # (batch_size, sequence_length, hidden_size) ->\n            # (batch_size, hidden_size, sequence_length)\n            encoder_hidden_states = encoder_hidden_states.transpose(1, 2)\n            encoder_hidden_states = self.norm_cross(encoder_hidden_states)\n            encoder_hidden_states = encoder_hidden_states.transpose(1, 2)\n        else:\n            assert False\n\n        return encoder_hidden_states\n\n\nclass AttnProcessor:\n    def __call__(\n        self,\n        attn: Attention,\n        hidden_states,\n        encoder_hidden_states=None,\n        attention_mask=None,\n    ):\n        residual = hidden_states\n\n        input_ndim = hidden_states.ndim\n\n        if input_ndim == 4:\n            batch_size, channel, height, width = hidden_states.shape\n            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)\n\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if attn.group_norm is not None:\n            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        query = attn.head_to_batch_dim(query)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        if input_ndim == 4:\n            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)\n\n        if attn.residual_connection:\n            hidden_states = hidden_states + residual\n\n        hidden_states = hidden_states / attn.rescale_output_factor\n\n        return hidden_states\n\n\nclass LoRALinearLayer(nn.Module):\n    def __init__(self, in_features, out_features, rank=4):\n        super().__init__()\n\n        if rank > min(in_features, out_features):\n            raise ValueError(f\"LoRA rank {rank} must be less or equal than {min(in_features, out_features)}\")\n\n        self.down = nn.Linear(in_features, rank, bias=False)\n        self.up = nn.Linear(rank, out_features, bias=False)\n\n        nn.init.normal_(self.down.weight, std=1 / rank)\n        nn.init.zeros_(self.up.weight)\n\n    def forward(self, hidden_states):\n        orig_dtype = hidden_states.dtype\n        dtype = self.down.weight.dtype\n\n        down_hidden_states = self.down(hidden_states.to(dtype))\n        up_hidden_states = self.up(down_hidden_states)\n\n        return up_hidden_states.to(orig_dtype)\n\n\nclass LoRAAttnProcessor(nn.Module):\n    def __init__(self, hidden_size, cross_attention_dim=None, rank=4):\n        super().__init__()\n\n        self.hidden_size = 
hidden_size\n        self.cross_attention_dim = cross_attention_dim\n        self.rank = rank\n\n        self.to_q_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n        self.to_k_lora = LoRALinearLayer(cross_attention_dim or hidden_size, hidden_size, rank)\n        self.to_v_lora = LoRALinearLayer(cross_attention_dim or hidden_size, hidden_size, rank)\n        self.to_out_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None, scale=1.0):\n        residual = hidden_states\n\n        input_ndim = hidden_states.ndim\n\n        if input_ndim == 4:\n            batch_size, channel, height, width = hidden_states.shape\n            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)\n\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if attn.group_norm is not None:\n            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states) + scale * self.to_q_lora(hidden_states)\n        query = attn.head_to_batch_dim(query)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states) + scale * self.to_k_lora(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states) + scale * self.to_v_lora(encoder_hidden_states)\n\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states) + scale * self.to_out_lora(hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        if input_ndim == 4:\n            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)\n\n        if attn.residual_connection:\n            hidden_states = hidden_states + residual\n\n        hidden_states = hidden_states / attn.rescale_output_factor\n\n        return hidden_states\n\n\nclass CustomDiffusionAttnProcessor(nn.Module):\n    def __init__(\n        self,\n        train_kv=True,\n        train_q_out=True,\n        hidden_size=None,\n        cross_attention_dim=None,\n        out_bias=True,\n        dropout=0.0,\n    ):\n        super().__init__()\n        self.train_kv = train_kv\n        self.train_q_out = train_q_out\n\n        self.hidden_size = hidden_size\n        self.cross_attention_dim = cross_attention_dim\n\n        # `_custom_diffusion` id for easy serialization and loading.\n        if self.train_kv:\n            self.to_k_custom_diffusion = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False)\n            self.to_v_custom_diffusion = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False)\n        if self.train_q_out:\n            self.to_q_custom_diffusion = nn.Linear(hidden_size, hidden_size, bias=False)\n            self.to_out_custom_diffusion = 
nn.ModuleList([])\n            self.to_out_custom_diffusion.append(nn.Linear(hidden_size, hidden_size, bias=out_bias))\n            self.to_out_custom_diffusion.append(nn.Dropout(dropout))\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        batch_size, sequence_length, _ = hidden_states.shape\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n        if self.train_q_out:\n            query = self.to_q_custom_diffusion(hidden_states)\n        else:\n            query = attn.to_q(hidden_states)\n\n        if encoder_hidden_states is None:\n            crossattn = False\n            encoder_hidden_states = hidden_states\n        else:\n            crossattn = True\n            if attn.norm_cross:\n                encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        if self.train_kv:\n            key = self.to_k_custom_diffusion(encoder_hidden_states)\n            value = self.to_v_custom_diffusion(encoder_hidden_states)\n        else:\n            key = attn.to_k(encoder_hidden_states)\n            value = attn.to_v(encoder_hidden_states)\n\n        if crossattn:\n            detach = torch.ones_like(key)\n            detach[:, :1, :] = detach[:, :1, :] * 0.0\n            key = detach * key + (1 - detach) * key.detach()\n            value = detach * value + (1 - detach) * value.detach()\n\n        query = attn.head_to_batch_dim(query)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        if self.train_q_out:\n            # linear proj\n            hidden_states = self.to_out_custom_diffusion[0](hidden_states)\n            # dropout\n            hidden_states = self.to_out_custom_diffusion[1](hidden_states)\n        else:\n            # linear proj\n            hidden_states = attn.to_out[0](hidden_states)\n            # dropout\n            hidden_states = attn.to_out[1](hidden_states)\n\n        return hidden_states\n\n\nclass AttnAddedKVProcessor:\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        residual = hidden_states\n        hidden_states = hidden_states.view(hidden_states.shape[0], hidden_states.shape[1], -1).transpose(1, 2)\n        batch_size, sequence_length, _ = hidden_states.shape\n\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n        query = attn.head_to_batch_dim(query)\n\n        encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states)\n        encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states)\n        encoder_hidden_states_key_proj = attn.head_to_batch_dim(encoder_hidden_states_key_proj)\n        encoder_hidden_states_value_proj = attn.head_to_batch_dim(encoder_hidden_states_value_proj)\n\n        if not attn.only_cross_attention:\n            key = attn.to_k(hidden_states)\n    
        value = attn.to_v(hidden_states)\n            key = attn.head_to_batch_dim(key)\n            value = attn.head_to_batch_dim(value)\n            key = torch.cat([encoder_hidden_states_key_proj, key], dim=1)\n            value = torch.cat([encoder_hidden_states_value_proj, value], dim=1)\n        else:\n            key = encoder_hidden_states_key_proj\n            value = encoder_hidden_states_value_proj\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        hidden_states = hidden_states.transpose(-1, -2).reshape(residual.shape)\n        hidden_states = hidden_states + residual\n\n        return hidden_states\n\n\nclass AttnAddedKVProcessor2_0:\n    def __init__(self):\n        if not hasattr(F, \"scaled_dot_product_attention\"):\n            raise ImportError(\n                \"AttnAddedKVProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.\"\n            )\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        residual = hidden_states\n        hidden_states = hidden_states.view(hidden_states.shape[0], hidden_states.shape[1], -1).transpose(1, 2)\n        batch_size, sequence_length, _ = hidden_states.shape\n\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size, out_dim=4)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n        query = attn.head_to_batch_dim(query, out_dim=4)\n\n        encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states)\n        encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states)\n        encoder_hidden_states_key_proj = attn.head_to_batch_dim(encoder_hidden_states_key_proj, out_dim=4)\n        encoder_hidden_states_value_proj = attn.head_to_batch_dim(encoder_hidden_states_value_proj, out_dim=4)\n\n        if not attn.only_cross_attention:\n            key = attn.to_k(hidden_states)\n            value = attn.to_v(hidden_states)\n            key = attn.head_to_batch_dim(key, out_dim=4)\n            value = attn.head_to_batch_dim(value, out_dim=4)\n            key = torch.cat([encoder_hidden_states_key_proj, key], dim=2)\n            value = torch.cat([encoder_hidden_states_value_proj, value], dim=2)\n        else:\n            key = encoder_hidden_states_key_proj\n            value = encoder_hidden_states_value_proj\n\n        # the output of sdp = (batch, num_heads, seq_len, head_dim)\n        # TODO: add support for attn.scale when we move to Torch 2.1\n        hidden_states = F.scaled_dot_product_attention(\n            query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False\n        )\n        hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, residual.shape[1])\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        hidden_states = 
hidden_states.transpose(-1, -2).reshape(residual.shape)\n        hidden_states = hidden_states + residual\n\n        return hidden_states\n\n\nclass LoRAAttnAddedKVProcessor(nn.Module):\n    def __init__(self, hidden_size, cross_attention_dim=None, rank=4):\n        super().__init__()\n\n        self.hidden_size = hidden_size\n        self.cross_attention_dim = cross_attention_dim\n        self.rank = rank\n\n        self.to_q_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n        self.add_k_proj_lora = LoRALinearLayer(cross_attention_dim or hidden_size, hidden_size, rank)\n        self.add_v_proj_lora = LoRALinearLayer(cross_attention_dim or hidden_size, hidden_size, rank)\n        self.to_k_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n        self.to_v_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n        self.to_out_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None, scale=1.0):\n        residual = hidden_states\n        hidden_states = hidden_states.view(hidden_states.shape[0], hidden_states.shape[1], -1).transpose(1, 2)\n        batch_size, sequence_length, _ = hidden_states.shape\n\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states) + scale * self.to_q_lora(hidden_states)\n        query = attn.head_to_batch_dim(query)\n\n        encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states) + scale * self.add_k_proj_lora(\n            encoder_hidden_states\n        )\n        encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states) + scale * self.add_v_proj_lora(\n            encoder_hidden_states\n        )\n        encoder_hidden_states_key_proj = attn.head_to_batch_dim(encoder_hidden_states_key_proj)\n        encoder_hidden_states_value_proj = attn.head_to_batch_dim(encoder_hidden_states_value_proj)\n\n        if not attn.only_cross_attention:\n            key = attn.to_k(hidden_states) + scale * self.to_k_lora(hidden_states)\n            value = attn.to_v(hidden_states) + scale * self.to_v_lora(hidden_states)\n            key = attn.head_to_batch_dim(key)\n            value = attn.head_to_batch_dim(value)\n            key = torch.cat([encoder_hidden_states_key_proj, key], dim=1)\n            value = torch.cat([encoder_hidden_states_value_proj, value], dim=1)\n        else:\n            key = encoder_hidden_states_key_proj\n            value = encoder_hidden_states_value_proj\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states) + scale * self.to_out_lora(hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        hidden_states = hidden_states.transpose(-1, -2).reshape(residual.shape)\n        hidden_states = hidden_states + residual\n\n        return hidden_states\n\n\nclass XFormersAttnProcessor:\n    def __init__(self, attention_op: Optional[Callable] = 
None):\n        self.attention_op = attention_op\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        residual = hidden_states\n\n        input_ndim = hidden_states.ndim\n\n        if input_ndim == 4:\n            batch_size, channel, height, width = hidden_states.shape\n            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)\n\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if attn.group_norm is not None:\n            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        query = attn.head_to_batch_dim(query).contiguous()\n        key = attn.head_to_batch_dim(key).contiguous()\n        value = attn.head_to_batch_dim(value).contiguous()\n\n        hidden_states = xformers.ops.memory_efficient_attention(\n            query, key, value, attn_bias=attention_mask, op=self.attention_op, scale=attn.scale\n        )\n        hidden_states = hidden_states.to(query.dtype)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        if input_ndim == 4:\n            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)\n\n        if attn.residual_connection:\n            hidden_states = hidden_states + residual\n\n        hidden_states = hidden_states / attn.rescale_output_factor\n\n        return hidden_states\n\n\nclass AttnProcessor2_0:\n    def __init__(self):\n        if not hasattr(F, \"scaled_dot_product_attention\"):\n            raise ImportError(\"AttnProcessor2_0 requires PyTorch 2.0, to use it, please upgrade PyTorch to 2.0.\")\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        residual = hidden_states\n\n        input_ndim = hidden_states.ndim\n\n        if input_ndim == 4:\n            batch_size, channel, height, width = hidden_states.shape\n            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)\n\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n        inner_dim = hidden_states.shape[-1]\n\n        if attention_mask is not None:\n            attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n            # scaled_dot_product_attention expects attention_mask shape to be\n            # (batch, heads, source_length, target_length)\n            attention_mask = attention_mask.view(batch_size, attn.heads, -1, attention_mask.shape[-1])\n\n        if attn.group_norm is not None:\n            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n\n        if 
encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        head_dim = inner_dim // attn.heads\n        query = query.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)\n        key = key.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)\n        value = value.view(batch_size, -1, attn.heads, head_dim).transpose(1, 2)\n\n        # the output of sdp = (batch, num_heads, seq_len, head_dim)\n        # TODO: add support for attn.scale when we move to Torch 2.1\n        hidden_states = F.scaled_dot_product_attention(\n            query, key, value, attn_mask=attention_mask, dropout_p=0.0, is_causal=False\n        )\n\n        hidden_states = hidden_states.transpose(1, 2).reshape(batch_size, -1, attn.heads * head_dim)\n        hidden_states = hidden_states.to(query.dtype)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        if input_ndim == 4:\n            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)\n\n        if attn.residual_connection:\n            hidden_states = hidden_states + residual\n\n        hidden_states = hidden_states / attn.rescale_output_factor\n\n        return hidden_states\n\n\nclass LoRAXFormersAttnProcessor(nn.Module):\n    def __init__(self, hidden_size, cross_attention_dim, rank=4, attention_op: Optional[Callable] = None):\n        super().__init__()\n\n        self.hidden_size = hidden_size\n        self.cross_attention_dim = cross_attention_dim\n        self.rank = rank\n        self.attention_op = attention_op\n\n        self.to_q_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n        self.to_k_lora = LoRALinearLayer(cross_attention_dim or hidden_size, hidden_size, rank)\n        self.to_v_lora = LoRALinearLayer(cross_attention_dim or hidden_size, hidden_size, rank)\n        self.to_out_lora = LoRALinearLayer(hidden_size, hidden_size, rank)\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None, scale=1.0):\n        residual = hidden_states\n\n        input_ndim = hidden_states.ndim\n\n        if input_ndim == 4:\n            batch_size, channel, height, width = hidden_states.shape\n            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)\n\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if attn.group_norm is not None:\n            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states) + scale * self.to_q_lora(hidden_states)\n        query = attn.head_to_batch_dim(query).contiguous()\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states) + scale * self.to_k_lora(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states) + scale * 
self.to_v_lora(encoder_hidden_states)\n\n        key = attn.head_to_batch_dim(key).contiguous()\n        value = attn.head_to_batch_dim(value).contiguous()\n\n        hidden_states = xformers.ops.memory_efficient_attention(\n            query, key, value, attn_bias=attention_mask, op=self.attention_op, scale=attn.scale\n        )\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states) + scale * self.to_out_lora(hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        if input_ndim == 4:\n            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)\n\n        if attn.residual_connection:\n            hidden_states = hidden_states + residual\n\n        hidden_states = hidden_states / attn.rescale_output_factor\n\n        return hidden_states\n\n\nclass CustomDiffusionXFormersAttnProcessor(nn.Module):\n    def __init__(\n        self,\n        train_kv=True,\n        train_q_out=False,\n        hidden_size=None,\n        cross_attention_dim=None,\n        out_bias=True,\n        dropout=0.0,\n        attention_op: Optional[Callable] = None,\n    ):\n        super().__init__()\n        self.train_kv = train_kv\n        self.train_q_out = train_q_out\n\n        self.hidden_size = hidden_size\n        self.cross_attention_dim = cross_attention_dim\n        self.attention_op = attention_op\n\n        # `_custom_diffusion` id for easy serialization and loading.\n        if self.train_kv:\n            self.to_k_custom_diffusion = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False)\n            self.to_v_custom_diffusion = nn.Linear(cross_attention_dim or hidden_size, hidden_size, bias=False)\n        if self.train_q_out:\n            self.to_q_custom_diffusion = nn.Linear(hidden_size, hidden_size, bias=False)\n            self.to_out_custom_diffusion = nn.ModuleList([])\n            self.to_out_custom_diffusion.append(nn.Linear(hidden_size, hidden_size, bias=out_bias))\n            self.to_out_custom_diffusion.append(nn.Dropout(dropout))\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if self.train_q_out:\n            query = self.to_q_custom_diffusion(hidden_states)\n        else:\n            query = attn.to_q(hidden_states)\n\n        if encoder_hidden_states is None:\n            crossattn = False\n            encoder_hidden_states = hidden_states\n        else:\n            crossattn = True\n            if attn.norm_cross:\n                encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        if self.train_kv:\n            key = self.to_k_custom_diffusion(encoder_hidden_states)\n            value = self.to_v_custom_diffusion(encoder_hidden_states)\n        else:\n            key = attn.to_k(encoder_hidden_states)\n            value = attn.to_v(encoder_hidden_states)\n\n        if crossattn:\n            detach = torch.ones_like(key)\n            detach[:, :1, :] = detach[:, :1, :] * 0.0\n            key = detach * key + (1 - detach) * key.detach()\n            value = detach * value + (1 - detach) * value.detach()\n\n        query = 
attn.head_to_batch_dim(query).contiguous()\n        key = attn.head_to_batch_dim(key).contiguous()\n        value = attn.head_to_batch_dim(value).contiguous()\n\n        hidden_states = xformers.ops.memory_efficient_attention(\n            query, key, value, attn_bias=attention_mask, op=self.attention_op, scale=attn.scale\n        )\n        hidden_states = hidden_states.to(query.dtype)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        if self.train_q_out:\n            # linear proj\n            hidden_states = self.to_out_custom_diffusion[0](hidden_states)\n            # dropout\n            hidden_states = self.to_out_custom_diffusion[1](hidden_states)\n        else:\n            # linear proj\n            hidden_states = attn.to_out[0](hidden_states)\n            # dropout\n            hidden_states = attn.to_out[1](hidden_states)\n        return hidden_states\n\n\nclass SlicedAttnProcessor:\n    def __init__(self, slice_size):\n        self.slice_size = slice_size\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        residual = hidden_states\n\n        input_ndim = hidden_states.ndim\n\n        if input_ndim == 4:\n            batch_size, channel, height, width = hidden_states.shape\n            hidden_states = hidden_states.view(batch_size, channel, height * width).transpose(1, 2)\n\n        batch_size, sequence_length, _ = (\n            hidden_states.shape if encoder_hidden_states is None else encoder_hidden_states.shape\n        )\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if attn.group_norm is not None:\n            hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n        dim = query.shape[-1]\n        query = attn.head_to_batch_dim(query)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        batch_size_attention, query_tokens, _ = query.shape\n        hidden_states = torch.zeros(\n            (batch_size_attention, query_tokens, dim // attn.heads), device=query.device, dtype=query.dtype\n        )\n\n        for i in range(batch_size_attention // self.slice_size):\n            start_idx = i * self.slice_size\n            end_idx = (i + 1) * self.slice_size\n\n            query_slice = query[start_idx:end_idx]\n            key_slice = key[start_idx:end_idx]\n            attn_mask_slice = attention_mask[start_idx:end_idx] if attention_mask is not None else None\n\n            attn_slice = attn.get_attention_scores(query_slice, key_slice, attn_mask_slice)\n\n            attn_slice = torch.bmm(attn_slice, value[start_idx:end_idx])\n\n            hidden_states[start_idx:end_idx] = attn_slice\n\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        if input_ndim == 4:\n            hidden_states = hidden_states.transpose(-1, -2).reshape(batch_size, channel, height, width)\n\n        if attn.residual_connection:\n            
hidden_states = hidden_states + residual\n\n        hidden_states = hidden_states / attn.rescale_output_factor\n\n        return hidden_states\n\n\nclass SlicedAttnAddedKVProcessor:\n    def __init__(self, slice_size):\n        self.slice_size = slice_size\n\n    def __call__(self, attn: \"Attention\", hidden_states, encoder_hidden_states=None, attention_mask=None):\n        residual = hidden_states\n        hidden_states = hidden_states.view(hidden_states.shape[0], hidden_states.shape[1], -1).transpose(1, 2)\n\n        batch_size, sequence_length, _ = hidden_states.shape\n\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        hidden_states = attn.group_norm(hidden_states.transpose(1, 2)).transpose(1, 2)\n\n        query = attn.to_q(hidden_states)\n        dim = query.shape[-1]\n        query = attn.head_to_batch_dim(query)\n\n        encoder_hidden_states_key_proj = attn.add_k_proj(encoder_hidden_states)\n        encoder_hidden_states_value_proj = attn.add_v_proj(encoder_hidden_states)\n\n        encoder_hidden_states_key_proj = attn.head_to_batch_dim(encoder_hidden_states_key_proj)\n        encoder_hidden_states_value_proj = attn.head_to_batch_dim(encoder_hidden_states_value_proj)\n\n        if not attn.only_cross_attention:\n            key = attn.to_k(hidden_states)\n            value = attn.to_v(hidden_states)\n            key = attn.head_to_batch_dim(key)\n            value = attn.head_to_batch_dim(value)\n            key = torch.cat([encoder_hidden_states_key_proj, key], dim=1)\n            value = torch.cat([encoder_hidden_states_value_proj, value], dim=1)\n        else:\n            key = encoder_hidden_states_key_proj\n            value = encoder_hidden_states_value_proj\n\n        batch_size_attention, query_tokens, _ = query.shape\n        hidden_states = torch.zeros(\n            (batch_size_attention, query_tokens, dim // attn.heads), device=query.device, dtype=query.dtype\n        )\n\n        for i in range(batch_size_attention // self.slice_size):\n            start_idx = i * self.slice_size\n            end_idx = (i + 1) * self.slice_size\n\n            query_slice = query[start_idx:end_idx]\n            key_slice = key[start_idx:end_idx]\n            attn_mask_slice = attention_mask[start_idx:end_idx] if attention_mask is not None else None\n\n            attn_slice = attn.get_attention_scores(query_slice, key_slice, attn_mask_slice)\n\n            attn_slice = torch.bmm(attn_slice, value[start_idx:end_idx])\n\n            hidden_states[start_idx:end_idx] = attn_slice\n\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        hidden_states = hidden_states.transpose(-1, -2).reshape(residual.shape)\n        hidden_states = hidden_states + residual\n\n        return hidden_states\n\n\nAttentionProcessor = Union[\n    AttnProcessor,\n    AttnProcessor2_0,\n    XFormersAttnProcessor,\n    SlicedAttnProcessor,\n    AttnAddedKVProcessor,\n    SlicedAttnAddedKVProcessor,\n    AttnAddedKVProcessor2_0,\n    LoRAAttnProcessor,\n    LoRAXFormersAttnProcessor,\n    LoRAAttnAddedKVProcessor,\n    CustomDiffusionAttnProcessor,\n    
CustomDiffusionXFormersAttnProcessor,\n]\n"
  },
  {
    "path": "diffusers/models/autoencoder_kl.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, apply_forward_hook\nfrom .modeling_utils import ModelMixin\nfrom .vae import Decoder, DecoderOutput, DiagonalGaussianDistribution, Encoder\n\n\n@dataclass\nclass AutoencoderKLOutput(BaseOutput):\n    \"\"\"\n    Output of AutoencoderKL encoding method.\n\n    Args:\n        latent_dist (`DiagonalGaussianDistribution`):\n            Encoded outputs of `Encoder` represented as the mean and logvar of `DiagonalGaussianDistribution`.\n            `DiagonalGaussianDistribution` allows for sampling latents from the distribution.\n    \"\"\"\n\n    latent_dist: \"DiagonalGaussianDistribution\"\n\n\nclass AutoencoderKL(ModelMixin, ConfigMixin):\n    r\"\"\"Variational Autoencoder (VAE) model with KL loss from the paper Auto-Encoding Variational Bayes by Diederik P. Kingma\n    and Max Welling.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the model (such as downloading or saving, etc.)\n\n    Parameters:\n        in_channels (int, *optional*, defaults to 3): Number of channels in the input image.\n        out_channels (int,  *optional*, defaults to 3): Number of channels in the output.\n        down_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"DownEncoderBlock2D\",)`): Tuple of downsample block types.\n        up_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"UpDecoderBlock2D\",)`): Tuple of upsample block types.\n        block_out_channels (`Tuple[int]`, *optional*, defaults to :\n            obj:`(64,)`): Tuple of block output channels.\n        act_fn (`str`, *optional*, defaults to `\"silu\"`): The activation function to use.\n        latent_channels (`int`, *optional*, defaults to 4): Number of channels in the latent space.\n        sample_size (`int`, *optional*, defaults to `32`): TODO\n        scaling_factor (`float`, *optional*, defaults to 0.18215):\n            The component-wise standard deviation of the trained latent space computed using the first batch of the\n            training set. This is used to scale the latent space to have unit variance when training the diffusion\n            model. The latents are scaled with the formula `z = z * scaling_factor` before being passed to the\n            diffusion model. When decoding, the latents are scaled back to the original scale with the formula: `z = 1\n            / scaling_factor * z`. 
For more details, refer to sections 4.3.2 and D.1 of the [High-Resolution Image\n            Synthesis with Latent Diffusion Models](https://arxiv.org/abs/2112.10752) paper.\n    \"\"\"\n\n    _supports_gradient_checkpointing = True\n\n    @register_to_config\n    def __init__(\n        self,\n        in_channels: int = 3,\n        out_channels: int = 3,\n        down_block_types: Tuple[str] = (\"DownEncoderBlock2D\",),\n        up_block_types: Tuple[str] = (\"UpDecoderBlock2D\",),\n        block_out_channels: Tuple[int] = (64,),\n        layers_per_block: int = 1,\n        act_fn: str = \"silu\",\n        latent_channels: int = 4,\n        norm_num_groups: int = 32,\n        sample_size: int = 32,\n        scaling_factor: float = 0.18215,\n    ):\n        super().__init__()\n\n        # pass init params to Encoder\n        self.encoder = Encoder(\n            in_channels=in_channels,\n            out_channels=latent_channels,\n            down_block_types=down_block_types,\n            block_out_channels=block_out_channels,\n            layers_per_block=layers_per_block,\n            act_fn=act_fn,\n            norm_num_groups=norm_num_groups,\n            double_z=True,\n        )\n\n        # pass init params to Decoder\n        self.decoder = Decoder(\n            in_channels=latent_channels,\n            out_channels=out_channels,\n            up_block_types=up_block_types,\n            block_out_channels=block_out_channels,\n            layers_per_block=layers_per_block,\n            norm_num_groups=norm_num_groups,\n            act_fn=act_fn,\n        )\n\n        self.quant_conv = nn.Conv2d(2 * latent_channels, 2 * latent_channels, 1)\n        self.post_quant_conv = nn.Conv2d(latent_channels, latent_channels, 1)\n\n        self.use_slicing = False\n        self.use_tiling = False\n\n        # only relevant if vae tiling is enabled\n        self.tile_sample_min_size = self.config.sample_size\n        sample_size = (\n            self.config.sample_size[0]\n            if isinstance(self.config.sample_size, (list, tuple))\n            else self.config.sample_size\n        )\n        self.tile_latent_min_size = int(sample_size / (2 ** (len(self.config.block_out_channels) - 1)))\n        self.tile_overlap_factor = 0.25\n\n    def _set_gradient_checkpointing(self, module, value=False):\n        if isinstance(module, (Encoder, Decoder)):\n            module.gradient_checkpointing = value\n\n    def enable_tiling(self, use_tiling: bool = True):\n        r\"\"\"\n        Enable tiled VAE decoding. When this option is enabled, the VAE will split the input tensor into tiles to\n        compute decoding and encoding in several steps. This is useful to save a large amount of memory and to allow\n        the processing of larger images.\n        \"\"\"\n        self.use_tiling = use_tiling\n\n    def disable_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.enable_tiling(False)\n\n    def enable_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding. When this option is enabled, the VAE will split the input tensor in slices to\n        compute decoding in several steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.use_slicing = True\n\n    def disable_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. 
If `enable_slicing` was previously invoked, this method will go back to computing\n        decoding in one step.\n        \"\"\"\n        self.use_slicing = False\n\n    @apply_forward_hook\n    def encode(self, x: torch.FloatTensor, return_dict: bool = True) -> AutoencoderKLOutput:\n        if self.use_tiling and (x.shape[-1] > self.tile_sample_min_size or x.shape[-2] > self.tile_sample_min_size):\n            return self.tiled_encode(x, return_dict=return_dict)\n\n        h = self.encoder(x)\n        moments = self.quant_conv(h)\n        posterior = DiagonalGaussianDistribution(moments)\n\n        if not return_dict:\n            return (posterior,)\n\n        return AutoencoderKLOutput(latent_dist=posterior)\n\n    def _decode(self, z: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:\n        if self.use_tiling and (z.shape[-1] > self.tile_latent_min_size or z.shape[-2] > self.tile_latent_min_size):\n            return self.tiled_decode(z, return_dict=return_dict)\n\n        z = self.post_quant_conv(z)\n        dec = self.decoder(z)\n\n        if not return_dict:\n            return (dec,)\n\n        return DecoderOutput(sample=dec)\n\n    @apply_forward_hook\n    def decode(self, z: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:\n        if self.use_slicing and z.shape[0] > 1:\n            decoded_slices = [self._decode(z_slice).sample for z_slice in z.split(1)]\n            decoded = torch.cat(decoded_slices)\n        else:\n            decoded = self._decode(z).sample\n\n        if not return_dict:\n            return (decoded,)\n\n        return DecoderOutput(sample=decoded)\n\n    def blend_v(self, a, b, blend_extent):\n        blend_extent = min(a.shape[2], b.shape[2], blend_extent)\n        for y in range(blend_extent):\n            b[:, :, y, :] = a[:, :, -blend_extent + y, :] * (1 - y / blend_extent) + b[:, :, y, :] * (y / blend_extent)\n        return b\n\n    def blend_h(self, a, b, blend_extent):\n        blend_extent = min(a.shape[3], b.shape[3], blend_extent)\n        for x in range(blend_extent):\n            b[:, :, :, x] = a[:, :, :, -blend_extent + x] * (1 - x / blend_extent) + b[:, :, :, x] * (x / blend_extent)\n        return b\n\n    def tiled_encode(self, x: torch.FloatTensor, return_dict: bool = True) -> AutoencoderKLOutput:\n        r\"\"\"Encode a batch of images using a tiled encoder.\n\n        Args:\n        When this option is enabled, the VAE will split the input tensor into tiles to compute encoding in several\n        steps. This is useful to keep memory use constant regardless of image size. The end result of tiled encoding is:\n        different from non-tiled encoding due to each tile using a different encoder. To avoid tiling artifacts, the\n        tiles overlap and are blended together to form a smooth output. You may still see tile-sized changes in the\n        look of the output, but they should be much less noticeable.\n            x (`torch.FloatTensor`): Input batch of images. 
return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`AutoencoderKLOutput`] instead of a plain tuple.\n        \"\"\"\n        overlap_size = int(self.tile_sample_min_size * (1 - self.tile_overlap_factor))\n        blend_extent = int(self.tile_latent_min_size * self.tile_overlap_factor)\n        row_limit = self.tile_latent_min_size - blend_extent\n\n        # Split the image into 512x512 tiles and encode them separately.\n        rows = []\n        for i in range(0, x.shape[2], overlap_size):\n            row = []\n            for j in range(0, x.shape[3], overlap_size):\n                tile = x[:, :, i : i + self.tile_sample_min_size, j : j + self.tile_sample_min_size]\n                tile = self.encoder(tile)\n                tile = self.quant_conv(tile)\n                row.append(tile)\n            rows.append(row)\n        result_rows = []\n        for i, row in enumerate(rows):\n            result_row = []\n            for j, tile in enumerate(row):\n                # blend the above tile and the left tile\n                # to the current tile and add the current tile to the result row\n                if i > 0:\n                    tile = self.blend_v(rows[i - 1][j], tile, blend_extent)\n                if j > 0:\n                    tile = self.blend_h(row[j - 1], tile, blend_extent)\n                result_row.append(tile[:, :, :row_limit, :row_limit])\n            result_rows.append(torch.cat(result_row, dim=3))\n\n        moments = torch.cat(result_rows, dim=2)\n        posterior = DiagonalGaussianDistribution(moments)\n\n        if not return_dict:\n            return (posterior,)\n\n        return AutoencoderKLOutput(latent_dist=posterior)\n\n    def tiled_decode(self, z: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:\n        r\"\"\"Decode a batch of images using a tiled decoder.\n\n        Args:\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding in several\n        steps. This is useful to keep memory use constant regardless of image size. The end result of tiled decoding is:\n        different from non-tiled decoding due to each tile using a different decoder. To avoid tiling artifacts, the\n        tiles overlap and are blended together to form a smooth output. You may still see tile-sized changes in the\n        look of the output, but they should be much less noticeable.\n            z (`torch.FloatTensor`): Input batch of latent vectors. 
return_dict (`bool`, *optional*, defaults to\n            `True`):\n                Whether or not to return a [`DecoderOutput`] instead of a plain tuple.\n        \"\"\"\n        overlap_size = int(self.tile_latent_min_size * (1 - self.tile_overlap_factor))\n        blend_extent = int(self.tile_sample_min_size * self.tile_overlap_factor)\n        row_limit = self.tile_sample_min_size - blend_extent\n\n        # Split z into overlapping 64x64 tiles and decode them separately.\n        # The tiles have an overlap to avoid seams between tiles.\n        rows = []\n        for i in range(0, z.shape[2], overlap_size):\n            row = []\n            for j in range(0, z.shape[3], overlap_size):\n                tile = z[:, :, i : i + self.tile_latent_min_size, j : j + self.tile_latent_min_size]\n                tile = self.post_quant_conv(tile)\n                decoded = self.decoder(tile)\n                row.append(decoded)\n            rows.append(row)\n        result_rows = []\n        for i, row in enumerate(rows):\n            result_row = []\n            for j, tile in enumerate(row):\n                # blend the above tile and the left tile\n                # to the current tile and add the current tile to the result row\n                if i > 0:\n                    tile = self.blend_v(rows[i - 1][j], tile, blend_extent)\n                if j > 0:\n                    tile = self.blend_h(row[j - 1], tile, blend_extent)\n                result_row.append(tile[:, :, :row_limit, :row_limit])\n            result_rows.append(torch.cat(result_row, dim=3))\n\n        dec = torch.cat(result_rows, dim=2)\n        if not return_dict:\n            return (dec,)\n\n        return DecoderOutput(sample=dec)\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        sample_posterior: bool = False,\n        return_dict: bool = True,\n        generator: Optional[torch.Generator] = None,\n    ) -> Union[DecoderOutput, torch.FloatTensor]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): Input sample.\n            sample_posterior (`bool`, *optional*, defaults to `False`):\n                Whether to sample from the posterior.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`DecoderOutput`] instead of a plain tuple.\n        \"\"\"\n        x = sample\n        posterior = self.encode(x).latent_dist\n        if sample_posterior:\n            z = posterior.sample(generator=generator)\n        else:\n            z = posterior.mode()\n        dec = self.decode(z).sample\n\n        if not return_dict:\n            return (dec,)\n\n        return DecoderOutput(sample=dec)\n"
  },
  {
    "path": "diffusers/models/controlnet.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Any, Dict, List, Optional, Tuple, Union\n\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, logging\nfrom .attention_processor import AttentionProcessor, AttnProcessor\nfrom .embeddings import TimestepEmbedding, Timesteps\nfrom .modeling_utils import ModelMixin\nfrom .unet_2d_blocks import (\n    CrossAttnDownBlock2D,\n    DownBlock2D,\n    UNetMidBlock2DCrossAttn,\n    get_down_block,\n)\nfrom .unet_2d_condition import UNet2DConditionModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\nclass ControlNetOutput(BaseOutput):\n    down_block_res_samples: Tuple[torch.Tensor]\n    mid_block_res_sample: torch.Tensor\n\n\nclass ControlNetConditioningEmbedding(nn.Module):\n    \"\"\"\n    Quoting from https://arxiv.org/abs/2302.05543: \"Stable Diffusion uses a pre-processing method similar to VQ-GAN\n    [11] to convert the entire dataset of 512 × 512 images into smaller 64 × 64 “latent images” for stabilized\n    training. This requires ControlNets to convert image-based conditions to 64 × 64 feature space to match the\n    convolution size. We use a tiny network E(·) of four convolution layers with 4 × 4 kernels and 2 × 2 strides\n    (activated by ReLU, channels are 16, 32, 64, 128, initialized with Gaussian weights, trained jointly with the full\n    model) to encode image-space conditions ... 
into feature maps ...\"\n    \"\"\"\n\n    def __init__(\n        self,\n        conditioning_embedding_channels: int,\n        conditioning_channels: int = 3,\n        block_out_channels: Tuple[int] = (16, 32, 96, 256),\n    ):\n        super().__init__()\n\n        self.conv_in = nn.Conv2d(conditioning_channels, block_out_channels[0], kernel_size=3, padding=1)\n\n        self.blocks = nn.ModuleList([])\n\n        for i in range(len(block_out_channels) - 1):\n            channel_in = block_out_channels[i]\n            channel_out = block_out_channels[i + 1]\n            self.blocks.append(nn.Conv2d(channel_in, channel_in, kernel_size=3, padding=1))\n            self.blocks.append(nn.Conv2d(channel_in, channel_out, kernel_size=3, padding=1, stride=2))\n\n        self.conv_out = zero_module(\n            nn.Conv2d(block_out_channels[-1], conditioning_embedding_channels, kernel_size=3, padding=1)\n        )\n\n    def forward(self, conditioning):\n        embedding = self.conv_in(conditioning)\n        embedding = F.silu(embedding)\n\n        for block in self.blocks:\n            embedding = block(embedding)\n            embedding = F.silu(embedding)\n\n        embedding = self.conv_out(embedding)\n\n        return embedding\n\n\nclass ControlNetModel(ModelMixin, ConfigMixin):\n    _supports_gradient_checkpointing = True\n\n    @register_to_config\n    def __init__(\n        self,\n        in_channels: int = 4,\n        flip_sin_to_cos: bool = True,\n        freq_shift: int = 0,\n        down_block_types: Tuple[str] = (\n            \"CrossAttnDownBlock2D\",\n            \"CrossAttnDownBlock2D\",\n            \"CrossAttnDownBlock2D\",\n            \"DownBlock2D\",\n        ),\n        only_cross_attention: Union[bool, Tuple[bool]] = False,\n        block_out_channels: Tuple[int] = (320, 640, 1280, 1280),\n        layers_per_block: int = 2,\n        downsample_padding: int = 1,\n        mid_block_scale_factor: float = 1,\n        act_fn: str = \"silu\",\n        norm_num_groups: Optional[int] = 32,\n        norm_eps: float = 1e-5,\n        cross_attention_dim: int = 1280,\n        attention_head_dim: Union[int, Tuple[int]] = 8,\n        use_linear_projection: bool = False,\n        class_embed_type: Optional[str] = None,\n        num_class_embeds: Optional[int] = None,\n        upcast_attention: bool = False,\n        resnet_time_scale_shift: str = \"default\",\n        projection_class_embeddings_input_dim: Optional[int] = None,\n        controlnet_conditioning_channel_order: str = \"rgb\",\n        conditioning_embedding_out_channels: Optional[Tuple[int]] = (16, 32, 96, 256),\n        global_pool_conditions: bool = False,\n    ):\n        super().__init__()\n\n        # Check inputs\n        if len(block_out_channels) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. 
`down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}.\"\n            )\n\n        # input\n        conv_in_kernel = 3\n        conv_in_padding = (conv_in_kernel - 1) // 2\n        self.conv_in = nn.Conv2d(\n            in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding\n        )\n\n        # time\n        time_embed_dim = block_out_channels[0] * 4\n\n        self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift)\n        timestep_input_dim = block_out_channels[0]\n\n        self.time_embedding = TimestepEmbedding(\n            timestep_input_dim,\n            time_embed_dim,\n            act_fn=act_fn,\n        )\n\n        # class embedding\n        if class_embed_type is None and num_class_embeds is not None:\n            self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim)\n        elif class_embed_type == \"timestep\":\n            self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim)\n        elif class_embed_type == \"identity\":\n            self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim)\n        elif class_embed_type == \"projection\":\n            if projection_class_embeddings_input_dim is None:\n                raise ValueError(\n                    \"`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set\"\n                )\n            # The projection `class_embed_type` is the same as the timestep `class_embed_type` except\n            # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings\n            # 2. 
it projects from an arbitrary input dimension.\n            #\n            # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations.\n            # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings.\n            # As a result, `TimestepEmbedding` can be passed arbitrary vectors.\n            self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim)\n        else:\n            self.class_embedding = None\n\n        # control net conditioning embedding\n        self.controlnet_cond_embedding = ControlNetConditioningEmbedding(\n            conditioning_embedding_channels=block_out_channels[0],\n            block_out_channels=conditioning_embedding_out_channels,\n        )\n\n        self.down_blocks = nn.ModuleList([])\n        self.controlnet_down_blocks = nn.ModuleList([])\n\n        if isinstance(only_cross_attention, bool):\n            only_cross_attention = [only_cross_attention] * len(down_block_types)\n\n        if isinstance(attention_head_dim, int):\n            attention_head_dim = (attention_head_dim,) * len(down_block_types)\n\n        # down\n        output_channel = block_out_channels[0]\n\n        controlnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1)\n        controlnet_block = zero_module(controlnet_block)\n        self.controlnet_down_blocks.append(controlnet_block)\n\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=layers_per_block,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                temb_channels=time_embed_dim,\n                add_downsample=not is_final_block,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=cross_attention_dim,\n                attn_num_head_channels=attention_head_dim[i],\n                downsample_padding=downsample_padding,\n                use_linear_projection=use_linear_projection,\n                only_cross_attention=only_cross_attention[i],\n                upcast_attention=upcast_attention,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n            )\n            self.down_blocks.append(down_block)\n\n            for _ in range(layers_per_block):\n                controlnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1)\n                controlnet_block = zero_module(controlnet_block)\n                self.controlnet_down_blocks.append(controlnet_block)\n\n            if not is_final_block:\n                controlnet_block = nn.Conv2d(output_channel, output_channel, kernel_size=1)\n                controlnet_block = zero_module(controlnet_block)\n                self.controlnet_down_blocks.append(controlnet_block)\n\n        # mid\n        mid_block_channel = block_out_channels[-1]\n\n        controlnet_block = nn.Conv2d(mid_block_channel, mid_block_channel, kernel_size=1)\n        controlnet_block = zero_module(controlnet_block)\n        self.controlnet_mid_block = controlnet_block\n\n        self.mid_block = UNetMidBlock2DCrossAttn(\n            in_channels=mid_block_channel,\n            
temb_channels=time_embed_dim,\n            resnet_eps=norm_eps,\n            resnet_act_fn=act_fn,\n            output_scale_factor=mid_block_scale_factor,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attention_head_dim[-1],\n            resnet_groups=norm_num_groups,\n            use_linear_projection=use_linear_projection,\n            upcast_attention=upcast_attention,\n        )\n\n    @classmethod\n    def from_unet(\n        cls,\n        unet: UNet2DConditionModel,\n        controlnet_conditioning_channel_order: str = \"rgb\",\n        conditioning_embedding_out_channels: Optional[Tuple[int]] = (16, 32, 96, 256),\n        load_weights_from_unet: bool = True,\n    ):\n        r\"\"\"\n        Instantiate Controlnet class from UNet2DConditionModel.\n\n        Parameters:\n            unet (`UNet2DConditionModel`):\n                UNet model which weights are copied to the ControlNet. Note that all configuration options are also\n                copied where applicable.\n        \"\"\"\n        controlnet = cls(\n            in_channels=unet.config.in_channels,\n            flip_sin_to_cos=unet.config.flip_sin_to_cos,\n            freq_shift=unet.config.freq_shift,\n            down_block_types=unet.config.down_block_types,\n            only_cross_attention=unet.config.only_cross_attention,\n            block_out_channels=unet.config.block_out_channels,\n            layers_per_block=unet.config.layers_per_block,\n            downsample_padding=unet.config.downsample_padding,\n            mid_block_scale_factor=unet.config.mid_block_scale_factor,\n            act_fn=unet.config.act_fn,\n            norm_num_groups=unet.config.norm_num_groups,\n            norm_eps=unet.config.norm_eps,\n            cross_attention_dim=unet.config.cross_attention_dim,\n            attention_head_dim=unet.config.attention_head_dim,\n            use_linear_projection=unet.config.use_linear_projection,\n            class_embed_type=unet.config.class_embed_type,\n            num_class_embeds=unet.config.num_class_embeds,\n            upcast_attention=unet.config.upcast_attention,\n            resnet_time_scale_shift=unet.config.resnet_time_scale_shift,\n            projection_class_embeddings_input_dim=unet.config.projection_class_embeddings_input_dim,\n            controlnet_conditioning_channel_order=controlnet_conditioning_channel_order,\n            conditioning_embedding_out_channels=conditioning_embedding_out_channels,\n        )\n\n        if load_weights_from_unet:\n            controlnet.conv_in.load_state_dict(unet.conv_in.state_dict())\n            controlnet.time_proj.load_state_dict(unet.time_proj.state_dict())\n            controlnet.time_embedding.load_state_dict(unet.time_embedding.state_dict())\n\n            if controlnet.class_embedding:\n                controlnet.class_embedding.load_state_dict(unet.class_embedding.state_dict())\n\n            controlnet.down_blocks.load_state_dict(unet.down_blocks.state_dict())\n            controlnet.mid_block.load_state_dict(unet.mid_block.state_dict())\n\n        return controlnet\n\n    @property\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.attn_processors\n    def attn_processors(self) -> Dict[str, AttentionProcessor]:\n        r\"\"\"\n        Returns:\n            `dict` of attention processors: A dictionary containing all attention processors used in the model with\n            indexed by its weight name.\n    
    \"\"\"\n        # set recursively\n        processors = {}\n\n        def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]):\n            if hasattr(module, \"set_processor\"):\n                processors[f\"{name}.processor\"] = module.processor\n\n            for sub_name, child in module.named_children():\n                fn_recursive_add_processors(f\"{name}.{sub_name}\", child, processors)\n\n            return processors\n\n        for name, module in self.named_children():\n            fn_recursive_add_processors(name, module, processors)\n\n        return processors\n\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.set_attn_processor\n    def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]):\n        r\"\"\"\n        Parameters:\n            `processor (`dict` of `AttentionProcessor` or `AttentionProcessor`):\n                The instantiated processor class or a dictionary of processor classes that will be set as the processor\n                of **all** `Attention` layers.\n            In case `processor` is a dict, the key needs to define the path to the corresponding cross attention processor. This is strongly recommended when setting trainable attention processors.:\n\n        \"\"\"\n        count = len(self.attn_processors.keys())\n\n        if isinstance(processor, dict) and len(processor) != count:\n            raise ValueError(\n                f\"A dict of processors was passed, but the number of processors {len(processor)} does not match the\"\n                f\" number of attention layers: {count}. Please make sure to pass {count} processor classes.\"\n            )\n\n        def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor):\n            if hasattr(module, \"set_processor\"):\n                if not isinstance(processor, dict):\n                    module.set_processor(processor)\n                else:\n                    module.set_processor(processor.pop(f\"{name}.processor\"))\n\n            for sub_name, child in module.named_children():\n                fn_recursive_attn_processor(f\"{name}.{sub_name}\", child, processor)\n\n        for name, module in self.named_children():\n            fn_recursive_attn_processor(name, module, processor)\n\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor\n    def set_default_attn_processor(self):\n        \"\"\"\n        Disables custom attention processors and sets the default attention implementation.\n        \"\"\"\n        self.set_attn_processor(AttnProcessor())\n\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.set_attention_slice\n    def set_attention_slice(self, slice_size):\n        r\"\"\"\n        Enable sliced attention computation.\n\n        When this option is enabled, the attention module will split the input tensor in slices, to compute attention\n        in several steps. This is useful to save some memory in exchange for a small speed decrease.\n\n        Args:\n            slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `\"auto\"`):\n                When `\"auto\"`, halves the input to the attention heads, so attention will be computed in two steps. If\n                `\"max\"`, maximum amount of memory will be saved by running only one slice at a time. 
If a number is\n                provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim`\n                must be a multiple of `slice_size`.\n        \"\"\"\n        sliceable_head_dims = []\n\n        def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module):\n            if hasattr(module, \"set_attention_slice\"):\n                sliceable_head_dims.append(module.sliceable_head_dim)\n\n            for child in module.children():\n                fn_recursive_retrieve_sliceable_dims(child)\n\n        # retrieve number of attention layers\n        for module in self.children():\n            fn_recursive_retrieve_sliceable_dims(module)\n\n        num_sliceable_layers = len(sliceable_head_dims)\n\n        if slice_size == \"auto\":\n            # half the attention head size is usually a good trade-off between\n            # speed and memory\n            slice_size = [dim // 2 for dim in sliceable_head_dims]\n        elif slice_size == \"max\":\n            # make smallest slice possible\n            slice_size = num_sliceable_layers * [1]\n\n        slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size\n\n        if len(slice_size) != len(sliceable_head_dims):\n            raise ValueError(\n                f\"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different\"\n                f\" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}.\"\n            )\n\n        for i in range(len(slice_size)):\n            size = slice_size[i]\n            dim = sliceable_head_dims[i]\n            if size is not None and size > dim:\n                raise ValueError(f\"size {size} has to be smaller or equal to {dim}.\")\n\n        # Recursively walk through all the children.\n        # Any children which exposes the set_attention_slice method\n        # gets the message\n        def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]):\n            if hasattr(module, \"set_attention_slice\"):\n                module.set_attention_slice(slice_size.pop())\n\n            for child in module.children():\n                fn_recursive_set_attention_slice(child, slice_size)\n\n        reversed_slice_size = list(reversed(slice_size))\n        for module in self.children():\n            fn_recursive_set_attention_slice(module, reversed_slice_size)\n\n    def _set_gradient_checkpointing(self, module, value=False):\n        if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D)):\n            module.gradient_checkpointing = value\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        encoder_hidden_states: torch.Tensor,\n        controlnet_cond: torch.FloatTensor,\n        conditioning_scale: float = 1.0,\n        class_labels: Optional[torch.Tensor] = None,\n        timestep_cond: Optional[torch.Tensor] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        guess_mode: bool = False,\n        return_dict: bool = True,\n    ) -> Union[ControlNetOutput, Tuple]:\n        # check channel order\n        channel_order = self.config.controlnet_conditioning_channel_order\n\n        if channel_order == \"rgb\":\n            # in rgb order by default\n            ...\n        elif channel_order == \"bgr\":\n            controlnet_cond = 
torch.flip(controlnet_cond, dims=[1])\n        else:\n            raise ValueError(f\"unknown `controlnet_conditioning_channel_order`: {channel_order}\")\n\n        # prepare attention_mask\n        if attention_mask is not None:\n            attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0\n            attention_mask = attention_mask.unsqueeze(1)\n\n        # 1. time\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can\n            # This would be a good case for the `match` statement (Python 3.10+)\n            is_mps = sample.device.type == \"mps\"\n            if isinstance(timestep, float):\n                dtype = torch.float32 if is_mps else torch.float64\n            else:\n                dtype = torch.int32 if is_mps else torch.int64\n            timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device)\n        elif len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(sample.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        timesteps = timesteps.expand(sample.shape[0])\n\n        t_emb = self.time_proj(timesteps)\n\n        # timesteps does not contain any weights and will always return f32 tensors\n        # but time_embedding might actually be running in fp16. so we need to cast here.\n        # there might be better ways to encapsulate this.\n        t_emb = t_emb.to(dtype=sample.dtype)\n\n        emb = self.time_embedding(t_emb, timestep_cond)\n\n        if self.class_embedding is not None:\n            if class_labels is None:\n                raise ValueError(\"class_labels should be provided when num_class_embeds > 0\")\n\n            if self.config.class_embed_type == \"timestep\":\n                class_labels = self.time_proj(class_labels)\n\n            class_emb = self.class_embedding(class_labels).to(dtype=self.dtype)\n            emb = emb + class_emb\n\n        # 2. pre-process\n        sample = self.conv_in(sample)\n\n        controlnet_cond = self.controlnet_cond_embedding(controlnet_cond)\n\n        sample = sample + controlnet_cond\n\n        # 3. down\n        down_block_res_samples = (sample,)\n        for downsample_block in self.down_blocks:\n            if hasattr(downsample_block, \"has_cross_attention\") and downsample_block.has_cross_attention:\n                sample, res_samples = downsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    encoder_hidden_states=encoder_hidden_states,\n                    attention_mask=attention_mask,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n            else:\n                sample, res_samples = downsample_block(hidden_states=sample, temb=emb)\n\n            down_block_res_samples += res_samples\n\n        # 4. mid\n        if self.mid_block is not None:\n            sample = self.mid_block(\n                sample,\n                emb,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=attention_mask,\n                cross_attention_kwargs=cross_attention_kwargs,\n            )\n\n        # 5. 
Control net blocks\n\n        controlnet_down_block_res_samples = ()\n\n        for down_block_res_sample, controlnet_block in zip(down_block_res_samples, self.controlnet_down_blocks):\n            down_block_res_sample = controlnet_block(down_block_res_sample)\n            controlnet_down_block_res_samples = controlnet_down_block_res_samples + (down_block_res_sample,)\n\n        down_block_res_samples = controlnet_down_block_res_samples\n\n        mid_block_res_sample = self.controlnet_mid_block(sample)\n\n        # 6. scaling\n        if guess_mode and not self.config.global_pool_conditions:\n            scales = torch.logspace(-1, 0, len(down_block_res_samples) + 1, device=sample.device)  # 0.1 to 1.0\n\n            scales = scales * conditioning_scale\n            down_block_res_samples = [sample * scale for sample, scale in zip(down_block_res_samples, scales)]\n            mid_block_res_sample = mid_block_res_sample * scales[-1]  # last one\n        else:\n            down_block_res_samples = [sample * conditioning_scale for sample in down_block_res_samples]\n            mid_block_res_sample = mid_block_res_sample * conditioning_scale\n\n        if self.config.global_pool_conditions:\n            down_block_res_samples = [\n                torch.mean(sample, dim=(2, 3), keepdim=True) for sample in down_block_res_samples\n            ]\n            mid_block_res_sample = torch.mean(mid_block_res_sample, dim=(2, 3), keepdim=True)\n\n        if not return_dict:\n            return (down_block_res_samples, mid_block_res_sample)\n\n        return ControlNetOutput(\n            down_block_res_samples=down_block_res_samples, mid_block_res_sample=mid_block_res_sample\n        )\n\n\ndef zero_module(module):\n    for p in module.parameters():\n        nn.init.zeros_(p)\n    return module\n"
  },
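  {
    "path": "examples/sketches/controlnet_from_unet.py",
    "content": "\"\"\"Usage sketch (illustrative only, not part of the library): build a ControlNet from an existing UNet.\n\nShows `ControlNetModel.from_unet` from diffusers/models/controlnet.py. The checkpoint id and the\ntensor shapes are assumptions for a Stable Diffusion v1.5-style setup (4 latent channels, 64x64\nlatents, 77x768 text embeddings, 512x512 conditioning image).\n\"\"\"\nimport torch\n\nfrom diffusers import ControlNetModel, UNet2DConditionModel\n\n# Copy the UNet's configuration and (optionally) its encoder weights into a new ControlNet.\nunet = UNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\", subfolder=\"unet\")\ncontrolnet = ControlNetModel.from_unet(unet, load_weights_from_unet=True)\n\n# Dummy inputs; in a pipeline these come from the VAE, the text encoder and the conditioning image.\nsample = torch.randn(1, 4, 64, 64)\nencoder_hidden_states = torch.randn(1, 77, 768)\ncontrolnet_cond = torch.randn(1, 3, 512, 512)\n\nwith torch.no_grad():\n    out = controlnet(\n        sample,\n        timestep=10,\n        encoder_hidden_states=encoder_hidden_states,\n        controlnet_cond=controlnet_cond,\n        conditioning_scale=1.0,\n    )\n\n# One residual per down-block output plus the mid-block residual, to be added to the UNet features.\nprint(len(out.down_block_res_samples), out.mid_block_res_sample.shape)\n"
  },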
  {
    "path": "diffusers/models/controlnet_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Tuple, Union\n\nimport flax\nimport flax.linen as nn\nimport jax\nimport jax.numpy as jnp\nfrom flax.core.frozen_dict import FrozenDict\n\nfrom ..configuration_utils import ConfigMixin, flax_register_to_config\nfrom ..utils import BaseOutput\nfrom .embeddings_flax import FlaxTimestepEmbedding, FlaxTimesteps\nfrom .modeling_flax_utils import FlaxModelMixin\nfrom .unet_2d_blocks_flax import (\n    FlaxCrossAttnDownBlock2D,\n    FlaxDownBlock2D,\n    FlaxUNetMidBlock2DCrossAttn,\n)\n\n\n@flax.struct.dataclass\nclass FlaxControlNetOutput(BaseOutput):\n    down_block_res_samples: jnp.ndarray\n    mid_block_res_sample: jnp.ndarray\n\n\nclass FlaxControlNetConditioningEmbedding(nn.Module):\n    conditioning_embedding_channels: int\n    block_out_channels: Tuple[int] = (16, 32, 96, 256)\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.conv_in = nn.Conv(\n            self.block_out_channels[0],\n            kernel_size=(3, 3),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        blocks = []\n        for i in range(len(self.block_out_channels) - 1):\n            channel_in = self.block_out_channels[i]\n            channel_out = self.block_out_channels[i + 1]\n            conv1 = nn.Conv(\n                channel_in,\n                kernel_size=(3, 3),\n                padding=((1, 1), (1, 1)),\n                dtype=self.dtype,\n            )\n            blocks.append(conv1)\n            conv2 = nn.Conv(\n                channel_out,\n                kernel_size=(3, 3),\n                strides=(2, 2),\n                padding=((1, 1), (1, 1)),\n                dtype=self.dtype,\n            )\n            blocks.append(conv2)\n        self.blocks = blocks\n\n        self.conv_out = nn.Conv(\n            self.conditioning_embedding_channels,\n            kernel_size=(3, 3),\n            padding=((1, 1), (1, 1)),\n            kernel_init=nn.initializers.zeros_init(),\n            bias_init=nn.initializers.zeros_init(),\n            dtype=self.dtype,\n        )\n\n    def __call__(self, conditioning):\n        embedding = self.conv_in(conditioning)\n        embedding = nn.silu(embedding)\n\n        for block in self.blocks:\n            embedding = block(embedding)\n            embedding = nn.silu(embedding)\n\n        embedding = self.conv_out(embedding)\n\n        return embedding\n\n\n@flax_register_to_config\nclass FlaxControlNetModel(nn.Module, FlaxModelMixin, ConfigMixin):\n    r\"\"\"\n    Quoting from https://arxiv.org/abs/2302.05543: \"Stable Diffusion uses a pre-processing method similar to VQ-GAN\n    [11] to convert the entire dataset of 512 × 512 images into smaller 64 × 64 “latent images” for stabilized\n    training. This requires ControlNets to convert image-based conditions to 64 × 64 feature space to match the\n    convolution size. 
We use a tiny network E(·) of four convolution layers with 4 × 4 kernels and 2 × 2 strides\n    (activated by ReLU, channels are 16, 32, 64, 128, initialized with Gaussian weights, trained jointly with the full\n    model) to encode image-space conditions ... into feature maps ...\"\n\n    This model inherits from [`FlaxModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the models (such as downloading or saving, etc.)\n\n    Also, this model is a Flax Linen [flax.linen.Module](https://flax.readthedocs.io/en/latest/flax.linen.html#module)\n    subclass. Use it as a regular Flax linen Module and refer to the Flax documentation for all matter related to\n    general usage and behavior.\n\n    Finally, this model supports inherent JAX features such as:\n    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)\n    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)\n    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)\n    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)\n\n    Parameters:\n        sample_size (`int`, *optional*):\n            The size of the input sample.\n        in_channels (`int`, *optional*, defaults to 4):\n            The number of channels in the input sample.\n        down_block_types (`Tuple[str]`, *optional*, defaults to `(\"CrossAttnDownBlock2D\", \"CrossAttnDownBlock2D\", \"CrossAttnDownBlock2D\", \"DownBlock2D\")`):\n            The tuple of downsample blocks to use. The corresponding class names will be: \"FlaxCrossAttnDownBlock2D\",\n            \"FlaxCrossAttnDownBlock2D\", \"FlaxCrossAttnDownBlock2D\", \"FlaxDownBlock2D\"\n        block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`):\n            The tuple of output channels for each block.\n        layers_per_block (`int`, *optional*, defaults to 2):\n            The number of layers per block.\n        attention_head_dim (`int` or `Tuple[int]`, *optional*, defaults to 8):\n            The dimension of the attention heads.\n        cross_attention_dim (`int`, *optional*, defaults to 768):\n            The dimension of the cross attention features.\n        dropout (`float`, *optional*, defaults to 0):\n            Dropout probability for down, up and bottleneck blocks.\n        flip_sin_to_cos (`bool`, *optional*, defaults to `True`):\n            Whether to flip the sin to cos in the time embedding.\n        freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding.\n        controlnet_conditioning_channel_order (`str`, *optional*, defaults to `rgb`):\n            The channel order of conditional image. 
Will convert it to `rgb` if it's `bgr`\n        conditioning_embedding_out_channels (`tuple`, *optional*, defaults to `(16, 32, 96, 256)`):\n            The tuple of output channel for each block in conditioning_embedding layer\n\n\n    \"\"\"\n    sample_size: int = 32\n    in_channels: int = 4\n    down_block_types: Tuple[str] = (\n        \"CrossAttnDownBlock2D\",\n        \"CrossAttnDownBlock2D\",\n        \"CrossAttnDownBlock2D\",\n        \"DownBlock2D\",\n    )\n    only_cross_attention: Union[bool, Tuple[bool]] = False\n    block_out_channels: Tuple[int] = (320, 640, 1280, 1280)\n    layers_per_block: int = 2\n    attention_head_dim: Union[int, Tuple[int]] = 8\n    cross_attention_dim: int = 1280\n    dropout: float = 0.0\n    use_linear_projection: bool = False\n    dtype: jnp.dtype = jnp.float32\n    flip_sin_to_cos: bool = True\n    freq_shift: int = 0\n    controlnet_conditioning_channel_order: str = \"rgb\"\n    conditioning_embedding_out_channels: Tuple[int] = (16, 32, 96, 256)\n\n    def init_weights(self, rng: jax.random.KeyArray) -> FrozenDict:\n        # init input tensors\n        sample_shape = (1, self.in_channels, self.sample_size, self.sample_size)\n        sample = jnp.zeros(sample_shape, dtype=jnp.float32)\n        timesteps = jnp.ones((1,), dtype=jnp.int32)\n        encoder_hidden_states = jnp.zeros((1, 1, self.cross_attention_dim), dtype=jnp.float32)\n        controlnet_cond_shape = (1, 3, self.sample_size * 8, self.sample_size * 8)\n        controlnet_cond = jnp.zeros(controlnet_cond_shape, dtype=jnp.float32)\n\n        params_rng, dropout_rng = jax.random.split(rng)\n        rngs = {\"params\": params_rng, \"dropout\": dropout_rng}\n\n        return self.init(rngs, sample, timesteps, encoder_hidden_states, controlnet_cond)[\"params\"]\n\n    def setup(self):\n        block_out_channels = self.block_out_channels\n        time_embed_dim = block_out_channels[0] * 4\n\n        # input\n        self.conv_in = nn.Conv(\n            block_out_channels[0],\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        # time\n        self.time_proj = FlaxTimesteps(\n            block_out_channels[0], flip_sin_to_cos=self.flip_sin_to_cos, freq_shift=self.config.freq_shift\n        )\n        self.time_embedding = FlaxTimestepEmbedding(time_embed_dim, dtype=self.dtype)\n\n        self.controlnet_cond_embedding = FlaxControlNetConditioningEmbedding(\n            conditioning_embedding_channels=block_out_channels[0],\n            block_out_channels=self.conditioning_embedding_out_channels,\n        )\n\n        only_cross_attention = self.only_cross_attention\n        if isinstance(only_cross_attention, bool):\n            only_cross_attention = (only_cross_attention,) * len(self.down_block_types)\n\n        attention_head_dim = self.attention_head_dim\n        if isinstance(attention_head_dim, int):\n            attention_head_dim = (attention_head_dim,) * len(self.down_block_types)\n\n        # down\n        down_blocks = []\n        controlnet_down_blocks = []\n\n        output_channel = block_out_channels[0]\n\n        controlnet_block = nn.Conv(\n            output_channel,\n            kernel_size=(1, 1),\n            padding=\"VALID\",\n            kernel_init=nn.initializers.zeros_init(),\n            bias_init=nn.initializers.zeros_init(),\n            dtype=self.dtype,\n        )\n        controlnet_down_blocks.append(controlnet_block)\n\n        for i, down_block_type in 
enumerate(self.down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            if down_block_type == \"CrossAttnDownBlock2D\":\n                down_block = FlaxCrossAttnDownBlock2D(\n                    in_channels=input_channel,\n                    out_channels=output_channel,\n                    dropout=self.dropout,\n                    num_layers=self.layers_per_block,\n                    attn_num_head_channels=attention_head_dim[i],\n                    add_downsample=not is_final_block,\n                    use_linear_projection=self.use_linear_projection,\n                    only_cross_attention=only_cross_attention[i],\n                    dtype=self.dtype,\n                )\n            else:\n                down_block = FlaxDownBlock2D(\n                    in_channels=input_channel,\n                    out_channels=output_channel,\n                    dropout=self.dropout,\n                    num_layers=self.layers_per_block,\n                    add_downsample=not is_final_block,\n                    dtype=self.dtype,\n                )\n\n            down_blocks.append(down_block)\n\n            for _ in range(self.layers_per_block):\n                controlnet_block = nn.Conv(\n                    output_channel,\n                    kernel_size=(1, 1),\n                    padding=\"VALID\",\n                    kernel_init=nn.initializers.zeros_init(),\n                    bias_init=nn.initializers.zeros_init(),\n                    dtype=self.dtype,\n                )\n                controlnet_down_blocks.append(controlnet_block)\n\n            if not is_final_block:\n                controlnet_block = nn.Conv(\n                    output_channel,\n                    kernel_size=(1, 1),\n                    padding=\"VALID\",\n                    kernel_init=nn.initializers.zeros_init(),\n                    bias_init=nn.initializers.zeros_init(),\n                    dtype=self.dtype,\n                )\n                controlnet_down_blocks.append(controlnet_block)\n\n        self.down_blocks = down_blocks\n        self.controlnet_down_blocks = controlnet_down_blocks\n\n        # mid\n        mid_block_channel = block_out_channels[-1]\n        self.mid_block = FlaxUNetMidBlock2DCrossAttn(\n            in_channels=mid_block_channel,\n            dropout=self.dropout,\n            attn_num_head_channels=attention_head_dim[-1],\n            use_linear_projection=self.use_linear_projection,\n            dtype=self.dtype,\n        )\n\n        self.controlnet_mid_block = nn.Conv(\n            mid_block_channel,\n            kernel_size=(1, 1),\n            padding=\"VALID\",\n            kernel_init=nn.initializers.zeros_init(),\n            bias_init=nn.initializers.zeros_init(),\n            dtype=self.dtype,\n        )\n\n    def __call__(\n        self,\n        sample,\n        timesteps,\n        encoder_hidden_states,\n        controlnet_cond,\n        conditioning_scale: float = 1.0,\n        return_dict: bool = True,\n        train: bool = False,\n    ) -> Union[FlaxControlNetOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`jnp.ndarray`): (batch, channel, height, width) noisy inputs tensor\n            timestep (`jnp.ndarray` or `float` or `int`): timesteps\n            encoder_hidden_states (`jnp.ndarray`): (batch_size, sequence_length, hidden_size) encoder hidden states\n            controlnet_cond 
(`jnp.ndarray`): (batch, channel, height, width) the conditional input tensor\n            conditioning_scale: (`float`) the scale factor for controlnet outputs\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition_flax.FlaxUNet2DConditionOutput`] instead of a\n                plain tuple.\n            train (`bool`, *optional*, defaults to `False`):\n                Use deterministic functions and disable dropout when not training.\n\n        Returns:\n            [`~models.unet_2d_condition_flax.FlaxUNet2DConditionOutput`] or `tuple`:\n            [`~models.unet_2d_condition_flax.FlaxUNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        channel_order = self.controlnet_conditioning_channel_order\n        if channel_order == \"bgr\":\n            controlnet_cond = jnp.flip(controlnet_cond, axis=1)\n\n        # 1. time\n        if not isinstance(timesteps, jnp.ndarray):\n            timesteps = jnp.array([timesteps], dtype=jnp.int32)\n        elif isinstance(timesteps, jnp.ndarray) and len(timesteps.shape) == 0:\n            timesteps = timesteps.astype(dtype=jnp.float32)\n            timesteps = jnp.expand_dims(timesteps, 0)\n\n        t_emb = self.time_proj(timesteps)\n        t_emb = self.time_embedding(t_emb)\n\n        # 2. pre-process\n        sample = jnp.transpose(sample, (0, 2, 3, 1))\n        sample = self.conv_in(sample)\n\n        controlnet_cond = jnp.transpose(controlnet_cond, (0, 2, 3, 1))\n        controlnet_cond = self.controlnet_cond_embedding(controlnet_cond)\n        sample += controlnet_cond\n\n        # 3. down\n        down_block_res_samples = (sample,)\n        for down_block in self.down_blocks:\n            if isinstance(down_block, FlaxCrossAttnDownBlock2D):\n                sample, res_samples = down_block(sample, t_emb, encoder_hidden_states, deterministic=not train)\n            else:\n                sample, res_samples = down_block(sample, t_emb, deterministic=not train)\n            down_block_res_samples += res_samples\n\n        # 4. mid\n        sample = self.mid_block(sample, t_emb, encoder_hidden_states, deterministic=not train)\n\n        # 5. contronet blocks\n        controlnet_down_block_res_samples = ()\n        for down_block_res_sample, controlnet_block in zip(down_block_res_samples, self.controlnet_down_blocks):\n            down_block_res_sample = controlnet_block(down_block_res_sample)\n            controlnet_down_block_res_samples += (down_block_res_sample,)\n\n        down_block_res_samples = controlnet_down_block_res_samples\n\n        mid_block_res_sample = self.controlnet_mid_block(sample)\n\n        # 6. scaling\n        down_block_res_samples = [sample * conditioning_scale for sample in down_block_res_samples]\n        mid_block_res_sample *= conditioning_scale\n\n        if not return_dict:\n            return (down_block_res_samples, mid_block_res_sample)\n\n        return FlaxControlNetOutput(\n            down_block_res_samples=down_block_res_samples, mid_block_res_sample=mid_block_res_sample\n        )\n"
  },
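  {
    "path": "examples/sketches/flax_controlnet_init.py",
    "content": "\"\"\"Usage sketch (illustrative only, not part of the library): initialize FlaxControlNetModel parameters.\n\nMirrors `FlaxControlNetModel.init_weights` from diffusers/models/controlnet_flax.py; the import path,\nthe 77x768 text-embedding shape and the 8x conditioning resolution are assumptions chosen for\nillustration.\n\"\"\"\nimport jax\nimport jax.numpy as jnp\n\nfrom diffusers import FlaxControlNetModel\n\ncontrolnet = FlaxControlNetModel(sample_size=32, cross_attention_dim=768)\nparams = controlnet.init_weights(jax.random.PRNGKey(0))\n\n# Run the module with the freshly initialized parameters; train=False keeps dropout deterministic.\nsample = jnp.zeros((1, 4, 32, 32), dtype=jnp.float32)\ntimesteps = jnp.ones((1,), dtype=jnp.int32)\nencoder_hidden_states = jnp.zeros((1, 77, 768), dtype=jnp.float32)\ncontrolnet_cond = jnp.zeros((1, 3, 256, 256), dtype=jnp.float32)\n\nout = controlnet.apply({\"params\": params}, sample, timesteps, encoder_hidden_states, controlnet_cond, train=False)\nprint(out.mid_block_res_sample.shape)\n"
  },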
  {
    "path": "diffusers/models/cross_attention.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom ..utils import deprecate\nfrom .attention_processor import (  # noqa: F401\n    Attention,\n    AttentionProcessor,\n    AttnAddedKVProcessor,\n    AttnProcessor2_0,\n    LoRAAttnProcessor,\n    LoRALinearLayer,\n    LoRAXFormersAttnProcessor,\n    SlicedAttnAddedKVProcessor,\n    SlicedAttnProcessor,\n    XFormersAttnProcessor,\n)\nfrom .attention_processor import AttnProcessor as AttnProcessorRename  # noqa: F401\n\n\ndeprecate(\n    \"cross_attention\",\n    \"0.18.0\",\n    \"Importing from cross_attention is deprecated. Please import from diffusers.models.attention_processor instead.\",\n    standard_warn=False,\n)\n\n\nAttnProcessor = AttentionProcessor\n\n\nclass CrossAttention(Attention):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass CrossAttnProcessor(AttnProcessorRename):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass LoRACrossAttnProcessor(LoRAAttnProcessor):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass CrossAttnAddedKVProcessor(AttnAddedKVProcessor):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass XFormersCrossAttnProcessor(XFormersAttnProcessor):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. 
Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass LoRAXFormersCrossAttnProcessor(LoRAXFormersAttnProcessor):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass SlicedCrossAttnProcessor(SlicedAttnProcessor):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n\n\nclass SlicedCrossAttnAddedKVProcessor(SlicedAttnAddedKVProcessor):\n    def __init__(self, *args, **kwargs):\n        deprecation_message = f\"{self.__class__.__name__} is deprecated and will be removed in `0.18.0`. Please use `from diffusers.models.attention_processor import {''.join(self.__class__.__name__.split('Cross'))} instead.\"\n        deprecate(\"cross_attention\", \"0.18.0\", deprecation_message, standard_warn=False)\n        super().__init__(*args, **kwargs)\n"
  },
  {
    "path": "diffusers/models/dual_transformer_2d.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Optional\n\nfrom torch import nn\n\nfrom .transformer_2d import Transformer2DModel, Transformer2DModelOutput\n\n\nclass DualTransformer2DModel(nn.Module):\n    \"\"\"\n    Dual transformer wrapper that combines two `Transformer2DModel`s for mixed inference.\n\n    Parameters:\n        num_attention_heads (`int`, *optional*, defaults to 16): The number of heads to use for multi-head attention.\n        attention_head_dim (`int`, *optional*, defaults to 88): The number of channels in each head.\n        in_channels (`int`, *optional*):\n            Pass if the input is continuous. The number of channels in the input and output.\n        num_layers (`int`, *optional*, defaults to 1): The number of layers of Transformer blocks to use.\n        dropout (`float`, *optional*, defaults to 0.1): The dropout probability to use.\n        cross_attention_dim (`int`, *optional*): The number of encoder_hidden_states dimensions to use.\n        sample_size (`int`, *optional*): Pass if the input is discrete. The width of the latent images.\n            Note that this is fixed at training time as it is used for learning a number of position embeddings. See\n            `ImagePositionalEmbeddings`.\n        num_vector_embeds (`int`, *optional*):\n            Pass if the input is discrete. The number of classes of the vector embeddings of the latent pixels.\n            Includes the class for the masked latent pixel.\n        activation_fn (`str`, *optional*, defaults to `\"geglu\"`): Activation function to be used in feed-forward.\n        num_embeds_ada_norm ( `int`, *optional*): Pass if at least one of the norm_layers is `AdaLayerNorm`.\n            The number of diffusion steps used during training. Note that this is fixed at training time as it is used\n            to learn a number of embeddings that are added to the hidden states. 
During inference, you can denoise for\n            up to but not more than steps than `num_embeds_ada_norm`.\n        attention_bias (`bool`, *optional*):\n            Configure if the TransformerBlocks' attention should contain a bias parameter.\n    \"\"\"\n\n    def __init__(\n        self,\n        num_attention_heads: int = 16,\n        attention_head_dim: int = 88,\n        in_channels: Optional[int] = None,\n        num_layers: int = 1,\n        dropout: float = 0.0,\n        norm_num_groups: int = 32,\n        cross_attention_dim: Optional[int] = None,\n        attention_bias: bool = False,\n        sample_size: Optional[int] = None,\n        num_vector_embeds: Optional[int] = None,\n        activation_fn: str = \"geglu\",\n        num_embeds_ada_norm: Optional[int] = None,\n    ):\n        super().__init__()\n        self.transformers = nn.ModuleList(\n            [\n                Transformer2DModel(\n                    num_attention_heads=num_attention_heads,\n                    attention_head_dim=attention_head_dim,\n                    in_channels=in_channels,\n                    num_layers=num_layers,\n                    dropout=dropout,\n                    norm_num_groups=norm_num_groups,\n                    cross_attention_dim=cross_attention_dim,\n                    attention_bias=attention_bias,\n                    sample_size=sample_size,\n                    num_vector_embeds=num_vector_embeds,\n                    activation_fn=activation_fn,\n                    num_embeds_ada_norm=num_embeds_ada_norm,\n                )\n                for _ in range(2)\n            ]\n        )\n\n        # Variables that can be set by a pipeline:\n\n        # The ratio of transformer1 to transformer2's output states to be combined during inference\n        self.mix_ratio = 0.5\n\n        # The shape of `encoder_hidden_states` is expected to be\n        # `(batch_size, condition_lengths[0]+condition_lengths[1], num_features)`\n        self.condition_lengths = [77, 257]\n\n        # Which transformer to use to encode which condition.\n        # E.g. `(1, 0)` means that we'll use `transformers[1](conditions[0])` and `transformers[0](conditions[1])`\n        self.transformer_index_for_condition = [1, 0]\n\n    def forward(\n        self,\n        hidden_states,\n        encoder_hidden_states,\n        timestep=None,\n        attention_mask=None,\n        cross_attention_kwargs=None,\n        return_dict: bool = True,\n    ):\n        \"\"\"\n        Args:\n            hidden_states ( When discrete, `torch.LongTensor` of shape `(batch size, num latent pixels)`.\n                When continuous, `torch.FloatTensor` of shape `(batch size, channel, height, width)`): Input\n                hidden_states\n            encoder_hidden_states ( `torch.LongTensor` of shape `(batch size, encoder_hidden_states dim)`, *optional*):\n                Conditional embeddings for cross attention layer. If not given, cross-attention defaults to\n                self-attention.\n            timestep ( `torch.long`, *optional*):\n                Optional timestep to be applied as an embedding in AdaLayerNorm's. 
Used to indicate denoising step.\n            attention_mask (`torch.FloatTensor`, *optional*):\n                Optional attention mask to be applied in Attention\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~models.transformer_2d.Transformer2DModelOutput`] or `tuple`:\n            [`~models.transformer_2d.Transformer2DModelOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        input_states = hidden_states\n\n        encoded_states = []\n        tokens_start = 0\n        # attention_mask is not used yet\n        for i in range(2):\n            # for each of the two transformers, pass the corresponding condition tokens\n            condition_state = encoder_hidden_states[:, tokens_start : tokens_start + self.condition_lengths[i]]\n            transformer_index = self.transformer_index_for_condition[i]\n            encoded_state = self.transformers[transformer_index](\n                input_states,\n                encoder_hidden_states=condition_state,\n                timestep=timestep,\n                cross_attention_kwargs=cross_attention_kwargs,\n                return_dict=False,\n            )[0]\n            encoded_states.append(encoded_state - input_states)\n            tokens_start += self.condition_lengths[i]\n\n        output_states = encoded_states[0] * self.mix_ratio + encoded_states[1] * (1 - self.mix_ratio)\n        output_states = output_states + input_states\n\n        if not return_dict:\n            return (output_states,)\n\n        return Transformer2DModelOutput(sample=output_states)\n"
  },
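  {
    "path": "examples/sketches/dual_transformer_mixing.py",
    "content": "\"\"\"Usage sketch (illustrative only, not part of the library): condition routing in DualTransformer2DModel.\n\nRe-creates, on dummy tensors, the bookkeeping in `DualTransformer2DModel.forward` from\ndiffusers/models/dual_transformer_2d.py (`condition_lengths`, `transformer_index_for_condition`,\n`mix_ratio`). The two Transformer2DModel calls are replaced by an identity stand-in, so only the\nslicing and mixing arithmetic is shown.\n\"\"\"\nimport torch\n\ncondition_lengths = [77, 257]  # text tokens first, then image (CLIP vision) tokens\ntransformer_index_for_condition = [1, 0]  # transformers[1] encodes condition 0, transformers[0] encodes condition 1\nmix_ratio = 0.5\n\nhidden_states = torch.randn(2, 4, 32, 32)\nencoder_hidden_states = torch.randn(2, sum(condition_lengths), 768)\n\n\ndef fake_transformer(hidden_states, condition_state):\n    # Stand-in for `self.transformers[index](hidden_states, encoder_hidden_states=condition_state, ...)[0]`.\n    return hidden_states\n\n\nencoded_states = []\ntokens_start = 0\nfor i in range(2):\n    condition_state = encoder_hidden_states[:, tokens_start : tokens_start + condition_lengths[i]]\n    encoded_states.append(fake_transformer(hidden_states, condition_state) - hidden_states)\n    tokens_start += condition_lengths[i]\n\n# Weighted mix of the two residuals, then add the input back (as in the forward pass above).\noutput = encoded_states[0] * mix_ratio + encoded_states[1] * (1 - mix_ratio) + hidden_states\nprint(output.shape)  # matches hidden_states\n"
  },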
  {
    "path": "diffusers/models/embeddings.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\nfrom typing import Optional\n\nimport numpy as np\nimport torch\nfrom torch import nn\n\n\ndef get_timestep_embedding(\n    timesteps: torch.Tensor,\n    embedding_dim: int,\n    flip_sin_to_cos: bool = False,\n    downscale_freq_shift: float = 1,\n    scale: float = 1,\n    max_period: int = 10000,\n):\n    \"\"\"\n    This matches the implementation in Denoising Diffusion Probabilistic Models: Create sinusoidal timestep embeddings.\n\n    :param timesteps: a 1-D Tensor of N indices, one per batch element.\n                      These may be fractional.\n    :param embedding_dim: the dimension of the output. :param max_period: controls the minimum frequency of the\n    embeddings. :return: an [N x dim] Tensor of positional embeddings.\n    \"\"\"\n    assert len(timesteps.shape) == 1, \"Timesteps should be a 1d-array\"\n\n    half_dim = embedding_dim // 2\n    exponent = -math.log(max_period) * torch.arange(\n        start=0, end=half_dim, dtype=torch.float32, device=timesteps.device\n    )\n    exponent = exponent / (half_dim - downscale_freq_shift)\n\n    emb = torch.exp(exponent)\n    emb = timesteps[:, None].float() * emb[None, :]\n\n    # scale embeddings\n    emb = scale * emb\n\n    # concat sine and cosine embeddings\n    emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1)\n\n    # flip sine and cosine embeddings\n    if flip_sin_to_cos:\n        emb = torch.cat([emb[:, half_dim:], emb[:, :half_dim]], dim=-1)\n\n    # zero pad\n    if embedding_dim % 2 == 1:\n        emb = torch.nn.functional.pad(emb, (0, 1, 0, 0))\n    return emb\n\n\ndef get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0):\n    \"\"\"\n    grid_size: int of the grid height and width return: pos_embed: [grid_size*grid_size, embed_dim] or\n    [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token)\n    \"\"\"\n    grid_h = np.arange(grid_size, dtype=np.float32)\n    grid_w = np.arange(grid_size, dtype=np.float32)\n    grid = np.meshgrid(grid_w, grid_h)  # here w goes first\n    grid = np.stack(grid, axis=0)\n\n    grid = grid.reshape([2, 1, grid_size, grid_size])\n    pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)\n    if cls_token and extra_tokens > 0:\n        pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0)\n    return pos_embed\n\n\ndef get_2d_sincos_pos_embed_from_grid(embed_dim, grid):\n    if embed_dim % 2 != 0:\n        raise ValueError(\"embed_dim must be divisible by 2\")\n\n    # use half of dimensions to encode grid_h\n    emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0])  # (H*W, D/2)\n    emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1])  # (H*W, D/2)\n\n    emb = np.concatenate([emb_h, emb_w], axis=1)  # (H*W, D)\n    return emb\n\n\ndef get_1d_sincos_pos_embed_from_grid(embed_dim, pos):\n    \"\"\"\n    embed_dim: output dimension for each 
position pos: a list of positions to be encoded: size (M,) out: (M, D)\n    \"\"\"\n    if embed_dim % 2 != 0:\n        raise ValueError(\"embed_dim must be divisible by 2\")\n\n    omega = np.arange(embed_dim // 2, dtype=np.float64)\n    omega /= embed_dim / 2.0\n    omega = 1.0 / 10000**omega  # (D/2,)\n\n    pos = pos.reshape(-1)  # (M,)\n    out = np.einsum(\"m,d->md\", pos, omega)  # (M, D/2), outer product\n\n    emb_sin = np.sin(out)  # (M, D/2)\n    emb_cos = np.cos(out)  # (M, D/2)\n\n    emb = np.concatenate([emb_sin, emb_cos], axis=1)  # (M, D)\n    return emb\n\n\nclass PatchEmbed(nn.Module):\n    \"\"\"2D Image to Patch Embedding\"\"\"\n\n    def __init__(\n        self,\n        height=224,\n        width=224,\n        patch_size=16,\n        in_channels=3,\n        embed_dim=768,\n        layer_norm=False,\n        flatten=True,\n        bias=True,\n    ):\n        super().__init__()\n\n        num_patches = (height // patch_size) * (width // patch_size)\n        self.flatten = flatten\n        self.layer_norm = layer_norm\n\n        self.proj = nn.Conv2d(\n            in_channels, embed_dim, kernel_size=(patch_size, patch_size), stride=patch_size, bias=bias\n        )\n        if layer_norm:\n            self.norm = nn.LayerNorm(embed_dim, elementwise_affine=False, eps=1e-6)\n        else:\n            self.norm = None\n\n        pos_embed = get_2d_sincos_pos_embed(embed_dim, int(num_patches**0.5))\n        self.register_buffer(\"pos_embed\", torch.from_numpy(pos_embed).float().unsqueeze(0), persistent=False)\n\n    def forward(self, latent):\n        latent = self.proj(latent)\n        if self.flatten:\n            latent = latent.flatten(2).transpose(1, 2)  # BCHW -> BNC\n        if self.layer_norm:\n            latent = self.norm(latent)\n        return latent + self.pos_embed\n\n\nclass TimestepEmbedding(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        time_embed_dim: int,\n        act_fn: str = \"silu\",\n        out_dim: int = None,\n        post_act_fn: Optional[str] = None,\n        cond_proj_dim=None,\n    ):\n        super().__init__()\n\n        self.linear_1 = nn.Linear(in_channels, time_embed_dim)\n\n        if cond_proj_dim is not None:\n            self.cond_proj = nn.Linear(cond_proj_dim, in_channels, bias=False)\n        else:\n            self.cond_proj = None\n\n        if act_fn == \"silu\":\n            self.act = nn.SiLU()\n        elif act_fn == \"mish\":\n            self.act = nn.Mish()\n        elif act_fn == \"gelu\":\n            self.act = nn.GELU()\n        else:\n            raise ValueError(f\"{act_fn} does not exist. Make sure to define one of 'silu', 'mish', or 'gelu'\")\n\n        if out_dim is not None:\n            time_embed_dim_out = out_dim\n        else:\n            time_embed_dim_out = time_embed_dim\n        self.linear_2 = nn.Linear(time_embed_dim, time_embed_dim_out)\n\n        if post_act_fn is None:\n            self.post_act = None\n        elif post_act_fn == \"silu\":\n            self.post_act = nn.SiLU()\n        elif post_act_fn == \"mish\":\n            self.post_act = nn.Mish()\n        elif post_act_fn == \"gelu\":\n            self.post_act = nn.GELU()\n        else:\n            raise ValueError(f\"{post_act_fn} does not exist. 
Make sure to define one of 'silu', 'mish', or 'gelu'\")\n\n    def forward(self, sample, condition=None):\n        if condition is not None:\n            sample = sample + self.cond_proj(condition)\n        sample = self.linear_1(sample)\n\n        if self.act is not None:\n            sample = self.act(sample)\n\n        sample = self.linear_2(sample)\n\n        if self.post_act is not None:\n            sample = self.post_act(sample)\n        return sample\n\n\nclass Timesteps(nn.Module):\n    def __init__(self, num_channels: int, flip_sin_to_cos: bool, downscale_freq_shift: float):\n        super().__init__()\n        self.num_channels = num_channels\n        self.flip_sin_to_cos = flip_sin_to_cos\n        self.downscale_freq_shift = downscale_freq_shift\n\n    def forward(self, timesteps):\n        t_emb = get_timestep_embedding(\n            timesteps,\n            self.num_channels,\n            flip_sin_to_cos=self.flip_sin_to_cos,\n            downscale_freq_shift=self.downscale_freq_shift,\n        )\n        return t_emb\n\n\nclass GaussianFourierProjection(nn.Module):\n    \"\"\"Gaussian Fourier embeddings for noise levels.\"\"\"\n\n    def __init__(\n        self, embedding_size: int = 256, scale: float = 1.0, set_W_to_weight=True, log=True, flip_sin_to_cos=False\n    ):\n        super().__init__()\n        self.weight = nn.Parameter(torch.randn(embedding_size) * scale, requires_grad=False)\n        self.log = log\n        self.flip_sin_to_cos = flip_sin_to_cos\n\n        if set_W_to_weight:\n            # to delete later\n            self.W = nn.Parameter(torch.randn(embedding_size) * scale, requires_grad=False)\n\n            self.weight = self.W\n\n    def forward(self, x):\n        if self.log:\n            x = torch.log(x)\n\n        x_proj = x[:, None] * self.weight[None, :] * 2 * np.pi\n\n        if self.flip_sin_to_cos:\n            out = torch.cat([torch.cos(x_proj), torch.sin(x_proj)], dim=-1)\n        else:\n            out = torch.cat([torch.sin(x_proj), torch.cos(x_proj)], dim=-1)\n        return out\n\n\nclass ImagePositionalEmbeddings(nn.Module):\n    \"\"\"\n    Converts latent image classes into vector embeddings. Sums the vector embeddings with positional embeddings for the\n    height and width of the latent space.\n\n    For more details, see figure 10 of the dall-e paper: https://arxiv.org/abs/2102.12092\n\n    For VQ-diffusion:\n\n    Output vector embeddings are used as input for the transformer.\n\n    Note that the vector embeddings for the transformer are different than the vector embeddings from the VQVAE.\n\n    Args:\n        num_embed (`int`):\n            Number of embeddings for the latent pixels embeddings.\n        height (`int`):\n            Height of the latent image i.e. the number of height embeddings.\n        width (`int`):\n            Width of the latent image i.e. the number of width embeddings.\n        embed_dim (`int`):\n            Dimension of the produced vector embeddings. 
Used for the latent pixel, height, and width embeddings.\n    \"\"\"\n\n    def __init__(\n        self,\n        num_embed: int,\n        height: int,\n        width: int,\n        embed_dim: int,\n    ):\n        super().__init__()\n\n        self.height = height\n        self.width = width\n        self.num_embed = num_embed\n        self.embed_dim = embed_dim\n\n        self.emb = nn.Embedding(self.num_embed, embed_dim)\n        self.height_emb = nn.Embedding(self.height, embed_dim)\n        self.width_emb = nn.Embedding(self.width, embed_dim)\n\n    def forward(self, index):\n        emb = self.emb(index)\n\n        height_emb = self.height_emb(torch.arange(self.height, device=index.device).view(1, self.height))\n\n        # 1 x H x D -> 1 x H x 1 x D\n        height_emb = height_emb.unsqueeze(2)\n\n        width_emb = self.width_emb(torch.arange(self.width, device=index.device).view(1, self.width))\n\n        # 1 x W x D -> 1 x 1 x W x D\n        width_emb = width_emb.unsqueeze(1)\n\n        pos_emb = height_emb + width_emb\n\n        # 1 x H x W x D -> 1 x L xD\n        pos_emb = pos_emb.view(1, self.height * self.width, -1)\n\n        emb = emb + pos_emb[:, : emb.shape[1], :]\n\n        return emb\n\n\nclass LabelEmbedding(nn.Module):\n    \"\"\"\n    Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance.\n\n    Args:\n        num_classes (`int`): The number of classes.\n        hidden_size (`int`): The size of the vector embeddings.\n        dropout_prob (`float`): The probability of dropping a label.\n    \"\"\"\n\n    def __init__(self, num_classes, hidden_size, dropout_prob):\n        super().__init__()\n        use_cfg_embedding = dropout_prob > 0\n        self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size)\n        self.num_classes = num_classes\n        self.dropout_prob = dropout_prob\n\n    def token_drop(self, labels, force_drop_ids=None):\n        \"\"\"\n        Drops labels to enable classifier-free guidance.\n        \"\"\"\n        if force_drop_ids is None:\n            drop_ids = torch.rand(labels.shape[0], device=labels.device) < self.dropout_prob\n        else:\n            drop_ids = torch.tensor(force_drop_ids == 1)\n        labels = torch.where(drop_ids, self.num_classes, labels)\n        return labels\n\n    def forward(self, labels, force_drop_ids=None):\n        use_dropout = self.dropout_prob > 0\n        if (self.training and use_dropout) or (force_drop_ids is not None):\n            labels = self.token_drop(labels, force_drop_ids)\n        embeddings = self.embedding_table(labels)\n        return embeddings\n\n\nclass CombinedTimestepLabelEmbeddings(nn.Module):\n    def __init__(self, num_classes, embedding_dim, class_dropout_prob=0.1):\n        super().__init__()\n\n        self.time_proj = Timesteps(num_channels=256, flip_sin_to_cos=True, downscale_freq_shift=1)\n        self.timestep_embedder = TimestepEmbedding(in_channels=256, time_embed_dim=embedding_dim)\n        self.class_embedder = LabelEmbedding(num_classes, embedding_dim, class_dropout_prob)\n\n    def forward(self, timestep, class_labels, hidden_dtype=None):\n        timesteps_proj = self.time_proj(timestep)\n        timesteps_emb = self.timestep_embedder(timesteps_proj.to(dtype=hidden_dtype))  # (N, D)\n\n        class_labels = self.class_embedder(class_labels)  # (N, D)\n\n        conditioning = timesteps_emb + class_labels  # (N, D)\n\n        return conditioning\n\n\nclass TextTimeEmbedding(nn.Module):\n    
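\"\"\"Pools text-encoder hidden states into a single time-embedding-sized vector (LayerNorm -> AttentionPooling -> Linear -> LayerNorm).\"\"\"\n\n    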
def __init__(self, encoder_dim: int, time_embed_dim: int, num_heads: int = 64):\n        super().__init__()\n        self.norm1 = nn.LayerNorm(encoder_dim)\n        self.pool = AttentionPooling(num_heads, encoder_dim)\n        self.proj = nn.Linear(encoder_dim, time_embed_dim)\n        self.norm2 = nn.LayerNorm(time_embed_dim)\n\n    def forward(self, hidden_states):\n        hidden_states = self.norm1(hidden_states)\n        hidden_states = self.pool(hidden_states)\n        hidden_states = self.proj(hidden_states)\n        hidden_states = self.norm2(hidden_states)\n        return hidden_states\n\n\nclass AttentionPooling(nn.Module):\n    # Copied from https://github.com/deep-floyd/IF/blob/2f91391f27dd3c468bf174be5805b4cc92980c0b/deepfloyd_if/model/nn.py#L54\n\n    def __init__(self, num_heads, embed_dim, dtype=None):\n        super().__init__()\n        self.dtype = dtype\n        self.positional_embedding = nn.Parameter(torch.randn(1, embed_dim) / embed_dim**0.5)\n        self.k_proj = nn.Linear(embed_dim, embed_dim, dtype=self.dtype)\n        self.q_proj = nn.Linear(embed_dim, embed_dim, dtype=self.dtype)\n        self.v_proj = nn.Linear(embed_dim, embed_dim, dtype=self.dtype)\n        self.num_heads = num_heads\n        self.dim_per_head = embed_dim // self.num_heads\n\n    def forward(self, x):\n        bs, length, width = x.size()\n\n        def shape(x):\n            # (bs, length, width) --> (bs, length, n_heads, dim_per_head)\n            x = x.view(bs, -1, self.num_heads, self.dim_per_head)\n            # (bs, length, n_heads, dim_per_head) --> (bs, n_heads, length, dim_per_head)\n            x = x.transpose(1, 2)\n            # (bs, n_heads, length, dim_per_head) --> (bs*n_heads, length, dim_per_head)\n            x = x.reshape(bs * self.num_heads, -1, self.dim_per_head)\n            # (bs*n_heads, length, dim_per_head) --> (bs*n_heads, dim_per_head, length)\n            x = x.transpose(1, 2)\n            return x\n\n        class_token = x.mean(dim=1, keepdim=True) + self.positional_embedding.to(x.dtype)\n        x = torch.cat([class_token, x], dim=1)  # (bs, length+1, width)\n\n        # (bs*n_heads, class_token_length, dim_per_head)\n        q = shape(self.q_proj(class_token))\n        # (bs*n_heads, length+class_token_length, dim_per_head)\n        k = shape(self.k_proj(x))\n        v = shape(self.v_proj(x))\n\n        # (bs*n_heads, class_token_length, length+class_token_length):\n        scale = 1 / math.sqrt(math.sqrt(self.dim_per_head))\n        weight = torch.einsum(\"bct,bcs->bts\", q * scale, k * scale)  # More stable with f16 than dividing afterwards\n        weight = torch.softmax(weight.float(), dim=-1).type(weight.dtype)\n\n        # (bs*n_heads, dim_per_head, class_token_length)\n        a = torch.einsum(\"bts,bcs->bct\", weight, v)\n\n        # (bs, length+1, width)\n        a = a.reshape(bs, -1, 1).transpose(1, 2)\n\n        return a[:, 0, :]  # cls_token\n"
  },
  {
    "path": "diffusers/models/embeddings_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\n\ndef get_sinusoidal_embeddings(\n    timesteps: jnp.ndarray,\n    embedding_dim: int,\n    freq_shift: float = 1,\n    min_timescale: float = 1,\n    max_timescale: float = 1.0e4,\n    flip_sin_to_cos: bool = False,\n    scale: float = 1.0,\n) -> jnp.ndarray:\n    \"\"\"Returns the positional encoding (same as Tensor2Tensor).\n\n    Args:\n        timesteps: a 1-D Tensor of N indices, one per batch element.\n        These may be fractional.\n        embedding_dim: The number of output channels.\n        min_timescale: The smallest time unit (should probably be 0.0).\n        max_timescale: The largest time unit.\n    Returns:\n        a Tensor of timing signals [N, num_channels]\n    \"\"\"\n    assert timesteps.ndim == 1, \"Timesteps should be a 1d-array\"\n    assert embedding_dim % 2 == 0, f\"Embedding dimension {embedding_dim} should be even\"\n    num_timescales = float(embedding_dim // 2)\n    log_timescale_increment = math.log(max_timescale / min_timescale) / (num_timescales - freq_shift)\n    inv_timescales = min_timescale * jnp.exp(jnp.arange(num_timescales, dtype=jnp.float32) * -log_timescale_increment)\n    emb = jnp.expand_dims(timesteps, 1) * jnp.expand_dims(inv_timescales, 0)\n\n    # scale embeddings\n    scaled_time = scale * emb\n\n    if flip_sin_to_cos:\n        signal = jnp.concatenate([jnp.cos(scaled_time), jnp.sin(scaled_time)], axis=1)\n    else:\n        signal = jnp.concatenate([jnp.sin(scaled_time), jnp.cos(scaled_time)], axis=1)\n    signal = jnp.reshape(signal, [jnp.shape(timesteps)[0], embedding_dim])\n    return signal\n\n\nclass FlaxTimestepEmbedding(nn.Module):\n    r\"\"\"\n    Time step Embedding Module. 
Learns embeddings for input time steps.\n\n    Args:\n        time_embed_dim (`int`, *optional*, defaults to `32`):\n                Time step embedding dimension\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n                Parameters `dtype`\n    \"\"\"\n    time_embed_dim: int = 32\n    dtype: jnp.dtype = jnp.float32\n\n    @nn.compact\n    def __call__(self, temb):\n        temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_1\")(temb)\n        temb = nn.silu(temb)\n        temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_2\")(temb)\n        return temb\n\n\nclass FlaxTimesteps(nn.Module):\n    r\"\"\"\n    Wrapper Module for sinusoidal Time step Embeddings as described in https://arxiv.org/abs/2006.11239\n\n    Args:\n        dim (`int`, *optional*, defaults to `32`):\n                Time step embedding dimension\n        flip_sin_to_cos (`bool`, *optional*, defaults to `False`):\n                Whether to concatenate the cosine component before the sine component in the embedding\n        freq_shift (`float`, *optional*, defaults to `1`):\n                Frequency shift applied when computing the sinusoidal frequencies (see `get_sinusoidal_embeddings`)\n    \"\"\"\n    dim: int = 32\n    flip_sin_to_cos: bool = False\n    freq_shift: float = 1\n\n    @nn.compact\n    def __call__(self, timesteps):\n        return get_sinusoidal_embeddings(\n            timesteps, embedding_dim=self.dim, flip_sin_to_cos=self.flip_sin_to_cos, freq_shift=self.freq_shift\n        )\n"
  },
  {
    "path": "diffusers/models/modeling_flax_pytorch_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" PyTorch - Flax general utilities.\"\"\"\nimport re\n\nimport jax.numpy as jnp\nfrom flax.traverse_util import flatten_dict, unflatten_dict\nfrom jax.random import PRNGKey\n\nfrom ..utils import logging\n\n\nlogger = logging.get_logger(__name__)\n\n\ndef rename_key(key):\n    regex = r\"\\w+[.]\\d+\"\n    pats = re.findall(regex, key)\n    for pat in pats:\n        key = key.replace(pat, \"_\".join(pat.split(\".\")))\n    return key\n\n\n#####################\n# PyTorch => Flax #\n#####################\n\n\n# Adapted from https://github.com/huggingface/transformers/blob/c603c80f46881ae18b2ca50770ef65fa4033eacd/src/transformers/modeling_flax_pytorch_utils.py#L69\n# and https://github.com/patil-suraj/stable-diffusion-jax/blob/main/stable_diffusion_jax/convert_diffusers_to_jax.py\ndef rename_key_and_reshape_tensor(pt_tuple_key, pt_tensor, random_flax_state_dict):\n    \"\"\"Rename PT weight names to corresponding Flax weight names and reshape tensor if necessary\"\"\"\n\n    # conv norm or layer norm\n    renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"scale\",)\n    if (\n        any(\"norm\" in str_ for str_ in pt_tuple_key)\n        and (pt_tuple_key[-1] == \"bias\")\n        and (pt_tuple_key[:-1] + (\"bias\",) not in random_flax_state_dict)\n        and (pt_tuple_key[:-1] + (\"scale\",) in random_flax_state_dict)\n    ):\n        renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"scale\",)\n        return renamed_pt_tuple_key, pt_tensor\n    elif pt_tuple_key[-1] in [\"weight\", \"gamma\"] and pt_tuple_key[:-1] + (\"scale\",) in random_flax_state_dict:\n        renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"scale\",)\n        return renamed_pt_tuple_key, pt_tensor\n\n    # embedding\n    if pt_tuple_key[-1] == \"weight\" and pt_tuple_key[:-1] + (\"embedding\",) in random_flax_state_dict:\n        pt_tuple_key = pt_tuple_key[:-1] + (\"embedding\",)\n        return renamed_pt_tuple_key, pt_tensor\n\n    # conv layer\n    renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"kernel\",)\n    if pt_tuple_key[-1] == \"weight\" and pt_tensor.ndim == 4:\n        pt_tensor = pt_tensor.transpose(2, 3, 1, 0)\n        return renamed_pt_tuple_key, pt_tensor\n\n    # linear layer\n    renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"kernel\",)\n    if pt_tuple_key[-1] == \"weight\":\n        pt_tensor = pt_tensor.T\n        return renamed_pt_tuple_key, pt_tensor\n\n    # old PyTorch layer norm weight\n    renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"weight\",)\n    if pt_tuple_key[-1] == \"gamma\":\n        return renamed_pt_tuple_key, pt_tensor\n\n    # old PyTorch layer norm bias\n    renamed_pt_tuple_key = pt_tuple_key[:-1] + (\"bias\",)\n    if pt_tuple_key[-1] == \"beta\":\n        return renamed_pt_tuple_key, pt_tensor\n\n    return pt_tuple_key, pt_tensor\n\n\ndef convert_pytorch_state_dict_to_flax(pt_state_dict, flax_model, init_key=42):\n    # Step 1: Convert pytorch tensor to 
numpy\n    pt_state_dict = {k: v.numpy() for k, v in pt_state_dict.items()}\n\n    # Step 2: Since the model is stateless, get random Flax params\n    random_flax_params = flax_model.init_weights(PRNGKey(init_key))\n\n    random_flax_state_dict = flatten_dict(random_flax_params)\n    flax_state_dict = {}\n\n    # Need to change some parameters name to match Flax names\n    for pt_key, pt_tensor in pt_state_dict.items():\n        renamed_pt_key = rename_key(pt_key)\n        pt_tuple_key = tuple(renamed_pt_key.split(\".\"))\n\n        # Correctly rename weight parameters\n        flax_key, flax_tensor = rename_key_and_reshape_tensor(pt_tuple_key, pt_tensor, random_flax_state_dict)\n\n        if flax_key in random_flax_state_dict:\n            if flax_tensor.shape != random_flax_state_dict[flax_key].shape:\n                raise ValueError(\n                    f\"PyTorch checkpoint seems to be incorrect. Weight {pt_key} was expected to be of shape \"\n                    f\"{random_flax_state_dict[flax_key].shape}, but is {flax_tensor.shape}.\"\n                )\n\n        # also add unexpected weight so that warning is thrown\n        flax_state_dict[flax_key] = jnp.asarray(flax_tensor)\n\n    return unflatten_dict(flax_state_dict)\n"
  },
  {
    "path": "diffusers/models/modeling_flax_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nfrom pickle import UnpicklingError\nfrom typing import Any, Dict, Union\n\nimport jax\nimport jax.numpy as jnp\nimport msgpack.exceptions\nfrom flax.core.frozen_dict import FrozenDict, unfreeze\nfrom flax.serialization import from_bytes, to_bytes\nfrom flax.traverse_util import flatten_dict, unflatten_dict\nfrom huggingface_hub import hf_hub_download\nfrom huggingface_hub.utils import EntryNotFoundError, RepositoryNotFoundError, RevisionNotFoundError\nfrom requests import HTTPError\n\nfrom .. import __version__, is_torch_available\nfrom ..utils import (\n    CONFIG_NAME,\n    DIFFUSERS_CACHE,\n    FLAX_WEIGHTS_NAME,\n    HUGGINGFACE_CO_RESOLVE_ENDPOINT,\n    WEIGHTS_NAME,\n    logging,\n)\nfrom .modeling_flax_pytorch_utils import convert_pytorch_state_dict_to_flax\n\n\nlogger = logging.get_logger(__name__)\n\n\nclass FlaxModelMixin:\n    r\"\"\"\n    Base class for all flax models.\n\n    [`FlaxModelMixin`] takes care of storing the configuration of the models and handles methods for loading,\n    downloading and saving models.\n    \"\"\"\n    config_name = CONFIG_NAME\n    _automatically_saved_args = [\"_diffusers_version\", \"_class_name\", \"_name_or_path\"]\n    _flax_internal_args = [\"name\", \"parent\", \"dtype\"]\n\n    @classmethod\n    def _from_config(cls, config, **kwargs):\n        \"\"\"\n        All context managers that the model should be initialized under go here.\n        \"\"\"\n        return cls(config, **kwargs)\n\n    def _cast_floating_to(self, params: Union[Dict, FrozenDict], dtype: jnp.dtype, mask: Any = None) -> Any:\n        \"\"\"\n        Helper method to cast floating-point values of given parameter `PyTree` to given `dtype`.\n        \"\"\"\n\n        # taken from https://github.com/deepmind/jmp/blob/3a8318abc3292be38582794dbf7b094e6583b192/jmp/_src/policy.py#L27\n        def conditional_cast(param):\n            if isinstance(param, jnp.ndarray) and jnp.issubdtype(param.dtype, jnp.floating):\n                param = param.astype(dtype)\n            return param\n\n        if mask is None:\n            return jax.tree_map(conditional_cast, params)\n\n        flat_params = flatten_dict(params)\n        flat_mask, _ = jax.tree_flatten(mask)\n\n        for masked, key in zip(flat_mask, flat_params.keys()):\n            if masked:\n                param = flat_params[key]\n                flat_params[key] = conditional_cast(param)\n\n        return unflatten_dict(flat_params)\n\n    def to_bf16(self, params: Union[Dict, FrozenDict], mask: Any = None):\n        r\"\"\"\n        Cast the floating-point `params` to `jax.numpy.bfloat16`. 
This returns a new `params` tree and does not cast\n        the `params` in place.\n\n        This method can be used on TPU to explicitly convert the model parameters to bfloat16 precision to do full\n        half-precision training or to save weights in bfloat16 for inference in order to save memory and improve speed.\n\n        Arguments:\n            params (`Union[Dict, FrozenDict]`):\n                A `PyTree` of model parameters.\n            mask (`Union[Dict, FrozenDict]`):\n                A `PyTree` with same structure as the `params` tree. The leaves should be booleans, `True` for params\n                you want to cast, and should be `False` for those you want to skip.\n\n        Examples:\n\n        ```python\n        >>> from diffusers import FlaxUNet2DConditionModel\n\n        >>> # load model\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> # By default, the model parameters will be in fp32 precision, to cast these to bfloat16 precision\n        >>> params = model.to_bf16(params)\n        >>> # If you don't want to cast certain parameters (for example layer norm bias and scale)\n        >>> # then pass the mask as follows\n        >>> from flax import traverse_util\n\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> flat_params = traverse_util.flatten_dict(params)\n        >>> mask = {\n        ...     path: (path[-2:] != (\"LayerNorm\", \"bias\") and path[-2:] != (\"LayerNorm\", \"scale\"))\n        ...     for path in flat_params\n        ... }\n        >>> mask = traverse_util.unflatten_dict(mask)\n        >>> params = model.to_bf16(params, mask)\n        ```\"\"\"\n        return self._cast_floating_to(params, jnp.bfloat16, mask)\n\n    def to_fp32(self, params: Union[Dict, FrozenDict], mask: Any = None):\n        r\"\"\"\n        Cast the floating-point `params` to `jax.numpy.float32`. This method can be used to explicitly convert the\n        model parameters to fp32 precision. This returns a new `params` tree and does not cast the `params` in place.\n\n        Arguments:\n            params (`Union[Dict, FrozenDict]`):\n                A `PyTree` of model parameters.\n            mask (`Union[Dict, FrozenDict]`):\n                A `PyTree` with same structure as the `params` tree. The leaves should be booleans, `True` for params\n                you want to cast, and should be `False` for those you want to skip\n\n        Examples:\n\n        ```python\n        >>> from diffusers import FlaxUNet2DConditionModel\n\n        >>> # Download model and configuration from huggingface.co\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> # By default, the model params will be in fp32, to illustrate the use of this method,\n        >>> # we'll first cast to fp16 and back to fp32\n        >>> params = model.to_fp16(params)\n        >>> # now cast back to fp32\n        >>> params = model.to_fp32(params)\n        ```\"\"\"\n        return self._cast_floating_to(params, jnp.float32, mask)\n\n    def to_fp16(self, params: Union[Dict, FrozenDict], mask: Any = None):\n        r\"\"\"\n        Cast the floating-point `params` to `jax.numpy.float16`. This returns a new `params` tree and does not cast the\n        `params` in place.\n\n        This method can be used on GPU to explicitly convert the model parameters to float16 precision to do full\n        half-precision training or to save weights in float16 for inference in order to save memory and improve speed.\n\n        Arguments:\n            params (`Union[Dict, FrozenDict]`):\n                A `PyTree` of model parameters.\n            mask (`Union[Dict, FrozenDict]`):\n                A `PyTree` with same structure as the `params` tree. The leaves should be booleans, `True` for params\n                you want to cast, and should be `False` for those you want to skip\n\n        Examples:\n\n        ```python\n        >>> from diffusers import FlaxUNet2DConditionModel\n\n        >>> # load model\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> # By default, the model params will be in fp32, to cast these to float16\n        >>> params = model.to_fp16(params)\n        >>> # If you don't want to cast certain parameters (for example layer norm bias and scale)\n        >>> # then pass the mask as follows\n        >>> from flax import traverse_util\n\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> flat_params = traverse_util.flatten_dict(params)\n        >>> mask = {\n        ...     path: (path[-2:] != (\"LayerNorm\", \"bias\") and path[-2:] != (\"LayerNorm\", \"scale\"))\n        ...     for path in flat_params\n        ... }\n        >>> mask = traverse_util.unflatten_dict(mask)\n        >>> params = model.to_fp16(params, mask)\n        ```\"\"\"\n        return self._cast_floating_to(params, jnp.float16, mask)\n\n    def init_weights(self, rng: jax.random.KeyArray) -> Dict:\n        raise NotImplementedError(f\"init_weights method has to be implemented for {self}\")\n\n    @classmethod\n    def from_pretrained(\n        cls,\n        pretrained_model_name_or_path: Union[str, os.PathLike],\n        dtype: jnp.dtype = jnp.float32,\n        *model_args,\n        **kwargs,\n    ):\n        r\"\"\"\n        Instantiate a pretrained flax model from a pre-trained model configuration.\n\n        The warning *Weights from XXX not initialized from pretrained model* means that the weights of XXX do not come\n        pretrained with the rest of the model. It is up to you to train those weights with a downstream fine-tuning\n        task.\n\n        The warning *Weights from XXX not used in YYY* means that the layer XXX is not used by YYY, therefore those\n        weights are discarded.\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`):\n                Can be either:\n\n                    - A string, the *model id* of a pretrained model hosted inside a model repo on huggingface.co.\n                      Valid model ids are namespaced under a user or organization name, like\n                      `runwayml/stable-diffusion-v1-5`.\n                    - A path to a *directory* containing model weights saved using [`~ModelMixin.save_pretrained`],\n                      e.g., `./my_model_directory/`.\n            dtype (`jax.numpy.dtype`, *optional*, defaults to `jax.numpy.float32`):\n                The data type of the computation. 
Can be one of `jax.numpy.float32`, `jax.numpy.float16` (on GPUs) and\n                `jax.numpy.bfloat16` (on TPUs).\n\n                This can be used to enable mixed-precision training or half-precision inference on GPUs or TPUs. If\n                specified all the computation will be performed with the given `dtype`.\n\n                **Note that this only specifies the dtype of the computation and does not influence the dtype of model\n                parameters.**\n\n                If you wish to change the dtype of the model parameters, see [`~ModelMixin.to_fp16`] and\n                [`~ModelMixin.to_bf16`].\n            model_args (sequence of positional arguments, *optional*):\n                All remaining positional arguments will be passed to the underlying model's `__init__` method.\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            from_pt (`bool`, *optional*, defaults to `False`):\n                Load the model weights from a PyTorch checkpoint save file.\n            kwargs (remaining dictionary of keyword arguments, *optional*):\n                Can be used to update the configuration object (after it being loaded) and initiate the model (e.g.,\n                `output_attentions=True`). Behaves differently depending on whether a `config` is provided or\n                automatically loaded:\n\n                    - If a configuration is provided with `config`, `**kwargs` will be directly passed to the\n                      underlying model's `__init__` method (we assume all relevant updates to the configuration have\n                      already been done)\n                    - If a configuration is not provided, `kwargs` will be first passed to the configuration class\n                      initialization function ([`~ConfigMixin.from_config`]). Each key of `kwargs` that corresponds to\n                      a configuration attribute will be used to override said attribute with the supplied `kwargs`\n                      value. 
Remaining keys that do not correspond to any configuration attribute will be passed to the\n                      underlying model's `__init__` function.\n\n        Examples:\n\n        ```python\n        >>> from diffusers import FlaxUNet2DConditionModel\n\n        >>> # Download model and configuration from huggingface.co and cache.\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> # Model was saved using *save_pretrained('./test/saved_model/')* (for example purposes, not runnable).\n        >>> model, params = FlaxUNet2DConditionModel.from_pretrained(\"./test/saved_model/\")\n        ```\"\"\"\n        config = kwargs.pop(\"config\", None)\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        force_download = kwargs.pop(\"force_download\", False)\n        from_pt = kwargs.pop(\"from_pt\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", False)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n\n        user_agent = {\n            \"diffusers\": __version__,\n            \"file_type\": \"model\",\n            \"framework\": \"flax\",\n        }\n\n        # Load config if we don't provide a configuration\n        config_path = config if config is not None else pretrained_model_name_or_path\n        model, model_kwargs = cls.from_config(\n            config_path,\n            cache_dir=cache_dir,\n            return_unused_kwargs=True,\n            force_download=force_download,\n            resume_download=resume_download,\n            proxies=proxies,\n            local_files_only=local_files_only,\n            use_auth_token=use_auth_token,\n            revision=revision,\n            subfolder=subfolder,\n            # model args\n            dtype=dtype,\n            **kwargs,\n        )\n\n        # Load model\n        pretrained_path_with_subfolder = (\n            pretrained_model_name_or_path\n            if subfolder is None\n            else os.path.join(pretrained_model_name_or_path, subfolder)\n        )\n        if os.path.isdir(pretrained_path_with_subfolder):\n            if from_pt:\n                if not os.path.isfile(os.path.join(pretrained_path_with_subfolder, WEIGHTS_NAME)):\n                    raise EnvironmentError(\n                        f\"Error no file named {WEIGHTS_NAME} found in directory {pretrained_path_with_subfolder} \"\n                    )\n                model_file = os.path.join(pretrained_path_with_subfolder, WEIGHTS_NAME)\n            elif os.path.isfile(os.path.join(pretrained_path_with_subfolder, FLAX_WEIGHTS_NAME)):\n                # Load from a Flax checkpoint\n                model_file = os.path.join(pretrained_path_with_subfolder, FLAX_WEIGHTS_NAME)\n            # Check if pytorch weights exist instead\n            elif os.path.isfile(os.path.join(pretrained_path_with_subfolder, WEIGHTS_NAME)):\n                raise EnvironmentError(\n                    f\"{WEIGHTS_NAME} file found in directory {pretrained_path_with_subfolder}. 
Please load the model\"\n                    \" using `from_pt=True`.\"\n                )\n            else:\n                raise EnvironmentError(\n                    f\"Error no file named {FLAX_WEIGHTS_NAME} or {WEIGHTS_NAME} found in directory \"\n                    f\"{pretrained_path_with_subfolder}.\"\n                )\n        else:\n            try:\n                model_file = hf_hub_download(\n                    pretrained_model_name_or_path,\n                    filename=FLAX_WEIGHTS_NAME if not from_pt else WEIGHTS_NAME,\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    proxies=proxies,\n                    resume_download=resume_download,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    user_agent=user_agent,\n                    subfolder=subfolder,\n                    revision=revision,\n                )\n\n            except RepositoryNotFoundError:\n                raise EnvironmentError(\n                    f\"{pretrained_model_name_or_path} is not a local folder and is not a valid model identifier \"\n                    \"listed on 'https://huggingface.co/models'\\nIf this is a private repository, make sure to pass a \"\n                    \"token having permission to this repo with `use_auth_token` or log in with `huggingface-cli \"\n                    \"login`.\"\n                )\n            except RevisionNotFoundError:\n                raise EnvironmentError(\n                    f\"{revision} is not a valid git identifier (branch name, tag name or commit id) that exists for \"\n                    \"this model name. Check the model page at \"\n                    f\"'https://huggingface.co/{pretrained_model_name_or_path}' for available revisions.\"\n                )\n            except EntryNotFoundError:\n                raise EnvironmentError(\n                    f\"{pretrained_model_name_or_path} does not appear to have a file named {FLAX_WEIGHTS_NAME}.\"\n                )\n            except HTTPError as err:\n                raise EnvironmentError(\n                    f\"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\\n\"\n                    f\"{err}\"\n                )\n            except ValueError:\n                raise EnvironmentError(\n                    f\"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this model, couldn't find it\"\n                    f\" in the cached files and it looks like {pretrained_model_name_or_path} is not the path to a\"\n                    f\" directory containing a file named {FLAX_WEIGHTS_NAME} or {WEIGHTS_NAME}.\\nCheckout your\"\n                    \" internet connection or see how to run the library in offline mode at\"\n                    \" 'https://huggingface.co/docs/transformers/installation#offline-mode'.\"\n                )\n            except EnvironmentError:\n                raise EnvironmentError(\n                    f\"Can't load the model for '{pretrained_model_name_or_path}'. If you were trying to load it from \"\n                    \"'https://huggingface.co/models', make sure you don't have a local directory with the same name. 
\"\n                    f\"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory \"\n                    f\"containing a file named {FLAX_WEIGHTS_NAME} or {WEIGHTS_NAME}.\"\n                )\n\n        if from_pt:\n            if is_torch_available():\n                from .modeling_utils import load_state_dict\n            else:\n                raise EnvironmentError(\n                    \"Can't load the model in PyTorch format because PyTorch is not installed. \"\n                    \"Please, install PyTorch or use native Flax weights.\"\n                )\n\n            # Step 1: Get the pytorch file\n            pytorch_model_file = load_state_dict(model_file)\n\n            # Step 2: Convert the weights\n            state = convert_pytorch_state_dict_to_flax(pytorch_model_file, model)\n        else:\n            try:\n                with open(model_file, \"rb\") as state_f:\n                    state = from_bytes(cls, state_f.read())\n            except (UnpicklingError, msgpack.exceptions.ExtraData) as e:\n                try:\n                    with open(model_file) as f:\n                        if f.read().startswith(\"version\"):\n                            raise OSError(\n                                \"You seem to have cloned a repository without having git-lfs installed. Please\"\n                                \" install git-lfs and run `git lfs install` followed by `git lfs pull` in the\"\n                                \" folder you cloned.\"\n                            )\n                        else:\n                            raise ValueError from e\n                except (UnicodeDecodeError, ValueError):\n                    raise EnvironmentError(f\"Unable to convert {model_file} to Flax deserializable object. \")\n            # make sure all arrays are stored as jnp.ndarray\n            # NOTE: This is to prevent a bug this will be fixed in Flax >= v0.3.4:\n            # https://github.com/google/flax/issues/1261\n        state = jax.tree_util.tree_map(lambda x: jax.device_put(x, jax.devices(\"cpu\")[0]), state)\n\n        # flatten dicts\n        state = flatten_dict(state)\n\n        params_shape_tree = jax.eval_shape(model.init_weights, rng=jax.random.PRNGKey(0))\n        required_params = set(flatten_dict(unfreeze(params_shape_tree)).keys())\n\n        shape_state = flatten_dict(unfreeze(params_shape_tree))\n\n        missing_keys = required_params - set(state.keys())\n        unexpected_keys = set(state.keys()) - required_params\n\n        if missing_keys:\n            logger.warning(\n                f\"The checkpoint {pretrained_model_name_or_path} is missing required keys: {missing_keys}. \"\n                \"Make sure to call model.init_weights to initialize the missing weights.\"\n            )\n            cls._missing_keys = missing_keys\n\n        for key in state.keys():\n            if key in shape_state and state[key].shape != shape_state[key].shape:\n                raise ValueError(\n                    f\"Trying to load the pretrained weight for {key} failed: checkpoint has shape \"\n                    f\"{state[key].shape} which is incompatible with the model shape {shape_state[key].shape}. 
\"\n                )\n\n        # remove unexpected keys to not be saved again\n        for unexpected_key in unexpected_keys:\n            del state[unexpected_key]\n\n        if len(unexpected_keys) > 0:\n            logger.warning(\n                f\"Some weights of the model checkpoint at {pretrained_model_name_or_path} were not used when\"\n                f\" initializing {model.__class__.__name__}: {unexpected_keys}\\n- This IS expected if you are\"\n                f\" initializing {model.__class__.__name__} from the checkpoint of a model trained on another task or\"\n                \" with another architecture.\"\n            )\n        else:\n            logger.info(f\"All model checkpoint weights were used when initializing {model.__class__.__name__}.\\n\")\n\n        if len(missing_keys) > 0:\n            logger.warning(\n                f\"Some weights of {model.__class__.__name__} were not initialized from the model checkpoint at\"\n                f\" {pretrained_model_name_or_path} and are newly initialized: {missing_keys}\\nYou should probably\"\n                \" TRAIN this model on a down-stream task to be able to use it for predictions and inference.\"\n            )\n        else:\n            logger.info(\n                f\"All the weights of {model.__class__.__name__} were initialized from the model checkpoint at\"\n                f\" {pretrained_model_name_or_path}.\\nIf your task is similar to the task the model of the checkpoint\"\n                f\" was trained on, you can already use {model.__class__.__name__} for predictions without further\"\n                \" training.\"\n            )\n\n        return model, unflatten_dict(state)\n\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        params: Union[Dict, FrozenDict],\n        is_main_process: bool = True,\n    ):\n        \"\"\"\n        Save a model and its configuration file to a directory, so that it can be re-loaded using the\n        `[`~FlaxModelMixin.from_pretrained`]` class method\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. Will be created if it doesn't exist.\n            params (`Union[Dict, FrozenDict]`):\n                A `PyTree` of model parameters.\n            is_main_process (`bool`, *optional*, defaults to `True`):\n                Whether the process calling this is the main process or not. Useful when in distributed training like\n                TPUs and need to call this function on all processes. In this case, set `is_main_process=True` only on\n                the main process to avoid race conditions.\n        \"\"\"\n        if os.path.isfile(save_directory):\n            logger.error(f\"Provided path ({save_directory}) should be a directory, not a file\")\n            return\n\n        os.makedirs(save_directory, exist_ok=True)\n\n        model_to_save = self\n\n        # Attach architecture to the config\n        # Save the config\n        if is_main_process:\n            model_to_save.save_config(save_directory)\n\n        # save model\n        output_model_file = os.path.join(save_directory, FLAX_WEIGHTS_NAME)\n        with open(output_model_file, \"wb\") as f:\n            model_bytes = to_bytes(params)\n            f.write(model_bytes)\n\n        logger.info(f\"Model weights saved in {output_model_file}\")\n"
  },
  {
    "path": "diffusers/models/modeling_pytorch_flax_utils.py",
    "content": "# coding=utf-8\r\n# Copyright 2023 The HuggingFace Inc. team.\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n#     http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\"\"\" PyTorch - Flax general utilities.\"\"\"\r\n\r\nfrom pickle import UnpicklingError\r\n\r\nimport jax\r\nimport jax.numpy as jnp\r\nimport numpy as np\r\nfrom flax.serialization import from_bytes\r\nfrom flax.traverse_util import flatten_dict\r\n\r\nfrom ..utils import logging\r\n\r\n\r\nlogger = logging.get_logger(__name__)\r\n\r\n\r\n#####################\r\n# Flax => PyTorch #\r\n#####################\r\n\r\n\r\n# from https://github.com/huggingface/transformers/blob/main/src/transformers/modeling_flax_pytorch_utils.py#L224-L352\r\ndef load_flax_checkpoint_in_pytorch_model(pt_model, model_file):\r\n    try:\r\n        with open(model_file, \"rb\") as flax_state_f:\r\n            flax_state = from_bytes(None, flax_state_f.read())\r\n    except UnpicklingError as e:\r\n        try:\r\n            with open(model_file) as f:\r\n                if f.read().startswith(\"version\"):\r\n                    raise OSError(\r\n                        \"You seem to have cloned a repository without having git-lfs installed. Please\"\r\n                        \" install git-lfs and run `git lfs install` followed by `git lfs pull` in the\"\r\n                        \" folder you cloned.\"\r\n                    )\r\n                else:\r\n                    raise ValueError from e\r\n        except (UnicodeDecodeError, ValueError):\r\n            raise EnvironmentError(f\"Unable to convert {model_file} to Flax deserializable object. \")\r\n\r\n    return load_flax_weights_in_pytorch_model(pt_model, flax_state)\r\n\r\n\r\ndef load_flax_weights_in_pytorch_model(pt_model, flax_state):\r\n    \"\"\"Load flax checkpoints in a PyTorch model\"\"\"\r\n\r\n    try:\r\n        import torch  # noqa: F401\r\n    except ImportError:\r\n        logger.error(\r\n            \"Loading Flax weights in PyTorch requires both PyTorch and Flax to be installed. Please see\"\r\n            \" https://pytorch.org/ and https://flax.readthedocs.io/en/latest/installation.html for installation\"\r\n            \" instructions.\"\r\n        )\r\n        raise\r\n\r\n    # check if we have bf16 weights\r\n    is_type_bf16 = flatten_dict(jax.tree_util.tree_map(lambda x: x.dtype == jnp.bfloat16, flax_state)).values()\r\n    if any(is_type_bf16):\r\n        # convert all weights to fp32 if they are bf16 since torch.from_numpy can-not handle bf16\r\n\r\n        # and bf16 is not fully supported in PT yet.\r\n        logger.warning(\r\n            \"Found ``bfloat16`` weights in Flax model. 
Casting all ``bfloat16`` weights to ``float32`` \"\r\n            \"before loading those in PyTorch model.\"\r\n        )\r\n        flax_state = jax.tree_util.tree_map(\r\n            lambda params: params.astype(np.float32) if params.dtype == jnp.bfloat16 else params, flax_state\r\n        )\r\n\r\n    pt_model.base_model_prefix = \"\"\r\n\r\n    flax_state_dict = flatten_dict(flax_state, sep=\".\")\r\n    pt_model_dict = pt_model.state_dict()\r\n\r\n    # keep track of unexpected & missing keys\r\n    unexpected_keys = []\r\n    missing_keys = set(pt_model_dict.keys())\r\n\r\n    for flax_key_tuple, flax_tensor in flax_state_dict.items():\r\n        flax_key_tuple_array = flax_key_tuple.split(\".\")\r\n\r\n        if flax_key_tuple_array[-1] == \"kernel\" and flax_tensor.ndim == 4:\r\n            flax_key_tuple_array = flax_key_tuple_array[:-1] + [\"weight\"]\r\n            flax_tensor = jnp.transpose(flax_tensor, (3, 2, 0, 1))\r\n        elif flax_key_tuple_array[-1] == \"kernel\":\r\n            flax_key_tuple_array = flax_key_tuple_array[:-1] + [\"weight\"]\r\n            flax_tensor = flax_tensor.T\r\n        elif flax_key_tuple_array[-1] == \"scale\":\r\n            flax_key_tuple_array = flax_key_tuple_array[:-1] + [\"weight\"]\r\n\r\n        if \"time_embedding\" not in flax_key_tuple_array:\r\n            for i, flax_key_tuple_string in enumerate(flax_key_tuple_array):\r\n                flax_key_tuple_array[i] = (\r\n                    flax_key_tuple_string.replace(\"_0\", \".0\")\r\n                    .replace(\"_1\", \".1\")\r\n                    .replace(\"_2\", \".2\")\r\n                    .replace(\"_3\", \".3\")\r\n                    .replace(\"_4\", \".4\")\r\n                    .replace(\"_5\", \".5\")\r\n                    .replace(\"_6\", \".6\")\r\n                    .replace(\"_7\", \".7\")\r\n                    .replace(\"_8\", \".8\")\r\n                    .replace(\"_9\", \".9\")\r\n                )\r\n\r\n        flax_key = \".\".join(flax_key_tuple_array)\r\n\r\n        if flax_key in pt_model_dict:\r\n            if flax_tensor.shape != pt_model_dict[flax_key].shape:\r\n                raise ValueError(\r\n                    f\"Flax checkpoint seems to be incorrect. Weight {flax_key_tuple} was expected \"\r\n                    f\"to be of shape {pt_model_dict[flax_key].shape}, but is {flax_tensor.shape}.\"\r\n                )\r\n            else:\r\n                # add weight to pytorch dict\r\n                flax_tensor = np.asarray(flax_tensor) if not isinstance(flax_tensor, np.ndarray) else flax_tensor\r\n                pt_model_dict[flax_key] = torch.from_numpy(flax_tensor)\r\n                # remove from missing keys\r\n                missing_keys.remove(flax_key)\r\n        else:\r\n            # weight is not expected by PyTorch model\r\n            unexpected_keys.append(flax_key)\r\n\r\n    pt_model.load_state_dict(pt_model_dict)\r\n\r\n    # re-transform missing_keys to list\r\n    missing_keys = list(missing_keys)\r\n\r\n    if len(unexpected_keys) > 0:\r\n        logger.warning(\r\n            \"Some weights of the Flax model were not used when initializing the PyTorch model\"\r\n            f\" {pt_model.__class__.__name__}: {unexpected_keys}\\n- This IS expected if you are initializing\"\r\n            f\" {pt_model.__class__.__name__} from a Flax model trained on another task or with another architecture\"\r\n            \" (e.g. 
initializing a BertForSequenceClassification model from a FlaxBertForPreTraining model).\\n- This\"\r\n            f\" IS NOT expected if you are initializing {pt_model.__class__.__name__} from a Flax model that you expect\"\r\n            \" to be exactly identical (e.g. initializing a BertForSequenceClassification model from a\"\r\n            \" FlaxBertForSequenceClassification model).\"\r\n        )\r\n    if len(missing_keys) > 0:\r\n        logger.warning(\r\n            f\"Some weights of {pt_model.__class__.__name__} were not initialized from the Flax model and are newly\"\r\n            f\" initialized: {missing_keys}\\nYou should probably TRAIN this model on a down-stream task to be able to\"\r\n            \" use it for predictions and inference.\"\r\n        )\r\n\r\n    return pt_model\r\n"
  },
  {
    "path": "diffusers/models/modeling_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport itertools\nimport os\nfrom functools import partial\nfrom typing import Any, Callable, List, Optional, Tuple, Union\n\nimport torch\nfrom torch import Tensor, device\n\nfrom .. import __version__\nfrom ..utils import (\n    CONFIG_NAME,\n    DIFFUSERS_CACHE,\n    FLAX_WEIGHTS_NAME,\n    HF_HUB_OFFLINE,\n    SAFETENSORS_WEIGHTS_NAME,\n    WEIGHTS_NAME,\n    _add_variant,\n    _get_model_file,\n    deprecate,\n    is_accelerate_available,\n    is_safetensors_available,\n    is_torch_version,\n    logging,\n)\n\n\nlogger = logging.get_logger(__name__)\n\n\nif is_torch_version(\">=\", \"1.9.0\"):\n    _LOW_CPU_MEM_USAGE_DEFAULT = True\nelse:\n    _LOW_CPU_MEM_USAGE_DEFAULT = False\n\n\nif is_accelerate_available():\n    import accelerate\n    from accelerate.utils import set_module_tensor_to_device\n    from accelerate.utils.versions import is_torch_version\n\nif is_safetensors_available():\n    import safetensors\n\n\ndef get_parameter_device(parameter: torch.nn.Module):\n    try:\n        parameters_and_buffers = itertools.chain(parameter.parameters(), parameter.buffers())\n        return next(parameters_and_buffers).device\n    except StopIteration:\n        # For torch.nn.DataParallel compatibility in PyTorch 1.5\n\n        def find_tensor_attributes(module: torch.nn.Module) -> List[Tuple[str, Tensor]]:\n            tuples = [(k, v) for k, v in module.__dict__.items() if torch.is_tensor(v)]\n            return tuples\n\n        gen = parameter._named_members(get_members_fn=find_tensor_attributes)\n        first_tuple = next(gen)\n        return first_tuple[1].device\n\n\ndef get_parameter_dtype(parameter: torch.nn.Module):\n    try:\n        params = tuple(parameter.parameters())\n        if len(params) > 0:\n            return params[0].dtype\n\n        buffers = tuple(parameter.buffers())\n        if len(buffers) > 0:\n            return buffers[0].dtype\n\n    except StopIteration:\n        # For torch.nn.DataParallel compatibility in PyTorch 1.5\n\n        def find_tensor_attributes(module: torch.nn.Module) -> List[Tuple[str, Tensor]]:\n            tuples = [(k, v) for k, v in module.__dict__.items() if torch.is_tensor(v)]\n            return tuples\n\n        gen = parameter._named_members(get_members_fn=find_tensor_attributes)\n        first_tuple = next(gen)\n        return first_tuple[1].dtype\n\n\ndef load_state_dict(checkpoint_file: Union[str, os.PathLike], variant: Optional[str] = None):\n    \"\"\"\n    Reads a checkpoint file, returning properly formatted errors if they arise.\n    \"\"\"\n    try:\n        if os.path.basename(checkpoint_file) == _add_variant(WEIGHTS_NAME, variant):\n            return torch.load(checkpoint_file, map_location=\"cpu\")\n        else:\n            return safetensors.torch.load_file(checkpoint_file, device=\"cpu\")\n    except 
Exception as e:\n        try:\n            with open(checkpoint_file) as f:\n                if f.read().startswith(\"version\"):\n                    raise OSError(\n                        \"You seem to have cloned a repository without having git-lfs installed. Please install \"\n                        \"git-lfs and run `git lfs install` followed by `git lfs pull` in the folder \"\n                        \"you cloned.\"\n                    )\n                else:\n                    raise ValueError(\n                        f\"Unable to locate the file {checkpoint_file} which is necessary to load this pretrained \"\n                        \"model. Make sure you have saved the model properly.\"\n                    ) from e\n        except (UnicodeDecodeError, ValueError):\n            raise OSError(\n                f\"Unable to load weights from checkpoint file for '{checkpoint_file}' \"\n                f\"at '{checkpoint_file}'. \"\n                \"If you tried to load a PyTorch model from a TF 2.0 checkpoint, please set from_tf=True.\"\n            )\n\n\ndef _load_state_dict_into_model(model_to_load, state_dict):\n    # Convert old format to new format if needed from a PyTorch state_dict\n    # copy state_dict so _load_from_state_dict can modify it\n    state_dict = state_dict.copy()\n    error_msgs = []\n\n    # PyTorch's `_load_from_state_dict` does not copy parameters in a module's descendants\n    # so we need to apply the function recursively.\n    def load(module: torch.nn.Module, prefix=\"\"):\n        args = (state_dict, prefix, {}, True, [], [], error_msgs)\n        module._load_from_state_dict(*args)\n\n        for name, child in module._modules.items():\n            if child is not None:\n                load(child, prefix + name + \".\")\n\n    load(model_to_load)\n\n    return error_msgs\n\n\nclass ModelMixin(torch.nn.Module):\n    r\"\"\"\n    Base class for all models.\n\n    [`ModelMixin`] takes care of storing the configuration of the models and handles methods for loading, downloading\n    and saving models.\n\n        - **config_name** ([`str`]) -- A filename under which the model should be stored when calling\n          [`~models.ModelMixin.save_pretrained`].\n    \"\"\"\n    config_name = CONFIG_NAME\n    _automatically_saved_args = [\"_diffusers_version\", \"_class_name\", \"_name_or_path\"]\n    _supports_gradient_checkpointing = False\n\n    def __init__(self):\n        super().__init__()\n\n    def __getattr__(self, name: str) -> Any:\n        \"\"\"The only reason we overwrite `getattr` here is to gracefully deprecate accessing\n        config attributes directly. See https://github.com/huggingface/diffusers/pull/3129 We need to overwrite\n        __getattr__ here in addition so that we don't trigger `torch.nn.Module`'s __getattr__':\n        https://pytorch.org/docs/stable/_modules/torch/nn/modules/module.html#Module\n        \"\"\"\n\n        is_in_config = \"_internal_dict\" in self.__dict__ and hasattr(self.__dict__[\"_internal_dict\"], name)\n        is_attribute = name in self.__dict__\n\n        if is_in_config and not is_attribute:\n            deprecation_message = f\"Accessing config attribute `{name}` directly via '{type(self).__name__}' object attribute is deprecated. Please access '{name}' over '{type(self).__name__}'s config object instead, e.g. 
'unet.config.{name}'.\"\n            deprecate(\"direct config name access\", \"1.0.0\", deprecation_message, standard_warn=False, stacklevel=3)\n            return self._internal_dict[name]\n\n        # call PyTorch's https://pytorch.org/docs/stable/_modules/torch/nn/modules/module.html#Module\n        return super().__getattr__(name)\n\n    @property\n    def is_gradient_checkpointing(self) -> bool:\n        \"\"\"\n        Whether gradient checkpointing is activated for this model or not.\n\n        Note that in other frameworks this feature can be referred to as \"activation checkpointing\" or \"checkpoint\n        activations\".\n        \"\"\"\n        return any(hasattr(m, \"gradient_checkpointing\") and m.gradient_checkpointing for m in self.modules())\n\n    def enable_gradient_checkpointing(self):\n        \"\"\"\n        Activates gradient checkpointing for the current model.\n\n        Note that in other frameworks this feature can be referred to as \"activation checkpointing\" or \"checkpoint\n        activations\".\n        \"\"\"\n        if not self._supports_gradient_checkpointing:\n            raise ValueError(f\"{self.__class__.__name__} does not support gradient checkpointing.\")\n        self.apply(partial(self._set_gradient_checkpointing, value=True))\n\n    def disable_gradient_checkpointing(self):\n        \"\"\"\n        Deactivates gradient checkpointing for the current model.\n\n        Note that in other frameworks this feature can be referred to as \"activation checkpointing\" or \"checkpoint\n        activations\".\n        \"\"\"\n        if self._supports_gradient_checkpointing:\n            self.apply(partial(self._set_gradient_checkpointing, value=False))\n\n    def set_use_memory_efficient_attention_xformers(\n        self, valid: bool, attention_op: Optional[Callable] = None\n    ) -> None:\n        # Recursively walk through all the children.\n        # Any children which exposes the set_use_memory_efficient_attention_xformers method\n        # gets the message\n        def fn_recursive_set_mem_eff(module: torch.nn.Module):\n            if hasattr(module, \"set_use_memory_efficient_attention_xformers\"):\n                module.set_use_memory_efficient_attention_xformers(valid, attention_op)\n\n            for child in module.children():\n                fn_recursive_set_mem_eff(child)\n\n        for module in self.children():\n            if isinstance(module, torch.nn.Module):\n                fn_recursive_set_mem_eff(module)\n\n    def enable_xformers_memory_efficient_attention(self, attention_op: Optional[Callable] = None):\n        r\"\"\"\n        Enable memory efficient attention as implemented in xformers.\n\n        When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference\n        time. 
Speed up at training time is not guaranteed.\n\n        Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention\n        is used.\n\n        Parameters:\n            attention_op (`Callable`, *optional*):\n                Override the default `None` operator for use as `op` argument to the\n                [`memory_efficient_attention()`](https://facebookresearch.github.io/xformers/components/ops.html#xformers.ops.memory_efficient_attention)\n                function of xFormers.\n\n        Examples:\n\n        ```py\n        >>> import torch\n        >>> from diffusers import UNet2DConditionModel\n        >>> from xformers.ops import MemoryEfficientAttentionFlashAttentionOp\n\n        >>> model = UNet2DConditionModel.from_pretrained(\n        ...     \"stabilityai/stable-diffusion-2-1\", subfolder=\"unet\", torch_dtype=torch.float16\n        ... )\n        >>> model = model.to(\"cuda\")\n        >>> model.enable_xformers_memory_efficient_attention(attention_op=MemoryEfficientAttentionFlashAttentionOp)\n        ```\n        \"\"\"\n        self.set_use_memory_efficient_attention_xformers(True, attention_op)\n\n    def disable_xformers_memory_efficient_attention(self):\n        r\"\"\"\n        Disable memory efficient attention as implemented in xformers.\n        \"\"\"\n        self.set_use_memory_efficient_attention_xformers(False)\n\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        is_main_process: bool = True,\n        save_function: Callable = None,\n        safe_serialization: bool = False,\n        variant: Optional[str] = None,\n    ):\n        \"\"\"\n        Save a model and its configuration file to a directory, so that it can be re-loaded using the\n        `[`~models.ModelMixin.from_pretrained`]` class method.\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. Will be created if it doesn't exist.\n            is_main_process (`bool`, *optional*, defaults to `True`):\n                Whether the process calling this is the main process or not. Useful when in distributed training like\n                TPUs and need to call this function on all processes. In this case, set `is_main_process=True` only on\n                the main process to avoid race conditions.\n            save_function (`Callable`):\n                The function to use to save the state dictionary. Useful on distributed training like TPUs when one\n                need to replace `torch.save` by another method. 
Can be configured with the environment variable\n                `DIFFUSERS_SAVE_MODE`.\n            safe_serialization (`bool`, *optional*, defaults to `False`):\n                Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).\n            variant (`str`, *optional*):\n                If specified, weights are saved in the format pytorch_model.<variant>.bin.\n        \"\"\"\n        if safe_serialization and not is_safetensors_available():\n            raise ImportError(\"`safe_serialization` requires the `safetensors library: `pip install safetensors`.\")\n\n        if os.path.isfile(save_directory):\n            logger.error(f\"Provided path ({save_directory}) should be a directory, not a file\")\n            return\n\n        os.makedirs(save_directory, exist_ok=True)\n\n        model_to_save = self\n\n        # Attach architecture to the config\n        # Save the config\n        if is_main_process:\n            model_to_save.save_config(save_directory)\n\n        # Save the model\n        state_dict = model_to_save.state_dict()\n\n        weights_name = SAFETENSORS_WEIGHTS_NAME if safe_serialization else WEIGHTS_NAME\n        weights_name = _add_variant(weights_name, variant)\n\n        # Save the model\n        if safe_serialization:\n            safetensors.torch.save_file(\n                state_dict, os.path.join(save_directory, weights_name), metadata={\"format\": \"pt\"}\n            )\n        else:\n            torch.save(state_dict, os.path.join(save_directory, weights_name))\n\n        logger.info(f\"Model weights saved in {os.path.join(save_directory, weights_name)}\")\n\n    @classmethod\n    def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs):\n        r\"\"\"\n        Instantiate a pretrained pytorch model from a pre-trained model configuration.\n\n        The model is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated). To train\n        the model, you should first set it back in training mode with `model.train()`.\n\n        The warning *Weights from XXX not initialized from pretrained model* means that the weights of XXX do not come\n        pretrained with the rest of the model. It is up to you to train those weights with a downstream fine-tuning\n        task.\n\n        The warning *Weights from XXX not used in YYY* means that the layer XXX is not used by YYY, therefore those\n        weights are discarded.\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n\n                    - A string, the *model id* of a pretrained model hosted inside a model repo on huggingface.co.\n                      Valid model ids should have an organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing model weights saved using [`~ModelMixin.save_config`], e.g.,\n                      `./my_model_directory/`.\n\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            torch_dtype (`str` or `torch.dtype`, *optional*):\n                Override the default `torch.dtype` and load the model under this dtype. 
If `\"auto\"` is passed the dtype\n                will be automatically derived from the model's weights.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `diffusers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            from_flax (`bool`, *optional*, defaults to `False`):\n                Load the model weights from a Flax checkpoint save file.\n            subfolder (`str`, *optional*, defaults to `\"\"`):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information.\n            device_map (`str` or `Dict[str, Union[int, str, torch.device]]`, *optional*):\n                A map that specifies where each submodule should go. It doesn't need to be refined to each\n                parameter/buffer name, once a given module name is inside, every submodule of it will be sent to the\n                same device.\n\n                To have Accelerate compute the most optimized `device_map` automatically, set `device_map=\"auto\"`. For\n                more information about each option see [designing a device\n                map](https://hf.co/docs/accelerate/main/en/usage_guides/big_modeling#designing-a-device-map).\n            low_cpu_mem_usage (`bool`, *optional*, defaults to `True` if torch version >= 1.9.0 else `False`):\n                Speed up model loading by not initializing the weights and only loading the pre-trained weights. 
This\n                also tries to not use more than 1x model size in CPU memory (including peak memory) while loading the\n                model. This is only supported when torch version >= 1.9.0. If you are using an older version of torch,\n                setting this argument to `True` will raise an error.\n            variant (`str`, *optional*):\n                If specified, load weights from `variant` filename, *e.g.* pytorch_model.<variant>.bin. `variant` is\n                ignored when using `from_flax`.\n            use_safetensors (`bool`, *optional*, defaults to `None`):\n                If set to `None`, the `safetensors` weights will be downloaded if they're available **and** if the\n                `safetensors` library is installed. If set to `True`, the model will be forcibly loaded from\n                `safetensors` weights. If set to `False`, loading will *not* use `safetensors`.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n\n        <Tip>\n\n        Activate the special [\"offline-mode\"](https://huggingface.co/diffusers/installation.html#offline-mode) to use\n        this method in a firewalled environment.\n\n        </Tip>\n\n        \"\"\"\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        ignore_mismatched_sizes = kwargs.pop(\"ignore_mismatched_sizes\", False)\n        force_download = kwargs.pop(\"force_download\", False)\n        from_flax = kwargs.pop(\"from_flax\", False)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        output_loading_info = kwargs.pop(\"output_loading_info\", False)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        torch_dtype = kwargs.pop(\"torch_dtype\", None)\n        subfolder = kwargs.pop(\"subfolder\", None)\n        device_map = kwargs.pop(\"device_map\", None)\n        low_cpu_mem_usage = kwargs.pop(\"low_cpu_mem_usage\", _LOW_CPU_MEM_USAGE_DEFAULT)\n        variant = kwargs.pop(\"variant\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None)\n\n        if use_safetensors and not is_safetensors_available():\n            raise ValueError(\n                \"`use_safetensors`=True but safetensors is not installed. Please install safetensors with `pip install safetensors`.\"\n            )\n\n        allow_pickle = False\n        if use_safetensors is None:\n            use_safetensors = is_safetensors_available()\n            allow_pickle = True\n\n        if low_cpu_mem_usage and not is_accelerate_available():\n            low_cpu_mem_usage = False\n            logger.warning(\n                \"Cannot initialize model with low cpu memory usage because `accelerate` was not found in the\"\n                \" environment. Defaulting to `low_cpu_mem_usage=False`. It is strongly recommended to install\"\n                \" `accelerate` for faster and less memory-intensive model loading. You can do so with: \\n```\\npip\"\n                \" install accelerate\\n```\\n.\"\n            )\n\n        if device_map is not None and not is_accelerate_available():\n            raise NotImplementedError(\n                \"Loading and dispatching requires `accelerate`. 
Please make sure to install accelerate or set\"\n                \" `device_map=None`. You can install accelerate with `pip install accelerate`.\"\n            )\n\n        # Check if we can handle device_map and dispatching the weights\n        if device_map is not None and not is_torch_version(\">=\", \"1.9.0\"):\n            raise NotImplementedError(\n                \"Loading and dispatching requires torch >= 1.9.0. Please either update your PyTorch version or set\"\n                \" `device_map=None`.\"\n            )\n\n        if low_cpu_mem_usage is True and not is_torch_version(\">=\", \"1.9.0\"):\n            raise NotImplementedError(\n                \"Low memory initialization requires torch >= 1.9.0. Please either update your PyTorch version or set\"\n                \" `low_cpu_mem_usage=False`.\"\n            )\n\n        if low_cpu_mem_usage is False and device_map is not None:\n            raise ValueError(\n                f\"You cannot set `low_cpu_mem_usage` to `False` while using device_map={device_map} for loading and\"\n                \" dispatching. Please make sure to set `low_cpu_mem_usage=True`.\"\n            )\n\n        # Load config if we don't provide a configuration\n        config_path = pretrained_model_name_or_path\n\n        user_agent = {\n            \"diffusers\": __version__,\n            \"file_type\": \"model\",\n            \"framework\": \"pytorch\",\n        }\n\n        # load config\n        config, unused_kwargs, commit_hash = cls.load_config(\n            config_path,\n            cache_dir=cache_dir,\n            return_unused_kwargs=True,\n            return_commit_hash=True,\n            force_download=force_download,\n            resume_download=resume_download,\n            proxies=proxies,\n            local_files_only=local_files_only,\n            use_auth_token=use_auth_token,\n            revision=revision,\n            subfolder=subfolder,\n            device_map=device_map,\n            user_agent=user_agent,\n            **kwargs,\n        )\n\n        # load model\n        model_file = None\n        if from_flax:\n            model_file = _get_model_file(\n                pretrained_model_name_or_path,\n                weights_name=FLAX_WEIGHTS_NAME,\n                cache_dir=cache_dir,\n                force_download=force_download,\n                resume_download=resume_download,\n                proxies=proxies,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n                revision=revision,\n                subfolder=subfolder,\n                user_agent=user_agent,\n                commit_hash=commit_hash,\n            )\n            model = cls.from_config(config, **unused_kwargs)\n\n            # Convert the weights\n            from .modeling_pytorch_flax_utils import load_flax_checkpoint_in_pytorch_model\n\n            model = load_flax_checkpoint_in_pytorch_model(model, model_file)\n        else:\n            if use_safetensors:\n                try:\n                    model_file = _get_model_file(\n                        pretrained_model_name_or_path,\n                        weights_name=_add_variant(SAFETENSORS_WEIGHTS_NAME, variant),\n                        cache_dir=cache_dir,\n                        force_download=force_download,\n                        resume_download=resume_download,\n                        proxies=proxies,\n                        local_files_only=local_files_only,\n                        use_auth_token=use_auth_token,\n  
                      revision=revision,\n                        subfolder=subfolder,\n                        user_agent=user_agent,\n                        commit_hash=commit_hash,\n                    )\n                except IOError as e:\n                    if not allow_pickle:\n                        raise e\n                    pass\n            if model_file is None:\n                model_file = _get_model_file(\n                    pretrained_model_name_or_path,\n                    weights_name=_add_variant(WEIGHTS_NAME, variant),\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    resume_download=resume_download,\n                    proxies=proxies,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    revision=revision,\n                    subfolder=subfolder,\n                    user_agent=user_agent,\n                    commit_hash=commit_hash,\n                )\n\n            if low_cpu_mem_usage:\n                # Instantiate model with empty weights\n                with accelerate.init_empty_weights():\n                    model = cls.from_config(config, **unused_kwargs)\n\n                # if device_map is None, load the state dict and move the params from meta device to the cpu\n                if device_map is None:\n                    param_device = \"cpu\"\n                    state_dict = load_state_dict(model_file, variant=variant)\n                    model._convert_deprecated_attention_blocks(state_dict)\n                    # move the params from meta device to cpu\n                    missing_keys = set(model.state_dict().keys()) - set(state_dict.keys())\n                    if len(missing_keys) > 0:\n                        raise ValueError(\n                            f\"Cannot load {cls} from {pretrained_model_name_or_path} because the following keys are\"\n                            f\" missing: \\n {', '.join(missing_keys)}. \\n Please make sure to pass\"\n                            \" `low_cpu_mem_usage=False` and `device_map=None` if you want to randomly initialize\"\n                            \" those weights or else make sure your checkpoint file is correct.\"\n                        )\n\n                    empty_state_dict = model.state_dict()\n                    for param_name, param in state_dict.items():\n                        accepts_dtype = \"dtype\" in set(\n                            inspect.signature(set_module_tensor_to_device).parameters.keys()\n                        )\n\n                        if empty_state_dict[param_name].shape != param.shape:\n                            raise ValueError(\n                                f\"Cannot load {pretrained_model_name_or_path} because {param_name} expected shape {empty_state_dict[param_name].shape}, but got {param.shape}. If you want to instead overwrite randomly initialized weights, please make sure to pass both `low_cpu_mem_usage=False` and `ignore_mismatched_sizes=True`. 
For more information, see also: https://github.com/huggingface/diffusers/issues/1619#issuecomment-1345604389 as an example.\"\n                            )\n\n                        if accepts_dtype:\n                            set_module_tensor_to_device(\n                                model, param_name, param_device, value=param, dtype=torch_dtype\n                            )\n                        else:\n                            set_module_tensor_to_device(model, param_name, param_device, value=param)\n                else:  # else let accelerate handle loading and dispatching.\n                    # Load weights and dispatch according to the device_map\n                    # by default the device_map is None and the weights are loaded on the CPU\n                    accelerate.load_checkpoint_and_dispatch(model, model_file, device_map, dtype=torch_dtype)\n\n                loading_info = {\n                    \"missing_keys\": [],\n                    \"unexpected_keys\": [],\n                    \"mismatched_keys\": [],\n                    \"error_msgs\": [],\n                }\n            else:\n                model = cls.from_config(config, **unused_kwargs)\n\n                state_dict = load_state_dict(model_file, variant=variant)\n                model._convert_deprecated_attention_blocks(state_dict)\n\n                model, missing_keys, unexpected_keys, mismatched_keys, error_msgs = cls._load_pretrained_model(\n                    model,\n                    state_dict,\n                    model_file,\n                    pretrained_model_name_or_path,\n                    ignore_mismatched_sizes=ignore_mismatched_sizes,\n                )\n\n                loading_info = {\n                    \"missing_keys\": missing_keys,\n                    \"unexpected_keys\": unexpected_keys,\n                    \"mismatched_keys\": mismatched_keys,\n                    \"error_msgs\": error_msgs,\n                }\n\n        if torch_dtype is not None and not isinstance(torch_dtype, torch.dtype):\n            raise ValueError(\n                f\"{torch_dtype} needs to be of type `torch.dtype`, e.g. 
`torch.float16`, but is {type(torch_dtype)}.\"\n            )\n        elif torch_dtype is not None:\n            model = model.to(torch_dtype)\n\n        model.register_to_config(_name_or_path=pretrained_model_name_or_path)\n\n        # Set model in evaluation mode to deactivate DropOut modules by default\n        model.eval()\n        if output_loading_info:\n            return model, loading_info\n\n        return model\n\n    @classmethod\n    def _load_pretrained_model(\n        cls,\n        model,\n        state_dict,\n        resolved_archive_file,\n        pretrained_model_name_or_path,\n        ignore_mismatched_sizes=False,\n    ):\n        # Retrieve missing & unexpected_keys\n        model_state_dict = model.state_dict()\n        loaded_keys = list(state_dict.keys())\n\n        expected_keys = list(model_state_dict.keys())\n\n        original_loaded_keys = loaded_keys\n\n        missing_keys = list(set(expected_keys) - set(loaded_keys))\n        unexpected_keys = list(set(loaded_keys) - set(expected_keys))\n\n        # Make sure we are able to load base models as well as derived models (with heads)\n        model_to_load = model\n\n        def _find_mismatched_keys(\n            state_dict,\n            model_state_dict,\n            loaded_keys,\n            ignore_mismatched_sizes,\n        ):\n            mismatched_keys = []\n            if ignore_mismatched_sizes:\n                for checkpoint_key in loaded_keys:\n                    model_key = checkpoint_key\n\n                    if (\n                        model_key in model_state_dict\n                        and state_dict[checkpoint_key].shape != model_state_dict[model_key].shape\n                    ):\n                        mismatched_keys.append(\n                            (checkpoint_key, state_dict[checkpoint_key].shape, model_state_dict[model_key].shape)\n                        )\n                        del state_dict[checkpoint_key]\n            return mismatched_keys\n\n        if state_dict is not None:\n            # Whole checkpoint\n            mismatched_keys = _find_mismatched_keys(\n                state_dict,\n                model_state_dict,\n                original_loaded_keys,\n                ignore_mismatched_sizes,\n            )\n            error_msgs = _load_state_dict_into_model(model_to_load, state_dict)\n\n        if len(error_msgs) > 0:\n            error_msg = \"\\n\\t\".join(error_msgs)\n            if \"size mismatch\" in error_msg:\n                error_msg += (\n                    \"\\n\\tYou may consider adding `ignore_mismatched_sizes=True` in the model `from_pretrained` method.\"\n                )\n            raise RuntimeError(f\"Error(s) in loading state_dict for {model.__class__.__name__}:\\n\\t{error_msg}\")\n\n        if len(unexpected_keys) > 0:\n            logger.warning(\n                f\"Some weights of the model checkpoint at {pretrained_model_name_or_path} were not used when\"\n                f\" initializing {model.__class__.__name__}: {unexpected_keys}\\n- This IS expected if you are\"\n                f\" initializing {model.__class__.__name__} from the checkpoint of a model trained on another task\"\n                \" or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a\"\n                \" BertForPreTraining model).\\n- This IS NOT expected if you are initializing\"\n                f\" {model.__class__.__name__} from the checkpoint of a model that you expect to be exactly\"\n                \" identical (initializing a BertForSequenceClassification model from a\"\n                \" BertForSequenceClassification model).\"\n            )\n        else:\n            logger.info(f\"All model checkpoint weights were used when initializing {model.__class__.__name__}.\\n\")\n        if len(missing_keys) > 0:\n            logger.warning(\n                f\"Some weights of {model.__class__.__name__} were not initialized from the model checkpoint at\"\n                f\" {pretrained_model_name_or_path} and are newly initialized: {missing_keys}\\nYou should probably\"\n                \" TRAIN this model on a down-stream task to be able to use it for predictions and inference.\"\n            )\n        elif len(mismatched_keys) == 0:\n            logger.info(\n                f\"All the weights of {model.__class__.__name__} were initialized from the model checkpoint at\"\n                f\" {pretrained_model_name_or_path}.\\nIf your task is similar to the task the model of the\"\n                f\" checkpoint was trained on, you can already use {model.__class__.__name__} for predictions\"\n                \" without further training.\"\n            )\n        if len(mismatched_keys) > 0:\n            mismatched_warning = \"\\n\".join(\n                [\n                    f\"- {key}: found shape {shape1} in the checkpoint and {shape2} in the model instantiated\"\n                    for key, shape1, shape2 in mismatched_keys\n                ]\n            )\n            logger.warning(\n                f\"Some weights of {model.__class__.__name__} were not initialized from the model checkpoint at\"\n                f\" {pretrained_model_name_or_path} and are newly initialized because the shapes did not\"\n                f\" match:\\n{mismatched_warning}\\nYou should probably TRAIN this model on a down-stream task to be\"\n                \" able to use it for predictions and inference.\"\n            )\n\n        return model, missing_keys, unexpected_keys, mismatched_keys, error_msgs\n\n    @property\n    def device(self) -> device:\n        \"\"\"\n        `torch.device`: The device on which the module is (assuming that all the module parameters are on the same\n        device).\n        \"\"\"\n        return get_parameter_device(self)\n\n    @property\n    def dtype(self) -> torch.dtype:\n        \"\"\"\n        `torch.dtype`: The dtype of the module (assuming that all the module parameters have the same dtype).\n        \"\"\"\n        return get_parameter_dtype(self)\n\n    def num_parameters(self, only_trainable: bool = False, exclude_embeddings: bool = False) -> int:\n        \"\"\"\n        Get number of (optionally, trainable or non-embeddings) parameters in the module.\n\n        Args:\n            only_trainable (`bool`, *optional*, defaults to `False`):\n                Whether or not to return only the number of trainable parameters\n\n            exclude_embeddings (`bool`, *optional*, defaults to `False`):\n                Whether or not to return only the number of non-embeddings parameters\n\n        Returns:\n            `int`: The number of parameters.\n        \"\"\"\n\n        if exclude_embeddings:\n            embedding_param_names = [\n                f\"{name}.weight\"\n  
              for name, module_type in self.named_modules()\n                if isinstance(module_type, torch.nn.Embedding)\n            ]\n            non_embedding_parameters = [\n                parameter for name, parameter in self.named_parameters() if name not in embedding_param_names\n            ]\n            return sum(p.numel() for p in non_embedding_parameters if p.requires_grad or not only_trainable)\n        else:\n            return sum(p.numel() for p in self.parameters() if p.requires_grad or not only_trainable)\n\n    def _convert_deprecated_attention_blocks(self, state_dict):\n        deprecated_attention_block_paths = []\n\n        def recursive_find_attn_block(name, module):\n            if hasattr(module, \"_from_deprecated_attn_block\") and module._from_deprecated_attn_block:\n                deprecated_attention_block_paths.append(name)\n\n            for sub_name, sub_module in module.named_children():\n                sub_name = sub_name if name == \"\" else f\"{name}.{sub_name}\"\n                recursive_find_attn_block(sub_name, sub_module)\n\n        recursive_find_attn_block(\"\", self)\n\n        # NOTE: we have to check if the deprecated parameters are in the state dict\n        # because it is possible we are loading from a state dict that was already\n        # converted\n\n        for path in deprecated_attention_block_paths:\n            # group_norm path stays the same\n\n            # query -> to_q\n            if f\"{path}.query.weight\" in state_dict:\n                state_dict[f\"{path}.to_q.weight\"] = state_dict.pop(f\"{path}.query.weight\")\n            if f\"{path}.query.bias\" in state_dict:\n                state_dict[f\"{path}.to_q.bias\"] = state_dict.pop(f\"{path}.query.bias\")\n\n            # key -> to_k\n            if f\"{path}.key.weight\" in state_dict:\n                state_dict[f\"{path}.to_k.weight\"] = state_dict.pop(f\"{path}.key.weight\")\n            if f\"{path}.key.bias\" in state_dict:\n                state_dict[f\"{path}.to_k.bias\"] = state_dict.pop(f\"{path}.key.bias\")\n\n            # value -> to_v\n            if f\"{path}.value.weight\" in state_dict:\n                state_dict[f\"{path}.to_v.weight\"] = state_dict.pop(f\"{path}.value.weight\")\n            if f\"{path}.value.bias\" in state_dict:\n                state_dict[f\"{path}.to_v.bias\"] = state_dict.pop(f\"{path}.value.bias\")\n\n            # proj_attn -> to_out.0\n            if f\"{path}.proj_attn.weight\" in state_dict:\n                state_dict[f\"{path}.to_out.0.weight\"] = state_dict.pop(f\"{path}.proj_attn.weight\")\n            if f\"{path}.proj_attn.bias\" in state_dict:\n                state_dict[f\"{path}.to_out.0.bias\"] = state_dict.pop(f\"{path}.proj_attn.bias\")\n"
  },
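The `save_pretrained`/`from_pretrained` pair above round-trips a model directory (config JSON plus a weights file). A minimal sketch of that flow, assuming `safetensors` is installed and using `google/ddpm-celebahq-256` from the docstring; the local target directory name is an illustrative assumption, not something fixed by this file:

```py
>>> import torch
>>> from diffusers import UNet2DModel

>>> # Download the denoising UNet from a pipeline repo; its weights live in the `unet` subfolder.
>>> model = UNet2DModel.from_pretrained("google/ddpm-celebahq-256", subfolder="unet")

>>> # Write config + weights to a local directory; `safe_serialization=True` stores a
>>> # `.safetensors` file instead of a pickled `.bin` checkpoint.
>>> model.save_pretrained("./ddpm-celebahq-unet", safe_serialization=True)

>>> # Reload from disk, optionally casting to half precision at load time.
>>> reloaded = UNet2DModel.from_pretrained("./ddpm-celebahq-unet", torch_dtype=torch.float16)
```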
  {
    "path": "diffusers/models/prior_transformer.py",
    "content": "from dataclasses import dataclass\nfrom typing import Optional, Union\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .attention import BasicTransformerBlock\nfrom .embeddings import TimestepEmbedding, Timesteps\nfrom .modeling_utils import ModelMixin\n\n\n@dataclass\nclass PriorTransformerOutput(BaseOutput):\n    \"\"\"\n    Args:\n        predicted_image_embedding (`torch.FloatTensor` of shape `(batch_size, embedding_dim)`):\n            The predicted CLIP image embedding conditioned on the CLIP text embedding input.\n    \"\"\"\n\n    predicted_image_embedding: torch.FloatTensor\n\n\nclass PriorTransformer(ModelMixin, ConfigMixin):\n    \"\"\"\n    The prior transformer from unCLIP is used to predict CLIP image embeddings from CLIP text embeddings. Note that the\n    transformer predicts the image embeddings through a denoising diffusion process.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the models (such as downloading or saving, etc.)\n\n    For more details, see the original paper: https://arxiv.org/abs/2204.06125\n\n    Parameters:\n        num_attention_heads (`int`, *optional*, defaults to 32): The number of heads to use for multi-head attention.\n        attention_head_dim (`int`, *optional*, defaults to 64): The number of channels in each head.\n        num_layers (`int`, *optional*, defaults to 20): The number of layers of Transformer blocks to use.\n        embedding_dim (`int`, *optional*, defaults to 768): The dimension of the CLIP embeddings. Note that CLIP\n            image embeddings and text embeddings are both the same dimension.\n        num_embeddings (`int`, *optional*, defaults to 77): The max number of clip embeddings allowed. I.e. the\n            length of the prompt after it has been tokenized.\n        additional_embeddings (`int`, *optional*, defaults to 4): The number of additional tokens appended to the\n            projected hidden_states. 
The actual length of the used hidden_states is `num_embeddings +\n            additional_embeddings`.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        num_attention_heads: int = 32,\n        attention_head_dim: int = 64,\n        num_layers: int = 20,\n        embedding_dim: int = 768,\n        num_embeddings=77,\n        additional_embeddings=4,\n        dropout: float = 0.0,\n    ):\n        super().__init__()\n        self.num_attention_heads = num_attention_heads\n        self.attention_head_dim = attention_head_dim\n        inner_dim = num_attention_heads * attention_head_dim\n        self.additional_embeddings = additional_embeddings\n\n        self.time_proj = Timesteps(inner_dim, True, 0)\n        self.time_embedding = TimestepEmbedding(inner_dim, inner_dim)\n\n        self.proj_in = nn.Linear(embedding_dim, inner_dim)\n\n        self.embedding_proj = nn.Linear(embedding_dim, inner_dim)\n        self.encoder_hidden_states_proj = nn.Linear(embedding_dim, inner_dim)\n\n        self.positional_embedding = nn.Parameter(torch.zeros(1, num_embeddings + additional_embeddings, inner_dim))\n\n        self.prd_embedding = nn.Parameter(torch.zeros(1, 1, inner_dim))\n\n        self.transformer_blocks = nn.ModuleList(\n            [\n                BasicTransformerBlock(\n                    inner_dim,\n                    num_attention_heads,\n                    attention_head_dim,\n                    dropout=dropout,\n                    activation_fn=\"gelu\",\n                    attention_bias=True,\n                )\n                for d in range(num_layers)\n            ]\n        )\n\n        self.norm_out = nn.LayerNorm(inner_dim)\n        self.proj_to_clip_embeddings = nn.Linear(inner_dim, embedding_dim)\n\n        causal_attention_mask = torch.full(\n            [num_embeddings + additional_embeddings, num_embeddings + additional_embeddings], -10000.0\n        )\n        causal_attention_mask.triu_(1)\n        causal_attention_mask = causal_attention_mask[None, ...]\n        self.register_buffer(\"causal_attention_mask\", causal_attention_mask, persistent=False)\n\n        self.clip_mean = nn.Parameter(torch.zeros(1, embedding_dim))\n        self.clip_std = nn.Parameter(torch.zeros(1, embedding_dim))\n\n    def forward(\n        self,\n        hidden_states,\n        timestep: Union[torch.Tensor, float, int],\n        proj_embedding: torch.FloatTensor,\n        encoder_hidden_states: torch.FloatTensor,\n        attention_mask: Optional[torch.BoolTensor] = None,\n        return_dict: bool = True,\n    ):\n        \"\"\"\n        Args:\n            hidden_states (`torch.FloatTensor` of shape `(batch_size, embedding_dim)`):\n                x_t, the currently predicted image embeddings.\n            timestep (`torch.long`):\n                Current denoising step.\n            proj_embedding (`torch.FloatTensor` of shape `(batch_size, embedding_dim)`):\n                Projected embedding vector the denoising process is conditioned on.\n            encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, num_embeddings, embedding_dim)`):\n                Hidden states of the text embeddings the denoising process is conditioned on.\n            attention_mask (`torch.BoolTensor` of shape `(batch_size, num_embeddings)`):\n                Text mask for the text embeddings.\n            return_dict (`bool`, *optional*, defaults to `True`):\n               
 Whether or not to return a [`models.prior_transformer.PriorTransformerOutput`] instead of a plain\n                tuple.\n\n        Returns:\n            [`~models.prior_transformer.PriorTransformerOutput`] or `tuple`:\n            [`~models.prior_transformer.PriorTransformerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        batch_size = hidden_states.shape[0]\n\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            timesteps = torch.tensor([timesteps], dtype=torch.long, device=hidden_states.device)\n        elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(hidden_states.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        timesteps = timesteps * torch.ones(batch_size, dtype=timesteps.dtype, device=timesteps.device)\n\n        timesteps_projected = self.time_proj(timesteps)\n\n        # timesteps does not contain any weights and will always return f32 tensors\n        # but time_embedding might be fp16, so we need to cast here.\n        timesteps_projected = timesteps_projected.to(dtype=self.dtype)\n        time_embeddings = self.time_embedding(timesteps_projected)\n\n        proj_embeddings = self.embedding_proj(proj_embedding)\n        encoder_hidden_states = self.encoder_hidden_states_proj(encoder_hidden_states)\n        hidden_states = self.proj_in(hidden_states)\n        prd_embedding = self.prd_embedding.to(hidden_states.dtype).expand(batch_size, -1, -1)\n        positional_embeddings = self.positional_embedding.to(hidden_states.dtype)\n\n        hidden_states = torch.cat(\n            [\n                encoder_hidden_states,\n                proj_embeddings[:, None, :],\n                time_embeddings[:, None, :],\n                hidden_states[:, None, :],\n                prd_embedding,\n            ],\n            dim=1,\n        )\n\n        hidden_states = hidden_states + positional_embeddings\n\n        if attention_mask is not None:\n            attention_mask = (1 - attention_mask.to(hidden_states.dtype)) * -10000.0\n            attention_mask = F.pad(attention_mask, (0, self.additional_embeddings), value=0.0)\n            attention_mask = (attention_mask[:, None, :] + self.causal_attention_mask).to(hidden_states.dtype)\n            attention_mask = attention_mask.repeat_interleave(self.config.num_attention_heads, dim=0)\n\n        for block in self.transformer_blocks:\n            hidden_states = block(hidden_states, attention_mask=attention_mask)\n\n        hidden_states = self.norm_out(hidden_states)\n        hidden_states = hidden_states[:, -1]\n        predicted_image_embedding = self.proj_to_clip_embeddings(hidden_states)\n\n        if not return_dict:\n            return (predicted_image_embedding,)\n\n        return PriorTransformerOutput(predicted_image_embedding=predicted_image_embedding)\n\n    def post_process_latents(self, prior_latents):\n        prior_latents = (prior_latents * self.clip_std) + self.clip_mean\n        return prior_latents\n"
  },
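To make the tensor shapes in the `PriorTransformer.forward` docstring concrete, here is a small smoke-test sketch; the tiny head, layer, and embedding sizes are assumptions chosen only to keep the module cheap to instantiate, not values used by any released checkpoint:

```py
>>> import torch
>>> from diffusers.models.prior_transformer import PriorTransformer

>>> # Deliberately tiny config: inner_dim = 2 * 4 = 8, embedding_dim = 8, default num_embeddings = 77.
>>> prior = PriorTransformer(num_attention_heads=2, attention_head_dim=4, num_layers=2, embedding_dim=8)

>>> batch_size, embedding_dim, num_embeddings = 2, 8, 77
>>> out = prior(
...     hidden_states=torch.randn(batch_size, embedding_dim),  # x_t, the noisy image embedding
...     timestep=10,  # current denoising step
...     proj_embedding=torch.randn(batch_size, embedding_dim),  # projected conditioning embedding
...     encoder_hidden_states=torch.randn(batch_size, num_embeddings, embedding_dim),
...     attention_mask=torch.ones(batch_size, num_embeddings, dtype=torch.bool),
... )
>>> out.predicted_image_embedding.shape
torch.Size([2, 8])
```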
  {
    "path": "diffusers/models/resnet.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n# `TemporalConvLayer` Copyright 2023 Alibaba DAMO-VILAB, The ModelScope Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom functools import partial\nfrom typing import Optional\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom .attention import AdaGroupNorm\n\n\nclass Upsample1D(nn.Module):\n    \"\"\"A 1D upsampling layer with an optional convolution.\n\n    Parameters:\n        channels (`int`):\n            number of channels in the inputs and outputs.\n        use_conv (`bool`, default `False`):\n            option to use a convolution.\n        use_conv_transpose (`bool`, default `False`):\n            option to use a convolution transpose.\n        out_channels (`int`, optional):\n            number of output channels. Defaults to `channels`.\n    \"\"\"\n\n    def __init__(self, channels, use_conv=False, use_conv_transpose=False, out_channels=None, name=\"conv\"):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.use_conv_transpose = use_conv_transpose\n        self.name = name\n\n        self.conv = None\n        if use_conv_transpose:\n            self.conv = nn.ConvTranspose1d(channels, self.out_channels, 4, 2, 1)\n        elif use_conv:\n            self.conv = nn.Conv1d(self.channels, self.out_channels, 3, padding=1)\n\n    def forward(self, x):\n        assert x.shape[1] == self.channels\n        if self.use_conv_transpose:\n            return self.conv(x)\n\n        x = F.interpolate(x, scale_factor=2.0, mode=\"nearest\")\n\n        if self.use_conv:\n            x = self.conv(x)\n\n        return x\n\n\nclass Downsample1D(nn.Module):\n    \"\"\"A 1D downsampling layer with an optional convolution.\n\n    Parameters:\n        channels (`int`):\n            number of channels in the inputs and outputs.\n        use_conv (`bool`, default `False`):\n            option to use a convolution.\n        out_channels (`int`, optional):\n            number of output channels. 
Defaults to `channels`.\n        padding (`int`, default `1`):\n            padding for the convolution.\n    \"\"\"\n\n    def __init__(self, channels, use_conv=False, out_channels=None, padding=1, name=\"conv\"):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.padding = padding\n        stride = 2\n        self.name = name\n\n        if use_conv:\n            self.conv = nn.Conv1d(self.channels, self.out_channels, 3, stride=stride, padding=padding)\n        else:\n            assert self.channels == self.out_channels\n            self.conv = nn.AvgPool1d(kernel_size=stride, stride=stride)\n\n    def forward(self, x):\n        assert x.shape[1] == self.channels\n        return self.conv(x)\n\n\nclass Upsample2D(nn.Module):\n    \"\"\"A 2D upsampling layer with an optional convolution.\n\n    Parameters:\n        channels (`int`):\n            number of channels in the inputs and outputs.\n        use_conv (`bool`, default `False`):\n            option to use a convolution.\n        use_conv_transpose (`bool`, default `False`):\n            option to use a convolution transpose.\n        out_channels (`int`, optional):\n            number of output channels. Defaults to `channels`.\n    \"\"\"\n\n    def __init__(self, channels, use_conv=False, use_conv_transpose=False, out_channels=None, name=\"conv\"):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.use_conv_transpose = use_conv_transpose\n        self.name = name\n\n        conv = None\n        if use_conv_transpose:\n            conv = nn.ConvTranspose2d(channels, self.out_channels, 4, 2, 1)\n        elif use_conv:\n            conv = nn.Conv2d(self.channels, self.out_channels, 3, padding=1)\n\n        # TODO(Suraj, Patrick) - clean up after weight dicts are correctly renamed\n        if name == \"conv\":\n            self.conv = conv\n        else:\n            self.Conv2d_0 = conv\n\n    def forward(self, hidden_states, output_size=None):\n        assert hidden_states.shape[1] == self.channels\n\n        if self.use_conv_transpose:\n            return self.conv(hidden_states)\n\n        # Cast to float32 to as 'upsample_nearest2d_out_frame' op does not support bfloat16\n        # TODO(Suraj): Remove this cast once the issue is fixed in PyTorch\n        # https://github.com/pytorch/pytorch/issues/86679\n        dtype = hidden_states.dtype\n        if dtype == torch.bfloat16:\n            hidden_states = hidden_states.to(torch.float32)\n\n        # upsample_nearest_nhwc fails with large batch sizes. 
see https://github.com/huggingface/diffusers/issues/984\n        if hidden_states.shape[0] >= 64:\n            hidden_states = hidden_states.contiguous()\n\n        # if `output_size` is passed we force the interpolation output\n        # size and do not make use of `scale_factor=2`\n        if output_size is None:\n            hidden_states = F.interpolate(hidden_states, scale_factor=2.0, mode=\"nearest\")\n        else:\n            hidden_states = F.interpolate(hidden_states, size=output_size, mode=\"nearest\")\n\n        # If the input is bfloat16, we cast back to bfloat16\n        if dtype == torch.bfloat16:\n            hidden_states = hidden_states.to(dtype)\n\n        # TODO(Suraj, Patrick) - clean up after weight dicts are correctly renamed\n        if self.use_conv:\n            if self.name == \"conv\":\n                hidden_states = self.conv(hidden_states)\n            else:\n                hidden_states = self.Conv2d_0(hidden_states)\n\n        return hidden_states\n\n\nclass Downsample2D(nn.Module):\n    \"\"\"A 2D downsampling layer with an optional convolution.\n\n    Parameters:\n        channels (`int`):\n            number of channels in the inputs and outputs.\n        use_conv (`bool`, default `False`):\n            option to use a convolution.\n        out_channels (`int`, optional):\n            number of output channels. Defaults to `channels`.\n        padding (`int`, default `1`):\n            padding for the convolution.\n    \"\"\"\n\n    def __init__(self, channels, use_conv=False, out_channels=None, padding=1, name=\"conv\"):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.padding = padding\n        stride = 2\n        self.name = name\n\n        if use_conv:\n            conv = nn.Conv2d(self.channels, self.out_channels, 3, stride=stride, padding=padding)\n        else:\n            assert self.channels == self.out_channels\n            conv = nn.AvgPool2d(kernel_size=stride, stride=stride)\n\n        # TODO(Suraj, Patrick) - clean up after weight dicts are correctly renamed\n        if name == \"conv\":\n            self.Conv2d_0 = conv\n            self.conv = conv\n        elif name == \"Conv2d_0\":\n            self.conv = conv\n        else:\n            self.conv = conv\n\n    def forward(self, hidden_states):\n        assert hidden_states.shape[1] == self.channels\n        if self.use_conv and self.padding == 0:\n            pad = (0, 1, 0, 1)\n            hidden_states = F.pad(hidden_states, pad, mode=\"constant\", value=0)\n\n        assert hidden_states.shape[1] == self.channels\n        hidden_states = self.conv(hidden_states)\n\n        return hidden_states\n\n\nclass FirUpsample2D(nn.Module):\n    \"\"\"A 2D FIR upsampling layer with an optional convolution.\n\n    Parameters:\n        channels (`int`):\n            number of channels in the inputs and outputs.\n        use_conv (`bool`, default `False`):\n            option to use a convolution.\n        out_channels (`int`, optional):\n            number of output channels. 
Defaults to `channels`.\n        fir_kernel (`tuple`, default `(1, 3, 3, 1)`):\n            kernel for the FIR filter.\n    \"\"\"\n\n    def __init__(self, channels=None, out_channels=None, use_conv=False, fir_kernel=(1, 3, 3, 1)):\n        super().__init__()\n        out_channels = out_channels if out_channels else channels\n        if use_conv:\n            self.Conv2d_0 = nn.Conv2d(channels, out_channels, kernel_size=3, stride=1, padding=1)\n        self.use_conv = use_conv\n        self.fir_kernel = fir_kernel\n        self.out_channels = out_channels\n\n    def _upsample_2d(self, hidden_states, weight=None, kernel=None, factor=2, gain=1):\n        \"\"\"Fused `upsample_2d()` followed by `Conv2d()`.\n\n        Padding is performed only once at the beginning, not between the operations. The fused op is considerably more\n        efficient than performing the same calculation using standard TensorFlow ops. It supports gradients of\n        arbitrary order.\n\n        Args:\n            hidden_states: Input tensor of the shape `[N, C, H, W]` or `[N, H, W, C]`.\n            weight: Weight tensor of the shape `[filterH, filterW, inChannels,\n                outChannels]`. Grouped convolution can be performed by `inChannels = x.shape[0] // numGroups`.\n            kernel: FIR filter of the shape `[firH, firW]` or `[firN]`\n                (separable). The default is `[1] * factor`, which corresponds to nearest-neighbor upsampling.\n            factor: Integer upsampling factor (default: 2).\n            gain: Scaling factor for signal magnitude (default: 1.0).\n\n        Returns:\n            output: Tensor of the shape `[N, C, H * factor, W * factor]` or `[N, H * factor, W * factor, C]`, and same\n            datatype as `hidden_states`.\n        \"\"\"\n\n        assert isinstance(factor, int) and factor >= 1\n\n        # Setup filter kernel.\n        if kernel is None:\n            kernel = [1] * factor\n\n        # setup kernel\n        kernel = torch.tensor(kernel, dtype=torch.float32)\n        if kernel.ndim == 1:\n            kernel = torch.outer(kernel, kernel)\n        kernel /= torch.sum(kernel)\n\n        kernel = kernel * (gain * (factor**2))\n\n        if self.use_conv:\n            convH = weight.shape[2]\n            convW = weight.shape[3]\n            inC = weight.shape[1]\n\n            pad_value = (kernel.shape[0] - factor) - (convW - 1)\n\n            stride = (factor, factor)\n            # Determine data dimensions.\n            output_shape = (\n                (hidden_states.shape[2] - 1) * factor + convH,\n                (hidden_states.shape[3] - 1) * factor + convW,\n            )\n            output_padding = (\n                output_shape[0] - (hidden_states.shape[2] - 1) * stride[0] - convH,\n                output_shape[1] - (hidden_states.shape[3] - 1) * stride[1] - convW,\n            )\n            assert output_padding[0] >= 0 and output_padding[1] >= 0\n            num_groups = hidden_states.shape[1] // inC\n\n            # Transpose weights.\n            weight = torch.reshape(weight, (num_groups, -1, inC, convH, convW))\n            weight = torch.flip(weight, dims=[3, 4]).permute(0, 2, 1, 3, 4)\n            weight = torch.reshape(weight, (num_groups * inC, -1, convH, convW))\n\n            inverse_conv = F.conv_transpose2d(\n                hidden_states, weight, stride=stride, output_padding=output_padding, padding=0\n            )\n\n            output = upfirdn2d_native(\n                inverse_conv,\n                torch.tensor(kernel, 
device=inverse_conv.device),\n                pad=((pad_value + 1) // 2 + factor - 1, pad_value // 2 + 1),\n            )\n        else:\n            pad_value = kernel.shape[0] - factor\n            output = upfirdn2d_native(\n                hidden_states,\n                torch.tensor(kernel, device=hidden_states.device),\n                up=factor,\n                pad=((pad_value + 1) // 2 + factor - 1, pad_value // 2),\n            )\n\n        return output\n\n    def forward(self, hidden_states):\n        if self.use_conv:\n            height = self._upsample_2d(hidden_states, self.Conv2d_0.weight, kernel=self.fir_kernel)\n            height = height + self.Conv2d_0.bias.reshape(1, -1, 1, 1)\n        else:\n            height = self._upsample_2d(hidden_states, kernel=self.fir_kernel, factor=2)\n\n        return height\n\n\nclass FirDownsample2D(nn.Module):\n    \"\"\"A 2D FIR downsampling layer with an optional convolution.\n\n    Parameters:\n        channels (`int`):\n            number of channels in the inputs and outputs.\n        use_conv (`bool`, default `False`):\n            option to use a convolution.\n        out_channels (`int`, optional):\n            number of output channels. Defaults to `channels`.\n        fir_kernel (`tuple`, default `(1, 3, 3, 1)`):\n            kernel for the FIR filter.\n    \"\"\"\n\n    def __init__(self, channels=None, out_channels=None, use_conv=False, fir_kernel=(1, 3, 3, 1)):\n        super().__init__()\n        out_channels = out_channels if out_channels else channels\n        if use_conv:\n            self.Conv2d_0 = nn.Conv2d(channels, out_channels, kernel_size=3, stride=1, padding=1)\n        self.fir_kernel = fir_kernel\n        self.use_conv = use_conv\n        self.out_channels = out_channels\n\n    def _downsample_2d(self, hidden_states, weight=None, kernel=None, factor=2, gain=1):\n        \"\"\"Fused `Conv2d()` followed by `downsample_2d()`.\n        Padding is performed only once at the beginning, not between the operations. The fused op is considerably more\n        efficient than performing the same calculation using standard TensorFlow ops. It supports gradients of\n        arbitrary order.\n\n        Args:\n            hidden_states: Input tensor of the shape `[N, C, H, W]` or `[N, H, W, C]`.\n            weight:\n                Weight tensor of the shape `[filterH, filterW, inChannels, outChannels]`. Grouped convolution can be\n                performed by `inChannels = x.shape[0] // numGroups`.\n            kernel: FIR filter of the shape `[firH, firW]` or `[firN]` (separable). 
The default is `[1] *\n            factor`, which corresponds to average pooling.\n            factor: Integer downsampling factor (default: 2).\n            gain: Scaling factor for signal magnitude (default: 1.0).\n\n        Returns:\n            output: Tensor of the shape `[N, C, H // factor, W // factor]` or `[N, H // factor, W // factor, C]`, and\n            same datatype as `x`.\n        \"\"\"\n\n        assert isinstance(factor, int) and factor >= 1\n        if kernel is None:\n            kernel = [1] * factor\n\n        # setup kernel\n        kernel = torch.tensor(kernel, dtype=torch.float32)\n        if kernel.ndim == 1:\n            kernel = torch.outer(kernel, kernel)\n        kernel /= torch.sum(kernel)\n\n        kernel = kernel * gain\n\n        if self.use_conv:\n            _, _, convH, convW = weight.shape\n            pad_value = (kernel.shape[0] - factor) + (convW - 1)\n            stride_value = [factor, factor]\n            upfirdn_input = upfirdn2d_native(\n                hidden_states,\n                torch.tensor(kernel, device=hidden_states.device),\n                pad=((pad_value + 1) // 2, pad_value // 2),\n            )\n            output = F.conv2d(upfirdn_input, weight, stride=stride_value, padding=0)\n        else:\n            pad_value = kernel.shape[0] - factor\n            output = upfirdn2d_native(\n                hidden_states,\n                torch.tensor(kernel, device=hidden_states.device),\n                down=factor,\n                pad=((pad_value + 1) // 2, pad_value // 2),\n            )\n\n        return output\n\n    def forward(self, hidden_states):\n        if self.use_conv:\n            downsample_input = self._downsample_2d(hidden_states, weight=self.Conv2d_0.weight, kernel=self.fir_kernel)\n            hidden_states = downsample_input + self.Conv2d_0.bias.reshape(1, -1, 1, 1)\n        else:\n            hidden_states = self._downsample_2d(hidden_states, kernel=self.fir_kernel, factor=2)\n\n        return hidden_states\n\n\n# downsample/upsample layer used in k-upscaler, might be able to use FirDownsample2D/DirUpsample2D instead\nclass KDownsample2D(nn.Module):\n    def __init__(self, pad_mode=\"reflect\"):\n        super().__init__()\n        self.pad_mode = pad_mode\n        kernel_1d = torch.tensor([[1 / 8, 3 / 8, 3 / 8, 1 / 8]])\n        self.pad = kernel_1d.shape[1] // 2 - 1\n        self.register_buffer(\"kernel\", kernel_1d.T @ kernel_1d, persistent=False)\n\n    def forward(self, x):\n        x = F.pad(x, (self.pad,) * 4, self.pad_mode)\n        weight = x.new_zeros([x.shape[1], x.shape[1], self.kernel.shape[0], self.kernel.shape[1]])\n        indices = torch.arange(x.shape[1], device=x.device)\n        weight[indices, indices] = self.kernel.to(weight)\n        return F.conv2d(x, weight, stride=2)\n\n\nclass KUpsample2D(nn.Module):\n    def __init__(self, pad_mode=\"reflect\"):\n        super().__init__()\n        self.pad_mode = pad_mode\n        kernel_1d = torch.tensor([[1 / 8, 3 / 8, 3 / 8, 1 / 8]]) * 2\n        self.pad = kernel_1d.shape[1] // 2 - 1\n        self.register_buffer(\"kernel\", kernel_1d.T @ kernel_1d, persistent=False)\n\n    def forward(self, x):\n        x = F.pad(x, ((self.pad + 1) // 2,) * 4, self.pad_mode)\n        weight = x.new_zeros([x.shape[1], x.shape[1], self.kernel.shape[0], self.kernel.shape[1]])\n        indices = torch.arange(x.shape[1], device=x.device)\n        weight[indices, indices] = self.kernel.to(weight)\n        return F.conv_transpose2d(x, weight, stride=2, padding=self.pad * 
2 + 1)\n\n\nclass ResnetBlock2D(nn.Module):\n    r\"\"\"\n    A Resnet block.\n\n    Parameters:\n        in_channels (`int`): The number of channels in the input.\n        out_channels (`int`, *optional*, default to be `None`):\n            The number of output channels for the first conv2d layer. If None, same as `in_channels`.\n        dropout (`float`, *optional*, defaults to `0.0`): The dropout probability to use.\n        temb_channels (`int`, *optional*, default to `512`): the number of channels in timestep embedding.\n        groups (`int`, *optional*, default to `32`): The number of groups to use for the first normalization layer.\n        groups_out (`int`, *optional*, default to None):\n            The number of groups to use for the second normalization layer. if set to None, same as `groups`.\n        eps (`float`, *optional*, defaults to `1e-6`): The epsilon to use for the normalization.\n        non_linearity (`str`, *optional*, default to `\"swish\"`): the activation function to use.\n        time_embedding_norm (`str`, *optional*, default to `\"default\"` ): Time scale shift config.\n            By default, apply timestep embedding conditioning with a simple shift mechanism. Choose \"scale_shift\" or\n            \"ada_group\" for a stronger conditioning with scale and shift.\n        kernel (`torch.FloatTensor`, optional, default to None): FIR filter, see\n            [`~models.resnet.FirUpsample2D`] and [`~models.resnet.FirDownsample2D`].\n        output_scale_factor (`float`, *optional*, default to be `1.0`): the scale factor to use for the output.\n        use_in_shortcut (`bool`, *optional*, default to `True`):\n            If `True`, add a 1x1 nn.conv2d layer for skip-connection.\n        up (`bool`, *optional*, default to `False`): If `True`, add an upsample layer.\n        down (`bool`, *optional*, default to `False`): If `True`, add a downsample layer.\n        conv_shortcut_bias (`bool`, *optional*, default to `True`):  If `True`, adds a learnable bias to the\n            `conv_shortcut` output.\n        conv_2d_out_channels (`int`, *optional*, default to `None`): the number of channels in the output.\n            If None, same as `out_channels`.\n    \"\"\"\n\n    def __init__(\n        self,\n        *,\n        in_channels,\n        out_channels=None,\n        conv_shortcut=False,\n        dropout=0.0,\n        temb_channels=512,\n        groups=32,\n        groups_out=None,\n        pre_norm=True,\n        eps=1e-6,\n        non_linearity=\"swish\",\n        skip_time_act=False,\n        time_embedding_norm=\"default\",  # default, scale_shift, ada_group\n        kernel=None,\n        output_scale_factor=1.0,\n        use_in_shortcut=None,\n        up=False,\n        down=False,\n        conv_shortcut_bias: bool = True,\n        conv_2d_out_channels: Optional[int] = None,\n    ):\n        super().__init__()\n        self.pre_norm = pre_norm\n        self.pre_norm = True\n        self.in_channels = in_channels\n        out_channels = in_channels if out_channels is None else out_channels\n        self.out_channels = out_channels\n        self.use_conv_shortcut = conv_shortcut\n        self.up = up\n        self.down = down\n        self.output_scale_factor = output_scale_factor\n        self.time_embedding_norm = time_embedding_norm\n        self.skip_time_act = skip_time_act\n\n        if groups_out is None:\n            groups_out = groups\n\n        if self.time_embedding_norm == \"ada_group\":\n            self.norm1 = AdaGroupNorm(temb_channels, 
in_channels, groups, eps=eps)\n        else:\n            self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=in_channels, eps=eps, affine=True)\n\n        self.conv1 = torch.nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1)\n\n        if temb_channels is not None:\n            if self.time_embedding_norm == \"default\":\n                self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels)\n            elif self.time_embedding_norm == \"scale_shift\":\n                self.time_emb_proj = torch.nn.Linear(temb_channels, 2 * out_channels)\n            elif self.time_embedding_norm == \"ada_group\":\n                self.time_emb_proj = None\n            else:\n                raise ValueError(f\"unknown time_embedding_norm : {self.time_embedding_norm} \")\n        else:\n            self.time_emb_proj = None\n\n        if self.time_embedding_norm == \"ada_group\":\n            self.norm2 = AdaGroupNorm(temb_channels, out_channels, groups_out, eps=eps)\n        else:\n            self.norm2 = torch.nn.GroupNorm(num_groups=groups_out, num_channels=out_channels, eps=eps, affine=True)\n\n        self.dropout = torch.nn.Dropout(dropout)\n        conv_2d_out_channels = conv_2d_out_channels or out_channels\n        self.conv2 = torch.nn.Conv2d(out_channels, conv_2d_out_channels, kernel_size=3, stride=1, padding=1)\n\n        if non_linearity == \"swish\":\n            self.nonlinearity = lambda x: F.silu(x)\n        elif non_linearity == \"mish\":\n            self.nonlinearity = nn.Mish()\n        elif non_linearity == \"silu\":\n            self.nonlinearity = nn.SiLU()\n        elif non_linearity == \"gelu\":\n            self.nonlinearity = nn.GELU()\n\n        self.upsample = self.downsample = None\n        if self.up:\n            if kernel == \"fir\":\n                fir_kernel = (1, 3, 3, 1)\n                self.upsample = lambda x: upsample_2d(x, kernel=fir_kernel)\n            elif kernel == \"sde_vp\":\n                self.upsample = partial(F.interpolate, scale_factor=2.0, mode=\"nearest\")\n            else:\n                self.upsample = Upsample2D(in_channels, use_conv=False)\n        elif self.down:\n            if kernel == \"fir\":\n                fir_kernel = (1, 3, 3, 1)\n                self.downsample = lambda x: downsample_2d(x, kernel=fir_kernel)\n            elif kernel == \"sde_vp\":\n                self.downsample = partial(F.avg_pool2d, kernel_size=2, stride=2)\n            else:\n                self.downsample = Downsample2D(in_channels, use_conv=False, padding=1, name=\"op\")\n\n        self.use_in_shortcut = self.in_channels != conv_2d_out_channels if use_in_shortcut is None else use_in_shortcut\n\n        self.conv_shortcut = None\n        if self.use_in_shortcut:\n            self.conv_shortcut = torch.nn.Conv2d(\n                in_channels, conv_2d_out_channels, kernel_size=1, stride=1, padding=0, bias=conv_shortcut_bias\n            )\n\n    def forward(self, input_tensor, temb):\n        hidden_states = input_tensor\n\n        if self.time_embedding_norm == \"ada_group\":\n            hidden_states = self.norm1(hidden_states, temb)\n        else:\n            hidden_states = self.norm1(hidden_states)\n\n        hidden_states = self.nonlinearity(hidden_states)\n\n        if self.upsample is not None:\n            # upsample_nearest_nhwc fails with large batch sizes. 
see https://github.com/huggingface/diffusers/issues/984\n            if hidden_states.shape[0] >= 64:\n                input_tensor = input_tensor.contiguous()\n                hidden_states = hidden_states.contiguous()\n            input_tensor = self.upsample(input_tensor)\n            hidden_states = self.upsample(hidden_states)\n        elif self.downsample is not None:\n            input_tensor = self.downsample(input_tensor)\n            hidden_states = self.downsample(hidden_states)\n\n        hidden_states = self.conv1(hidden_states)\n\n        if self.time_emb_proj is not None:\n            if not self.skip_time_act:\n                temb = self.nonlinearity(temb)\n            temb = self.time_emb_proj(temb)[:, :, None, None]\n\n        if temb is not None and self.time_embedding_norm == \"default\":\n            hidden_states = hidden_states + temb\n\n        if self.time_embedding_norm == \"ada_group\":\n            hidden_states = self.norm2(hidden_states, temb)\n        else:\n            hidden_states = self.norm2(hidden_states)\n\n        if temb is not None and self.time_embedding_norm == \"scale_shift\":\n            scale, shift = torch.chunk(temb, 2, dim=1)\n            hidden_states = hidden_states * (1 + scale) + shift\n\n        hidden_states = self.nonlinearity(hidden_states)\n\n        hidden_states = self.dropout(hidden_states)\n        hidden_states = self.conv2(hidden_states)\n\n        if self.conv_shortcut is not None:\n            input_tensor = self.conv_shortcut(input_tensor)\n\n        output_tensor = (input_tensor + hidden_states) / self.output_scale_factor\n\n        return output_tensor\n\n\nclass Mish(torch.nn.Module):\n    def forward(self, hidden_states):\n        return hidden_states * torch.tanh(torch.nn.functional.softplus(hidden_states))\n\n\n# unet_rl.py\ndef rearrange_dims(tensor):\n    if len(tensor.shape) == 2:\n        return tensor[:, :, None]\n    if len(tensor.shape) == 3:\n        return tensor[:, :, None, :]\n    elif len(tensor.shape) == 4:\n        return tensor[:, :, 0, :]\n    else:\n        raise ValueError(f\"`len(tensor)`: {len(tensor)} has to be 2, 3 or 4.\")\n\n\nclass Conv1dBlock(nn.Module):\n    \"\"\"\n    Conv1d --> GroupNorm --> Mish\n    \"\"\"\n\n    def __init__(self, inp_channels, out_channels, kernel_size, n_groups=8):\n        super().__init__()\n\n        self.conv1d = nn.Conv1d(inp_channels, out_channels, kernel_size, padding=kernel_size // 2)\n        self.group_norm = nn.GroupNorm(n_groups, out_channels)\n        self.mish = nn.Mish()\n\n    def forward(self, x):\n        x = self.conv1d(x)\n        x = rearrange_dims(x)\n        x = self.group_norm(x)\n        x = rearrange_dims(x)\n        x = self.mish(x)\n        return x\n\n\n# unet_rl.py\nclass ResidualTemporalBlock1D(nn.Module):\n    def __init__(self, inp_channels, out_channels, embed_dim, kernel_size=5):\n        super().__init__()\n        self.conv_in = Conv1dBlock(inp_channels, out_channels, kernel_size)\n        self.conv_out = Conv1dBlock(out_channels, out_channels, kernel_size)\n\n        self.time_emb_act = nn.Mish()\n        self.time_emb = nn.Linear(embed_dim, out_channels)\n\n        self.residual_conv = (\n            nn.Conv1d(inp_channels, out_channels, 1) if inp_channels != out_channels else nn.Identity()\n        )\n\n    def forward(self, x, t):\n        \"\"\"\n        Args:\n            x : [ batch_size x inp_channels x horizon ]\n            t : [ batch_size x embed_dim ]\n\n        returns:\n            out : [ batch_size x 
out_channels x horizon ]\n        \"\"\"\n        t = self.time_emb_act(t)\n        t = self.time_emb(t)\n        out = self.conv_in(x) + rearrange_dims(t)\n        out = self.conv_out(out)\n        return out + self.residual_conv(x)\n\n\ndef upsample_2d(hidden_states, kernel=None, factor=2, gain=1):\n    r\"\"\"Upsample2D a batch of 2D images with the given filter.\n    Accepts a batch of 2D images of the shape `[N, C, H, W]` or `[N, H, W, C]` and upsamples each image with the given\n    filter. The filter is normalized so that if the input pixels are constant, they will be scaled by the specified\n    `gain`. Pixels outside the image are assumed to be zero, and the filter is padded with zeros so that its shape is\n    a: multiple of the upsampling factor.\n\n    Args:\n        hidden_states: Input tensor of the shape `[N, C, H, W]` or `[N, H, W, C]`.\n        kernel: FIR filter of the shape `[firH, firW]` or `[firN]`\n          (separable). The default is `[1] * factor`, which corresponds to nearest-neighbor upsampling.\n        factor: Integer upsampling factor (default: 2).\n        gain: Scaling factor for signal magnitude (default: 1.0).\n\n    Returns:\n        output: Tensor of the shape `[N, C, H * factor, W * factor]`\n    \"\"\"\n    assert isinstance(factor, int) and factor >= 1\n    if kernel is None:\n        kernel = [1] * factor\n\n    kernel = torch.tensor(kernel, dtype=torch.float32)\n    if kernel.ndim == 1:\n        kernel = torch.outer(kernel, kernel)\n    kernel /= torch.sum(kernel)\n\n    kernel = kernel * (gain * (factor**2))\n    pad_value = kernel.shape[0] - factor\n    output = upfirdn2d_native(\n        hidden_states,\n        kernel.to(device=hidden_states.device),\n        up=factor,\n        pad=((pad_value + 1) // 2 + factor - 1, pad_value // 2),\n    )\n    return output\n\n\ndef downsample_2d(hidden_states, kernel=None, factor=2, gain=1):\n    r\"\"\"Downsample2D a batch of 2D images with the given filter.\n    Accepts a batch of 2D images of the shape `[N, C, H, W]` or `[N, H, W, C]` and downsamples each image with the\n    given filter. The filter is normalized so that if the input pixels are constant, they will be scaled by the\n    specified `gain`. Pixels outside the image are assumed to be zero, and the filter is padded with zeros so that its\n    shape is a multiple of the downsampling factor.\n\n    Args:\n        hidden_states: Input tensor of the shape `[N, C, H, W]` or `[N, H, W, C]`.\n        kernel: FIR filter of the shape `[firH, firW]` or `[firN]`\n          (separable). 
The default is `[1] * factor`, which corresponds to average pooling.\n        factor: Integer downsampling factor (default: 2).\n        gain: Scaling factor for signal magnitude (default: 1.0).\n\n    Returns:\n        output: Tensor of the shape `[N, C, H // factor, W // factor]`\n    \"\"\"\n\n    assert isinstance(factor, int) and factor >= 1\n    if kernel is None:\n        kernel = [1] * factor\n\n    kernel = torch.tensor(kernel, dtype=torch.float32)\n    if kernel.ndim == 1:\n        kernel = torch.outer(kernel, kernel)\n    kernel /= torch.sum(kernel)\n\n    kernel = kernel * gain\n    pad_value = kernel.shape[0] - factor\n    output = upfirdn2d_native(\n        hidden_states, kernel.to(device=hidden_states.device), down=factor, pad=((pad_value + 1) // 2, pad_value // 2)\n    )\n    return output\n\n\ndef upfirdn2d_native(tensor, kernel, up=1, down=1, pad=(0, 0)):\n    up_x = up_y = up\n    down_x = down_y = down\n    pad_x0 = pad_y0 = pad[0]\n    pad_x1 = pad_y1 = pad[1]\n\n    _, channel, in_h, in_w = tensor.shape\n    tensor = tensor.reshape(-1, in_h, in_w, 1)\n\n    _, in_h, in_w, minor = tensor.shape\n    kernel_h, kernel_w = kernel.shape\n\n    out = tensor.view(-1, in_h, 1, in_w, 1, minor)\n    out = F.pad(out, [0, 0, 0, up_x - 1, 0, 0, 0, up_y - 1])\n    out = out.view(-1, in_h * up_y, in_w * up_x, minor)\n\n    out = F.pad(out, [0, 0, max(pad_x0, 0), max(pad_x1, 0), max(pad_y0, 0), max(pad_y1, 0)])\n    out = out.to(tensor.device)  # Move back to mps if necessary\n    out = out[\n        :,\n        max(-pad_y0, 0) : out.shape[1] - max(-pad_y1, 0),\n        max(-pad_x0, 0) : out.shape[2] - max(-pad_x1, 0),\n        :,\n    ]\n\n    out = out.permute(0, 3, 1, 2)\n    out = out.reshape([-1, 1, in_h * up_y + pad_y0 + pad_y1, in_w * up_x + pad_x0 + pad_x1])\n    w = torch.flip(kernel, [0, 1]).view(1, 1, kernel_h, kernel_w)\n    out = F.conv2d(out, w)\n    out = out.reshape(\n        -1,\n        minor,\n        in_h * up_y + pad_y0 + pad_y1 - kernel_h + 1,\n        in_w * up_x + pad_x0 + pad_x1 - kernel_w + 1,\n    )\n    out = out.permute(0, 2, 3, 1)\n    out = out[:, ::down_y, ::down_x, :]\n\n    out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h) // down_y + 1\n    out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w) // down_x + 1\n\n    return out.view(-1, channel, out_h, out_w)\n\n\nclass TemporalConvLayer(nn.Module):\n    \"\"\"\n    Temporal convolutional layer that can be used for video (sequence of images) input Code mostly copied from:\n    https://github.com/modelscope/modelscope/blob/1509fdb973e5871f37148a4b5e5964cafd43e64d/modelscope/models/multi_modal/video_synthesis/unet_sd.py#L1016\n    \"\"\"\n\n    def __init__(self, in_dim, out_dim=None, dropout=0.0):\n        super().__init__()\n        out_dim = out_dim or in_dim\n        self.in_dim = in_dim\n        self.out_dim = out_dim\n\n        # conv layers\n        self.conv1 = nn.Sequential(\n            nn.GroupNorm(32, in_dim), nn.SiLU(), nn.Conv3d(in_dim, out_dim, (3, 1, 1), padding=(1, 0, 0))\n        )\n        self.conv2 = nn.Sequential(\n            nn.GroupNorm(32, out_dim),\n            nn.SiLU(),\n            nn.Dropout(dropout),\n            nn.Conv3d(out_dim, in_dim, (3, 1, 1), padding=(1, 0, 0)),\n        )\n        self.conv3 = nn.Sequential(\n            nn.GroupNorm(32, out_dim),\n            nn.SiLU(),\n            nn.Dropout(dropout),\n            nn.Conv3d(out_dim, in_dim, (3, 1, 1), padding=(1, 0, 0)),\n        )\n        self.conv4 = nn.Sequential(\n            nn.GroupNorm(32, out_dim),\n      
      nn.SiLU(),\n            nn.Dropout(dropout),\n            nn.Conv3d(out_dim, in_dim, (3, 1, 1), padding=(1, 0, 0)),\n        )\n\n        # zero out the last layer params, so the conv block is identity\n        nn.init.zeros_(self.conv4[-1].weight)\n        nn.init.zeros_(self.conv4[-1].bias)\n\n    def forward(self, hidden_states, num_frames=1):\n        hidden_states = (\n            hidden_states[None, :].reshape((-1, num_frames) + hidden_states.shape[1:]).permute(0, 2, 1, 3, 4)\n        )\n\n        identity = hidden_states\n        hidden_states = self.conv1(hidden_states)\n        hidden_states = self.conv2(hidden_states)\n        hidden_states = self.conv3(hidden_states)\n        hidden_states = self.conv4(hidden_states)\n\n        hidden_states = identity + hidden_states\n\n        hidden_states = hidden_states.permute(0, 2, 1, 3, 4).reshape(\n            (hidden_states.shape[0] * hidden_states.shape[2], -1) + hidden_states.shape[3:]\n        )\n        return hidden_states\n"
  },
  {
    "path": "diffusers/models/resnet_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport flax.linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass FlaxUpsample2D(nn.Module):\n    out_channels: int\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.conv = nn.Conv(\n            self.out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n    def __call__(self, hidden_states):\n        batch, height, width, channels = hidden_states.shape\n        hidden_states = jax.image.resize(\n            hidden_states,\n            shape=(batch, height * 2, width * 2, channels),\n            method=\"nearest\",\n        )\n        hidden_states = self.conv(hidden_states)\n        return hidden_states\n\n\nclass FlaxDownsample2D(nn.Module):\n    out_channels: int\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.conv = nn.Conv(\n            self.out_channels,\n            kernel_size=(3, 3),\n            strides=(2, 2),\n            padding=((1, 1), (1, 1)),  # padding=\"VALID\",\n            dtype=self.dtype,\n        )\n\n    def __call__(self, hidden_states):\n        # pad = ((0, 0), (0, 1), (0, 1), (0, 0))  # pad height and width dim\n        # hidden_states = jnp.pad(hidden_states, pad_width=pad)\n        hidden_states = self.conv(hidden_states)\n        return hidden_states\n\n\nclass FlaxResnetBlock2D(nn.Module):\n    in_channels: int\n    out_channels: int = None\n    dropout_prob: float = 0.0\n    use_nin_shortcut: bool = None\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        out_channels = self.in_channels if self.out_channels is None else self.out_channels\n\n        self.norm1 = nn.GroupNorm(num_groups=32, epsilon=1e-5)\n        self.conv1 = nn.Conv(\n            out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        self.time_emb_proj = nn.Dense(out_channels, dtype=self.dtype)\n\n        self.norm2 = nn.GroupNorm(num_groups=32, epsilon=1e-5)\n        self.dropout = nn.Dropout(self.dropout_prob)\n        self.conv2 = nn.Conv(\n            out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        use_nin_shortcut = self.in_channels != out_channels if self.use_nin_shortcut is None else self.use_nin_shortcut\n\n        self.conv_shortcut = None\n        if use_nin_shortcut:\n            self.conv_shortcut = nn.Conv(\n                out_channels,\n                kernel_size=(1, 1),\n                strides=(1, 1),\n                padding=\"VALID\",\n                dtype=self.dtype,\n            )\n\n    def __call__(self, hidden_states, temb, deterministic=True):\n        residual = hidden_states\n        hidden_states = self.norm1(hidden_states)\n       
 hidden_states = nn.swish(hidden_states)\n        hidden_states = self.conv1(hidden_states)\n\n        temb = self.time_emb_proj(nn.swish(temb))\n        temb = jnp.expand_dims(jnp.expand_dims(temb, 1), 1)\n        hidden_states = hidden_states + temb\n\n        hidden_states = self.norm2(hidden_states)\n        hidden_states = nn.swish(hidden_states)\n        hidden_states = self.dropout(hidden_states, deterministic)\n        hidden_states = self.conv2(hidden_states)\n\n        if self.conv_shortcut is not None:\n            residual = self.conv_shortcut(residual)\n\n        return hidden_states + residual\n"
  },
  {
    "path": "diffusers/models/t5_film_transformer.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\n\nimport torch\nfrom torch import nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .attention_processor import Attention\nfrom .embeddings import get_timestep_embedding\nfrom .modeling_utils import ModelMixin\n\n\nclass T5FilmDecoder(ModelMixin, ConfigMixin):\n    @register_to_config\n    def __init__(\n        self,\n        input_dims: int = 128,\n        targets_length: int = 256,\n        max_decoder_noise_time: float = 2000.0,\n        d_model: int = 768,\n        num_layers: int = 12,\n        num_heads: int = 12,\n        d_kv: int = 64,\n        d_ff: int = 2048,\n        dropout_rate: float = 0.1,\n    ):\n        super().__init__()\n\n        self.conditioning_emb = nn.Sequential(\n            nn.Linear(d_model, d_model * 4, bias=False),\n            nn.SiLU(),\n            nn.Linear(d_model * 4, d_model * 4, bias=False),\n            nn.SiLU(),\n        )\n\n        self.position_encoding = nn.Embedding(targets_length, d_model)\n        self.position_encoding.weight.requires_grad = False\n\n        self.continuous_inputs_projection = nn.Linear(input_dims, d_model, bias=False)\n\n        self.dropout = nn.Dropout(p=dropout_rate)\n\n        self.decoders = nn.ModuleList()\n        for lyr_num in range(num_layers):\n            # FiLM conditional T5 decoder\n            lyr = DecoderLayer(d_model=d_model, d_kv=d_kv, num_heads=num_heads, d_ff=d_ff, dropout_rate=dropout_rate)\n            self.decoders.append(lyr)\n\n        self.decoder_norm = T5LayerNorm(d_model)\n\n        self.post_dropout = nn.Dropout(p=dropout_rate)\n        self.spec_out = nn.Linear(d_model, input_dims, bias=False)\n\n    def encoder_decoder_mask(self, query_input, key_input):\n        mask = torch.mul(query_input.unsqueeze(-1), key_input.unsqueeze(-2))\n        return mask.unsqueeze(-3)\n\n    def forward(self, encodings_and_masks, decoder_input_tokens, decoder_noise_time):\n        batch, _, _ = decoder_input_tokens.shape\n        assert decoder_noise_time.shape == (batch,)\n\n        # decoder_noise_time is in [0, 1), so rescale to expected timing range.\n        time_steps = get_timestep_embedding(\n            decoder_noise_time * self.config.max_decoder_noise_time,\n            embedding_dim=self.config.d_model,\n            max_period=self.config.max_decoder_noise_time,\n        ).to(dtype=self.dtype)\n\n        conditioning_emb = self.conditioning_emb(time_steps).unsqueeze(1)\n\n        assert conditioning_emb.shape == (batch, 1, self.config.d_model * 4)\n\n        seq_length = decoder_input_tokens.shape[1]\n\n        # If we want to use relative positions for audio context, we can just offset\n        # this sequence by the length of encodings_and_masks.\n        decoder_positions = torch.broadcast_to(\n            torch.arange(seq_length, device=decoder_input_tokens.device),\n            (batch, seq_length),\n        )\n\n        
position_encodings = self.position_encoding(decoder_positions)\n\n        inputs = self.continuous_inputs_projection(decoder_input_tokens)\n        inputs += position_encodings\n        y = self.dropout(inputs)\n\n        # decoder: No padding present.\n        decoder_mask = torch.ones(\n            decoder_input_tokens.shape[:2], device=decoder_input_tokens.device, dtype=inputs.dtype\n        )\n\n        # Translate encoding masks to encoder-decoder masks.\n        encodings_and_encdec_masks = [(x, self.encoder_decoder_mask(decoder_mask, y)) for x, y in encodings_and_masks]\n\n        # cross attend style: concat encodings\n        encoded = torch.cat([x[0] for x in encodings_and_encdec_masks], dim=1)\n        encoder_decoder_mask = torch.cat([x[1] for x in encodings_and_encdec_masks], dim=-1)\n\n        for lyr in self.decoders:\n            y = lyr(\n                y,\n                conditioning_emb=conditioning_emb,\n                encoder_hidden_states=encoded,\n                encoder_attention_mask=encoder_decoder_mask,\n            )[0]\n\n        y = self.decoder_norm(y)\n        y = self.post_dropout(y)\n\n        spec_out = self.spec_out(y)\n        return spec_out\n\n\nclass DecoderLayer(nn.Module):\n    def __init__(self, d_model, d_kv, num_heads, d_ff, dropout_rate, layer_norm_epsilon=1e-6):\n        super().__init__()\n        self.layer = nn.ModuleList()\n\n        # cond self attention: layer 0\n        self.layer.append(\n            T5LayerSelfAttentionCond(d_model=d_model, d_kv=d_kv, num_heads=num_heads, dropout_rate=dropout_rate)\n        )\n\n        # cross attention: layer 1\n        self.layer.append(\n            T5LayerCrossAttention(\n                d_model=d_model,\n                d_kv=d_kv,\n                num_heads=num_heads,\n                dropout_rate=dropout_rate,\n                layer_norm_epsilon=layer_norm_epsilon,\n            )\n        )\n\n        # Film Cond MLP + dropout: last layer\n        self.layer.append(\n            T5LayerFFCond(d_model=d_model, d_ff=d_ff, dropout_rate=dropout_rate, layer_norm_epsilon=layer_norm_epsilon)\n        )\n\n    def forward(\n        self,\n        hidden_states,\n        conditioning_emb=None,\n        attention_mask=None,\n        encoder_hidden_states=None,\n        encoder_attention_mask=None,\n        encoder_decoder_position_bias=None,\n    ):\n        hidden_states = self.layer[0](\n            hidden_states,\n            conditioning_emb=conditioning_emb,\n            attention_mask=attention_mask,\n        )\n\n        if encoder_hidden_states is not None:\n            encoder_extended_attention_mask = torch.where(encoder_attention_mask > 0, 0, -1e10).to(\n                encoder_hidden_states.dtype\n            )\n\n            hidden_states = self.layer[1](\n                hidden_states,\n                key_value_states=encoder_hidden_states,\n                attention_mask=encoder_extended_attention_mask,\n            )\n\n        # Apply Film Conditional Feed Forward layer\n        hidden_states = self.layer[-1](hidden_states, conditioning_emb)\n\n        return (hidden_states,)\n\n\nclass T5LayerSelfAttentionCond(nn.Module):\n    def __init__(self, d_model, d_kv, num_heads, dropout_rate):\n        super().__init__()\n        self.layer_norm = T5LayerNorm(d_model)\n        self.FiLMLayer = T5FiLMLayer(in_features=d_model * 4, out_features=d_model)\n        self.attention = Attention(query_dim=d_model, heads=num_heads, dim_head=d_kv, out_bias=False, scale_qk=False)\n        self.dropout 
= nn.Dropout(dropout_rate)\n\n    def forward(\n        self,\n        hidden_states,\n        conditioning_emb=None,\n        attention_mask=None,\n    ):\n        # pre_self_attention_layer_norm\n        normed_hidden_states = self.layer_norm(hidden_states)\n\n        if conditioning_emb is not None:\n            normed_hidden_states = self.FiLMLayer(normed_hidden_states, conditioning_emb)\n\n        # Self-attention block\n        attention_output = self.attention(normed_hidden_states)\n\n        hidden_states = hidden_states + self.dropout(attention_output)\n\n        return hidden_states\n\n\nclass T5LayerCrossAttention(nn.Module):\n    def __init__(self, d_model, d_kv, num_heads, dropout_rate, layer_norm_epsilon):\n        super().__init__()\n        self.attention = Attention(query_dim=d_model, heads=num_heads, dim_head=d_kv, out_bias=False, scale_qk=False)\n        self.layer_norm = T5LayerNorm(d_model, eps=layer_norm_epsilon)\n        self.dropout = nn.Dropout(dropout_rate)\n\n    def forward(\n        self,\n        hidden_states,\n        key_value_states=None,\n        attention_mask=None,\n    ):\n        normed_hidden_states = self.layer_norm(hidden_states)\n        attention_output = self.attention(\n            normed_hidden_states,\n            encoder_hidden_states=key_value_states,\n            attention_mask=attention_mask.squeeze(1),\n        )\n        layer_output = hidden_states + self.dropout(attention_output)\n        return layer_output\n\n\nclass T5LayerFFCond(nn.Module):\n    def __init__(self, d_model, d_ff, dropout_rate, layer_norm_epsilon):\n        super().__init__()\n        self.DenseReluDense = T5DenseGatedActDense(d_model=d_model, d_ff=d_ff, dropout_rate=dropout_rate)\n        self.film = T5FiLMLayer(in_features=d_model * 4, out_features=d_model)\n        self.layer_norm = T5LayerNorm(d_model, eps=layer_norm_epsilon)\n        self.dropout = nn.Dropout(dropout_rate)\n\n    def forward(self, hidden_states, conditioning_emb=None):\n        forwarded_states = self.layer_norm(hidden_states)\n        if conditioning_emb is not None:\n            forwarded_states = self.film(forwarded_states, conditioning_emb)\n\n        forwarded_states = self.DenseReluDense(forwarded_states)\n        hidden_states = hidden_states + self.dropout(forwarded_states)\n        return hidden_states\n\n\nclass T5DenseGatedActDense(nn.Module):\n    def __init__(self, d_model, d_ff, dropout_rate):\n        super().__init__()\n        self.wi_0 = nn.Linear(d_model, d_ff, bias=False)\n        self.wi_1 = nn.Linear(d_model, d_ff, bias=False)\n        self.wo = nn.Linear(d_ff, d_model, bias=False)\n        self.dropout = nn.Dropout(dropout_rate)\n        self.act = NewGELUActivation()\n\n    def forward(self, hidden_states):\n        hidden_gelu = self.act(self.wi_0(hidden_states))\n        hidden_linear = self.wi_1(hidden_states)\n        hidden_states = hidden_gelu * hidden_linear\n        hidden_states = self.dropout(hidden_states)\n\n        hidden_states = self.wo(hidden_states)\n        return hidden_states\n\n\nclass T5LayerNorm(nn.Module):\n    def __init__(self, hidden_size, eps=1e-6):\n        \"\"\"\n        Construct a layernorm module in the T5 style. 
No bias and no subtraction of mean.\n        \"\"\"\n        super().__init__()\n        self.weight = nn.Parameter(torch.ones(hidden_size))\n        self.variance_epsilon = eps\n\n    def forward(self, hidden_states):\n        # T5 uses a layer_norm which only scales and doesn't shift, which is also known as Root Mean\n        # Square Layer Normalization https://arxiv.org/abs/1910.07467 thus variance is calculated\n        # w/o mean and there is no bias. Additionally we want to make sure that the accumulation for\n        # half-precision inputs is done in fp32\n\n        variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)\n        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)\n\n        # convert into half-precision if necessary\n        if self.weight.dtype in [torch.float16, torch.bfloat16]:\n            hidden_states = hidden_states.to(self.weight.dtype)\n\n        return self.weight * hidden_states\n\n\nclass NewGELUActivation(nn.Module):\n    \"\"\"\n    Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see\n    the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415\n    \"\"\"\n\n    def forward(self, input: torch.Tensor) -> torch.Tensor:\n        return 0.5 * input * (1.0 + torch.tanh(math.sqrt(2.0 / math.pi) * (input + 0.044715 * torch.pow(input, 3.0))))\n\n\nclass T5FiLMLayer(nn.Module):\n    \"\"\"\n    FiLM Layer\n    \"\"\"\n\n    def __init__(self, in_features, out_features):\n        super().__init__()\n        self.scale_bias = nn.Linear(in_features, out_features * 2, bias=False)\n\n    def forward(self, x, conditioning_emb):\n        emb = self.scale_bias(conditioning_emb)\n        scale, shift = torch.chunk(emb, 2, -1)\n        x = x * (1 + scale) + shift\n        return x\n"
  },
  {
    "path": "diffusers/models/transformer_2d.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Optional\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..models.embeddings import ImagePositionalEmbeddings\nfrom ..utils import BaseOutput, deprecate\nfrom .attention import BasicTransformerBlock\nfrom .embeddings import PatchEmbed\nfrom .modeling_utils import ModelMixin\n\n\n@dataclass\nclass Transformer2DModelOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` or `(batch size, num_vector_embeds - 1, num_latent_pixels)` if [`Transformer2DModel`] is discrete):\n            Hidden states conditioned on `encoder_hidden_states` input. If discrete, returns probability distributions\n            for the unnoised latent pixels.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass Transformer2DModel(ModelMixin, ConfigMixin):\n    \"\"\"\n    Transformer model for image-like data. Takes either discrete (classes of vector embeddings) or continuous (actual\n    embeddings) inputs.\n\n    When input is continuous: First, project the input (aka embedding) and reshape to b, t, d. Then apply standard\n    transformer action. Finally, reshape to image.\n\n    When input is discrete: First, input (classes of latent pixels) is converted to embeddings and has positional\n    embeddings applied, see `ImagePositionalEmbeddings`. Then apply standard transformer action. Finally, predict\n    classes of unnoised image.\n\n    Note that it is assumed one of the input classes is the masked latent pixel. The predicted classes of the unnoised\n    image do not contain a prediction for the masked pixel as the unnoised image cannot be masked.\n\n    Parameters:\n        num_attention_heads (`int`, *optional*, defaults to 16): The number of heads to use for multi-head attention.\n        attention_head_dim (`int`, *optional*, defaults to 88): The number of channels in each head.\n        in_channels (`int`, *optional*):\n            Pass if the input is continuous. The number of channels in the input and output.\n        num_layers (`int`, *optional*, defaults to 1): The number of layers of Transformer blocks to use.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n        cross_attention_dim (`int`, *optional*): The number of encoder_hidden_states dimensions to use.\n        sample_size (`int`, *optional*): Pass if the input is discrete. The width of the latent images.\n            Note that this is fixed at training time as it is used for learning a number of position embeddings. See\n            `ImagePositionalEmbeddings`.\n        num_vector_embeds (`int`, *optional*):\n            Pass if the input is discrete. 
The number of classes of the vector embeddings of the latent pixels.\n            Includes the class for the masked latent pixel.\n        activation_fn (`str`, *optional*, defaults to `\"geglu\"`): Activation function to be used in feed-forward.\n        num_embeds_ada_norm ( `int`, *optional*): Pass if at least one of the norm_layers is `AdaLayerNorm`.\n            The number of diffusion steps used during training. Note that this is fixed at training time as it is used\n            to learn a number of embeddings that are added to the hidden states. During inference, you can denoise for\n            up to but not more than steps than `num_embeds_ada_norm`.\n        attention_bias (`bool`, *optional*):\n            Configure if the TransformerBlocks' attention should contain a bias parameter.\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        num_attention_heads: int = 16,\n        attention_head_dim: int = 88,\n        in_channels: Optional[int] = None,\n        out_channels: Optional[int] = None,\n        num_layers: int = 1,\n        dropout: float = 0.0,\n        norm_num_groups: int = 32,\n        cross_attention_dim: Optional[int] = None,\n        attention_bias: bool = False,\n        sample_size: Optional[int] = None,\n        num_vector_embeds: Optional[int] = None,\n        patch_size: Optional[int] = None,\n        activation_fn: str = \"geglu\",\n        num_embeds_ada_norm: Optional[int] = None,\n        use_linear_projection: bool = False,\n        only_cross_attention: bool = False,\n        upcast_attention: bool = False,\n        norm_type: str = \"layer_norm\",\n        norm_elementwise_affine: bool = True,\n    ):\n        super().__init__()\n        self.use_linear_projection = use_linear_projection\n        self.num_attention_heads = num_attention_heads\n        self.attention_head_dim = attention_head_dim\n        inner_dim = num_attention_heads * attention_head_dim\n\n        # 1. Transformer2DModel can process both standard continuous images of shape `(batch_size, num_channels, width, height)` as well as quantized image embeddings of shape `(batch_size, num_image_vectors)`\n        # Define whether input is continuous or discrete depending on configuration\n        self.is_input_continuous = (in_channels is not None) and (patch_size is None)\n        self.is_input_vectorized = num_vector_embeds is not None\n        self.is_input_patches = in_channels is not None and patch_size is not None\n\n        if norm_type == \"layer_norm\" and num_embeds_ada_norm is not None:\n            deprecation_message = (\n                f\"The configuration file of this model: {self.__class__} is outdated. `norm_type` is either not set or\"\n                \" incorrectly set to `'layer_norm'`.Make sure to set `norm_type` to `'ada_norm'` in the config.\"\n                \" Please make sure to update the config accordingly as leaving `norm_type` might led to incorrect\"\n                \" results in future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it\"\n                \" would be very nice if you could open a Pull request for the `transformer/config.json` file\"\n            )\n            deprecate(\"norm_type!=num_embeds_ada_norm\", \"1.0.0\", deprecation_message, standard_warn=False)\n            norm_type = \"ada_norm\"\n\n        if self.is_input_continuous and self.is_input_vectorized:\n            raise ValueError(\n                f\"Cannot define both `in_channels`: {in_channels} and `num_vector_embeds`: {num_vector_embeds}. Make\"\n                \" sure that either `in_channels` or `num_vector_embeds` is None.\"\n            )\n        elif self.is_input_vectorized and self.is_input_patches:\n            raise ValueError(\n                f\"Cannot define both `num_vector_embeds`: {num_vector_embeds} and `patch_size`: {patch_size}. Make\"\n                \" sure that either `num_vector_embeds` or `num_patches` is None.\"\n            )\n        elif not self.is_input_continuous and not self.is_input_vectorized and not self.is_input_patches:\n            raise ValueError(\n                f\"Has to define `in_channels`: {in_channels}, `num_vector_embeds`: {num_vector_embeds}, or patch_size:\"\n                f\" {patch_size}. Make sure that `in_channels`, `num_vector_embeds` or `num_patches` is not None.\"\n            )\n\n        # 2. Define input layers\n        if self.is_input_continuous:\n            self.in_channels = in_channels\n\n            self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True)\n            if use_linear_projection:\n                self.proj_in = nn.Linear(in_channels, inner_dim)\n            else:\n                self.proj_in = nn.Conv2d(in_channels, inner_dim, kernel_size=1, stride=1, padding=0)\n        elif self.is_input_vectorized:\n            assert sample_size is not None, \"Transformer2DModel over discrete input must provide sample_size\"\n            assert num_vector_embeds is not None, \"Transformer2DModel over discrete input must provide num_embed\"\n\n            self.height = sample_size\n            self.width = sample_size\n            self.num_vector_embeds = num_vector_embeds\n            self.num_latent_pixels = self.height * self.width\n\n            self.latent_image_embedding = ImagePositionalEmbeddings(\n                num_embed=num_vector_embeds, embed_dim=inner_dim, height=self.height, width=self.width\n            )\n        elif self.is_input_patches:\n            assert sample_size is not None, \"Transformer2DModel over patched input must provide sample_size\"\n\n            self.height = sample_size\n            self.width = sample_size\n\n            self.patch_size = patch_size\n            self.pos_embed = PatchEmbed(\n                height=sample_size,\n                width=sample_size,\n                patch_size=patch_size,\n                in_channels=in_channels,\n                embed_dim=inner_dim,\n            )\n\n        # 3. 
Define transformers blocks\n        self.transformer_blocks = nn.ModuleList(\n            [\n                BasicTransformerBlock(\n                    inner_dim,\n                    num_attention_heads,\n                    attention_head_dim,\n                    dropout=dropout,\n                    cross_attention_dim=cross_attention_dim,\n                    activation_fn=activation_fn,\n                    num_embeds_ada_norm=num_embeds_ada_norm,\n                    attention_bias=attention_bias,\n                    only_cross_attention=only_cross_attention,\n                    upcast_attention=upcast_attention,\n                    norm_type=norm_type,\n                    norm_elementwise_affine=norm_elementwise_affine,\n                )\n                for d in range(num_layers)\n            ]\n        )\n\n        # 4. Define output layers\n        self.out_channels = in_channels if out_channels is None else out_channels\n        if self.is_input_continuous:\n            # TODO: should use out_channels for continuous projections\n            if use_linear_projection:\n                self.proj_out = nn.Linear(inner_dim, in_channels)\n            else:\n                self.proj_out = nn.Conv2d(inner_dim, in_channels, kernel_size=1, stride=1, padding=0)\n        elif self.is_input_vectorized:\n            self.norm_out = nn.LayerNorm(inner_dim)\n            self.out = nn.Linear(inner_dim, self.num_vector_embeds - 1)\n        elif self.is_input_patches:\n            self.norm_out = nn.LayerNorm(inner_dim, elementwise_affine=False, eps=1e-6)\n            self.proj_out_1 = nn.Linear(inner_dim, 2 * inner_dim)\n            self.proj_out_2 = nn.Linear(inner_dim, patch_size * patch_size * self.out_channels)\n\n    def forward(\n        self,\n        hidden_states,\n        encoder_hidden_states=None,\n        timestep=None,\n        class_labels=None,\n        cross_attention_kwargs=None,\n        return_dict: bool = True,\n    ):\n        \"\"\"\n        Args:\n            hidden_states ( When discrete, `torch.LongTensor` of shape `(batch size, num latent pixels)`.\n                When continuous, `torch.FloatTensor` of shape `(batch size, channel, height, width)`): Input\n                hidden_states\n            encoder_hidden_states ( `torch.FloatTensor` of shape `(batch size, sequence len, embed dims)`, *optional*):\n                Conditional embeddings for cross attention layer. If not given, cross-attention defaults to\n                self-attention.\n            timestep ( `torch.long`, *optional*):\n                Optional timestep to be applied as an embedding in AdaLayerNorm's. Used to indicate denoising step.\n            class_labels ( `torch.LongTensor` of shape `(batch size, num classes)`, *optional*):\n                Optional class labels to be applied as an embedding in AdaLayerZeroNorm. Used to indicate class labels\n                conditioning.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~models.transformer_2d.Transformer2DModelOutput`] or `tuple`:\n            [`~models.transformer_2d.Transformer2DModelOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # 1. 
Input\n        if self.is_input_continuous:\n            batch, _, height, width = hidden_states.shape\n            residual = hidden_states\n\n            hidden_states = self.norm(hidden_states)\n            if not self.use_linear_projection:\n                hidden_states = self.proj_in(hidden_states)\n                inner_dim = hidden_states.shape[1]\n                hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim)\n            else:\n                inner_dim = hidden_states.shape[1]\n                hidden_states = hidden_states.permute(0, 2, 3, 1).reshape(batch, height * width, inner_dim)\n                hidden_states = self.proj_in(hidden_states)\n        elif self.is_input_vectorized:\n            hidden_states = self.latent_image_embedding(hidden_states)\n        elif self.is_input_patches:\n            hidden_states = self.pos_embed(hidden_states)\n\n        # 2. Blocks\n        for block in self.transformer_blocks:\n            hidden_states = block(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                timestep=timestep,\n                cross_attention_kwargs=cross_attention_kwargs,\n                class_labels=class_labels,\n            )\n\n        # 3. Output\n        if self.is_input_continuous:\n            if not self.use_linear_projection:\n                hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous()\n                hidden_states = self.proj_out(hidden_states)\n            else:\n                hidden_states = self.proj_out(hidden_states)\n                hidden_states = hidden_states.reshape(batch, height, width, inner_dim).permute(0, 3, 1, 2).contiguous()\n\n            output = hidden_states + residual\n        elif self.is_input_vectorized:\n            hidden_states = self.norm_out(hidden_states)\n            logits = self.out(hidden_states)\n            # (batch, self.num_vector_embeds - 1, self.num_latent_pixels)\n            logits = logits.permute(0, 2, 1)\n\n            # log(p(x_0))\n            output = F.log_softmax(logits.double(), dim=1).float()\n        elif self.is_input_patches:\n            # TODO: cleanup!\n            conditioning = self.transformer_blocks[0].norm1.emb(\n                timestep, class_labels, hidden_dtype=hidden_states.dtype\n            )\n            shift, scale = self.proj_out_1(F.silu(conditioning)).chunk(2, dim=1)\n            hidden_states = self.norm_out(hidden_states) * (1 + scale[:, None]) + shift[:, None]\n            hidden_states = self.proj_out_2(hidden_states)\n\n            # unpatchify\n            height = width = int(hidden_states.shape[1] ** 0.5)\n            hidden_states = hidden_states.reshape(\n                shape=(-1, height, width, self.patch_size, self.patch_size, self.out_channels)\n            )\n            hidden_states = torch.einsum(\"nhwpqc->nchpwq\", hidden_states)\n            output = hidden_states.reshape(\n                shape=(-1, self.out_channels, height * self.patch_size, width * self.patch_size)\n            )\n\n        if not return_dict:\n            return (output,)\n\n        return Transformer2DModelOutput(sample=output)\n"
  },
  {
    "path": "diffusers/models/transformer_temporal.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Optional\n\nimport torch\nfrom torch import nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .attention import BasicTransformerBlock\nfrom .modeling_utils import ModelMixin\n\n\n@dataclass\nclass TransformerTemporalModelOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size x num_frames, num_channels, height, width)`)\n            Hidden states conditioned on `encoder_hidden_states` input.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass TransformerTemporalModel(ModelMixin, ConfigMixin):\n    \"\"\"\n    Transformer model for video-like data.\n\n    Parameters:\n        num_attention_heads (`int`, *optional*, defaults to 16): The number of heads to use for multi-head attention.\n        attention_head_dim (`int`, *optional*, defaults to 88): The number of channels in each head.\n        in_channels (`int`, *optional*):\n            Pass if the input is continuous. The number of channels in the input and output.\n        num_layers (`int`, *optional*, defaults to 1): The number of layers of Transformer blocks to use.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n        cross_attention_dim (`int`, *optional*): The number of encoder_hidden_states dimensions to use.\n        sample_size (`int`, *optional*): Pass if the input is discrete. The width of the latent images.\n            Note that this is fixed at training time as it is used for learning a number of position embeddings. 
See\n            `ImagePositionalEmbeddings`.\n        activation_fn (`str`, *optional*, defaults to `\"geglu\"`): Activation function to be used in feed-forward.\n        attention_bias (`bool`, *optional*):\n            Configure if the TransformerBlocks' attention should contain a bias parameter.\n        double_self_attention (`bool`, *optional*):\n            Configure if each TransformerBlock should contain two self-attention layers\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        num_attention_heads: int = 16,\n        attention_head_dim: int = 88,\n        in_channels: Optional[int] = None,\n        out_channels: Optional[int] = None,\n        num_layers: int = 1,\n        dropout: float = 0.0,\n        norm_num_groups: int = 32,\n        cross_attention_dim: Optional[int] = None,\n        attention_bias: bool = False,\n        sample_size: Optional[int] = None,\n        activation_fn: str = \"geglu\",\n        norm_elementwise_affine: bool = True,\n        double_self_attention: bool = True,\n    ):\n        super().__init__()\n        self.num_attention_heads = num_attention_heads\n        self.attention_head_dim = attention_head_dim\n        inner_dim = num_attention_heads * attention_head_dim\n\n        self.in_channels = in_channels\n\n        self.norm = torch.nn.GroupNorm(num_groups=norm_num_groups, num_channels=in_channels, eps=1e-6, affine=True)\n        self.proj_in = nn.Linear(in_channels, inner_dim)\n\n        # 3. Define transformers blocks\n        self.transformer_blocks = nn.ModuleList(\n            [\n                BasicTransformerBlock(\n                    inner_dim,\n                    num_attention_heads,\n                    attention_head_dim,\n                    dropout=dropout,\n                    cross_attention_dim=cross_attention_dim,\n                    activation_fn=activation_fn,\n                    attention_bias=attention_bias,\n                    double_self_attention=double_self_attention,\n                    norm_elementwise_affine=norm_elementwise_affine,\n                )\n                for d in range(num_layers)\n            ]\n        )\n\n        self.proj_out = nn.Linear(inner_dim, in_channels)\n\n    def forward(\n        self,\n        hidden_states,\n        encoder_hidden_states=None,\n        timestep=None,\n        class_labels=None,\n        num_frames=1,\n        cross_attention_kwargs=None,\n        return_dict: bool = True,\n    ):\n        \"\"\"\n        Args:\n            hidden_states ( When discrete, `torch.LongTensor` of shape `(batch size, num latent pixels)`.\n                When continous, `torch.FloatTensor` of shape `(batch size, channel, height, width)`): Input\n                hidden_states\n            encoder_hidden_states ( `torch.LongTensor` of shape `(batch size, encoder_hidden_states dim)`, *optional*):\n                Conditional embeddings for cross attention layer. If not given, cross-attention defaults to\n                self-attention.\n            timestep ( `torch.long`, *optional*):\n                Optional timestep to be applied as an embedding in AdaLayerNorm's. Used to indicate denoising step.\n            class_labels ( `torch.LongTensor` of shape `(batch size, num classes)`, *optional*):\n                Optional class labels to be applied as an embedding in AdaLayerZeroNorm. 
Used to indicate class labels\n                conditioning.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~models.transformer_2d.TransformerTemporalModelOutput`] or `tuple`:\n            [`~models.transformer_2d.TransformerTemporalModelOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # 1. Input\n        batch_frames, channel, height, width = hidden_states.shape\n        batch_size = batch_frames // num_frames\n\n        residual = hidden_states\n\n        hidden_states = hidden_states[None, :].reshape(batch_size, num_frames, channel, height, width)\n        hidden_states = hidden_states.permute(0, 2, 1, 3, 4)\n\n        hidden_states = self.norm(hidden_states)\n        hidden_states = hidden_states.permute(0, 3, 4, 2, 1).reshape(batch_size * height * width, num_frames, channel)\n\n        hidden_states = self.proj_in(hidden_states)\n\n        # 2. Blocks\n        for block in self.transformer_blocks:\n            hidden_states = block(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                timestep=timestep,\n                cross_attention_kwargs=cross_attention_kwargs,\n                class_labels=class_labels,\n            )\n\n        # 3. Output\n        hidden_states = self.proj_out(hidden_states)\n        hidden_states = (\n            hidden_states[None, None, :]\n            .reshape(batch_size, height, width, channel, num_frames)\n            .permute(0, 3, 4, 1, 2)\n            .contiguous()\n        )\n        hidden_states = hidden_states.reshape(batch_frames, channel, height, width)\n\n        output = hidden_states + residual\n\n        if not return_dict:\n            return (output,)\n\n        return TransformerTemporalModelOutput(sample=output)\n"
  },
  {
    "path": "diffusers/models/unet_1d.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .embeddings import GaussianFourierProjection, TimestepEmbedding, Timesteps\nfrom .modeling_utils import ModelMixin\nfrom .unet_1d_blocks import get_down_block, get_mid_block, get_out_block, get_up_block\n\n\n@dataclass\nclass UNet1DOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size, num_channels, sample_size)`):\n            Hidden states output. Output of last layer of model.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass UNet1DModel(ModelMixin, ConfigMixin):\n    r\"\"\"\n    UNet1DModel is a 1D UNet model that takes in a noisy sample and a timestep and returns sample shaped output.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the model (such as downloading or saving, etc.)\n\n    Parameters:\n        sample_size (`int`, *optional*): Default length of sample. Should be adaptable at runtime.\n        in_channels (`int`, *optional*, defaults to 2): Number of channels in the input sample.\n        out_channels (`int`, *optional*, defaults to 2): Number of channels in the output.\n        extra_in_channels (`int`, *optional*, defaults to 0):\n            Number of additional channels to be added to the input of the first down block. 
Useful for cases where the\n            input data has more channels than what the model is initially designed for.\n        time_embedding_type (`str`, *optional*, defaults to `\"fourier\"`): Type of time embedding to use.\n        freq_shift (`float`, *optional*, defaults to 0.0): Frequency shift for fourier time embedding.\n        flip_sin_to_cos (`bool`, *optional*, defaults to :\n            obj:`False`): Whether to flip sin to cos for fourier time embedding.\n        down_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"DownBlock1D\", \"DownBlock1DNoSkip\", \"AttnDownBlock1D\")`): Tuple of downsample block types.\n        up_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"UpBlock1D\", \"UpBlock1DNoSkip\", \"AttnUpBlock1D\")`): Tuple of upsample block types.\n        block_out_channels (`Tuple[int]`, *optional*, defaults to :\n            obj:`(32, 32, 64)`): Tuple of block output channels.\n        mid_block_type (`str`, *optional*, defaults to \"UNetMidBlock1D\"): block type for middle of UNet.\n        out_block_type (`str`, *optional*, defaults to `None`): optional output processing of UNet.\n        act_fn (`str`, *optional*, defaults to None): optional activation function in UNet blocks.\n        norm_num_groups (`int`, *optional*, defaults to 8): group norm member count in UNet blocks.\n        layers_per_block (`int`, *optional*, defaults to 1): added number of layers in a UNet block.\n        downsample_each_block (`int`, *optional*, defaults to False:\n            experimental feature for using a UNet without upsampling.\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        sample_size: int = 65536,\n        sample_rate: Optional[int] = None,\n        in_channels: int = 2,\n        out_channels: int = 2,\n        extra_in_channels: int = 0,\n        time_embedding_type: str = \"fourier\",\n        flip_sin_to_cos: bool = True,\n        use_timestep_embedding: bool = False,\n        freq_shift: float = 0.0,\n        down_block_types: Tuple[str] = (\"DownBlock1DNoSkip\", \"DownBlock1D\", \"AttnDownBlock1D\"),\n        up_block_types: Tuple[str] = (\"AttnUpBlock1D\", \"UpBlock1D\", \"UpBlock1DNoSkip\"),\n        mid_block_type: Tuple[str] = \"UNetMidBlock1D\",\n        out_block_type: str = None,\n        block_out_channels: Tuple[int] = (32, 32, 64),\n        act_fn: str = None,\n        norm_num_groups: int = 8,\n        layers_per_block: int = 1,\n        downsample_each_block: bool = False,\n    ):\n        super().__init__()\n        self.sample_size = sample_size\n\n        # time\n        if time_embedding_type == \"fourier\":\n            self.time_proj = GaussianFourierProjection(\n                embedding_size=8, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos\n            )\n            timestep_input_dim = 2 * block_out_channels[0]\n        elif time_embedding_type == \"positional\":\n            self.time_proj = Timesteps(\n                block_out_channels[0], flip_sin_to_cos=flip_sin_to_cos, downscale_freq_shift=freq_shift\n            )\n            timestep_input_dim = block_out_channels[0]\n\n        if use_timestep_embedding:\n            time_embed_dim = block_out_channels[0] * 4\n            self.time_mlp = TimestepEmbedding(\n                in_channels=timestep_input_dim,\n                time_embed_dim=time_embed_dim,\n                act_fn=act_fn,\n                out_dim=block_out_channels[0],\n            )\n\n        self.down_blocks = 
nn.ModuleList([])\n        self.mid_block = None\n        self.up_blocks = nn.ModuleList([])\n        self.out_block = None\n\n        # down\n        output_channel = in_channels\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n\n            if i == 0:\n                input_channel += extra_in_channels\n\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=layers_per_block,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                temb_channels=block_out_channels[0],\n                add_downsample=not is_final_block or downsample_each_block,\n            )\n            self.down_blocks.append(down_block)\n\n        # mid\n        self.mid_block = get_mid_block(\n            mid_block_type,\n            in_channels=block_out_channels[-1],\n            mid_channels=block_out_channels[-1],\n            out_channels=block_out_channels[-1],\n            embed_dim=block_out_channels[0],\n            num_layers=layers_per_block,\n            add_downsample=downsample_each_block,\n        )\n\n        # up\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        output_channel = reversed_block_out_channels[0]\n        if out_block_type is None:\n            final_upsample_channels = out_channels\n        else:\n            final_upsample_channels = block_out_channels[0]\n\n        for i, up_block_type in enumerate(up_block_types):\n            prev_output_channel = output_channel\n            output_channel = (\n                reversed_block_out_channels[i + 1] if i < len(up_block_types) - 1 else final_upsample_channels\n            )\n\n            is_final_block = i == len(block_out_channels) - 1\n\n            up_block = get_up_block(\n                up_block_type,\n                num_layers=layers_per_block,\n                in_channels=prev_output_channel,\n                out_channels=output_channel,\n                temb_channels=block_out_channels[0],\n                add_upsample=not is_final_block,\n            )\n            self.up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        # out\n        num_groups_out = norm_num_groups if norm_num_groups is not None else min(block_out_channels[0] // 4, 32)\n        self.out_block = get_out_block(\n            out_block_type=out_block_type,\n            num_groups_out=num_groups_out,\n            embed_dim=block_out_channels[0],\n            out_channels=out_channels,\n            act_fn=act_fn,\n            fc_dim=block_out_channels[-1] // 4,\n        )\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        return_dict: bool = True,\n    ) -> Union[UNet1DOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): `(batch_size, num_channels, sample_size)` noisy inputs tensor\n            timestep (`torch.FloatTensor` or `float` or `int): (batch) timesteps\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~models.unet_1d.UNet1DOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~models.unet_1d.UNet1DOutput`] or `tuple`: [`~models.unet_1d.UNet1DOutput`] if `return_dict` is True,\n            otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n\n        # 1. time\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device)\n        elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(sample.device)\n\n        timestep_embed = self.time_proj(timesteps)\n        if self.config.use_timestep_embedding:\n            timestep_embed = self.time_mlp(timestep_embed)\n        else:\n            timestep_embed = timestep_embed[..., None]\n            timestep_embed = timestep_embed.repeat([1, 1, sample.shape[2]]).to(sample.dtype)\n            timestep_embed = timestep_embed.broadcast_to((sample.shape[:1] + timestep_embed.shape[1:]))\n\n        # 2. down\n        down_block_res_samples = ()\n        for downsample_block in self.down_blocks:\n            sample, res_samples = downsample_block(hidden_states=sample, temb=timestep_embed)\n            down_block_res_samples += res_samples\n\n        # 3. mid\n        if self.mid_block:\n            sample = self.mid_block(sample, timestep_embed)\n\n        # 4. up\n        for i, upsample_block in enumerate(self.up_blocks):\n            res_samples = down_block_res_samples[-1:]\n            down_block_res_samples = down_block_res_samples[:-1]\n            sample = upsample_block(sample, res_hidden_states_tuple=res_samples, temb=timestep_embed)\n\n        # 5. post-process\n        if self.out_block:\n            sample = self.out_block(sample, timestep_embed)\n\n        if not return_dict:\n            return (sample,)\n\n        return UNet1DOutput(sample=sample)\n"
  },
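A minimal usage sketch for `UNet1DModel` (illustrative only, not part of the file above; it assumes the package is importable as `diffusers`). With the default "fourier" time embedding, the first `DownBlock1DNoSkip` concatenates the 16-channel Fourier projection onto the input, so `extra_in_channels=16` is set here so the channel counts line up.

import torch

from diffusers import UNet1DModel

# Default-style config; extra_in_channels=16 accounts for the 2 * embedding_size = 16
# Fourier channels that DownBlock1DNoSkip concatenates onto the 2-channel input.
model = UNet1DModel(
    in_channels=2,
    out_channels=2,
    extra_in_channels=16,
    block_out_channels=(32, 32, 64),
    down_block_types=("DownBlock1DNoSkip", "DownBlock1D", "AttnDownBlock1D"),
    up_block_types=("AttnUpBlock1D", "UpBlock1D", "UpBlock1DNoSkip"),
)

sample = torch.randn(1, 2, 2048)   # (batch, channels, length); length is halved twice and doubled twice
timestep = torch.tensor(10)

with torch.no_grad():
    out = model(sample, timestep)

print(out.sample.shape)            # torch.Size([1, 2, 2048])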
  {
    "path": "diffusers/models/unet_1d_blocks.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\n\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom .resnet import Downsample1D, ResidualTemporalBlock1D, Upsample1D, rearrange_dims\n\n\nclass DownResnetBlock1D(nn.Module):\n    def __init__(\n        self,\n        in_channels,\n        out_channels=None,\n        num_layers=1,\n        conv_shortcut=False,\n        temb_channels=32,\n        groups=32,\n        groups_out=None,\n        non_linearity=None,\n        time_embedding_norm=\"default\",\n        output_scale_factor=1.0,\n        add_downsample=True,\n    ):\n        super().__init__()\n        self.in_channels = in_channels\n        out_channels = in_channels if out_channels is None else out_channels\n        self.out_channels = out_channels\n        self.use_conv_shortcut = conv_shortcut\n        self.time_embedding_norm = time_embedding_norm\n        self.add_downsample = add_downsample\n        self.output_scale_factor = output_scale_factor\n\n        if groups_out is None:\n            groups_out = groups\n\n        # there will always be at least one resnet\n        resnets = [ResidualTemporalBlock1D(in_channels, out_channels, embed_dim=temb_channels)]\n\n        for _ in range(num_layers):\n            resnets.append(ResidualTemporalBlock1D(out_channels, out_channels, embed_dim=temb_channels))\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if non_linearity == \"swish\":\n            self.nonlinearity = lambda x: F.silu(x)\n        elif non_linearity == \"mish\":\n            self.nonlinearity = nn.Mish()\n        elif non_linearity == \"silu\":\n            self.nonlinearity = nn.SiLU()\n        else:\n            self.nonlinearity = None\n\n        self.downsample = None\n        if add_downsample:\n            self.downsample = Downsample1D(out_channels, use_conv=True, padding=1)\n\n    def forward(self, hidden_states, temb=None):\n        output_states = ()\n\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for resnet in self.resnets[1:]:\n            hidden_states = resnet(hidden_states, temb)\n\n        output_states += (hidden_states,)\n\n        if self.nonlinearity is not None:\n            hidden_states = self.nonlinearity(hidden_states)\n\n        if self.downsample is not None:\n            hidden_states = self.downsample(hidden_states)\n\n        return hidden_states, output_states\n\n\nclass UpResnetBlock1D(nn.Module):\n    def __init__(\n        self,\n        in_channels,\n        out_channels=None,\n        num_layers=1,\n        temb_channels=32,\n        groups=32,\n        groups_out=None,\n        non_linearity=None,\n        time_embedding_norm=\"default\",\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        self.in_channels = in_channels\n        out_channels = in_channels if out_channels is None else out_channels\n        self.out_channels = 
out_channels\n        self.time_embedding_norm = time_embedding_norm\n        self.add_upsample = add_upsample\n        self.output_scale_factor = output_scale_factor\n\n        if groups_out is None:\n            groups_out = groups\n\n        # there will always be at least one resnet\n        resnets = [ResidualTemporalBlock1D(2 * in_channels, out_channels, embed_dim=temb_channels)]\n\n        for _ in range(num_layers):\n            resnets.append(ResidualTemporalBlock1D(out_channels, out_channels, embed_dim=temb_channels))\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if non_linearity == \"swish\":\n            self.nonlinearity = lambda x: F.silu(x)\n        elif non_linearity == \"mish\":\n            self.nonlinearity = nn.Mish()\n        elif non_linearity == \"silu\":\n            self.nonlinearity = nn.SiLU()\n        else:\n            self.nonlinearity = None\n\n        self.upsample = None\n        if add_upsample:\n            self.upsample = Upsample1D(out_channels, use_conv_transpose=True)\n\n    def forward(self, hidden_states, res_hidden_states_tuple=None, temb=None):\n        if res_hidden_states_tuple is not None:\n            res_hidden_states = res_hidden_states_tuple[-1]\n            hidden_states = torch.cat((hidden_states, res_hidden_states), dim=1)\n\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for resnet in self.resnets[1:]:\n            hidden_states = resnet(hidden_states, temb)\n\n        if self.nonlinearity is not None:\n            hidden_states = self.nonlinearity(hidden_states)\n\n        if self.upsample is not None:\n            hidden_states = self.upsample(hidden_states)\n\n        return hidden_states\n\n\nclass ValueFunctionMidBlock1D(nn.Module):\n    def __init__(self, in_channels, out_channels, embed_dim):\n        super().__init__()\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.embed_dim = embed_dim\n\n        self.res1 = ResidualTemporalBlock1D(in_channels, in_channels // 2, embed_dim=embed_dim)\n        self.down1 = Downsample1D(out_channels // 2, use_conv=True)\n        self.res2 = ResidualTemporalBlock1D(in_channels // 2, in_channels // 4, embed_dim=embed_dim)\n        self.down2 = Downsample1D(out_channels // 4, use_conv=True)\n\n    def forward(self, x, temb=None):\n        x = self.res1(x, temb)\n        x = self.down1(x)\n        x = self.res2(x, temb)\n        x = self.down2(x)\n        return x\n\n\nclass MidResTemporalBlock1D(nn.Module):\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        embed_dim,\n        num_layers: int = 1,\n        add_downsample: bool = False,\n        add_upsample: bool = False,\n        non_linearity=None,\n    ):\n        super().__init__()\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.add_downsample = add_downsample\n\n        # there will always be at least one resnet\n        resnets = [ResidualTemporalBlock1D(in_channels, out_channels, embed_dim=embed_dim)]\n\n        for _ in range(num_layers):\n            resnets.append(ResidualTemporalBlock1D(out_channels, out_channels, embed_dim=embed_dim))\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if non_linearity == \"swish\":\n            self.nonlinearity = lambda x: F.silu(x)\n        elif non_linearity == \"mish\":\n            self.nonlinearity = nn.Mish()\n        elif non_linearity == \"silu\":\n            self.nonlinearity = nn.SiLU()\n        else:\n            
self.nonlinearity = None\n\n        self.upsample = None\n        if add_upsample:\n            self.upsample = Upsample1D(out_channels, use_conv=True)\n\n        self.downsample = None\n        if add_downsample:\n            self.downsample = Downsample1D(out_channels, use_conv=True)\n\n        if self.upsample and self.downsample:\n            raise ValueError(\"Block cannot downsample and upsample\")\n\n    def forward(self, hidden_states, temb):\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for resnet in self.resnets[1:]:\n            hidden_states = resnet(hidden_states, temb)\n\n        if self.upsample:\n            hidden_states = self.upsample(hidden_states)\n        if self.downsample:\n            hidden_states = self.downsample(hidden_states)\n\n        return hidden_states\n\n\nclass OutConv1DBlock(nn.Module):\n    def __init__(self, num_groups_out, out_channels, embed_dim, act_fn):\n        super().__init__()\n        self.final_conv1d_1 = nn.Conv1d(embed_dim, embed_dim, 5, padding=2)\n        self.final_conv1d_gn = nn.GroupNorm(num_groups_out, embed_dim)\n        if act_fn == \"silu\":\n            self.final_conv1d_act = nn.SiLU()\n        if act_fn == \"mish\":\n            self.final_conv1d_act = nn.Mish()\n        self.final_conv1d_2 = nn.Conv1d(embed_dim, out_channels, 1)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = self.final_conv1d_1(hidden_states)\n        hidden_states = rearrange_dims(hidden_states)\n        hidden_states = self.final_conv1d_gn(hidden_states)\n        hidden_states = rearrange_dims(hidden_states)\n        hidden_states = self.final_conv1d_act(hidden_states)\n        hidden_states = self.final_conv1d_2(hidden_states)\n        return hidden_states\n\n\nclass OutValueFunctionBlock(nn.Module):\n    def __init__(self, fc_dim, embed_dim):\n        super().__init__()\n        self.final_block = nn.ModuleList(\n            [\n                nn.Linear(fc_dim + embed_dim, fc_dim // 2),\n                nn.Mish(),\n                nn.Linear(fc_dim // 2, 1),\n            ]\n        )\n\n    def forward(self, hidden_states, temb):\n        hidden_states = hidden_states.view(hidden_states.shape[0], -1)\n        hidden_states = torch.cat((hidden_states, temb), dim=-1)\n        for layer in self.final_block:\n            hidden_states = layer(hidden_states)\n\n        return hidden_states\n\n\n_kernels = {\n    \"linear\": [1 / 8, 3 / 8, 3 / 8, 1 / 8],\n    \"cubic\": [-0.01171875, -0.03515625, 0.11328125, 0.43359375, 0.43359375, 0.11328125, -0.03515625, -0.01171875],\n    \"lanczos3\": [\n        0.003689131001010537,\n        0.015056144446134567,\n        -0.03399861603975296,\n        -0.066637322306633,\n        0.13550527393817902,\n        0.44638532400131226,\n        0.44638532400131226,\n        0.13550527393817902,\n        -0.066637322306633,\n        -0.03399861603975296,\n        0.015056144446134567,\n        0.003689131001010537,\n    ],\n}\n\n\nclass Downsample1d(nn.Module):\n    def __init__(self, kernel=\"linear\", pad_mode=\"reflect\"):\n        super().__init__()\n        self.pad_mode = pad_mode\n        kernel_1d = torch.tensor(_kernels[kernel])\n        self.pad = kernel_1d.shape[0] // 2 - 1\n        self.register_buffer(\"kernel\", kernel_1d)\n\n    def forward(self, hidden_states):\n        hidden_states = F.pad(hidden_states, (self.pad,) * 2, self.pad_mode)\n        weight = hidden_states.new_zeros([hidden_states.shape[1], hidden_states.shape[1], self.kernel.shape[0]])\n      
  indices = torch.arange(hidden_states.shape[1], device=hidden_states.device)\n        weight[indices, indices] = self.kernel.to(weight)\n        return F.conv1d(hidden_states, weight, stride=2)\n\n\nclass Upsample1d(nn.Module):\n    def __init__(self, kernel=\"linear\", pad_mode=\"reflect\"):\n        super().__init__()\n        self.pad_mode = pad_mode\n        kernel_1d = torch.tensor(_kernels[kernel]) * 2\n        self.pad = kernel_1d.shape[0] // 2 - 1\n        self.register_buffer(\"kernel\", kernel_1d)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = F.pad(hidden_states, ((self.pad + 1) // 2,) * 2, self.pad_mode)\n        weight = hidden_states.new_zeros([hidden_states.shape[1], hidden_states.shape[1], self.kernel.shape[0]])\n        indices = torch.arange(hidden_states.shape[1], device=hidden_states.device)\n        weight[indices, indices] = self.kernel.to(weight)\n        return F.conv_transpose1d(hidden_states, weight, stride=2, padding=self.pad * 2 + 1)\n\n\nclass SelfAttention1d(nn.Module):\n    def __init__(self, in_channels, n_head=1, dropout_rate=0.0):\n        super().__init__()\n        self.channels = in_channels\n        self.group_norm = nn.GroupNorm(1, num_channels=in_channels)\n        self.num_heads = n_head\n\n        self.query = nn.Linear(self.channels, self.channels)\n        self.key = nn.Linear(self.channels, self.channels)\n        self.value = nn.Linear(self.channels, self.channels)\n\n        self.proj_attn = nn.Linear(self.channels, self.channels, bias=True)\n\n        self.dropout = nn.Dropout(dropout_rate, inplace=True)\n\n    def transpose_for_scores(self, projection: torch.Tensor) -> torch.Tensor:\n        new_projection_shape = projection.size()[:-1] + (self.num_heads, -1)\n        # move heads to 2nd position (B, T, H * D) -> (B, T, H, D) -> (B, H, T, D)\n        new_projection = projection.view(new_projection_shape).permute(0, 2, 1, 3)\n        return new_projection\n\n    def forward(self, hidden_states):\n        residual = hidden_states\n        batch, channel_dim, seq = hidden_states.shape\n\n        hidden_states = self.group_norm(hidden_states)\n        hidden_states = hidden_states.transpose(1, 2)\n\n        query_proj = self.query(hidden_states)\n        key_proj = self.key(hidden_states)\n        value_proj = self.value(hidden_states)\n\n        query_states = self.transpose_for_scores(query_proj)\n        key_states = self.transpose_for_scores(key_proj)\n        value_states = self.transpose_for_scores(value_proj)\n\n        scale = 1 / math.sqrt(math.sqrt(key_states.shape[-1]))\n\n        attention_scores = torch.matmul(query_states * scale, key_states.transpose(-1, -2) * scale)\n        attention_probs = torch.softmax(attention_scores, dim=-1)\n\n        # compute attention output\n        hidden_states = torch.matmul(attention_probs, value_states)\n\n        hidden_states = hidden_states.permute(0, 2, 1, 3).contiguous()\n        new_hidden_states_shape = hidden_states.size()[:-2] + (self.channels,)\n        hidden_states = hidden_states.view(new_hidden_states_shape)\n\n        # compute next hidden_states\n        hidden_states = self.proj_attn(hidden_states)\n        hidden_states = hidden_states.transpose(1, 2)\n        hidden_states = self.dropout(hidden_states)\n\n        output = hidden_states + residual\n\n        return output\n\n\nclass ResConvBlock(nn.Module):\n    def __init__(self, in_channels, mid_channels, out_channels, is_last=False):\n        super().__init__()\n        self.is_last = is_last\n  
      self.has_conv_skip = in_channels != out_channels\n\n        if self.has_conv_skip:\n            self.conv_skip = nn.Conv1d(in_channels, out_channels, 1, bias=False)\n\n        self.conv_1 = nn.Conv1d(in_channels, mid_channels, 5, padding=2)\n        self.group_norm_1 = nn.GroupNorm(1, mid_channels)\n        self.gelu_1 = nn.GELU()\n        self.conv_2 = nn.Conv1d(mid_channels, out_channels, 5, padding=2)\n\n        if not self.is_last:\n            self.group_norm_2 = nn.GroupNorm(1, out_channels)\n            self.gelu_2 = nn.GELU()\n\n    def forward(self, hidden_states):\n        residual = self.conv_skip(hidden_states) if self.has_conv_skip else hidden_states\n\n        hidden_states = self.conv_1(hidden_states)\n        hidden_states = self.group_norm_1(hidden_states)\n        hidden_states = self.gelu_1(hidden_states)\n        hidden_states = self.conv_2(hidden_states)\n\n        if not self.is_last:\n            hidden_states = self.group_norm_2(hidden_states)\n            hidden_states = self.gelu_2(hidden_states)\n\n        output = hidden_states + residual\n        return output\n\n\nclass UNetMidBlock1D(nn.Module):\n    def __init__(self, mid_channels, in_channels, out_channels=None):\n        super().__init__()\n\n        out_channels = in_channels if out_channels is None else out_channels\n\n        # there is always at least one resnet\n        self.down = Downsample1d(\"cubic\")\n        resnets = [\n            ResConvBlock(in_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels),\n        ]\n        attentions = [\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(out_channels, out_channels // 32),\n        ]\n        self.up = Upsample1d(kernel=\"cubic\")\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = self.down(hidden_states)\n        for attn, resnet in zip(self.attentions, self.resnets):\n            hidden_states = resnet(hidden_states)\n            hidden_states = attn(hidden_states)\n\n        hidden_states = self.up(hidden_states)\n\n        return hidden_states\n\n\nclass AttnDownBlock1D(nn.Module):\n    def __init__(self, out_channels, in_channels, mid_channels=None):\n        super().__init__()\n        mid_channels = out_channels if mid_channels is None else mid_channels\n\n        self.down = Downsample1d(\"cubic\")\n        resnets = [\n            ResConvBlock(in_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels),\n        ]\n        attentions = [\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(out_channels, out_channels // 32),\n        ]\n\n        self.attentions = 
nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = self.down(hidden_states)\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states)\n            hidden_states = attn(hidden_states)\n\n        return hidden_states, (hidden_states,)\n\n\nclass DownBlock1D(nn.Module):\n    def __init__(self, out_channels, in_channels, mid_channels=None):\n        super().__init__()\n        mid_channels = out_channels if mid_channels is None else mid_channels\n\n        self.down = Downsample1d(\"cubic\")\n        resnets = [\n            ResConvBlock(in_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels),\n        ]\n\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = self.down(hidden_states)\n\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states)\n\n        return hidden_states, (hidden_states,)\n\n\nclass DownBlock1DNoSkip(nn.Module):\n    def __init__(self, out_channels, in_channels, mid_channels=None):\n        super().__init__()\n        mid_channels = out_channels if mid_channels is None else mid_channels\n\n        resnets = [\n            ResConvBlock(in_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels),\n        ]\n\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = torch.cat([hidden_states, temb], dim=1)\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states)\n\n        return hidden_states, (hidden_states,)\n\n\nclass AttnUpBlock1D(nn.Module):\n    def __init__(self, in_channels, out_channels, mid_channels=None):\n        super().__init__()\n        mid_channels = out_channels if mid_channels is None else mid_channels\n\n        resnets = [\n            ResConvBlock(2 * in_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels),\n        ]\n        attentions = [\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(mid_channels, mid_channels // 32),\n            SelfAttention1d(out_channels, out_channels // 32),\n        ]\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n        self.up = Upsample1d(kernel=\"cubic\")\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None):\n        res_hidden_states = res_hidden_states_tuple[-1]\n        hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states)\n            hidden_states = attn(hidden_states)\n\n        hidden_states = self.up(hidden_states)\n\n        return hidden_states\n\n\nclass UpBlock1D(nn.Module):\n    def __init__(self, in_channels, out_channels, mid_channels=None):\n        super().__init__()\n        mid_channels = in_channels if mid_channels is None else mid_channels\n\n        resnets = [\n            ResConvBlock(2 * in_channels, mid_channels, mid_channels),\n        
    ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels),\n        ]\n\n        self.resnets = nn.ModuleList(resnets)\n        self.up = Upsample1d(kernel=\"cubic\")\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None):\n        res_hidden_states = res_hidden_states_tuple[-1]\n        hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states)\n\n        hidden_states = self.up(hidden_states)\n\n        return hidden_states\n\n\nclass UpBlock1DNoSkip(nn.Module):\n    def __init__(self, in_channels, out_channels, mid_channels=None):\n        super().__init__()\n        mid_channels = in_channels if mid_channels is None else mid_channels\n\n        resnets = [\n            ResConvBlock(2 * in_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, mid_channels),\n            ResConvBlock(mid_channels, mid_channels, out_channels, is_last=True),\n        ]\n\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None):\n        res_hidden_states = res_hidden_states_tuple[-1]\n        hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states)\n\n        return hidden_states\n\n\ndef get_down_block(down_block_type, num_layers, in_channels, out_channels, temb_channels, add_downsample):\n    if down_block_type == \"DownResnetBlock1D\":\n        return DownResnetBlock1D(\n            in_channels=in_channels,\n            num_layers=num_layers,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n        )\n    elif down_block_type == \"DownBlock1D\":\n        return DownBlock1D(out_channels=out_channels, in_channels=in_channels)\n    elif down_block_type == \"AttnDownBlock1D\":\n        return AttnDownBlock1D(out_channels=out_channels, in_channels=in_channels)\n    elif down_block_type == \"DownBlock1DNoSkip\":\n        return DownBlock1DNoSkip(out_channels=out_channels, in_channels=in_channels)\n    raise ValueError(f\"{down_block_type} does not exist.\")\n\n\ndef get_up_block(up_block_type, num_layers, in_channels, out_channels, temb_channels, add_upsample):\n    if up_block_type == \"UpResnetBlock1D\":\n        return UpResnetBlock1D(\n            in_channels=in_channels,\n            num_layers=num_layers,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n        )\n    elif up_block_type == \"UpBlock1D\":\n        return UpBlock1D(in_channels=in_channels, out_channels=out_channels)\n    elif up_block_type == \"AttnUpBlock1D\":\n        return AttnUpBlock1D(in_channels=in_channels, out_channels=out_channels)\n    elif up_block_type == \"UpBlock1DNoSkip\":\n        return UpBlock1DNoSkip(in_channels=in_channels, out_channels=out_channels)\n    raise ValueError(f\"{up_block_type} does not exist.\")\n\n\ndef get_mid_block(mid_block_type, num_layers, in_channels, mid_channels, out_channels, embed_dim, add_downsample):\n    if mid_block_type == \"MidResTemporalBlock1D\":\n        return MidResTemporalBlock1D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            embed_dim=embed_dim,\n            
add_downsample=add_downsample,\n        )\n    elif mid_block_type == \"ValueFunctionMidBlock1D\":\n        return ValueFunctionMidBlock1D(in_channels=in_channels, out_channels=out_channels, embed_dim=embed_dim)\n    elif mid_block_type == \"UNetMidBlock1D\":\n        return UNetMidBlock1D(in_channels=in_channels, mid_channels=mid_channels, out_channels=out_channels)\n    raise ValueError(f\"{mid_block_type} does not exist.\")\n\n\ndef get_out_block(*, out_block_type, num_groups_out, embed_dim, out_channels, act_fn, fc_dim):\n    if out_block_type == \"OutConv1DBlock\":\n        return OutConv1DBlock(num_groups_out, out_channels, embed_dim, act_fn)\n    elif out_block_type == \"ValueFunction\":\n        return OutValueFunctionBlock(fc_dim, embed_dim)\n    return None\n"
  },
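For orientation, a small shape check (illustrative, not taken from the file above) of the FIR-style resamplers and the self-attention block defined in unet_1d_blocks.py; the import path assumes the layout shown in this repo.

import torch

from diffusers.models.unet_1d_blocks import Downsample1d, SelfAttention1d, Upsample1d

x = torch.randn(2, 32, 256)          # (batch, channels, length)

down = Downsample1d(kernel="cubic")  # 8-tap kernel applied per channel, stride 2
up = Upsample1d(kernel="cubic")      # matching transposed convolution, stride 2
attn = SelfAttention1d(in_channels=32, n_head=1)

with torch.no_grad():
    y = down(x)
    print(y.shape)                   # torch.Size([2, 32, 128])
    print(up(y).shape)               # torch.Size([2, 32, 256])
    print(attn(x).shape)             # torch.Size([2, 32, 256]); residual is added internally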
  {
    "path": "diffusers/models/unet_2d.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .embeddings import GaussianFourierProjection, TimestepEmbedding, Timesteps\nfrom .modeling_utils import ModelMixin\nfrom .unet_2d_blocks import UNetMidBlock2D, get_down_block, get_up_block\n\n\n@dataclass\nclass UNet2DOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):\n            Hidden states output. Output of last layer of model.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass UNet2DModel(ModelMixin, ConfigMixin):\n    r\"\"\"\n    UNet2DModel is a 2D UNet model that takes in a noisy sample and a timestep and returns sample shaped output.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the model (such as downloading or saving, etc.)\n\n    Parameters:\n        sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`):\n            Height and width of input/output sample. Dimensions must be a multiple of `2 ** (len(block_out_channels) -\n            1)`.\n        in_channels (`int`, *optional*, defaults to 3): Number of channels in the input image.\n        out_channels (`int`, *optional*, defaults to 3): Number of channels in the output.\n        center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample.\n        time_embedding_type (`str`, *optional*, defaults to `\"positional\"`): Type of time embedding to use.\n        freq_shift (`int`, *optional*, defaults to 0): Frequency shift for fourier time embedding.\n        flip_sin_to_cos (`bool`, *optional*, defaults to :\n            obj:`True`): Whether to flip sin to cos for fourier time embedding.\n        down_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"DownBlock2D\", \"AttnDownBlock2D\", \"AttnDownBlock2D\", \"AttnDownBlock2D\")`): Tuple of downsample block\n            types.\n        mid_block_type (`str`, *optional*, defaults to `\"UNetMidBlock2D\"`):\n            The mid block type. 
Choose from `UNetMidBlock2D` or `UnCLIPUNetMidBlock2D`.\n        up_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"AttnUpBlock2D\", \"AttnUpBlock2D\", \"AttnUpBlock2D\", \"UpBlock2D\")`): Tuple of upsample block types.\n        block_out_channels (`Tuple[int]`, *optional*, defaults to :\n            obj:`(224, 448, 672, 896)`): Tuple of block output channels.\n        layers_per_block (`int`, *optional*, defaults to `2`): The number of layers per block.\n        mid_block_scale_factor (`float`, *optional*, defaults to `1`): The scale factor for the mid block.\n        downsample_padding (`int`, *optional*, defaults to `1`): The padding for the downsample convolution.\n        act_fn (`str`, *optional*, defaults to `\"silu\"`): The activation function to use.\n        attention_head_dim (`int`, *optional*, defaults to `8`): The attention head dimension.\n        norm_num_groups (`int`, *optional*, defaults to `32`): The number of groups for the normalization.\n        norm_eps (`float`, *optional*, defaults to `1e-5`): The epsilon for the normalization.\n        resnet_time_scale_shift (`str`, *optional*, defaults to `\"default\"`): Time scale shift config\n            for resnet blocks, see [`~models.resnet.ResnetBlock2D`]. Choose from `default` or `scale_shift`.\n        class_embed_type (`str`, *optional*, defaults to None):\n            The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`,\n            `\"timestep\"`, or `\"identity\"`.\n        num_class_embeds (`int`, *optional*, defaults to None):\n            Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing\n            class conditioning with `class_embed_type` equal to `None`.\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        sample_size: Optional[Union[int, Tuple[int, int]]] = None,\n        in_channels: int = 3,\n        out_channels: int = 3,\n        center_input_sample: bool = False,\n        time_embedding_type: str = \"positional\",\n        freq_shift: int = 0,\n        flip_sin_to_cos: bool = True,\n        down_block_types: Tuple[str] = (\"DownBlock2D\", \"AttnDownBlock2D\", \"AttnDownBlock2D\", \"AttnDownBlock2D\"),\n        up_block_types: Tuple[str] = (\"AttnUpBlock2D\", \"AttnUpBlock2D\", \"AttnUpBlock2D\", \"UpBlock2D\"),\n        block_out_channels: Tuple[int] = (224, 448, 672, 896),\n        layers_per_block: int = 2,\n        mid_block_scale_factor: float = 1,\n        downsample_padding: int = 1,\n        act_fn: str = \"silu\",\n        attention_head_dim: Optional[int] = 8,\n        norm_num_groups: int = 32,\n        norm_eps: float = 1e-5,\n        resnet_time_scale_shift: str = \"default\",\n        add_attention: bool = True,\n        class_embed_type: Optional[str] = None,\n        num_class_embeds: Optional[int] = None,\n    ):\n        super().__init__()\n\n        self.sample_size = sample_size\n        time_embed_dim = block_out_channels[0] * 4\n\n        # Check inputs\n        if len(down_block_types) != len(up_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}.\"\n            )\n\n        if len(block_out_channels) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `block_out_channels` as `down_block_types`. 
`block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}.\"\n            )\n\n        # input\n        self.conv_in = nn.Conv2d(in_channels, block_out_channels[0], kernel_size=3, padding=(1, 1))\n\n        # time\n        if time_embedding_type == \"fourier\":\n            self.time_proj = GaussianFourierProjection(embedding_size=block_out_channels[0], scale=16)\n            timestep_input_dim = 2 * block_out_channels[0]\n        elif time_embedding_type == \"positional\":\n            self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift)\n            timestep_input_dim = block_out_channels[0]\n\n        self.time_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim)\n\n        # class embedding\n        if class_embed_type is None and num_class_embeds is not None:\n            self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim)\n        elif class_embed_type == \"timestep\":\n            self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim)\n        elif class_embed_type == \"identity\":\n            self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim)\n        else:\n            self.class_embedding = None\n\n        self.down_blocks = nn.ModuleList([])\n        self.mid_block = None\n        self.up_blocks = nn.ModuleList([])\n\n        # down\n        output_channel = block_out_channels[0]\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=layers_per_block,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                temb_channels=time_embed_dim,\n                add_downsample=not is_final_block,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                attn_num_head_channels=attention_head_dim,\n                downsample_padding=downsample_padding,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n            )\n            self.down_blocks.append(down_block)\n\n        # mid\n        self.mid_block = UNetMidBlock2D(\n            in_channels=block_out_channels[-1],\n            temb_channels=time_embed_dim,\n            resnet_eps=norm_eps,\n            resnet_act_fn=act_fn,\n            output_scale_factor=mid_block_scale_factor,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n            attn_num_head_channels=attention_head_dim,\n            resnet_groups=norm_num_groups,\n            add_attention=add_attention,\n        )\n\n        # up\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        output_channel = reversed_block_out_channels[0]\n        for i, up_block_type in enumerate(up_block_types):\n            prev_output_channel = output_channel\n            output_channel = reversed_block_out_channels[i]\n            input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)]\n\n            is_final_block = i == len(block_out_channels) - 1\n\n            up_block = get_up_block(\n                up_block_type,\n                num_layers=layers_per_block + 1,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                
prev_output_channel=prev_output_channel,\n                temb_channels=time_embed_dim,\n                add_upsample=not is_final_block,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                attn_num_head_channels=attention_head_dim,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n            )\n            self.up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        # out\n        num_groups_out = norm_num_groups if norm_num_groups is not None else min(block_out_channels[0] // 4, 32)\n        self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=num_groups_out, eps=norm_eps)\n        self.conv_act = nn.SiLU()\n        self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, kernel_size=3, padding=1)\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        class_labels: Optional[torch.Tensor] = None,\n        return_dict: bool = True,\n    ) -> Union[UNet2DOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor\n            timestep (`torch.FloatTensor` or `float` or `int): (batch) timesteps\n            class_labels (`torch.FloatTensor`, *optional*, defaults to `None`):\n                Optional class labels for conditioning. Their embeddings will be summed with the timestep embeddings.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~models.unet_2d.UNet2DOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~models.unet_2d.UNet2DOutput`] or `tuple`: [`~models.unet_2d.UNet2DOutput`] if `return_dict` is True,\n            otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # 0. center input if necessary\n        if self.config.center_input_sample:\n            sample = 2 * sample - 1.0\n\n        # 1. time\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            timesteps = torch.tensor([timesteps], dtype=torch.long, device=sample.device)\n        elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(sample.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        timesteps = timesteps * torch.ones(sample.shape[0], dtype=timesteps.dtype, device=timesteps.device)\n\n        t_emb = self.time_proj(timesteps)\n\n        # timesteps does not contain any weights and will always return f32 tensors\n        # but time_embedding might actually be running in fp16. so we need to cast here.\n        # there might be better ways to encapsulate this.\n        t_emb = t_emb.to(dtype=self.dtype)\n        emb = self.time_embedding(t_emb)\n\n        if self.class_embedding is not None:\n            if class_labels is None:\n                raise ValueError(\"class_labels should be provided when doing class conditioning\")\n\n            if self.config.class_embed_type == \"timestep\":\n                class_labels = self.time_proj(class_labels)\n\n            class_emb = self.class_embedding(class_labels).to(dtype=self.dtype)\n            emb = emb + class_emb\n\n        # 2. pre-process\n        skip_sample = sample\n        sample = self.conv_in(sample)\n\n        # 3. 
down\n        down_block_res_samples = (sample,)\n        for downsample_block in self.down_blocks:\n            if hasattr(downsample_block, \"skip_conv\"):\n                sample, res_samples, skip_sample = downsample_block(\n                    hidden_states=sample, temb=emb, skip_sample=skip_sample\n                )\n            else:\n                sample, res_samples = downsample_block(hidden_states=sample, temb=emb)\n\n            down_block_res_samples += res_samples\n\n        # 4. mid\n        sample = self.mid_block(sample, emb)\n\n        # 5. up\n        skip_sample = None\n        for upsample_block in self.up_blocks:\n            res_samples = down_block_res_samples[-len(upsample_block.resnets) :]\n            down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)]\n\n            if hasattr(upsample_block, \"skip_conv\"):\n                sample, skip_sample = upsample_block(sample, res_samples, emb, skip_sample)\n            else:\n                sample = upsample_block(sample, res_samples, emb)\n\n        # 6. post-process\n        sample = self.conv_norm_out(sample)\n        sample = self.conv_act(sample)\n        sample = self.conv_out(sample)\n\n        if skip_sample is not None:\n            sample += skip_sample\n\n        if self.config.time_embedding_type == \"fourier\":\n            timesteps = timesteps.reshape((sample.shape[0], *([1] * len(sample.shape[1:]))))\n            sample = sample / timesteps\n\n        if not return_dict:\n            return (sample,)\n\n        return UNet2DOutput(sample=sample)\n"
  },
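A minimal sketch of a class-conditional forward pass through `UNet2DModel` (illustrative values, not taken from the file above). Per the docstring, the spatial size must be divisible by `2 ** (len(block_out_channels) - 1)`.

import torch

from diffusers import UNet2DModel

model = UNet2DModel(
    sample_size=32,
    in_channels=3,
    out_channels=3,
    block_out_channels=(32, 64, 64, 64),
    num_class_embeds=10,   # class conditioning via nn.Embedding, summed with the time embedding
)

sample = torch.randn(4, 3, 32, 32)
timesteps = torch.randint(0, 1000, (4,))
class_labels = torch.randint(0, 10, (4,))

with torch.no_grad():
    out = model(sample, timesteps, class_labels=class_labels)

print(out.sample.shape)    # torch.Size([4, 3, 32, 32])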
  {
    "path": "diffusers/models/unet_2d_blocks.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Optional\n\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom ..utils import is_torch_version\nfrom .attention import AdaGroupNorm\nfrom .attention_processor import Attention, AttnAddedKVProcessor, AttnAddedKVProcessor2_0\nfrom .dual_transformer_2d import DualTransformer2DModel\nfrom .resnet import Downsample2D, FirDownsample2D, FirUpsample2D, KDownsample2D, KUpsample2D, ResnetBlock2D, Upsample2D\nfrom .transformer_2d import Transformer2DModel\n\n\ndef get_down_block(\n    down_block_type,\n    num_layers,\n    in_channels,\n    out_channels,\n    temb_channels,\n    add_downsample,\n    resnet_eps,\n    resnet_act_fn,\n    attn_num_head_channels,\n    resnet_groups=None,\n    cross_attention_dim=None,\n    downsample_padding=None,\n    dual_cross_attention=False,\n    use_linear_projection=False,\n    only_cross_attention=False,\n    upcast_attention=False,\n    resnet_time_scale_shift=\"default\",\n    resnet_skip_time_act=False,\n    resnet_out_scale_factor=1.0,\n    cross_attention_norm=None,\n):\n    down_block_type = down_block_type[7:] if down_block_type.startswith(\"UNetRes\") else down_block_type\n    if down_block_type == \"DownBlock2D\":\n        return DownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"ResnetDownsampleBlock2D\":\n        return ResnetDownsampleBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n            skip_time_act=resnet_skip_time_act,\n            output_scale_factor=resnet_out_scale_factor,\n        )\n    elif down_block_type == \"AttnDownBlock2D\":\n        return AttnDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif 
down_block_type == \"CrossAttnDownBlock2D\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for CrossAttnDownBlock2D\")\n        return CrossAttnDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            dual_cross_attention=dual_cross_attention,\n            use_linear_projection=use_linear_projection,\n            only_cross_attention=only_cross_attention,\n            upcast_attention=upcast_attention,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"SimpleCrossAttnDownBlock2D\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for SimpleCrossAttnDownBlock2D\")\n        return SimpleCrossAttnDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n            skip_time_act=resnet_skip_time_act,\n            output_scale_factor=resnet_out_scale_factor,\n            only_cross_attention=only_cross_attention,\n            cross_attention_norm=cross_attention_norm,\n        )\n    elif down_block_type == \"SkipDownBlock2D\":\n        return SkipDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            downsample_padding=downsample_padding,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"AttnSkipDownBlock2D\":\n        return AttnSkipDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            downsample_padding=downsample_padding,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"DownEncoderBlock2D\":\n        return DownEncoderBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == 
\"AttnDownEncoderBlock2D\":\n        return AttnDownEncoderBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"KDownBlock2D\":\n        return KDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n        )\n    elif down_block_type == \"KCrossAttnDownBlock2D\":\n        return KCrossAttnDownBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            add_self_attention=True if not add_downsample else False,\n        )\n    raise ValueError(f\"{down_block_type} does not exist.\")\n\n\ndef get_up_block(\n    up_block_type,\n    num_layers,\n    in_channels,\n    out_channels,\n    prev_output_channel,\n    temb_channels,\n    add_upsample,\n    resnet_eps,\n    resnet_act_fn,\n    attn_num_head_channels,\n    resnet_groups=None,\n    cross_attention_dim=None,\n    dual_cross_attention=False,\n    use_linear_projection=False,\n    only_cross_attention=False,\n    upcast_attention=False,\n    resnet_time_scale_shift=\"default\",\n    resnet_skip_time_act=False,\n    resnet_out_scale_factor=1.0,\n    cross_attention_norm=None,\n):\n    up_block_type = up_block_type[7:] if up_block_type.startswith(\"UNetRes\") else up_block_type\n    if up_block_type == \"UpBlock2D\":\n        return UpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"ResnetUpsampleBlock2D\":\n        return ResnetUpsampleBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n            skip_time_act=resnet_skip_time_act,\n            output_scale_factor=resnet_out_scale_factor,\n        )\n    elif up_block_type == \"CrossAttnUpBlock2D\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for CrossAttnUpBlock2D\")\n        return 
CrossAttnUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            dual_cross_attention=dual_cross_attention,\n            use_linear_projection=use_linear_projection,\n            only_cross_attention=only_cross_attention,\n            upcast_attention=upcast_attention,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"SimpleCrossAttnUpBlock2D\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for SimpleCrossAttnUpBlock2D\")\n        return SimpleCrossAttnUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n            skip_time_act=resnet_skip_time_act,\n            output_scale_factor=resnet_out_scale_factor,\n            only_cross_attention=only_cross_attention,\n            cross_attention_norm=cross_attention_norm,\n        )\n    elif up_block_type == \"AttnUpBlock2D\":\n        return AttnUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"SkipUpBlock2D\":\n        return SkipUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"AttnSkipUpBlock2D\":\n        return AttnSkipUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"UpDecoderBlock2D\":\n        return UpDecoderBlock2D(\n            num_layers=num_layers,\n            
in_channels=in_channels,\n            out_channels=out_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"AttnUpDecoderBlock2D\":\n        return AttnUpDecoderBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            attn_num_head_channels=attn_num_head_channels,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"KUpBlock2D\":\n        return KUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n        )\n    elif up_block_type == \"KCrossAttnUpBlock2D\":\n        return KCrossAttnUpBlock2D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n        )\n\n    raise ValueError(f\"{up_block_type} does not exist.\")\n\n\nclass UNetMidBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        add_attention: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n    ):\n        super().__init__()\n        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)\n        self.add_attention = add_attention\n\n        # there is always at least one resnet\n        resnets = [\n            ResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=resnet_groups,\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n            )\n        ]\n        attentions = []\n\n        for _ in range(num_layers):\n            if self.add_attention:\n                attentions.append(\n                    Attention(\n                        in_channels,\n                        heads=in_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                        dim_head=attn_num_head_channels if attn_num_head_channels is not None else in_channels,\n                        rescale_output_factor=output_scale_factor,\n                        eps=resnet_eps,\n                        norm_num_groups=resnet_groups,\n       
                 residual_connection=True,\n                        bias=True,\n                        upcast_softmax=True,\n                        _from_deprecated_attn_block=True,\n                    )\n                )\n            else:\n                attentions.append(None)\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=in_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(self, hidden_states, temb=None):\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            if attn is not None:\n                hidden_states = attn(hidden_states)\n            hidden_states = resnet(hidden_states, temb)\n\n        return hidden_states\n\n\nclass UNetMidBlock2DCrossAttn(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        cross_attention_dim=1280,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)\n\n        # there is always at least one resnet\n        resnets = [\n            ResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=resnet_groups,\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n            )\n        ]\n        attentions = []\n\n        for _ in range(num_layers):\n            if not dual_cross_attention:\n                attentions.append(\n                    Transformer2DModel(\n                        attn_num_head_channels,\n                        in_channels // attn_num_head_channels,\n                        in_channels=in_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                        use_linear_projection=use_linear_projection,\n                        upcast_attention=upcast_attention,\n                    )\n                )\n            else:\n                attentions.append(\n                    DualTransformer2DModel(\n                       
 attn_num_head_channels,\n                        in_channels // attn_num_head_channels,\n                        in_channels=in_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                    )\n                )\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=in_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            hidden_states = attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                cross_attention_kwargs=cross_attention_kwargs,\n                return_dict=False,\n            )[0]\n            hidden_states = resnet(hidden_states, temb)\n\n        return hidden_states\n\n\nclass UNetMidBlock2DSimpleCrossAttn(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        cross_attention_dim=1280,\n        skip_time_act=False,\n        only_cross_attention=False,\n        cross_attention_norm=None,\n    ):\n        super().__init__()\n\n        self.has_cross_attention = True\n\n        self.attn_num_head_channels = attn_num_head_channels\n        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)\n\n        self.num_heads = in_channels // self.attn_num_head_channels\n\n        # there is always at least one resnet\n        resnets = [\n            ResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=resnet_groups,\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n                skip_time_act=skip_time_act,\n            )\n        ]\n        attentions = []\n\n        for _ in range(num_layers):\n            processor = (\n                AttnAddedKVProcessor2_0() if hasattr(F, \"scaled_dot_product_attention\") else AttnAddedKVProcessor()\n            )\n\n            attentions.append(\n                Attention(\n                    query_dim=in_channels,\n                    
cross_attention_dim=in_channels,\n                    heads=self.num_heads,\n                    dim_head=attn_num_head_channels,\n                    added_kv_proj_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    bias=True,\n                    upcast_softmax=True,\n                    only_cross_attention=only_cross_attention,\n                    cross_attention_norm=cross_attention_norm,\n                    processor=processor,\n                )\n            )\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=in_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                    skip_time_act=skip_time_act,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            # attn\n            hidden_states = attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=attention_mask,\n                **cross_attention_kwargs,\n            )\n\n            # resnet\n            hidden_states = resnet(hidden_states, temb)\n\n        return hidden_states\n\n\nclass AttnDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        downsample_padding=1,\n        add_downsample=True,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            attentions.append(\n                Attention(\n                    out_channels,\n                    heads=out_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                    
dim_head=attn_num_head_channels if attn_num_head_channels is not None else out_channels,\n                    rescale_output_factor=output_scale_factor,\n                    eps=resnet_eps,\n                    norm_num_groups=resnet_groups,\n                    residual_connection=True,\n                    bias=True,\n                    upcast_softmax=True,\n                    _from_deprecated_attn_block=True,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n    def forward(self, hidden_states, temb=None, upsample_size=None):\n        output_states = ()\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = attn(hidden_states)\n            output_states += (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass CrossAttnDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        downsample_padding=1,\n        add_downsample=True,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        only_cross_attention=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            if not dual_cross_attention:\n                attentions.append(\n                    Transformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n         
               use_linear_projection=use_linear_projection,\n                        only_cross_attention=only_cross_attention,\n                        upcast_attention=upcast_attention,\n                    )\n                )\n            else:\n                attentions.append(\n                    DualTransformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                    )\n                )\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        # TODO(Patrick, William) - attention mask is not used\n        output_states = ()\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                        use_reentrant=False,\n                    )[0]\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                    )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n            output_states = output_states + (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in 
self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states = output_states + (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass DownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_downsample=True,\n        downsample_padding=1,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, temb=None):\n        output_states = ()\n\n        for resnet in self.resnets:\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n            output_states = output_states + (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states = output_states + (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass DownEncoderBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        
output_scale_factor=1.0,\n        add_downsample=True,\n        downsample_padding=1,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=None,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n    def forward(self, hidden_states):\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states, temb=None)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n        return hidden_states\n\n\nclass AttnDownEncoderBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        add_downsample=True,\n        downsample_padding=1,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=None,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            attentions.append(\n                Attention(\n                    out_channels,\n                    heads=out_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                    dim_head=attn_num_head_channels if attn_num_head_channels is not None else out_channels,\n                    rescale_output_factor=output_scale_factor,\n                    eps=resnet_eps,\n                    norm_num_groups=resnet_groups,\n                    residual_connection=True,\n                    bias=True,\n                    upcast_softmax=True,\n                    _from_deprecated_attn_block=True,\n                )\n            )\n\n        
self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n    def forward(self, hidden_states):\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states, temb=None)\n            hidden_states = attn(hidden_states)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n        return hidden_states\n\n\nclass AttnSkipDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=np.sqrt(2.0),\n        downsample_padding=1,\n        add_downsample=True,\n    ):\n        super().__init__()\n        self.attentions = nn.ModuleList([])\n        self.resnets = nn.ModuleList([])\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            self.resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=min(in_channels // 4, 32),\n                    groups_out=min(out_channels // 4, 32),\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            self.attentions.append(\n                Attention(\n                    out_channels,\n                    heads=out_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                    dim_head=attn_num_head_channels if attn_num_head_channels is not None else out_channels,\n                    rescale_output_factor=output_scale_factor,\n                    eps=resnet_eps,\n                    norm_num_groups=32,\n                    residual_connection=True,\n                    bias=True,\n                    upcast_softmax=True,\n                    _from_deprecated_attn_block=True,\n                )\n            )\n\n        if add_downsample:\n            self.resnet_down = ResnetBlock2D(\n                in_channels=out_channels,\n                out_channels=out_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=min(out_channels // 4, 32),\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n                use_in_shortcut=True,\n                
down=True,\n                kernel=\"fir\",\n            )\n            self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)])\n            self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1))\n        else:\n            self.resnet_down = None\n            self.downsamplers = None\n            self.skip_conv = None\n\n    def forward(self, hidden_states, temb=None, skip_sample=None):\n        output_states = ()\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = attn(hidden_states)\n            output_states += (hidden_states,)\n\n        if self.downsamplers is not None:\n            hidden_states = self.resnet_down(hidden_states, temb)\n            for downsampler in self.downsamplers:\n                skip_sample = downsampler(skip_sample)\n\n            hidden_states = self.skip_conv(skip_sample) + hidden_states\n\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states, skip_sample\n\n\nclass SkipDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_pre_norm: bool = True,\n        output_scale_factor=np.sqrt(2.0),\n        add_downsample=True,\n        downsample_padding=1,\n    ):\n        super().__init__()\n        self.resnets = nn.ModuleList([])\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            self.resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=min(in_channels // 4, 32),\n                    groups_out=min(out_channels // 4, 32),\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        if add_downsample:\n            self.resnet_down = ResnetBlock2D(\n                in_channels=out_channels,\n                out_channels=out_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=min(out_channels // 4, 32),\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n                use_in_shortcut=True,\n                down=True,\n                kernel=\"fir\",\n            )\n            self.downsamplers = nn.ModuleList([FirDownsample2D(out_channels, out_channels=out_channels)])\n            self.skip_conv = nn.Conv2d(3, out_channels, kernel_size=(1, 1), stride=(1, 1))\n        else:\n            self.resnet_down = None\n            self.downsamplers = None\n            self.skip_conv = None\n\n    def forward(self, hidden_states, temb=None, skip_sample=None):\n        output_states = ()\n\n        for resnet in self.resnets:\n   
         hidden_states = resnet(hidden_states, temb)\n            output_states += (hidden_states,)\n\n        if self.downsamplers is not None:\n            hidden_states = self.resnet_down(hidden_states, temb)\n            for downsampler in self.downsamplers:\n                skip_sample = downsampler(skip_sample)\n\n            hidden_states = self.skip_conv(skip_sample) + hidden_states\n\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states, skip_sample\n\n\nclass ResnetDownsampleBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_downsample=True,\n        skip_time_act=False,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                    skip_time_act=skip_time_act,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    ResnetBlock2D(\n                        in_channels=out_channels,\n                        out_channels=out_channels,\n                        temb_channels=temb_channels,\n                        eps=resnet_eps,\n                        groups=resnet_groups,\n                        dropout=dropout,\n                        time_embedding_norm=resnet_time_scale_shift,\n                        non_linearity=resnet_act_fn,\n                        output_scale_factor=output_scale_factor,\n                        pre_norm=resnet_pre_norm,\n                        skip_time_act=skip_time_act,\n                        down=True,\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, temb=None):\n        output_states = ()\n\n        for resnet in self.resnets:\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        
create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n            output_states = output_states + (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states, temb)\n\n            output_states = output_states + (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass SimpleCrossAttnDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        add_downsample=True,\n        skip_time_act=False,\n        only_cross_attention=False,\n        cross_attention_norm=None,\n    ):\n        super().__init__()\n\n        self.has_cross_attention = True\n\n        resnets = []\n        attentions = []\n\n        self.attn_num_head_channels = attn_num_head_channels\n        self.num_heads = out_channels // self.attn_num_head_channels\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                    skip_time_act=skip_time_act,\n                )\n            )\n\n            processor = (\n                AttnAddedKVProcessor2_0() if hasattr(F, \"scaled_dot_product_attention\") else AttnAddedKVProcessor()\n            )\n\n            attentions.append(\n                Attention(\n                    query_dim=out_channels,\n                    cross_attention_dim=out_channels,\n                    heads=self.num_heads,\n                    dim_head=attn_num_head_channels,\n                    added_kv_proj_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    bias=True,\n                    upcast_softmax=True,\n                    only_cross_attention=only_cross_attention,\n                    cross_attention_norm=cross_attention_norm,\n                    processor=processor,\n                )\n            )\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    ResnetBlock2D(\n                        in_channels=out_channels,\n                        out_channels=out_channels,\n                        temb_channels=temb_channels,\n                        eps=resnet_eps,\n                        groups=resnet_groups,\n                        dropout=dropout,\n                        
time_embedding_norm=resnet_time_scale_shift,\n                        non_linearity=resnet_act_fn,\n                        output_scale_factor=output_scale_factor,\n                        pre_norm=resnet_pre_norm,\n                        skip_time_act=skip_time_act,\n                        down=True,\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        output_states = ()\n        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb)\n                hidden_states = torch.utils.checkpoint.checkpoint(\n                    create_custom_forward(attn, return_dict=False),\n                    hidden_states,\n                    encoder_hidden_states,\n                    cross_attention_kwargs,\n                )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    attention_mask=attention_mask,\n                    **cross_attention_kwargs,\n                )\n\n            output_states = output_states + (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states, temb)\n\n            output_states = output_states + (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass KDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 4,\n        resnet_eps: float = 1e-5,\n        resnet_act_fn: str = \"gelu\",\n        resnet_group_size: int = 32,\n        add_downsample=False,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            groups = in_channels // resnet_group_size\n            groups_out = out_channels // resnet_group_size\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    dropout=dropout,\n                    temb_channels=temb_channels,\n                    groups=groups,\n                    groups_out=groups_out,\n                    eps=resnet_eps,\n                    non_linearity=resnet_act_fn,\n                    time_embedding_norm=\"ada_group\",\n                    conv_shortcut_bias=False,\n                )\n            )\n\n        self.resnets = 
nn.ModuleList(resnets)\n\n        if add_downsample:\n            # YiYi's comments- might be able to use FirDownsample2D, look into details later\n            self.downsamplers = nn.ModuleList([KDownsample2D()])\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, temb=None):\n        output_states = ()\n\n        for resnet in self.resnets:\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n            output_states += (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n        return hidden_states, output_states\n\n\nclass KCrossAttnDownBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        cross_attention_dim: int,\n        dropout: float = 0.0,\n        num_layers: int = 4,\n        resnet_group_size: int = 32,\n        add_downsample=True,\n        attn_num_head_channels: int = 64,\n        add_self_attention: bool = False,\n        resnet_eps: float = 1e-5,\n        resnet_act_fn: str = \"gelu\",\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        self.has_cross_attention = True\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            groups = in_channels // resnet_group_size\n            groups_out = out_channels // resnet_group_size\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    dropout=dropout,\n                    temb_channels=temb_channels,\n                    groups=groups,\n                    groups_out=groups_out,\n                    eps=resnet_eps,\n                    non_linearity=resnet_act_fn,\n                    time_embedding_norm=\"ada_group\",\n                    conv_shortcut_bias=False,\n                )\n            )\n            attentions.append(\n                KAttentionBlock(\n                    out_channels,\n                    out_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    cross_attention_dim=cross_attention_dim,\n                    temb_channels=temb_channels,\n                    attention_bias=True,\n                    add_self_attention=add_self_attention,\n                    cross_attention_norm=\"layer_norm\",\n                    group_size=resnet_group_size,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n        self.attentions = nn.ModuleList(attentions)\n\n        if add_downsample:\n    
        self.downsamplers = nn.ModuleList([KDownsample2D()])\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        output_states = ()\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        attention_mask,\n                        cross_attention_kwargs,\n                        use_reentrant=False,\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        attention_mask,\n                        cross_attention_kwargs,\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    emb=temb,\n                    attention_mask=attention_mask,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n\n            if self.downsamplers is None:\n                output_states += (None,)\n            else:\n                output_states += (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n        return hidden_states, output_states\n\n\nclass AttnUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            
resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            attentions.append(\n                Attention(\n                    out_channels,\n                    heads=out_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                    dim_head=attn_num_head_channels if attn_num_head_channels is not None else out_channels,\n                    rescale_output_factor=output_scale_factor,\n                    eps=resnet_eps,\n                    norm_num_groups=resnet_groups,\n                    residual_connection=True,\n                    bias=True,\n                    upcast_softmax=True,\n                    _from_deprecated_attn_block=True,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None):\n        for resnet, attn in zip(self.resnets, self.attentions):\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = attn(hidden_states)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states)\n\n        return hidden_states\n\n\nclass CrossAttnUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        prev_output_channel: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        add_upsample=True,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        only_cross_attention=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n     
               in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            if not dual_cross_attention:\n                attentions.append(\n                    Transformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                        use_linear_projection=use_linear_projection,\n                        only_cross_attention=only_cross_attention,\n                        upcast_attention=upcast_attention,\n                    )\n                )\n            else:\n                attentions.append(\n                    DualTransformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                    )\n                )\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self,\n        hidden_states,\n        res_hidden_states_tuple,\n        temb=None,\n        encoder_hidden_states=None,\n        cross_attention_kwargs=None,\n        upsample_size=None,\n        attention_mask=None,\n    ):\n        # TODO(Patrick, William) - attention mask is not used\n        for resnet, attn in zip(self.resnets, self.attentions):\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n     
                   encoder_hidden_states,\n                        cross_attention_kwargs,\n                        use_reentrant=False,\n                    )[0]\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                    )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, upsample_size)\n\n        return hidden_states\n\n\nclass UpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None):\n        for resnet in self.resnets:\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                
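# NOTE: gradient checkpointing trades compute for memory here: the resnet's intermediate\n                # activations are recomputed during the backward pass instead of being stored. torch >= 1.11\n                # exposes the non-reentrant checkpoint variant (use_reentrant=False), hence the version\n                # branch below.\n                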
if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, upsample_size)\n\n        return hidden_states\n\n\nclass UpDecoderBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            input_channels = in_channels if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=input_channels,\n                    out_channels=out_channels,\n                    temb_channels=None,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n    def forward(self, hidden_states):\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states, temb=None)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states)\n\n        return hidden_states\n\n\nclass AttnUpDecoderBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        for i in range(num_layers):\n            input_channels = in_channels if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=input_channels,\n                    out_channels=out_channels,\n                    temb_channels=None,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    
non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            attentions.append(\n                Attention(\n                    out_channels,\n                    heads=out_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                    dim_head=attn_num_head_channels if attn_num_head_channels is not None else out_channels,\n                    rescale_output_factor=output_scale_factor,\n                    eps=resnet_eps,\n                    norm_num_groups=resnet_groups,\n                    residual_connection=True,\n                    bias=True,\n                    upcast_softmax=True,\n                    _from_deprecated_attn_block=True,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n    def forward(self, hidden_states):\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states, temb=None)\n            hidden_states = attn(hidden_states)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states)\n\n        return hidden_states\n\n\nclass AttnSkipUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=np.sqrt(2.0),\n        upsample_padding=1,\n        add_upsample=True,\n    ):\n        super().__init__()\n        self.attentions = nn.ModuleList([])\n        self.resnets = nn.ModuleList([])\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            self.resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=min((resnet_in_channels + res_skip_channels) // 4, 32),\n                    groups_out=min(out_channels // 4, 32),\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.attentions.append(\n            Attention(\n                out_channels,\n                heads=out_channels // attn_num_head_channels if attn_num_head_channels is not None else 1,\n                dim_head=attn_num_head_channels if attn_num_head_channels is not None else out_channels,\n                rescale_output_factor=output_scale_factor,\n                eps=resnet_eps,\n       
         norm_num_groups=32,\n                residual_connection=True,\n                bias=True,\n                upcast_softmax=True,\n                _from_deprecated_attn_block=True,\n            )\n        )\n\n        self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels)\n        if add_upsample:\n            self.resnet_up = ResnetBlock2D(\n                in_channels=out_channels,\n                out_channels=out_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=min(out_channels // 4, 32),\n                groups_out=min(out_channels // 4, 32),\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n                use_in_shortcut=True,\n                up=True,\n                kernel=\"fir\",\n            )\n            self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n            self.skip_norm = torch.nn.GroupNorm(\n                num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True\n            )\n            self.act = nn.SiLU()\n        else:\n            self.resnet_up = None\n            self.skip_conv = None\n            self.skip_norm = None\n            self.act = None\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, skip_sample=None):\n        for resnet in self.resnets:\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            hidden_states = resnet(hidden_states, temb)\n\n        hidden_states = self.attentions[0](hidden_states)\n\n        if skip_sample is not None:\n            skip_sample = self.upsampler(skip_sample)\n        else:\n            skip_sample = 0\n\n        if self.resnet_up is not None:\n            skip_sample_states = self.skip_norm(hidden_states)\n            skip_sample_states = self.act(skip_sample_states)\n            skip_sample_states = self.skip_conv(skip_sample_states)\n\n            skip_sample = skip_sample + skip_sample_states\n\n            hidden_states = self.resnet_up(hidden_states, temb)\n\n        return hidden_states, skip_sample\n\n\nclass SkipUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_pre_norm: bool = True,\n        output_scale_factor=np.sqrt(2.0),\n        add_upsample=True,\n        upsample_padding=1,\n    ):\n        super().__init__()\n        self.resnets = nn.ModuleList([])\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            self.resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    
temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=min((resnet_in_channels + res_skip_channels) // 4, 32),\n                    groups_out=min(out_channels // 4, 32),\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.upsampler = FirUpsample2D(in_channels, out_channels=out_channels)\n        if add_upsample:\n            self.resnet_up = ResnetBlock2D(\n                in_channels=out_channels,\n                out_channels=out_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=min(out_channels // 4, 32),\n                groups_out=min(out_channels // 4, 32),\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n                use_in_shortcut=True,\n                up=True,\n                kernel=\"fir\",\n            )\n            self.skip_conv = nn.Conv2d(out_channels, 3, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n            self.skip_norm = torch.nn.GroupNorm(\n                num_groups=min(out_channels // 4, 32), num_channels=out_channels, eps=resnet_eps, affine=True\n            )\n            self.act = nn.SiLU()\n        else:\n            self.resnet_up = None\n            self.skip_conv = None\n            self.skip_norm = None\n            self.act = None\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, skip_sample=None):\n        for resnet in self.resnets:\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            hidden_states = resnet(hidden_states, temb)\n\n        if skip_sample is not None:\n            skip_sample = self.upsampler(skip_sample)\n        else:\n            skip_sample = 0\n\n        if self.resnet_up is not None:\n            skip_sample_states = self.skip_norm(hidden_states)\n            skip_sample_states = self.act(skip_sample_states)\n            skip_sample_states = self.skip_conv(skip_sample_states)\n\n            skip_sample = skip_sample + skip_sample_states\n\n            hidden_states = self.resnet_up(hidden_states, temb)\n\n        return hidden_states, skip_sample\n\n\nclass ResnetUpsampleBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_upsample=True,\n        skip_time_act=False,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else 
out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                    skip_time_act=skip_time_act,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList(\n                [\n                    ResnetBlock2D(\n                        in_channels=out_channels,\n                        out_channels=out_channels,\n                        temb_channels=temb_channels,\n                        eps=resnet_eps,\n                        groups=resnet_groups,\n                        dropout=dropout,\n                        time_embedding_norm=resnet_time_scale_shift,\n                        non_linearity=resnet_act_fn,\n                        output_scale_factor=output_scale_factor,\n                        pre_norm=resnet_pre_norm,\n                        skip_time_act=skip_time_act,\n                        up=True,\n                    )\n                ]\n            )\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None):\n        for resnet in self.resnets:\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, temb)\n\n        return hidden_states\n\n\nclass SimpleCrossAttnUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        prev_output_channel: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n       
 add_upsample=True,\n        skip_time_act=False,\n        only_cross_attention=False,\n        cross_attention_norm=None,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        self.num_heads = out_channels // self.attn_num_head_channels\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                    skip_time_act=skip_time_act,\n                )\n            )\n\n            processor = (\n                AttnAddedKVProcessor2_0() if hasattr(F, \"scaled_dot_product_attention\") else AttnAddedKVProcessor()\n            )\n\n            attentions.append(\n                Attention(\n                    query_dim=out_channels,\n                    cross_attention_dim=out_channels,\n                    heads=self.num_heads,\n                    dim_head=attn_num_head_channels,\n                    added_kv_proj_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    bias=True,\n                    upcast_softmax=True,\n                    only_cross_attention=only_cross_attention,\n                    cross_attention_norm=cross_attention_norm,\n                    processor=processor,\n                )\n            )\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList(\n                [\n                    ResnetBlock2D(\n                        in_channels=out_channels,\n                        out_channels=out_channels,\n                        temb_channels=temb_channels,\n                        eps=resnet_eps,\n                        groups=resnet_groups,\n                        dropout=dropout,\n                        time_embedding_norm=resnet_time_scale_shift,\n                        non_linearity=resnet_act_fn,\n                        output_scale_factor=output_scale_factor,\n                        pre_norm=resnet_pre_norm,\n                        skip_time_act=skip_time_act,\n                        up=True,\n                    )\n                ]\n            )\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self,\n        hidden_states,\n        res_hidden_states_tuple,\n        temb=None,\n        encoder_hidden_states=None,\n        upsample_size=None,\n        attention_mask=None,\n        cross_attention_kwargs=None,\n    ):\n        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}\n        for resnet, attn in zip(self.resnets, self.attentions):\n            # resnet\n            # pop res hidden states\n       
     res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                hidden_states = torch.utils.checkpoint.checkpoint(create_custom_forward(resnet), hidden_states, temb)\n                hidden_states = torch.utils.checkpoint.checkpoint(\n                    create_custom_forward(attn, return_dict=False),\n                    hidden_states,\n                    encoder_hidden_states,\n                    cross_attention_kwargs,\n                )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    attention_mask=attention_mask,\n                    **cross_attention_kwargs,\n                )\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, temb)\n\n        return hidden_states\n\n\nclass KUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 5,\n        resnet_eps: float = 1e-5,\n        resnet_act_fn: str = \"gelu\",\n        resnet_group_size: Optional[int] = 32,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n        k_in_channels = 2 * out_channels\n        k_out_channels = in_channels\n        num_layers = num_layers - 1\n\n        for i in range(num_layers):\n            in_channels = k_in_channels if i == 0 else out_channels\n            groups = in_channels // resnet_group_size\n            groups_out = out_channels // resnet_group_size\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=k_out_channels if (i == num_layers - 1) else out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=groups,\n                    groups_out=groups_out,\n                    dropout=dropout,\n                    non_linearity=resnet_act_fn,\n                    time_embedding_norm=\"ada_group\",\n                    conv_shortcut_bias=False,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([KUpsample2D()])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None):\n        res_hidden_states_tuple = res_hidden_states_tuple[-1]\n        if res_hidden_states_tuple is not None:\n            hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1)\n\n        for resnet in self.resnets:\n            if self.training and 
self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states)\n\n        return hidden_states\n\n\nclass KCrossAttnUpBlock2D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 4,\n        resnet_eps: float = 1e-5,\n        resnet_act_fn: str = \"gelu\",\n        resnet_group_size: int = 32,\n        attn_num_head_channels=1,  # attention dim_head\n        cross_attention_dim: int = 768,\n        add_upsample: bool = True,\n        upcast_attention: bool = False,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        is_first_block = in_channels == out_channels == temb_channels\n        is_middle_block = in_channels != out_channels\n        add_self_attention = True if is_first_block else False\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        # in_channels, and out_channels for the block (k-unet)\n        k_in_channels = out_channels if is_first_block else 2 * out_channels\n        k_out_channels = in_channels\n\n        num_layers = num_layers - 1\n\n        for i in range(num_layers):\n            in_channels = k_in_channels if i == 0 else out_channels\n            groups = in_channels // resnet_group_size\n            groups_out = out_channels // resnet_group_size\n\n            if is_middle_block and (i == num_layers - 1):\n                conv_2d_out_channels = k_out_channels\n            else:\n                conv_2d_out_channels = None\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    conv_2d_out_channels=conv_2d_out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=groups,\n                    groups_out=groups_out,\n                    dropout=dropout,\n                    non_linearity=resnet_act_fn,\n                    time_embedding_norm=\"ada_group\",\n                    conv_shortcut_bias=False,\n                )\n            )\n            attentions.append(\n                KAttentionBlock(\n                    k_out_channels if (i == num_layers - 1) else out_channels,\n                    k_out_channels // attn_num_head_channels\n                    if (i == num_layers - 1)\n                    else out_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    cross_attention_dim=cross_attention_dim,\n                    temb_channels=temb_channels,\n                  
  attention_bias=True,\n                    add_self_attention=add_self_attention,\n                    cross_attention_norm=\"layer_norm\",\n                    upcast_attention=upcast_attention,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n        self.attentions = nn.ModuleList(attentions)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([KUpsample2D()])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self,\n        hidden_states,\n        res_hidden_states_tuple,\n        temb=None,\n        encoder_hidden_states=None,\n        cross_attention_kwargs=None,\n        upsample_size=None,\n        attention_mask=None,\n    ):\n        res_hidden_states_tuple = res_hidden_states_tuple[-1]\n        if res_hidden_states_tuple is not None:\n            hidden_states = torch.cat([hidden_states, res_hidden_states_tuple], dim=1)\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        attention_mask,\n                        cross_attention_kwargs,\n                        use_reentrant=False,\n                    )[0]\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        attention_mask,\n                        cross_attention_kwargs,\n                    )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    emb=temb,\n                    attention_mask=attention_mask,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states)\n\n        return hidden_states\n\n\n# can potentially later be renamed to `No-feed-forward` attention\nclass KAttentionBlock(nn.Module):\n    r\"\"\"\n    A basic Transformer block.\n\n    Parameters:\n        dim (`int`): The number of channels in the input and output.\n        num_attention_heads (`int`): The number of heads to use 
for multi-head attention.\n        attention_head_dim (`int`): The number of channels in each head.\n        dropout (`float`, *optional*, defaults to 0.0): The dropout probability to use.\n        cross_attention_dim (`int`, *optional*): The size of the encoder_hidden_states vector for cross attention.\n        activation_fn (`str`, *optional*, defaults to `\"geglu\"`): Activation function to be used in feed-forward.\n        num_embeds_ada_norm (:\n            obj: `int`, *optional*): The number of diffusion steps used during training. See `Transformer2DModel`.\n        attention_bias (:\n            obj: `bool`, *optional*, defaults to `False`): Configure if the attentions should contain a bias parameter.\n    \"\"\"\n\n    def __init__(\n        self,\n        dim: int,\n        num_attention_heads: int,\n        attention_head_dim: int,\n        dropout: float = 0.0,\n        cross_attention_dim: Optional[int] = None,\n        attention_bias: bool = False,\n        upcast_attention: bool = False,\n        temb_channels: int = 768,  # for ada_group_norm\n        add_self_attention: bool = False,\n        cross_attention_norm: Optional[str] = None,\n        group_size: int = 32,\n    ):\n        super().__init__()\n        self.add_self_attention = add_self_attention\n\n        # 1. Self-Attn\n        if add_self_attention:\n            self.norm1 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size))\n            self.attn1 = Attention(\n                query_dim=dim,\n                heads=num_attention_heads,\n                dim_head=attention_head_dim,\n                dropout=dropout,\n                bias=attention_bias,\n                cross_attention_dim=None,\n                cross_attention_norm=None,\n            )\n\n        # 2. Cross-Attn\n        self.norm2 = AdaGroupNorm(temb_channels, dim, max(1, dim // group_size))\n        self.attn2 = Attention(\n            query_dim=dim,\n            cross_attention_dim=cross_attention_dim,\n            heads=num_attention_heads,\n            dim_head=attention_head_dim,\n            dropout=dropout,\n            bias=attention_bias,\n            upcast_attention=upcast_attention,\n            cross_attention_norm=cross_attention_norm,\n        )\n\n    def _to_3d(self, hidden_states, height, weight):\n        return hidden_states.permute(0, 2, 3, 1).reshape(hidden_states.shape[0], height * weight, -1)\n\n    def _to_4d(self, hidden_states, height, weight):\n        return hidden_states.permute(0, 2, 1).reshape(hidden_states.shape[0], -1, height, weight)\n\n    def forward(\n        self,\n        hidden_states,\n        encoder_hidden_states=None,\n        emb=None,\n        attention_mask=None,\n        cross_attention_kwargs=None,\n    ):\n        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}\n\n        # 1. Self-Attention\n        if self.add_self_attention:\n            norm_hidden_states = self.norm1(hidden_states, emb)\n\n            height, weight = norm_hidden_states.shape[2:]\n            norm_hidden_states = self._to_3d(norm_hidden_states, height, weight)\n\n            attn_output = self.attn1(\n                norm_hidden_states,\n                encoder_hidden_states=None,\n                **cross_attention_kwargs,\n            )\n            attn_output = self._to_4d(attn_output, height, weight)\n\n            hidden_states = attn_output + hidden_states\n\n        # 2. 
Cross-Attention/None\n        norm_hidden_states = self.norm2(hidden_states, emb)\n\n        height, weight = norm_hidden_states.shape[2:]\n        norm_hidden_states = self._to_3d(norm_hidden_states, height, weight)\n        attn_output = self.attn2(\n            norm_hidden_states,\n            encoder_hidden_states=encoder_hidden_states,\n            **cross_attention_kwargs,\n        )\n        attn_output = self._to_4d(attn_output, height, weight)\n\n        hidden_states = attn_output + hidden_states\n\n        return hidden_states\n"
  },
  {
    "path": "diffusers/models/unet_2d_blocks_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom .attention_flax import FlaxTransformer2DModel\nfrom .resnet_flax import FlaxDownsample2D, FlaxResnetBlock2D, FlaxUpsample2D\n\n\nclass FlaxCrossAttnDownBlock2D(nn.Module):\n    r\"\"\"\n    Cross Attention 2D Downsizing block - original architecture from Unet transformers:\n    https://arxiv.org/abs/2103.06104\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        out_channels (:obj:`int`):\n            Output channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of attention blocks layers\n        attn_num_head_channels (:obj:`int`, *optional*, defaults to 1):\n            Number of attention heads of each spatial transformer block\n        add_downsample (:obj:`bool`, *optional*, defaults to `True`):\n            Whether to add downsampling layer before each final output\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    out_channels: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    attn_num_head_channels: int = 1\n    add_downsample: bool = True\n    use_linear_projection: bool = False\n    only_cross_attention: bool = False\n    use_memory_efficient_attention: bool = False\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        resnets = []\n        attentions = []\n\n        for i in range(self.num_layers):\n            in_channels = self.in_channels if i == 0 else self.out_channels\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=self.out_channels,\n                dropout_prob=self.dropout,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n\n            attn_block = FlaxTransformer2DModel(\n                in_channels=self.out_channels,\n                n_heads=self.attn_num_head_channels,\n                d_head=self.out_channels // self.attn_num_head_channels,\n                depth=1,\n                use_linear_projection=self.use_linear_projection,\n                only_cross_attention=self.only_cross_attention,\n                use_memory_efficient_attention=self.use_memory_efficient_attention,\n                dtype=self.dtype,\n            )\n            attentions.append(attn_block)\n\n        self.resnets = resnets\n        self.attentions = attentions\n\n        if self.add_downsample:\n            self.downsamplers_0 = FlaxDownsample2D(self.out_channels, dtype=self.dtype)\n\n    def __call__(self, hidden_states, temb, encoder_hidden_states, 
deterministic=True):\n        output_states = ()\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            hidden_states = resnet(hidden_states, temb, deterministic=deterministic)\n            hidden_states = attn(hidden_states, encoder_hidden_states, deterministic=deterministic)\n            output_states += (hidden_states,)\n\n        if self.add_downsample:\n            hidden_states = self.downsamplers_0(hidden_states)\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass FlaxDownBlock2D(nn.Module):\n    r\"\"\"\n    Flax 2D downsizing block\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        out_channels (:obj:`int`):\n            Output channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of attention blocks layers\n        add_downsample (:obj:`bool`, *optional*, defaults to `True`):\n            Whether to add downsampling layer before each final output\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    out_channels: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    add_downsample: bool = True\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        resnets = []\n\n        for i in range(self.num_layers):\n            in_channels = self.in_channels if i == 0 else self.out_channels\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=self.out_channels,\n                dropout_prob=self.dropout,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n        self.resnets = resnets\n\n        if self.add_downsample:\n            self.downsamplers_0 = FlaxDownsample2D(self.out_channels, dtype=self.dtype)\n\n    def __call__(self, hidden_states, temb, deterministic=True):\n        output_states = ()\n\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states, temb, deterministic=deterministic)\n            output_states += (hidden_states,)\n\n        if self.add_downsample:\n            hidden_states = self.downsamplers_0(hidden_states)\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass FlaxCrossAttnUpBlock2D(nn.Module):\n    r\"\"\"\n    Cross Attention 2D Upsampling block - original architecture from Unet transformers:\n    https://arxiv.org/abs/2103.06104\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        out_channels (:obj:`int`):\n            Output channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of attention blocks layers\n        attn_num_head_channels (:obj:`int`, *optional*, defaults to 1):\n            Number of attention heads of each spatial transformer block\n        add_upsample (:obj:`bool`, *optional*, defaults to `True`):\n            Whether to add upsampling layer before each final output\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    
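# A minimal usage sketch (illustrative only; the constructor values, variable names, and array\n    # shapes below are assumptions, not defaults of this module). As with any flax module, the block\n    # is first `init`-ed and then `apply`-ed:\n    #\n    #     block = FlaxCrossAttnUpBlock2D(in_channels=320, out_channels=320, prev_output_channel=640)\n    #     params = block.init(jax.random.PRNGKey(0), sample, res_samples, temb, encoder_hidden_states)\n    #     out = block.apply(params, sample, res_samples, temb, encoder_hidden_states)\n    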
in_channels: int\n    out_channels: int\n    prev_output_channel: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    attn_num_head_channels: int = 1\n    add_upsample: bool = True\n    use_linear_projection: bool = False\n    only_cross_attention: bool = False\n    use_memory_efficient_attention: bool = False\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        resnets = []\n        attentions = []\n\n        for i in range(self.num_layers):\n            res_skip_channels = self.in_channels if (i == self.num_layers - 1) else self.out_channels\n            resnet_in_channels = self.prev_output_channel if i == 0 else self.out_channels\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=resnet_in_channels + res_skip_channels,\n                out_channels=self.out_channels,\n                dropout_prob=self.dropout,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n\n            attn_block = FlaxTransformer2DModel(\n                in_channels=self.out_channels,\n                n_heads=self.attn_num_head_channels,\n                d_head=self.out_channels // self.attn_num_head_channels,\n                depth=1,\n                use_linear_projection=self.use_linear_projection,\n                only_cross_attention=self.only_cross_attention,\n                use_memory_efficient_attention=self.use_memory_efficient_attention,\n                dtype=self.dtype,\n            )\n            attentions.append(attn_block)\n\n        self.resnets = resnets\n        self.attentions = attentions\n\n        if self.add_upsample:\n            self.upsamplers_0 = FlaxUpsample2D(self.out_channels, dtype=self.dtype)\n\n    def __call__(self, hidden_states, res_hidden_states_tuple, temb, encoder_hidden_states, deterministic=True):\n        for resnet, attn in zip(self.resnets, self.attentions):\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = jnp.concatenate((hidden_states, res_hidden_states), axis=-1)\n\n            hidden_states = resnet(hidden_states, temb, deterministic=deterministic)\n            hidden_states = attn(hidden_states, encoder_hidden_states, deterministic=deterministic)\n\n        if self.add_upsample:\n            hidden_states = self.upsamplers_0(hidden_states)\n\n        return hidden_states\n\n\nclass FlaxUpBlock2D(nn.Module):\n    r\"\"\"\n    Flax 2D upsampling block\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        out_channels (:obj:`int`):\n            Output channels\n        prev_output_channel (:obj:`int`):\n            Output channels from the previous block\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of attention blocks layers\n        add_downsample (:obj:`bool`, *optional*, defaults to `True`):\n            Whether to add downsampling layer before each final output\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    out_channels: int\n    prev_output_channel: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    add_upsample: bool = True\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        resnets = []\n\n        for i in range(self.num_layers):\n            
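# Each resnet consumes the concatenation of the running hidden states and one skip\n            # connection popped from the down path, so its input width is the sum of the two.\n            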
res_skip_channels = self.in_channels if (i == self.num_layers - 1) else self.out_channels\n            resnet_in_channels = self.prev_output_channel if i == 0 else self.out_channels\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=resnet_in_channels + res_skip_channels,\n                out_channels=self.out_channels,\n                dropout_prob=self.dropout,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n\n        self.resnets = resnets\n\n        if self.add_upsample:\n            self.upsamplers_0 = FlaxUpsample2D(self.out_channels, dtype=self.dtype)\n\n    def __call__(self, hidden_states, res_hidden_states_tuple, temb, deterministic=True):\n        for resnet in self.resnets:\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = jnp.concatenate((hidden_states, res_hidden_states), axis=-1)\n\n            hidden_states = resnet(hidden_states, temb, deterministic=deterministic)\n\n        if self.add_upsample:\n            hidden_states = self.upsamplers_0(hidden_states)\n\n        return hidden_states\n\n\nclass FlaxUNetMidBlock2DCrossAttn(nn.Module):\n    r\"\"\"\n    Cross Attention 2D Mid-level block - original architecture from Unet transformers: https://arxiv.org/abs/2103.06104\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of attention blocks layers\n        attn_num_head_channels (:obj:`int`, *optional*, defaults to 1):\n            Number of attention heads of each spatial transformer block\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    attn_num_head_channels: int = 1\n    use_linear_projection: bool = False\n    use_memory_efficient_attention: bool = False\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        # there is always at least one resnet\n        resnets = [\n            FlaxResnetBlock2D(\n                in_channels=self.in_channels,\n                out_channels=self.in_channels,\n                dropout_prob=self.dropout,\n                dtype=self.dtype,\n            )\n        ]\n\n        attentions = []\n\n        for _ in range(self.num_layers):\n            attn_block = FlaxTransformer2DModel(\n                in_channels=self.in_channels,\n                n_heads=self.attn_num_head_channels,\n                d_head=self.in_channels // self.attn_num_head_channels,\n                depth=1,\n                use_linear_projection=self.use_linear_projection,\n                use_memory_efficient_attention=self.use_memory_efficient_attention,\n                dtype=self.dtype,\n            )\n            attentions.append(attn_block)\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=self.in_channels,\n                out_channels=self.in_channels,\n                dropout_prob=self.dropout,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n\n        self.resnets = 
resnets\n        self.attentions = attentions\n\n    def __call__(self, hidden_states, temb, encoder_hidden_states, deterministic=True):\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            hidden_states = attn(hidden_states, encoder_hidden_states, deterministic=deterministic)\n            hidden_states = resnet(hidden_states, temb, deterministic=deterministic)\n\n        return hidden_states\n"
  },
  {
    "path": "diffusers/models/unet_2d_condition.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Any, Dict, List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.utils.checkpoint\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..loaders import UNet2DConditionLoadersMixin\nfrom ..utils import BaseOutput, logging\nfrom .attention_processor import AttentionProcessor, AttnProcessor\nfrom .embeddings import GaussianFourierProjection, TextTimeEmbedding, TimestepEmbedding, Timesteps\nfrom .modeling_utils import ModelMixin\nfrom .unet_2d_blocks import (\n    CrossAttnDownBlock2D,\n    CrossAttnUpBlock2D,\n    DownBlock2D,\n    UNetMidBlock2DCrossAttn,\n    UNetMidBlock2DSimpleCrossAttn,\n    UpBlock2D,\n    get_down_block,\n    get_up_block,\n)\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\nclass UNet2DConditionOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):\n            Hidden states conditioned on `encoder_hidden_states` input. Output of last layer of model.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass UNet2DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin):\n    r\"\"\"\n    UNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep\n    and returns sample shaped output.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the models (such as downloading or saving, etc.)\n\n    Parameters:\n        sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`):\n            Height and width of input/output sample.\n        in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample.\n        out_channels (`int`, *optional*, defaults to 4): The number of channels in the output.\n        center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample.\n        flip_sin_to_cos (`bool`, *optional*, defaults to `False`):\n            Whether to flip the sin to cos in the time embedding.\n        freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding.\n        down_block_types (`Tuple[str]`, *optional*, defaults to `(\"CrossAttnDownBlock2D\", \"CrossAttnDownBlock2D\", \"CrossAttnDownBlock2D\", \"DownBlock2D\")`):\n            The tuple of downsample blocks to use.\n        mid_block_type (`str`, *optional*, defaults to `\"UNetMidBlock2DCrossAttn\"`):\n            The mid block type. 
Choose from `UNetMidBlock2DCrossAttn` or `UNetMidBlock2DSimpleCrossAttn`, will skip the\n            mid block layer if `None`.\n        up_block_types (`Tuple[str]`, *optional*, defaults to `(\"UpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\",)`):\n            The tuple of upsample blocks to use.\n        only_cross_attention (`bool` or `Tuple[bool]`, *optional*, default to `False`):\n            Whether to include self-attention in the basic transformer blocks, see\n            [`~models.attention.BasicTransformerBlock`].\n        block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`):\n            The tuple of output channels for each block.\n        layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block.\n        downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution.\n        mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block.\n        act_fn (`str`, *optional*, defaults to `\"silu\"`): The activation function to use.\n        norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization.\n            If `None`, it will skip the normalization and activation layers in post-processing\n        norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization.\n        cross_attention_dim (`int` or `Tuple[int]`, *optional*, defaults to 1280):\n            The dimension of the cross attention features.\n        encoder_hid_dim (`int`, *optional*, defaults to None):\n            If given, `encoder_hidden_states` will be projected from this dimension to `cross_attention_dim`.\n        attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads.\n        resnet_time_scale_shift (`str`, *optional*, defaults to `\"default\"`): Time scale shift config\n            for resnet blocks, see [`~models.resnet.ResnetBlock2D`]. Choose from `default` or `scale_shift`.\n        class_embed_type (`str`, *optional*, defaults to None):\n            The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`,\n            `\"timestep\"`, `\"identity\"`, `\"projection\"`, or `\"simple_projection\"`.\n        addition_embed_type (`str`, *optional*, defaults to None):\n            Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or\n            \"text\". \"text\" will use the `TextTimeEmbedding` layer.\n        num_class_embeds (`int`, *optional*, defaults to None):\n            Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing\n            class conditioning with `class_embed_type` equal to `None`.\n        time_embedding_type (`str`, *optional*, default to `positional`):\n            The type of position embedding to use for timesteps. Choose from `positional` or `fourier`.\n        time_embedding_dim (`int`, *optional*, default to `None`):\n            An optional override for the dimension of the projected time embedding.\n        time_embedding_act_fn (`str`, *optional*, default to `None`):\n            Optional activation function to use on the time embeddings only one time before they are passed to the rest\n            of the unet. 
Choose from `silu`, `mish`, `gelu`, and `swish`.\n        timestep_post_act (`str, *optional*, default to `None`):\n            The second activation function to use in timestep embedding. Choose from `silu`, `mish` and `gelu`.\n        time_cond_proj_dim (`int`, *optional*, default to `None`):\n            The dimension of `cond_proj` layer in timestep embedding.\n        conv_in_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_in` layer.\n        conv_out_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_out` layer.\n        projection_class_embeddings_input_dim (`int`, *optional*): The dimension of the `class_labels` input when\n            using the \"projection\" `class_embed_type`. Required when using the \"projection\" `class_embed_type`.\n        class_embeddings_concat (`bool`, *optional*, defaults to `False`): Whether to concatenate the time\n            embeddings with the class embeddings.\n        mid_block_only_cross_attention (`bool`, *optional*, defaults to `None`):\n            Whether to use cross attention with the mid block when using the `UNetMidBlock2DSimpleCrossAttn`. If\n            `only_cross_attention` is given as a single boolean and `mid_block_only_cross_attention` is None, the\n            `only_cross_attention` value will be used as the value for `mid_block_only_cross_attention`. Else, it will\n            default to `False`.\n    \"\"\"\n\n    _supports_gradient_checkpointing = True\n\n    @register_to_config\n    def __init__(\n        self,\n        sample_size: Optional[int] = None,\n        in_channels: int = 4,\n        out_channels: int = 4,\n        center_input_sample: bool = False,\n        flip_sin_to_cos: bool = True,\n        freq_shift: int = 0,\n        down_block_types: Tuple[str] = (\n            \"CrossAttnDownBlock2D\",\n            \"CrossAttnDownBlock2D\",\n            \"CrossAttnDownBlock2D\",\n            \"DownBlock2D\",\n        ),\n        mid_block_type: Optional[str] = \"UNetMidBlock2DCrossAttn\",\n        up_block_types: Tuple[str] = (\"UpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\"),\n        only_cross_attention: Union[bool, Tuple[bool]] = False,\n        block_out_channels: Tuple[int] = (320, 640, 1280, 1280),\n        layers_per_block: Union[int, Tuple[int]] = 2,\n        downsample_padding: int = 1,\n        mid_block_scale_factor: float = 1,\n        act_fn: str = \"silu\",\n        norm_num_groups: Optional[int] = 32,\n        norm_eps: float = 1e-5,\n        cross_attention_dim: Union[int, Tuple[int]] = 1280,\n        encoder_hid_dim: Optional[int] = None,\n        attention_head_dim: Union[int, Tuple[int]] = 8,\n        dual_cross_attention: bool = False,\n        use_linear_projection: bool = False,\n        class_embed_type: Optional[str] = None,\n        addition_embed_type: Optional[str] = None,\n        num_class_embeds: Optional[int] = None,\n        upcast_attention: bool = False,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_skip_time_act: bool = False,\n        resnet_out_scale_factor: int = 1.0,\n        time_embedding_type: str = \"positional\",\n        time_embedding_dim: Optional[int] = None,\n        time_embedding_act_fn: Optional[str] = None,\n        timestep_post_act: Optional[str] = None,\n        time_cond_proj_dim: Optional[int] = None,\n        conv_in_kernel: int = 3,\n        conv_out_kernel: int = 3,\n        projection_class_embeddings_input_dim: Optional[int] = None,\n        
class_embeddings_concat: bool = False,\n        mid_block_only_cross_attention: Optional[bool] = None,\n        cross_attention_norm: Optional[str] = None,\n        addition_embed_type_num_heads=64,\n    ):\n        super().__init__()\n\n        self.sample_size = sample_size\n\n        # Check inputs\n        if len(down_block_types) != len(up_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}.\"\n            )\n\n        if len(block_out_channels) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `only_cross_attention` as `down_block_types`. `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if isinstance(cross_attention_dim, list) and len(cross_attention_dim) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `cross_attention_dim` as `down_block_types`. `cross_attention_dim`: {cross_attention_dim}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(layers_per_block, int) and len(layers_per_block) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `layers_per_block` as `down_block_types`. `layers_per_block`: {layers_per_block}. `down_block_types`: {down_block_types}.\"\n            )\n\n        # input\n        conv_in_padding = (conv_in_kernel - 1) // 2\n        self.conv_in = nn.Conv2d(\n            in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding\n        )\n\n        # time\n        if time_embedding_type == \"fourier\":\n            time_embed_dim = time_embedding_dim or block_out_channels[0] * 2\n            if time_embed_dim % 2 != 0:\n                raise ValueError(f\"`time_embed_dim` should be divisible by 2, but is {time_embed_dim}.\")\n            self.time_proj = GaussianFourierProjection(\n                time_embed_dim // 2, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos\n            )\n            timestep_input_dim = time_embed_dim\n        elif time_embedding_type == \"positional\":\n            time_embed_dim = time_embedding_dim or block_out_channels[0] * 4\n\n            self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift)\n            timestep_input_dim = block_out_channels[0]\n        else:\n            raise ValueError(\n                f\"{time_embedding_type} does not exist. 
Please make sure to use one of `fourier` or `positional`.\"\n            )\n\n        self.time_embedding = TimestepEmbedding(\n            timestep_input_dim,\n            time_embed_dim,\n            act_fn=act_fn,\n            post_act_fn=timestep_post_act,\n            cond_proj_dim=time_cond_proj_dim,\n        )\n\n        if encoder_hid_dim is not None:\n            self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim)\n        else:\n            self.encoder_hid_proj = None\n\n        # class embedding\n        if class_embed_type is None and num_class_embeds is not None:\n            self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim)\n        elif class_embed_type == \"timestep\":\n            self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim, act_fn=act_fn)\n        elif class_embed_type == \"identity\":\n            self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim)\n        elif class_embed_type == \"projection\":\n            if projection_class_embeddings_input_dim is None:\n                raise ValueError(\n                    \"`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set\"\n                )\n            # The projection `class_embed_type` is the same as the timestep `class_embed_type` except\n            # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings\n            # 2. it projects from an arbitrary input dimension.\n            #\n            # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations.\n            # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings.\n            # As a result, `TimestepEmbedding` can be passed arbitrary vectors.\n            self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim)\n        elif class_embed_type == \"simple_projection\":\n            if projection_class_embeddings_input_dim is None:\n                raise ValueError(\n                    \"`class_embed_type`: 'simple_projection' requires `projection_class_embeddings_input_dim` be set\"\n                )\n            self.class_embedding = nn.Linear(projection_class_embeddings_input_dim, time_embed_dim)\n        else:\n            self.class_embedding = None\n\n        if addition_embed_type == \"text\":\n            if encoder_hid_dim is not None:\n                text_time_embedding_from_dim = encoder_hid_dim\n            else:\n                text_time_embedding_from_dim = cross_attention_dim\n\n            self.add_embedding = TextTimeEmbedding(\n                text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads\n            )\n        elif addition_embed_type is not None:\n            raise ValueError(f\"addition_embed_type: {addition_embed_type} must be None or 'text'.\")\n\n        if time_embedding_act_fn is None:\n            self.time_embed_act = None\n        elif time_embedding_act_fn == \"swish\":\n            self.time_embed_act = lambda x: F.silu(x)\n        elif time_embedding_act_fn == \"mish\":\n            self.time_embed_act = nn.Mish()\n        elif time_embedding_act_fn == \"silu\":\n            self.time_embed_act = nn.SiLU()\n        elif time_embedding_act_fn == \"gelu\":\n            self.time_embed_act = nn.GELU()\n        else:\n            raise ValueError(f\"Unsupported activation function: {time_embedding_act_fn}\")\n\n        
self.down_blocks = nn.ModuleList([])\n        self.up_blocks = nn.ModuleList([])\n\n        if isinstance(only_cross_attention, bool):\n            if mid_block_only_cross_attention is None:\n                mid_block_only_cross_attention = only_cross_attention\n\n            only_cross_attention = [only_cross_attention] * len(down_block_types)\n\n        if mid_block_only_cross_attention is None:\n            mid_block_only_cross_attention = False\n\n        if isinstance(attention_head_dim, int):\n            attention_head_dim = (attention_head_dim,) * len(down_block_types)\n\n        if isinstance(cross_attention_dim, int):\n            cross_attention_dim = (cross_attention_dim,) * len(down_block_types)\n\n        if isinstance(layers_per_block, int):\n            layers_per_block = [layers_per_block] * len(down_block_types)\n\n        if class_embeddings_concat:\n            # The time embeddings are concatenated with the class embeddings. The dimension of the\n            # time embeddings passed to the down, middle, and up blocks is twice the dimension of the\n            # regular time embeddings\n            blocks_time_embed_dim = time_embed_dim * 2\n        else:\n            blocks_time_embed_dim = time_embed_dim\n\n        # down\n        output_channel = block_out_channels[0]\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=layers_per_block[i],\n                in_channels=input_channel,\n                out_channels=output_channel,\n                temb_channels=blocks_time_embed_dim,\n                add_downsample=not is_final_block,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=cross_attention_dim[i],\n                attn_num_head_channels=attention_head_dim[i],\n                downsample_padding=downsample_padding,\n                dual_cross_attention=dual_cross_attention,\n                use_linear_projection=use_linear_projection,\n                only_cross_attention=only_cross_attention[i],\n                upcast_attention=upcast_attention,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                resnet_skip_time_act=resnet_skip_time_act,\n                resnet_out_scale_factor=resnet_out_scale_factor,\n                cross_attention_norm=cross_attention_norm,\n            )\n            self.down_blocks.append(down_block)\n\n        # mid\n        if mid_block_type == \"UNetMidBlock2DCrossAttn\":\n            self.mid_block = UNetMidBlock2DCrossAttn(\n                in_channels=block_out_channels[-1],\n                temb_channels=blocks_time_embed_dim,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                output_scale_factor=mid_block_scale_factor,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                cross_attention_dim=cross_attention_dim[-1],\n                attn_num_head_channels=attention_head_dim[-1],\n                resnet_groups=norm_num_groups,\n                dual_cross_attention=dual_cross_attention,\n                use_linear_projection=use_linear_projection,\n                upcast_attention=upcast_attention,\n            )\n        elif mid_block_type == 
\"UNetMidBlock2DSimpleCrossAttn\":\n            self.mid_block = UNetMidBlock2DSimpleCrossAttn(\n                in_channels=block_out_channels[-1],\n                temb_channels=blocks_time_embed_dim,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                output_scale_factor=mid_block_scale_factor,\n                cross_attention_dim=cross_attention_dim[-1],\n                attn_num_head_channels=attention_head_dim[-1],\n                resnet_groups=norm_num_groups,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                skip_time_act=resnet_skip_time_act,\n                only_cross_attention=mid_block_only_cross_attention,\n                cross_attention_norm=cross_attention_norm,\n            )\n        elif mid_block_type is None:\n            self.mid_block = None\n        else:\n            raise ValueError(f\"unknown mid_block_type : {mid_block_type}\")\n\n        # count how many layers upsample the images\n        self.num_upsamplers = 0\n\n        # up\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        reversed_attention_head_dim = list(reversed(attention_head_dim))\n        reversed_layers_per_block = list(reversed(layers_per_block))\n        reversed_cross_attention_dim = list(reversed(cross_attention_dim))\n        only_cross_attention = list(reversed(only_cross_attention))\n\n        output_channel = reversed_block_out_channels[0]\n        for i, up_block_type in enumerate(up_block_types):\n            is_final_block = i == len(block_out_channels) - 1\n\n            prev_output_channel = output_channel\n            output_channel = reversed_block_out_channels[i]\n            input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)]\n\n            # add upsample block for all BUT final layer\n            if not is_final_block:\n                add_upsample = True\n                self.num_upsamplers += 1\n            else:\n                add_upsample = False\n\n            up_block = get_up_block(\n                up_block_type,\n                num_layers=reversed_layers_per_block[i] + 1,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                prev_output_channel=prev_output_channel,\n                temb_channels=blocks_time_embed_dim,\n                add_upsample=add_upsample,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=reversed_cross_attention_dim[i],\n                attn_num_head_channels=reversed_attention_head_dim[i],\n                dual_cross_attention=dual_cross_attention,\n                use_linear_projection=use_linear_projection,\n                only_cross_attention=only_cross_attention[i],\n                upcast_attention=upcast_attention,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                resnet_skip_time_act=resnet_skip_time_act,\n                resnet_out_scale_factor=resnet_out_scale_factor,\n                cross_attention_norm=cross_attention_norm,\n            )\n            self.up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        # out\n        if norm_num_groups is not None:\n            self.conv_norm_out = nn.GroupNorm(\n                num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps\n            )\n\n            if act_fn == \"swish\":\n                
self.conv_act = lambda x: F.silu(x)\n            elif act_fn == \"mish\":\n                self.conv_act = nn.Mish()\n            elif act_fn == \"silu\":\n                self.conv_act = nn.SiLU()\n            elif act_fn == \"gelu\":\n                self.conv_act = nn.GELU()\n            else:\n                raise ValueError(f\"Unsupported activation function: {act_fn}\")\n\n        else:\n            self.conv_norm_out = None\n            self.conv_act = None\n\n        conv_out_padding = (conv_out_kernel - 1) // 2\n        self.conv_out = nn.Conv2d(\n            block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding\n        )\n\n    @property\n    def attn_processors(self) -> Dict[str, AttentionProcessor]:\n        r\"\"\"\n        Returns:\n            `dict` of attention processors: A dictionary containing all attention processors used in the model with\n            indexed by its weight name.\n        \"\"\"\n        # set recursively\n        processors = {}\n\n        def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]):\n            if hasattr(module, \"set_processor\"):\n                processors[f\"{name}.processor\"] = module.processor\n\n            for sub_name, child in module.named_children():\n                fn_recursive_add_processors(f\"{name}.{sub_name}\", child, processors)\n\n            return processors\n\n        for name, module in self.named_children():\n            fn_recursive_add_processors(name, module, processors)\n\n        return processors\n\n    def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]):\n        r\"\"\"\n        Parameters:\n            `processor (`dict` of `AttentionProcessor` or `AttentionProcessor`):\n                The instantiated processor class or a dictionary of processor classes that will be set as the processor\n                of **all** `Attention` layers.\n            In case `processor` is a dict, the key needs to define the path to the corresponding cross attention processor. This is strongly recommended when setting trainable attention processors.:\n\n        \"\"\"\n        count = len(self.attn_processors.keys())\n\n        if isinstance(processor, dict) and len(processor) != count:\n            raise ValueError(\n                f\"A dict of processors was passed, but the number of processors {len(processor)} does not match the\"\n                f\" number of attention layers: {count}. 
Please make sure to pass {count} processor classes.\"\n            )\n\n        def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor):\n            if hasattr(module, \"set_processor\"):\n                if not isinstance(processor, dict):\n                    module.set_processor(processor)\n                else:\n                    module.set_processor(processor.pop(f\"{name}.processor\"))\n\n            for sub_name, child in module.named_children():\n                fn_recursive_attn_processor(f\"{name}.{sub_name}\", child, processor)\n\n        for name, module in self.named_children():\n            fn_recursive_attn_processor(name, module, processor)\n\n    def set_default_attn_processor(self):\n        \"\"\"\n        Disables custom attention processors and sets the default attention implementation.\n        \"\"\"\n        self.set_attn_processor(AttnProcessor())\n\n    def set_attention_slice(self, slice_size):\n        r\"\"\"\n        Enable sliced attention computation.\n\n        When this option is enabled, the attention module will split the input tensor in slices, to compute attention\n        in several steps. This is useful to save some memory in exchange for a small speed decrease.\n\n        Args:\n            slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `\"auto\"`):\n                When `\"auto\"`, halves the input to the attention heads, so attention will be computed in two steps. If\n                `\"max\"`, maximum amount of memory will be saved by running only one slice at a time. If a number is\n                provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim`\n                must be a multiple of `slice_size`.\n        \"\"\"\n        sliceable_head_dims = []\n\n        def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module):\n            if hasattr(module, \"set_attention_slice\"):\n                sliceable_head_dims.append(module.sliceable_head_dim)\n\n            for child in module.children():\n                fn_recursive_retrieve_sliceable_dims(child)\n\n        # retrieve number of attention layers\n        for module in self.children():\n            fn_recursive_retrieve_sliceable_dims(module)\n\n        num_sliceable_layers = len(sliceable_head_dims)\n\n        if slice_size == \"auto\":\n            # half the attention head size is usually a good trade-off between\n            # speed and memory\n            slice_size = [dim // 2 for dim in sliceable_head_dims]\n        elif slice_size == \"max\":\n            # make smallest slice possible\n            slice_size = num_sliceable_layers * [1]\n\n        slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size\n\n        if len(slice_size) != len(sliceable_head_dims):\n            raise ValueError(\n                f\"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different\"\n                f\" attention layers. 
Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}.\"\n            )\n\n        for i in range(len(slice_size)):\n            size = slice_size[i]\n            dim = sliceable_head_dims[i]\n            if size is not None and size > dim:\n                raise ValueError(f\"size {size} has to be smaller or equal to {dim}.\")\n\n        # Recursively walk through all the children.\n        # Any children which exposes the set_attention_slice method\n        # gets the message\n        def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]):\n            if hasattr(module, \"set_attention_slice\"):\n                module.set_attention_slice(slice_size.pop())\n\n            for child in module.children():\n                fn_recursive_set_attention_slice(child, slice_size)\n\n        reversed_slice_size = list(reversed(slice_size))\n        for module in self.children():\n            fn_recursive_set_attention_slice(module, reversed_slice_size)\n\n    def _set_gradient_checkpointing(self, module, value=False):\n        if isinstance(module, (CrossAttnDownBlock2D, DownBlock2D, CrossAttnUpBlock2D, UpBlock2D)):\n            module.gradient_checkpointing = value\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        encoder_hidden_states: torch.Tensor,\n        class_labels: Optional[torch.Tensor] = None,\n        timestep_cond: Optional[torch.Tensor] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None,\n        mid_block_additional_residual: Optional[torch.Tensor] = None,\n        return_dict: bool = True,\n    ) -> Union[UNet2DConditionOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor\n            timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps\n            encoder_hidden_states (`torch.FloatTensor`): (batch, sequence_length, feature_dim) encoder hidden states\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Returns:\n            [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`:\n            [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # By default samples have to be AT least a multiple of the overall upsampling factor.\n        # The overall upsampling factor is equal to 2 ** (# num of upsampling layers).\n        # However, the upsampling interpolation output size can be forced to fit any upsampling size\n        # on the fly if necessary.\n        default_overall_up_factor = 2**self.num_upsamplers\n\n        # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor`\n        forward_upsample_size = False\n        upsample_size = None\n\n        if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]):\n            logger.info(\"Forward upsample size to force interpolation output size.\")\n            forward_upsample_size = True\n\n        # prepare attention_mask\n        if attention_mask is not None:\n            attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0\n            attention_mask = attention_mask.unsqueeze(1)\n\n        # 0. center input if necessary\n        if self.config.center_input_sample:\n            sample = 2 * sample - 1.0\n\n        # 1. time\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can\n            # This would be a good case for the `match` statement (Python 3.10+)\n            is_mps = sample.device.type == \"mps\"\n            if isinstance(timestep, float):\n                dtype = torch.float32 if is_mps else torch.float64\n            else:\n                dtype = torch.int32 if is_mps else torch.int64\n            timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device)\n        elif len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(sample.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        timesteps = timesteps.expand(sample.shape[0])\n\n        t_emb = self.time_proj(timesteps)\n\n        # `Timesteps` does not contain any weights and will always return f32 tensors\n        # but time_embedding might actually be running in fp16. 
so we need to cast here.\n        # there might be better ways to encapsulate this.\n        t_emb = t_emb.to(dtype=sample.dtype)\n\n        emb = self.time_embedding(t_emb, timestep_cond)\n\n        if self.class_embedding is not None:\n            if class_labels is None:\n                raise ValueError(\"class_labels should be provided when num_class_embeds > 0\")\n\n            if self.config.class_embed_type == \"timestep\":\n                class_labels = self.time_proj(class_labels)\n\n                # `Timesteps` does not contain any weights and will always return f32 tensors\n                # there might be better ways to encapsulate this.\n                class_labels = class_labels.to(dtype=sample.dtype)\n\n            class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype)\n\n            if self.config.class_embeddings_concat:\n                emb = torch.cat([emb, class_emb], dim=-1)\n            else:\n                emb = emb + class_emb\n\n        if self.config.addition_embed_type == \"text\":\n            aug_emb = self.add_embedding(encoder_hidden_states)\n            emb = emb + aug_emb\n\n        if self.time_embed_act is not None:\n            emb = self.time_embed_act(emb)\n\n        if self.encoder_hid_proj is not None:\n            encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states)\n\n        # 2. pre-process\n        sample = self.conv_in(sample)\n\n        # 3. down\n        down_block_res_samples = (sample,)\n        for downsample_block in self.down_blocks:\n            if hasattr(downsample_block, \"has_cross_attention\") and downsample_block.has_cross_attention:\n                sample, res_samples = downsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    encoder_hidden_states=encoder_hidden_states,\n                    attention_mask=attention_mask,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n            else:\n                sample, res_samples = downsample_block(hidden_states=sample, temb=emb)\n\n            down_block_res_samples += res_samples\n\n        if down_block_additional_residuals is not None:\n            new_down_block_res_samples = ()\n\n            for down_block_res_sample, down_block_additional_residual in zip(\n                down_block_res_samples, down_block_additional_residuals\n            ):\n                down_block_res_sample = down_block_res_sample + down_block_additional_residual\n                new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,)\n\n            down_block_res_samples = new_down_block_res_samples\n\n        # 4. mid\n        if self.mid_block is not None:\n            sample = self.mid_block(\n                sample,\n                emb,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=attention_mask,\n                cross_attention_kwargs=cross_attention_kwargs,\n            )\n\n        if mid_block_additional_residual is not None:\n            sample = sample + mid_block_additional_residual\n\n        # 5. 
up\n        for i, upsample_block in enumerate(self.up_blocks):\n            is_final_block = i == len(self.up_blocks) - 1\n\n            res_samples = down_block_res_samples[-len(upsample_block.resnets) :]\n            down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)]\n\n            # if we have not reached the final block and need to forward the\n            # upsample size, we do it here\n            if not is_final_block and forward_upsample_size:\n                upsample_size = down_block_res_samples[-1].shape[2:]\n\n            if hasattr(upsample_block, \"has_cross_attention\") and upsample_block.has_cross_attention:\n                sample = upsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    res_hidden_states_tuple=res_samples,\n                    encoder_hidden_states=encoder_hidden_states,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    upsample_size=upsample_size,\n                    attention_mask=attention_mask,\n                )\n            else:\n                sample = upsample_block(\n                    hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size\n                )\n\n        # 6. post-process\n        if self.conv_norm_out:\n            sample = self.conv_norm_out(sample)\n            sample = self.conv_act(sample)\n        sample = self.conv_out(sample)\n\n        if not return_dict:\n            return (sample,)\n\n        return UNet2DConditionOutput(sample=sample)\n"
  },
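The PyTorch UNet above is easiest to sanity-check with a tiny configuration. The sketch below is illustrative only and is not part of the repository: it imports the class from the module path shown above, shrinks `block_out_channels` to a hypothetical two-stage setup, and feeds dummy latents, a timestep, and dummy text-encoder states through `forward`, optionally enabling sliced attention via `set_attention_slice`.

import torch

from diffusers.models.unet_2d_condition import UNet2DConditionModel

# Tiny, hypothetical two-stage configuration for a smoke test; real checkpoints
# use the (320, 640, 1280, 1280) defaults documented in the class docstring.
unet = UNet2DConditionModel(
    sample_size=32,
    in_channels=4,
    out_channels=4,
    down_block_types=("CrossAttnDownBlock2D", "DownBlock2D"),
    up_block_types=("UpBlock2D", "CrossAttnUpBlock2D"),
    block_out_channels=(32, 64),
    layers_per_block=1,
    cross_attention_dim=32,
    attention_head_dim=8,
)
unet.set_attention_slice("auto")  # optional: trade a little speed for lower attention memory

sample = torch.randn(1, 4, 32, 32)              # noisy latents (batch, channel, height, width)
timestep = torch.tensor([10])                   # diffusion timestep, broadcast to the batch internally
encoder_hidden_states = torch.randn(1, 77, 32)  # dummy text-encoder features (batch, seq, cross_attention_dim)

with torch.no_grad():
    noise_pred = unet(sample, timestep, encoder_hidden_states).sample
assert noise_pred.shape == sample.shape

Because the height and width (32) are a multiple of 2 ** num_upsamplers for this two-block configuration, no forced upsample size is needed in the forward pass.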
  {
    "path": "diffusers/models/unet_2d_condition_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom typing import Tuple, Union\n\nimport flax\nimport flax.linen as nn\nimport jax\nimport jax.numpy as jnp\nfrom flax.core.frozen_dict import FrozenDict\n\nfrom ..configuration_utils import ConfigMixin, flax_register_to_config\nfrom ..utils import BaseOutput\nfrom .embeddings_flax import FlaxTimestepEmbedding, FlaxTimesteps\nfrom .modeling_flax_utils import FlaxModelMixin\nfrom .unet_2d_blocks_flax import (\n    FlaxCrossAttnDownBlock2D,\n    FlaxCrossAttnUpBlock2D,\n    FlaxDownBlock2D,\n    FlaxUNetMidBlock2DCrossAttn,\n    FlaxUpBlock2D,\n)\n\n\n@flax.struct.dataclass\nclass FlaxUNet2DConditionOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)`):\n            Hidden states conditioned on `encoder_hidden_states` input. Output of last layer of model.\n    \"\"\"\n\n    sample: jnp.ndarray\n\n\n@flax_register_to_config\nclass FlaxUNet2DConditionModel(nn.Module, FlaxModelMixin, ConfigMixin):\n    r\"\"\"\n    FlaxUNet2DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a\n    timestep and returns sample shaped output.\n\n    This model inherits from [`FlaxModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the models (such as downloading or saving, etc.)\n\n    Also, this model is a Flax Linen [flax.linen.Module](https://flax.readthedocs.io/en/latest/flax.linen.html#module)\n    subclass. Use it as a regular Flax linen Module and refer to the Flax documentation for all matter related to\n    general usage and behavior.\n\n    Finally, this model supports inherent JAX features such as:\n    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)\n    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)\n    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)\n    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)\n\n    Parameters:\n        sample_size (`int`, *optional*):\n            The size of the input sample.\n        in_channels (`int`, *optional*, defaults to 4):\n            The number of channels in the input sample.\n        out_channels (`int`, *optional*, defaults to 4):\n            The number of channels in the output.\n        down_block_types (`Tuple[str]`, *optional*, defaults to `(\"CrossAttnDownBlock2D\", \"CrossAttnDownBlock2D\", \"CrossAttnDownBlock2D\", \"DownBlock2D\")`):\n            The tuple of downsample blocks to use. 
The corresponding class names will be: \"FlaxCrossAttnDownBlock2D\",\n            \"FlaxCrossAttnDownBlock2D\", \"FlaxCrossAttnDownBlock2D\", \"FlaxDownBlock2D\"\n        up_block_types (`Tuple[str]`, *optional*, defaults to `(\"UpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\",)`):\n            The tuple of upsample blocks to use. The corresponding class names will be: \"FlaxUpBlock2D\",\n            \"FlaxCrossAttnUpBlock2D\", \"FlaxCrossAttnUpBlock2D\", \"FlaxCrossAttnUpBlock2D\"\n        block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`):\n            The tuple of output channels for each block.\n        layers_per_block (`int`, *optional*, defaults to 2):\n            The number of layers per block.\n        attention_head_dim (`int` or `Tuple[int]`, *optional*, defaults to 8):\n            The dimension of the attention heads.\n        cross_attention_dim (`int`, *optional*, defaults to 768):\n            The dimension of the cross attention features.\n        dropout (`float`, *optional*, defaults to 0):\n            Dropout probability for down, up and bottleneck blocks.\n        flip_sin_to_cos (`bool`, *optional*, defaults to `True`):\n            Whether to flip the sin to cos in the time embedding.\n        freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding.\n        use_memory_efficient_attention (`bool`, *optional*, defaults to `False`):\n            enable memory efficient attention https://arxiv.org/abs/2112.05682\n\n    \"\"\"\n\n    sample_size: int = 32\n    in_channels: int = 4\n    out_channels: int = 4\n    down_block_types: Tuple[str] = (\n        \"CrossAttnDownBlock2D\",\n        \"CrossAttnDownBlock2D\",\n        \"CrossAttnDownBlock2D\",\n        \"DownBlock2D\",\n    )\n    up_block_types: Tuple[str] = (\"UpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\", \"CrossAttnUpBlock2D\")\n    only_cross_attention: Union[bool, Tuple[bool]] = False\n    block_out_channels: Tuple[int] = (320, 640, 1280, 1280)\n    layers_per_block: int = 2\n    attention_head_dim: Union[int, Tuple[int]] = 8\n    cross_attention_dim: int = 1280\n    dropout: float = 0.0\n    use_linear_projection: bool = False\n    dtype: jnp.dtype = jnp.float32\n    flip_sin_to_cos: bool = True\n    freq_shift: int = 0\n    use_memory_efficient_attention: bool = False\n\n    def init_weights(self, rng: jax.random.KeyArray) -> FrozenDict:\n        # init input tensors\n        sample_shape = (1, self.in_channels, self.sample_size, self.sample_size)\n        sample = jnp.zeros(sample_shape, dtype=jnp.float32)\n        timesteps = jnp.ones((1,), dtype=jnp.int32)\n        encoder_hidden_states = jnp.zeros((1, 1, self.cross_attention_dim), dtype=jnp.float32)\n\n        params_rng, dropout_rng = jax.random.split(rng)\n        rngs = {\"params\": params_rng, \"dropout\": dropout_rng}\n\n        return self.init(rngs, sample, timesteps, encoder_hidden_states)[\"params\"]\n\n    def setup(self):\n        block_out_channels = self.block_out_channels\n        time_embed_dim = block_out_channels[0] * 4\n\n        # input\n        self.conv_in = nn.Conv(\n            block_out_channels[0],\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        # time\n        self.time_proj = FlaxTimesteps(\n            block_out_channels[0], flip_sin_to_cos=self.flip_sin_to_cos, freq_shift=self.config.freq_shift\n    
    )\n        self.time_embedding = FlaxTimestepEmbedding(time_embed_dim, dtype=self.dtype)\n\n        only_cross_attention = self.only_cross_attention\n        if isinstance(only_cross_attention, bool):\n            only_cross_attention = (only_cross_attention,) * len(self.down_block_types)\n\n        attention_head_dim = self.attention_head_dim\n        if isinstance(attention_head_dim, int):\n            attention_head_dim = (attention_head_dim,) * len(self.down_block_types)\n\n        # down\n        down_blocks = []\n        output_channel = block_out_channels[0]\n        for i, down_block_type in enumerate(self.down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            if down_block_type == \"CrossAttnDownBlock2D\":\n                down_block = FlaxCrossAttnDownBlock2D(\n                    in_channels=input_channel,\n                    out_channels=output_channel,\n                    dropout=self.dropout,\n                    num_layers=self.layers_per_block,\n                    attn_num_head_channels=attention_head_dim[i],\n                    add_downsample=not is_final_block,\n                    use_linear_projection=self.use_linear_projection,\n                    only_cross_attention=only_cross_attention[i],\n                    use_memory_efficient_attention=self.use_memory_efficient_attention,\n                    dtype=self.dtype,\n                )\n            else:\n                down_block = FlaxDownBlock2D(\n                    in_channels=input_channel,\n                    out_channels=output_channel,\n                    dropout=self.dropout,\n                    num_layers=self.layers_per_block,\n                    add_downsample=not is_final_block,\n                    dtype=self.dtype,\n                )\n\n            down_blocks.append(down_block)\n        self.down_blocks = down_blocks\n\n        # mid\n        self.mid_block = FlaxUNetMidBlock2DCrossAttn(\n            in_channels=block_out_channels[-1],\n            dropout=self.dropout,\n            attn_num_head_channels=attention_head_dim[-1],\n            use_linear_projection=self.use_linear_projection,\n            use_memory_efficient_attention=self.use_memory_efficient_attention,\n            dtype=self.dtype,\n        )\n\n        # up\n        up_blocks = []\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        reversed_attention_head_dim = list(reversed(attention_head_dim))\n        only_cross_attention = list(reversed(only_cross_attention))\n        output_channel = reversed_block_out_channels[0]\n        for i, up_block_type in enumerate(self.up_block_types):\n            prev_output_channel = output_channel\n            output_channel = reversed_block_out_channels[i]\n            input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)]\n\n            is_final_block = i == len(block_out_channels) - 1\n\n            if up_block_type == \"CrossAttnUpBlock2D\":\n                up_block = FlaxCrossAttnUpBlock2D(\n                    in_channels=input_channel,\n                    out_channels=output_channel,\n                    prev_output_channel=prev_output_channel,\n                    num_layers=self.layers_per_block + 1,\n                    attn_num_head_channels=reversed_attention_head_dim[i],\n                    add_upsample=not is_final_block,\n                    dropout=self.dropout,\n       
             use_linear_projection=self.use_linear_projection,\n                    only_cross_attention=only_cross_attention[i],\n                    use_memory_efficient_attention=self.use_memory_efficient_attention,\n                    dtype=self.dtype,\n                )\n            else:\n                up_block = FlaxUpBlock2D(\n                    in_channels=input_channel,\n                    out_channels=output_channel,\n                    prev_output_channel=prev_output_channel,\n                    num_layers=self.layers_per_block + 1,\n                    add_upsample=not is_final_block,\n                    dropout=self.dropout,\n                    dtype=self.dtype,\n                )\n\n            up_blocks.append(up_block)\n            prev_output_channel = output_channel\n        self.up_blocks = up_blocks\n\n        # out\n        self.conv_norm_out = nn.GroupNorm(num_groups=32, epsilon=1e-5)\n        self.conv_out = nn.Conv(\n            self.out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n    def __call__(\n        self,\n        sample,\n        timesteps,\n        encoder_hidden_states,\n        down_block_additional_residuals=None,\n        mid_block_additional_residual=None,\n        return_dict: bool = True,\n        train: bool = False,\n    ) -> Union[FlaxUNet2DConditionOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`jnp.ndarray`): (batch, channel, height, width) noisy inputs tensor\n            timestep (`jnp.ndarray` or `float` or `int`): timesteps\n            encoder_hidden_states (`jnp.ndarray`): (batch_size, sequence_length, hidden_size) encoder hidden states\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition_flax.FlaxUNet2DConditionOutput`] instead of a\n                plain tuple.\n            train (`bool`, *optional*, defaults to `False`):\n                Use deterministic functions and disable dropout when not training.\n\n        Returns:\n            [`~models.unet_2d_condition_flax.FlaxUNet2DConditionOutput`] or `tuple`:\n            [`~models.unet_2d_condition_flax.FlaxUNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # 1. time\n        if not isinstance(timesteps, jnp.ndarray):\n            timesteps = jnp.array([timesteps], dtype=jnp.int32)\n        elif isinstance(timesteps, jnp.ndarray) and len(timesteps.shape) == 0:\n            timesteps = timesteps.astype(dtype=jnp.float32)\n            timesteps = jnp.expand_dims(timesteps, 0)\n\n        t_emb = self.time_proj(timesteps)\n        t_emb = self.time_embedding(t_emb)\n\n        # 2. pre-process\n        sample = jnp.transpose(sample, (0, 2, 3, 1))\n        sample = self.conv_in(sample)\n\n        # 3. 
down\n        down_block_res_samples = (sample,)\n        for down_block in self.down_blocks:\n            if isinstance(down_block, FlaxCrossAttnDownBlock2D):\n                sample, res_samples = down_block(sample, t_emb, encoder_hidden_states, deterministic=not train)\n            else:\n                sample, res_samples = down_block(sample, t_emb, deterministic=not train)\n            down_block_res_samples += res_samples\n\n        if down_block_additional_residuals is not None:\n            new_down_block_res_samples = ()\n\n            for down_block_res_sample, down_block_additional_residual in zip(\n                down_block_res_samples, down_block_additional_residuals\n            ):\n                down_block_res_sample += down_block_additional_residual\n                new_down_block_res_samples += (down_block_res_sample,)\n\n            down_block_res_samples = new_down_block_res_samples\n\n        # 4. mid\n        sample = self.mid_block(sample, t_emb, encoder_hidden_states, deterministic=not train)\n\n        if mid_block_additional_residual is not None:\n            sample += mid_block_additional_residual\n\n        # 5. up\n        for up_block in self.up_blocks:\n            res_samples = down_block_res_samples[-(self.layers_per_block + 1) :]\n            down_block_res_samples = down_block_res_samples[: -(self.layers_per_block + 1)]\n            if isinstance(up_block, FlaxCrossAttnUpBlock2D):\n                sample = up_block(\n                    sample,\n                    temb=t_emb,\n                    encoder_hidden_states=encoder_hidden_states,\n                    res_hidden_states_tuple=res_samples,\n                    deterministic=not train,\n                )\n            else:\n                sample = up_block(sample, temb=t_emb, res_hidden_states_tuple=res_samples, deterministic=not train)\n\n        # 6. post-process\n        sample = self.conv_norm_out(sample)\n        sample = nn.silu(sample)\n        sample = self.conv_out(sample)\n        sample = jnp.transpose(sample, (0, 3, 1, 2))\n\n        if not return_dict:\n            return (sample,)\n\n        return FlaxUNet2DConditionOutput(sample=sample)\n"
  },
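The Flax variant follows the same pattern but separates parameter creation from the forward pass. A minimal sketch, assuming the module path above is importable and reusing the same illustrative two-stage configuration: `init_weights` builds a random parameter tree and `apply` runs one conditioned forward pass.

import jax
import jax.numpy as jnp

from diffusers.models.unet_2d_condition_flax import FlaxUNet2DConditionModel

# Tiny, hypothetical configuration mirroring the PyTorch sketch above.
unet = FlaxUNet2DConditionModel(
    sample_size=32,
    down_block_types=("CrossAttnDownBlock2D", "DownBlock2D"),
    up_block_types=("UpBlock2D", "CrossAttnUpBlock2D"),
    block_out_channels=(32, 64),
    layers_per_block=1,
    cross_attention_dim=32,
    attention_head_dim=8,
)

params = unet.init_weights(jax.random.PRNGKey(0))  # random parameters, enough for a shape check

sample = jnp.zeros((1, 4, 32, 32), dtype=jnp.float32)             # NCHW input; transposed to NHWC internally
timesteps = jnp.array([10], dtype=jnp.int32)
encoder_hidden_states = jnp.zeros((1, 77, 32), dtype=jnp.float32)  # (batch, seq, cross_attention_dim)

noise_pred = unet.apply({"params": params}, sample, timesteps, encoder_hidden_states).sample
assert noise_pred.shape == sample.shape

With the default `train=False`, dropout is disabled, so no dropout RNG needs to be passed to `apply`.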
  {
    "path": "diffusers/models/unet_3d_blocks.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\nfrom torch import nn\n\nfrom .resnet import Downsample2D, ResnetBlock2D, TemporalConvLayer, Upsample2D\nfrom .transformer_2d import Transformer2DModel\nfrom .transformer_temporal import TransformerTemporalModel\n\n\ndef get_down_block(\n    down_block_type,\n    num_layers,\n    in_channels,\n    out_channels,\n    temb_channels,\n    add_downsample,\n    resnet_eps,\n    resnet_act_fn,\n    attn_num_head_channels,\n    resnet_groups=None,\n    cross_attention_dim=None,\n    downsample_padding=None,\n    dual_cross_attention=False,\n    use_linear_projection=True,\n    only_cross_attention=False,\n    upcast_attention=False,\n    resnet_time_scale_shift=\"default\",\n):\n    if down_block_type == \"DownBlock3D\":\n        return DownBlock3D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"CrossAttnDownBlock3D\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for CrossAttnDownBlock3D\")\n        return CrossAttnDownBlock3D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            dual_cross_attention=dual_cross_attention,\n            use_linear_projection=use_linear_projection,\n            only_cross_attention=only_cross_attention,\n            upcast_attention=upcast_attention,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    raise ValueError(f\"{down_block_type} does not exist.\")\n\n\ndef get_up_block(\n    up_block_type,\n    num_layers,\n    in_channels,\n    out_channels,\n    prev_output_channel,\n    temb_channels,\n    add_upsample,\n    resnet_eps,\n    resnet_act_fn,\n    attn_num_head_channels,\n    resnet_groups=None,\n    cross_attention_dim=None,\n    dual_cross_attention=False,\n    use_linear_projection=True,\n    only_cross_attention=False,\n    upcast_attention=False,\n    resnet_time_scale_shift=\"default\",\n):\n    if up_block_type == \"UpBlock3D\":\n        return UpBlock3D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            
out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"CrossAttnUpBlock3D\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for CrossAttnUpBlock3D\")\n        return CrossAttnUpBlock3D(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            dual_cross_attention=dual_cross_attention,\n            use_linear_projection=use_linear_projection,\n            only_cross_attention=only_cross_attention,\n            upcast_attention=upcast_attention,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    raise ValueError(f\"{up_block_type} does not exist.\")\n\n\nclass UNetMidBlock3DCrossAttn(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        cross_attention_dim=1280,\n        dual_cross_attention=False,\n        use_linear_projection=True,\n        upcast_attention=False,\n    ):\n        super().__init__()\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)\n\n        # there is always at least one resnet\n        resnets = [\n            ResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=resnet_groups,\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n            )\n        ]\n        temp_convs = [\n            TemporalConvLayer(\n                in_channels,\n                in_channels,\n                dropout=0.1,\n            )\n        ]\n        attentions = []\n        temp_attentions = []\n\n        for _ in range(num_layers):\n            attentions.append(\n                Transformer2DModel(\n                    in_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    in_channels=in_channels,\n                    num_layers=1,\n                    cross_attention_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    use_linear_projection=use_linear_projection,\n                 
   upcast_attention=upcast_attention,\n                )\n            )\n            temp_attentions.append(\n                TransformerTemporalModel(\n                    in_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    in_channels=in_channels,\n                    num_layers=1,\n                    cross_attention_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                )\n            )\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=in_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            temp_convs.append(\n                TemporalConvLayer(\n                    in_channels,\n                    in_channels,\n                    dropout=0.1,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n        self.temp_convs = nn.ModuleList(temp_convs)\n        self.attentions = nn.ModuleList(attentions)\n        self.temp_attentions = nn.ModuleList(temp_attentions)\n\n    def forward(\n        self,\n        hidden_states,\n        temb=None,\n        encoder_hidden_states=None,\n        attention_mask=None,\n        num_frames=1,\n        cross_attention_kwargs=None,\n    ):\n        hidden_states = self.resnets[0](hidden_states, temb)\n        hidden_states = self.temp_convs[0](hidden_states, num_frames=num_frames)\n        for attn, temp_attn, resnet, temp_conv in zip(\n            self.attentions, self.temp_attentions, self.resnets[1:], self.temp_convs[1:]\n        ):\n            hidden_states = attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                cross_attention_kwargs=cross_attention_kwargs,\n            ).sample\n            hidden_states = temp_attn(\n                hidden_states, num_frames=num_frames, cross_attention_kwargs=cross_attention_kwargs\n            ).sample\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = temp_conv(hidden_states, num_frames=num_frames)\n\n        return hidden_states\n\n\nclass CrossAttnDownBlock3D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        downsample_padding=1,\n        add_downsample=True,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        only_cross_attention=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n        temp_attentions = []\n        temp_convs = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        for i in 
range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            temp_convs.append(\n                TemporalConvLayer(\n                    out_channels,\n                    out_channels,\n                    dropout=0.1,\n                )\n            )\n            attentions.append(\n                Transformer2DModel(\n                    out_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    in_channels=out_channels,\n                    num_layers=1,\n                    cross_attention_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    use_linear_projection=use_linear_projection,\n                    only_cross_attention=only_cross_attention,\n                    upcast_attention=upcast_attention,\n                )\n            )\n            temp_attentions.append(\n                TransformerTemporalModel(\n                    out_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    in_channels=out_channels,\n                    num_layers=1,\n                    cross_attention_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                )\n            )\n        self.resnets = nn.ModuleList(resnets)\n        self.temp_convs = nn.ModuleList(temp_convs)\n        self.attentions = nn.ModuleList(attentions)\n        self.temp_attentions = nn.ModuleList(temp_attentions)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self,\n        hidden_states,\n        temb=None,\n        encoder_hidden_states=None,\n        attention_mask=None,\n        num_frames=1,\n        cross_attention_kwargs=None,\n    ):\n        # TODO(Patrick, William) - attention mask is not used\n        output_states = ()\n\n        for resnet, temp_conv, attn, temp_attn in zip(\n            self.resnets, self.temp_convs, self.attentions, self.temp_attentions\n        ):\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = temp_conv(hidden_states, num_frames=num_frames)\n            hidden_states = attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                cross_attention_kwargs=cross_attention_kwargs,\n            ).sample\n            hidden_states = temp_attn(\n                hidden_states, num_frames=num_frames, cross_attention_kwargs=cross_attention_kwargs\n            ).sample\n\n            output_states += (hidden_states,)\n\n        if 
self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass DownBlock3D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_downsample=True,\n        downsample_padding=1,\n    ):\n        super().__init__()\n        resnets = []\n        temp_convs = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            temp_convs.append(\n                TemporalConvLayer(\n                    out_channels,\n                    out_channels,\n                    dropout=0.1,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n        self.temp_convs = nn.ModuleList(temp_convs)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    Downsample2D(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, temb=None, num_frames=1):\n        output_states = ()\n\n        for resnet, temp_conv in zip(self.resnets, self.temp_convs):\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = temp_conv(hidden_states, num_frames=num_frames)\n\n            output_states += (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states += (hidden_states,)\n\n        return hidden_states, output_states\n\n\nclass CrossAttnUpBlock3D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        prev_output_channel: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        add_upsample=True,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        only_cross_attention=False,\n        
upcast_attention=False,\n    ):\n        super().__init__()\n        resnets = []\n        temp_convs = []\n        attentions = []\n        temp_attentions = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            temp_convs.append(\n                TemporalConvLayer(\n                    out_channels,\n                    out_channels,\n                    dropout=0.1,\n                )\n            )\n            attentions.append(\n                Transformer2DModel(\n                    out_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    in_channels=out_channels,\n                    num_layers=1,\n                    cross_attention_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    use_linear_projection=use_linear_projection,\n                    only_cross_attention=only_cross_attention,\n                    upcast_attention=upcast_attention,\n                )\n            )\n            temp_attentions.append(\n                TransformerTemporalModel(\n                    out_channels // attn_num_head_channels,\n                    attn_num_head_channels,\n                    in_channels=out_channels,\n                    num_layers=1,\n                    cross_attention_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                )\n            )\n        self.resnets = nn.ModuleList(resnets)\n        self.temp_convs = nn.ModuleList(temp_convs)\n        self.attentions = nn.ModuleList(attentions)\n        self.temp_attentions = nn.ModuleList(temp_attentions)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self,\n        hidden_states,\n        res_hidden_states_tuple,\n        temb=None,\n        encoder_hidden_states=None,\n        upsample_size=None,\n        attention_mask=None,\n        num_frames=1,\n        cross_attention_kwargs=None,\n    ):\n        # TODO(Patrick, William) - attention mask is not used\n        for resnet, temp_conv, attn, temp_attn in zip(\n            self.resnets, self.temp_convs, self.attentions, self.temp_attentions\n        ):\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            hidden_states = resnet(hidden_states, 
temb)\n            hidden_states = temp_conv(hidden_states, num_frames=num_frames)\n            hidden_states = attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                cross_attention_kwargs=cross_attention_kwargs,\n            ).sample\n            hidden_states = temp_attn(\n                hidden_states, num_frames=num_frames, cross_attention_kwargs=cross_attention_kwargs\n            ).sample\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, upsample_size)\n\n        return hidden_states\n\n\nclass UpBlock3D(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n        temp_convs = []\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlock2D(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            temp_convs.append(\n                TemporalConvLayer(\n                    out_channels,\n                    out_channels,\n                    dropout=0.1,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n        self.temp_convs = nn.ModuleList(temp_convs)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([Upsample2D(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None, num_frames=1):\n        for resnet, temp_conv in zip(self.resnets, self.temp_convs):\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            hidden_states = resnet(hidden_states, temb)\n            hidden_states = temp_conv(hidden_states, num_frames=num_frames)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, upsample_size)\n\n        return hidden_states\n"
  },
  {
    "path": "diffusers/models/unet_3d_condition.py",
    "content": "# Copyright 2023 Alibaba DAMO-VILAB and The HuggingFace Team. All rights reserved.\n# Copyright 2023 The ModelScope Team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Any, Dict, List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.utils.checkpoint\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..loaders import UNet2DConditionLoadersMixin\nfrom ..utils import BaseOutput, logging\nfrom .attention_processor import AttentionProcessor, AttnProcessor\nfrom .embeddings import TimestepEmbedding, Timesteps\nfrom .modeling_utils import ModelMixin\nfrom .transformer_temporal import TransformerTemporalModel\nfrom .unet_3d_blocks import (\n    CrossAttnDownBlock3D,\n    CrossAttnUpBlock3D,\n    DownBlock3D,\n    UNetMidBlock3DCrossAttn,\n    UpBlock3D,\n    get_down_block,\n    get_up_block,\n)\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\nclass UNet3DConditionOutput(BaseOutput):\n    \"\"\"\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size, num_frames, num_channels, height, width)`):\n            Hidden states conditioned on `encoder_hidden_states` input. Output of last layer of model.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass UNet3DConditionModel(ModelMixin, ConfigMixin, UNet2DConditionLoadersMixin):\n    r\"\"\"\n    UNet3DConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a timestep\n    and returns sample shaped output.\n\n    This model inherits from [`ModelMixin`]. 
Check the superclass documentation for the generic methods the library\n    implements for all the models (such as downloading or saving, etc.)\n\n    Parameters:\n        sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`):\n            Height and width of input/output sample.\n        in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample.\n        out_channels (`int`, *optional*, defaults to 4): The number of channels in the output.\n        down_block_types (`Tuple[str]`, *optional*, defaults to `(\"CrossAttnDownBlock3D\", \"CrossAttnDownBlock3D\", \"CrossAttnDownBlock3D\", \"DownBlock3D\")`):\n            The tuple of downsample blocks to use.\n        up_block_types (`Tuple[str]`, *optional*, defaults to `(\"UpBlock3D\", \"CrossAttnUpBlock3D\", \"CrossAttnUpBlock3D\", \"CrossAttnUpBlock3D\",)`):\n            The tuple of upsample blocks to use.\n        block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`):\n            The tuple of output channels for each block.\n        layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block.\n        downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution.\n        mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block.\n        act_fn (`str`, *optional*, defaults to `\"silu\"`): The activation function to use.\n        norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization.\n            If `None`, it will skip the normalization and activation layers in post-processing.\n        norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization.\n        cross_attention_dim (`int`, *optional*, defaults to 1024): The dimension of the cross attention features.\n        attention_head_dim (`int` or `Tuple[int]`, *optional*, defaults to 64): The dimension of the attention heads.\n    \"\"\"\n\n    _supports_gradient_checkpointing = False\n\n    @register_to_config\n    def __init__(\n        self,\n        sample_size: Optional[int] = None,\n        in_channels: int = 4,\n        out_channels: int = 4,\n        down_block_types: Tuple[str] = (\n            \"CrossAttnDownBlock3D\",\n            \"CrossAttnDownBlock3D\",\n            \"CrossAttnDownBlock3D\",\n            \"DownBlock3D\",\n        ),\n        up_block_types: Tuple[str] = (\"UpBlock3D\", \"CrossAttnUpBlock3D\", \"CrossAttnUpBlock3D\", \"CrossAttnUpBlock3D\"),\n        block_out_channels: Tuple[int] = (320, 640, 1280, 1280),\n        layers_per_block: int = 2,\n        downsample_padding: int = 1,\n        mid_block_scale_factor: float = 1,\n        act_fn: str = \"silu\",\n        norm_num_groups: Optional[int] = 32,\n        norm_eps: float = 1e-5,\n        cross_attention_dim: int = 1024,\n        attention_head_dim: Union[int, Tuple[int]] = 64,\n    ):\n        super().__init__()\n\n        self.sample_size = sample_size\n\n        # Check inputs\n        if len(down_block_types) != len(up_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`: {down_block_types}. `up_block_types`: {up_block_types}.\"\n            )\n\n        if len(block_out_channels) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `block_out_channels` as `down_block_types`. 
`block_out_channels`: {block_out_channels}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types):\n            raise ValueError(\n                f\"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`: {attention_head_dim}. `down_block_types`: {down_block_types}.\"\n            )\n\n        # input\n        conv_in_kernel = 3\n        conv_out_kernel = 3\n        conv_in_padding = (conv_in_kernel - 1) // 2\n        self.conv_in = nn.Conv2d(\n            in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding\n        )\n\n        # time\n        time_embed_dim = block_out_channels[0] * 4\n        self.time_proj = Timesteps(block_out_channels[0], True, 0)\n        timestep_input_dim = block_out_channels[0]\n\n        self.time_embedding = TimestepEmbedding(\n            timestep_input_dim,\n            time_embed_dim,\n            act_fn=act_fn,\n        )\n\n        self.transformer_in = TransformerTemporalModel(\n            num_attention_heads=8,\n            attention_head_dim=attention_head_dim,\n            in_channels=block_out_channels[0],\n            num_layers=1,\n        )\n\n        # class embedding\n        self.down_blocks = nn.ModuleList([])\n        self.up_blocks = nn.ModuleList([])\n\n        if isinstance(attention_head_dim, int):\n            attention_head_dim = (attention_head_dim,) * len(down_block_types)\n\n        # down\n        output_channel = block_out_channels[0]\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=layers_per_block,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                temb_channels=time_embed_dim,\n                add_downsample=not is_final_block,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=cross_attention_dim,\n                attn_num_head_channels=attention_head_dim[i],\n                downsample_padding=downsample_padding,\n                dual_cross_attention=False,\n            )\n            self.down_blocks.append(down_block)\n\n        # mid\n        self.mid_block = UNetMidBlock3DCrossAttn(\n            in_channels=block_out_channels[-1],\n            temb_channels=time_embed_dim,\n            resnet_eps=norm_eps,\n            resnet_act_fn=act_fn,\n            output_scale_factor=mid_block_scale_factor,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attention_head_dim[-1],\n            resnet_groups=norm_num_groups,\n            dual_cross_attention=False,\n        )\n\n        # count how many layers upsample the images\n        self.num_upsamplers = 0\n\n        # up\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        reversed_attention_head_dim = list(reversed(attention_head_dim))\n\n        output_channel = reversed_block_out_channels[0]\n        for i, up_block_type in enumerate(up_block_types):\n            is_final_block = i == len(block_out_channels) - 1\n\n            prev_output_channel = output_channel\n            
output_channel = reversed_block_out_channels[i]\n            input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)]\n\n            # add upsample block for all BUT final layer\n            if not is_final_block:\n                add_upsample = True\n                self.num_upsamplers += 1\n            else:\n                add_upsample = False\n\n            up_block = get_up_block(\n                up_block_type,\n                num_layers=layers_per_block + 1,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                prev_output_channel=prev_output_channel,\n                temb_channels=time_embed_dim,\n                add_upsample=add_upsample,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=cross_attention_dim,\n                attn_num_head_channels=reversed_attention_head_dim[i],\n                dual_cross_attention=False,\n            )\n            self.up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        # out\n        if norm_num_groups is not None:\n            self.conv_norm_out = nn.GroupNorm(\n                num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps\n            )\n            self.conv_act = nn.SiLU()\n        else:\n            self.conv_norm_out = None\n            self.conv_act = None\n\n        conv_out_padding = (conv_out_kernel - 1) // 2\n        self.conv_out = nn.Conv2d(\n            block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding\n        )\n\n    @property\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.attn_processors\n    def attn_processors(self) -> Dict[str, AttentionProcessor]:\n        r\"\"\"\n        Returns:\n            `dict` of attention processors: A dictionary containing all attention processors used in the model with\n            indexed by its weight name.\n        \"\"\"\n        # set recursively\n        processors = {}\n\n        def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]):\n            if hasattr(module, \"set_processor\"):\n                processors[f\"{name}.processor\"] = module.processor\n\n            for sub_name, child in module.named_children():\n                fn_recursive_add_processors(f\"{name}.{sub_name}\", child, processors)\n\n            return processors\n\n        for name, module in self.named_children():\n            fn_recursive_add_processors(name, module, processors)\n\n        return processors\n\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.set_attention_slice\n    def set_attention_slice(self, slice_size):\n        r\"\"\"\n        Enable sliced attention computation.\n\n        When this option is enabled, the attention module will split the input tensor in slices, to compute attention\n        in several steps. This is useful to save some memory in exchange for a small speed decrease.\n\n        Args:\n            slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `\"auto\"`):\n                When `\"auto\"`, halves the input to the attention heads, so attention will be computed in two steps. If\n                `\"max\"`, maximum amount of memory will be saved by running only one slice at a time. 
If a number is\n                provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim`\n                must be a multiple of `slice_size`.\n        \"\"\"\n        sliceable_head_dims = []\n\n        def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module):\n            if hasattr(module, \"set_attention_slice\"):\n                sliceable_head_dims.append(module.sliceable_head_dim)\n\n            for child in module.children():\n                fn_recursive_retrieve_sliceable_dims(child)\n\n        # retrieve number of attention layers\n        for module in self.children():\n            fn_recursive_retrieve_sliceable_dims(module)\n\n        num_sliceable_layers = len(sliceable_head_dims)\n\n        if slice_size == \"auto\":\n            # half the attention head size is usually a good trade-off between\n            # speed and memory\n            slice_size = [dim // 2 for dim in sliceable_head_dims]\n        elif slice_size == \"max\":\n            # make smallest slice possible\n            slice_size = num_sliceable_layers * [1]\n\n        slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size\n\n        if len(slice_size) != len(sliceable_head_dims):\n            raise ValueError(\n                f\"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different\"\n                f\" attention layers. Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}.\"\n            )\n\n        for i in range(len(slice_size)):\n            size = slice_size[i]\n            dim = sliceable_head_dims[i]\n            if size is not None and size > dim:\n                raise ValueError(f\"size {size} has to be smaller or equal to {dim}.\")\n\n        # Recursively walk through all the children.\n        # Any children which exposes the set_attention_slice method\n        # gets the message\n        def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]):\n            if hasattr(module, \"set_attention_slice\"):\n                module.set_attention_slice(slice_size.pop())\n\n            for child in module.children():\n                fn_recursive_set_attention_slice(child, slice_size)\n\n        reversed_slice_size = list(reversed(slice_size))\n        for module in self.children():\n            fn_recursive_set_attention_slice(module, reversed_slice_size)\n\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.set_attn_processor\n    def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]):\n        r\"\"\"\n        Parameters:\n            `processor (`dict` of `AttentionProcessor` or `AttentionProcessor`):\n                The instantiated processor class or a dictionary of processor classes that will be set as the processor\n                of **all** `Attention` layers.\n            In case `processor` is a dict, the key needs to define the path to the corresponding cross attention processor. This is strongly recommended when setting trainable attention processors.:\n\n        \"\"\"\n        count = len(self.attn_processors.keys())\n\n        if isinstance(processor, dict) and len(processor) != count:\n            raise ValueError(\n                f\"A dict of processors was passed, but the number of processors {len(processor)} does not match the\"\n                f\" number of attention layers: {count}. 
Please make sure to pass {count} processor classes.\"\n            )\n\n        def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor):\n            if hasattr(module, \"set_processor\"):\n                if not isinstance(processor, dict):\n                    module.set_processor(processor)\n                else:\n                    module.set_processor(processor.pop(f\"{name}.processor\"))\n\n            for sub_name, child in module.named_children():\n                fn_recursive_attn_processor(f\"{name}.{sub_name}\", child, processor)\n\n        for name, module in self.named_children():\n            fn_recursive_attn_processor(name, module, processor)\n\n    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.set_default_attn_processor\n    def set_default_attn_processor(self):\n        \"\"\"\n        Disables custom attention processors and sets the default attention implementation.\n        \"\"\"\n        self.set_attn_processor(AttnProcessor())\n\n    def _set_gradient_checkpointing(self, module, value=False):\n        if isinstance(module, (CrossAttnDownBlock3D, DownBlock3D, CrossAttnUpBlock3D, UpBlock3D)):\n            module.gradient_checkpointing = value\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        encoder_hidden_states: torch.Tensor,\n        class_labels: Optional[torch.Tensor] = None,\n        timestep_cond: Optional[torch.Tensor] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None,\n        mid_block_additional_residual: Optional[torch.Tensor] = None,\n        return_dict: bool = True,\n    ) -> Union[UNet3DConditionOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): (batch, channel, num_frames, height, width) noisy inputs tensor\n            timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps\n            encoder_hidden_states (`torch.FloatTensor`): (batch, sequence_length, feature_dim) encoder hidden states\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_3d_condition.UNet3DConditionOutput`] instead of a plain tuple.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Returns:\n            [`~models.unet_3d_condition.UNet3DConditionOutput`] or `tuple`:\n            [`~models.unet_3d_condition.UNet3DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # By default samples have to be at least a multiple of the overall upsampling factor.\n        # The overall upsampling factor is equal to 2 ** (# num of upsampling layers).\n        # However, the upsampling interpolation output size can be forced to fit any upsampling size\n        # on the fly if necessary.\n        default_overall_up_factor = 2**self.num_upsamplers\n\n        # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor`\n        forward_upsample_size = False\n        upsample_size = None\n\n        if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]):\n            logger.info(\"Forward upsample size to force interpolation output size.\")\n            forward_upsample_size = True\n\n        # prepare attention_mask\n        if attention_mask is not None:\n            attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0\n            attention_mask = attention_mask.unsqueeze(1)\n\n        # 1. time\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can\n            # This would be a good case for the `match` statement (Python 3.10+)\n            is_mps = sample.device.type == \"mps\"\n            if isinstance(timestep, float):\n                dtype = torch.float32 if is_mps else torch.float64\n            else:\n                dtype = torch.int32 if is_mps else torch.int64\n            timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device)\n        elif len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(sample.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        num_frames = sample.shape[2]\n        timesteps = timesteps.expand(sample.shape[0])\n\n        t_emb = self.time_proj(timesteps)\n\n        # timesteps does not contain any weights and will always return f32 tensors\n        # but time_embedding might actually be running in fp16. so we need to cast here.\n        # there might be better ways to encapsulate this.\n        t_emb = t_emb.to(dtype=self.dtype)\n\n        emb = self.time_embedding(t_emb, timestep_cond)\n        emb = emb.repeat_interleave(repeats=num_frames, dim=0)\n        encoder_hidden_states = encoder_hidden_states.repeat_interleave(repeats=num_frames, dim=0)\n\n        # 2. pre-process\n        sample = sample.permute(0, 2, 1, 3, 4).reshape((sample.shape[0] * num_frames, -1) + sample.shape[3:])\n        sample = self.conv_in(sample)\n\n        sample = self.transformer_in(\n            sample, num_frames=num_frames, cross_attention_kwargs=cross_attention_kwargs\n        ).sample\n\n        # 3. 
down\n        down_block_res_samples = (sample,)\n        for downsample_block in self.down_blocks:\n            if hasattr(downsample_block, \"has_cross_attention\") and downsample_block.has_cross_attention:\n                sample, res_samples = downsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    encoder_hidden_states=encoder_hidden_states,\n                    attention_mask=attention_mask,\n                    num_frames=num_frames,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n            else:\n                sample, res_samples = downsample_block(hidden_states=sample, temb=emb, num_frames=num_frames)\n\n            down_block_res_samples += res_samples\n\n        if down_block_additional_residuals is not None:\n            new_down_block_res_samples = ()\n\n            for down_block_res_sample, down_block_additional_residual in zip(\n                down_block_res_samples, down_block_additional_residuals\n            ):\n                down_block_res_sample = down_block_res_sample + down_block_additional_residual\n                new_down_block_res_samples += (down_block_res_sample,)\n\n            down_block_res_samples = new_down_block_res_samples\n\n        # 4. mid\n        if self.mid_block is not None:\n            sample = self.mid_block(\n                sample,\n                emb,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=attention_mask,\n                num_frames=num_frames,\n                cross_attention_kwargs=cross_attention_kwargs,\n            )\n\n        if mid_block_additional_residual is not None:\n            sample = sample + mid_block_additional_residual\n\n        # 5. up\n        for i, upsample_block in enumerate(self.up_blocks):\n            is_final_block = i == len(self.up_blocks) - 1\n\n            res_samples = down_block_res_samples[-len(upsample_block.resnets) :]\n            down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)]\n\n            # if we have not reached the final block and need to forward the\n            # upsample size, we do it here\n            if not is_final_block and forward_upsample_size:\n                upsample_size = down_block_res_samples[-1].shape[2:]\n\n            if hasattr(upsample_block, \"has_cross_attention\") and upsample_block.has_cross_attention:\n                sample = upsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    res_hidden_states_tuple=res_samples,\n                    encoder_hidden_states=encoder_hidden_states,\n                    upsample_size=upsample_size,\n                    attention_mask=attention_mask,\n                    num_frames=num_frames,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n            else:\n                sample = upsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    res_hidden_states_tuple=res_samples,\n                    upsample_size=upsample_size,\n                    num_frames=num_frames,\n                )\n\n        # 6. 
post-process\n        if self.conv_norm_out:\n            sample = self.conv_norm_out(sample)\n            sample = self.conv_act(sample)\n\n        sample = self.conv_out(sample)\n\n        # reshape to (batch, channel, num_frames, height, width)\n        sample = sample[None, :].reshape((-1, num_frames) + sample.shape[1:]).permute(0, 2, 1, 3, 4)\n\n        if not return_dict:\n            return (sample,)\n\n        return UNet3DConditionOutput(sample=sample)\n"
  },
  {
    "path": "diffusers/models/vae.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Optional\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\nfrom ..utils import BaseOutput, is_torch_version, randn_tensor\nfrom .unet_2d_blocks import UNetMidBlock2D, get_down_block, get_up_block\n\n\n@dataclass\nclass DecoderOutput(BaseOutput):\n    \"\"\"\n    Output of decoding method.\n\n    Args:\n        sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):\n            Decoded output sample of the model. Output of the last layer of the model.\n    \"\"\"\n\n    sample: torch.FloatTensor\n\n\nclass Encoder(nn.Module):\n    def __init__(\n        self,\n        in_channels=3,\n        out_channels=3,\n        down_block_types=(\"DownEncoderBlock2D\",),\n        block_out_channels=(64,),\n        layers_per_block=2,\n        norm_num_groups=32,\n        act_fn=\"silu\",\n        double_z=True,\n    ):\n        super().__init__()\n        self.layers_per_block = layers_per_block\n\n        self.conv_in = torch.nn.Conv2d(\n            in_channels,\n            block_out_channels[0],\n            kernel_size=3,\n            stride=1,\n            padding=1,\n        )\n\n        self.mid_block = None\n        self.down_blocks = nn.ModuleList([])\n\n        # down\n        output_channel = block_out_channels[0]\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=self.layers_per_block,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                add_downsample=not is_final_block,\n                resnet_eps=1e-6,\n                downsample_padding=0,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                attn_num_head_channels=None,\n                temb_channels=None,\n            )\n            self.down_blocks.append(down_block)\n\n        # mid\n        self.mid_block = UNetMidBlock2D(\n            in_channels=block_out_channels[-1],\n            resnet_eps=1e-6,\n            resnet_act_fn=act_fn,\n            output_scale_factor=1,\n            resnet_time_scale_shift=\"default\",\n            attn_num_head_channels=None,\n            resnet_groups=norm_num_groups,\n            temb_channels=None,\n        )\n\n        # out\n        self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[-1], num_groups=norm_num_groups, eps=1e-6)\n        self.conv_act = nn.SiLU()\n\n        conv_out_channels = 2 * out_channels if double_z else out_channels\n        self.conv_out = nn.Conv2d(block_out_channels[-1], conv_out_channels, 3, padding=1)\n\n        self.gradient_checkpointing = False\n\n    def 
forward(self, x):\n        sample = x\n        sample = self.conv_in(sample)\n\n        if self.training and self.gradient_checkpointing:\n\n            def create_custom_forward(module):\n                def custom_forward(*inputs):\n                    return module(*inputs)\n\n                return custom_forward\n\n            # down\n            if is_torch_version(\">=\", \"1.11.0\"):\n                for down_block in self.down_blocks:\n                    sample = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(down_block), sample, use_reentrant=False\n                    )\n                # middle\n                sample = torch.utils.checkpoint.checkpoint(\n                    create_custom_forward(self.mid_block), sample, use_reentrant=False\n                )\n            else:\n                for down_block in self.down_blocks:\n                    sample = torch.utils.checkpoint.checkpoint(create_custom_forward(down_block), sample)\n                # middle\n                sample = torch.utils.checkpoint.checkpoint(create_custom_forward(self.mid_block), sample)\n\n        else:\n            # down\n            for down_block in self.down_blocks:\n                sample = down_block(sample)\n\n            # middle\n            sample = self.mid_block(sample)\n\n        # post-process\n        sample = self.conv_norm_out(sample)\n        sample = self.conv_act(sample)\n        sample = self.conv_out(sample)\n\n        return sample\n\n\nclass Decoder(nn.Module):\n    def __init__(\n        self,\n        in_channels=3,\n        out_channels=3,\n        up_block_types=(\"UpDecoderBlock2D\",),\n        block_out_channels=(64,),\n        layers_per_block=2,\n        norm_num_groups=32,\n        act_fn=\"silu\",\n    ):\n        super().__init__()\n        self.layers_per_block = layers_per_block\n\n        self.conv_in = nn.Conv2d(\n            in_channels,\n            block_out_channels[-1],\n            kernel_size=3,\n            stride=1,\n            padding=1,\n        )\n\n        self.mid_block = None\n        self.up_blocks = nn.ModuleList([])\n\n        # mid\n        self.mid_block = UNetMidBlock2D(\n            in_channels=block_out_channels[-1],\n            resnet_eps=1e-6,\n            resnet_act_fn=act_fn,\n            output_scale_factor=1,\n            resnet_time_scale_shift=\"default\",\n            attn_num_head_channels=None,\n            resnet_groups=norm_num_groups,\n            temb_channels=None,\n        )\n\n        # up\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        output_channel = reversed_block_out_channels[0]\n        for i, up_block_type in enumerate(up_block_types):\n            prev_output_channel = output_channel\n            output_channel = reversed_block_out_channels[i]\n\n            is_final_block = i == len(block_out_channels) - 1\n\n            up_block = get_up_block(\n                up_block_type,\n                num_layers=self.layers_per_block + 1,\n                in_channels=prev_output_channel,\n                out_channels=output_channel,\n                prev_output_channel=None,\n                add_upsample=not is_final_block,\n                resnet_eps=1e-6,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                attn_num_head_channels=None,\n                temb_channels=None,\n            )\n            self.up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        # 
out\n        self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=1e-6)\n        self.conv_act = nn.SiLU()\n        self.conv_out = nn.Conv2d(block_out_channels[0], out_channels, 3, padding=1)\n\n        self.gradient_checkpointing = False\n\n    def forward(self, z):\n        sample = z\n        sample = self.conv_in(sample)\n\n        upscale_dtype = next(iter(self.up_blocks.parameters())).dtype\n        if self.training and self.gradient_checkpointing:\n\n            def create_custom_forward(module):\n                def custom_forward(*inputs):\n                    return module(*inputs)\n\n                return custom_forward\n\n            if is_torch_version(\">=\", \"1.11.0\"):\n                # middle\n                sample = torch.utils.checkpoint.checkpoint(\n                    create_custom_forward(self.mid_block), sample, use_reentrant=False\n                )\n                sample = sample.to(upscale_dtype)\n\n                # up\n                for up_block in self.up_blocks:\n                    sample = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(up_block), sample, use_reentrant=False\n                    )\n            else:\n                # middle\n                sample = torch.utils.checkpoint.checkpoint(create_custom_forward(self.mid_block), sample)\n                sample = sample.to(upscale_dtype)\n\n                # up\n                for up_block in self.up_blocks:\n                    sample = torch.utils.checkpoint.checkpoint(create_custom_forward(up_block), sample)\n        else:\n            # middle\n            sample = self.mid_block(sample)\n            sample = sample.to(upscale_dtype)\n\n            # up\n            for up_block in self.up_blocks:\n                sample = up_block(sample)\n\n        # post-process\n        sample = self.conv_norm_out(sample)\n        sample = self.conv_act(sample)\n        sample = self.conv_out(sample)\n\n        return sample\n\n\nclass VectorQuantizer(nn.Module):\n    \"\"\"\n    Improved version over VectorQuantizer, can be used as a drop-in replacement. Mostly avoids costly matrix\n    multiplications and allows for post-hoc remapping of indices.\n    \"\"\"\n\n    # NOTE: due to a bug the beta term was applied to the wrong term. for\n    # backwards compatibility we use the buggy version by default, but you can\n    # specify legacy=False to fix it.\n    def __init__(\n        self, n_e, vq_embed_dim, beta, remap=None, unknown_index=\"random\", sane_index_shape=False, legacy=True\n    ):\n        super().__init__()\n        self.n_e = n_e\n        self.vq_embed_dim = vq_embed_dim\n        self.beta = beta\n        self.legacy = legacy\n\n        self.embedding = nn.Embedding(self.n_e, self.vq_embed_dim)\n        self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e)\n\n        self.remap = remap\n        if self.remap is not None:\n            self.register_buffer(\"used\", torch.tensor(np.load(self.remap)))\n            self.re_embed = self.used.shape[0]\n            self.unknown_index = unknown_index  # \"random\" or \"extra\" or integer\n            if self.unknown_index == \"extra\":\n                self.unknown_index = self.re_embed\n                self.re_embed = self.re_embed + 1\n            print(\n                f\"Remapping {self.n_e} indices to {self.re_embed} indices. 
\"\n                f\"Using {self.unknown_index} for unknown indices.\"\n            )\n        else:\n            self.re_embed = n_e\n\n        self.sane_index_shape = sane_index_shape\n\n    def remap_to_used(self, inds):\n        ishape = inds.shape\n        assert len(ishape) > 1\n        inds = inds.reshape(ishape[0], -1)\n        used = self.used.to(inds)\n        match = (inds[:, :, None] == used[None, None, ...]).long()\n        new = match.argmax(-1)\n        unknown = match.sum(2) < 1\n        if self.unknown_index == \"random\":\n            new[unknown] = torch.randint(0, self.re_embed, size=new[unknown].shape).to(device=new.device)\n        else:\n            new[unknown] = self.unknown_index\n        return new.reshape(ishape)\n\n    def unmap_to_all(self, inds):\n        ishape = inds.shape\n        assert len(ishape) > 1\n        inds = inds.reshape(ishape[0], -1)\n        used = self.used.to(inds)\n        if self.re_embed > self.used.shape[0]:  # extra token\n            inds[inds >= self.used.shape[0]] = 0  # simply set to zero\n        back = torch.gather(used[None, :][inds.shape[0] * [0], :], 1, inds)\n        return back.reshape(ishape)\n\n    def forward(self, z):\n        # reshape z -> (batch, height, width, channel) and flatten\n        z = z.permute(0, 2, 3, 1).contiguous()\n        z_flattened = z.view(-1, self.vq_embed_dim)\n\n        # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z\n        min_encoding_indices = torch.argmin(torch.cdist(z_flattened, self.embedding.weight), dim=1)\n\n        z_q = self.embedding(min_encoding_indices).view(z.shape)\n        perplexity = None\n        min_encodings = None\n\n        # compute loss for embedding\n        if not self.legacy:\n            loss = self.beta * torch.mean((z_q.detach() - z) ** 2) + torch.mean((z_q - z.detach()) ** 2)\n        else:\n            loss = torch.mean((z_q.detach() - z) ** 2) + self.beta * torch.mean((z_q - z.detach()) ** 2)\n\n        # preserve gradients\n        z_q = z + (z_q - z).detach()\n\n        # reshape back to match original input shape\n        z_q = z_q.permute(0, 3, 1, 2).contiguous()\n\n        if self.remap is not None:\n            min_encoding_indices = min_encoding_indices.reshape(z.shape[0], -1)  # add batch axis\n            min_encoding_indices = self.remap_to_used(min_encoding_indices)\n            min_encoding_indices = min_encoding_indices.reshape(-1, 1)  # flatten\n\n        if self.sane_index_shape:\n            min_encoding_indices = min_encoding_indices.reshape(z_q.shape[0], z_q.shape[2], z_q.shape[3])\n\n        return z_q, loss, (perplexity, min_encodings, min_encoding_indices)\n\n    def get_codebook_entry(self, indices, shape):\n        # shape specifying (batch, height, width, channel)\n        if self.remap is not None:\n            indices = indices.reshape(shape[0], -1)  # add batch axis\n            indices = self.unmap_to_all(indices)\n            indices = indices.reshape(-1)  # flatten again\n\n        # get quantized latent vectors\n        z_q = self.embedding(indices)\n\n        if shape is not None:\n            z_q = z_q.view(shape)\n            # reshape back to match original input shape\n            z_q = z_q.permute(0, 3, 1, 2).contiguous()\n\n        return z_q\n\n\nclass DiagonalGaussianDistribution(object):\n    def __init__(self, parameters, deterministic=False):\n        self.parameters = parameters\n        self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)\n        self.logvar = torch.clamp(self.logvar, 
-30.0, 20.0)\n        self.deterministic = deterministic\n        self.std = torch.exp(0.5 * self.logvar)\n        self.var = torch.exp(self.logvar)\n        if self.deterministic:\n            self.var = self.std = torch.zeros_like(\n                self.mean, device=self.parameters.device, dtype=self.parameters.dtype\n            )\n\n    def sample(self, generator: Optional[torch.Generator] = None) -> torch.FloatTensor:\n        # make sure sample is on the same device as the parameters and has same dtype\n        sample = randn_tensor(\n            self.mean.shape, generator=generator, device=self.parameters.device, dtype=self.parameters.dtype\n        )\n        x = self.mean + self.std * sample\n        return x\n\n    def kl(self, other=None):\n        if self.deterministic:\n            return torch.Tensor([0.0])\n        else:\n            if other is None:\n                return 0.5 * torch.sum(torch.pow(self.mean, 2) + self.var - 1.0 - self.logvar, dim=[1, 2, 3])\n            else:\n                return 0.5 * torch.sum(\n                    torch.pow(self.mean - other.mean, 2) / other.var\n                    + self.var / other.var\n                    - 1.0\n                    - self.logvar\n                    + other.logvar,\n                    dim=[1, 2, 3],\n                )\n\n    def nll(self, sample, dims=[1, 2, 3]):\n        if self.deterministic:\n            return torch.Tensor([0.0])\n        logtwopi = np.log(2.0 * np.pi)\n        return 0.5 * torch.sum(logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var, dim=dims)\n\n    def mode(self):\n        return self.mean\n"
  },
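For reference, a minimal sketch (toy shapes and values are assumptions, not tied to any checkpoint) of the reparameterized sampling and KL term computed by the `DiagonalGaussianDistribution` defined in `diffusers/models/vae.py` above:

```python
import torch

# Toy encoder output: batch of 2, 2 * 4 latent channels (means + log-variances), 8x8 grid.
parameters = torch.randn(2, 8, 8, 8)

mean, logvar = torch.chunk(parameters, 2, dim=1)
logvar = torch.clamp(logvar, -30.0, 20.0)
std = torch.exp(0.5 * logvar)

# Reparameterized sample: mean + std * eps keeps the sample differentiable w.r.t. mean/std.
sample = mean + std * torch.randn_like(mean)

# KL divergence to a standard normal, reduced over channel/height/width (one value per sample).
kl = 0.5 * torch.sum(mean.pow(2) + logvar.exp() - 1.0 - logvar, dim=[1, 2, 3])
print(sample.shape, kl.shape)  # torch.Size([2, 4, 8, 8]) torch.Size([2])
```

Clamping the log-variance keeps `exp(logvar)` finite in low precision, which is why the class applies it before deriving `std` and `var`.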
  {
    "path": "diffusers/models/vae_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# JAX implementation of VQGAN from taming-transformers https://github.com/CompVis/taming-transformers\n\nimport math\nfrom functools import partial\nfrom typing import Tuple\n\nimport flax\nimport flax.linen as nn\nimport jax\nimport jax.numpy as jnp\nfrom flax.core.frozen_dict import FrozenDict\n\nfrom ..configuration_utils import ConfigMixin, flax_register_to_config\nfrom ..utils import BaseOutput\nfrom .modeling_flax_utils import FlaxModelMixin\n\n\n@flax.struct.dataclass\nclass FlaxDecoderOutput(BaseOutput):\n    \"\"\"\n    Output of decoding method.\n\n    Args:\n        sample (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)`):\n            Decoded output sample of the model. Output of the last layer of the model.\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n\n    sample: jnp.ndarray\n\n\n@flax.struct.dataclass\nclass FlaxAutoencoderKLOutput(BaseOutput):\n    \"\"\"\n    Output of AutoencoderKL encoding method.\n\n    Args:\n        latent_dist (`FlaxDiagonalGaussianDistribution`):\n            Encoded outputs of `Encoder` represented as the mean and logvar of `FlaxDiagonalGaussianDistribution`.\n            `FlaxDiagonalGaussianDistribution` allows for sampling latents from the distribution.\n    \"\"\"\n\n    latent_dist: \"FlaxDiagonalGaussianDistribution\"\n\n\nclass FlaxUpsample2D(nn.Module):\n    \"\"\"\n    Flax implementation of 2D Upsample layer\n\n    Args:\n        in_channels (`int`):\n            Input channels\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n\n    in_channels: int\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.conv = nn.Conv(\n            self.in_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n    def __call__(self, hidden_states):\n        batch, height, width, channels = hidden_states.shape\n        hidden_states = jax.image.resize(\n            hidden_states,\n            shape=(batch, height * 2, width * 2, channels),\n            method=\"nearest\",\n        )\n        hidden_states = self.conv(hidden_states)\n        return hidden_states\n\n\nclass FlaxDownsample2D(nn.Module):\n    \"\"\"\n    Flax implementation of 2D Downsample layer\n\n    Args:\n        in_channels (`int`):\n            Input channels\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n\n    in_channels: int\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.conv = nn.Conv(\n            self.in_channels,\n            kernel_size=(3, 3),\n            strides=(2, 2),\n            padding=\"VALID\",\n            dtype=self.dtype,\n        )\n\n    def __call__(self, 
hidden_states):\n        pad = ((0, 0), (0, 1), (0, 1), (0, 0))  # pad height and width dim\n        hidden_states = jnp.pad(hidden_states, pad_width=pad)\n        hidden_states = self.conv(hidden_states)\n        return hidden_states\n\n\nclass FlaxResnetBlock2D(nn.Module):\n    \"\"\"\n    Flax implementation of 2D Resnet Block.\n\n    Args:\n        in_channels (`int`):\n            Input channels\n        out_channels (`int`):\n            Output channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        groups (:obj:`int`, *optional*, defaults to `32`):\n            The number of groups to use for group norm.\n        use_nin_shortcut (:obj:`bool`, *optional*, defaults to `None`):\n            Whether to use `nin_shortcut`. This activates a new layer inside ResNet block\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n\n    in_channels: int\n    out_channels: int = None\n    dropout: float = 0.0\n    groups: int = 32\n    use_nin_shortcut: bool = None\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        out_channels = self.in_channels if self.out_channels is None else self.out_channels\n\n        self.norm1 = nn.GroupNorm(num_groups=self.groups, epsilon=1e-6)\n        self.conv1 = nn.Conv(\n            out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        self.norm2 = nn.GroupNorm(num_groups=self.groups, epsilon=1e-6)\n        self.dropout_layer = nn.Dropout(self.dropout)\n        self.conv2 = nn.Conv(\n            out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        use_nin_shortcut = self.in_channels != out_channels if self.use_nin_shortcut is None else self.use_nin_shortcut\n\n        self.conv_shortcut = None\n        if use_nin_shortcut:\n            self.conv_shortcut = nn.Conv(\n                out_channels,\n                kernel_size=(1, 1),\n                strides=(1, 1),\n                padding=\"VALID\",\n                dtype=self.dtype,\n            )\n\n    def __call__(self, hidden_states, deterministic=True):\n        residual = hidden_states\n        hidden_states = self.norm1(hidden_states)\n        hidden_states = nn.swish(hidden_states)\n        hidden_states = self.conv1(hidden_states)\n\n        hidden_states = self.norm2(hidden_states)\n        hidden_states = nn.swish(hidden_states)\n        hidden_states = self.dropout_layer(hidden_states, deterministic)\n        hidden_states = self.conv2(hidden_states)\n\n        if self.conv_shortcut is not None:\n            residual = self.conv_shortcut(residual)\n\n        return hidden_states + residual\n\n\nclass FlaxAttentionBlock(nn.Module):\n    r\"\"\"\n    Flax Convolutional based multi-head attention block for diffusion-based VAE.\n\n    Parameters:\n        channels (:obj:`int`):\n            Input channels\n        num_head_channels (:obj:`int`, *optional*, defaults to `None`):\n            Number of attention heads\n        num_groups (:obj:`int`, *optional*, defaults to `32`):\n            The number of groups to use for group norm\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n\n    \"\"\"\n    channels: int\n    num_head_channels: int = None\n    num_groups: int = 32\n    dtype: jnp.dtype = 
jnp.float32\n\n    def setup(self):\n        self.num_heads = self.channels // self.num_head_channels if self.num_head_channels is not None else 1\n\n        dense = partial(nn.Dense, self.channels, dtype=self.dtype)\n\n        self.group_norm = nn.GroupNorm(num_groups=self.num_groups, epsilon=1e-6)\n        self.query, self.key, self.value = dense(), dense(), dense()\n        self.proj_attn = dense()\n\n    def transpose_for_scores(self, projection):\n        new_projection_shape = projection.shape[:-1] + (self.num_heads, -1)\n        # move heads to 2nd position (B, T, H * D) -> (B, T, H, D)\n        new_projection = projection.reshape(new_projection_shape)\n        # (B, T, H, D) -> (B, H, T, D)\n        new_projection = jnp.transpose(new_projection, (0, 2, 1, 3))\n        return new_projection\n\n    def __call__(self, hidden_states):\n        residual = hidden_states\n        batch, height, width, channels = hidden_states.shape\n\n        hidden_states = self.group_norm(hidden_states)\n\n        hidden_states = hidden_states.reshape((batch, height * width, channels))\n\n        query = self.query(hidden_states)\n        key = self.key(hidden_states)\n        value = self.value(hidden_states)\n\n        # transpose\n        query = self.transpose_for_scores(query)\n        key = self.transpose_for_scores(key)\n        value = self.transpose_for_scores(value)\n\n        # compute attentions\n        scale = 1 / math.sqrt(math.sqrt(self.channels / self.num_heads))\n        attn_weights = jnp.einsum(\"...qc,...kc->...qk\", query * scale, key * scale)\n        attn_weights = nn.softmax(attn_weights, axis=-1)\n\n        # attend to values\n        hidden_states = jnp.einsum(\"...kc,...qk->...qc\", value, attn_weights)\n\n        hidden_states = jnp.transpose(hidden_states, (0, 2, 1, 3))\n        new_hidden_states_shape = hidden_states.shape[:-2] + (self.channels,)\n        hidden_states = hidden_states.reshape(new_hidden_states_shape)\n\n        hidden_states = self.proj_attn(hidden_states)\n        hidden_states = hidden_states.reshape((batch, height, width, channels))\n        hidden_states = hidden_states + residual\n        return hidden_states\n\n\nclass FlaxDownEncoderBlock2D(nn.Module):\n    r\"\"\"\n    Flax Resnet blocks-based Encoder block for diffusion-based VAE.\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        out_channels (:obj:`int`):\n            Output channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of Resnet layer block\n        resnet_groups (:obj:`int`, *optional*, defaults to `32`):\n            The number of groups to use for the Resnet block group norm\n        add_downsample (:obj:`bool`, *optional*, defaults to `True`):\n            Whether to add downsample layer\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    out_channels: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    resnet_groups: int = 32\n    add_downsample: bool = True\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        resnets = []\n        for i in range(self.num_layers):\n            in_channels = self.in_channels if i == 0 else self.out_channels\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=self.out_channels,\n                
dropout=self.dropout,\n                groups=self.resnet_groups,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n        self.resnets = resnets\n\n        if self.add_downsample:\n            self.downsamplers_0 = FlaxDownsample2D(self.out_channels, dtype=self.dtype)\n\n    def __call__(self, hidden_states, deterministic=True):\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states, deterministic=deterministic)\n\n        if self.add_downsample:\n            hidden_states = self.downsamplers_0(hidden_states)\n\n        return hidden_states\n\n\nclass FlaxUpDecoderBlock2D(nn.Module):\n    r\"\"\"\n    Flax Resnet blocks-based Decoder block for diffusion-based VAE.\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        out_channels (:obj:`int`):\n            Output channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of Resnet layer block\n        resnet_groups (:obj:`int`, *optional*, defaults to `32`):\n            The number of groups to use for the Resnet block group norm\n        add_upsample (:obj:`bool`, *optional*, defaults to `True`):\n            Whether to add upsample layer\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    out_channels: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    resnet_groups: int = 32\n    add_upsample: bool = True\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        resnets = []\n        for i in range(self.num_layers):\n            in_channels = self.in_channels if i == 0 else self.out_channels\n            res_block = FlaxResnetBlock2D(\n                in_channels=in_channels,\n                out_channels=self.out_channels,\n                dropout=self.dropout,\n                groups=self.resnet_groups,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n\n        self.resnets = resnets\n\n        if self.add_upsample:\n            self.upsamplers_0 = FlaxUpsample2D(self.out_channels, dtype=self.dtype)\n\n    def __call__(self, hidden_states, deterministic=True):\n        for resnet in self.resnets:\n            hidden_states = resnet(hidden_states, deterministic=deterministic)\n\n        if self.add_upsample:\n            hidden_states = self.upsamplers_0(hidden_states)\n\n        return hidden_states\n\n\nclass FlaxUNetMidBlock2D(nn.Module):\n    r\"\"\"\n    Flax Unet Mid-Block module.\n\n    Parameters:\n        in_channels (:obj:`int`):\n            Input channels\n        dropout (:obj:`float`, *optional*, defaults to 0.0):\n            Dropout rate\n        num_layers (:obj:`int`, *optional*, defaults to 1):\n            Number of Resnet layer block\n        resnet_groups (:obj:`int`, *optional*, defaults to `32`):\n            The number of groups to use for the Resnet and Attention block group norm\n        attn_num_head_channels (:obj:`int`, *optional*, defaults to `1`):\n            Number of attention heads for each attention block\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int\n    dropout: float = 0.0\n    num_layers: int = 1\n    resnet_groups: int = 32\n    attn_num_head_channels: int = 1\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        
resnet_groups = self.resnet_groups if self.resnet_groups is not None else min(self.in_channels // 4, 32)\n\n        # there is always at least one resnet\n        resnets = [\n            FlaxResnetBlock2D(\n                in_channels=self.in_channels,\n                out_channels=self.in_channels,\n                dropout=self.dropout,\n                groups=resnet_groups,\n                dtype=self.dtype,\n            )\n        ]\n\n        attentions = []\n\n        for _ in range(self.num_layers):\n            attn_block = FlaxAttentionBlock(\n                channels=self.in_channels,\n                num_head_channels=self.attn_num_head_channels,\n                num_groups=resnet_groups,\n                dtype=self.dtype,\n            )\n            attentions.append(attn_block)\n\n            res_block = FlaxResnetBlock2D(\n                in_channels=self.in_channels,\n                out_channels=self.in_channels,\n                dropout=self.dropout,\n                groups=resnet_groups,\n                dtype=self.dtype,\n            )\n            resnets.append(res_block)\n\n        self.resnets = resnets\n        self.attentions = attentions\n\n    def __call__(self, hidden_states, deterministic=True):\n        hidden_states = self.resnets[0](hidden_states, deterministic=deterministic)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            hidden_states = attn(hidden_states)\n            hidden_states = resnet(hidden_states, deterministic=deterministic)\n\n        return hidden_states\n\n\nclass FlaxEncoder(nn.Module):\n    r\"\"\"\n    Flax Implementation of VAE Encoder.\n\n    This model is a Flax Linen [flax.linen.Module](https://flax.readthedocs.io/en/latest/flax.linen.html#module)\n    subclass. 
Use it as a regular Flax linen Module and refer to the Flax documentation for all matter related to\n    general usage and behavior.\n\n    Finally, this model supports inherent JAX features such as:\n    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)\n    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)\n    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)\n    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)\n\n    Parameters:\n        in_channels (:obj:`int`, *optional*, defaults to 3):\n            Input channels\n        out_channels (:obj:`int`, *optional*, defaults to 3):\n            Output channels\n        down_block_types (:obj:`Tuple[str]`, *optional*, defaults to `(DownEncoderBlock2D)`):\n            DownEncoder block type\n        block_out_channels (:obj:`Tuple[str]`, *optional*, defaults to `(64,)`):\n            Tuple containing the number of output channels for each block\n        layers_per_block (:obj:`int`, *optional*, defaults to `2`):\n            Number of Resnet layer for each block\n        norm_num_groups (:obj:`int`, *optional*, defaults to `32`):\n            norm num group\n        act_fn (:obj:`str`, *optional*, defaults to `silu`):\n            Activation function\n        double_z (:obj:`bool`, *optional*, defaults to `False`):\n            Whether to double the last output channels\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            Parameters `dtype`\n    \"\"\"\n    in_channels: int = 3\n    out_channels: int = 3\n    down_block_types: Tuple[str] = (\"DownEncoderBlock2D\",)\n    block_out_channels: Tuple[int] = (64,)\n    layers_per_block: int = 2\n    norm_num_groups: int = 32\n    act_fn: str = \"silu\"\n    double_z: bool = False\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        block_out_channels = self.block_out_channels\n        # in\n        self.conv_in = nn.Conv(\n            block_out_channels[0],\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        # downsampling\n        down_blocks = []\n        output_channel = block_out_channels[0]\n        for i, _ in enumerate(self.down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = FlaxDownEncoderBlock2D(\n                in_channels=input_channel,\n                out_channels=output_channel,\n                num_layers=self.layers_per_block,\n                resnet_groups=self.norm_num_groups,\n                add_downsample=not is_final_block,\n                dtype=self.dtype,\n            )\n            down_blocks.append(down_block)\n        self.down_blocks = down_blocks\n\n        # middle\n        self.mid_block = FlaxUNetMidBlock2D(\n            in_channels=block_out_channels[-1],\n            resnet_groups=self.norm_num_groups,\n            attn_num_head_channels=None,\n            dtype=self.dtype,\n        )\n\n        # end\n        conv_out_channels = 2 * self.out_channels if self.double_z else self.out_channels\n        self.conv_norm_out = nn.GroupNorm(num_groups=self.norm_num_groups, epsilon=1e-6)\n        self.conv_out = nn.Conv(\n            conv_out_channels,\n            kernel_size=(3, 3),\n 
           strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n    def __call__(self, sample, deterministic: bool = True):\n        # in\n        sample = self.conv_in(sample)\n\n        # downsampling\n        for block in self.down_blocks:\n            sample = block(sample, deterministic=deterministic)\n\n        # middle\n        sample = self.mid_block(sample, deterministic=deterministic)\n\n        # end\n        sample = self.conv_norm_out(sample)\n        sample = nn.swish(sample)\n        sample = self.conv_out(sample)\n\n        return sample\n\n\nclass FlaxDecoder(nn.Module):\n    r\"\"\"\n    Flax Implementation of VAE Decoder.\n\n    This model is a Flax Linen [flax.linen.Module](https://flax.readthedocs.io/en/latest/flax.linen.html#module)\n    subclass. Use it as a regular Flax linen Module and refer to the Flax documentation for all matter related to\n    general usage and behavior.\n\n    Finally, this model supports inherent JAX features such as:\n    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)\n    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)\n    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)\n    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)\n\n    Parameters:\n        in_channels (:obj:`int`, *optional*, defaults to 3):\n            Input channels\n        out_channels (:obj:`int`, *optional*, defaults to 3):\n            Output channels\n        up_block_types (:obj:`Tuple[str]`, *optional*, defaults to `(UpDecoderBlock2D)`):\n            UpDecoder block type\n        block_out_channels (:obj:`Tuple[str]`, *optional*, defaults to `(64,)`):\n            Tuple containing the number of output channels for each block\n        layers_per_block (:obj:`int`, *optional*, defaults to `2`):\n            Number of Resnet layer for each block\n        norm_num_groups (:obj:`int`, *optional*, defaults to `32`):\n            norm num group\n        act_fn (:obj:`str`, *optional*, defaults to `silu`):\n            Activation function\n        double_z (:obj:`bool`, *optional*, defaults to `False`):\n            Whether to double the last output channels\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            parameters `dtype`\n    \"\"\"\n    in_channels: int = 3\n    out_channels: int = 3\n    up_block_types: Tuple[str] = (\"UpDecoderBlock2D\",)\n    block_out_channels: int = (64,)\n    layers_per_block: int = 2\n    norm_num_groups: int = 32\n    act_fn: str = \"silu\"\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        block_out_channels = self.block_out_channels\n\n        # z to block_in\n        self.conv_in = nn.Conv(\n            block_out_channels[-1],\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n        # middle\n        self.mid_block = FlaxUNetMidBlock2D(\n            in_channels=block_out_channels[-1],\n            resnet_groups=self.norm_num_groups,\n            attn_num_head_channels=None,\n            dtype=self.dtype,\n        )\n\n        # upsampling\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        output_channel = reversed_block_out_channels[0]\n        up_blocks = []\n        for i, _ in enumerate(self.up_block_types):\n            
prev_output_channel = output_channel\n            output_channel = reversed_block_out_channels[i]\n\n            is_final_block = i == len(block_out_channels) - 1\n\n            up_block = FlaxUpDecoderBlock2D(\n                in_channels=prev_output_channel,\n                out_channels=output_channel,\n                num_layers=self.layers_per_block + 1,\n                resnet_groups=self.norm_num_groups,\n                add_upsample=not is_final_block,\n                dtype=self.dtype,\n            )\n            up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        self.up_blocks = up_blocks\n\n        # end\n        self.conv_norm_out = nn.GroupNorm(num_groups=self.norm_num_groups, epsilon=1e-6)\n        self.conv_out = nn.Conv(\n            self.out_channels,\n            kernel_size=(3, 3),\n            strides=(1, 1),\n            padding=((1, 1), (1, 1)),\n            dtype=self.dtype,\n        )\n\n    def __call__(self, sample, deterministic: bool = True):\n        # z to block_in\n        sample = self.conv_in(sample)\n\n        # middle\n        sample = self.mid_block(sample, deterministic=deterministic)\n\n        # upsampling\n        for block in self.up_blocks:\n            sample = block(sample, deterministic=deterministic)\n\n        sample = self.conv_norm_out(sample)\n        sample = nn.swish(sample)\n        sample = self.conv_out(sample)\n\n        return sample\n\n\nclass FlaxDiagonalGaussianDistribution(object):\n    def __init__(self, parameters, deterministic=False):\n        # Last axis to account for channels-last\n        self.mean, self.logvar = jnp.split(parameters, 2, axis=-1)\n        self.logvar = jnp.clip(self.logvar, -30.0, 20.0)\n        self.deterministic = deterministic\n        self.std = jnp.exp(0.5 * self.logvar)\n        self.var = jnp.exp(self.logvar)\n        if self.deterministic:\n            self.var = self.std = jnp.zeros_like(self.mean)\n\n    def sample(self, key):\n        return self.mean + self.std * jax.random.normal(key, self.mean.shape)\n\n    def kl(self, other=None):\n        if self.deterministic:\n            return jnp.array([0.0])\n\n        if other is None:\n            return 0.5 * jnp.sum(self.mean**2 + self.var - 1.0 - self.logvar, axis=[1, 2, 3])\n\n        return 0.5 * jnp.sum(\n            jnp.square(self.mean - other.mean) / other.var + self.var / other.var - 1.0 - self.logvar + other.logvar,\n            axis=[1, 2, 3],\n        )\n\n    def nll(self, sample, axis=[1, 2, 3]):\n        if self.deterministic:\n            return jnp.array([0.0])\n\n        logtwopi = jnp.log(2.0 * jnp.pi)\n        return 0.5 * jnp.sum(logtwopi + self.logvar + jnp.square(sample - self.mean) / self.var, axis=axis)\n\n    def mode(self):\n        return self.mean\n\n\n@flax_register_to_config\nclass FlaxAutoencoderKL(nn.Module, FlaxModelMixin, ConfigMixin):\n    r\"\"\"\n    Flax Implementation of Variational Autoencoder (VAE) model with KL loss from the paper Auto-Encoding Variational\n    Bayes by Diederik P. Kingma and Max Welling.\n\n    This model is a Flax Linen [flax.linen.Module](https://flax.readthedocs.io/en/latest/flax.linen.html#module)\n    subclass. 
Use it as a regular Flax linen Module and refer to the Flax documentation for all matter related to\n    general usage and behavior.\n\n    Finally, this model supports inherent JAX features such as:\n    - [Just-In-Time (JIT) compilation](https://jax.readthedocs.io/en/latest/jax.html#just-in-time-compilation-jit)\n    - [Automatic Differentiation](https://jax.readthedocs.io/en/latest/jax.html#automatic-differentiation)\n    - [Vectorization](https://jax.readthedocs.io/en/latest/jax.html#vectorization-vmap)\n    - [Parallelization](https://jax.readthedocs.io/en/latest/jax.html#parallelization-pmap)\n\n    Parameters:\n        in_channels (:obj:`int`, *optional*, defaults to 3):\n            Input channels\n        out_channels (:obj:`int`, *optional*, defaults to 3):\n            Output channels\n        down_block_types (:obj:`Tuple[str]`, *optional*, defaults to `(DownEncoderBlock2D)`):\n            DownEncoder block type\n        up_block_types (:obj:`Tuple[str]`, *optional*, defaults to `(UpDecoderBlock2D)`):\n            UpDecoder block type\n        block_out_channels (:obj:`Tuple[str]`, *optional*, defaults to `(64,)`):\n            Tuple containing the number of output channels for each block\n        layers_per_block (:obj:`int`, *optional*, defaults to `2`):\n            Number of Resnet layer for each block\n        act_fn (:obj:`str`, *optional*, defaults to `silu`):\n            Activation function\n        latent_channels (:obj:`int`, *optional*, defaults to `4`):\n            Latent space channels\n        norm_num_groups (:obj:`int`, *optional*, defaults to `32`):\n            Norm num group\n        sample_size (:obj:`int`, *optional*, defaults to 32):\n            Sample input size\n        scaling_factor (`float`, *optional*, defaults to 0.18215):\n            The component-wise standard deviation of the trained latent space computed using the first batch of the\n            training set. This is used to scale the latent space to have unit variance when training the diffusion\n            model. The latents are scaled with the formula `z = z * scaling_factor` before being passed to the\n            diffusion model. When decoding, the latents are scaled back to the original scale with the formula: `z = 1\n            / scaling_factor * z`. 
For more details, refer to sections 4.3.2 and D.1 of the [High-Resolution Image\n            Synthesis with Latent Diffusion Models](https://arxiv.org/abs/2112.10752) paper.\n        dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n            parameters `dtype`\n    \"\"\"\n    in_channels: int = 3\n    out_channels: int = 3\n    down_block_types: Tuple[str] = (\"DownEncoderBlock2D\",)\n    up_block_types: Tuple[str] = (\"UpDecoderBlock2D\",)\n    block_out_channels: Tuple[int] = (64,)\n    layers_per_block: int = 1\n    act_fn: str = \"silu\"\n    latent_channels: int = 4\n    norm_num_groups: int = 32\n    sample_size: int = 32\n    scaling_factor: float = 0.18215\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.encoder = FlaxEncoder(\n            in_channels=self.config.in_channels,\n            out_channels=self.config.latent_channels,\n            down_block_types=self.config.down_block_types,\n            block_out_channels=self.config.block_out_channels,\n            layers_per_block=self.config.layers_per_block,\n            act_fn=self.config.act_fn,\n            norm_num_groups=self.config.norm_num_groups,\n            double_z=True,\n            dtype=self.dtype,\n        )\n        self.decoder = FlaxDecoder(\n            in_channels=self.config.latent_channels,\n            out_channels=self.config.out_channels,\n            up_block_types=self.config.up_block_types,\n            block_out_channels=self.config.block_out_channels,\n            layers_per_block=self.config.layers_per_block,\n            norm_num_groups=self.config.norm_num_groups,\n            act_fn=self.config.act_fn,\n            dtype=self.dtype,\n        )\n        self.quant_conv = nn.Conv(\n            2 * self.config.latent_channels,\n            kernel_size=(1, 1),\n            strides=(1, 1),\n            padding=\"VALID\",\n            dtype=self.dtype,\n        )\n        self.post_quant_conv = nn.Conv(\n            self.config.latent_channels,\n            kernel_size=(1, 1),\n            strides=(1, 1),\n            padding=\"VALID\",\n            dtype=self.dtype,\n        )\n\n    def init_weights(self, rng: jax.random.KeyArray) -> FrozenDict:\n        # init input tensors\n        sample_shape = (1, self.in_channels, self.sample_size, self.sample_size)\n        sample = jnp.zeros(sample_shape, dtype=jnp.float32)\n\n        params_rng, dropout_rng, gaussian_rng = jax.random.split(rng, 3)\n        rngs = {\"params\": params_rng, \"dropout\": dropout_rng, \"gaussian\": gaussian_rng}\n\n        return self.init(rngs, sample)[\"params\"]\n\n    def encode(self, sample, deterministic: bool = True, return_dict: bool = True):\n        sample = jnp.transpose(sample, (0, 2, 3, 1))\n\n        hidden_states = self.encoder(sample, deterministic=deterministic)\n        moments = self.quant_conv(hidden_states)\n        posterior = FlaxDiagonalGaussianDistribution(moments)\n\n        if not return_dict:\n            return (posterior,)\n\n        return FlaxAutoencoderKLOutput(latent_dist=posterior)\n\n    def decode(self, latents, deterministic: bool = True, return_dict: bool = True):\n        if latents.shape[-1] != self.config.latent_channels:\n            latents = jnp.transpose(latents, (0, 2, 3, 1))\n\n        hidden_states = self.post_quant_conv(latents)\n        hidden_states = self.decoder(hidden_states, deterministic=deterministic)\n\n        hidden_states = jnp.transpose(hidden_states, (0, 3, 1, 2))\n\n        if not return_dict:\n            return 
(hidden_states,)\n\n        return FlaxDecoderOutput(sample=hidden_states)\n\n    def __call__(self, sample, sample_posterior=False, deterministic: bool = True, return_dict: bool = True):\n        posterior = self.encode(sample, deterministic=deterministic, return_dict=return_dict)\n        if sample_posterior:\n            rng = self.make_rng(\"gaussian\")\n            hidden_states = posterior.latent_dist.sample(rng)\n        else:\n            hidden_states = posterior.latent_dist.mode()\n\n        sample = self.decode(hidden_states, return_dict=return_dict).sample\n\n        if not return_dict:\n            return (sample,)\n\n        return FlaxDecoderOutput(sample=sample)\n"
  },
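One detail worth calling out in `FlaxAttentionBlock` above: both query and key are scaled by `1 / sqrt(sqrt(head_dim))` before the einsum, rather than dividing the attention scores once by `sqrt(head_dim)`. A small sketch (toy shapes and variable names are assumptions) showing the two forms agree numerically while keeping intermediate magnitudes smaller:

```python
import jax
import jax.numpy as jnp

# Toy shapes: batch=1, heads=2, tokens=4, head_dim=8.
q_key, k_key = jax.random.split(jax.random.PRNGKey(0))
query = jax.random.normal(q_key, (1, 2, 4, 8))
key = jax.random.normal(k_key, (1, 2, 4, 8))

head_dim = query.shape[-1]
scale = 1.0 / jnp.sqrt(jnp.sqrt(head_dim))

# Scale applied to both operands before the dot product ...
scores_split = jnp.einsum("...qc,...kc->...qk", query * scale, key * scale)
# ... equals the usual single 1/sqrt(head_dim) scaling of the scores.
scores_once = jnp.einsum("...qc,...kc->...qk", query, key) / jnp.sqrt(head_dim)

print(jnp.allclose(scores_split, scores_once, atol=1e-5))  # True
```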
  {
    "path": "diffusers/models/vq_model.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .modeling_utils import ModelMixin\nfrom .vae import Decoder, DecoderOutput, Encoder, VectorQuantizer\n\n\n@dataclass\nclass VQEncoderOutput(BaseOutput):\n    \"\"\"\n    Output of VQModel encoding method.\n\n    Args:\n        latents (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)`):\n            Encoded output sample of the model. Output of the last layer of the model.\n    \"\"\"\n\n    latents: torch.FloatTensor\n\n\nclass VQModel(ModelMixin, ConfigMixin):\n    r\"\"\"VQ-VAE model from the paper Neural Discrete Representation Learning by Aaron van den Oord, Oriol Vinyals and Koray\n    Kavukcuoglu.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the model (such as downloading or saving, etc.)\n\n    Parameters:\n        in_channels (int, *optional*, defaults to 3): Number of channels in the input image.\n        out_channels (int,  *optional*, defaults to 3): Number of channels in the output.\n        down_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"DownEncoderBlock2D\",)`): Tuple of downsample block types.\n        up_block_types (`Tuple[str]`, *optional*, defaults to :\n            obj:`(\"UpDecoderBlock2D\",)`): Tuple of upsample block types.\n        block_out_channels (`Tuple[int]`, *optional*, defaults to :\n            obj:`(64,)`): Tuple of block output channels.\n        act_fn (`str`, *optional*, defaults to `\"silu\"`): The activation function to use.\n        latent_channels (`int`, *optional*, defaults to `3`): Number of channels in the latent space.\n        sample_size (`int`, *optional*, defaults to `32`): TODO\n        num_vq_embeddings (`int`, *optional*, defaults to `256`): Number of codebook vectors in the VQ-VAE.\n        vq_embed_dim (`int`, *optional*): Hidden dim of codebook vectors in the VQ-VAE.\n        scaling_factor (`float`, *optional*, defaults to `0.18215`):\n            The component-wise standard deviation of the trained latent space computed using the first batch of the\n            training set. This is used to scale the latent space to have unit variance when training the diffusion\n            model. The latents are scaled with the formula `z = z * scaling_factor` before being passed to the\n            diffusion model. When decoding, the latents are scaled back to the original scale with the formula: `z = 1\n            / scaling_factor * z`. 
For more details, refer to sections 4.3.2 and D.1 of the [High-Resolution Image\n            Synthesis with Latent Diffusion Models](https://arxiv.org/abs/2112.10752) paper.\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        in_channels: int = 3,\n        out_channels: int = 3,\n        down_block_types: Tuple[str] = (\"DownEncoderBlock2D\",),\n        up_block_types: Tuple[str] = (\"UpDecoderBlock2D\",),\n        block_out_channels: Tuple[int] = (64,),\n        layers_per_block: int = 1,\n        act_fn: str = \"silu\",\n        latent_channels: int = 3,\n        sample_size: int = 32,\n        num_vq_embeddings: int = 256,\n        norm_num_groups: int = 32,\n        vq_embed_dim: Optional[int] = None,\n        scaling_factor: float = 0.18215,\n    ):\n        super().__init__()\n\n        # pass init params to Encoder\n        self.encoder = Encoder(\n            in_channels=in_channels,\n            out_channels=latent_channels,\n            down_block_types=down_block_types,\n            block_out_channels=block_out_channels,\n            layers_per_block=layers_per_block,\n            act_fn=act_fn,\n            norm_num_groups=norm_num_groups,\n            double_z=False,\n        )\n\n        vq_embed_dim = vq_embed_dim if vq_embed_dim is not None else latent_channels\n\n        self.quant_conv = nn.Conv2d(latent_channels, vq_embed_dim, 1)\n        self.quantize = VectorQuantizer(num_vq_embeddings, vq_embed_dim, beta=0.25, remap=None, sane_index_shape=False)\n        self.post_quant_conv = nn.Conv2d(vq_embed_dim, latent_channels, 1)\n\n        # pass init params to Decoder\n        self.decoder = Decoder(\n            in_channels=latent_channels,\n            out_channels=out_channels,\n            up_block_types=up_block_types,\n            block_out_channels=block_out_channels,\n            layers_per_block=layers_per_block,\n            act_fn=act_fn,\n            norm_num_groups=norm_num_groups,\n        )\n\n    def encode(self, x: torch.FloatTensor, return_dict: bool = True) -> VQEncoderOutput:\n        h = self.encoder(x)\n        h = self.quant_conv(h)\n\n        if not return_dict:\n            return (h,)\n\n        return VQEncoderOutput(latents=h)\n\n    def decode(\n        self, h: torch.FloatTensor, force_not_quantize: bool = False, return_dict: bool = True\n    ) -> Union[DecoderOutput, torch.FloatTensor]:\n        # also go through quantization layer\n        if not force_not_quantize:\n            quant, emb_loss, info = self.quantize(h)\n        else:\n            quant = h\n        quant = self.post_quant_conv(quant)\n        dec = self.decoder(quant)\n\n        if not return_dict:\n            return (dec,)\n\n        return DecoderOutput(sample=dec)\n\n    def forward(self, sample: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): Input sample.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`DecoderOutput`] instead of a plain tuple.\n        \"\"\"\n        x = sample\n        h = self.encode(x).latents\n        dec = self.decode(h).sample\n\n        if not return_dict:\n            return (dec,)\n\n        return DecoderOutput(sample=dec)\n"
  },
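A quick usage sketch for the `VQModel` defined above, using its default configuration and random weights; the import path mirrors this repository's layout and assumes the package is installed:

```python
import torch
from diffusers.models.vq_model import VQModel

# Default config: 3 latent channels, a 256-entry codebook, a single encoder/decoder block.
model = VQModel()
image = torch.randn(1, 3, 32, 32)

with torch.no_grad():
    latents = model.encode(image).latents          # (1, 3, 32, 32): one block, so no downsampling
    reconstruction = model.decode(latents).sample  # quantize -> post_quant_conv -> decoder

print(latents.shape, reconstruction.shape)
```

`forward` chains the same `encode`/`decode` calls, and `decode(..., force_not_quantize=True)` skips the codebook lookup entirely.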
  {
    "path": "diffusers/optimization.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"PyTorch optimization for diffusion models.\"\"\"\n\nimport math\nfrom enum import Enum\nfrom typing import Optional, Union\n\nfrom torch.optim import Optimizer\nfrom torch.optim.lr_scheduler import LambdaLR\n\nfrom .utils import logging\n\n\nlogger = logging.get_logger(__name__)\n\n\nclass SchedulerType(Enum):\n    LINEAR = \"linear\"\n    COSINE = \"cosine\"\n    COSINE_WITH_RESTARTS = \"cosine_with_restarts\"\n    POLYNOMIAL = \"polynomial\"\n    CONSTANT = \"constant\"\n    CONSTANT_WITH_WARMUP = \"constant_with_warmup\"\n    PIECEWISE_CONSTANT = \"piecewise_constant\"\n\n\ndef get_constant_schedule(optimizer: Optimizer, last_epoch: int = -1):\n    \"\"\"\n    Create a schedule with a constant learning rate, using the learning rate set in optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.\n    \"\"\"\n    return LambdaLR(optimizer, lambda _: 1, last_epoch=last_epoch)\n\n\ndef get_constant_schedule_with_warmup(optimizer: Optimizer, num_warmup_steps: int, last_epoch: int = -1):\n    \"\"\"\n    Create a schedule with a constant learning rate preceded by a warmup period during which the learning rate\n    increases linearly between 0 and the initial lr set in the optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        num_warmup_steps (`int`):\n            The number of steps for the warmup phase.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.\n    \"\"\"\n\n    def lr_lambda(current_step: int):\n        if current_step < num_warmup_steps:\n            return float(current_step) / float(max(1.0, num_warmup_steps))\n        return 1.0\n\n    return LambdaLR(optimizer, lr_lambda, last_epoch=last_epoch)\n\n\ndef get_piecewise_constant_schedule(optimizer: Optimizer, step_rules: str, last_epoch: int = -1):\n    \"\"\"\n    Create a schedule with a constant learning rate, using the learning rate set in optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        step_rules (`string`):\n            The rules for the learning rate. 
For example, step_rules=\"1:10,0.1:20,0.01:30,0.005\" means that the learning rate\n            is multiplied by 1 for the first 10 steps, by 0.1 for the next 20 steps, by 0.01 for the next 30\n            steps, and by 0.005 for all remaining steps.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.\n    \"\"\"\n\n    rules_dict = {}\n    rule_list = step_rules.split(\",\")\n    for rule_str in rule_list[:-1]:\n        value_str, steps_str = rule_str.split(\":\")\n        steps = int(steps_str)\n        value = float(value_str)\n        rules_dict[steps] = value\n    last_lr_multiple = float(rule_list[-1])\n\n    def create_rules_function(rules_dict, last_lr_multiple):\n        def rule_func(steps: int) -> float:\n            sorted_steps = sorted(rules_dict.keys())\n            for i, sorted_step in enumerate(sorted_steps):\n                if steps < sorted_step:\n                    return rules_dict[sorted_steps[i]]\n            return last_lr_multiple\n\n        return rule_func\n\n    rules_func = create_rules_function(rules_dict, last_lr_multiple)\n\n    return LambdaLR(optimizer, rules_func, last_epoch=last_epoch)\n\n\ndef get_linear_schedule_with_warmup(optimizer, num_warmup_steps, num_training_steps, last_epoch=-1):\n    \"\"\"\n    Create a schedule with a learning rate that decreases linearly from the initial lr set in the optimizer to 0, after\n    a warmup period during which it increases linearly from 0 to the initial lr set in the optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        num_warmup_steps (`int`):\n            The number of steps for the warmup phase.\n        num_training_steps (`int`):\n            The total number of training steps.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.\n    \"\"\"\n\n    def lr_lambda(current_step: int):\n        if current_step < num_warmup_steps:\n            return float(current_step) / float(max(1, num_warmup_steps))\n        return max(\n            0.0, float(num_training_steps - current_step) / float(max(1, num_training_steps - num_warmup_steps))\n        )\n\n    return LambdaLR(optimizer, lr_lambda, last_epoch)\n\n\ndef get_cosine_schedule_with_warmup(\n    optimizer: Optimizer, num_warmup_steps: int, num_training_steps: int, num_cycles: float = 0.5, last_epoch: int = -1\n):\n    \"\"\"\n    Create a schedule with a learning rate that decreases following the values of the cosine function between the\n    initial lr set in the optimizer to 0, after a warmup period during which it increases linearly between 0 and the\n    initial lr set in the optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        num_warmup_steps (`int`):\n            The number of steps for the warmup phase.\n        num_training_steps (`int`):\n            The total number of training steps.\n        num_cycles (`float`, *optional*, defaults to 0.5):\n            The number of periods of the cosine function in a schedule (the default is to just decrease from the max\n            value to 0 following a half-cosine).\n        last_epoch (`int`, 
*optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.\n    \"\"\"\n\n    def lr_lambda(current_step):\n        if current_step < num_warmup_steps:\n            return float(current_step) / float(max(1, num_warmup_steps))\n        progress = float(current_step - num_warmup_steps) / float(max(1, num_training_steps - num_warmup_steps))\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(num_cycles) * 2.0 * progress)))\n\n    return LambdaLR(optimizer, lr_lambda, last_epoch)\n\n\ndef get_cosine_with_hard_restarts_schedule_with_warmup(\n    optimizer: Optimizer, num_warmup_steps: int, num_training_steps: int, num_cycles: int = 1, last_epoch: int = -1\n):\n    \"\"\"\n    Create a schedule with a learning rate that decreases following the values of the cosine function between the\n    initial lr set in the optimizer to 0, with several hard restarts, after a warmup period during which it increases\n    linearly between 0 and the initial lr set in the optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        num_warmup_steps (`int`):\n            The number of steps for the warmup phase.\n        num_training_steps (`int`):\n            The total number of training steps.\n        num_cycles (`int`, *optional*, defaults to 1):\n            The number of hard restarts to use.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` with the appropriate schedule.\n    \"\"\"\n\n    def lr_lambda(current_step):\n        if current_step < num_warmup_steps:\n            return float(current_step) / float(max(1, num_warmup_steps))\n        progress = float(current_step - num_warmup_steps) / float(max(1, num_training_steps - num_warmup_steps))\n        if progress >= 1.0:\n            return 0.0\n        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * ((float(num_cycles) * progress) % 1.0))))\n\n    return LambdaLR(optimizer, lr_lambda, last_epoch)\n\n\ndef get_polynomial_decay_schedule_with_warmup(\n    optimizer, num_warmup_steps, num_training_steps, lr_end=1e-7, power=1.0, last_epoch=-1\n):\n    \"\"\"\n    Create a schedule with a learning rate that decreases as a polynomial decay from the initial lr set in the\n    optimizer to end lr defined by *lr_end*, after a warmup period during which it increases linearly from 0 to the\n    initial lr set in the optimizer.\n\n    Args:\n        optimizer ([`~torch.optim.Optimizer`]):\n            The optimizer for which to schedule the learning rate.\n        num_warmup_steps (`int`):\n            The number of steps for the warmup phase.\n        num_training_steps (`int`):\n            The total number of training steps.\n        lr_end (`float`, *optional*, defaults to 1e-7):\n            The end LR.\n        power (`float`, *optional*, defaults to 1.0):\n            Power factor.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n\n    Note: *power* defaults to 1.0 as in the fairseq implementation, which in turn is based on the original BERT\n    implementation at\n    https://github.com/google-research/bert/blob/f39e881b169b9d53bea03d2d341b31707a6c052b/optimization.py#L37\n\n    Return:\n        `torch.optim.lr_scheduler.LambdaLR` 
with the appropriate schedule.\n\n    \"\"\"\n\n    lr_init = optimizer.defaults[\"lr\"]\n    if not (lr_init > lr_end):\n        raise ValueError(f\"lr_end ({lr_end}) must be be smaller than initial lr ({lr_init})\")\n\n    def lr_lambda(current_step: int):\n        if current_step < num_warmup_steps:\n            return float(current_step) / float(max(1, num_warmup_steps))\n        elif current_step > num_training_steps:\n            return lr_end / lr_init  # as LambdaLR multiplies by lr_init\n        else:\n            lr_range = lr_init - lr_end\n            decay_steps = num_training_steps - num_warmup_steps\n            pct_remaining = 1 - (current_step - num_warmup_steps) / decay_steps\n            decay = lr_range * pct_remaining**power + lr_end\n            return decay / lr_init  # as LambdaLR multiplies by lr_init\n\n    return LambdaLR(optimizer, lr_lambda, last_epoch)\n\n\nTYPE_TO_SCHEDULER_FUNCTION = {\n    SchedulerType.LINEAR: get_linear_schedule_with_warmup,\n    SchedulerType.COSINE: get_cosine_schedule_with_warmup,\n    SchedulerType.COSINE_WITH_RESTARTS: get_cosine_with_hard_restarts_schedule_with_warmup,\n    SchedulerType.POLYNOMIAL: get_polynomial_decay_schedule_with_warmup,\n    SchedulerType.CONSTANT: get_constant_schedule,\n    SchedulerType.CONSTANT_WITH_WARMUP: get_constant_schedule_with_warmup,\n    SchedulerType.PIECEWISE_CONSTANT: get_piecewise_constant_schedule,\n}\n\n\ndef get_scheduler(\n    name: Union[str, SchedulerType],\n    optimizer: Optimizer,\n    step_rules: Optional[str] = None,\n    num_warmup_steps: Optional[int] = None,\n    num_training_steps: Optional[int] = None,\n    num_cycles: int = 1,\n    power: float = 1.0,\n    last_epoch: int = -1,\n):\n    \"\"\"\n    Unified API to get any scheduler from its name.\n\n    Args:\n        name (`str` or `SchedulerType`):\n            The name of the scheduler to use.\n        optimizer (`torch.optim.Optimizer`):\n            The optimizer that will be used during training.\n        step_rules (`str`, *optional*):\n            A string representing the step rules to use. This is only used by the `PIECEWISE_CONSTANT` scheduler.\n        num_warmup_steps (`int`, *optional*):\n            The number of warmup steps to do. This is not required by all schedulers (hence the argument being\n            optional), the function will raise an error if it's unset and the scheduler type requires it.\n        num_training_steps (`int``, *optional*):\n            The number of training steps to do. This is not required by all schedulers (hence the argument being\n            optional), the function will raise an error if it's unset and the scheduler type requires it.\n        num_cycles (`int`, *optional*):\n            The number of hard restarts used in `COSINE_WITH_RESTARTS` scheduler.\n        power (`float`, *optional*, defaults to 1.0):\n            Power factor. 
See the `POLYNOMIAL` scheduler.\n        last_epoch (`int`, *optional*, defaults to -1):\n            The index of the last epoch when resuming training.\n    \"\"\"\n    name = SchedulerType(name)\n    schedule_func = TYPE_TO_SCHEDULER_FUNCTION[name]\n    if name == SchedulerType.CONSTANT:\n        return schedule_func(optimizer, last_epoch=last_epoch)\n\n    if name == SchedulerType.PIECEWISE_CONSTANT:\n        return schedule_func(optimizer, step_rules=step_rules, last_epoch=last_epoch)\n\n    # All other schedulers require `num_warmup_steps`\n    if num_warmup_steps is None:\n        raise ValueError(f\"{name} requires `num_warmup_steps`, please provide that argument.\")\n\n    if name == SchedulerType.CONSTANT_WITH_WARMUP:\n        return schedule_func(optimizer, num_warmup_steps=num_warmup_steps, last_epoch=last_epoch)\n\n    # All other schedulers require `num_training_steps`\n    if num_training_steps is None:\n        raise ValueError(f\"{name} requires `num_training_steps`, please provide that argument.\")\n\n    if name == SchedulerType.COSINE_WITH_RESTARTS:\n        return schedule_func(\n            optimizer,\n            num_warmup_steps=num_warmup_steps,\n            num_training_steps=num_training_steps,\n            num_cycles=num_cycles,\n            last_epoch=last_epoch,\n        )\n\n    if name == SchedulerType.POLYNOMIAL:\n        return schedule_func(\n            optimizer,\n            num_warmup_steps=num_warmup_steps,\n            num_training_steps=num_training_steps,\n            power=power,\n            last_epoch=last_epoch,\n        )\n\n    return schedule_func(\n        optimizer, num_warmup_steps=num_warmup_steps, num_training_steps=num_training_steps, last_epoch=last_epoch\n    )\n"
  },
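As a usage sketch for `get_scheduler` from the `diffusers/optimization.py` module above (the dummy model and step counts are assumptions for illustration):

```python
import torch
from diffusers.optimization import get_scheduler

model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

# Linear warmup for 10 steps, then a half-cosine decay down to 0 at step 100.
lr_scheduler = get_scheduler(
    "cosine",
    optimizer=optimizer,
    num_warmup_steps=10,
    num_training_steps=100,
)

lrs = []
for _ in range(100):
    optimizer.step()        # a real training loop would call loss.backward() first
    lr_scheduler.step()
    lrs.append(lr_scheduler.get_last_lr()[0])

print(lrs[9], lrs[-1])  # ~1e-3 at the end of warmup, ~0 at the end of training
```

All schedules are returned as `LambdaLR` objects, so they only produce multipliers of the optimizer's initial learning rate; the optimizer itself still owns the base `lr`.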
  {
    "path": "diffusers/pipeline_utils.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n\n# limitations under the License.\n\n# NOTE: This file is deprecated and will be removed in a future version.\n# It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works\n\nfrom .pipelines import DiffusionPipeline, ImagePipelineOutput  # noqa: F401\nfrom .utils import deprecate\n\n\ndeprecate(\n    \"pipelines_utils\",\n    \"0.22.0\",\n    \"Importing `DiffusionPipeline` or `ImagePipelineOutput` from diffusers.pipeline_utils is deprecated. Please import from diffusers.pipelines.pipeline_utils instead.\",\n    standard_warn=False,\n    stacklevel=3,\n)\n"
  },
  {
    "path": "diffusers/pipelines/README.md",
    "content": "# 🧨 Diffusers Pipelines\n\nPipelines provide a simple way to run state-of-the-art diffusion models in inference.\nMost diffusion systems consist of multiple independently-trained models and highly adaptable scheduler \ncomponents - all of which are needed to have a functioning end-to-end diffusion system.\n\nAs an example, [Stable Diffusion](https://huggingface.co/blog/stable_diffusion) has three independently trained models:\n- [Autoencoder](https://github.com/huggingface/diffusers/blob/5cbed8e0d157f65d3ddc2420dfd09f2df630e978/src/diffusers/models/vae.py#L392)\n- [Conditional Unet](https://github.com/huggingface/diffusers/blob/5cbed8e0d157f65d3ddc2420dfd09f2df630e978/src/diffusers/models/unet_2d_condition.py#L12)\n- [CLIP text encoder](https://huggingface.co/docs/transformers/main/en/model_doc/clip#transformers.CLIPTextModel)\n- a scheduler component, [scheduler](https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_pndm.py), \n- a [CLIPImageProcessor](https://huggingface.co/docs/transformers/main/en/model_doc/clip#transformers.CLIPImageProcessor),\n- as well as a [safety checker](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion/safety_checker.py).\nAll of these components are necessary to run stable diffusion in inference even though they were trained \nor created independently from each other.\n\nTo that end, we strive to offer all open-sourced, state-of-the-art diffusion system under a unified API. \nMore specifically, we strive to provide pipelines that\n- 1. can load the officially published weights and yield 1-to-1 the same outputs as the original implementation according to the corresponding paper (*e.g.* [LDMTextToImagePipeline](https://github.com/huggingface/diffusers/tree/main/src/diffusers/pipelines/latent_diffusion), uses the officially released weights of [High-Resolution Image Synthesis with Latent Diffusion Models](https://arxiv.org/abs/2112.10752)),\n- 2. have a simple user interface to run the model in inference (see the [Pipelines API](#pipelines-api) section), \n- 3. are easy to understand with code that is self-explanatory and can be read along-side the official paper (see [Pipelines summary](#pipelines-summary)),\n- 4. can easily be contributed by the community (see the [Contribution](#contribution) section).\n\n**Note** that pipelines do not (and should not) offer any training functionality. 
\nIf you are looking for *official* training examples, please have a look at [examples](https://github.com/huggingface/diffusers/tree/main/examples).\n\n\n## Pipelines Summary\n\nThe following table summarizes all officially supported pipelines, their corresponding paper, and if \navailable a colab notebook to directly try them out.\n\n| Pipeline                                                                                                                      | Source                                                                                                                       | Tasks | Colab\n|-------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------|:---:|:---:|\n| [dance diffusion](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/dance_diffusion)                 | [**Dance Diffusion**](https://github.com/Harmonai-org/sample-generator)                                                      | *Unconditional Audio Generation* |\n| [ddpm](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/ddpm)                                       | [**Denoising Diffusion Probabilistic Models**](https://arxiv.org/abs/2006.11239)                                             | *Unconditional Image Generation* |\n| [ddim](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/ddim)                                       | [**Denoising Diffusion Implicit Models**](https://arxiv.org/abs/2010.02502)                                                  | *Unconditional Image Generation* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/training_example.ipynb)\n| [latent_diffusion](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/latent_diffusion)               | [**High-Resolution Image Synthesis with Latent Diffusion Models**](https://arxiv.org/abs/2112.10752)                         | *Text-to-Image Generation* | \n| [latent_diffusion_uncond](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/latent_diffusion_uncond) | [**High-Resolution Image Synthesis with Latent Diffusion Models**](https://arxiv.org/abs/2112.10752)                         | *Unconditional Image Generation* | \n| [pndm](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/pndm)                                       | [**Pseudo Numerical Methods for Diffusion Models on Manifolds**](https://arxiv.org/abs/2202.09778)                           | *Unconditional Image Generation* | \n| [score_sde_ve](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/score_sde_ve)                       | [**Score-Based Generative Modeling through Stochastic Differential Equations**](https://openreview.net/forum?id=PxTIG12RRHS) | *Unconditional Image Generation* | \n| [score_sde_vp](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/score_sde_vp)                       | [**Score-Based Generative Modeling through Stochastic Differential Equations**](https://openreview.net/forum?id=PxTIG12RRHS) | *Unconditional Image Generation* | \n| [stable_diffusion](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion)               | [**Stable 
Diffusion**](https://stability.ai/blog/stable-diffusion-public-release)                                            | *Text-to-Image Generation* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/stable_diffusion.ipynb)\n| [stable_diffusion](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion)               | [**Stable Diffusion**](https://stability.ai/blog/stable-diffusion-public-release)                                            | *Image-to-Image Text-Guided Generation* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/image_2_image_using_diffusers.ipynb)\n| [stable_diffusion](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion)               | [**Stable Diffusion**](https://stability.ai/blog/stable-diffusion-public-release)                                            | *Text-Guided Image Inpainting* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/in_painting_with_stable_diffusion_using_diffusers.ipynb)\n| [stochastic_karras_ve](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stochastic_karras_ve)       | [**Elucidating the Design Space of Diffusion-Based Generative Models**](https://arxiv.org/abs/2206.00364)                    | *Unconditional Image Generation* | \n\n**Note**: Pipelines are simple examples of how to play around with the diffusion systems as described in the corresponding papers. \nHowever, most of them can be adapted to use different scheduler components or even different model components. Some pipeline examples are shown in the [Examples](#examples) below.\n\n## Pipelines API\n\nDiffusion models often consist of multiple independently-trained models or other previously existing components. \n\n\nEach model has been trained independently on a different task and the scheduler can easily be swapped out and replaced with a different one. \nDuring inference, we however want to be able to easily load all components and use them in inference - even if one component, *e.g.* CLIP's text encoder, originates from a different library, such as [Transformers](https://github.com/huggingface/transformers). To that end, all pipelines provide the following functionality:\n\n- [`from_pretrained` method](https://github.com/huggingface/diffusers/blob/5cbed8e0d157f65d3ddc2420dfd09f2df630e978/src/diffusers/pipeline_utils.py#L139) that accepts a Hugging Face Hub repository id, *e.g.* [runwayml/stable-diffusion-v1-5](https://huggingface.co/runwayml/stable-diffusion-v1-5) or a path to a local directory, *e.g.*\n\"./stable-diffusion\". To correctly retrieve which models and components should be loaded, one has to provide a `model_index.json` file, *e.g.* [runwayml/stable-diffusion-v1-5/model_index.json](https://huggingface.co/runwayml/stable-diffusion-v1-5/blob/main/model_index.json), which defines all components that should be \nloaded into the pipelines. More specifically, for each model/component one needs to define the format `<name>: [\"<library>\", \"<class name>\"]`. 
`<name>` is the attribute name given to the loaded instance of `<class name>` which can be found in the library or pipeline folder called `\"<library>\"`.\n- [`save_pretrained`](https://github.com/huggingface/diffusers/blob/5cbed8e0d157f65d3ddc2420dfd09f2df630e978/src/diffusers/pipeline_utils.py#L90) that accepts a local path, *e.g.* `./stable-diffusion` under which all models/components of the pipeline will be saved. For each component/model a folder is created inside the local path that is named after the given attribute name, *e.g.* `./stable_diffusion/unet`. \nIn addition, a `model_index.json` file is created at the root of the local path, *e.g.* `./stable_diffusion/model_index.json` so that the complete pipeline can again be instantiated \nfrom the local path.\n- [`to`](https://github.com/huggingface/diffusers/blob/5cbed8e0d157f65d3ddc2420dfd09f2df630e978/src/diffusers/pipeline_utils.py#L118) which accepts a `string` or `torch.device` to move all models that are of type `torch.nn.Module` to the passed device. The behavior is fully analogous to [PyTorch's `to` method](https://pytorch.org/docs/stable/generated/torch.nn.Module.html#torch.nn.Module.to).\n- [`__call__`] method to use the pipeline in inference. `__call__` defines inference logic of the pipeline and should ideally encompass all aspects of it, from pre-processing to forwarding tensors to the different models and schedulers, as well as post-processing. The API of the `__call__` method can strongly vary from pipeline to pipeline. *E.g.* a text-to-image pipeline, such as [`StableDiffusionPipeline`](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py) should accept among other things the text prompt to generate the image. A pure image generation pipeline, such as [DDPMPipeline](https://github.com/huggingface/diffusers/tree/main/src/diffusers/pipelines/ddpm) on the other hand can be run without providing any inputs. To better understand what inputs can be adapted for \neach pipeline, one should look directly into the respective pipeline.\n\n**Note**: All pipelines have PyTorch's autograd disabled by decorating the `__call__` method with a [`torch.no_grad`](https://pytorch.org/docs/stable/generated/torch.no_grad.html) decorator because pipelines should\nnot be used for training. If you want to store the gradients during the forward pass, we recommend writing your own pipeline, see also our [community-examples](https://github.com/huggingface/diffusers/tree/main/examples/community)\n\n## Contribution\n\nWe are more than happy about any contribution to the officially supported pipelines 🤗. We aspire \nall of our pipelines to be  **self-contained**, **easy-to-tweak**, **beginner-friendly** and for **one-purpose-only**.\n\n- **Self-contained**: A pipeline shall be as self-contained as possible. More specifically, this means that all functionality should be either directly defined in the pipeline file itself, should be inherited from (and only from) the [`DiffusionPipeline` class](https://github.com/huggingface/diffusers/blob/5cbed8e0d157f65d3ddc2420dfd09f2df630e978/src/diffusers/pipeline_utils.py#L56) or be directly attached to the model and scheduler components of the pipeline. \n- **Easy-to-use**: Pipelines should be extremely easy to use - one should be able to load the pipeline and \nuse it for its designated task, *e.g.* text-to-image generation, in just a couple of lines of code. 
Most \nlogic including pre-processing, an unrolled diffusion loop, and post-processing should all happen inside the `__call__` method.\n- **Easy-to-tweak**: Certain pipelines will not be able to handle all use cases and tasks that you might like them to. If you want to use a certain pipeline for a specific use case that is not yet supported, you might have to copy the pipeline file and tweak the code to your needs. We try to make the pipeline code as readable as possible so that each part, from pre-processing to diffusing to post-processing, can easily be adapted. If you would like the community to benefit from your customized pipeline, we would love to see a contribution to our [community-examples](https://github.com/huggingface/diffusers/tree/main/examples/community). If you feel that an important pipeline should be part of the official pipelines but isn't, a contribution to the [official pipelines](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines) would be even better.\n- **One-purpose-only**: Pipelines should be used for one task and one task only. Even if two tasks are very similar from a modeling point of view, *e.g.* image2image translation and in-painting, pipelines shall be used for one task only to keep them *easy-to-tweak* and *readable*.\n\n## Examples\n\n### Text-to-Image generation with Stable Diffusion\n\n```python\n# make sure you're logged in with `huggingface-cli login`\nfrom diffusers import StableDiffusionPipeline\n\npipe = StableDiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\npipe = pipe.to(\"cuda\")\n\nprompt = \"a photo of an astronaut riding a horse on mars\"\nimage = pipe(prompt).images[0]\n\nimage.save(\"astronaut_rides_horse.png\")\n```\n\n### Image-to-Image text-guided generation with Stable Diffusion\n\nThe `StableDiffusionImg2ImgPipeline` lets you pass a text prompt and an initial image to condition the generation of new images.\n\n```python\nimport requests\nimport torch\nfrom PIL import Image\nfrom io import BytesIO\n\nfrom diffusers import StableDiffusionImg2ImgPipeline\n\n# load the pipeline\ndevice = \"cuda\"\npipe = StableDiffusionImg2ImgPipeline.from_pretrained(\n    \"runwayml/stable-diffusion-v1-5\",\n    torch_dtype=torch.float16,\n).to(device)\n\n# let's download an initial image\nurl = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n\nresponse = requests.get(url)\ninit_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\ninit_image = init_image.resize((768, 512))\n\nprompt = \"A fantasy landscape, trending on artstation\"\n\nimages = pipe(prompt=prompt, image=init_image, strength=0.75, guidance_scale=7.5).images\n\nimages[0].save(\"fantasy_landscape.png\")\n```\nYou can also run this example on colab [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/image_2_image_using_diffusers.ipynb)\n\n### Tweak prompts reusing seeds and latents\n\nYou can generate your own latents to reproduce results, or tweak your prompt on a specific result you liked. [This notebook](https://github.com/pcuenca/diffusers-examples/blob/main/notebooks/stable-diffusion-seeds.ipynb) shows how to do it step by step. 
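The notebook's core idea, in a minimal sketch (the seed value and prompt tweak below are illustrative, not taken from the notebook), is to draw the initial latents from a seeded `torch.Generator` so that only the prompt changes between runs:\n\n```python\nimport torch\nfrom diffusers import StableDiffusionPipeline\n\npipe = StableDiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\").to(\"cuda\")\n\n# fixing the seed makes the sampled latents - and thus the overall composition - reproducible\ngenerator = torch.Generator(device=\"cuda\").manual_seed(1024)\nimage = pipe(\"a photo of an astronaut riding a horse on mars\", generator=generator).images[0]\n\n# same seed, tweaked prompt: the layout should stay close to the first image\ngenerator = torch.Generator(device=\"cuda\").manual_seed(1024)\nimage = pipe(\"a photo of an astronaut riding a horse on mars, oil painting\", generator=generator).images[0]\n```\n\n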
You can also run it in Google Colab [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/pcuenca/diffusers-examples/blob/main/notebooks/stable-diffusion-seeds.ipynb).\n\n\n### In-painting using Stable Diffusion\n\nThe `StableDiffusionInpaintPipeline` lets you edit specific parts of an image by providing a mask and text prompt.\n\n```python\nimport PIL\nimport requests\nimport torch\nfrom io import BytesIO\n\nfrom diffusers import StableDiffusionInpaintPipeline\n\ndef download_image(url):\n    response = requests.get(url)\n    return PIL.Image.open(BytesIO(response.content)).convert(\"RGB\")\n\nimg_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png\"\nmask_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png\"\n\ninit_image = download_image(img_url).resize((512, 512))\nmask_image = download_image(mask_url).resize((512, 512))\n\npipe = StableDiffusionInpaintPipeline.from_pretrained(\n    \"runwayml/stable-diffusion-inpainting\",\n    torch_dtype=torch.float16,\n)\npipe = pipe.to(\"cuda\")\n\nprompt = \"Face of a yellow cat, high resolution, sitting on a park bench\"\nimage = pipe(prompt=prompt, image=init_image, mask_image=mask_image).images[0]\n```\n\nYou can also run this example on colab [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/in_painting_with_stable_diffusion_using_diffusers.ipynb)\n"
  },
  {
    "path": "diffusers/pipelines/__init__.py",
    "content": "from ..utils import (\n    OptionalDependencyNotAvailable,\n    is_flax_available,\n    is_k_diffusion_available,\n    is_librosa_available,\n    is_note_seq_available,\n    is_onnx_available,\n    is_torch_available,\n    is_transformers_available,\n)\n\n\ntry:\n    if not is_torch_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_pt_objects import *  # noqa F403\nelse:\n    from .dance_diffusion import DanceDiffusionPipeline\n    from .ddim import DDIMPipeline\n    from .ddpm import DDPMPipeline\n    from .dit import DiTPipeline\n    from .latent_diffusion import LDMSuperResolutionPipeline\n    from .latent_diffusion_uncond import LDMPipeline\n    from .pipeline_utils import AudioPipelineOutput, DiffusionPipeline, ImagePipelineOutput\n    from .pndm import PNDMPipeline\n    from .repaint import RePaintPipeline\n    from .score_sde_ve import ScoreSdeVePipeline\n    from .stochastic_karras_ve import KarrasVePipeline\n\ntry:\n    if not (is_torch_available() and is_librosa_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_torch_and_librosa_objects import *  # noqa F403\nelse:\n    from .audio_diffusion import AudioDiffusionPipeline, Mel\n\ntry:\n    if not (is_torch_available() and is_transformers_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_torch_and_transformers_objects import *  # noqa F403\nelse:\n    from .alt_diffusion import AltDiffusionImg2ImgPipeline, AltDiffusionPipeline\n    from .audioldm import AudioLDMPipeline\n    from .controlnet import (\n        StableDiffusionControlNetImg2ImgPipeline,\n        StableDiffusionControlNetInpaintPipeline,\n        StableDiffusionControlNetPipeline,\n    )\n    from .deepfloyd_if import (\n        IFImg2ImgPipeline,\n        IFImg2ImgSuperResolutionPipeline,\n        IFInpaintingPipeline,\n        IFInpaintingSuperResolutionPipeline,\n        IFPipeline,\n        IFSuperResolutionPipeline,\n    )\n    from .latent_diffusion import LDMTextToImagePipeline\n    from .paint_by_example import PaintByExamplePipeline\n    from .semantic_stable_diffusion import SemanticStableDiffusionPipeline\n    from .stable_diffusion import (\n        CycleDiffusionPipeline,\n        StableDiffusionAttendAndExcitePipeline,\n        StableDiffusionDepth2ImgPipeline,\n        StableDiffusionDiffEditPipeline,\n        StableDiffusionImageVariationPipeline,\n        StableDiffusionImg2ImgPipeline,\n        StableDiffusionInpaintPipeline,\n        StableDiffusionInpaintPipelineLegacy,\n        StableDiffusionInstructPix2PixPipeline,\n        StableDiffusionLatentUpscalePipeline,\n        StableDiffusionModelEditingPipeline,\n        StableDiffusionPanoramaPipeline,\n        StableDiffusionPipeline,\n        StableDiffusionPix2PixZeroPipeline,\n        StableDiffusionSAGPipeline,\n        StableDiffusionUpscalePipeline,\n        StableUnCLIPImg2ImgPipeline,\n        StableUnCLIPPipeline,\n    )\n    from .stable_diffusion_safe import StableDiffusionPipelineSafe\n    from .text_to_video_synthesis import TextToVideoSDPipeline, TextToVideoZeroPipeline\n    from .unclip import UnCLIPImageVariationPipeline, UnCLIPPipeline\n    from .versatile_diffusion import (\n        VersatileDiffusionDualGuidedPipeline,\n        VersatileDiffusionImageVariationPipeline,\n        VersatileDiffusionPipeline,\n        
VersatileDiffusionTextToImagePipeline,\n    )\n    from .vq_diffusion import VQDiffusionPipeline\n\ntry:\n    if not is_onnx_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_onnx_objects import *  # noqa F403\nelse:\n    from .onnx_utils import OnnxRuntimeModel\n\ntry:\n    if not (is_torch_available() and is_transformers_available() and is_onnx_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_torch_and_transformers_and_onnx_objects import *  # noqa F403\nelse:\n    from .stable_diffusion import (\n        OnnxStableDiffusionImg2ImgPipeline,\n        OnnxStableDiffusionInpaintPipeline,\n        OnnxStableDiffusionInpaintPipelineLegacy,\n        OnnxStableDiffusionPipeline,\n        OnnxStableDiffusionUpscalePipeline,\n        StableDiffusionOnnxPipeline,\n    )\n\ntry:\n    if not (is_torch_available() and is_transformers_available() and is_k_diffusion_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_torch_and_transformers_and_k_diffusion_objects import *  # noqa F403\nelse:\n    from .stable_diffusion import StableDiffusionKDiffusionPipeline\n\ntry:\n    if not is_flax_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_flax_objects import *  # noqa F403\nelse:\n    from .pipeline_flax_utils import FlaxDiffusionPipeline\n\n\ntry:\n    if not (is_flax_available() and is_transformers_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_flax_and_transformers_objects import *  # noqa F403\nelse:\n    from .controlnet import FlaxStableDiffusionControlNetPipeline\n    from .stable_diffusion import (\n        FlaxStableDiffusionImg2ImgPipeline,\n        FlaxStableDiffusionInpaintPipeline,\n        FlaxStableDiffusionPipeline,\n    )\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_note_seq_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_transformers_and_torch_and_note_seq_objects import *  # noqa F403\nelse:\n    from .spectrogram_diffusion import MidiProcessor, SpectrogramDiffusionPipeline\n"
  },
  {
    "path": "diffusers/pipelines/alt_diffusion/__init__.py",
    "content": "from dataclasses import dataclass\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\nfrom PIL import Image\n\nfrom ...utils import BaseOutput, is_torch_available, is_transformers_available\n\n\n@dataclass\n# Copied from diffusers.pipelines.stable_diffusion.__init__.StableDiffusionPipelineOutput with Stable->Alt\nclass AltDiffusionPipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for Alt Diffusion pipelines.\n\n    Args:\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n        nsfw_content_detected (`List[bool]`)\n            List of flags denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, or `None` if safety checking could not be performed.\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n    nsfw_content_detected: Optional[List[bool]]\n\n\nif is_transformers_available() and is_torch_available():\n    from .modeling_roberta_series import RobertaSeriesModelWithTransformation\n    from .pipeline_alt_diffusion import AltDiffusionPipeline\n    from .pipeline_alt_diffusion_img2img import AltDiffusionImg2ImgPipeline\n"
  },
  {
    "path": "diffusers/pipelines/alt_diffusion/modeling_roberta_series.py",
    "content": "from dataclasses import dataclass\nfrom typing import Optional, Tuple\n\nimport torch\nfrom torch import nn\nfrom transformers import RobertaPreTrainedModel, XLMRobertaConfig, XLMRobertaModel\nfrom transformers.utils import ModelOutput\n\n\n@dataclass\nclass TransformationModelOutput(ModelOutput):\n    \"\"\"\n    Base class for text model's outputs that also contains a pooling of the last hidden states.\n\n    Args:\n        text_embeds (`torch.FloatTensor` of shape `(batch_size, output_dim)` *optional* returned when model is initialized with `with_projection=True`):\n            The text embeddings obtained by applying the projection layer to the pooler_output.\n        last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`):\n            Sequence of hidden-states at the output of the last layer of the model.\n        hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):\n            Tuple of `torch.FloatTensor` (one for the output of the embeddings, if the model has an embedding layer, +\n            one for the output of each layer) of shape `(batch_size, sequence_length, hidden_size)`.\n\n            Hidden-states of the model at the output of each layer plus the optional initial embedding outputs.\n        attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):\n            Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,\n            sequence_length)`.\n\n            Attentions weights after the attention softmax, used to compute the weighted average in the self-attention\n            heads.\n    \"\"\"\n\n    projection_state: Optional[torch.FloatTensor] = None\n    last_hidden_state: torch.FloatTensor = None\n    hidden_states: Optional[Tuple[torch.FloatTensor]] = None\n    attentions: Optional[Tuple[torch.FloatTensor]] = None\n\n\nclass RobertaSeriesConfig(XLMRobertaConfig):\n    def __init__(\n        self,\n        pad_token_id=1,\n        bos_token_id=0,\n        eos_token_id=2,\n        project_dim=512,\n        pooler_fn=\"cls\",\n        learn_encoder=False,\n        use_attention_mask=True,\n        **kwargs,\n    ):\n        super().__init__(pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs)\n        self.project_dim = project_dim\n        self.pooler_fn = pooler_fn\n        self.learn_encoder = learn_encoder\n        self.use_attention_mask = use_attention_mask\n\n\nclass RobertaSeriesModelWithTransformation(RobertaPreTrainedModel):\n    _keys_to_ignore_on_load_unexpected = [r\"pooler\", r\"logit_scale\"]\n    _keys_to_ignore_on_load_missing = [r\"position_ids\", r\"predictions.decoder.bias\"]\n    base_model_prefix = \"roberta\"\n    config_class = RobertaSeriesConfig\n\n    def __init__(self, config):\n        super().__init__(config)\n        self.roberta = XLMRobertaModel(config)\n        self.transformation = nn.Linear(config.hidden_size, config.project_dim)\n        self.has_pre_transformation = getattr(config, \"has_pre_transformation\", False)\n        if self.has_pre_transformation:\n            self.transformation_pre = nn.Linear(config.hidden_size, config.project_dim)\n            self.pre_LN = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)\n        self.post_init()\n\n    def forward(\n        self,\n        input_ids: 
Optional[torch.Tensor] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        token_type_ids: Optional[torch.Tensor] = None,\n        position_ids: Optional[torch.Tensor] = None,\n        head_mask: Optional[torch.Tensor] = None,\n        inputs_embeds: Optional[torch.Tensor] = None,\n        encoder_hidden_states: Optional[torch.Tensor] = None,\n        encoder_attention_mask: Optional[torch.Tensor] = None,\n        output_attentions: Optional[bool] = None,\n        return_dict: Optional[bool] = None,\n        output_hidden_states: Optional[bool] = None,\n    ):\n        r\"\"\" \"\"\"\n\n        return_dict = return_dict if return_dict is not None else self.config.use_return_dict\n\n        outputs = self.base_model(\n            input_ids=input_ids,\n            attention_mask=attention_mask,\n            token_type_ids=token_type_ids,\n            position_ids=position_ids,\n            head_mask=head_mask,\n            inputs_embeds=inputs_embeds,\n            encoder_hidden_states=encoder_hidden_states,\n            encoder_attention_mask=encoder_attention_mask,\n            output_attentions=output_attentions,\n            output_hidden_states=True if self.has_pre_transformation else output_hidden_states,\n            return_dict=return_dict,\n        )\n\n        if self.has_pre_transformation:\n            sequence_output2 = outputs[\"hidden_states\"][-2]\n            sequence_output2 = self.pre_LN(sequence_output2)\n            projection_state2 = self.transformation_pre(sequence_output2)\n\n            return TransformationModelOutput(\n                projection_state=projection_state2,\n                last_hidden_state=outputs.last_hidden_state,\n                hidden_states=outputs.hidden_states,\n                attentions=outputs.attentions,\n            )\n        else:\n            projection_state = self.transformation(outputs.last_hidden_state)\n            return TransformationModelOutput(\n                projection_state=projection_state,\n                last_hidden_state=outputs.last_hidden_state,\n                hidden_states=outputs.hidden_states,\n                attentions=outputs.attentions,\n            )\n"
  },
  {
    "path": "diffusers/pipelines/alt_diffusion/pipeline_alt_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, XLMRobertaTokenizer\n\nfrom diffusers.utils import is_accelerate_available, is_accelerate_version\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import deprecate, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline\nfrom ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom . import AltDiffusionPipelineOutput, RobertaSeriesModelWithTransformation\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import AltDiffusionPipeline\n\n        >>> pipe = AltDiffusionPipeline.from_pretrained(\"BAAI/AltDiffusion-m9\", torch_dtype=torch.float16)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> # \"dark elf princess, highly detailed, d & d, fantasy, highly detailed, digital painting, trending on artstation, concept art, sharp focus, illustration, art by artgerm and greg rutkowski and fuji choko and viktoria gavrilenko and hoang lap\"\n        >>> prompt = \"黑暗精灵公主，非常详细，幻想，非常详细，数字绘画，概念艺术，敏锐的焦点，插图\"\n        >>> image = pipe(prompt).images[0]\n        ```\n\"\"\"\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline with Stable->Alt, CLIPTextModel->RobertaSeriesModelWithTransformation, CLIPTokenizer->XLMRobertaTokenizer, AltDiffusionSafetyChecker->StableDiffusionSafetyChecker\nclass AltDiffusionPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation using Alt Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n        - *Ckpt*: [`loaders.FromCkptMixin.from_ckpt`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`RobertaSeriesModelWithTransformation`]):\n            Frozen text-encoder. 
Alt Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.RobertaSeriesModelWithTransformation),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`XLMRobertaTokenizer`):\n            Tokenizer of class\n            [XLMRobertaTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.XLMRobertaTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: RobertaSeriesModelWithTransformation,\n        tokenizer: XLMRobertaTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Alt Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. 
If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    def decode_latents(self, latents):\n        warnings.warn(\n            (\n                \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n                \" use VaeImageProcessor instead\"\n            ),\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.AltDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.AltDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.AltDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return AltDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/alt_diffusion/pipeline_alt_diffusion_img2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, XLMRobertaTokenizer\n\nfrom diffusers.utils import is_accelerate_available, is_accelerate_version\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import PIL_INTERPOLATION, deprecate, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline\nfrom ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom . import AltDiffusionPipelineOutput, RobertaSeriesModelWithTransformation\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import requests\n        >>> import torch\n        >>> from PIL import Image\n        >>> from io import BytesIO\n\n        >>> from diffusers import AltDiffusionImg2ImgPipeline\n\n        >>> device = \"cuda\"\n        >>> model_id_or_path = \"BAAI/AltDiffusion-m9\"\n        >>> pipe = AltDiffusionImg2ImgPipeline.from_pretrained(model_id_or_path, torch_dtype=torch.float16)\n        >>> pipe = pipe.to(device)\n\n        >>> url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n\n        >>> response = requests.get(url)\n        >>> init_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> init_image = init_image.resize((768, 512))\n\n        >>> # \"A fantasy landscape, trending on artstation\"\n        >>> prompt = \"幻想风景, artstation\"\n\n        >>> images = pipe(prompt=prompt, image=init_image, strength=0.75, guidance_scale=7.5).images\n        >>> images[0].save(\"幻想风景.png\")\n        ```\n\"\"\"\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return 
image\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline with Stable->Alt, CLIPTextModel->RobertaSeriesModelWithTransformation, CLIPTokenizer->XLMRobertaTokenizer, AltDiffusionSafetyChecker->StableDiffusionSafetyChecker\nclass AltDiffusionImg2ImgPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image to image generation using Alt Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n        - *Ckpt*: [`loaders.FromCkptMixin.from_ckpt`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`RobertaSeriesModelWithTransformation`]):\n            Frozen text-encoder. Alt Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.RobertaSeriesModelWithTransformation),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`XLMRobertaTokenizer`):\n            Tokenizer of class\n            [XLMRobertaTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.XLMRobertaTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: RobertaSeriesModelWithTransformation,\n        tokenizer: XLMRobertaTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. 
Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might lead to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Alt Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. 
If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. 
Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    def decode_latents(self, latents):\n        warnings.warn(\n            (\n                \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n                \" use VaeImageProcessor instead\"\n            ),\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self, prompt, strength, callback_steps, negative_prompt=None, prompt_embeds=None, negative_prompt_embeds=None\n    ):\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should in [0.0, 1.0] but is {strength}\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        batch_size = batch_size * num_images_per_prompt\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            init_latents = [\n                self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)\n            ]\n            init_latents = torch.cat(init_latents, dim=0)\n        else:\n            init_latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0:\n            # expand init_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {batch_size} text prompts (`prompt`), but only {init_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = batch_size // init_latents.shape[0]\n            init_latents = torch.cat([init_latents] * additional_image_per_prompt, dim=0)\n        elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts.\"\n            )\n        else:\n            init_latents = torch.cat([init_latents], dim=0)\n\n        shape = init_latents.shape\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # get latents\n        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)\n        latents = init_latents\n\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. 
This parameter will be modulated by `strength`.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.AltDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.AltDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.AltDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, strength, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Preprocess image\n        image = self.image_processor.preprocess(image)\n\n        # 5. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n\n        # 6. Prepare latent variables\n        latents = self.prepare_latents(\n            image, latent_timestep, batch_size, num_images_per_prompt, prompt_embeds.dtype, device, generator\n        )\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return AltDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/audio_diffusion/__init__.py",
    "content": "from .mel import Mel\nfrom .pipeline_audio_diffusion import AudioDiffusionPipeline\n"
  },
  {
    "path": "diffusers/pipelines/audio_diffusion/mel.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport numpy as np  # noqa: E402\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...schedulers.scheduling_utils import SchedulerMixin\n\n\ntry:\n    import librosa  # noqa: E402\n\n    _librosa_can_be_imported = True\n    _import_error = \"\"\nexcept Exception as e:\n    _librosa_can_be_imported = False\n    _import_error = (\n        f\"Cannot import librosa because {e}. Make sure to correctly install librosa to be able to install it.\"\n    )\n\n\nfrom PIL import Image  # noqa: E402\n\n\nclass Mel(ConfigMixin, SchedulerMixin):\n    \"\"\"\n    Parameters:\n        x_res (`int`): x resolution of spectrogram (time)\n        y_res (`int`): y resolution of spectrogram (frequency bins)\n        sample_rate (`int`): sample rate of audio\n        n_fft (`int`): number of Fast Fourier Transforms\n        hop_length (`int`): hop length (a higher number is recommended for lower than 256 y_res)\n        top_db (`int`): loudest in decibels\n        n_iter (`int`): number of iterations for Griffin Linn mel inversion\n    \"\"\"\n\n    config_name = \"mel_config.json\"\n\n    @register_to_config\n    def __init__(\n        self,\n        x_res: int = 256,\n        y_res: int = 256,\n        sample_rate: int = 22050,\n        n_fft: int = 2048,\n        hop_length: int = 512,\n        top_db: int = 80,\n        n_iter: int = 32,\n    ):\n        self.hop_length = hop_length\n        self.sr = sample_rate\n        self.n_fft = n_fft\n        self.top_db = top_db\n        self.n_iter = n_iter\n        self.set_resolution(x_res, y_res)\n        self.audio = None\n\n        if not _librosa_can_be_imported:\n            raise ValueError(_import_error)\n\n    def set_resolution(self, x_res: int, y_res: int):\n        \"\"\"Set resolution.\n\n        Args:\n            x_res (`int`): x resolution of spectrogram (time)\n            y_res (`int`): y resolution of spectrogram (frequency bins)\n        \"\"\"\n        self.x_res = x_res\n        self.y_res = y_res\n        self.n_mels = self.y_res\n        self.slice_size = self.x_res * self.hop_length - 1\n\n    def load_audio(self, audio_file: str = None, raw_audio: np.ndarray = None):\n        \"\"\"Load audio.\n\n        Args:\n            audio_file (`str`): must be a file on disk due to Librosa limitation or\n            raw_audio (`np.ndarray`): audio as numpy array\n        \"\"\"\n        if audio_file is not None:\n            self.audio, _ = librosa.load(audio_file, mono=True, sr=self.sr)\n        else:\n            self.audio = raw_audio\n\n        # Pad with silence if necessary.\n        if len(self.audio) < self.x_res * self.hop_length:\n            self.audio = np.concatenate([self.audio, np.zeros((self.x_res * self.hop_length - len(self.audio),))])\n\n    def get_number_of_slices(self) -> int:\n        \"\"\"Get number of slices in audio.\n\n        Returns:\n            `int`: number of 
spectrograms audio can be sliced into\n        \"\"\"\n        return len(self.audio) // self.slice_size\n\n    def get_audio_slice(self, slice: int = 0) -> np.ndarray:\n        \"\"\"Get slice of audio.\n\n        Args:\n            slice (`int`): slice number of audio (out of get_number_of_slices())\n\n        Returns:\n            `np.ndarray`: audio as numpy array\n        \"\"\"\n        return self.audio[self.slice_size * slice : self.slice_size * (slice + 1)]\n\n    def get_sample_rate(self) -> int:\n        \"\"\"Get sample rate.\n\n        Returns:\n            `int`: sample rate of audio\n        \"\"\"\n        return self.sr\n\n    def audio_slice_to_image(self, slice: int) -> Image.Image:\n        \"\"\"Convert slice of audio to spectrogram.\n\n        Args:\n            slice (`int`): slice number of audio to convert (out of get_number_of_slices())\n\n        Returns:\n            `PIL Image`: grayscale image of x_res x y_res\n        \"\"\"\n        S = librosa.feature.melspectrogram(\n            y=self.get_audio_slice(slice), sr=self.sr, n_fft=self.n_fft, hop_length=self.hop_length, n_mels=self.n_mels\n        )\n        log_S = librosa.power_to_db(S, ref=np.max, top_db=self.top_db)\n        bytedata = (((log_S + self.top_db) * 255 / self.top_db).clip(0, 255) + 0.5).astype(np.uint8)\n        image = Image.fromarray(bytedata)\n        return image\n\n    def image_to_audio(self, image: Image.Image) -> np.ndarray:\n        \"\"\"Converts spectrogram to audio.\n\n        Args:\n            image (`PIL Image`): x_res x y_res grayscale image\n\n        Returns:\n            audio (`np.ndarray`): raw audio\n        \"\"\"\n        bytedata = np.frombuffer(image.tobytes(), dtype=\"uint8\").reshape((image.height, image.width))\n        log_S = bytedata.astype(\"float\") * self.top_db / 255 - self.top_db\n        S = librosa.db_to_power(log_S)\n        audio = librosa.feature.inverse.mel_to_audio(\n            S, sr=self.sr, n_fft=self.n_fft, hop_length=self.hop_length, n_iter=self.n_iter\n        )\n        return audio\n"
  },
  {
    "path": "diffusers/pipelines/audio_diffusion/pipeline_audio_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom math import acos, sin\nfrom typing import List, Tuple, Union\n\nimport numpy as np\nimport torch\nfrom PIL import Image\n\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import DDIMScheduler, DDPMScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import AudioPipelineOutput, BaseOutput, DiffusionPipeline, ImagePipelineOutput\nfrom .mel import Mel\n\n\nclass AudioDiffusionPipeline(DiffusionPipeline):\n    \"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqae ([`AutoencoderKL`]): Variational AutoEncoder for Latent Audio Diffusion or None\n        unet ([`UNet2DConditionModel`]): UNET model\n        mel ([`Mel`]): transform audio <-> spectrogram\n        scheduler ([`DDIMScheduler` or `DDPMScheduler`]): de-noising scheduler\n    \"\"\"\n\n    _optional_components = [\"vqvae\"]\n\n    def __init__(\n        self,\n        vqvae: AutoencoderKL,\n        unet: UNet2DConditionModel,\n        mel: Mel,\n        scheduler: Union[DDIMScheduler, DDPMScheduler],\n    ):\n        super().__init__()\n        self.register_modules(unet=unet, scheduler=scheduler, mel=mel, vqvae=vqvae)\n\n    def get_default_steps(self) -> int:\n        \"\"\"Returns default number of steps recommended for inference\n\n        Returns:\n            `int`: number of steps\n        \"\"\"\n        return 50 if isinstance(self.scheduler, DDIMScheduler) else 1000\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        audio_file: str = None,\n        raw_audio: np.ndarray = None,\n        slice: int = 0,\n        start_step: int = 0,\n        steps: int = None,\n        generator: torch.Generator = None,\n        mask_start_secs: float = 0,\n        mask_end_secs: float = 0,\n        step_generator: torch.Generator = None,\n        eta: float = 0,\n        noise: torch.Tensor = None,\n        encoding: torch.Tensor = None,\n        return_dict=True,\n    ) -> Union[\n        Union[AudioPipelineOutput, ImagePipelineOutput],\n        Tuple[List[Image.Image], Tuple[int, List[np.ndarray]]],\n    ]:\n        \"\"\"Generate random mel spectrogram from audio input and convert to audio.\n\n        Args:\n            batch_size (`int`): number of samples to generate\n            audio_file (`str`): must be a file on disk due to Librosa limitation or\n            raw_audio (`np.ndarray`): audio as numpy array\n            slice (`int`): slice number of audio to convert\n            start_step (int): step to start from\n            steps (`int`): number of de-noising steps (defaults to 50 for DDIM, 1000 for DDPM)\n            generator (`torch.Generator`): random number generator or None\n            
mask_start_secs (`float`): number of seconds of audio to mask (not generate) at start\n            mask_end_secs (`float`): number of seconds of audio to mask (not generate) at end\n            step_generator (`torch.Generator`): random number generator used to de-noise or None\n            eta (`float`): parameter between 0 and 1 used with DDIM scheduler\n            noise (`torch.Tensor`): noise tensor of shape (batch_size, 1, height, width) or None\n            encoding (`torch.Tensor`): for UNet2DConditionModel shape (batch_size, seq_length, cross_attention_dim)\n            return_dict (`bool`): if True return AudioPipelineOutput, ImagePipelineOutput else Tuple\n\n        Returns:\n            `List[PIL Image]`: mel spectrograms (`float`, `List[np.ndarray]`): sample rate and raw audios\n        \"\"\"\n\n        steps = steps or self.get_default_steps()\n        self.scheduler.set_timesteps(steps)\n        step_generator = step_generator or generator\n        # For backwards compatibility\n        if type(self.unet.config.sample_size) == int:\n            self.unet.config.sample_size = (self.unet.config.sample_size, self.unet.config.sample_size)\n        if noise is None:\n            noise = randn_tensor(\n                (\n                    batch_size,\n                    self.unet.config.in_channels,\n                    self.unet.config.sample_size[0],\n                    self.unet.config.sample_size[1],\n                ),\n                generator=generator,\n                device=self.device,\n            )\n        images = noise\n        mask = None\n\n        if audio_file is not None or raw_audio is not None:\n            self.mel.load_audio(audio_file, raw_audio)\n            input_image = self.mel.audio_slice_to_image(slice)\n            input_image = np.frombuffer(input_image.tobytes(), dtype=\"uint8\").reshape(\n                (input_image.height, input_image.width)\n            )\n            input_image = (input_image / 255) * 2 - 1\n            input_images = torch.tensor(input_image[np.newaxis, :, :], dtype=torch.float).to(self.device)\n\n            if self.vqvae is not None:\n                input_images = self.vqvae.encode(torch.unsqueeze(input_images, 0)).latent_dist.sample(\n                    generator=generator\n                )[0]\n                input_images = self.vqvae.config.scaling_factor * input_images\n\n            if start_step > 0:\n                images[0, 0] = self.scheduler.add_noise(input_images, noise, self.scheduler.timesteps[start_step - 1])\n\n            pixels_per_second = (\n                self.unet.config.sample_size[1] * self.mel.get_sample_rate() / self.mel.x_res / self.mel.hop_length\n            )\n            mask_start = int(mask_start_secs * pixels_per_second)\n            mask_end = int(mask_end_secs * pixels_per_second)\n            mask = self.scheduler.add_noise(input_images, noise, torch.tensor(self.scheduler.timesteps[start_step:]))\n\n        for step, t in enumerate(self.progress_bar(self.scheduler.timesteps[start_step:])):\n            if isinstance(self.unet, UNet2DConditionModel):\n                model_output = self.unet(images, t, encoding)[\"sample\"]\n            else:\n                model_output = self.unet(images, t)[\"sample\"]\n\n            if isinstance(self.scheduler, DDIMScheduler):\n                images = self.scheduler.step(\n                    model_output=model_output,\n                    timestep=t,\n                    sample=images,\n                    eta=eta,\n                   
 generator=step_generator,\n                )[\"prev_sample\"]\n            else:\n                images = self.scheduler.step(\n                    model_output=model_output,\n                    timestep=t,\n                    sample=images,\n                    generator=step_generator,\n                )[\"prev_sample\"]\n\n            if mask is not None:\n                if mask_start > 0:\n                    images[:, :, :, :mask_start] = mask[:, step, :, :mask_start]\n                if mask_end > 0:\n                    images[:, :, :, -mask_end:] = mask[:, step, :, -mask_end:]\n\n        if self.vqvae is not None:\n            # 0.18215 was scaling factor used in training to ensure unit variance\n            images = 1 / self.vqvae.config.scaling_factor * images\n            images = self.vqvae.decode(images)[\"sample\"]\n\n        images = (images / 2 + 0.5).clamp(0, 1)\n        images = images.cpu().permute(0, 2, 3, 1).numpy()\n        images = (images * 255).round().astype(\"uint8\")\n        images = list(\n            (Image.fromarray(_[:, :, 0]) for _ in images)\n            if images.shape[3] == 1\n            else (Image.fromarray(_, mode=\"RGB\").convert(\"L\") for _ in images)\n        )\n\n        audios = [self.mel.image_to_audio(_) for _ in images]\n        if not return_dict:\n            return images, (self.mel.get_sample_rate(), audios)\n\n        return BaseOutput(**AudioPipelineOutput(np.array(audios)[:, np.newaxis, :]), **ImagePipelineOutput(images))\n\n    @torch.no_grad()\n    def encode(self, images: List[Image.Image], steps: int = 50) -> np.ndarray:\n        \"\"\"Reverse step process: recover noisy image from generated image.\n\n        Args:\n            images (`List[PIL Image]`): list of images to encode\n            steps (`int`): number of encoding steps to perform (defaults to 50)\n\n        Returns:\n            `np.ndarray`: noise tensor of shape (batch_size, 1, height, width)\n        \"\"\"\n\n        # Only works with DDIM as this method is deterministic\n        assert isinstance(self.scheduler, DDIMScheduler)\n        self.scheduler.set_timesteps(steps)\n        sample = np.array(\n            [np.frombuffer(image.tobytes(), dtype=\"uint8\").reshape((1, image.height, image.width)) for image in images]\n        )\n        sample = (sample / 255) * 2 - 1\n        sample = torch.Tensor(sample).to(self.device)\n\n        for t in self.progress_bar(torch.flip(self.scheduler.timesteps, (0,))):\n            prev_timestep = t - self.scheduler.config.num_train_timesteps // self.scheduler.num_inference_steps\n            alpha_prod_t = self.scheduler.alphas_cumprod[t]\n            alpha_prod_t_prev = (\n                self.scheduler.alphas_cumprod[prev_timestep]\n                if prev_timestep >= 0\n                else self.scheduler.final_alpha_cumprod\n            )\n            beta_prod_t = 1 - alpha_prod_t\n            model_output = self.unet(sample, t)[\"sample\"]\n            pred_sample_direction = (1 - alpha_prod_t_prev) ** (0.5) * model_output\n            sample = (sample - pred_sample_direction) * alpha_prod_t_prev ** (-0.5)\n            sample = sample * alpha_prod_t ** (0.5) + beta_prod_t ** (0.5) * model_output\n\n        return sample\n\n    @staticmethod\n    def slerp(x0: torch.Tensor, x1: torch.Tensor, alpha: float) -> torch.Tensor:\n        \"\"\"Spherical Linear intERPolation\n\n        Args:\n            x0 (`torch.Tensor`): first tensor to interpolate between\n            x1 (`torch.Tensor`): seconds tensor to interpolate 
between\n            alpha (`float`): interpolation between 0 and 1\n\n        Returns:\n            `torch.Tensor`: interpolated tensor\n        \"\"\"\n\n        theta = acos(torch.dot(torch.flatten(x0), torch.flatten(x1)) / torch.norm(x0) / torch.norm(x1))\n        return sin((1 - alpha) * theta) * x0 / sin(theta) + sin(alpha * theta) * x1 / sin(theta)\n"
  },
  {
    "path": "diffusers/pipelines/audioldm/__init__.py",
    "content": "from ...utils import (\n    OptionalDependencyNotAvailable,\n    is_torch_available,\n    is_transformers_available,\n    is_transformers_version,\n)\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_transformers_version(\">=\", \"4.27.0\")):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import (\n        AudioLDMPipeline,\n    )\nelse:\n    from .pipeline_audioldm import AudioLDMPipeline\n"
  },
  {
    "path": "diffusers/pipelines/audioldm/pipeline_audioldm.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom transformers import ClapTextModelWithProjection, RobertaTokenizer, RobertaTokenizerFast, SpeechT5HifiGan\n\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import AudioPipelineOutput, DiffusionPipeline\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import AudioLDMPipeline\n\n        >>> pipe = AudioLDMPipeline.from_pretrained(\"cvssp/audioldm\", torch_dtype=torch.float16)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"A hammer hitting a wooden surface\"\n        >>> audio = pipe(prompt).audio[0]\n        ```\n\"\"\"\n\n\nclass AudioLDMPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-audio generation using AudioLDM.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode audios to and from latent representations.\n        text_encoder ([`ClapTextModelWithProjection`]):\n            Frozen text-encoder. AudioLDM uses the text portion of\n            [CLAP](https://huggingface.co/docs/transformers/main/model_doc/clap#transformers.ClapTextModelWithProjection),\n            specifically the [RoBERTa HSTAT-unfused](https://huggingface.co/laion/clap-htsat-unfused) variant.\n        tokenizer ([`PreTrainedTokenizer`]):\n            Tokenizer of class\n            [RobertaTokenizer](https://huggingface.co/docs/transformers/model_doc/roberta#transformers.RobertaTokenizer).\n        unet ([`UNet2DConditionModel`]): U-Net architecture to denoise the encoded audio latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded audio latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        vocoder ([`SpeechT5HifiGan`]):\n            Vocoder of class\n            [SpeechT5HifiGan](https://huggingface.co/docs/transformers/main/en/model_doc/speecht5#transformers.SpeechT5HifiGan).\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: ClapTextModelWithProjection,\n        tokenizer: Union[RobertaTokenizer, RobertaTokenizerFast],\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        vocoder: SpeechT5HifiGan,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            vocoder=vocoder,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and vocoder have their state dicts saved to CPU and then are moved to a `torch.device('meta')\n        and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.vocoder]:\n            cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_waveforms_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device (`torch.device`):\n                torch device\n            num_waveforms_per_prompt (`int`):\n                number of waveforms that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the audio generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            attention_mask = text_inputs.attention_mask\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLAP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask.to(device),\n            )\n            prompt_embeds = prompt_embeds.text_embeds\n            # additional L_2 normalization over each hidden-state\n            prompt_embeds = F.normalize(prompt_embeds, dim=-1)\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        (\n            bs_embed,\n            seq_len,\n        ) = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_waveforms_per_prompt)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_waveforms_per_prompt, seq_len)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            uncond_input_ids = uncond_input.input_ids.to(device)\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input_ids,\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds.text_embeds\n            # additional L_2 normalization over each hidden-state\n            negative_prompt_embeds = F.normalize(negative_prompt_embeds, dim=-1)\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_waveforms_per_prompt)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_waveforms_per_prompt, seq_len)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def decode_latents(self, latents):\n        latents = 1 / self.vae.config.scaling_factor * latents\n        mel_spectrogram = self.vae.decode(latents).sample\n        return mel_spectrogram\n\n    def mel_spectrogram_to_waveform(self, mel_spectrogram):\n        if mel_spectrogram.dim() == 4:\n            mel_spectrogram = mel_spectrogram.squeeze(1)\n\n        waveform = self.vocoder(mel_spectrogram)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        waveform = waveform.cpu().float()\n        return waveform\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        audio_length_in_s,\n 
       vocoder_upsample_factor,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        min_audio_length_in_s = vocoder_upsample_factor * self.vae_scale_factor\n        if audio_length_in_s < min_audio_length_in_s:\n            raise ValueError(\n                f\"`audio_length_in_s` has to be a positive value greater than or equal to {min_audio_length_in_s}, but \"\n                f\"is {audio_length_in_s}.\"\n            )\n\n        if self.vocoder.config.model_in_dim % self.vae_scale_factor != 0:\n            raise ValueError(\n                f\"The number of frequency bins in the vocoder's log-mel spectrogram has to be divisible by the \"\n                f\"VAE scale factor, but got {self.vocoder.config.model_in_dim} bins and a scale factor of \"\n                f\"{self.vae_scale_factor}.\"\n            )\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents with width->self.vocoder.config.model_in_dim\n    def prepare_latents(self, batch_size, num_channels_latents, height, dtype, device, generator, latents=None):\n        shape = (\n            batch_size,\n            num_channels_latents,\n            height // self.vae_scale_factor,\n            self.vocoder.config.model_in_dim // self.vae_scale_factor,\n        )\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        audio_length_in_s: Optional[float] = None,\n        num_inference_steps: int = 10,\n        guidance_scale: float = 2.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_waveforms_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        output_type: Optional[str] = \"np\",\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the audio generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            audio_length_in_s (`int`, *optional*, defaults to 5.12):\n                The length of the generated audio sample in seconds.\n            num_inference_steps (`int`, *optional*, defaults to 10):\n                The number of denoising steps. More denoising steps usually lead to a higher quality audio at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 2.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate audios that are closely linked to the text `prompt`,\n                usually at the expense of lower sound quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the audio generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_waveforms_per_prompt (`int`, *optional*, defaults to 1):\n                The number of waveforms to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for audio\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return an [`~pipelines.AudioPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttnProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            output_type (`str`, *optional*, defaults to `\"np\"`):\n                The output format of the generated audio. Choose between:\n                - `\"np\"`: Return Numpy `np.ndarray` objects.\n                - `\"pt\"`: Return PyTorch `torch.Tensor` objects.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.AudioPipelineOutput`] or `tuple`:\n            [`~pipelines.AudioPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated audios.\n        \"\"\"\n        # 0. 
Convert audio input length from seconds to spectrogram height\n        vocoder_upsample_factor = np.prod(self.vocoder.config.upsample_rates) / self.vocoder.config.sampling_rate\n\n        if audio_length_in_s is None:\n            audio_length_in_s = self.unet.config.sample_size * self.vae_scale_factor * vocoder_upsample_factor\n\n        height = int(audio_length_in_s / vocoder_upsample_factor)\n\n        original_waveform_length = int(audio_length_in_s * self.vocoder.config.sampling_rate)\n        if height % self.vae_scale_factor != 0:\n            height = int(np.ceil(height / self.vae_scale_factor)) * self.vae_scale_factor\n            logger.info(\n                f\"Audio length in seconds {audio_length_in_s} is increased to {height * vocoder_upsample_factor} \"\n                f\"so that it can be handled by the model. It will be cut to {audio_length_in_s} after the \"\n                f\"denoising process.\"\n            )\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt,\n            audio_length_in_s,\n            vocoder_upsample_factor,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_waveforms_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_waveforms_per_prompt,\n            num_channels_latents,\n            height,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=None,\n                    class_labels=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 8. Post-processing\n        mel_spectrogram = self.decode_latents(latents)\n\n        audio = self.mel_spectrogram_to_waveform(mel_spectrogram)\n\n        audio = audio[:, :original_waveform_length]\n\n        if output_type == \"np\":\n            audio = audio.numpy()\n\n        if not return_dict:\n            return (audio,)\n\n        return AudioPipelineOutput(audios=audio)\n"
  },
  {
    "path": "diffusers/pipelines/controlnet/__init__.py",
    "content": "from ...utils import (\n    OptionalDependencyNotAvailable,\n    is_flax_available,\n    is_torch_available,\n    is_transformers_available,\n)\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403\nelse:\n    from .multicontrolnet import MultiControlNetModel\n    from .pipeline_controlnet import StableDiffusionControlNetPipeline\n    from .pipeline_controlnet_img2img import StableDiffusionControlNetImg2ImgPipeline\n    from .pipeline_controlnet_inpaint import StableDiffusionControlNetInpaintPipeline\n\n\nif is_transformers_available() and is_flax_available():\n    from .pipeline_flax_controlnet import FlaxStableDiffusionControlNetPipeline\n"
  },
  {
    "path": "diffusers/pipelines/controlnet/multicontrolnet.py",
    "content": "from typing import Any, Dict, List, Optional, Tuple, Union\n\nimport torch\nfrom torch import nn\n\nfrom ...models.controlnet import ControlNetModel, ControlNetOutput\nfrom ...models.modeling_utils import ModelMixin\n\n\nclass MultiControlNetModel(ModelMixin):\n    r\"\"\"\n    Multiple `ControlNetModel` wrapper class for Multi-ControlNet\n\n    This module is a wrapper for multiple instances of the `ControlNetModel`. The `forward()` API is designed to be\n    compatible with `ControlNetModel`.\n\n    Args:\n        controlnets (`List[ControlNetModel]`):\n            Provides additional conditioning to the unet during the denoising process. You must set multiple\n            `ControlNetModel` as a list.\n    \"\"\"\n\n    def __init__(self, controlnets: Union[List[ControlNetModel], Tuple[ControlNetModel]]):\n        super().__init__()\n        self.nets = nn.ModuleList(controlnets)\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        encoder_hidden_states: torch.Tensor,\n        controlnet_cond: List[torch.tensor],\n        conditioning_scale: List[float],\n        class_labels: Optional[torch.Tensor] = None,\n        timestep_cond: Optional[torch.Tensor] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        guess_mode: bool = False,\n        return_dict: bool = True,\n    ) -> Union[ControlNetOutput, Tuple]:\n        for i, (image, scale, controlnet) in enumerate(zip(controlnet_cond, conditioning_scale, self.nets)):\n            down_samples, mid_sample = controlnet(\n                sample,\n                timestep,\n                encoder_hidden_states,\n                image,\n                scale,\n                class_labels,\n                timestep_cond,\n                attention_mask,\n                cross_attention_kwargs,\n                guess_mode,\n                return_dict,\n            )\n\n            # merge samples\n            if i == 0:\n                down_block_res_samples, mid_block_res_sample = down_samples, mid_sample\n            else:\n                down_block_res_samples = [\n                    samples_prev + samples_curr\n                    for samples_prev, samples_curr in zip(down_block_res_samples, down_samples)\n                ]\n                mid_block_res_sample += mid_sample\n\n        return down_block_res_samples, mid_block_res_sample\n"
  },
  {
    "path": "diffusers/pipelines/controlnet/pipeline_controlnet.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport inspect\nimport os\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\n\nimport numpy as np\nimport PIL.Image\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, ControlNetModel, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_compiled_module,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom ..stable_diffusion import StableDiffusionPipelineOutput\nfrom ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom .multicontrolnet import MultiControlNetModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> # !pip install opencv-python transformers accelerate\n        >>> from diffusers import StableDiffusionControlNetPipeline, ControlNetModel, UniPCMultistepScheduler\n        >>> from diffusers.utils import load_image\n        >>> import numpy as np\n        >>> import torch\n\n        >>> import cv2\n        >>> from PIL import Image\n\n        >>> # download an image\n        >>> image = load_image(\n        ...     \"https://hf.co/datasets/huggingface/documentation-images/resolve/main/diffusers/input_image_vermeer.png\"\n        ... )\n        >>> image = np.array(image)\n\n        >>> # get canny image\n        >>> image = cv2.Canny(image, 100, 200)\n        >>> image = image[:, :, None]\n        >>> image = np.concatenate([image, image, image], axis=2)\n        >>> canny_image = Image.fromarray(image)\n\n        >>> # load control net and stable diffusion v1-5\n        >>> controlnet = ControlNetModel.from_pretrained(\"lllyasviel/sd-controlnet-canny\", torch_dtype=torch.float16)\n        >>> pipe = StableDiffusionControlNetPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", controlnet=controlnet, torch_dtype=torch.float16\n        ... )\n\n        >>> # speed up diffusion process with faster scheduler and memory optimization\n        >>> pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)\n        >>> # remove following line if xformers is not installed\n        >>> pipe.enable_xformers_memory_efficient_attention()\n\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> # generate image\n        >>> generator = torch.manual_seed(0)\n        >>> image = pipe(\n        ...     \"futuristic-looking woman\", num_inference_steps=20, generator=generator, image=canny_image\n        ... 
).images[0]\n        ```\n\"\"\"\n\n\nclass StableDiffusionControlNetPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion with ControlNet guidance.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        controlnet ([`ControlNetModel`] or `List[ControlNetModel]`):\n            Provides additional conditioning to the unet during the denoising process. If you set multiple ControlNets\n            as a list, the outputs from each ControlNet are added together to create one combined additional\n            conditioning.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        controlnet: Union[ControlNetModel, List[ControlNetModel], Tuple[ControlNetModel], MultiControlNetModel],\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. 
Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        if isinstance(controlnet, (list, tuple)):\n            controlnet = MultiControlNetModel(controlnet)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            controlnet=controlnet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, unet,\n        text_encoder, vae, controlnet, and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta')` and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.controlnet]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains on the GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            # the safety checker can offload the vae again\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # the control net hook has to be manually offloaded as it alternates with the unet\n        cpu_offload_with_hook(self.controlnet, device)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n        controlnet_conditioning_scale=1.0,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        # `prompt` needs more sophisticated handling when there are multiple\n        # conditionings.\n        if isinstance(self.controlnet, MultiControlNetModel):\n            if isinstance(prompt, list):\n                logger.warning(\n                    f\"You have {len(self.controlnet.nets)} ControlNets and you have passed {len(prompt)}\"\n                    \" prompts. The conditionings will be fixed across the prompts.\"\n                )\n\n        # Check `image`\n        is_compiled = hasattr(F, \"scaled_dot_product_attention\") and isinstance(\n            self.controlnet, torch._dynamo.eval_frame.OptimizedModule\n        )\n        if (\n            isinstance(self.controlnet, ControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, ControlNetModel)\n        ):\n            self.check_image(image, prompt, prompt_embeds)\n        elif (\n            isinstance(self.controlnet, MultiControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, MultiControlNetModel)\n        ):\n            if not isinstance(image, list):\n                raise TypeError(\"For multiple controlnets: `image` must be type `list`\")\n\n            # When `image` is a nested list:\n            # (e.g. 
[[canny_image_1, pose_image_1], [canny_image_2, pose_image_2]])\n            elif any(isinstance(i, list) for i in image):\n                raise ValueError(\"A single batch of multiple conditionings are supported at the moment.\")\n            elif len(image) != len(self.controlnet.nets):\n                raise ValueError(\n                    \"For multiple controlnets: `image` must have the same length as the number of controlnets.\"\n                )\n\n            for image_ in image:\n                self.check_image(image_, prompt, prompt_embeds)\n        else:\n            assert False\n\n        # Check `controlnet_conditioning_scale`\n        if (\n            isinstance(self.controlnet, ControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, ControlNetModel)\n        ):\n            if not isinstance(controlnet_conditioning_scale, float):\n                raise TypeError(\"For single controlnet: `controlnet_conditioning_scale` must be type `float`.\")\n        elif (\n            isinstance(self.controlnet, MultiControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, MultiControlNetModel)\n        ):\n            if isinstance(controlnet_conditioning_scale, list):\n                if any(isinstance(i, list) for i in controlnet_conditioning_scale):\n                    raise ValueError(\"A single batch of multiple conditionings are supported at the moment.\")\n            elif isinstance(controlnet_conditioning_scale, list) and len(controlnet_conditioning_scale) != len(\n                self.controlnet.nets\n            ):\n                raise ValueError(\n                    \"For multiple controlnets: When `controlnet_conditioning_scale` is specified as `list`, it must have\"\n                    \" the same length as the number of controlnets\"\n                )\n        else:\n            assert False\n\n    def check_image(self, image, prompt, prompt_embeds):\n        image_is_pil = isinstance(image, PIL.Image.Image)\n        image_is_tensor = isinstance(image, torch.Tensor)\n        image_is_pil_list = isinstance(image, list) and isinstance(image[0], PIL.Image.Image)\n        image_is_tensor_list = isinstance(image, list) and isinstance(image[0], torch.Tensor)\n\n        if not image_is_pil and not image_is_tensor and not image_is_pil_list and not image_is_tensor_list:\n            raise TypeError(\n                \"image must be passed and be one of PIL image, torch tensor, list of PIL images, or list of torch tensors\"\n            )\n\n        if image_is_pil:\n            image_batch_size = 1\n        elif image_is_tensor:\n            image_batch_size = image.shape[0]\n        elif image_is_pil_list:\n            image_batch_size = len(image)\n        elif image_is_tensor_list:\n            image_batch_size = len(image)\n\n        if prompt is not None and isinstance(prompt, str):\n            prompt_batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            prompt_batch_size = len(prompt)\n        elif prompt_embeds is not None:\n            prompt_batch_size = prompt_embeds.shape[0]\n\n        if image_batch_size != 1 and image_batch_size != prompt_batch_size:\n            raise ValueError(\n                f\"If image batch size is not 1, image batch size must be same as prompt batch size. 
image batch size: {image_batch_size}, prompt batch size: {prompt_batch_size}\"\n            )\n\n    def prepare_image(\n        self,\n        image,\n        width,\n        height,\n        batch_size,\n        num_images_per_prompt,\n        device,\n        dtype,\n        do_classifier_free_guidance=False,\n        guess_mode=False,\n    ):\n        if not isinstance(image, torch.Tensor):\n            if isinstance(image, PIL.Image.Image):\n                image = [image]\n\n            if isinstance(image[0], PIL.Image.Image):\n                images = []\n\n                for image_ in image:\n                    image_ = image_.convert(\"RGB\")\n                    image_ = image_.resize((width, height), resample=PIL_INTERPOLATION[\"lanczos\"])\n                    image_ = np.array(image_)\n                    image_ = image_[None, :]\n                    images.append(image_)\n\n                image = images\n\n                image = np.concatenate(image, axis=0)\n                image = np.array(image).astype(np.float32) / 255.0\n                image = image.transpose(0, 3, 1, 2)\n                image = torch.from_numpy(image)\n            elif isinstance(image[0], torch.Tensor):\n                image = torch.cat(image, dim=0)\n\n        image_batch_size = image.shape[0]\n\n        if image_batch_size == 1:\n            repeat_by = batch_size\n        else:\n            # image batch size is the same as prompt batch size\n            repeat_by = num_images_per_prompt\n\n        image = image.repeat_interleave(repeat_by, dim=0)\n\n        image = image.to(device=device, dtype=dtype)\n\n        if do_classifier_free_guidance and not guess_mode:\n            image = torch.cat([image] * 2)\n\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    def _default_height_width(self, height, width, image):\n        # NOTE: It is possible that a list of images have different\n        # dimensions for each image, so just checking the first image\n        # is not _exactly_ correct, but it is simple.\n        while isinstance(image, list):\n            image = image[0]\n\n        if height is None:\n            if isinstance(image, PIL.Image.Image):\n                height = image.height\n            elif isinstance(image, torch.Tensor):\n                height = image.shape[2]\n\n            height = (height // 8) * 8  # round down to nearest multiple of 8\n\n        if width is None:\n            if isinstance(image, PIL.Image.Image):\n                width = image.width\n            elif isinstance(image, torch.Tensor):\n                width = image.shape[3]\n\n            width = (width // 8) * 8  # round down to nearest multiple of 8\n\n        return height, width\n\n    # override DiffusionPipeline\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        safe_serialization: bool = False,\n        variant: Optional[str] = None,\n    ):\n        if isinstance(self.controlnet, ControlNetModel):\n            super().save_pretrained(save_directory, safe_serialization, variant)\n        else:\n            raise NotImplementedError(\"Currently, the `save_pretrained()` is not implemented for Multi-ControlNet.\")\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image, List[torch.FloatTensor], List[PIL.Image.Image]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        controlnet_conditioning_scale: Union[float, List[float]] = 1.0,\n        guess_mode: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. 
If not defined, one has to pass `prompt_embeds`\n                instead.\n            image (`torch.FloatTensor`, `PIL.Image.Image`, `List[torch.FloatTensor]`, `List[PIL.Image.Image]`,\n                    `List[List[torch.FloatTensor]]`, or `List[List[PIL.Image.Image]]`):\n                The ControlNet input condition. ControlNet uses this input condition to generate guidance to the UNet. If\n                the type is specified as `torch.FloatTensor`, it is passed to ControlNet as is. `PIL.Image.Image` can\n                also be accepted as an image. The dimensions of the output image default to `image`'s dimensions. If\n                height and/or width are passed, `image` is resized according to them. If multiple ControlNets are\n                specified in init, images must be passed as a list such that each element of the list can be correctly\n                batched for input to a single controlnet.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2 of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. 
Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that, if specified, is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 1.0):\n                The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added\n                to the residual in the original unet. If multiple ControlNets are specified in init, you can set the\n                corresponding scale as a list.\n            guess_mode (`bool`, *optional*, defaults to `False`):\n                In this mode, the ControlNet encoder will try its best to recognize the content of the input image even if\n                you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height, width = self._default_height_width(height, width, image)\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt,\n            image,\n            height,\n            width,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n            controlnet_conditioning_scale,\n        )\n\n        # 2. 
Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        controlnet = self.controlnet._orig_mod if is_compiled_module(self.controlnet) else self.controlnet\n\n        if isinstance(controlnet, MultiControlNetModel) and isinstance(controlnet_conditioning_scale, float):\n            controlnet_conditioning_scale = [controlnet_conditioning_scale] * len(controlnet.nets)\n\n        global_pool_conditions = (\n            controlnet.config.global_pool_conditions\n            if isinstance(controlnet, ControlNetModel)\n            else controlnet.nets[0].config.global_pool_conditions\n        )\n        guess_mode = guess_mode or global_pool_conditions\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare image\n        if isinstance(controlnet, ControlNetModel):\n            image = self.prepare_image(\n                image=image,\n                width=width,\n                height=height,\n                batch_size=batch_size * num_images_per_prompt,\n                num_images_per_prompt=num_images_per_prompt,\n                device=device,\n                dtype=controlnet.dtype,\n                do_classifier_free_guidance=do_classifier_free_guidance,\n                guess_mode=guess_mode,\n            )\n        elif isinstance(controlnet, MultiControlNetModel):\n            images = []\n\n            for image_ in image:\n                image_ = self.prepare_image(\n                    image=image_,\n                    width=width,\n                    height=height,\n                    batch_size=batch_size * num_images_per_prompt,\n                    num_images_per_prompt=num_images_per_prompt,\n                    device=device,\n                    dtype=controlnet.dtype,\n                    do_classifier_free_guidance=do_classifier_free_guidance,\n                    guess_mode=guess_mode,\n                )\n\n                images.append(image_)\n\n            image = images\n        else:\n            assert False\n\n        # 5. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 7. Prepare extra step kwargs. 
TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # controlnet(s) inference\n                if guess_mode and do_classifier_free_guidance:\n                    # Infer ControlNet only for the conditional batch.\n                    controlnet_latent_model_input = latents\n                    controlnet_prompt_embeds = prompt_embeds.chunk(2)[1]\n                else:\n                    controlnet_latent_model_input = latent_model_input\n                    controlnet_prompt_embeds = prompt_embeds\n\n                down_block_res_samples, mid_block_res_sample = self.controlnet(\n                    controlnet_latent_model_input,\n                    t,\n                    encoder_hidden_states=controlnet_prompt_embeds,\n                    controlnet_cond=image,\n                    conditioning_scale=controlnet_conditioning_scale,\n                    guess_mode=guess_mode,\n                    return_dict=False,\n                )\n\n                if guess_mode and do_classifier_free_guidance:\n                    # Infered ControlNet only for the conditional batch.\n                    # To apply the output of ControlNet to both the unconditional and conditional batches,\n                    # add 0 to the unconditional batch to keep it unchanged.\n                    down_block_res_samples = [torch.cat([torch.zeros_like(d), d]) for d in down_block_res_samples]\n                    mid_block_res_sample = torch.cat([torch.zeros_like(mid_block_res_sample), mid_block_res_sample])\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    down_block_additional_residuals=down_block_res_samples,\n                    mid_block_additional_residual=mid_block_res_sample,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # If we do sequential model offloading, let's offload unet and controlnet\n        # manually for max memory savings\n     
   if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.unet.to(\"cpu\")\n            self.controlnet.to(\"cpu\")\n            torch.cuda.empty_cache()\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/controlnet/pipeline_controlnet_img2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport inspect\nimport os\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\n\nimport numpy as np\nimport PIL.Image\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, ControlNetModel, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_compiled_module,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom ..stable_diffusion import StableDiffusionPipelineOutput\nfrom ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom .multicontrolnet import MultiControlNetModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> # !pip install opencv-python transformers accelerate\n        >>> from diffusers import StableDiffusionControlNetImg2ImgPipeline, ControlNetModel, UniPCMultistepScheduler\n        >>> from diffusers.utils import load_image\n        >>> import numpy as np\n        >>> import torch\n\n        >>> import cv2\n        >>> from PIL import Image\n\n        >>> # download an image\n        >>> image = load_image(\n        ...     \"https://hf.co/datasets/huggingface/documentation-images/resolve/main/diffusers/input_image_vermeer.png\"\n        ... )\n        >>> np_image = np.array(image)\n\n        >>> # get canny image\n        >>> np_image = cv2.Canny(np_image, 100, 200)\n        >>> np_image = np_image[:, :, None]\n        >>> np_image = np.concatenate([np_image, np_image, np_image], axis=2)\n        >>> canny_image = Image.fromarray(np_image)\n\n        >>> # load control net and stable diffusion v1-5\n        >>> controlnet = ControlNetModel.from_pretrained(\"lllyasviel/sd-controlnet-canny\", torch_dtype=torch.float16)\n        >>> pipe = StableDiffusionControlNetImg2ImgPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", controlnet=controlnet, torch_dtype=torch.float16\n        ... )\n\n        >>> # speed up diffusion process with faster scheduler and memory optimization\n        >>> pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> # generate image\n        >>> generator = torch.manual_seed(0)\n        >>> image = pipe(\n        ...     \"futuristic-looking woman\",\n        ...     num_inference_steps=20,\n        ...     generator=generator,\n        ...     image=image,\n        ...     control_image=canny_image,\n        ... 
).images[0]\n        ```\n\"\"\"\n\n\ndef prepare_image(image):\n    if isinstance(image, torch.Tensor):\n        # Batch single image\n        if image.ndim == 3:\n            image = image.unsqueeze(0)\n\n        image = image.to(dtype=torch.float32)\n    else:\n        # preprocess image\n        if isinstance(image, (PIL.Image.Image, np.ndarray)):\n            image = [image]\n\n        if isinstance(image, list) and isinstance(image[0], PIL.Image.Image):\n            image = [np.array(i.convert(\"RGB\"))[None, :] for i in image]\n            image = np.concatenate(image, axis=0)\n        elif isinstance(image, list) and isinstance(image[0], np.ndarray):\n            image = np.concatenate([i[None, :] for i in image], axis=0)\n\n        image = image.transpose(0, 3, 1, 2)\n        image = torch.from_numpy(image).to(dtype=torch.float32) / 127.5 - 1.0\n\n    return image\n\n\nclass StableDiffusionControlNetImg2ImgPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for image-to-image generation using Stable Diffusion with ControlNet guidance.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        controlnet ([`ControlNetModel`] or `List[ControlNetModel]`):\n            Provides additional conditioning to the unet during the denoising process. If you set multiple ControlNets\n            as a list, the outputs from each ControlNet are added together to create one combined additional\n            conditioning.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        controlnet: Union[ControlNetModel, List[ControlNetModel], Tuple[ControlNetModel], MultiControlNetModel],\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        if isinstance(controlnet, (list, tuple)):\n            controlnet = MultiControlNetModel(controlnet)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            controlnet=controlnet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. 
This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae, controlnet, and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.controlnet]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. 
Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            # the safety checker can offload the vae again\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # control net hook has be manually offloaded as it alternates with unet\n        cpu_offload_with_hook(self.controlnet, device)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                
raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n        controlnet_conditioning_scale=1.0,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        # `prompt` needs more sophisticated handling when there are multiple\n        # conditionings.\n        if isinstance(self.controlnet, MultiControlNetModel):\n            if isinstance(prompt, list):\n                logger.warning(\n                    f\"You have {len(self.controlnet.nets)} ControlNets and you have passed {len(prompt)}\"\n                    \" prompts. The conditionings will be fixed across the prompts.\"\n                )\n\n        # Check `image`\n        is_compiled = hasattr(F, \"scaled_dot_product_attention\") and isinstance(\n            self.controlnet, torch._dynamo.eval_frame.OptimizedModule\n        )\n        if (\n            isinstance(self.controlnet, ControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, ControlNetModel)\n        ):\n            self.check_image(image, prompt, prompt_embeds)\n        elif (\n            isinstance(self.controlnet, MultiControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, MultiControlNetModel)\n        ):\n            if not isinstance(image, list):\n                raise TypeError(\"For multiple controlnets: `image` must be type `list`\")\n\n            # When `image` is a nested list:\n            # (e.g. 
[[canny_image_1, pose_image_1], [canny_image_2, pose_image_2]])\n            elif any(isinstance(i, list) for i in image):\n                raise ValueError(\"Only a single batch of multiple conditionings is supported at the moment.\")\n            elif len(image) != len(self.controlnet.nets):\n                raise ValueError(\n                    \"For multiple controlnets: `image` must have the same length as the number of controlnets.\"\n                )\n\n
            for image_ in image:\n                self.check_image(image_, prompt, prompt_embeds)\n        else:\n            assert False\n\n        # Check `controlnet_conditioning_scale`\n        if (\n            isinstance(self.controlnet, ControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, ControlNetModel)\n        ):\n            if not isinstance(controlnet_conditioning_scale, float):\n                raise TypeError(\"For single controlnet: `controlnet_conditioning_scale` must be type `float`.\")\n
        elif (\n            isinstance(self.controlnet, MultiControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, MultiControlNetModel)\n        ):\n            if isinstance(controlnet_conditioning_scale, list):\n                if any(isinstance(i, list) for i in controlnet_conditioning_scale):\n                    raise ValueError(\"Only a single batch of multiple conditionings is supported at the moment.\")\n                elif len(controlnet_conditioning_scale) != len(self.controlnet.nets):\n                    raise ValueError(\n                        \"For multiple controlnets: When `controlnet_conditioning_scale` is specified as `list`, it must have\"\n                        \" the same length as the number of controlnets\"\n                    )\n        else:\n            assert False\n\n
    def check_image(self, image, prompt, prompt_embeds):\n        image_is_pil = isinstance(image, PIL.Image.Image)\n        image_is_tensor = isinstance(image, torch.Tensor)\n        image_is_pil_list = isinstance(image, list) and isinstance(image[0], PIL.Image.Image)\n        image_is_tensor_list = isinstance(image, list) and isinstance(image[0], torch.Tensor)\n\n        if not image_is_pil and not image_is_tensor and not image_is_pil_list and not image_is_tensor_list:\n            raise TypeError(\n                \"image must be passed and be one of PIL image, torch tensor, list of PIL images, or list of torch tensors\"\n            )\n\n
        if image_is_pil:\n            image_batch_size = 1\n        elif image_is_tensor:\n            image_batch_size = image.shape[0]\n        elif image_is_pil_list:\n            image_batch_size = len(image)\n        elif image_is_tensor_list:\n            image_batch_size = len(image)\n\n        if prompt is not None and isinstance(prompt, str):\n            prompt_batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            prompt_batch_size = len(prompt)\n        elif prompt_embeds is not None:\n            prompt_batch_size = prompt_embeds.shape[0]\n\n
        if image_batch_size != 1 and image_batch_size != prompt_batch_size:\n            raise ValueError(\n                f\"If image batch size is not 1, image batch size must be the same as the prompt batch size. 
image batch size: {image_batch_size}, prompt batch size: {prompt_batch_size}\"\n            )\n\n    # Copied from diffusers.pipelines.controlnet.pipeline_controlnet.StableDiffusionControlNetPipeline.prepare_image\n    def prepare_control_image(\n        self,\n        image,\n        width,\n        height,\n        batch_size,\n        num_images_per_prompt,\n        device,\n        dtype,\n        do_classifier_free_guidance=False,\n        guess_mode=False,\n    ):\n        if not isinstance(image, torch.Tensor):\n            if isinstance(image, PIL.Image.Image):\n                image = [image]\n\n            if isinstance(image[0], PIL.Image.Image):\n                images = []\n\n                for image_ in image:\n                    image_ = image_.convert(\"RGB\")\n                    image_ = image_.resize((width, height), resample=PIL_INTERPOLATION[\"lanczos\"])\n                    image_ = np.array(image_)\n                    image_ = image_[None, :]\n                    images.append(image_)\n\n                image = images\n\n                image = np.concatenate(image, axis=0)\n                image = np.array(image).astype(np.float32) / 255.0\n                image = image.transpose(0, 3, 1, 2)\n                image = torch.from_numpy(image)\n            elif isinstance(image[0], torch.Tensor):\n                image = torch.cat(image, dim=0)\n\n        image_batch_size = image.shape[0]\n\n        if image_batch_size == 1:\n            repeat_by = batch_size\n        else:\n            # image batch size is the same as prompt batch size\n            repeat_by = num_images_per_prompt\n\n        image = image.repeat_interleave(repeat_by, dim=0)\n\n        image = image.to(device=device, dtype=dtype)\n\n        if do_classifier_free_guidance and not guess_mode:\n            image = torch.cat([image] * 2)\n\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.prepare_latents\n    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        batch_size = batch_size * num_images_per_prompt\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            init_latents = [\n                self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)\n            ]\n            init_latents = torch.cat(init_latents, dim=0)\n        else:\n            init_latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0:\n            # expand init_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {batch_size} text prompts (`prompt`), but only {init_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = batch_size // init_latents.shape[0]\n            init_latents = torch.cat([init_latents] * additional_image_per_prompt, dim=0)\n        elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts.\"\n            )\n        else:\n            init_latents = torch.cat([init_latents], dim=0)\n\n        shape = init_latents.shape\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # get latents\n        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)\n        latents = init_latents\n\n        return latents\n\n    def _default_height_width(self, height, width, image):\n        # NOTE: It is possible that a list of images have different\n        # dimensions for each image, so just checking the first image\n        # is not _exactly_ correct, but it is simple.\n        while isinstance(image, list):\n            image = image[0]\n\n        if height is None:\n            if isinstance(image, PIL.Image.Image):\n                height = image.height\n            elif isinstance(image, torch.Tensor):\n                height = image.shape[2]\n\n            height = (height // 8) * 8  # round down to nearest multiple of 8\n\n        if width is None:\n            if isinstance(image, PIL.Image.Image):\n                width = image.width\n            elif isinstance(image, torch.Tensor):\n                width = image.shape[3]\n\n            width = (width // 8) * 8  # round down to nearest multiple of 8\n\n        return height, width\n\n    # override DiffusionPipeline\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        safe_serialization: bool = False,\n        variant: Optional[str] = None,\n    ):\n        if isinstance(self.controlnet, ControlNetModel):\n            super().save_pretrained(save_directory, safe_serialization, variant)\n        else:\n            raise NotImplementedError(\"Currently, the `save_pretrained()` is not implemented for Multi-ControlNet.\")\n\n    @torch.no_grad()\n    
@replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image, List[torch.FloatTensor], List[PIL.Image.Image]] = None,\n        control_image: Union[\n            torch.FloatTensor, PIL.Image.Image, List[torch.FloatTensor], List[PIL.Image.Image]\n        ] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        strength: float = 0.8,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        controlnet_conditioning_scale: Union[float, List[float]] = 0.8,\n        guess_mode: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n
        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            image (`torch.FloatTensor`, `PIL.Image.Image`, `List[torch.FloatTensor]`, `List[PIL.Image.Image]`,\n                    `List[List[torch.FloatTensor]]`, or `List[List[PIL.Image.Image]]`):\n                The ControlNet input condition. ControlNet uses this input condition to generate guidance to the UNet. If\n                the type is specified as `torch.FloatTensor`, it is passed to ControlNet as is. `PIL.Image.Image` can\n                also be accepted as an image. The dimensions of the output image default to `image`'s dimensions. If\n                height and/or width are passed, `image` is resized according to them. If multiple ControlNets are\n                specified in init, images must be passed as a list such that each element of the list can be correctly\n                batched for input to a single controlnet.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages the model to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n
            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that, if specified, is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 0.8):\n                The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added\n                to the residual in the original unet. If multiple ControlNets are specified in init, you can set the\n                corresponding scale as a list. Note that by default, we use a smaller conditioning scale for img2img\n                than for [`~StableDiffusionControlNetPipeline.__call__`].\n            guess_mode (`bool`, *optional*, defaults to `False`):\n                In this mode, the ControlNet encoder will try its best to recognize the content of the input image even if\n                you remove all prompts. A `guidance_scale` between 3.0 and 5.0 is recommended.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height, width = self._default_height_width(height, width, image)\n\n
        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt,\n            control_image,\n            height,\n            width,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n            controlnet_conditioning_scale,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n
        device = self._execution_device\n        # here `guidance_scale` is defined analogously to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        controlnet = self.controlnet._orig_mod if is_compiled_module(self.controlnet) else self.controlnet\n\n        if isinstance(controlnet, MultiControlNetModel) and isinstance(controlnet_conditioning_scale, float):\n            controlnet_conditioning_scale = [controlnet_conditioning_scale] * len(controlnet.nets)\n\n        global_pool_conditions = (\n            controlnet.config.global_pool_conditions\n            if isinstance(controlnet, ControlNetModel)\n            else controlnet.nets[0].config.global_pool_conditions\n        )\n        guess_mode = guess_mode or global_pool_conditions\n\n        # 3. 
Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n        # 4. Prepare image, and controlnet_conditioning_image\n        image = prepare_image(image)\n\n        # 5. Prepare image\n        if isinstance(controlnet, ControlNetModel):\n            control_image = self.prepare_control_image(\n                image=control_image,\n                width=width,\n                height=height,\n                batch_size=batch_size * num_images_per_prompt,\n                num_images_per_prompt=num_images_per_prompt,\n                device=device,\n                dtype=controlnet.dtype,\n                do_classifier_free_guidance=do_classifier_free_guidance,\n                guess_mode=guess_mode,\n            )\n        elif isinstance(controlnet, MultiControlNetModel):\n            control_images = []\n\n            for control_image_ in control_image:\n                control_image_ = self.prepare_control_image(\n                    image=control_image_,\n                    width=width,\n                    height=height,\n                    batch_size=batch_size * num_images_per_prompt,\n                    num_images_per_prompt=num_images_per_prompt,\n                    device=device,\n                    dtype=controlnet.dtype,\n                    do_classifier_free_guidance=do_classifier_free_guidance,\n                    guess_mode=guess_mode,\n                )\n\n                control_images.append(control_image_)\n\n            control_image = control_images\n        else:\n            assert False\n\n        # 5. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n\n        # 6. Prepare latent variables\n        latents = self.prepare_latents(\n            image,\n            latent_timestep,\n            batch_size,\n            num_images_per_prompt,\n            prompt_embeds.dtype,\n            device,\n            generator,\n        )\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # controlnet(s) inference\n                if guess_mode and do_classifier_free_guidance:\n                    # Infer ControlNet only for the conditional batch.\n                    controlnet_latent_model_input = latents\n                    controlnet_prompt_embeds = prompt_embeds.chunk(2)[1]\n                else:\n                    controlnet_latent_model_input = latent_model_input\n                    controlnet_prompt_embeds = prompt_embeds\n\n                down_block_res_samples, mid_block_res_sample = self.controlnet(\n                    controlnet_latent_model_input,\n                    t,\n                    encoder_hidden_states=controlnet_prompt_embeds,\n                    controlnet_cond=control_image,\n                    conditioning_scale=controlnet_conditioning_scale,\n                    guess_mode=guess_mode,\n                    return_dict=False,\n                )\n\n                if guess_mode and do_classifier_free_guidance:\n                    # Infered ControlNet only for the conditional batch.\n                    # To apply the output of ControlNet to both the unconditional and conditional batches,\n                    # add 0 to the unconditional batch to keep it unchanged.\n                    down_block_res_samples = [torch.cat([torch.zeros_like(d), d]) for d in down_block_res_samples]\n                    mid_block_res_sample = torch.cat([torch.zeros_like(mid_block_res_sample), mid_block_res_sample])\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    down_block_additional_residuals=down_block_res_samples,\n                    mid_block_additional_residual=mid_block_res_sample,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # If we do sequential model offloading, let's offload unet and controlnet\n        # manually for max memory savings\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.unet.to(\"cpu\")\n            
self.controlnet.to(\"cpu\")\n            torch.cuda.empty_cache()\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/controlnet/pipeline_controlnet_inpaint.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# This model implementation is heavily inspired by https://github.com/haofanwang/ControlNet-for-Diffusers/\n\nimport inspect\nimport os\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\n\nimport numpy as np\nimport PIL.Image\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, ControlNetModel, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_compiled_module,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom ..stable_diffusion import StableDiffusionPipelineOutput\nfrom ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom .multicontrolnet import MultiControlNetModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> # !pip install opencv-python transformers accelerate\n        >>> from diffusers import StableDiffusionControlNetInpaintPipeline, ControlNetModel, UniPCMultistepScheduler\n        >>> from diffusers.utils import load_image\n        >>> import numpy as np\n        >>> import torch\n\n        >>> import cv2\n        >>> from PIL import Image\n\n        >>> img_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png\"\n        >>> mask_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png\"\n\n        >>> init_image = load_image(img_url).resize((512, 512))\n        >>> mask_image = load_image(mask_url).resize((512, 512))\n\n        >>> image = np.array(init_image)\n\n        >>> # get canny image\n        >>> image = cv2.Canny(image, 100, 200)\n        >>> image = image[:, :, None]\n        >>> image = np.concatenate([image, image, image], axis=2)\n        >>> canny_image = Image.fromarray(image)\n\n        >>> # load control net and stable diffusion inpainting\n        >>> controlnet = ControlNetModel.from_pretrained(\"lllyasviel/sd-controlnet-canny\", torch_dtype=torch.float16)\n        >>> pipe = StableDiffusionControlNetInpaintPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-inpainting\", controlnet=controlnet, torch_dtype=torch.float16\n        ... 
)\n\n        >>> # speed up diffusion process with faster scheduler and memory optimization\n        >>> pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)\n\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> # generate image\n        >>> generator = torch.manual_seed(0)\n        >>> image = pipe(\n        ...     \"spiderman\",\n        ...     num_inference_steps=30,\n        ...     generator=generator,\n        ...     image=init_image,\n        ...     mask_image=mask_image,\n        ...     control_image=canny_image,\n        ... ).images[0]\n        ```\n\"\"\"\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_inpaint.prepare_mask_and_masked_image\ndef prepare_mask_and_masked_image(image, mask, height, width, return_image=False):\n    \"\"\"\n    Prepares a pair (image, mask) to be consumed by the Stable Diffusion pipeline. This means that those inputs will be\n    converted to ``torch.Tensor`` with shapes ``batch x channels x height x width`` where ``channels`` is ``3`` for the\n    ``image`` and ``1`` for the ``mask``.\n\n    The ``image`` will be converted to ``torch.float32`` and normalized to be in ``[-1, 1]``. The ``mask`` will be\n    binarized (``mask > 0.5``) and cast to ``torch.float32`` too.\n\n    Args:\n        image (Union[np.array, PIL.Image, torch.Tensor]): The image to inpaint.\n            It can be a ``PIL.Image``, or a ``height x width x 3`` ``np.array`` or a ``channels x height x width``\n            ``torch.Tensor`` or a ``batch x channels x height x width`` ``torch.Tensor``.\n        mask (_type_): The mask to apply to the image, i.e. regions to inpaint.\n            It can be a ``PIL.Image``, or a ``height x width`` ``np.array`` or a ``1 x height x width``\n            ``torch.Tensor`` or a ``batch x 1 x height x width`` ``torch.Tensor``.\n\n\n    Raises:\n        ValueError: ``torch.Tensor`` images should be in the ``[-1, 1]`` range. ValueError: ``torch.Tensor`` mask\n        should be in the ``[0, 1]`` range. 
ValueError: ``mask`` and ``image`` should have the same spatial dimensions.\n        TypeError: ``mask`` is a ``torch.Tensor`` but ``image`` is not\n            (or the other way around).\n\n    Returns:\n        tuple[torch.Tensor]: The pair (mask, masked_image) as ``torch.Tensor`` with 4\n            dimensions: ``batch x channels x height x width``.\n    \"\"\"\n\n    if image is None:\n        raise ValueError(\"`image` input cannot be undefined.\")\n\n    if mask is None:\n        raise ValueError(\"`mask_image` input cannot be undefined.\")\n\n    if isinstance(image, torch.Tensor):\n        if not isinstance(mask, torch.Tensor):\n            raise TypeError(f\"`image` is a torch.Tensor but `mask` (type: {type(mask)}) is not\")\n\n
        # Batch single image\n        if image.ndim == 3:\n            assert image.shape[0] == 3, \"Image outside a batch should be of shape (3, H, W)\"\n            image = image.unsqueeze(0)\n\n        # Batch and add channel dim for single mask\n        if mask.ndim == 2:\n            mask = mask.unsqueeze(0).unsqueeze(0)\n\n        # Batch single mask or add channel dim\n        if mask.ndim == 3:\n            # Single batched mask, no channel dim or single mask not batched but channel dim\n            if mask.shape[0] == 1:\n                mask = mask.unsqueeze(0)\n\n            # Batched masks no channel dim\n            else:\n                mask = mask.unsqueeze(1)\n\n
        assert image.ndim == 4 and mask.ndim == 4, \"Image and Mask must have 4 dimensions\"\n        assert image.shape[-2:] == mask.shape[-2:], \"Image and Mask must have the same spatial dimensions\"\n        assert image.shape[0] == mask.shape[0], \"Image and Mask must have the same batch size\"\n\n        # Check image is in [-1, 1]\n        if image.min() < -1 or image.max() > 1:\n            raise ValueError(\"Image should be in [-1, 1] range\")\n\n        # Check mask is in [0, 1]\n        if mask.min() < 0 or mask.max() > 1:\n            raise ValueError(\"Mask should be in [0, 1] range\")\n\n        # Binarize mask\n        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n\n        # Image as float32\n        image = image.to(dtype=torch.float32)\n    elif isinstance(mask, torch.Tensor):\n        raise TypeError(f\"`mask` is a torch.Tensor but `image` (type: {type(image)}) is not\")\n    else:\n
        # preprocess image\n        if isinstance(image, (PIL.Image.Image, np.ndarray)):\n            image = [image]\n        if isinstance(image, list) and isinstance(image[0], PIL.Image.Image):\n            # resize all images w.r.t. the passed height and width\n            image = [i.resize((width, height), resample=PIL.Image.LANCZOS) for i in image]\n            image = [np.array(i.convert(\"RGB\"))[None, :] for i in image]\n            image = np.concatenate(image, axis=0)\n        elif isinstance(image, list) and isinstance(image[0], np.ndarray):\n            image = np.concatenate([i[None, :] for i in image], axis=0)\n\n        image = image.transpose(0, 3, 1, 2)\n        image = torch.from_numpy(image).to(dtype=torch.float32) / 127.5 - 1.0\n\n
        # preprocess mask\n        if isinstance(mask, (PIL.Image.Image, np.ndarray)):\n            mask = [mask]\n\n        if isinstance(mask, list) and isinstance(mask[0], PIL.Image.Image):\n            mask = [i.resize((width, height), resample=PIL.Image.LANCZOS) for i in mask]\n            mask = np.concatenate([np.array(m.convert(\"L\"))[None, None, :] for m in mask], axis=0)\n            mask = mask.astype(np.float32) / 255.0\n        elif isinstance(mask, list) and isinstance(mask[0], np.ndarray):\n            mask = np.concatenate([m[None, None, :] for m in mask], axis=0)\n\n
        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n        mask = torch.from_numpy(mask)\n\n    masked_image = image * (mask < 0.5)\n\n    # n.b. ensure backwards compatibility as old function does not return image\n    if return_image:\n        return mask, masked_image, image\n\n    return mask, masked_image\n\n\nclass StableDiffusionControlNetInpaintPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion with ControlNet guidance.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition, the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n\n
    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        controlnet ([`ControlNetModel`] or `List[ControlNetModel]`):\n            Provides additional conditioning to the unet during the denoising process. If you set multiple ControlNets\n            as a list, the outputs from each ControlNet are added together to create one combined additional\n            conditioning.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n
    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        controlnet: Union[ControlNetModel, List[ControlNetModel], Tuple[ControlNetModel], MultiControlNetModel],\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n
        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n
        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        if isinstance(controlnet, (list, tuple)):\n            controlnet = MultiControlNetModel(controlnet)\n\n
        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            controlnet=controlnet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n
    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. 
This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae, controlnet, and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.controlnet]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. 
Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            # the safety checker can offload the vae again\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # control net hook has be manually offloaded as it alternates with unet\n        cpu_offload_with_hook(self.controlnet, device)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                
raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n        controlnet_conditioning_scale=1.0,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        # `prompt` needs more sophisticated handling when there are multiple\n        # conditionings.\n        if isinstance(self.controlnet, MultiControlNetModel):\n            if isinstance(prompt, list):\n                logger.warning(\n                    f\"You have {len(self.controlnet.nets)} ControlNets and you have passed {len(prompt)}\"\n                    \" prompts. The conditionings will be fixed across the prompts.\"\n                )\n\n        # Check `image`\n        is_compiled = hasattr(F, \"scaled_dot_product_attention\") and isinstance(\n            self.controlnet, torch._dynamo.eval_frame.OptimizedModule\n        )\n        if (\n            isinstance(self.controlnet, ControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, ControlNetModel)\n        ):\n            self.check_image(image, prompt, prompt_embeds)\n        elif (\n            isinstance(self.controlnet, MultiControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, MultiControlNetModel)\n        ):\n            if not isinstance(image, list):\n                raise TypeError(\"For multiple controlnets: `image` must be type `list`\")\n\n            # When `image` is a nested list:\n            # (e.g. 
[[canny_image_1, pose_image_1], [canny_image_2, pose_image_2]])\n            elif any(isinstance(i, list) for i in image):\n                raise ValueError(\"A single batch of multiple conditionings are supported at the moment.\")\n            elif len(image) != len(self.controlnet.nets):\n                raise ValueError(\n                    \"For multiple controlnets: `image` must have the same length as the number of controlnets.\"\n                )\n\n            for image_ in image:\n                self.check_image(image_, prompt, prompt_embeds)\n        else:\n            assert False\n\n        # Check `controlnet_conditioning_scale`\n        if (\n            isinstance(self.controlnet, ControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, ControlNetModel)\n        ):\n            if not isinstance(controlnet_conditioning_scale, float):\n                raise TypeError(\"For single controlnet: `controlnet_conditioning_scale` must be type `float`.\")\n        elif (\n            isinstance(self.controlnet, MultiControlNetModel)\n            or is_compiled\n            and isinstance(self.controlnet._orig_mod, MultiControlNetModel)\n        ):\n            if isinstance(controlnet_conditioning_scale, list):\n                if any(isinstance(i, list) for i in controlnet_conditioning_scale):\n                    raise ValueError(\"A single batch of multiple conditionings are supported at the moment.\")\n            elif isinstance(controlnet_conditioning_scale, list) and len(controlnet_conditioning_scale) != len(\n                self.controlnet.nets\n            ):\n                raise ValueError(\n                    \"For multiple controlnets: When `controlnet_conditioning_scale` is specified as `list`, it must have\"\n                    \" the same length as the number of controlnets\"\n                )\n        else:\n            assert False\n\n    def check_image(self, image, prompt, prompt_embeds):\n        image_is_pil = isinstance(image, PIL.Image.Image)\n        image_is_tensor = isinstance(image, torch.Tensor)\n        image_is_pil_list = isinstance(image, list) and isinstance(image[0], PIL.Image.Image)\n        image_is_tensor_list = isinstance(image, list) and isinstance(image[0], torch.Tensor)\n\n        if not image_is_pil and not image_is_tensor and not image_is_pil_list and not image_is_tensor_list:\n            raise TypeError(\n                \"image must be passed and be one of PIL image, torch tensor, list of PIL images, or list of torch tensors\"\n            )\n\n        if image_is_pil:\n            image_batch_size = 1\n        elif image_is_tensor:\n            image_batch_size = image.shape[0]\n        elif image_is_pil_list:\n            image_batch_size = len(image)\n        elif image_is_tensor_list:\n            image_batch_size = len(image)\n\n        if prompt is not None and isinstance(prompt, str):\n            prompt_batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            prompt_batch_size = len(prompt)\n        elif prompt_embeds is not None:\n            prompt_batch_size = prompt_embeds.shape[0]\n\n        if image_batch_size != 1 and image_batch_size != prompt_batch_size:\n            raise ValueError(\n                f\"If image batch size is not 1, image batch size must be same as prompt batch size. 
image batch size: {image_batch_size}, prompt batch size: {prompt_batch_size}\"\n            )\n\n    # Copied from diffusers.pipelines.controlnet.pipeline_controlnet.StableDiffusionControlNetPipeline.prepare_image\n    def prepare_control_image(\n        self,\n        image,\n        width,\n        height,\n        batch_size,\n        num_images_per_prompt,\n        device,\n        dtype,\n        do_classifier_free_guidance=False,\n        guess_mode=False,\n    ):\n        if not isinstance(image, torch.Tensor):\n            if isinstance(image, PIL.Image.Image):\n                image = [image]\n\n            if isinstance(image[0], PIL.Image.Image):\n                images = []\n\n                for image_ in image:\n                    image_ = image_.convert(\"RGB\")\n                    image_ = image_.resize((width, height), resample=PIL_INTERPOLATION[\"lanczos\"])\n                    image_ = np.array(image_)\n                    image_ = image_[None, :]\n                    images.append(image_)\n\n                image = images\n\n                image = np.concatenate(image, axis=0)\n                image = np.array(image).astype(np.float32) / 255.0\n                image = image.transpose(0, 3, 1, 2)\n                image = torch.from_numpy(image)\n            elif isinstance(image[0], torch.Tensor):\n                image = torch.cat(image, dim=0)\n\n        image_batch_size = image.shape[0]\n\n        if image_batch_size == 1:\n            repeat_by = batch_size\n        else:\n            # image batch size is the same as prompt batch size\n            repeat_by = num_images_per_prompt\n\n        image = image.repeat_interleave(repeat_by, dim=0)\n\n        image = image.to(device=device, dtype=dtype)\n\n        if do_classifier_free_guidance and not guess_mode:\n            image = torch.cat([image] * 2)\n\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_inpaint.StableDiffusionInpaintPipeline.prepare_latents\n    def prepare_latents(\n        self,\n        batch_size,\n        num_channels_latents,\n        height,\n        width,\n        dtype,\n        device,\n        generator,\n        latents=None,\n        image=None,\n        timestep=None,\n        is_strength_max=True,\n    ):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if (image is None or timestep is None) and not is_strength_max:\n            raise ValueError(\n                \"Since strength < 1. 
initial latents are to be initialised as a combination of Image + Noise.\"\n                \"However, either the image or the noise timestep has not been provided.\"\n            )\n\n        if latents is None:\n            noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n            if is_strength_max:\n                # if strength is 100% then simply initialise the latents to noise\n                latents = noise\n            else:\n                # otherwise initialise latents as init image + noise\n                image = image.to(device=device, dtype=dtype)\n                if isinstance(generator, list):\n                    image_latents = [\n                        self.vae.encode(image[i : i + 1]).latent_dist.sample(generator=generator[i])\n                        for i in range(batch_size)\n                    ]\n                else:\n                    image_latents = self.vae.encode(image).latent_dist.sample(generator=generator)\n\n                image_latents = self.vae.config.scaling_factor * image_latents\n\n                latents = self.scheduler.add_noise(image_latents, noise, timestep)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n\n        return latents\n\n    def _default_height_width(self, height, width, image):\n        # NOTE: It is possible that a list of images have different\n        # dimensions for each image, so just checking the first image\n        # is not _exactly_ correct, but it is simple.\n        while isinstance(image, list):\n            image = image[0]\n\n        if height is None:\n            if isinstance(image, PIL.Image.Image):\n                height = image.height\n            elif isinstance(image, torch.Tensor):\n                height = image.shape[2]\n\n            height = (height // 8) * 8  # round down to nearest multiple of 8\n\n        if width is None:\n            if isinstance(image, PIL.Image.Image):\n                width = image.width\n            elif isinstance(image, torch.Tensor):\n                width = image.shape[3]\n\n            width = (width // 8) * 8  # round down to nearest multiple of 8\n\n        return height, width\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_inpaint.StableDiffusionInpaintPipeline.prepare_mask_latents\n    def prepare_mask_latents(\n        self, mask, masked_image, batch_size, height, width, dtype, device, generator, do_classifier_free_guidance\n    ):\n        # resize the mask to latents shape as we concatenate the mask to the latents\n        # we do that before converting to dtype to avoid breaking in case we're using cpu_offload\n        # and half precision\n        mask = torch.nn.functional.interpolate(\n            mask, size=(height // self.vae_scale_factor, width // self.vae_scale_factor)\n        )\n        mask = mask.to(device=device, dtype=dtype)\n\n        masked_image = masked_image.to(device=device, dtype=dtype)\n\n        # encode the mask image into latents space so we can concatenate it to the latents\n        if isinstance(generator, list):\n            masked_image_latents = [\n                self.vae.encode(masked_image[i : i + 1]).latent_dist.sample(generator=generator[i])\n                for i in range(batch_size)\n            ]\n            masked_image_latents = torch.cat(masked_image_latents, dim=0)\n        else:\n            
masked_image_latents = self.vae.encode(masked_image).latent_dist.sample(generator=generator)\n        masked_image_latents = self.vae.config.scaling_factor * masked_image_latents\n\n        # duplicate mask and masked_image_latents for each generation per prompt, using mps friendly method\n        if mask.shape[0] < batch_size:\n            if not batch_size % mask.shape[0] == 0:\n                raise ValueError(\n                    \"The passed mask and the required batch size don't match. Masks are supposed to be duplicated to\"\n                    f\" a total batch size of {batch_size}, but {mask.shape[0]} masks were passed. Make sure the number\"\n                    \" of masks that you pass is divisible by the total requested batch size.\"\n                )\n            mask = mask.repeat(batch_size // mask.shape[0], 1, 1, 1)\n        if masked_image_latents.shape[0] < batch_size:\n            if not batch_size % masked_image_latents.shape[0] == 0:\n                raise ValueError(\n                    \"The passed images and the required batch size don't match. Images are supposed to be duplicated\"\n                    f\" to a total batch size of {batch_size}, but {masked_image_latents.shape[0]} images were passed.\"\n                    \" Make sure the number of images that you pass is divisible by the total requested batch size.\"\n                )\n            masked_image_latents = masked_image_latents.repeat(batch_size // masked_image_latents.shape[0], 1, 1, 1)\n\n        mask = torch.cat([mask] * 2) if do_classifier_free_guidance else mask\n        masked_image_latents = (\n            torch.cat([masked_image_latents] * 2) if do_classifier_free_guidance else masked_image_latents\n        )\n\n        # aligning device to prevent device errors when concating it with the latent model input\n        masked_image_latents = masked_image_latents.to(device=device, dtype=dtype)\n        return mask, masked_image_latents\n\n    # override DiffusionPipeline\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        safe_serialization: bool = False,\n        variant: Optional[str] = None,\n    ):\n        if isinstance(self.controlnet, ControlNetModel):\n            super().save_pretrained(save_directory, safe_serialization, variant)\n        else:\n            raise NotImplementedError(\"Currently, the `save_pretrained()` is not implemented for Multi-ControlNet.\")\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.Tensor, PIL.Image.Image] = None,\n        mask_image: Union[torch.Tensor, PIL.Image.Image] = None,\n        control_image: Union[\n            torch.FloatTensor, PIL.Image.Image, List[torch.FloatTensor], List[PIL.Image.Image]\n        ] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        
callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        controlnet_conditioning_scale: Union[float, List[float]] = 0.5,\n        guess_mode: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`torch.FloatTensor`, `PIL.Image.Image`, `List[torch.FloatTensor]`, `List[PIL.Image.Image]`,\n                    `List[List[torch.FloatTensor]]`, or `List[List[PIL.Image.Image]]`):\n                The ControlNet input condition. ControlNet uses this input condition to generate guidance to Unet. If\n                the type is specified as `Torch.FloatTensor`, it is passed to ControlNet as is. `PIL.Image.Image` can\n                also be accepted as an image. The dimensions of the output image defaults to `image`'s dimensions. If\n                height and/or width are passed, `image` is resized according to them. If multiple ControlNets are\n                specified in init, images must be passed as a list such that each element of the list can be correctly\n                batched for input to a single controlnet.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            controlnet_conditioning_scale (`float` or `List[float]`, *optional*, defaults to 0.5):\n                The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added\n                to the residual in the original unet. If multiple ControlNets are specified in init, you can set the\n                corresponding scale as a list. Note that by default, we use a smaller conditioning scale for inpainting\n                than for [`~StableDiffusionControlNetPipeline.__call__`].\n            guess_mode (`bool`, *optional*, defaults to `False`):\n                In this mode, the ControlNet encoder will try best to recognize the content of the input image even if\n                you remove all prompts. 
The `guidance_scale` between 3.0 and 5.0 is recommended.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height, width = self._default_height_width(height, width, image)\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt,\n            control_image,\n            height,\n            width,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n            controlnet_conditioning_scale,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        controlnet = self.controlnet._orig_mod if is_compiled_module(self.controlnet) else self.controlnet\n\n        if isinstance(controlnet, MultiControlNetModel) and isinstance(controlnet_conditioning_scale, float):\n            controlnet_conditioning_scale = [controlnet_conditioning_scale] * len(controlnet.nets)\n\n        global_pool_conditions = (\n            controlnet.config.global_pool_conditions\n            if isinstance(controlnet, ControlNetModel)\n            else controlnet.nets[0].config.global_pool_conditions\n        )\n        guess_mode = guess_mode or global_pool_conditions\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. 
Prepare image\n        if isinstance(controlnet, ControlNetModel):\n            control_image = self.prepare_control_image(\n                image=control_image,\n                width=width,\n                height=height,\n                batch_size=batch_size * num_images_per_prompt,\n                num_images_per_prompt=num_images_per_prompt,\n                device=device,\n                dtype=controlnet.dtype,\n                do_classifier_free_guidance=do_classifier_free_guidance,\n                guess_mode=guess_mode,\n            )\n        elif isinstance(controlnet, MultiControlNetModel):\n            control_images = []\n\n            for control_image_ in control_image:\n                control_image_ = self.prepare_control_image(\n                    image=control_image_,\n                    width=width,\n                    height=height,\n                    batch_size=batch_size * num_images_per_prompt,\n                    num_images_per_prompt=num_images_per_prompt,\n                    device=device,\n                    dtype=controlnet.dtype,\n                    do_classifier_free_guidance=do_classifier_free_guidance,\n                    guess_mode=guess_mode,\n                )\n\n                control_images.append(control_image_)\n\n            control_image = control_images\n        else:\n            assert False\n\n        # 4. Preprocess mask and image - resizes image and mask w.r.t height and width\n        # 5. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.vae.config.latent_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 7. Prepare mask latent variables\n        mask, masked_image = prepare_mask_and_masked_image(image, mask_image, height, width)\n        mask, masked_image_latents = self.prepare_mask_latents(\n            mask,\n            masked_image,\n            batch_size * num_images_per_prompt,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            do_classifier_free_guidance,\n        )\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                if guess_mode and do_classifier_free_guidance:\n                    # Infer ControlNet only for the conditional batch.\n                    controlnet_latent_model_input = latents\n                    controlnet_prompt_embeds = prompt_embeds.chunk(2)[1]\n                else:\n                    controlnet_latent_model_input = latent_model_input\n                    controlnet_prompt_embeds = prompt_embeds\n\n                down_block_res_samples, mid_block_res_sample = self.controlnet(\n                    controlnet_latent_model_input,\n                    t,\n                    encoder_hidden_states=controlnet_prompt_embeds,\n                    controlnet_cond=control_image,\n                    conditioning_scale=controlnet_conditioning_scale,\n                    guess_mode=guess_mode,\n                    return_dict=False,\n                )\n\n                if guess_mode and do_classifier_free_guidance:\n                    # Infered ControlNet only for the conditional batch.\n                    # To apply the output of ControlNet to both the unconditional and conditional batches,\n                    # add 0 to the unconditional batch to keep it unchanged.\n                    down_block_res_samples = [torch.cat([torch.zeros_like(d), d]) for d in down_block_res_samples]\n                    mid_block_res_sample = torch.cat([torch.zeros_like(mid_block_res_sample), mid_block_res_sample])\n\n                # predict the noise residual\n                latent_model_input = torch.cat([latent_model_input, mask, masked_image_latents], dim=1)\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    down_block_additional_residuals=down_block_res_samples,\n                    mid_block_additional_residual=mid_block_res_sample,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # If we do sequential model offloading, let's offload unet and controlnet\n        # manually for max memory savings\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not 
None:\n            self.unet.to(\"cpu\")\n            self.controlnet.to(\"cpu\")\n            torch.cuda.empty_cache()\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
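The pipeline file above defines the usual ControlNet-inpainting call surface (`prompt`, `image`, `mask_image`, `control_image`, `controlnet_conditioning_scale`, ...). A minimal usage sketch follows; the class name `StableDiffusionControlNetInpaintPipeline`, the checkpoint ids, and the image URLs are assumptions for illustration and are not confirmed by this file.

```py
# Minimal usage sketch for the inpainting ControlNet pipeline defined above.
# Assumed: the class is exported as StableDiffusionControlNetInpaintPipeline and
# the referenced Hub checkpoints are available.
import torch
from diffusers import ControlNetModel, StableDiffusionControlNetInpaintPipeline
from diffusers.utils import load_image

init_image = load_image("https://example.com/source.png")    # hypothetical URL
mask_image = load_image("https://example.com/mask.png")      # white = region to inpaint
control_image = load_image("https://example.com/canny.png")  # pre-computed condition image

controlnet = ControlNetModel.from_pretrained(
    "lllyasviel/sd-controlnet-canny", torch_dtype=torch.float16
)
pipe = StableDiffusionControlNetInpaintPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
).to("cuda")

# controlnet_conditioning_scale defaults to 0.5 here, lower than in the
# text-to-image ControlNet pipeline, as noted in the __call__ docstring.
result = pipe(
    prompt="a red couch in a living room, best quality",
    image=init_image,
    mask_image=mask_image,
    control_image=control_image,
    num_inference_steps=50,
).images[0]
result.save("inpainted.png")
```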
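The same file also wires in the standard memory-saving helpers (`enable_vae_slicing`, `enable_vae_tiling`, `enable_model_cpu_offload`, `enable_sequential_cpu_offload`). A short sketch of how they are typically combined, assuming the `pipe` object from the previous example and `accelerate>=0.17.0` installed:

```py
# Memory-saving toggles exposed by the pipeline above (sketch, assuming `pipe`
# exists as in the previous example).
pipe.enable_vae_slicing()        # decode the batch slice-by-slice
pipe.enable_vae_tiling()         # decode/encode large images tile-by-tile
pipe.enable_model_cpu_offload()  # move whole sub-models to the GPU only when used

# For the lowest memory footprint (at a larger speed cost), sequential offloading
# moves individual submodules instead of whole models:
# pipe.enable_sequential_cpu_offload()
```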
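In the denoising loop above, classifier-free guidance combines the unconditional and conditional noise predictions, and in guess mode the ControlNet residuals are computed only for the conditional half and padded with zeros for the unconditional half. A toy sketch of those two steps, with arbitrary tensor shapes chosen purely for illustration:

```py
# Toy illustration of the guidance / guess-mode logic in the denoising loop above.
import torch

guidance_scale = 7.5
noise_pred = torch.randn(2, 4, 64, 64)  # [uncond, text] stacked along the batch dim

noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
guided = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)

# Guess mode: ControlNet residuals exist only for the conditional batch, so a
# zero tensor is prepended to leave the unconditional half unchanged.
mid_block_res_sample = torch.randn(1, 1280, 8, 8)  # toy shape
mid_block_res_sample = torch.cat(
    [torch.zeros_like(mid_block_res_sample), mid_block_res_sample]
)
```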
  {
    "path": "diffusers/pipelines/controlnet/pipeline_flax_controlnet.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom functools import partial\nfrom typing import Dict, List, Optional, Union\n\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom flax.core.frozen_dict import FrozenDict\nfrom flax.jax_utils import unreplicate\nfrom flax.training.common_utils import shard\nfrom PIL import Image\nfrom transformers import CLIPFeatureExtractor, CLIPTokenizer, FlaxCLIPTextModel\n\nfrom ...models import FlaxAutoencoderKL, FlaxControlNetModel, FlaxUNet2DConditionModel\nfrom ...schedulers import (\n    FlaxDDIMScheduler,\n    FlaxDPMSolverMultistepScheduler,\n    FlaxLMSDiscreteScheduler,\n    FlaxPNDMScheduler,\n)\nfrom ...utils import PIL_INTERPOLATION, logging, replace_example_docstring\nfrom ..pipeline_flax_utils import FlaxDiffusionPipeline\nfrom ..stable_diffusion import FlaxStableDiffusionPipelineOutput\nfrom ..stable_diffusion.safety_checker_flax import FlaxStableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n# Set to True to use python for loop instead of jax.fori_loop for easier debugging\nDEBUG = False\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import jax\n        >>> import numpy as np\n        >>> import jax.numpy as jnp\n        >>> from flax.jax_utils import replicate\n        >>> from flax.training.common_utils import shard\n        >>> from diffusers.utils import load_image\n        >>> from PIL import Image\n        >>> from diffusers import FlaxStableDiffusionControlNetPipeline, FlaxControlNetModel\n\n\n        >>> def image_grid(imgs, rows, cols):\n        ...     w, h = imgs[0].size\n        ...     grid = Image.new(\"RGB\", size=(cols * w, rows * h))\n        ...     for i, img in enumerate(imgs):\n        ...         grid.paste(img, box=(i % cols * w, i // cols * h))\n        ...     return grid\n\n\n        >>> def create_key(seed=0):\n        ...     return jax.random.PRNGKey(seed)\n\n\n        >>> rng = create_key(0)\n\n        >>> # get canny image\n        >>> canny_image = load_image(\n        ...     \"https://huggingface.co/datasets/YiYiXu/test-doc-assets/resolve/main/blog_post_cell_10_output_0.jpeg\"\n        ... )\n\n        >>> prompts = \"best quality, extremely detailed\"\n        >>> negative_prompts = \"monochrome, lowres, bad anatomy, worst quality, low quality\"\n\n        >>> # load control net and stable diffusion v1-5\n        >>> controlnet, controlnet_params = FlaxControlNetModel.from_pretrained(\n        ...     \"lllyasviel/sd-controlnet-canny\", from_pt=True, dtype=jnp.float32\n        ... )\n        >>> pipe, params = FlaxStableDiffusionControlNetPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", controlnet=controlnet, revision=\"flax\", dtype=jnp.float32\n        ... 
)\n        >>> params[\"controlnet\"] = controlnet_params\n\n        >>> num_samples = jax.device_count()\n        >>> rng = jax.random.split(rng, jax.device_count())\n\n        >>> prompt_ids = pipe.prepare_text_inputs([prompts] * num_samples)\n        >>> negative_prompt_ids = pipe.prepare_text_inputs([negative_prompts] * num_samples)\n        >>> processed_image = pipe.prepare_image_inputs([canny_image] * num_samples)\n\n        >>> p_params = replicate(params)\n        >>> prompt_ids = shard(prompt_ids)\n        >>> negative_prompt_ids = shard(negative_prompt_ids)\n        >>> processed_image = shard(processed_image)\n\n        >>> output = pipe(\n        ...     prompt_ids=prompt_ids,\n        ...     image=processed_image,\n        ...     params=p_params,\n        ...     prng_seed=rng,\n        ...     num_inference_steps=50,\n        ...     neg_prompt_ids=negative_prompt_ids,\n        ...     jit=True,\n        ... ).images\n\n        >>> output_images = pipe.numpy_to_pil(np.asarray(output.reshape((num_samples,) + output.shape[-3:])))\n        >>> output_images = image_grid(output_images, num_samples // 4, 4)\n        >>> output_images.save(\"generated_image.png\")\n        ```\n\"\"\"\n\n\nclass FlaxStableDiffusionControlNetPipeline(FlaxDiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion with ControlNet Guidance.\n\n    This model inherits from [`FlaxDiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`FlaxAutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`FlaxCLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.FlaxCLIPTextModel),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`FlaxUNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        controlnet ([`FlaxControlNetModel`]:\n            Provides additional conditioning to the unet during the denoising process.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`FlaxDDIMScheduler`], [`FlaxLMSDiscreteScheduler`], [`FlaxPNDMScheduler`], or\n            [`FlaxDPMSolverMultistepScheduler`].\n        safety_checker ([`FlaxStableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPFeatureExtractor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: FlaxAutoencoderKL,\n        text_encoder: FlaxCLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: FlaxUNet2DConditionModel,\n        controlnet: FlaxControlNetModel,\n        scheduler: Union[\n            FlaxDDIMScheduler, FlaxPNDMScheduler, FlaxLMSDiscreteScheduler, FlaxDPMSolverMultistepScheduler\n        ],\n        safety_checker: FlaxStableDiffusionSafetyChecker,\n        feature_extractor: CLIPFeatureExtractor,\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        super().__init__()\n        self.dtype = dtype\n\n        if safety_checker is None:\n            logger.warn(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. 
For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            controlnet=controlnet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    def prepare_text_inputs(self, prompt: Union[str, List[str]]):\n        if not isinstance(prompt, (str, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        text_input = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"np\",\n        )\n\n        return text_input.input_ids\n\n    def prepare_image_inputs(self, image: Union[Image.Image, List[Image.Image]]):\n        if not isinstance(image, (Image.Image, list)):\n            raise ValueError(f\"image has to be of type `PIL.Image.Image` or list but is {type(image)}\")\n\n        if isinstance(image, Image.Image):\n            image = [image]\n\n        processed_images = jnp.concatenate([preprocess(img, jnp.float32) for img in image])\n\n        return processed_images\n\n    def _get_has_nsfw_concepts(self, features, params):\n        has_nsfw_concepts = self.safety_checker(features, params)\n        return has_nsfw_concepts\n\n    def _run_safety_checker(self, images, safety_model_params, jit=False):\n        # safety_model_params should already be replicated when jit is True\n        pil_images = [Image.fromarray(image) for image in images]\n        features = self.feature_extractor(pil_images, return_tensors=\"np\").pixel_values\n\n        if jit:\n            features = shard(features)\n            has_nsfw_concepts = _p_get_has_nsfw_concepts(self, features, safety_model_params)\n            has_nsfw_concepts = unshard(has_nsfw_concepts)\n            safety_model_params = unreplicate(safety_model_params)\n        else:\n            has_nsfw_concepts = self._get_has_nsfw_concepts(features, safety_model_params)\n\n        images_was_copied = False\n        for idx, has_nsfw_concept in enumerate(has_nsfw_concepts):\n            if has_nsfw_concept:\n                if not images_was_copied:\n                    images_was_copied = True\n                    images = images.copy()\n\n                images[idx] = np.zeros(images[idx].shape, dtype=np.uint8)  # black image\n\n            if any(has_nsfw_concepts):\n                warnings.warn(\n                    \"Potential NSFW content was detected in one or more images. A black image will be returned\"\n                    \" instead. 
Try again with a different prompt and/or seed.\"\n                )\n\n        return images, has_nsfw_concepts\n\n    def _generate(\n        self,\n        prompt_ids: jnp.array,\n        image: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        num_inference_steps: int,\n        guidance_scale: float,\n        latents: Optional[jnp.array] = None,\n        neg_prompt_ids: Optional[jnp.array] = None,\n        controlnet_conditioning_scale: float = 1.0,\n    ):\n        height, width = image.shape[-2:]\n        if height % 64 != 0 or width % 64 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 64 but are {height} and {width}.\")\n\n        # get prompt text embeddings\n        prompt_embeds = self.text_encoder(prompt_ids, params=params[\"text_encoder\"])[0]\n\n        # TODO: currently it is assumed `do_classifier_free_guidance = guidance_scale > 1.0`\n        # implement this conditional `do_classifier_free_guidance = guidance_scale > 1.0`\n        batch_size = prompt_ids.shape[0]\n\n        max_length = prompt_ids.shape[-1]\n\n        if neg_prompt_ids is None:\n            uncond_input = self.tokenizer(\n                [\"\"] * batch_size, padding=\"max_length\", max_length=max_length, return_tensors=\"np\"\n            ).input_ids\n        else:\n            uncond_input = neg_prompt_ids\n        negative_prompt_embeds = self.text_encoder(uncond_input, params=params[\"text_encoder\"])[0]\n        context = jnp.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        image = jnp.concatenate([image] * 2)\n\n        latents_shape = (\n            batch_size,\n            self.unet.config.in_channels,\n            height // self.vae_scale_factor,\n            width // self.vae_scale_factor,\n        )\n        if latents is None:\n            latents = jax.random.normal(prng_seed, shape=latents_shape, dtype=jnp.float32)\n        else:\n            if latents.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n\n        def loop_body(step, args):\n            latents, scheduler_state = args\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            latents_input = jnp.concatenate([latents] * 2)\n\n            t = jnp.array(scheduler_state.timesteps, dtype=jnp.int32)[step]\n            timestep = jnp.broadcast_to(t, latents_input.shape[0])\n\n            latents_input = self.scheduler.scale_model_input(scheduler_state, latents_input, t)\n\n            down_block_res_samples, mid_block_res_sample = self.controlnet.apply(\n                {\"params\": params[\"controlnet\"]},\n                jnp.array(latents_input),\n                jnp.array(timestep, dtype=jnp.int32),\n                encoder_hidden_states=context,\n                controlnet_cond=image,\n                conditioning_scale=controlnet_conditioning_scale,\n                return_dict=False,\n            )\n\n            # predict the noise residual\n            noise_pred = self.unet.apply(\n                {\"params\": params[\"unet\"]},\n                jnp.array(latents_input),\n                jnp.array(timestep, dtype=jnp.int32),\n                encoder_hidden_states=context,\n                down_block_additional_residuals=down_block_res_samples,\n                
mid_block_additional_residual=mid_block_res_sample,\n            ).sample\n\n            # perform guidance\n            noise_pred_uncond, noise_prediction_text = jnp.split(noise_pred, 2, axis=0)\n            noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents, scheduler_state = self.scheduler.step(scheduler_state, noise_pred, t, latents).to_tuple()\n            return latents, scheduler_state\n\n        scheduler_state = self.scheduler.set_timesteps(\n            params[\"scheduler\"], num_inference_steps=num_inference_steps, shape=latents_shape\n        )\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * params[\"scheduler\"].init_noise_sigma\n\n        if DEBUG:\n            # run with python for loop\n            for i in range(num_inference_steps):\n                latents, scheduler_state = loop_body(i, (latents, scheduler_state))\n        else:\n            latents, _ = jax.lax.fori_loop(0, num_inference_steps, loop_body, (latents, scheduler_state))\n\n        # scale and decode the image latents with vae\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.apply({\"params\": params[\"vae\"]}, latents, method=self.vae.decode).sample\n\n        image = (image / 2 + 0.5).clip(0, 1).transpose(0, 2, 3, 1)\n        return image\n\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt_ids: jnp.array,\n        image: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        num_inference_steps: int = 50,\n        guidance_scale: Union[float, jnp.array] = 7.5,\n        latents: jnp.array = None,\n        neg_prompt_ids: jnp.array = None,\n        controlnet_conditioning_scale: Union[float, jnp.array] = 1.0,\n        return_dict: bool = True,\n        jit: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt_ids (`jnp.array`):\n                The prompt or prompts to guide the image generation.\n            image (`jnp.array`):\n                Array representing the ControlNet input condition. ControlNet use this input condition to generate\n                guidance to Unet.\n            params (`Dict` or `FrozenDict`): Dictionary containing the model parameters/weights\n            prng_seed (`jax.random.KeyArray` or `jax.Array`): Array containing random number generator key\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. 
Higher guidance scale encourages the model to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            latents (`jnp.array`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `prng_seed`.\n            controlnet_conditioning_scale (`float` or `jnp.array`, *optional*, defaults to 1.0):\n                The outputs of the controlnet are multiplied by `controlnet_conditioning_scale` before they are added\n                to the residual in the original unet.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] instead of\n                a plain tuple.\n            jit (`bool`, defaults to `False`):\n                Whether to run `pmap` versions of the generation and safety scoring functions. NOTE: This argument\n                exists because `__call__` is not yet end-to-end pmap-able. It will be removed in a future release.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is a list with the generated images, and the second\n            element is a list of `bool`s denoting whether the corresponding generated image likely represents\n            \"not-safe-for-work\" (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        height, width = image.shape[-2:]\n\n        if isinstance(guidance_scale, float):\n            # Convert to a tensor so each device gets a copy. Follow the prompt_ids for\n            # shape information, as they may be sharded (when `jit` is `True`), or not.\n            guidance_scale = jnp.array([guidance_scale] * prompt_ids.shape[0])\n            if len(prompt_ids.shape) > 2:\n                # Assume sharded\n                guidance_scale = guidance_scale[:, None]\n\n        if isinstance(controlnet_conditioning_scale, float):\n            # Convert to a tensor so each device gets a copy. 
Follow the prompt_ids for\n            # shape information, as they may be sharded (when `jit` is `True`), or not.\n            controlnet_conditioning_scale = jnp.array([controlnet_conditioning_scale] * prompt_ids.shape[0])\n            if len(prompt_ids.shape) > 2:\n                # Assume sharded\n                controlnet_conditioning_scale = controlnet_conditioning_scale[:, None]\n\n        if jit:\n            images = _p_generate(\n                self,\n                prompt_ids,\n                image,\n                params,\n                prng_seed,\n                num_inference_steps,\n                guidance_scale,\n                latents,\n                neg_prompt_ids,\n                controlnet_conditioning_scale,\n            )\n        else:\n            images = self._generate(\n                prompt_ids,\n                image,\n                params,\n                prng_seed,\n                num_inference_steps,\n                guidance_scale,\n                latents,\n                neg_prompt_ids,\n                controlnet_conditioning_scale,\n            )\n\n        if self.safety_checker is not None:\n            safety_params = params[\"safety_checker\"]\n            images_uint8_casted = (images * 255).round().astype(\"uint8\")\n            num_devices, batch_size = images.shape[:2]\n\n            images_uint8_casted = np.asarray(images_uint8_casted).reshape(num_devices * batch_size, height, width, 3)\n            images_uint8_casted, has_nsfw_concept = self._run_safety_checker(images_uint8_casted, safety_params, jit)\n            images = np.asarray(images)\n\n            # block images\n            if any(has_nsfw_concept):\n                for i, is_nsfw in enumerate(has_nsfw_concept):\n                    if is_nsfw:\n                        images[i] = np.asarray(images_uint8_casted[i])\n\n            images = images.reshape(num_devices, batch_size, height, width, 3)\n        else:\n            images = np.asarray(images)\n            has_nsfw_concept = False\n\n        if not return_dict:\n            return (images, has_nsfw_concept)\n\n        return FlaxStableDiffusionPipelineOutput(images=images, nsfw_content_detected=has_nsfw_concept)\n\n\n# Static argnums are pipe, num_inference_steps. A change would trigger recompilation.\n# Non-static args are (sharded) input tensors mapped over their first dimension (hence, `0`).\n@partial(\n    jax.pmap,\n    in_axes=(None, 0, 0, 0, 0, None, 0, 0, 0, 0),\n    static_broadcasted_argnums=(0, 5),\n)\ndef _p_generate(\n    pipe,\n    prompt_ids,\n    image,\n    params,\n    prng_seed,\n    num_inference_steps,\n    guidance_scale,\n    latents,\n    neg_prompt_ids,\n    controlnet_conditioning_scale,\n):\n    return pipe._generate(\n        prompt_ids,\n        image,\n        params,\n        prng_seed,\n        num_inference_steps,\n        guidance_scale,\n        latents,\n        neg_prompt_ids,\n        controlnet_conditioning_scale,\n    )\n\n\n@partial(jax.pmap, static_broadcasted_argnums=(0,))\ndef _p_get_has_nsfw_concepts(pipe, features, params):\n    return pipe._get_has_nsfw_concepts(features, params)\n\n\ndef unshard(x: jnp.ndarray):\n    # einops.rearrange(x, 'd b ... 
-> (d b) ...')\n    num_devices, batch_size = x.shape[:2]\n    rest = x.shape[2:]\n    return x.reshape(num_devices * batch_size, *rest)\n\n\ndef preprocess(image, dtype):\n    image = image.convert(\"RGB\")\n    w, h = image.size\n    w, h = (x - x % 64 for x in (w, h))  # resize to integer multiple of 64\n    image = image.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"])\n    image = jnp.array(image).astype(dtype) / 255.0\n    image = image[None].transpose(0, 3, 1, 2)\n    return image\n"
  },
  {
    "path": "diffusers/pipelines/dance_diffusion/__init__.py",
    "content": "from .pipeline_dance_diffusion import DanceDiffusionPipeline\n"
  },
  {
    "path": "diffusers/pipelines/dance_diffusion/pipeline_dance_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...utils import logging, randn_tensor\nfrom ..pipeline_utils import AudioPipelineOutput, DiffusionPipeline\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass DanceDiffusionPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        unet ([`UNet1DModel`]): U-Net architecture to denoise the encoded image.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image. Can be one of\n            [`IPNDMScheduler`].\n    \"\"\"\n\n    def __init__(self, unet, scheduler):\n        super().__init__()\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        num_inference_steps: int = 100,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        audio_length_in_s: Optional[float] = None,\n        return_dict: bool = True,\n    ) -> Union[AudioPipelineOutput, Tuple]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, *optional*, defaults to 1):\n                The number of audio samples to generate.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality audio sample at\n                the expense of slower inference.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            audio_length_in_s (`float`, *optional*, defaults to `self.unet.config.sample_size/self.unet.config.sample_rate`):\n                The length of the generated audio sample in seconds. Note that the output of the pipeline, *i.e.*\n                `sample_size`, will be `audio_length_in_s` * `self.unet.config.sample_rate`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.AudioPipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.AudioPipelineOutput`] or `tuple`: [`~pipelines.utils.AudioPipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. 
When returning a tuple, the first element is a list with the generated audio.\n        \"\"\"\n\n        if audio_length_in_s is None:\n            audio_length_in_s = self.unet.config.sample_size / self.unet.config.sample_rate\n\n        sample_size = audio_length_in_s * self.unet.config.sample_rate\n\n        down_scale_factor = 2 ** len(self.unet.up_blocks)\n        if sample_size < 3 * down_scale_factor:\n            raise ValueError(\n                f\"{audio_length_in_s} is too small. Make sure it's bigger than or equal to\"\n                f\" {3 * down_scale_factor / self.unet.config.sample_rate}.\"\n            )\n\n        original_sample_size = int(sample_size)\n        if sample_size % down_scale_factor != 0:\n            sample_size = (\n                (audio_length_in_s * self.unet.config.sample_rate) // down_scale_factor + 1\n            ) * down_scale_factor\n            logger.info(\n                f\"{audio_length_in_s} is increased to {sample_size / self.unet.config.sample_rate} so that it can be handled\"\n                f\" by the model. It will be cut to {original_sample_size / self.unet.config.sample_rate} after the denoising\"\n                \" process.\"\n            )\n        sample_size = int(sample_size)\n\n        dtype = next(iter(self.unet.parameters())).dtype\n        shape = (batch_size, self.unet.config.in_channels, sample_size)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        audio = randn_tensor(shape, generator=generator, device=self.device, dtype=dtype)\n\n        # set step values\n        self.scheduler.set_timesteps(num_inference_steps, device=audio.device)\n        self.scheduler.timesteps = self.scheduler.timesteps.to(dtype)\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            # 1. predict noise model_output\n            model_output = self.unet(audio, t).sample\n\n            # 2. compute the previous audio sample: x_t -> x_t-1\n            audio = self.scheduler.step(model_output, t, audio).prev_sample\n\n        audio = audio.clamp(-1, 1).float().cpu().numpy()\n\n        audio = audio[:, :, :original_sample_size]\n\n        if not return_dict:\n            return (audio,)\n\n        return AudioPipelineOutput(audios=audio)\n"
  },
  {
    "path": "diffusers/pipelines/ddim/__init__.py",
    "content": "from .pipeline_ddim import DDIMPipeline\n"
  },
  {
    "path": "diffusers/pipelines/ddim/pipeline_ddim.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...schedulers import DDIMScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass DDIMPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image. Can be one of\n            [`DDPMScheduler`], or [`DDIMScheduler`].\n    \"\"\"\n\n    def __init__(self, unet, scheduler):\n        super().__init__()\n\n        # make sure scheduler can always be converted to DDIM\n        scheduler = DDIMScheduler.from_config(scheduler.config)\n\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        eta: float = 0.0,\n        num_inference_steps: int = 50,\n        use_clipped_model_output: Optional[bool] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, *optional*, defaults to 1):\n                The number of images to generate.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            eta (`float`, *optional*, defaults to 0.0):\n                The eta parameter which controls the scale of the variance (0 is DDIM and 1 is one type of DDPM).\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            use_clipped_model_output (`bool`, *optional*, defaults to `None`):\n                if `True` or `False`, see documentation for `DDIMScheduler.step`. If `None`, nothing is passed\n                downstream to the scheduler. So use `None` for schedulers which don't support this argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n\n        # Sample gaussian noise to begin loop\n        if isinstance(self.unet.config.sample_size, int):\n            image_shape = (\n                batch_size,\n                self.unet.config.in_channels,\n                self.unet.config.sample_size,\n                self.unet.config.sample_size,\n            )\n        else:\n            image_shape = (batch_size, self.unet.config.in_channels, *self.unet.config.sample_size)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        image = randn_tensor(image_shape, generator=generator, device=self.device, dtype=self.unet.dtype)\n\n        # set step values\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            # 1. predict noise model_output\n            model_output = self.unet(image, t).sample\n\n            # 2. predict previous mean of image x_t-1 and add variance depending on eta\n            # eta corresponds to η in paper and should be between [0, 1]\n            # do x_t -> x_t-1\n            image = self.scheduler.step(\n                model_output, t, image, eta=eta, use_clipped_model_output=use_clipped_model_output, generator=generator\n            ).prev_sample\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/ddpm/__init__.py",
    "content": "from .pipeline_ddpm import DDPMPipeline\n"
  },
  {
    "path": "diffusers/pipelines/ddpm/pipeline_ddpm.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass DDPMPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image. Can be one of\n            [`DDPMScheduler`], or [`DDIMScheduler`].\n    \"\"\"\n\n    def __init__(self, unet, scheduler):\n        super().__init__()\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        num_inference_steps: int = 1000,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, *optional*, defaults to 1):\n                The number of images to generate.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            num_inference_steps (`int`, *optional*, defaults to 1000):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. 
When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        # Sample gaussian noise to begin loop\n        if isinstance(self.unet.config.sample_size, int):\n            image_shape = (\n                batch_size,\n                self.unet.config.in_channels,\n                self.unet.config.sample_size,\n                self.unet.config.sample_size,\n            )\n        else:\n            image_shape = (batch_size, self.unet.config.in_channels, *self.unet.config.sample_size)\n\n        if self.device.type == \"mps\":\n            # randn does not work reproducibly on mps\n            image = randn_tensor(image_shape, generator=generator)\n            image = image.to(self.device)\n        else:\n            image = randn_tensor(image_shape, generator=generator, device=self.device)\n\n        # set step values\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            # 1. predict noise model_output\n            model_output = self.unet(image, t).sample\n\n            # 2. compute previous image: x_t -> x_t-1\n            image = self.scheduler.step(model_output, t, image, generator=generator).prev_sample\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/deepfloyd_if/__init__.py",
    "content": "from dataclasses import dataclass\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\n\nfrom ...utils import BaseOutput, OptionalDependencyNotAvailable, is_torch_available, is_transformers_available\nfrom .timesteps import (\n    fast27_timesteps,\n    smart27_timesteps,\n    smart50_timesteps,\n    smart100_timesteps,\n    smart185_timesteps,\n    super27_timesteps,\n    super40_timesteps,\n    super100_timesteps,\n)\n\n\n@dataclass\nclass IFPipelineOutput(BaseOutput):\n    \"\"\"\n    Args:\n    Output class for Stable Diffusion pipelines.\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n        nsfw_detected (`List[bool]`)\n            List of flags denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content or a watermark. `None` if safety checking could not be performed.\n        watermark_detected (`List[bool]`)\n            List of flags denoting whether the corresponding generated image likely has a watermark. `None` if safety\n            checking could not be performed.\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n    nsfw_detected: Optional[List[bool]]\n    watermark_detected: Optional[List[bool]]\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403\nelse:\n    from .pipeline_if import IFPipeline\n    from .pipeline_if_img2img import IFImg2ImgPipeline\n    from .pipeline_if_img2img_superresolution import IFImg2ImgSuperResolutionPipeline\n    from .pipeline_if_inpainting import IFInpaintingPipeline\n    from .pipeline_if_inpainting_superresolution import IFInpaintingSuperResolutionPipeline\n    from .pipeline_if_superresolution import IFSuperResolutionPipeline\n    from .safety_checker import IFSafetyChecker\n    from .watermark import IFWatermarker\n"
  },
  {
    "path": "diffusers/pipelines/deepfloyd_if/pipeline_if.py",
    "content": "import html\nimport inspect\nimport re\nimport urllib.parse as ul\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport torch\nfrom transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer\n\nfrom ...loaders import LoraLoaderMixin\nfrom ...models import UNet2DConditionModel\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import (\n    BACKENDS_MAPPING,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_ftfy_available,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import IFPipelineOutput\nfrom .safety_checker import IFSafetyChecker\nfrom .watermark import IFWatermarker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nif is_bs4_available():\n    from bs4 import BeautifulSoup\n\nif is_ftfy_available():\n    import ftfy\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> from diffusers import IFPipeline, IFSuperResolutionPipeline, DiffusionPipeline\n        >>> from diffusers.utils import pt_to_pil\n        >>> import torch\n\n        >>> pipe = IFPipeline.from_pretrained(\"DeepFloyd/IF-I-XL-v1.0\", variant=\"fp16\", torch_dtype=torch.float16)\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = 'a photo of a kangaroo wearing an orange hoodie and blue sunglasses standing in front of the eiffel tower holding a sign that says \"very deep learning\"'\n        >>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)\n\n        >>> image = pipe(prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds, output_type=\"pt\").images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> super_res_1_pipe = IFSuperResolutionPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-II-L-v1.0\", text_encoder=None, variant=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> super_res_1_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_1_pipe(\n        ...     image=image, prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds, output_type=\"pt\"\n        ... ).images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> safety_modules = {\n        ...     \"feature_extractor\": pipe.feature_extractor,\n        ...     \"safety_checker\": pipe.safety_checker,\n        ...     \"watermarker\": pipe.watermarker,\n        ... }\n        >>> super_res_2_pipe = DiffusionPipeline.from_pretrained(\n        ...     \"stabilityai/stable-diffusion-x4-upscaler\", **safety_modules, torch_dtype=torch.float16\n        ... )\n        >>> super_res_2_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_2_pipe(\n        ...     prompt=prompt,\n        ...     image=image,\n        ... 
).images\n        >>> image[0].save(\"./if_stage_II.png\")\n        ```\n\"\"\"\n\n\nclass IFPipeline(DiffusionPipeline, LoraLoaderMixin):\n    tokenizer: T5Tokenizer\n    text_encoder: T5EncoderModel\n\n    unet: UNet2DConditionModel\n    scheduler: DDPMScheduler\n\n    feature_extractor: Optional[CLIPImageProcessor]\n    safety_checker: Optional[IFSafetyChecker]\n\n    watermarker: Optional[IFWatermarker]\n\n    bad_punct_regex = re.compile(\n        r\"[\" + \"#®•©™&@·º½¾¿¡§~\" + \"\\)\" + \"\\(\" + \"\\]\" + \"\\[\" + \"\\}\" + \"\\{\" + \"\\|\" + \"\\\\\" + \"\\/\" + \"\\*\" + r\"]{1,}\"\n    )  # noqa\n\n    _optional_components = [\"tokenizer\", \"text_encoder\", \"safety_checker\", \"feature_extractor\", \"watermarker\"]\n\n    def __init__(\n        self,\n        tokenizer: T5Tokenizer,\n        text_encoder: T5EncoderModel,\n        unet: UNet2DConditionModel,\n        scheduler: DDPMScheduler,\n        safety_checker: Optional[IFSafetyChecker],\n        feature_extractor: Optional[CLIPImageProcessor],\n        watermarker: Optional[IFWatermarker],\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the IF license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            watermarker=watermarker,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.text_encoder,\n            self.unet,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n\n        if self.text_encoder is not None:\n            _, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)\n\n            # Accelerate will move the next model to the device _before_ calling the offload hook of the\n            # previous model. This will cause both models to be present on the device at the same time.\n            # IF uses T5 for its text encoder which is really large. 
We can manually call the offload\n            # hook for the text encoder to ensure it's moved to the cpu before the unet is moved to\n            # the GPU.\n            self.text_encoder_offload_hook = hook\n\n        _, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)\n\n        # if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet\n        self.unet_offload_hook = hook\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    def remove_all_hooks(self):\n        if is_accelerate_available():\n            from accelerate.hooks import remove_hook_from_module\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        for model in [self.text_encoder, self.unet, self.safety_checker]:\n            if model is not None:\n                remove_hook_from_module(model, recurse=True)\n\n        self.unet_offload_hook = None\n        self.text_encoder_offload_hook = None\n        self.final_offload_hook = None\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    def encode_prompt(\n        self,\n        prompt,\n        do_classifier_free_guidance=True,\n        num_images_per_prompt=1,\n        device=None,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        clean_caption: bool = False,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`, *optional*):\n                torch device to place the resulting embeddings on\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. If not defined, one has to pass `negative_prompt_embeds`. 
instead.\n                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if device is None:\n            device = self._execution_device\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        # while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF\n        max_length = 77\n\n        if prompt_embeds is None:\n            prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {max_length} tokens: {removed_text}\"\n                )\n\n            attention_mask = text_inputs.attention_mask.to(device)\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        if self.text_encoder is not None:\n            dtype = self.text_encoder.dtype\n        elif self.unet is not None:\n            dtype = self.unet.dtype\n        else:\n            dtype = None\n\n        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if 
do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_attention_mask=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n        else:\n            negative_prompt_embeds = None\n\n        return prompt_embeds, negative_prompt_embeds\n\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in 
DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def prepare_intermediate_images(self, batch_size, num_channels, height, width, dtype, device, generator):\n        shape = (batch_size, num_channels, height, width)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        intermediate_images = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        intermediate_images = intermediate_images * self.scheduler.init_noise_sigma\n        return intermediate_images\n\n    def _text_preprocessing(self, text, clean_caption=False):\n        if clean_caption and not is_bs4_available():\n            logger.warn(BACKENDS_MAPPING[\"bs4\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if clean_caption and not is_ftfy_available():\n            logger.warn(BACKENDS_MAPPING[\"ftfy\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if not isinstance(text, (tuple, list)):\n            text = [text]\n\n        def process(text: str):\n            if clean_caption:\n                text = self._clean_caption(text)\n                text = self._clean_caption(text)\n            else:\n                text = text.lower().strip()\n            return text\n\n        return [process(t) for t in text]\n\n    def _clean_caption(self, caption):\n        caption = str(caption)\n        caption = ul.unquote_plus(caption)\n        caption = caption.strip().lower()\n        caption = re.sub(\"<person>\", \"person\", caption)\n        # urls:\n        caption = re.sub(\n            r\"\\b((?:https?:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        caption = re.sub(\n            r\"\\b((?:www:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        # html:\n        caption = BeautifulSoup(caption, features=\"html.parser\").text\n\n        # @<nickname>\n        caption = re.sub(r\"@[\\w\\d]+\\b\", \"\", caption)\n\n        # 31C0—31EF CJK Strokes\n        # 31F0—31FF Katakana Phonetic Extensions\n        # 3200—32FF Enclosed CJK Letters and Months\n        # 3300—33FF CJK Compatibility\n        # 3400—4DBF CJK Unified Ideographs Extension A\n        # 4DC0—4DFF Yijing Hexagram Symbols\n        # 4E00—9FFF CJK Unified Ideographs\n        caption = re.sub(r\"[\\u31c0-\\u31ef]+\", \"\", caption)\n        caption = re.sub(r\"[\\u31f0-\\u31ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3200-\\u32ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3300-\\u33ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3400-\\u4dbf]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4dc0-\\u4dff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4e00-\\u9fff]+\", \"\", caption)\n        #######################################################\n\n        # все виды тире / all types of dash --> \"-\"\n        caption = re.sub(\n            r\"[\\u002D\\u058A\\u05BE\\u1400\\u1806\\u2010-\\u2015\\u2E17\\u2E1A\\u2E3A\\u2E3B\\u2E40\\u301C\\u3030\\u30A0\\uFE31\\uFE32\\uFE58\\uFE63\\uFF0D]+\",  # noqa\n            \"-\",\n            caption,\n        )\n\n        # кавычки к одному стандарту\n        caption = re.sub(r\"[`´«»“”¨]\", '\"', caption)\n        caption = re.sub(r\"[‘’]\", \"'\", caption)\n\n        # 
&quot;\n        caption = re.sub(r\"&quot;?\", \"\", caption)\n        # &amp\n        caption = re.sub(r\"&amp\", \"\", caption)\n\n        # ip adresses:\n        caption = re.sub(r\"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\", \" \", caption)\n\n        # article ids:\n        caption = re.sub(r\"\\d:\\d\\d\\s+$\", \"\", caption)\n\n        # \\n\n        caption = re.sub(r\"\\\\n\", \" \", caption)\n\n        # \"#123\"\n        caption = re.sub(r\"#\\d{1,3}\\b\", \"\", caption)\n        # \"#12345..\"\n        caption = re.sub(r\"#\\d{5,}\\b\", \"\", caption)\n        # \"123456..\"\n        caption = re.sub(r\"\\b\\d{6,}\\b\", \"\", caption)\n        # filenames:\n        caption = re.sub(r\"[\\S]+\\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)\", \"\", caption)\n\n        #\n        caption = re.sub(r\"[\\\"\\']{2,}\", r'\"', caption)  # \"\"\"AUSVERKAUFT\"\"\"\n        caption = re.sub(r\"[\\.]{2,}\", r\" \", caption)  # \"\"\"AUSVERKAUFT\"\"\"\n\n        caption = re.sub(self.bad_punct_regex, r\" \", caption)  # ***AUSVERKAUFT***, #AUSVERKAUFT\n        caption = re.sub(r\"\\s+\\.\\s+\", r\" \", caption)  # \" . \"\n\n        # this-is-my-cute-cat / this_is_my_cute_cat\n        regex2 = re.compile(r\"(?:\\-|\\_)\")\n        if len(re.findall(regex2, caption)) > 3:\n            caption = re.sub(regex2, \" \", caption)\n\n        caption = ftfy.fix_text(caption)\n        caption = html.unescape(html.unescape(caption))\n\n        caption = re.sub(r\"\\b[a-zA-Z]{1,3}\\d{3,15}\\b\", \"\", caption)  # jc6640\n        caption = re.sub(r\"\\b[a-zA-Z]+\\d+[a-zA-Z]+\\b\", \"\", caption)  # jc6640vc\n        caption = re.sub(r\"\\b\\d+[a-zA-Z]+\\d+\\b\", \"\", caption)  # 6640vc231\n\n        caption = re.sub(r\"(worldwide\\s+)?(free\\s+)?shipping\", \"\", caption)\n        caption = re.sub(r\"(free\\s)?download(\\sfree)?\", \"\", caption)\n        caption = re.sub(r\"\\bclick\\b\\s(?:for|on)\\s\\w+\", \"\", caption)\n        caption = re.sub(r\"\\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\\simage[s]?)?\", \"\", caption)\n        caption = re.sub(r\"\\bpage\\s+\\d+\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\d*[a-zA-Z]+\\d+[a-zA-Z]+\\d+[a-zA-Z\\d]*\\b\", r\" \", caption)  # j2d1a2a...\n\n        caption = re.sub(r\"\\b\\d+\\.?\\d*[xх×]\\d+\\.?\\d*\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\s+\\:\\s+\", r\": \", caption)\n        caption = re.sub(r\"(\\D[,\\./])\\b\", r\"\\1 \", caption)\n        caption = re.sub(r\"\\s+\", \" \", caption)\n\n        caption.strip()\n\n        caption = re.sub(r\"^[\\\"\\']([\\w\\W]+)[\\\"\\']$\", r\"\\1\", caption)\n        caption = re.sub(r\"^[\\'\\_,\\-\\:;]\", r\"\", caption)\n        caption = re.sub(r\"[\\'\\_,\\-\\:\\-\\+]$\", r\"\", caption)\n        caption = re.sub(r\"^\\.\\S+$\", \"\", caption)\n\n        return caption.strip()\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        num_inference_steps: int = 100,\n        timesteps: List[int] = None,\n        guidance_scale: float = 7.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n 
       output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        clean_caption: bool = True,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            timesteps (`List[int]`, *optional*):\n                Custom timesteps to use for the denoising process. If not defined, equal spaced `num_inference_steps`\n                timesteps are used. Must be in descending order.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size):\n                The width in pixels of the generated image.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            clean_caption (`bool`, *optional*, defaults to `True`):\n                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to\n                be installed. If the dependencies are not installed, the embeddings will be created from the raw\n                prompt.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images, and the second element is a list\n            of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\" (nsfw)\n            or watermarked content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        # 2. Define call parameters\n        height = height or self.unet.config.sample_size\n        width = width or self.unet.config.sample_size\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. 
Encode input prompt\n        prompt_embeds, negative_prompt_embeds = self.encode_prompt(\n            prompt,\n            do_classifier_free_guidance,\n            num_images_per_prompt=num_images_per_prompt,\n            device=device,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            clean_caption=clean_caption,\n        )\n\n        if do_classifier_free_guidance:\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        # 4. Prepare timesteps\n        if timesteps is not None:\n            self.scheduler.set_timesteps(timesteps=timesteps, device=device)\n            timesteps = self.scheduler.timesteps\n            num_inference_steps = len(timesteps)\n        else:\n            self.scheduler.set_timesteps(num_inference_steps, device=device)\n            timesteps = self.scheduler.timesteps\n\n        # 5. Prepare intermediate images\n        intermediate_images = self.prepare_intermediate_images(\n            batch_size * num_images_per_prompt,\n            self.unet.config.in_channels,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # HACK: see comment in `enable_model_cpu_offload`\n        if hasattr(self, \"text_encoder_offload_hook\") and self.text_encoder_offload_hook is not None:\n            self.text_encoder_offload_hook.offload()\n\n        # 7. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                model_input = (\n                    torch.cat([intermediate_images] * 2) if do_classifier_free_guidance else intermediate_images\n                )\n                model_input = self.scheduler.scale_model_input(model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1], dim=1)\n                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1], dim=1)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n                if self.scheduler.config.variance_type not in [\"learned\", \"learned_range\"]:\n                    noise_pred, _ = noise_pred.split(model_input.shape[1], dim=1)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                intermediate_images = self.scheduler.step(\n                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False\n                )[0]\n\n                # call the callback, if provided\n 
               if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, intermediate_images)\n\n        image = intermediate_images\n\n        if output_type == \"pil\":\n            # 8. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 9. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            # 10. Convert to PIL\n            image = self.numpy_to_pil(image)\n\n            # 11. Apply watermark\n            if self.watermarker is not None:\n                image = self.watermarker.apply_watermark(image, self.unet.config.sample_size)\n        elif output_type == \"pt\":\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n        else:\n            # 8. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 9. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, nsfw_detected, watermark_detected)\n\n        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)\n"
  },
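  {
    "path": "sketches/if_cfg_variance_split_sketch.py",
    "content": "# Hypothetical illustration, not a file from the diffusers repo: a minimal sketch of the\n# classifier-free-guidance split used in the IF denoising loops above. The stage-I UNet returns\n# 2 * C channels (noise residual plus learned variance); under guidance the batch holds the\n# [uncond, text] halves, each half is split into (noise, variance), the noise parts are combined\n# as uncond + guidance_scale * (text - uncond), and the text branch's predicted variance is\n# re-attached so a DDPM scheduler with variance_type=\"learned\"/\"learned_range\" can consume it.\n# All tensors below are dummies standing in for `self.unet(model_input, t, ...)[0]`.\nimport torch\n\nbatch, channels, height, width = 2, 3, 64, 64  # stage-I works in pixel space, so 3 channels\nguidance_scale = 7.0\n\nnoise_pred = torch.randn(2 * batch, 2 * channels, height, width)  # doubled batch under CFG\n\nnoise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\nnoise_pred_uncond, _ = noise_pred_uncond.split(channels, dim=1)\nnoise_pred_text, predicted_variance = noise_pred_text.split(channels, dim=1)\nnoise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\nnoise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\nassert noise_pred.shape == (batch, 2 * channels, height, width)\nprint(noise_pred.shape)\n"
  },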
  {
    "path": "diffusers/pipelines/deepfloyd_if/pipeline_if_img2img.py",
    "content": "import html\nimport inspect\nimport re\nimport urllib.parse as ul\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer\n\nfrom ...loaders import LoraLoaderMixin\nfrom ...models import UNet2DConditionModel\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import (\n    BACKENDS_MAPPING,\n    PIL_INTERPOLATION,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_ftfy_available,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import IFPipelineOutput\nfrom .safety_checker import IFSafetyChecker\nfrom .watermark import IFWatermarker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nif is_bs4_available():\n    from bs4 import BeautifulSoup\n\nif is_ftfy_available():\n    import ftfy\n\n\ndef resize(images: PIL.Image.Image, img_size: int) -> PIL.Image.Image:\n    w, h = images.size\n\n    coef = w / h\n\n    w, h = img_size, img_size\n\n    if coef >= 1:\n        w = int(round(img_size / 8 * coef) * 8)\n    else:\n        h = int(round(img_size / 8 / coef) * 8)\n\n    images = images.resize((w, h), resample=PIL_INTERPOLATION[\"bicubic\"], reducing_gap=None)\n\n    return images\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> from diffusers import IFImg2ImgPipeline, IFImg2ImgSuperResolutionPipeline, DiffusionPipeline\n        >>> from diffusers.utils import pt_to_pil\n        >>> import torch\n        >>> from PIL import Image\n        >>> import requests\n        >>> from io import BytesIO\n\n        >>> url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n        >>> response = requests.get(url)\n        >>> original_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> original_image = original_image.resize((768, 512))\n\n        >>> pipe = IFImg2ImgPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-I-XL-v1.0\",\n        ...     variant=\"fp16\",\n        ...     torch_dtype=torch.float16,\n        ... )\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = \"A fantasy landscape in style minecraft\"\n        >>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)\n\n        >>> image = pipe(\n        ...     image=original_image,\n        ...     prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ...     output_type=\"pt\",\n        ... ).images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> super_res_1_pipe = IFImg2ImgSuperResolutionPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-II-L-v1.0\",\n        ...     text_encoder=None,\n        ...     variant=\"fp16\",\n        ...     torch_dtype=torch.float16,\n        ... )\n        >>> super_res_1_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_1_pipe(\n        ...     image=image,\n        ...     original_image=original_image,\n        ...     prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ... 
).images\n        >>> image[0].save(\"./if_stage_II.png\")\n        ```\n\"\"\"\n\n\nclass IFImg2ImgPipeline(DiffusionPipeline, LoraLoaderMixin):\n    tokenizer: T5Tokenizer\n    text_encoder: T5EncoderModel\n\n    unet: UNet2DConditionModel\n    scheduler: DDPMScheduler\n\n    feature_extractor: Optional[CLIPImageProcessor]\n    safety_checker: Optional[IFSafetyChecker]\n\n    watermarker: Optional[IFWatermarker]\n\n    bad_punct_regex = re.compile(\n        r\"[\" + \"#®•©™&@·º½¾¿¡§~\" + \"\\)\" + \"\\(\" + \"\\]\" + \"\\[\" + \"\\}\" + \"\\{\" + \"\\|\" + \"\\\\\" + \"\\/\" + \"\\*\" + r\"]{1,}\"\n    )  # noqa\n\n    _optional_components = [\"tokenizer\", \"text_encoder\", \"safety_checker\", \"feature_extractor\", \"watermarker\"]\n\n    def __init__(\n        self,\n        tokenizer: T5Tokenizer,\n        text_encoder: T5EncoderModel,\n        unet: UNet2DConditionModel,\n        scheduler: DDPMScheduler,\n        safety_checker: Optional[IFSafetyChecker],\n        feature_extractor: Optional[CLIPImageProcessor],\n        watermarker: Optional[IFWatermarker],\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the IF license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            watermarker=watermarker,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.text_encoder,\n            self.unet,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n\n        if self.text_encoder is not None:\n            _, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)\n\n            # Accelerate will move the next model to the device _before_ calling the offload hook of the\n            # previous model. This will cause both models to be present on the device at the same time.\n            # IF uses T5 for its text encoder which is really large. 
We can manually call the offload\n            # hook for the text encoder to ensure it's moved to the cpu before the unet is moved to\n            # the GPU.\n            self.text_encoder_offload_hook = hook\n\n        _, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)\n\n        # if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet\n        self.unet_offload_hook = hook\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.remove_all_hooks\n    def remove_all_hooks(self):\n        if is_accelerate_available():\n            from accelerate.hooks import remove_hook_from_module\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        for model in [self.text_encoder, self.unet, self.safety_checker]:\n            if model is not None:\n                remove_hook_from_module(model, recurse=True)\n\n        self.unet_offload_hook = None\n        self.text_encoder_offload_hook = None\n        self.final_offload_hook = None\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.encode_prompt\n    def encode_prompt(\n        self,\n        prompt,\n        do_classifier_free_guidance=True,\n        num_images_per_prompt=1,\n        device=None,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        clean_caption: bool = False,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`, *optional*):\n                torch device to place the resulting embeddings on\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. If not defined, one has to pass `negative_prompt_embeds`. 
instead.\n                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if device is None:\n            device = self._execution_device\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        # while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF\n        max_length = 77\n\n        if prompt_embeds is None:\n            prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {max_length} tokens: {removed_text}\"\n                )\n\n            attention_mask = text_inputs.attention_mask.to(device)\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        if self.text_encoder is not None:\n            dtype = self.text_encoder.dtype\n        elif self.unet is not None:\n            dtype = self.unet.dtype\n        else:\n            dtype = None\n\n        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if 
do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_attention_mask=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n        else:\n            negative_prompt_embeds = None\n\n        return prompt_embeds, negative_prompt_embeds\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be 
ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        batch_size,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        if isinstance(image, list):\n            check_image_type = image[0]\n        else:\n            check_image_type = image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] 
but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(image, list):\n            image_batch_size = len(image)\n        elif isinstance(image, torch.Tensor):\n            image_batch_size = image.shape[0]\n        elif isinstance(image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(image, np.ndarray):\n            image_batch_size = image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(f\"image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\")\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._text_preprocessing\n    def _text_preprocessing(self, text, clean_caption=False):\n        if clean_caption and not is_bs4_available():\n            logger.warn(BACKENDS_MAPPING[\"bs4\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if clean_caption and not is_ftfy_available():\n            logger.warn(BACKENDS_MAPPING[\"ftfy\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if not isinstance(text, (tuple, list)):\n            text = [text]\n\n        def process(text: str):\n            if clean_caption:\n                text = self._clean_caption(text)\n                text = self._clean_caption(text)\n            else:\n                text = text.lower().strip()\n            return text\n\n        return [process(t) for t in text]\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._clean_caption\n    def _clean_caption(self, caption):\n        caption = str(caption)\n        caption = ul.unquote_plus(caption)\n        caption = caption.strip().lower()\n        caption = re.sub(\"<person>\", \"person\", caption)\n        # urls:\n        caption = re.sub(\n            r\"\\b((?:https?:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        caption = re.sub(\n            r\"\\b((?:www:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        # html:\n        caption = BeautifulSoup(caption, features=\"html.parser\").text\n\n        # @<nickname>\n        caption = re.sub(r\"@[\\w\\d]+\\b\", \"\", caption)\n\n        # 31C0—31EF CJK Strokes\n        # 31F0—31FF Katakana Phonetic Extensions\n        # 3200—32FF Enclosed CJK Letters and Months\n        # 3300—33FF CJK Compatibility\n        # 3400—4DBF CJK Unified Ideographs Extension A\n        # 4DC0—4DFF Yijing Hexagram Symbols\n        # 4E00—9FFF CJK Unified Ideographs\n        caption = re.sub(r\"[\\u31c0-\\u31ef]+\", \"\", caption)\n        caption = re.sub(r\"[\\u31f0-\\u31ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3200-\\u32ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3300-\\u33ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3400-\\u4dbf]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4dc0-\\u4dff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4e00-\\u9fff]+\", \"\", caption)\n        #######################################################\n\n        # все виды тире / all types of dash --> 
\"-\"\n        caption = re.sub(\n            r\"[\\u002D\\u058A\\u05BE\\u1400\\u1806\\u2010-\\u2015\\u2E17\\u2E1A\\u2E3A\\u2E3B\\u2E40\\u301C\\u3030\\u30A0\\uFE31\\uFE32\\uFE58\\uFE63\\uFF0D]+\",  # noqa\n            \"-\",\n            caption,\n        )\n\n        # кавычки к одному стандарту\n        caption = re.sub(r\"[`´«»“”¨]\", '\"', caption)\n        caption = re.sub(r\"[‘’]\", \"'\", caption)\n\n        # &quot;\n        caption = re.sub(r\"&quot;?\", \"\", caption)\n        # &amp\n        caption = re.sub(r\"&amp\", \"\", caption)\n\n        # ip adresses:\n        caption = re.sub(r\"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\", \" \", caption)\n\n        # article ids:\n        caption = re.sub(r\"\\d:\\d\\d\\s+$\", \"\", caption)\n\n        # \\n\n        caption = re.sub(r\"\\\\n\", \" \", caption)\n\n        # \"#123\"\n        caption = re.sub(r\"#\\d{1,3}\\b\", \"\", caption)\n        # \"#12345..\"\n        caption = re.sub(r\"#\\d{5,}\\b\", \"\", caption)\n        # \"123456..\"\n        caption = re.sub(r\"\\b\\d{6,}\\b\", \"\", caption)\n        # filenames:\n        caption = re.sub(r\"[\\S]+\\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)\", \"\", caption)\n\n        #\n        caption = re.sub(r\"[\\\"\\']{2,}\", r'\"', caption)  # \"\"\"AUSVERKAUFT\"\"\"\n        caption = re.sub(r\"[\\.]{2,}\", r\" \", caption)  # \"\"\"AUSVERKAUFT\"\"\"\n\n        caption = re.sub(self.bad_punct_regex, r\" \", caption)  # ***AUSVERKAUFT***, #AUSVERKAUFT\n        caption = re.sub(r\"\\s+\\.\\s+\", r\" \", caption)  # \" . \"\n\n        # this-is-my-cute-cat / this_is_my_cute_cat\n        regex2 = re.compile(r\"(?:\\-|\\_)\")\n        if len(re.findall(regex2, caption)) > 3:\n            caption = re.sub(regex2, \" \", caption)\n\n        caption = ftfy.fix_text(caption)\n        caption = html.unescape(html.unescape(caption))\n\n        caption = re.sub(r\"\\b[a-zA-Z]{1,3}\\d{3,15}\\b\", \"\", caption)  # jc6640\n        caption = re.sub(r\"\\b[a-zA-Z]+\\d+[a-zA-Z]+\\b\", \"\", caption)  # jc6640vc\n        caption = re.sub(r\"\\b\\d+[a-zA-Z]+\\d+\\b\", \"\", caption)  # 6640vc231\n\n        caption = re.sub(r\"(worldwide\\s+)?(free\\s+)?shipping\", \"\", caption)\n        caption = re.sub(r\"(free\\s)?download(\\sfree)?\", \"\", caption)\n        caption = re.sub(r\"\\bclick\\b\\s(?:for|on)\\s\\w+\", \"\", caption)\n        caption = re.sub(r\"\\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\\simage[s]?)?\", \"\", caption)\n        caption = re.sub(r\"\\bpage\\s+\\d+\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\d*[a-zA-Z]+\\d+[a-zA-Z]+\\d+[a-zA-Z\\d]*\\b\", r\" \", caption)  # j2d1a2a...\n\n        caption = re.sub(r\"\\b\\d+\\.?\\d*[xх×]\\d+\\.?\\d*\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\s+\\:\\s+\", r\": \", caption)\n        caption = re.sub(r\"(\\D[,\\./])\\b\", r\"\\1 \", caption)\n        caption = re.sub(r\"\\s+\", \" \", caption)\n\n        caption.strip()\n\n        caption = re.sub(r\"^[\\\"\\']([\\w\\W]+)[\\\"\\']$\", r\"\\1\", caption)\n        caption = re.sub(r\"^[\\'\\_,\\-\\:;]\", r\"\", caption)\n        caption = re.sub(r\"[\\'\\_,\\-\\:\\-\\+]$\", r\"\", caption)\n        caption = re.sub(r\"^\\.\\S+$\", \"\", caption)\n\n        return caption.strip()\n\n    def preprocess_image(self, image: PIL.Image.Image) -> torch.Tensor:\n        if not isinstance(image, list):\n            image = [image]\n\n        def numpy_to_pt(images):\n            if images.ndim == 3:\n                images = images[..., None]\n\n            images = 
torch.from_numpy(images.transpose(0, 3, 1, 2))\n            return images\n\n        if isinstance(image[0], PIL.Image.Image):\n            new_image = []\n\n            for image_ in image:\n                image_ = image_.convert(\"RGB\")\n                image_ = resize(image_, self.unet.sample_size)\n                image_ = np.array(image_)\n                image_ = image_.astype(np.float32)\n                image_ = image_ / 127.5 - 1\n                new_image.append(image_)\n\n            image = new_image\n\n            image = np.stack(image, axis=0)  # to np\n            image = numpy_to_pt(image)  # to pt\n\n        elif isinstance(image[0], np.ndarray):\n            image = np.concatenate(image, axis=0) if image[0].ndim == 4 else np.stack(image, axis=0)\n            image = numpy_to_pt(image)\n\n        elif isinstance(image[0], torch.Tensor):\n            image = torch.cat(image, axis=0) if image[0].ndim == 4 else torch.stack(image, axis=0)\n\n        return image\n\n    def get_timesteps(self, num_inference_steps, strength):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start:]\n\n        return timesteps, num_inference_steps - t_start\n\n    def prepare_intermediate_images(\n        self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None\n    ):\n        _, channels, height, width = image.shape\n\n        batch_size = batch_size * num_images_per_prompt\n\n        shape = (batch_size, channels, height, width)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n        image = self.scheduler.add_noise(image, noise, timestep)\n\n        return image\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[\n            PIL.Image.Image, torch.Tensor, np.ndarray, List[PIL.Image.Image], List[torch.Tensor], List[np.ndarray]\n        ] = None,\n        strength: float = 0.7,\n        num_inference_steps: int = 80,\n        timesteps: List[int] = None,\n        guidance_scale: float = 10.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        clean_caption: bool = True,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            timesteps (`List[int]`, *optional*):\n                Custom timesteps to use for the denoising process. If not defined, equal spaced `num_inference_steps`\n                timesteps are used. Must be in descending order.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. 
Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            clean_caption (`bool`, *optional*, defaults to `True`):\n                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to\n                be installed. 
If the dependencies are not installed, the embeddings will be created from the raw\n                prompt.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images, and the second element is a list\n            of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\" (nsfw)\n            or watermarked content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        self.check_inputs(\n            prompt, image, batch_size, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds, negative_prompt_embeds = self.encode_prompt(\n            prompt,\n            do_classifier_free_guidance,\n            num_images_per_prompt=num_images_per_prompt,\n            device=device,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            clean_caption=clean_caption,\n        )\n\n        if do_classifier_free_guidance:\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        dtype = prompt_embeds.dtype\n\n        # 4. Prepare timesteps\n        if timesteps is not None:\n            self.scheduler.set_timesteps(timesteps=timesteps, device=device)\n            timesteps = self.scheduler.timesteps\n            num_inference_steps = len(timesteps)\n        else:\n            self.scheduler.set_timesteps(num_inference_steps, device=device)\n            timesteps = self.scheduler.timesteps\n\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength)\n\n        # 5. Prepare intermediate images\n        image = self.preprocess_image(image)\n        image = image.to(device=device, dtype=dtype)\n\n        noise_timestep = timesteps[0:1]\n        noise_timestep = noise_timestep.repeat(batch_size * num_images_per_prompt)\n\n        intermediate_images = self.prepare_intermediate_images(\n            image, noise_timestep, batch_size, num_images_per_prompt, dtype, device, generator\n        )\n\n        # 6. Prepare extra step kwargs. 
TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # HACK: see comment in `enable_model_cpu_offload`\n        if hasattr(self, \"text_encoder_offload_hook\") and self.text_encoder_offload_hook is not None:\n            self.text_encoder_offload_hook.offload()\n\n        # 7. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                model_input = (\n                    torch.cat([intermediate_images] * 2) if do_classifier_free_guidance else intermediate_images\n                )\n                model_input = self.scheduler.scale_model_input(model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1], dim=1)\n                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1], dim=1)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n                if self.scheduler.config.variance_type not in [\"learned\", \"learned_range\"]:\n                    noise_pred, _ = noise_pred.split(model_input.shape[1], dim=1)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                intermediate_images = self.scheduler.step(\n                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False\n                )[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, intermediate_images)\n\n        image = intermediate_images\n\n        if output_type == \"pil\":\n            # 8. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 9. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            # 10. Convert to PIL\n            image = self.numpy_to_pil(image)\n\n            # 11. Apply watermark\n            if self.watermarker is not None:\n                self.watermarker.apply_watermark(image, self.unet.config.sample_size)\n        elif output_type == \"pt\":\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n        else:\n            # 8. 
Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 9. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, nsfw_detected, watermark_detected)\n\n        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)\n"
  },
  {
    "path": "diffusers/pipelines/deepfloyd_if/pipeline_if_img2img_superresolution.py",
    "content": "import html\nimport inspect\nimport re\nimport urllib.parse as ul\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer\n\nfrom ...models import UNet2DConditionModel\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import (\n    BACKENDS_MAPPING,\n    PIL_INTERPOLATION,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_ftfy_available,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import IFPipelineOutput\nfrom .safety_checker import IFSafetyChecker\nfrom .watermark import IFWatermarker\n\n\nif is_bs4_available():\n    from bs4 import BeautifulSoup\n\nif is_ftfy_available():\n    import ftfy\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.resize\ndef resize(images: PIL.Image.Image, img_size: int) -> PIL.Image.Image:\n    w, h = images.size\n\n    coef = w / h\n\n    w, h = img_size, img_size\n\n    if coef >= 1:\n        w = int(round(img_size / 8 * coef) * 8)\n    else:\n        h = int(round(img_size / 8 / coef) * 8)\n\n    images = images.resize((w, h), resample=PIL_INTERPOLATION[\"bicubic\"], reducing_gap=None)\n\n    return images\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> from diffusers import IFImg2ImgPipeline, IFImg2ImgSuperResolutionPipeline, DiffusionPipeline\n        >>> from diffusers.utils import pt_to_pil\n        >>> import torch\n        >>> from PIL import Image\n        >>> import requests\n        >>> from io import BytesIO\n\n        >>> url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n        >>> response = requests.get(url)\n        >>> original_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> original_image = original_image.resize((768, 512))\n\n        >>> pipe = IFImg2ImgPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-I-XL-v1.0\",\n        ...     variant=\"fp16\",\n        ...     torch_dtype=torch.float16,\n        ... )\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = \"A fantasy landscape in style minecraft\"\n        >>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)\n\n        >>> image = pipe(\n        ...     image=original_image,\n        ...     prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ...     output_type=\"pt\",\n        ... ).images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> super_res_1_pipe = IFImg2ImgSuperResolutionPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-II-L-v1.0\",\n        ...     text_encoder=None,\n        ...     variant=\"fp16\",\n        ...     torch_dtype=torch.float16,\n        ... )\n        >>> super_res_1_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_1_pipe(\n        ...     image=image,\n        ...     original_image=original_image,\n        ...     prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ... 
).images\n        >>> image[0].save(\"./if_stage_II.png\")\n        ```\n\"\"\"\n\n\nclass IFImg2ImgSuperResolutionPipeline(DiffusionPipeline):\n    tokenizer: T5Tokenizer\n    text_encoder: T5EncoderModel\n\n    unet: UNet2DConditionModel\n    scheduler: DDPMScheduler\n    image_noising_scheduler: DDPMScheduler\n\n    feature_extractor: Optional[CLIPImageProcessor]\n    safety_checker: Optional[IFSafetyChecker]\n\n    watermarker: Optional[IFWatermarker]\n\n    bad_punct_regex = re.compile(\n        r\"[\" + \"#®•©™&@·º½¾¿¡§~\" + \"\\)\" + \"\\(\" + \"\\]\" + \"\\[\" + \"\\}\" + \"\\{\" + \"\\|\" + \"\\\\\" + \"\\/\" + \"\\*\" + r\"]{1,}\"\n    )  # noqa\n\n    _optional_components = [\"tokenizer\", \"text_encoder\", \"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        tokenizer: T5Tokenizer,\n        text_encoder: T5EncoderModel,\n        unet: UNet2DConditionModel,\n        scheduler: DDPMScheduler,\n        image_noising_scheduler: DDPMScheduler,\n        safety_checker: Optional[IFSafetyChecker],\n        feature_extractor: Optional[CLIPImageProcessor],\n        watermarker: Optional[IFWatermarker],\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the IF license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        if unet.config.in_channels != 6:\n            logger.warn(\n                f\"It seems like you have loaded a checkpoint that should not be used for super resolution from {unet.config._name_or_path} as it accepts {unet.config.in_channels} input channels instead of 6. 
Please make sure to pass a super resolution checkpoint as the `'unet'`: IFSuperResolutionPipeline.from_pretrained(unet=super_resolution_unet, ...)`.\"\n            )\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            image_noising_scheduler=image_noising_scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            watermarker=watermarker,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.text_encoder,\n            self.unet,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n\n        if self.text_encoder is not None:\n            _, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)\n\n            # Accelerate will move the next model to the device _before_ calling the offload hook of the\n            # previous model. This will cause both models to be present on the device at the same time.\n            # IF uses T5 for its text encoder which is really large. 
We can manually call the offload\n            # hook for the text encoder to ensure it's moved to the cpu before the unet is moved to\n            # the GPU.\n            self.text_encoder_offload_hook = hook\n\n        _, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)\n\n        # if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet\n        self.unet_offload_hook = hook\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.remove_all_hooks\n    def remove_all_hooks(self):\n        if is_accelerate_available():\n            from accelerate.hooks import remove_hook_from_module\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        for model in [self.text_encoder, self.unet, self.safety_checker]:\n            if model is not None:\n                remove_hook_from_module(model, recurse=True)\n\n        self.unet_offload_hook = None\n        self.text_encoder_offload_hook = None\n        self.final_offload_hook = None\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._text_preprocessing\n    def _text_preprocessing(self, text, clean_caption=False):\n        if clean_caption and not is_bs4_available():\n            logger.warn(BACKENDS_MAPPING[\"bs4\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if clean_caption and not is_ftfy_available():\n            logger.warn(BACKENDS_MAPPING[\"ftfy\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if not isinstance(text, (tuple, list)):\n            text = [text]\n\n        def process(text: str):\n            if clean_caption:\n                text = self._clean_caption(text)\n                text = self._clean_caption(text)\n            else:\n                text = text.lower().strip()\n            return text\n\n        return [process(t) for t in text]\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._clean_caption\n    def _clean_caption(self, caption):\n        caption = str(caption)\n        caption = ul.unquote_plus(caption)\n        caption = caption.strip().lower()\n        caption = re.sub(\"<person>\", \"person\", caption)\n        # urls:\n        caption = re.sub(\n            r\"\\b((?:https?:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        caption = re.sub(\n            r\"\\b((?:www:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        # html:\n        caption = BeautifulSoup(caption, features=\"html.parser\").text\n\n        # @<nickname>\n        caption = re.sub(r\"@[\\w\\d]+\\b\", \"\", caption)\n\n        # 31C0—31EF CJK Strokes\n        # 31F0—31FF Katakana Phonetic Extensions\n        # 3200—32FF Enclosed CJK Letters and Months\n        # 3300—33FF CJK Compatibility\n        # 3400—4DBF 
CJK Unified Ideographs Extension A\n        # 4DC0—4DFF Yijing Hexagram Symbols\n        # 4E00—9FFF CJK Unified Ideographs\n        caption = re.sub(r\"[\\u31c0-\\u31ef]+\", \"\", caption)\n        caption = re.sub(r\"[\\u31f0-\\u31ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3200-\\u32ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3300-\\u33ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3400-\\u4dbf]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4dc0-\\u4dff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4e00-\\u9fff]+\", \"\", caption)\n        #######################################################\n\n        # все виды тире / all types of dash --> \"-\"\n        caption = re.sub(\n            r\"[\\u002D\\u058A\\u05BE\\u1400\\u1806\\u2010-\\u2015\\u2E17\\u2E1A\\u2E3A\\u2E3B\\u2E40\\u301C\\u3030\\u30A0\\uFE31\\uFE32\\uFE58\\uFE63\\uFF0D]+\",  # noqa\n            \"-\",\n            caption,\n        )\n\n        # кавычки к одному стандарту\n        caption = re.sub(r\"[`´«»“”¨]\", '\"', caption)\n        caption = re.sub(r\"[‘’]\", \"'\", caption)\n\n        # &quot;\n        caption = re.sub(r\"&quot;?\", \"\", caption)\n        # &amp\n        caption = re.sub(r\"&amp\", \"\", caption)\n\n        # ip adresses:\n        caption = re.sub(r\"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\", \" \", caption)\n\n        # article ids:\n        caption = re.sub(r\"\\d:\\d\\d\\s+$\", \"\", caption)\n\n        # \\n\n        caption = re.sub(r\"\\\\n\", \" \", caption)\n\n        # \"#123\"\n        caption = re.sub(r\"#\\d{1,3}\\b\", \"\", caption)\n        # \"#12345..\"\n        caption = re.sub(r\"#\\d{5,}\\b\", \"\", caption)\n        # \"123456..\"\n        caption = re.sub(r\"\\b\\d{6,}\\b\", \"\", caption)\n        # filenames:\n        caption = re.sub(r\"[\\S]+\\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)\", \"\", caption)\n\n        #\n        caption = re.sub(r\"[\\\"\\']{2,}\", r'\"', caption)  # \"\"\"AUSVERKAUFT\"\"\"\n        caption = re.sub(r\"[\\.]{2,}\", r\" \", caption)  # \"\"\"AUSVERKAUFT\"\"\"\n\n        caption = re.sub(self.bad_punct_regex, r\" \", caption)  # ***AUSVERKAUFT***, #AUSVERKAUFT\n        caption = re.sub(r\"\\s+\\.\\s+\", r\" \", caption)  # \" . 
\"\n\n        # this-is-my-cute-cat / this_is_my_cute_cat\n        regex2 = re.compile(r\"(?:\\-|\\_)\")\n        if len(re.findall(regex2, caption)) > 3:\n            caption = re.sub(regex2, \" \", caption)\n\n        caption = ftfy.fix_text(caption)\n        caption = html.unescape(html.unescape(caption))\n\n        caption = re.sub(r\"\\b[a-zA-Z]{1,3}\\d{3,15}\\b\", \"\", caption)  # jc6640\n        caption = re.sub(r\"\\b[a-zA-Z]+\\d+[a-zA-Z]+\\b\", \"\", caption)  # jc6640vc\n        caption = re.sub(r\"\\b\\d+[a-zA-Z]+\\d+\\b\", \"\", caption)  # 6640vc231\n\n        caption = re.sub(r\"(worldwide\\s+)?(free\\s+)?shipping\", \"\", caption)\n        caption = re.sub(r\"(free\\s)?download(\\sfree)?\", \"\", caption)\n        caption = re.sub(r\"\\bclick\\b\\s(?:for|on)\\s\\w+\", \"\", caption)\n        caption = re.sub(r\"\\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\\simage[s]?)?\", \"\", caption)\n        caption = re.sub(r\"\\bpage\\s+\\d+\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\d*[a-zA-Z]+\\d+[a-zA-Z]+\\d+[a-zA-Z\\d]*\\b\", r\" \", caption)  # j2d1a2a...\n\n        caption = re.sub(r\"\\b\\d+\\.?\\d*[xх×]\\d+\\.?\\d*\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\s+\\:\\s+\", r\": \", caption)\n        caption = re.sub(r\"(\\D[,\\./])\\b\", r\"\\1 \", caption)\n        caption = re.sub(r\"\\s+\", \" \", caption)\n\n        caption.strip()\n\n        caption = re.sub(r\"^[\\\"\\']([\\w\\W]+)[\\\"\\']$\", r\"\\1\", caption)\n        caption = re.sub(r\"^[\\'\\_,\\-\\:;]\", r\"\", caption)\n        caption = re.sub(r\"[\\'\\_,\\-\\:\\-\\+]$\", r\"\", caption)\n        caption = re.sub(r\"^\\.\\S+$\", \"\", caption)\n\n        return caption.strip()\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.encode_prompt\n    def encode_prompt(\n        self,\n        prompt,\n        do_classifier_free_guidance=True,\n        num_images_per_prompt=1,\n        device=None,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        clean_caption: bool = False,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`, *optional*):\n                torch device to place the resulting embeddings on\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead.\n                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if device is None:\n            device = self._execution_device\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        # while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF\n        max_length = 77\n\n        if prompt_embeds is None:\n            prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {max_length} tokens: {removed_text}\"\n                )\n\n            attention_mask = text_inputs.attention_mask.to(device)\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        if self.text_encoder is not None:\n            dtype = self.text_encoder.dtype\n        elif self.unet is not None:\n            dtype = self.unet.dtype\n        else:\n            dtype = None\n\n        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_attention_mask=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n        else:\n            negative_prompt_embeds = None\n\n        return prompt_embeds, negative_prompt_embeds\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            
extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        original_image,\n        batch_size,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        # image\n\n        if isinstance(image, list):\n            check_image_type = image[0]\n        else:\n            check_image_type = image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] 
but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(image, list):\n            image_batch_size = len(image)\n        elif isinstance(image, torch.Tensor):\n            image_batch_size = image.shape[0]\n        elif isinstance(image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(image, np.ndarray):\n            image_batch_size = image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(f\"image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\")\n\n        # original_image\n\n        if isinstance(original_image, list):\n            check_image_type = original_image[0]\n        else:\n            check_image_type = original_image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`original_image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(original_image, list):\n            image_batch_size = len(original_image)\n        elif isinstance(original_image, torch.Tensor):\n            image_batch_size = original_image.shape[0]\n        elif isinstance(original_image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(original_image, np.ndarray):\n            image_batch_size = original_image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(\n                f\"original_image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\"\n            )\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.preprocess_image with preprocess_image -> preprocess_original_image\n    def preprocess_original_image(self, image: PIL.Image.Image) -> torch.Tensor:\n        if not isinstance(image, list):\n            image = [image]\n\n        def numpy_to_pt(images):\n            if images.ndim == 3:\n                images = images[..., None]\n\n            images = torch.from_numpy(images.transpose(0, 3, 1, 2))\n            return images\n\n        if isinstance(image[0], PIL.Image.Image):\n            new_image = []\n\n            for image_ in image:\n                image_ = image_.convert(\"RGB\")\n                image_ = resize(image_, self.unet.sample_size)\n                image_ = np.array(image_)\n                image_ = image_.astype(np.float32)\n                image_ = image_ / 127.5 - 1\n                new_image.append(image_)\n\n            image = new_image\n\n            image = np.stack(image, axis=0)  # to np\n            image = numpy_to_pt(image)  # to pt\n\n        elif isinstance(image[0], np.ndarray):\n            image = np.concatenate(image, axis=0) if image[0].ndim == 4 else np.stack(image, axis=0)\n            image = numpy_to_pt(image)\n\n        elif isinstance(image[0], torch.Tensor):\n            image = torch.cat(image, axis=0) if image[0].ndim == 4 else torch.stack(image, axis=0)\n\n        return image\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_superresolution.IFSuperResolutionPipeline.preprocess_image\n    def preprocess_image(self, image: PIL.Image.Image, 
num_images_per_prompt, device) -> torch.Tensor:\n        if not isinstance(image, torch.Tensor) and not isinstance(image, list):\n            image = [image]\n\n        if isinstance(image[0], PIL.Image.Image):\n            image = [np.array(i).astype(np.float32) / 255.0 for i in image]\n\n            image = np.stack(image, axis=0)  # to np\n            image = torch.from_numpy(image.transpose(0, 3, 1, 2))\n        elif isinstance(image[0], np.ndarray):\n            image = np.stack(image, axis=0)  # to np\n            if image.ndim == 5:\n                image = image[0]\n\n            image = torch.from_numpy(image.transpose(0, 3, 1, 2))\n        elif isinstance(image, list) and isinstance(image[0], torch.Tensor):\n            dims = image[0].ndim\n\n            if dims == 3:\n                image = torch.stack(image, dim=0)\n            elif dims == 4:\n                image = torch.concat(image, dim=0)\n            else:\n                raise ValueError(f\"Image must have 3 or 4 dimensions, instead got {dims}\")\n\n        image = image.to(device=device, dtype=self.unet.dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n\n        return image\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start:]\n\n        return timesteps, num_inference_steps - t_start\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.prepare_intermediate_images\n    def prepare_intermediate_images(\n        self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None\n    ):\n        _, channels, height, width = image.shape\n\n        batch_size = batch_size * num_images_per_prompt\n\n        shape = (batch_size, channels, height, width)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n        image = self.scheduler.add_noise(image, noise, timestep)\n\n        return image\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        image: Union[PIL.Image.Image, np.ndarray, torch.FloatTensor],\n        original_image: Union[\n            PIL.Image.Image, torch.Tensor, np.ndarray, List[PIL.Image.Image], List[torch.Tensor], List[np.ndarray]\n        ] = None,\n        strength: float = 0.8,\n        prompt: Union[str, List[str]] = None,\n        num_inference_steps: int = 50,\n        timesteps: List[int] = None,\n        guidance_scale: float = 4.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        noise_level: int = 250,\n        clean_caption: bool = True,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            original_image (`torch.FloatTensor` or `PIL.Image.Image`):\n                The original image that `image` was varied from.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            timesteps (`List[int]`, *optional*):\n                Custom timesteps to use for the denoising process. If not defined, equal spaced `num_inference_steps`\n                timesteps are used. 
Must be in descending order.\n            guidance_scale (`float`, *optional*, defaults to 4.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            noise_level (`int`, *optional*, defaults to 250):\n                The amount of noise to add to the upscaled image. Must be in the range `[0, 1000)`\n            clean_caption (`bool`, *optional*, defaults to `True`):\n                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to\n                be installed. If the dependencies are not installed, the embeddings will be created from the raw\n                prompt.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images, and the second element is a list\n            of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\" (nsfw)\n            or watermarked content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        self.check_inputs(\n            prompt,\n            image,\n            original_image,\n            batch_size,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        device = self._execution_device\n\n        # 3. Encode input prompt\n        prompt_embeds, negative_prompt_embeds = self.encode_prompt(\n            prompt,\n            do_classifier_free_guidance,\n            num_images_per_prompt=num_images_per_prompt,\n            device=device,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            clean_caption=clean_caption,\n        )\n\n        if do_classifier_free_guidance:\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        dtype = prompt_embeds.dtype\n\n        # 4. Prepare timesteps\n        if timesteps is not None:\n            self.scheduler.set_timesteps(timesteps=timesteps, device=device)\n            timesteps = self.scheduler.timesteps\n            num_inference_steps = len(timesteps)\n        else:\n            self.scheduler.set_timesteps(num_inference_steps, device=device)\n            timesteps = self.scheduler.timesteps\n\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength)\n\n        # 5. 
prepare original image\n        original_image = self.preprocess_original_image(original_image)\n        original_image = original_image.to(device=device, dtype=dtype)\n\n        # 6. Prepare intermediate images\n        noise_timestep = timesteps[0:1]\n        noise_timestep = noise_timestep.repeat(batch_size * num_images_per_prompt)\n\n        intermediate_images = self.prepare_intermediate_images(\n            original_image,\n            noise_timestep,\n            batch_size,\n            num_images_per_prompt,\n            dtype,\n            device,\n            generator,\n        )\n\n        # 7. Prepare upscaled image and noise level\n        _, _, height, width = original_image.shape\n\n        image = self.preprocess_image(image, num_images_per_prompt, device)\n\n        upscaled = F.interpolate(image, (height, width), mode=\"bilinear\", align_corners=True)\n\n        noise_level = torch.tensor([noise_level] * upscaled.shape[0], device=upscaled.device)\n        noise = randn_tensor(upscaled.shape, generator=generator, device=upscaled.device, dtype=upscaled.dtype)\n        upscaled = self.image_noising_scheduler.add_noise(upscaled, noise, timesteps=noise_level)\n\n        if do_classifier_free_guidance:\n            noise_level = torch.cat([noise_level] * 2)\n\n        # 8. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # HACK: see comment in `enable_model_cpu_offload`\n        if hasattr(self, \"text_encoder_offload_hook\") and self.text_encoder_offload_hook is not None:\n            self.text_encoder_offload_hook.offload()\n\n        # 9. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                model_input = torch.cat([intermediate_images, upscaled], dim=1)\n\n                model_input = torch.cat([model_input] * 2) if do_classifier_free_guidance else model_input\n                model_input = self.scheduler.scale_model_input(model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    class_labels=noise_level,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1] // 2, dim=1)\n                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1] // 2, dim=1)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                intermediate_images = self.scheduler.step(\n                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False\n                )[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % 
self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, intermediate_images)\n\n        image = intermediate_images\n\n        if output_type == \"pil\":\n            # 10. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 11. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            # 12. Convert to PIL\n            image = self.numpy_to_pil(image)\n\n            # 13. Apply watermark\n            if self.watermarker is not None:\n                self.watermarker.apply_watermark(image, self.unet.config.sample_size)\n        elif output_type == \"pt\":\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n        else:\n            # 10. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 11. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, nsfw_detected, watermark_detected)\n\n        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)\n"
  },
  {
    "path": "diffusers/pipelines/deepfloyd_if/pipeline_if_inpainting.py",
    "content": "import html\nimport inspect\nimport re\nimport urllib.parse as ul\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer\n\nfrom ...loaders import LoraLoaderMixin\nfrom ...models import UNet2DConditionModel\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import (\n    BACKENDS_MAPPING,\n    PIL_INTERPOLATION,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_ftfy_available,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import IFPipelineOutput\nfrom .safety_checker import IFSafetyChecker\nfrom .watermark import IFWatermarker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nif is_bs4_available():\n    from bs4 import BeautifulSoup\n\nif is_ftfy_available():\n    import ftfy\n\n\n# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.resize\ndef resize(images: PIL.Image.Image, img_size: int) -> PIL.Image.Image:\n    w, h = images.size\n\n    coef = w / h\n\n    w, h = img_size, img_size\n\n    if coef >= 1:\n        w = int(round(img_size / 8 * coef) * 8)\n    else:\n        h = int(round(img_size / 8 / coef) * 8)\n\n    images = images.resize((w, h), resample=PIL_INTERPOLATION[\"bicubic\"], reducing_gap=None)\n\n    return images\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> from diffusers import IFInpaintingPipeline, IFInpaintingSuperResolutionPipeline, DiffusionPipeline\n        >>> from diffusers.utils import pt_to_pil\n        >>> import torch\n        >>> from PIL import Image\n        >>> import requests\n        >>> from io import BytesIO\n\n        >>> url = \"https://huggingface.co/datasets/diffusers/docs-images/resolve/main/if/person.png\"\n        >>> response = requests.get(url)\n        >>> original_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> original_image = original_image\n\n        >>> url = \"https://huggingface.co/datasets/diffusers/docs-images/resolve/main/if/glasses_mask.png\"\n        >>> response = requests.get(url)\n        >>> mask_image = Image.open(BytesIO(response.content))\n        >>> mask_image = mask_image\n\n        >>> pipe = IFInpaintingPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-I-XL-v1.0\", variant=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = \"blue sunglasses\"\n        >>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)\n\n        >>> image = pipe(\n        ...     image=original_image,\n        ...     mask_image=mask_image,\n        ...     prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ...     output_type=\"pt\",\n        ... ).images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> super_res_1_pipe = IFInpaintingSuperResolutionPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-II-L-v1.0\", text_encoder=None, variant=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> super_res_1_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_1_pipe(\n        ...     image=image,\n        ...     mask_image=mask_image,\n        ...     original_image=original_image,\n        ...     
prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ... ).images\n        >>> image[0].save(\"./if_stage_II.png\")\n        ```\n\"\"\"\n\n\nclass IFInpaintingPipeline(DiffusionPipeline, LoraLoaderMixin):\n    tokenizer: T5Tokenizer\n    text_encoder: T5EncoderModel\n\n    unet: UNet2DConditionModel\n    scheduler: DDPMScheduler\n\n    feature_extractor: Optional[CLIPImageProcessor]\n    safety_checker: Optional[IFSafetyChecker]\n\n    watermarker: Optional[IFWatermarker]\n\n    bad_punct_regex = re.compile(\n        r\"[\" + \"#®•©™&@·º½¾¿¡§~\" + \"\\)\" + \"\\(\" + \"\\]\" + \"\\[\" + \"\\}\" + \"\\{\" + \"\\|\" + \"\\\\\" + \"\\/\" + \"\\*\" + r\"]{1,}\"\n    )  # noqa\n\n    _optional_components = [\"tokenizer\", \"text_encoder\", \"safety_checker\", \"feature_extractor\", \"watermarker\"]\n\n    def __init__(\n        self,\n        tokenizer: T5Tokenizer,\n        text_encoder: T5EncoderModel,\n        unet: UNet2DConditionModel,\n        scheduler: DDPMScheduler,\n        safety_checker: Optional[IFSafetyChecker],\n        feature_extractor: Optional[CLIPImageProcessor],\n        watermarker: Optional[IFWatermarker],\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the IF license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            watermarker=watermarker,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta')` and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.text_encoder,\n            self.unet,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains on the GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n\n        if self.text_encoder is not None:\n            _, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)\n\n            # Accelerate will move the next model to the device _before_ calling the offload hook of the\n            # previous model. This will cause both models to be present on the device at the same time.\n            # IF uses T5 for its text encoder which is really large. 
We can manually call the offload\n            # hook for the text encoder to ensure it's moved to the CPU before the unet is moved to\n            # the GPU.\n            self.text_encoder_offload_hook = hook\n\n        _, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)\n\n        # if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet\n        self.unet_offload_hook = hook\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.remove_all_hooks\n    def remove_all_hooks(self):\n        if is_accelerate_available():\n            from accelerate.hooks import remove_hook_from_module\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        for model in [self.text_encoder, self.unet, self.safety_checker]:\n            if model is not None:\n                remove_hook_from_module(model, recurse=True)\n\n        self.unet_offload_hook = None\n        self.text_encoder_offload_hook = None\n        self.final_offload_hook = None\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.encode_prompt\n    def encode_prompt(\n        self,\n        prompt,\n        do_classifier_free_guidance=True,\n        num_images_per_prompt=1,\n        device=None,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        clean_caption: bool = False,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device (`torch.device`, *optional*):\n                torch device to place the resulting embeddings on\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` 
instead.\n                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if device is None:\n            device = self._execution_device\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        # while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF\n        max_length = 77\n\n        if prompt_embeds is None:\n            prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {max_length} tokens: {removed_text}\"\n                )\n\n            attention_mask = text_inputs.attention_mask.to(device)\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        if self.text_encoder is not None:\n            dtype = self.text_encoder.dtype\n        elif self.unet is not None:\n            dtype = self.unet.dtype\n        else:\n            dtype = None\n\n        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if 
do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_attention_mask=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n        else:\n            negative_prompt_embeds = None\n\n        return prompt_embeds, negative_prompt_embeds\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be 
ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        mask_image,\n        batch_size,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        # image\n\n        if isinstance(image, list):\n            check_image_type = image[0]\n        else:\n            check_image_type = image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] 
but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(image, list):\n            image_batch_size = len(image)\n        elif isinstance(image, torch.Tensor):\n            image_batch_size = image.shape[0]\n        elif isinstance(image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(image, np.ndarray):\n            image_batch_size = image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(f\"image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\")\n\n        # mask_image\n\n        if isinstance(mask_image, list):\n            check_image_type = mask_image[0]\n        else:\n            check_image_type = mask_image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`mask_image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(mask_image, list):\n            image_batch_size = len(mask_image)\n        elif isinstance(mask_image, torch.Tensor):\n            image_batch_size = mask_image.shape[0]\n        elif isinstance(mask_image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(mask_image, np.ndarray):\n            image_batch_size = mask_image.shape[0]\n        else:\n            assert False\n\n        if image_batch_size != 1 and batch_size != image_batch_size:\n            raise ValueError(\n                f\"mask_image batch size: {image_batch_size} must be `1` or the same as prompt batch size {batch_size}\"\n            )\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._text_preprocessing\n    def _text_preprocessing(self, text, clean_caption=False):\n        if clean_caption and not is_bs4_available():\n            logger.warn(BACKENDS_MAPPING[\"bs4\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if clean_caption and not is_ftfy_available():\n            logger.warn(BACKENDS_MAPPING[\"ftfy\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if not isinstance(text, (tuple, list)):\n            text = [text]\n\n        def process(text: str):\n            if clean_caption:\n                text = self._clean_caption(text)\n                text = self._clean_caption(text)\n            else:\n                text = text.lower().strip()\n            return text\n\n        return [process(t) for t in text]\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._clean_caption\n    def _clean_caption(self, caption):\n        caption = str(caption)\n        caption = ul.unquote_plus(caption)\n        caption = caption.strip().lower()\n        caption = re.sub(\"<person>\", \"person\", caption)\n        # urls:\n        caption = re.sub(\n            r\"\\b((?:https?:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        caption = re.sub(\n      
      r\"\\b((?:www:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        # html:\n        caption = BeautifulSoup(caption, features=\"html.parser\").text\n\n        # @<nickname>\n        caption = re.sub(r\"@[\\w\\d]+\\b\", \"\", caption)\n\n        # 31C0—31EF CJK Strokes\n        # 31F0—31FF Katakana Phonetic Extensions\n        # 3200—32FF Enclosed CJK Letters and Months\n        # 3300—33FF CJK Compatibility\n        # 3400—4DBF CJK Unified Ideographs Extension A\n        # 4DC0—4DFF Yijing Hexagram Symbols\n        # 4E00—9FFF CJK Unified Ideographs\n        caption = re.sub(r\"[\\u31c0-\\u31ef]+\", \"\", caption)\n        caption = re.sub(r\"[\\u31f0-\\u31ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3200-\\u32ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3300-\\u33ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3400-\\u4dbf]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4dc0-\\u4dff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4e00-\\u9fff]+\", \"\", caption)\n        #######################################################\n\n        # все виды тире / all types of dash --> \"-\"\n        caption = re.sub(\n            r\"[\\u002D\\u058A\\u05BE\\u1400\\u1806\\u2010-\\u2015\\u2E17\\u2E1A\\u2E3A\\u2E3B\\u2E40\\u301C\\u3030\\u30A0\\uFE31\\uFE32\\uFE58\\uFE63\\uFF0D]+\",  # noqa\n            \"-\",\n            caption,\n        )\n\n        # кавычки к одному стандарту\n        caption = re.sub(r\"[`´«»“”¨]\", '\"', caption)\n        caption = re.sub(r\"[‘’]\", \"'\", caption)\n\n        # &quot;\n        caption = re.sub(r\"&quot;?\", \"\", caption)\n        # &amp\n        caption = re.sub(r\"&amp\", \"\", caption)\n\n        # ip adresses:\n        caption = re.sub(r\"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\", \" \", caption)\n\n        # article ids:\n        caption = re.sub(r\"\\d:\\d\\d\\s+$\", \"\", caption)\n\n        # \\n\n        caption = re.sub(r\"\\\\n\", \" \", caption)\n\n        # \"#123\"\n        caption = re.sub(r\"#\\d{1,3}\\b\", \"\", caption)\n        # \"#12345..\"\n        caption = re.sub(r\"#\\d{5,}\\b\", \"\", caption)\n        # \"123456..\"\n        caption = re.sub(r\"\\b\\d{6,}\\b\", \"\", caption)\n        # filenames:\n        caption = re.sub(r\"[\\S]+\\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)\", \"\", caption)\n\n        #\n        caption = re.sub(r\"[\\\"\\']{2,}\", r'\"', caption)  # \"\"\"AUSVERKAUFT\"\"\"\n        caption = re.sub(r\"[\\.]{2,}\", r\" \", caption)  # \"\"\"AUSVERKAUFT\"\"\"\n\n        caption = re.sub(self.bad_punct_regex, r\" \", caption)  # ***AUSVERKAUFT***, #AUSVERKAUFT\n        caption = re.sub(r\"\\s+\\.\\s+\", r\" \", caption)  # \" . 
\"\n\n        # this-is-my-cute-cat / this_is_my_cute_cat\n        regex2 = re.compile(r\"(?:\\-|\\_)\")\n        if len(re.findall(regex2, caption)) > 3:\n            caption = re.sub(regex2, \" \", caption)\n\n        caption = ftfy.fix_text(caption)\n        caption = html.unescape(html.unescape(caption))\n\n        caption = re.sub(r\"\\b[a-zA-Z]{1,3}\\d{3,15}\\b\", \"\", caption)  # jc6640\n        caption = re.sub(r\"\\b[a-zA-Z]+\\d+[a-zA-Z]+\\b\", \"\", caption)  # jc6640vc\n        caption = re.sub(r\"\\b\\d+[a-zA-Z]+\\d+\\b\", \"\", caption)  # 6640vc231\n\n        caption = re.sub(r\"(worldwide\\s+)?(free\\s+)?shipping\", \"\", caption)\n        caption = re.sub(r\"(free\\s)?download(\\sfree)?\", \"\", caption)\n        caption = re.sub(r\"\\bclick\\b\\s(?:for|on)\\s\\w+\", \"\", caption)\n        caption = re.sub(r\"\\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\\simage[s]?)?\", \"\", caption)\n        caption = re.sub(r\"\\bpage\\s+\\d+\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\d*[a-zA-Z]+\\d+[a-zA-Z]+\\d+[a-zA-Z\\d]*\\b\", r\" \", caption)  # j2d1a2a...\n\n        caption = re.sub(r\"\\b\\d+\\.?\\d*[xх×]\\d+\\.?\\d*\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\s+\\:\\s+\", r\": \", caption)\n        caption = re.sub(r\"(\\D[,\\./])\\b\", r\"\\1 \", caption)\n        caption = re.sub(r\"\\s+\", \" \", caption)\n\n        caption.strip()\n\n        caption = re.sub(r\"^[\\\"\\']([\\w\\W]+)[\\\"\\']$\", r\"\\1\", caption)\n        caption = re.sub(r\"^[\\'\\_,\\-\\:;]\", r\"\", caption)\n        caption = re.sub(r\"[\\'\\_,\\-\\:\\-\\+]$\", r\"\", caption)\n        caption = re.sub(r\"^\\.\\S+$\", \"\", caption)\n\n        return caption.strip()\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.preprocess_image\n    def preprocess_image(self, image: PIL.Image.Image) -> torch.Tensor:\n        if not isinstance(image, list):\n            image = [image]\n\n        def numpy_to_pt(images):\n            if images.ndim == 3:\n                images = images[..., None]\n\n            images = torch.from_numpy(images.transpose(0, 3, 1, 2))\n            return images\n\n        if isinstance(image[0], PIL.Image.Image):\n            new_image = []\n\n            for image_ in image:\n                image_ = image_.convert(\"RGB\")\n                image_ = resize(image_, self.unet.sample_size)\n                image_ = np.array(image_)\n                image_ = image_.astype(np.float32)\n                image_ = image_ / 127.5 - 1\n                new_image.append(image_)\n\n            image = new_image\n\n            image = np.stack(image, axis=0)  # to np\n            image = numpy_to_pt(image)  # to pt\n\n        elif isinstance(image[0], np.ndarray):\n            image = np.concatenate(image, axis=0) if image[0].ndim == 4 else np.stack(image, axis=0)\n            image = numpy_to_pt(image)\n\n        elif isinstance(image[0], torch.Tensor):\n            image = torch.cat(image, axis=0) if image[0].ndim == 4 else torch.stack(image, axis=0)\n\n        return image\n\n    def preprocess_mask_image(self, mask_image) -> torch.Tensor:\n        if not isinstance(mask_image, list):\n            mask_image = [mask_image]\n\n        if isinstance(mask_image[0], torch.Tensor):\n            mask_image = torch.cat(mask_image, axis=0) if mask_image[0].ndim == 4 else torch.stack(mask_image, axis=0)\n\n            if mask_image.ndim == 2:\n                # Batch and add channel dim for single mask\n                mask_image = 
mask_image.unsqueeze(0).unsqueeze(0)\n            elif mask_image.ndim == 3 and mask_image.shape[0] == 1:\n                # Single mask, the 0'th dimension is considered to be\n                # the existing batch size of 1\n                mask_image = mask_image.unsqueeze(0)\n            elif mask_image.ndim == 3 and mask_image.shape[0] != 1:\n                # Batch of mask, the 0'th dimension is considered to be\n                # the batching dimension\n                mask_image = mask_image.unsqueeze(1)\n\n            mask_image[mask_image < 0.5] = 0\n            mask_image[mask_image >= 0.5] = 1\n\n        elif isinstance(mask_image[0], PIL.Image.Image):\n            new_mask_image = []\n\n            for mask_image_ in mask_image:\n                mask_image_ = mask_image_.convert(\"L\")\n                mask_image_ = resize(mask_image_, self.unet.sample_size)\n                mask_image_ = np.array(mask_image_)\n                mask_image_ = mask_image_[None, None, :]\n                new_mask_image.append(mask_image_)\n\n            mask_image = new_mask_image\n\n            mask_image = np.concatenate(mask_image, axis=0)\n            mask_image = mask_image.astype(np.float32) / 255.0\n            mask_image[mask_image < 0.5] = 0\n            mask_image[mask_image >= 0.5] = 1\n            mask_image = torch.from_numpy(mask_image)\n\n        elif isinstance(mask_image[0], np.ndarray):\n            mask_image = np.concatenate([m[None, None, :] for m in mask_image], axis=0)\n\n            mask_image[mask_image < 0.5] = 0\n            mask_image[mask_image >= 0.5] = 1\n            mask_image = torch.from_numpy(mask_image)\n\n        return mask_image\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start:]\n\n        return timesteps, num_inference_steps - t_start\n\n    def prepare_intermediate_images(\n        self, image, timestep, batch_size, num_images_per_prompt, dtype, device, mask_image, generator=None\n    ):\n        image_batch_size, channels, height, width = image.shape\n\n        batch_size = batch_size * num_images_per_prompt\n\n        shape = (batch_size, channels, height, width)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n        noised_image = self.scheduler.add_noise(image, noise, timestep)\n\n        image = (1 - mask_image) * image + mask_image * noised_image\n\n        return image\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[\n            PIL.Image.Image, torch.Tensor, np.ndarray, List[PIL.Image.Image], List[torch.Tensor], List[np.ndarray]\n        ] = None,\n        mask_image: Union[\n            PIL.Image.Image, torch.Tensor, np.ndarray, List[PIL.Image.Image], List[torch.Tensor], List[np.ndarray]\n        ] = None,\n        strength: float = 1.0,\n        num_inference_steps: int = 50,\n        timesteps: List[int] = None,\n        guidance_scale: float = 7.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        clean_caption: bool = True,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            mask_image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                repainted, while black pixels will be preserved. If `mask_image` is a PIL image, it will be converted\n                to a single channel (luminance) before use. If it's a tensor, it should contain one color channel (L)\n                instead of 3, so the expected shape would be `(B, H, W, 1)`.\n            strength (`float`, *optional*, defaults to 1.0):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            timesteps (`List[int]`, *optional*):\n                Custom timesteps to use for the denoising process. If not defined, equally spaced `num_inference_steps`\n                timesteps are used. Must be in descending order.\n            guidance_scale (`float`, *optional*, defaults to 7.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2 of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. A higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            clean_caption (`bool`, *optional*, defaults to `True`):\n                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to\n                be installed. If the dependencies are not installed, the embeddings will be created from the raw\n                prompt.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images, and the second element is a list\n            of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\" (nsfw)\n            or watermarked content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        self.check_inputs(\n            prompt,\n            image,\n            mask_image,\n            batch_size,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds, negative_prompt_embeds = self.encode_prompt(\n            prompt,\n            do_classifier_free_guidance,\n            num_images_per_prompt=num_images_per_prompt,\n            device=device,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            clean_caption=clean_caption,\n        )\n\n        if do_classifier_free_guidance:\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        dtype = prompt_embeds.dtype\n\n        # 4. Prepare timesteps\n        if timesteps is not None:\n            self.scheduler.set_timesteps(timesteps=timesteps, device=device)\n            timesteps = self.scheduler.timesteps\n            num_inference_steps = len(timesteps)\n        else:\n            self.scheduler.set_timesteps(num_inference_steps, device=device)\n            timesteps = self.scheduler.timesteps\n\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength)\n\n        # 5. 
Prepare intermediate images\n        image = self.preprocess_image(image)\n        image = image.to(device=device, dtype=dtype)\n\n        mask_image = self.preprocess_mask_image(mask_image)\n        mask_image = mask_image.to(device=device, dtype=dtype)\n\n        if mask_image.shape[0] == 1:\n            mask_image = mask_image.repeat_interleave(batch_size * num_images_per_prompt, dim=0)\n        else:\n            mask_image = mask_image.repeat_interleave(num_images_per_prompt, dim=0)\n\n        noise_timestep = timesteps[0:1]\n        noise_timestep = noise_timestep.repeat(batch_size * num_images_per_prompt)\n\n        intermediate_images = self.prepare_intermediate_images(\n            image, noise_timestep, batch_size, num_images_per_prompt, dtype, device, mask_image, generator\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # HACK: see comment in `enable_model_cpu_offload`\n        if hasattr(self, \"text_encoder_offload_hook\") and self.text_encoder_offload_hook is not None:\n            self.text_encoder_offload_hook.offload()\n\n        # 7. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                model_input = (\n                    torch.cat([intermediate_images] * 2) if do_classifier_free_guidance else intermediate_images\n                )\n                model_input = self.scheduler.scale_model_input(model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1], dim=1)\n                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1], dim=1)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n                if self.scheduler.config.variance_type not in [\"learned\", \"learned_range\"]:\n                    noise_pred, _ = noise_pred.split(model_input.shape[1], dim=1)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                prev_intermediate_images = intermediate_images\n\n                intermediate_images = self.scheduler.step(\n                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False\n                )[0]\n\n                intermediate_images = (1 - mask_image) * prev_intermediate_images + mask_image * intermediate_images\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, 
intermediate_images)\n\n        image = intermediate_images\n\n        if output_type == \"pil\":\n            # 8. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 9. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            # 10. Convert to PIL\n            image = self.numpy_to_pil(image)\n\n            # 11. Apply watermark\n            if self.watermarker is not None:\n                self.watermarker.apply_watermark(image, self.unet.config.sample_size)\n        elif output_type == \"pt\":\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n        else:\n            # 8. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 9. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, nsfw_detected, watermark_detected)\n\n        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)\n"
  },
  {
    "path": "diffusers/pipelines/deepfloyd_if/pipeline_if_inpainting_superresolution.py",
    "content": "import html\nimport inspect\nimport re\nimport urllib.parse as ul\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer\n\nfrom ...models import UNet2DConditionModel\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import (\n    BACKENDS_MAPPING,\n    PIL_INTERPOLATION,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_ftfy_available,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import IFPipelineOutput\nfrom .safety_checker import IFSafetyChecker\nfrom .watermark import IFWatermarker\n\n\nif is_bs4_available():\n    from bs4 import BeautifulSoup\n\nif is_ftfy_available():\n    import ftfy\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.resize\ndef resize(images: PIL.Image.Image, img_size: int) -> PIL.Image.Image:\n    w, h = images.size\n\n    coef = w / h\n\n    w, h = img_size, img_size\n\n    if coef >= 1:\n        w = int(round(img_size / 8 * coef) * 8)\n    else:\n        h = int(round(img_size / 8 / coef) * 8)\n\n    images = images.resize((w, h), resample=PIL_INTERPOLATION[\"bicubic\"], reducing_gap=None)\n\n    return images\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> from diffusers import IFInpaintingPipeline, IFInpaintingSuperResolutionPipeline, DiffusionPipeline\n        >>> from diffusers.utils import pt_to_pil\n        >>> import torch\n        >>> from PIL import Image\n        >>> import requests\n        >>> from io import BytesIO\n\n        >>> url = \"https://huggingface.co/datasets/diffusers/docs-images/resolve/main/if/person.png\"\n        >>> response = requests.get(url)\n        >>> original_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> original_image = original_image\n\n        >>> url = \"https://huggingface.co/datasets/diffusers/docs-images/resolve/main/if/glasses_mask.png\"\n        >>> response = requests.get(url)\n        >>> mask_image = Image.open(BytesIO(response.content))\n        >>> mask_image = mask_image\n\n        >>> pipe = IFInpaintingPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-I-XL-v1.0\", variant=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = \"blue sunglasses\"\n\n        >>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)\n        >>> image = pipe(\n        ...     image=original_image,\n        ...     mask_image=mask_image,\n        ...     prompt_embeds=prompt_embeds,\n        ...     negative_prompt_embeds=negative_embeds,\n        ...     output_type=\"pt\",\n        ... ).images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> super_res_1_pipe = IFInpaintingSuperResolutionPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-II-L-v1.0\", text_encoder=None, variant=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> super_res_1_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_1_pipe(\n        ...     image=image,\n        ...     mask_image=mask_image,\n        ...     original_image=original_image,\n        ...     prompt_embeds=prompt_embeds,\n        ... 
    negative_prompt_embeds=negative_embeds,\n        ... ).images\n        >>> image[0].save(\"./if_stage_II.png\")\n        ```\n    \"\"\"\n\n\nclass IFInpaintingSuperResolutionPipeline(DiffusionPipeline):\n    tokenizer: T5Tokenizer\n    text_encoder: T5EncoderModel\n\n    unet: UNet2DConditionModel\n    scheduler: DDPMScheduler\n    image_noising_scheduler: DDPMScheduler\n\n    feature_extractor: Optional[CLIPImageProcessor]\n    safety_checker: Optional[IFSafetyChecker]\n\n    watermarker: Optional[IFWatermarker]\n\n    bad_punct_regex = re.compile(\n        r\"[\" + \"#®•©™&@·º½¾¿¡§~\" + \"\\)\" + \"\\(\" + \"\\]\" + \"\\[\" + \"\\}\" + \"\\{\" + \"\\|\" + \"\\\\\" + \"\\/\" + \"\\*\" + r\"]{1,}\"\n    )  # noqa\n\n    _optional_components = [\"tokenizer\", \"text_encoder\", \"safety_checker\", \"feature_extractor\", \"watermarker\"]\n\n    def __init__(\n        self,\n        tokenizer: T5Tokenizer,\n        text_encoder: T5EncoderModel,\n        unet: UNet2DConditionModel,\n        scheduler: DDPMScheduler,\n        image_noising_scheduler: DDPMScheduler,\n        safety_checker: Optional[IFSafetyChecker],\n        feature_extractor: Optional[CLIPImageProcessor],\n        watermarker: Optional[IFWatermarker],\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the IF license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        if unet.config.in_channels != 6:\n            logger.warn(\n                f\"It seems like you have loaded a checkpoint that is not meant to be used for super resolution from {unet.config._name_or_path} as it accepts {unet.config.in_channels} input channels instead of 6. 
Please make sure to pass a super resolution checkpoint as the `'unet'`: IFSuperResolutionPipeline.from_pretrained(unet=super_resolution_unet, ...)`.\"\n            )\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            image_noising_scheduler=image_noising_scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            watermarker=watermarker,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.text_encoder,\n            self.unet,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n\n        if self.text_encoder is not None:\n            _, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)\n\n            # Accelerate will move the next model to the device _before_ calling the offload hook of the\n            # previous model. This will cause both models to be present on the device at the same time.\n            # IF uses T5 for its text encoder which is really large. 
We can manually call the offload\n            # hook for the text encoder to ensure it's moved to the cpu before the unet is moved to\n            # the GPU.\n            self.text_encoder_offload_hook = hook\n\n        _, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)\n\n        # if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet\n        self.unet_offload_hook = hook\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.remove_all_hooks\n    def remove_all_hooks(self):\n        if is_accelerate_available():\n            from accelerate.hooks import remove_hook_from_module\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        for model in [self.text_encoder, self.unet, self.safety_checker]:\n            if model is not None:\n                remove_hook_from_module(model, recurse=True)\n\n        self.unet_offload_hook = None\n        self.text_encoder_offload_hook = None\n        self.final_offload_hook = None\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._text_preprocessing\n    def _text_preprocessing(self, text, clean_caption=False):\n        if clean_caption and not is_bs4_available():\n            logger.warn(BACKENDS_MAPPING[\"bs4\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if clean_caption and not is_ftfy_available():\n            logger.warn(BACKENDS_MAPPING[\"ftfy\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if not isinstance(text, (tuple, list)):\n            text = [text]\n\n        def process(text: str):\n            if clean_caption:\n                text = self._clean_caption(text)\n                text = self._clean_caption(text)\n            else:\n                text = text.lower().strip()\n            return text\n\n        return [process(t) for t in text]\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._clean_caption\n    def _clean_caption(self, caption):\n        caption = str(caption)\n        caption = ul.unquote_plus(caption)\n        caption = caption.strip().lower()\n        caption = re.sub(\"<person>\", \"person\", caption)\n        # urls:\n        caption = re.sub(\n            r\"\\b((?:https?:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        caption = re.sub(\n            r\"\\b((?:www:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        # html:\n        caption = BeautifulSoup(caption, features=\"html.parser\").text\n\n        # @<nickname>\n        caption = re.sub(r\"@[\\w\\d]+\\b\", \"\", caption)\n\n        # 31C0—31EF CJK Strokes\n        # 31F0—31FF Katakana Phonetic Extensions\n        # 3200—32FF Enclosed CJK Letters and Months\n        # 3300—33FF CJK Compatibility\n        # 3400—4DBF 
CJK Unified Ideographs Extension A\n        # 4DC0—4DFF Yijing Hexagram Symbols\n        # 4E00—9FFF CJK Unified Ideographs\n        caption = re.sub(r\"[\\u31c0-\\u31ef]+\", \"\", caption)\n        caption = re.sub(r\"[\\u31f0-\\u31ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3200-\\u32ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3300-\\u33ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3400-\\u4dbf]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4dc0-\\u4dff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4e00-\\u9fff]+\", \"\", caption)\n        #######################################################\n\n        # все виды тире / all types of dash --> \"-\"\n        caption = re.sub(\n            r\"[\\u002D\\u058A\\u05BE\\u1400\\u1806\\u2010-\\u2015\\u2E17\\u2E1A\\u2E3A\\u2E3B\\u2E40\\u301C\\u3030\\u30A0\\uFE31\\uFE32\\uFE58\\uFE63\\uFF0D]+\",  # noqa\n            \"-\",\n            caption,\n        )\n\n        # кавычки к одному стандарту\n        caption = re.sub(r\"[`´«»“”¨]\", '\"', caption)\n        caption = re.sub(r\"[‘’]\", \"'\", caption)\n\n        # &quot;\n        caption = re.sub(r\"&quot;?\", \"\", caption)\n        # &amp\n        caption = re.sub(r\"&amp\", \"\", caption)\n\n        # ip adresses:\n        caption = re.sub(r\"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\", \" \", caption)\n\n        # article ids:\n        caption = re.sub(r\"\\d:\\d\\d\\s+$\", \"\", caption)\n\n        # \\n\n        caption = re.sub(r\"\\\\n\", \" \", caption)\n\n        # \"#123\"\n        caption = re.sub(r\"#\\d{1,3}\\b\", \"\", caption)\n        # \"#12345..\"\n        caption = re.sub(r\"#\\d{5,}\\b\", \"\", caption)\n        # \"123456..\"\n        caption = re.sub(r\"\\b\\d{6,}\\b\", \"\", caption)\n        # filenames:\n        caption = re.sub(r\"[\\S]+\\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)\", \"\", caption)\n\n        #\n        caption = re.sub(r\"[\\\"\\']{2,}\", r'\"', caption)  # \"\"\"AUSVERKAUFT\"\"\"\n        caption = re.sub(r\"[\\.]{2,}\", r\" \", caption)  # \"\"\"AUSVERKAUFT\"\"\"\n\n        caption = re.sub(self.bad_punct_regex, r\" \", caption)  # ***AUSVERKAUFT***, #AUSVERKAUFT\n        caption = re.sub(r\"\\s+\\.\\s+\", r\" \", caption)  # \" . 
\"\n\n        # this-is-my-cute-cat / this_is_my_cute_cat\n        regex2 = re.compile(r\"(?:\\-|\\_)\")\n        if len(re.findall(regex2, caption)) > 3:\n            caption = re.sub(regex2, \" \", caption)\n\n        caption = ftfy.fix_text(caption)\n        caption = html.unescape(html.unescape(caption))\n\n        caption = re.sub(r\"\\b[a-zA-Z]{1,3}\\d{3,15}\\b\", \"\", caption)  # jc6640\n        caption = re.sub(r\"\\b[a-zA-Z]+\\d+[a-zA-Z]+\\b\", \"\", caption)  # jc6640vc\n        caption = re.sub(r\"\\b\\d+[a-zA-Z]+\\d+\\b\", \"\", caption)  # 6640vc231\n\n        caption = re.sub(r\"(worldwide\\s+)?(free\\s+)?shipping\", \"\", caption)\n        caption = re.sub(r\"(free\\s)?download(\\sfree)?\", \"\", caption)\n        caption = re.sub(r\"\\bclick\\b\\s(?:for|on)\\s\\w+\", \"\", caption)\n        caption = re.sub(r\"\\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\\simage[s]?)?\", \"\", caption)\n        caption = re.sub(r\"\\bpage\\s+\\d+\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\d*[a-zA-Z]+\\d+[a-zA-Z]+\\d+[a-zA-Z\\d]*\\b\", r\" \", caption)  # j2d1a2a...\n\n        caption = re.sub(r\"\\b\\d+\\.?\\d*[xх×]\\d+\\.?\\d*\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\s+\\:\\s+\", r\": \", caption)\n        caption = re.sub(r\"(\\D[,\\./])\\b\", r\"\\1 \", caption)\n        caption = re.sub(r\"\\s+\", \" \", caption)\n\n        caption.strip()\n\n        caption = re.sub(r\"^[\\\"\\']([\\w\\W]+)[\\\"\\']$\", r\"\\1\", caption)\n        caption = re.sub(r\"^[\\'\\_,\\-\\:;]\", r\"\", caption)\n        caption = re.sub(r\"[\\'\\_,\\-\\:\\-\\+]$\", r\"\", caption)\n        caption = re.sub(r\"^\\.\\S+$\", \"\", caption)\n\n        return caption.strip()\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.encode_prompt\n    def encode_prompt(\n        self,\n        prompt,\n        do_classifier_free_guidance=True,\n        num_images_per_prompt=1,\n        device=None,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        clean_caption: bool = False,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`, *optional*):\n                torch device to place the resulting embeddings on\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. If not defined, one has to pass `negative_prompt_embeds`. instead.\n                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if device is None:\n            device = self._execution_device\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        # while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF\n        max_length = 77\n\n        if prompt_embeds is None:\n            prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {max_length} tokens: {removed_text}\"\n                )\n\n            attention_mask = text_inputs.attention_mask.to(device)\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        if self.text_encoder is not None:\n            dtype = self.text_encoder.dtype\n        elif self.unet is not None:\n            dtype = self.unet.dtype\n        else:\n            dtype = None\n\n        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_attention_mask=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n        else:\n            negative_prompt_embeds = None\n\n        return prompt_embeds, negative_prompt_embeds\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            
extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        original_image,\n        mask_image,\n        batch_size,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        # image\n\n        if isinstance(image, list):\n            check_image_type = image[0]\n        else:\n            check_image_type = image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] 
but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(image, list):\n            image_batch_size = len(image)\n        elif isinstance(image, torch.Tensor):\n            image_batch_size = image.shape[0]\n        elif isinstance(image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(image, np.ndarray):\n            image_batch_size = image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(f\"image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\")\n\n        # original_image\n\n        if isinstance(original_image, list):\n            check_image_type = original_image[0]\n        else:\n            check_image_type = original_image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`original_image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(original_image, list):\n            image_batch_size = len(original_image)\n        elif isinstance(original_image, torch.Tensor):\n            image_batch_size = original_image.shape[0]\n        elif isinstance(original_image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(original_image, np.ndarray):\n            image_batch_size = original_image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(\n                f\"original_image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\"\n            )\n\n        # mask_image\n\n        if isinstance(mask_image, list):\n            check_image_type = mask_image[0]\n        else:\n            check_image_type = mask_image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`mask_image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] 
but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(mask_image, list):\n            image_batch_size = len(mask_image)\n        elif isinstance(mask_image, torch.Tensor):\n            image_batch_size = mask_image.shape[0]\n        elif isinstance(mask_image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(mask_image, np.ndarray):\n            image_batch_size = mask_image.shape[0]\n        else:\n            assert False\n\n        if image_batch_size != 1 and batch_size != image_batch_size:\n            raise ValueError(\n                f\"mask_image batch size: {image_batch_size} must be `1` or the same as prompt batch size {batch_size}\"\n            )\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.preprocess_image with preprocess_image -> preprocess_original_image\n    def preprocess_original_image(self, image: PIL.Image.Image) -> torch.Tensor:\n        if not isinstance(image, list):\n            image = [image]\n\n        def numpy_to_pt(images):\n            if images.ndim == 3:\n                images = images[..., None]\n\n            images = torch.from_numpy(images.transpose(0, 3, 1, 2))\n            return images\n\n        if isinstance(image[0], PIL.Image.Image):\n            new_image = []\n\n            for image_ in image:\n                image_ = image_.convert(\"RGB\")\n                image_ = resize(image_, self.unet.sample_size)\n                image_ = np.array(image_)\n                image_ = image_.astype(np.float32)\n                image_ = image_ / 127.5 - 1\n                new_image.append(image_)\n\n            image = new_image\n\n            image = np.stack(image, axis=0)  # to np\n            image = numpy_to_pt(image)  # to pt\n\n        elif isinstance(image[0], np.ndarray):\n            image = np.concatenate(image, axis=0) if image[0].ndim == 4 else np.stack(image, axis=0)\n            image = numpy_to_pt(image)\n\n        elif isinstance(image[0], torch.Tensor):\n            image = torch.cat(image, axis=0) if image[0].ndim == 4 else torch.stack(image, axis=0)\n\n        return image\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_superresolution.IFSuperResolutionPipeline.preprocess_image\n    def preprocess_image(self, image: PIL.Image.Image, num_images_per_prompt, device) -> torch.Tensor:\n        if not isinstance(image, torch.Tensor) and not isinstance(image, list):\n            image = [image]\n\n        if isinstance(image[0], PIL.Image.Image):\n            image = [np.array(i).astype(np.float32) / 255.0 for i in image]\n\n            image = np.stack(image, axis=0)  # to np\n            image = torch.from_numpy(image.transpose(0, 3, 1, 2))\n        elif isinstance(image[0], np.ndarray):\n            image = np.stack(image, axis=0)  # to np\n            if image.ndim == 5:\n                image = image[0]\n\n            image = torch.from_numpy(image.transpose(0, 3, 1, 2))\n        elif isinstance(image, list) and isinstance(image[0], torch.Tensor):\n            dims = image[0].ndim\n\n            if dims == 3:\n                image = torch.stack(image, dim=0)\n            elif dims == 4:\n                image = torch.concat(image, dim=0)\n            else:\n                raise ValueError(f\"Image must have 3 or 4 dimensions, instead got {dims}\")\n\n        image = image.to(device=device, dtype=self.unet.dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n\n        
return image\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_inpainting.IFInpaintingPipeline.preprocess_mask_image\n    def preprocess_mask_image(self, mask_image) -> torch.Tensor:\n        if not isinstance(mask_image, list):\n            mask_image = [mask_image]\n\n        if isinstance(mask_image[0], torch.Tensor):\n            mask_image = torch.cat(mask_image, axis=0) if mask_image[0].ndim == 4 else torch.stack(mask_image, axis=0)\n\n            if mask_image.ndim == 2:\n                # Batch and add channel dim for single mask\n                mask_image = mask_image.unsqueeze(0).unsqueeze(0)\n            elif mask_image.ndim == 3 and mask_image.shape[0] == 1:\n                # Single mask, the 0'th dimension is considered to be\n                # the existing batch size of 1\n                mask_image = mask_image.unsqueeze(0)\n            elif mask_image.ndim == 3 and mask_image.shape[0] != 1:\n                # Batch of mask, the 0'th dimension is considered to be\n                # the batching dimension\n                mask_image = mask_image.unsqueeze(1)\n\n            mask_image[mask_image < 0.5] = 0\n            mask_image[mask_image >= 0.5] = 1\n\n        elif isinstance(mask_image[0], PIL.Image.Image):\n            new_mask_image = []\n\n            for mask_image_ in mask_image:\n                mask_image_ = mask_image_.convert(\"L\")\n                mask_image_ = resize(mask_image_, self.unet.sample_size)\n                mask_image_ = np.array(mask_image_)\n                mask_image_ = mask_image_[None, None, :]\n                new_mask_image.append(mask_image_)\n\n            mask_image = new_mask_image\n\n            mask_image = np.concatenate(mask_image, axis=0)\n            mask_image = mask_image.astype(np.float32) / 255.0\n            mask_image[mask_image < 0.5] = 0\n            mask_image[mask_image >= 0.5] = 1\n            mask_image = torch.from_numpy(mask_image)\n\n        elif isinstance(mask_image[0], np.ndarray):\n            mask_image = np.concatenate([m[None, None, :] for m in mask_image], axis=0)\n\n            mask_image[mask_image < 0.5] = 0\n            mask_image[mask_image >= 0.5] = 1\n            mask_image = torch.from_numpy(mask_image)\n\n        return mask_image\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_img2img.IFImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start:]\n\n        return timesteps, num_inference_steps - t_start\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if_inpainting.IFInpaintingPipeline.prepare_intermediate_images\n    def prepare_intermediate_images(\n        self, image, timestep, batch_size, num_images_per_prompt, dtype, device, mask_image, generator=None\n    ):\n        image_batch_size, channels, height, width = image.shape\n\n        batch_size = batch_size * num_images_per_prompt\n\n        shape = (batch_size, channels, height, width)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n        noised_image = self.scheduler.add_noise(image, noise, timestep)\n\n        image = (1 - mask_image) * image + mask_image * noised_image\n\n        return image\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        image: Union[PIL.Image.Image, np.ndarray, torch.FloatTensor],\n        original_image: Union[\n            PIL.Image.Image, torch.Tensor, np.ndarray, List[PIL.Image.Image], List[torch.Tensor], List[np.ndarray]\n        ] = None,\n        mask_image: Union[\n            PIL.Image.Image, torch.Tensor, np.ndarray, List[PIL.Image.Image], List[torch.Tensor], List[np.ndarray]\n        ] = None,\n        strength: float = 0.8,\n        prompt: Union[str, List[str]] = None,\n        num_inference_steps: int = 100,\n        timesteps: List[int] = None,\n        guidance_scale: float = 4.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        noise_level: int = 0,\n        clean_caption: bool = True,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            original_image (`torch.FloatTensor` or `PIL.Image.Image`):\n                The original image that `image` was varied from.\n            mask_image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                repainted, while black pixels will be preserved. If `mask_image` is a PIL image, it will be converted\n                to a single channel (luminance) before use. If it's a tensor, it should contain one color channel (L)\n                instead of 3, so the expected shape would be `(B, H, W, 1)`.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. 
If not defined, one has to pass `prompt_embeds`.\n                instead.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            timesteps (`List[int]`, *optional*):\n                Custom timesteps to use for the denoising process. If not defined, equal spaced `num_inference_steps`\n                timesteps are used. Must be in descending order.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            noise_level (`int`, *optional*, defaults to 0):\n                The amount of noise to add to the upscaled image. Must be in the range `[0, 1000)`\n            clean_caption (`bool`, *optional*, defaults to `True`):\n                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to\n                be installed. If the dependencies are not installed, the embeddings will be created from the raw\n                prompt.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images, and the second element is a list\n            of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\" (nsfw)\n            or watermarked content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        self.check_inputs(\n            prompt,\n            image,\n            original_image,\n            mask_image,\n            batch_size,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        device = self._execution_device\n\n        # 3. Encode input prompt\n        prompt_embeds, negative_prompt_embeds = self.encode_prompt(\n            prompt,\n            do_classifier_free_guidance,\n            num_images_per_prompt=num_images_per_prompt,\n            device=device,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            clean_caption=clean_caption,\n        )\n\n        if do_classifier_free_guidance:\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        dtype = prompt_embeds.dtype\n\n        # 4. 
Prepare timesteps\n        if timesteps is not None:\n            self.scheduler.set_timesteps(timesteps=timesteps, device=device)\n            timesteps = self.scheduler.timesteps\n            num_inference_steps = len(timesteps)\n        else:\n            self.scheduler.set_timesteps(num_inference_steps, device=device)\n            timesteps = self.scheduler.timesteps\n\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength)\n\n        # 5. prepare original image\n        original_image = self.preprocess_original_image(original_image)\n        original_image = original_image.to(device=device, dtype=dtype)\n\n        # 6. prepare mask image\n        mask_image = self.preprocess_mask_image(mask_image)\n        mask_image = mask_image.to(device=device, dtype=dtype)\n\n        if mask_image.shape[0] == 1:\n            mask_image = mask_image.repeat_interleave(batch_size * num_images_per_prompt, dim=0)\n        else:\n            mask_image = mask_image.repeat_interleave(num_images_per_prompt, dim=0)\n\n        # 6. Prepare intermediate images\n        noise_timestep = timesteps[0:1]\n        noise_timestep = noise_timestep.repeat(batch_size * num_images_per_prompt)\n\n        intermediate_images = self.prepare_intermediate_images(\n            original_image,\n            noise_timestep,\n            batch_size,\n            num_images_per_prompt,\n            dtype,\n            device,\n            mask_image,\n            generator,\n        )\n\n        # 7. Prepare upscaled image and noise level\n        _, _, height, width = original_image.shape\n\n        image = self.preprocess_image(image, num_images_per_prompt, device)\n\n        upscaled = F.interpolate(image, (height, width), mode=\"bilinear\", align_corners=True)\n\n        noise_level = torch.tensor([noise_level] * upscaled.shape[0], device=upscaled.device)\n        noise = randn_tensor(upscaled.shape, generator=generator, device=upscaled.device, dtype=upscaled.dtype)\n        upscaled = self.image_noising_scheduler.add_noise(upscaled, noise, timesteps=noise_level)\n\n        if do_classifier_free_guidance:\n            noise_level = torch.cat([noise_level] * 2)\n\n        # 8. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # HACK: see comment in `enable_model_cpu_offload`\n        if hasattr(self, \"text_encoder_offload_hook\") and self.text_encoder_offload_hook is not None:\n            self.text_encoder_offload_hook.offload()\n\n        # 9. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                model_input = torch.cat([intermediate_images, upscaled], dim=1)\n\n                model_input = torch.cat([model_input] * 2) if do_classifier_free_guidance else model_input\n                model_input = self.scheduler.scale_model_input(model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    class_labels=noise_level,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1] // 2, dim=1)\n                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1] // 2, dim=1)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                prev_intermediate_images = intermediate_images\n\n                intermediate_images = self.scheduler.step(\n                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False\n                )[0]\n\n                intermediate_images = (1 - mask_image) * prev_intermediate_images + mask_image * intermediate_images\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, intermediate_images)\n\n        image = intermediate_images\n\n        if output_type == \"pil\":\n            # 10. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 11. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            # 12. Convert to PIL\n            image = self.numpy_to_pil(image)\n\n            # 13. Apply watermark\n            if self.watermarker is not None:\n                self.watermarker.apply_watermark(image, self.unet.config.sample_size)\n        elif output_type == \"pt\":\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n        else:\n            # 10. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 11. 
Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, nsfw_detected, watermark_detected)\n\n        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)\n"
  },
  {
    "path": "diffusers/pipelines/deepfloyd_if/pipeline_if_superresolution.py",
    "content": "import html\nimport inspect\nimport re\nimport urllib.parse as ul\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer\n\nfrom ...models import UNet2DConditionModel\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import (\n    BACKENDS_MAPPING,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_ftfy_available,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import IFPipelineOutput\nfrom .safety_checker import IFSafetyChecker\nfrom .watermark import IFWatermarker\n\n\nif is_bs4_available():\n    from bs4 import BeautifulSoup\n\nif is_ftfy_available():\n    import ftfy\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> from diffusers import IFPipeline, IFSuperResolutionPipeline, DiffusionPipeline\n        >>> from diffusers.utils import pt_to_pil\n        >>> import torch\n\n        >>> pipe = IFPipeline.from_pretrained(\"DeepFloyd/IF-I-XL-v1.0\", variant=\"fp16\", torch_dtype=torch.float16)\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = 'a photo of a kangaroo wearing an orange hoodie and blue sunglasses standing in front of the eiffel tower holding a sign that says \"very deep learning\"'\n        >>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)\n\n        >>> image = pipe(prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds, output_type=\"pt\").images\n\n        >>> # save intermediate image\n        >>> pil_image = pt_to_pil(image)\n        >>> pil_image[0].save(\"./if_stage_I.png\")\n\n        >>> super_res_1_pipe = IFSuperResolutionPipeline.from_pretrained(\n        ...     \"DeepFloyd/IF-II-L-v1.0\", text_encoder=None, variant=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> super_res_1_pipe.enable_model_cpu_offload()\n\n        >>> image = super_res_1_pipe(\n        ...     image=image, prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds\n        ... 
).images\n        >>> image[0].save(\"./if_stage_II.png\")\n        ```\n\"\"\"\n\n\nclass IFSuperResolutionPipeline(DiffusionPipeline):\n    tokenizer: T5Tokenizer\n    text_encoder: T5EncoderModel\n\n    unet: UNet2DConditionModel\n    scheduler: DDPMScheduler\n    image_noising_scheduler: DDPMScheduler\n\n    feature_extractor: Optional[CLIPImageProcessor]\n    safety_checker: Optional[IFSafetyChecker]\n\n    watermarker: Optional[IFWatermarker]\n\n    bad_punct_regex = re.compile(\n        r\"[\" + \"#®•©™&@·º½¾¿¡§~\" + \"\\)\" + \"\\(\" + \"\\]\" + \"\\[\" + \"\\}\" + \"\\{\" + \"\\|\" + \"\\\\\" + \"\\/\" + \"\\*\" + r\"]{1,}\"\n    )  # noqa\n\n    _optional_components = [\"tokenizer\", \"text_encoder\", \"safety_checker\", \"feature_extractor\", \"watermarker\"]\n\n    def __init__(\n        self,\n        tokenizer: T5Tokenizer,\n        text_encoder: T5EncoderModel,\n        unet: UNet2DConditionModel,\n        scheduler: DDPMScheduler,\n        image_noising_scheduler: DDPMScheduler,\n        safety_checker: Optional[IFSafetyChecker],\n        feature_extractor: Optional[CLIPImageProcessor],\n        watermarker: Optional[IFWatermarker],\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the IF license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        if unet.config.in_channels != 6:\n            logger.warn(\n                \"It seems like you have loaded a checkpoint that shall not be used for super resolution from {unet.config._name_or_path} as it accepts {unet.config.in_channels} input channels instead of 6. 
Please make sure to pass a super resolution checkpoint as the `'unet'`: IFSuperResolutionPipeline.from_pretrained(unet=super_resolution_unet, ...)`.\"\n            )\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            image_noising_scheduler=image_noising_scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            watermarker=watermarker,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.text_encoder,\n            self.unet,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n\n        if self.text_encoder is not None:\n            _, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)\n\n            # Accelerate will move the next model to the device _before_ calling the offload hook of the\n            # previous model. This will cause both models to be present on the device at the same time.\n            # IF uses T5 for its text encoder which is really large. 
We can manually call the offload\n            # hook for the text encoder to ensure it's moved to the cpu before the unet is moved to\n            # the GPU.\n            self.text_encoder_offload_hook = hook\n\n        _, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)\n\n        # if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet\n        self.unet_offload_hook = hook\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.remove_all_hooks\n    def remove_all_hooks(self):\n        if is_accelerate_available():\n            from accelerate.hooks import remove_hook_from_module\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        for model in [self.text_encoder, self.unet, self.safety_checker]:\n            if model is not None:\n                remove_hook_from_module(model, recurse=True)\n\n        self.unet_offload_hook = None\n        self.text_encoder_offload_hook = None\n        self.final_offload_hook = None\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._text_preprocessing\n    def _text_preprocessing(self, text, clean_caption=False):\n        if clean_caption and not is_bs4_available():\n            logger.warn(BACKENDS_MAPPING[\"bs4\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if clean_caption and not is_ftfy_available():\n            logger.warn(BACKENDS_MAPPING[\"ftfy\"][-1].format(\"Setting `clean_caption=True`\"))\n            logger.warn(\"Setting `clean_caption` to False...\")\n            clean_caption = False\n\n        if not isinstance(text, (tuple, list)):\n            text = [text]\n\n        def process(text: str):\n            if clean_caption:\n                text = self._clean_caption(text)\n                text = self._clean_caption(text)\n            else:\n                text = text.lower().strip()\n            return text\n\n        return [process(t) for t in text]\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._clean_caption\n    def _clean_caption(self, caption):\n        caption = str(caption)\n        caption = ul.unquote_plus(caption)\n        caption = caption.strip().lower()\n        caption = re.sub(\"<person>\", \"person\", caption)\n        # urls:\n        caption = re.sub(\n            r\"\\b((?:https?:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        caption = re.sub(\n            r\"\\b((?:www:(?:\\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\\w/-]*\\b\\/?(?!@)))\",  # noqa\n            \"\",\n            caption,\n        )  # regex for urls\n        # html:\n        caption = BeautifulSoup(caption, features=\"html.parser\").text\n\n        # @<nickname>\n        caption = re.sub(r\"@[\\w\\d]+\\b\", \"\", caption)\n\n        # 31C0—31EF CJK Strokes\n        # 31F0—31FF Katakana Phonetic Extensions\n        # 3200—32FF Enclosed CJK Letters and Months\n        # 3300—33FF CJK Compatibility\n        # 3400—4DBF 
CJK Unified Ideographs Extension A\n        # 4DC0—4DFF Yijing Hexagram Symbols\n        # 4E00—9FFF CJK Unified Ideographs\n        caption = re.sub(r\"[\\u31c0-\\u31ef]+\", \"\", caption)\n        caption = re.sub(r\"[\\u31f0-\\u31ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3200-\\u32ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3300-\\u33ff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u3400-\\u4dbf]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4dc0-\\u4dff]+\", \"\", caption)\n        caption = re.sub(r\"[\\u4e00-\\u9fff]+\", \"\", caption)\n        #######################################################\n\n        # все виды тире / all types of dash --> \"-\"\n        caption = re.sub(\n            r\"[\\u002D\\u058A\\u05BE\\u1400\\u1806\\u2010-\\u2015\\u2E17\\u2E1A\\u2E3A\\u2E3B\\u2E40\\u301C\\u3030\\u30A0\\uFE31\\uFE32\\uFE58\\uFE63\\uFF0D]+\",  # noqa\n            \"-\",\n            caption,\n        )\n\n        # кавычки к одному стандарту\n        caption = re.sub(r\"[`´«»“”¨]\", '\"', caption)\n        caption = re.sub(r\"[‘’]\", \"'\", caption)\n\n        # &quot;\n        caption = re.sub(r\"&quot;?\", \"\", caption)\n        # &amp\n        caption = re.sub(r\"&amp\", \"\", caption)\n\n        # ip adresses:\n        caption = re.sub(r\"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\", \" \", caption)\n\n        # article ids:\n        caption = re.sub(r\"\\d:\\d\\d\\s+$\", \"\", caption)\n\n        # \\n\n        caption = re.sub(r\"\\\\n\", \" \", caption)\n\n        # \"#123\"\n        caption = re.sub(r\"#\\d{1,3}\\b\", \"\", caption)\n        # \"#12345..\"\n        caption = re.sub(r\"#\\d{5,}\\b\", \"\", caption)\n        # \"123456..\"\n        caption = re.sub(r\"\\b\\d{6,}\\b\", \"\", caption)\n        # filenames:\n        caption = re.sub(r\"[\\S]+\\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)\", \"\", caption)\n\n        #\n        caption = re.sub(r\"[\\\"\\']{2,}\", r'\"', caption)  # \"\"\"AUSVERKAUFT\"\"\"\n        caption = re.sub(r\"[\\.]{2,}\", r\" \", caption)  # \"\"\"AUSVERKAUFT\"\"\"\n\n        caption = re.sub(self.bad_punct_regex, r\" \", caption)  # ***AUSVERKAUFT***, #AUSVERKAUFT\n        caption = re.sub(r\"\\s+\\.\\s+\", r\" \", caption)  # \" . 
\"\n\n        # this-is-my-cute-cat / this_is_my_cute_cat\n        regex2 = re.compile(r\"(?:\\-|\\_)\")\n        if len(re.findall(regex2, caption)) > 3:\n            caption = re.sub(regex2, \" \", caption)\n\n        caption = ftfy.fix_text(caption)\n        caption = html.unescape(html.unescape(caption))\n\n        caption = re.sub(r\"\\b[a-zA-Z]{1,3}\\d{3,15}\\b\", \"\", caption)  # jc6640\n        caption = re.sub(r\"\\b[a-zA-Z]+\\d+[a-zA-Z]+\\b\", \"\", caption)  # jc6640vc\n        caption = re.sub(r\"\\b\\d+[a-zA-Z]+\\d+\\b\", \"\", caption)  # 6640vc231\n\n        caption = re.sub(r\"(worldwide\\s+)?(free\\s+)?shipping\", \"\", caption)\n        caption = re.sub(r\"(free\\s)?download(\\sfree)?\", \"\", caption)\n        caption = re.sub(r\"\\bclick\\b\\s(?:for|on)\\s\\w+\", \"\", caption)\n        caption = re.sub(r\"\\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\\simage[s]?)?\", \"\", caption)\n        caption = re.sub(r\"\\bpage\\s+\\d+\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\d*[a-zA-Z]+\\d+[a-zA-Z]+\\d+[a-zA-Z\\d]*\\b\", r\" \", caption)  # j2d1a2a...\n\n        caption = re.sub(r\"\\b\\d+\\.?\\d*[xх×]\\d+\\.?\\d*\\b\", \"\", caption)\n\n        caption = re.sub(r\"\\b\\s+\\:\\s+\", r\": \", caption)\n        caption = re.sub(r\"(\\D[,\\./])\\b\", r\"\\1 \", caption)\n        caption = re.sub(r\"\\s+\", \" \", caption)\n\n        caption.strip()\n\n        caption = re.sub(r\"^[\\\"\\']([\\w\\W]+)[\\\"\\']$\", r\"\\1\", caption)\n        caption = re.sub(r\"^[\\'\\_,\\-\\:;]\", r\"\", caption)\n        caption = re.sub(r\"[\\'\\_,\\-\\:\\-\\+]$\", r\"\", caption)\n        caption = re.sub(r\"^\\.\\S+$\", \"\", caption)\n\n        return caption.strip()\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.encode_prompt\n    def encode_prompt(\n        self,\n        prompt,\n        do_classifier_free_guidance=True,\n        num_images_per_prompt=1,\n        device=None,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        clean_caption: bool = False,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`, *optional*):\n                torch device to place the resulting embeddings on\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. If not defined, one has to pass `negative_prompt_embeds`. instead.\n                Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if device is None:\n            device = self._execution_device\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        # while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF\n        max_length = 77\n\n        if prompt_embeds is None:\n            prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {max_length} tokens: {removed_text}\"\n                )\n\n            attention_mask = text_inputs.attention_mask.to(device)\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        if self.text_encoder is not None:\n            dtype = self.text_encoder.dtype\n        elif self.unet is not None:\n            dtype = self.unet.dtype\n        else:\n            dtype = None\n\n        prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_attention_mask=True,\n                add_special_tokens=True,\n                return_tensors=\"pt\",\n            )\n            attention_mask = uncond_input.attention_mask.to(device)\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n        else:\n            negative_prompt_embeds = None\n\n        return prompt_embeds, negative_prompt_embeds\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            
extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        batch_size,\n        noise_level,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        if noise_level < 0 or noise_level >= self.image_noising_scheduler.config.num_train_timesteps:\n            raise ValueError(\n                f\"`noise_level`: {noise_level} must be a valid timestep in `self.noising_scheduler`, [0, {self.image_noising_scheduler.config.num_train_timesteps})\"\n            )\n\n        if isinstance(image, list):\n            check_image_type = image[0]\n        else:\n            check_image_type = image\n\n        if (\n            not isinstance(check_image_type, torch.Tensor)\n            and not isinstance(check_image_type, PIL.Image.Image)\n            and not isinstance(check_image_type, np.ndarray)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] 
but is\"\n                f\" {type(check_image_type)}\"\n            )\n\n        if isinstance(image, list):\n            image_batch_size = len(image)\n        elif isinstance(image, torch.Tensor):\n            image_batch_size = image.shape[0]\n        elif isinstance(image, PIL.Image.Image):\n            image_batch_size = 1\n        elif isinstance(image, np.ndarray):\n            image_batch_size = image.shape[0]\n        else:\n            assert False\n\n        if batch_size != image_batch_size:\n            raise ValueError(f\"image batch size: {image_batch_size} must be same as prompt batch size {batch_size}\")\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_intermediate_images\n    def prepare_intermediate_images(self, batch_size, num_channels, height, width, dtype, device, generator):\n        shape = (batch_size, num_channels, height, width)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        intermediate_images = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        intermediate_images = intermediate_images * self.scheduler.init_noise_sigma\n        return intermediate_images\n\n    def preprocess_image(self, image, num_images_per_prompt, device):\n        if not isinstance(image, torch.Tensor) and not isinstance(image, list):\n            image = [image]\n\n        if isinstance(image[0], PIL.Image.Image):\n            image = [np.array(i).astype(np.float32) / 255.0 for i in image]\n\n            image = np.stack(image, axis=0)  # to np\n            image = torch.from_numpy(image.transpose(0, 3, 1, 2))\n        elif isinstance(image[0], np.ndarray):\n            image = np.stack(image, axis=0)  # to np\n            if image.ndim == 5:\n                image = image[0]\n\n            image = torch.from_numpy(image.transpose(0, 3, 1, 2))\n        elif isinstance(image, list) and isinstance(image[0], torch.Tensor):\n            dims = image[0].ndim\n\n            if dims == 3:\n                image = torch.stack(image, dim=0)\n            elif dims == 4:\n                image = torch.concat(image, dim=0)\n            else:\n                raise ValueError(f\"Image must have 3 or 4 dimensions, instead got {dims}\")\n\n        image = image.to(device=device, dtype=self.unet.dtype)\n\n        image = image.repeat_interleave(num_images_per_prompt, dim=0)\n\n        return image\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: int = None,\n        width: int = None,\n        image: Union[PIL.Image.Image, np.ndarray, torch.FloatTensor] = None,\n        num_inference_steps: int = 50,\n        timesteps: List[int] = None,\n        guidance_scale: float = 4.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = 
None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        noise_level: int = 250,\n        clean_caption: bool = True,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size):\n                The width in pixels of the generated image.\n            image (`PIL.Image.Image`, `np.ndarray`, `torch.FloatTensor`):\n                The image to be upscaled.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            timesteps (`List[int]`, *optional*):\n                Custom timesteps to use for the denoising process. If not defined, equal spaced `num_inference_steps`\n                timesteps are used. Must be in descending order.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            noise_level (`int`, *optional*, defaults to 250):\n                The amount of noise to add to the upscaled image. Must be in the range `[0, 1000)`\n            clean_caption (`bool`, *optional*, defaults to `True`):\n                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to\n                be installed. If the dependencies are not installed, the embeddings will be created from the raw\n                prompt.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images, and the second element is a list\n            of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\" (nsfw)\n            or watermarked content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        self.check_inputs(\n            prompt,\n            image,\n            batch_size,\n            noise_level,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n\n        height = height or self.unet.config.sample_size\n        width = width or self.unet.config.sample_size\n\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. 
Encode input prompt\n        prompt_embeds, negative_prompt_embeds = self.encode_prompt(\n            prompt,\n            do_classifier_free_guidance,\n            num_images_per_prompt=num_images_per_prompt,\n            device=device,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            clean_caption=clean_caption,\n        )\n\n        if do_classifier_free_guidance:\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        # 4. Prepare timesteps\n        if timesteps is not None:\n            self.scheduler.set_timesteps(timesteps=timesteps, device=device)\n            timesteps = self.scheduler.timesteps\n            num_inference_steps = len(timesteps)\n        else:\n            self.scheduler.set_timesteps(num_inference_steps, device=device)\n            timesteps = self.scheduler.timesteps\n\n        # 5. Prepare intermediate images\n        num_channels = self.unet.config.in_channels // 2\n        intermediate_images = self.prepare_intermediate_images(\n            batch_size * num_images_per_prompt,\n            num_channels,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. Prepare upscaled image and noise level\n        image = self.preprocess_image(image, num_images_per_prompt, device)\n        upscaled = F.interpolate(image, (height, width), mode=\"bilinear\", align_corners=True)\n\n        noise_level = torch.tensor([noise_level] * upscaled.shape[0], device=upscaled.device)\n        noise = randn_tensor(upscaled.shape, generator=generator, device=upscaled.device, dtype=upscaled.dtype)\n        upscaled = self.image_noising_scheduler.add_noise(upscaled, noise, timesteps=noise_level)\n\n        if do_classifier_free_guidance:\n            noise_level = torch.cat([noise_level] * 2)\n\n        # HACK: see comment in `enable_model_cpu_offload`\n        if hasattr(self, \"text_encoder_offload_hook\") and self.text_encoder_offload_hook is not None:\n            self.text_encoder_offload_hook.offload()\n\n        # 8. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                model_input = torch.cat([intermediate_images, upscaled], dim=1)\n\n                model_input = torch.cat([model_input] * 2) if do_classifier_free_guidance else model_input\n                model_input = self.scheduler.scale_model_input(model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    class_labels=noise_level,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1] // 2, dim=1)\n                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1] // 2, dim=1)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                intermediate_images = self.scheduler.step(\n                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False\n                )[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, intermediate_images)\n\n        image = intermediate_images\n\n        if output_type == \"pil\":\n            # 9. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 10. Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            # 11. Convert to PIL\n            image = self.numpy_to_pil(image)\n\n            # 12. Apply watermark\n            if self.watermarker is not None:\n                self.watermarker.apply_watermark(image, self.unet.config.sample_size)\n        elif output_type == \"pt\":\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n        else:\n            # 9. Post-processing\n            image = (image / 2 + 0.5).clamp(0, 1)\n            image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n            # 10. 
Run safety checker\n            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, nsfw_detected, watermark_detected)\n\n        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)\n"
  },
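The super-resolution pipeline above only defines its components and `__call__`; it ships no stand-alone usage snippet at this point in the file. A minimal, hypothetical sketch of driving the stage-2 upscaler directly follows — the checkpoint id, the fp16 variant, the input image, and the prompt are assumptions and not taken from this repository.

```python
# Hypothetical usage sketch for the IF super-resolution pipeline defined above.
# Checkpoint id, fp16 variant and input image are assumptions, not repo content.
import torch
from PIL import Image

from diffusers import IFSuperResolutionPipeline

pipe = IFSuperResolutionPipeline.from_pretrained(
    "DeepFloyd/IF-II-L-v1.0", variant="fp16", torch_dtype=torch.float16
)
pipe.enable_model_cpu_offload()  # installs the offload hooks set up in the code above

low_res = Image.open("stage1_output.png")  # small image produced by the base (stage 1) IF pipeline
result = pipe(
    prompt="a photo of a corgi wearing a top hat",
    image=low_res,
    noise_level=250,            # validated in check_inputs against image_noising_scheduler
    num_inference_steps=50,
    generator=torch.manual_seed(0),
)
result.images[0].save("stage2_output.png")
```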
  {
    "path": "diffusers/pipelines/deepfloyd_if/safety_checker.py",
    "content": "import numpy as np\nimport torch\nimport torch.nn as nn\nfrom transformers import CLIPConfig, CLIPVisionModelWithProjection, PreTrainedModel\n\nfrom ...utils import logging\n\n\nlogger = logging.get_logger(__name__)\n\n\nclass IFSafetyChecker(PreTrainedModel):\n    config_class = CLIPConfig\n\n    _no_split_modules = [\"CLIPEncoderLayer\"]\n\n    def __init__(self, config: CLIPConfig):\n        super().__init__(config)\n\n        self.vision_model = CLIPVisionModelWithProjection(config.vision_config)\n\n        self.p_head = nn.Linear(config.vision_config.projection_dim, 1)\n        self.w_head = nn.Linear(config.vision_config.projection_dim, 1)\n\n    @torch.no_grad()\n    def forward(self, clip_input, images, p_threshold=0.5, w_threshold=0.5):\n        image_embeds = self.vision_model(clip_input)[0]\n\n        nsfw_detected = self.p_head(image_embeds)\n        nsfw_detected = nsfw_detected.flatten()\n        nsfw_detected = nsfw_detected > p_threshold\n        nsfw_detected = nsfw_detected.tolist()\n\n        if any(nsfw_detected):\n            logger.warning(\n                \"Potential NSFW content was detected in one or more images. A black image will be returned instead.\"\n                \" Try again with a different prompt and/or seed.\"\n            )\n\n        for idx, nsfw_detected_ in enumerate(nsfw_detected):\n            if nsfw_detected_:\n                images[idx] = np.zeros(images[idx].shape)\n\n        watermark_detected = self.w_head(image_embeds)\n        watermark_detected = watermark_detected.flatten()\n        watermark_detected = watermark_detected > w_threshold\n        watermark_detected = watermark_detected.tolist()\n\n        if any(watermark_detected):\n            logger.warning(\n                \"Potential watermarked content was detected in one or more images. A black image will be returned instead.\"\n                \" Try again with a different prompt and/or seed.\"\n            )\n\n        for idx, watermark_detected_ in enumerate(watermark_detected):\n            if watermark_detected_:\n                images[idx] = np.zeros(images[idx].shape)\n\n        return images, nsfw_detected, watermark_detected\n"
  },
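`IFSafetyChecker` is normally invoked indirectly through `run_safety_checker` in the pipelines above, which hands it CLIP pixel values plus the decoded image batch. A hypothetical stand-alone sketch is below; the checkpoint id, the `safety_checker` subfolder, and the default `CLIPImageProcessor` preprocessing are assumptions.

```python
# Hypothetical stand-alone use of IFSafetyChecker; checkpoint id / subfolder and
# the default CLIPImageProcessor settings are assumptions.
import numpy as np
from PIL import Image
from transformers import CLIPImageProcessor

from diffusers.pipelines.deepfloyd_if.safety_checker import IFSafetyChecker

checker = IFSafetyChecker.from_pretrained("DeepFloyd/IF-I-XL-v1.0", subfolder="safety_checker")
feature_extractor = CLIPImageProcessor()  # stand-in for the pipeline's feature_extractor

images = np.random.rand(2, 64, 64, 3).astype(np.float32)  # float batch in [0, 1], NHWC
pil_images = [Image.fromarray((img * 255).astype(np.uint8)) for img in images]
clip_input = feature_extractor(images=pil_images, return_tensors="pt").pixel_values

images, nsfw_detected, watermark_detected = checker(
    images=images, clip_input=clip_input, p_threshold=0.5, w_threshold=0.5
)
print(nsfw_detected, watermark_detected)  # per-image lists of bools; flagged images are zeroed out
```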
  {
    "path": "diffusers/pipelines/deepfloyd_if/timesteps.py",
    "content": "fast27_timesteps = [\n    999,\n    800,\n    799,\n    600,\n    599,\n    500,\n    400,\n    399,\n    377,\n    355,\n    333,\n    311,\n    288,\n    266,\n    244,\n    222,\n    200,\n    199,\n    177,\n    155,\n    133,\n    111,\n    88,\n    66,\n    44,\n    22,\n    0,\n]\n\nsmart27_timesteps = [\n    999,\n    976,\n    952,\n    928,\n    905,\n    882,\n    858,\n    857,\n    810,\n    762,\n    715,\n    714,\n    572,\n    429,\n    428,\n    286,\n    285,\n    238,\n    190,\n    143,\n    142,\n    118,\n    95,\n    71,\n    47,\n    24,\n    0,\n]\n\nsmart50_timesteps = [\n    999,\n    988,\n    977,\n    966,\n    955,\n    944,\n    933,\n    922,\n    911,\n    900,\n    899,\n    879,\n    859,\n    840,\n    820,\n    800,\n    799,\n    766,\n    733,\n    700,\n    699,\n    650,\n    600,\n    599,\n    500,\n    499,\n    400,\n    399,\n    350,\n    300,\n    299,\n    266,\n    233,\n    200,\n    199,\n    179,\n    159,\n    140,\n    120,\n    100,\n    99,\n    88,\n    77,\n    66,\n    55,\n    44,\n    33,\n    22,\n    11,\n    0,\n]\n\nsmart100_timesteps = [\n    999,\n    995,\n    992,\n    989,\n    985,\n    981,\n    978,\n    975,\n    971,\n    967,\n    964,\n    961,\n    957,\n    956,\n    951,\n    947,\n    942,\n    937,\n    933,\n    928,\n    923,\n    919,\n    914,\n    913,\n    908,\n    903,\n    897,\n    892,\n    887,\n    881,\n    876,\n    871,\n    870,\n    864,\n    858,\n    852,\n    846,\n    840,\n    834,\n    828,\n    827,\n    820,\n    813,\n    806,\n    799,\n    792,\n    785,\n    784,\n    777,\n    770,\n    763,\n    756,\n    749,\n    742,\n    741,\n    733,\n    724,\n    716,\n    707,\n    699,\n    698,\n    688,\n    677,\n    666,\n    656,\n    655,\n    645,\n    634,\n    623,\n    613,\n    612,\n    598,\n    584,\n    570,\n    569,\n    555,\n    541,\n    527,\n    526,\n    505,\n    484,\n    483,\n    462,\n    440,\n    439,\n    396,\n    395,\n    352,\n    351,\n    308,\n    307,\n    264,\n    263,\n    220,\n    219,\n    176,\n    132,\n    88,\n    44,\n    0,\n]\n\nsmart185_timesteps = [\n    999,\n    997,\n    995,\n    992,\n    990,\n    988,\n    986,\n    984,\n    981,\n    979,\n    977,\n    975,\n    972,\n    970,\n    968,\n    966,\n    964,\n    961,\n    959,\n    957,\n    956,\n    954,\n    951,\n    949,\n    946,\n    944,\n    941,\n    939,\n    936,\n    934,\n    931,\n    929,\n    926,\n    924,\n    921,\n    919,\n    916,\n    914,\n    913,\n    910,\n    907,\n    905,\n    902,\n    899,\n    896,\n    893,\n    891,\n    888,\n    885,\n    882,\n    879,\n    877,\n    874,\n    871,\n    870,\n    867,\n    864,\n    861,\n    858,\n    855,\n    852,\n    849,\n    846,\n    843,\n    840,\n    837,\n    834,\n    831,\n    828,\n    827,\n    824,\n    821,\n    817,\n    814,\n    811,\n    808,\n    804,\n    801,\n    798,\n    795,\n    791,\n    788,\n    785,\n    784,\n    780,\n    777,\n    774,\n    770,\n    766,\n    763,\n    760,\n    756,\n    752,\n    749,\n    746,\n    742,\n    741,\n    737,\n    733,\n    730,\n    726,\n    722,\n    718,\n    714,\n    710,\n    707,\n    703,\n    699,\n    698,\n    694,\n    690,\n    685,\n    681,\n    677,\n    673,\n    669,\n    664,\n    660,\n    656,\n    655,\n    650,\n    646,\n    641,\n    636,\n    632,\n    627,\n    622,\n    618,\n    613,\n    612,\n    607,\n    602,\n    596,\n    591,\n    586,\n    580,\n    575,\n    570,\n    
569,\n    563,\n    557,\n    551,\n    545,\n    539,\n    533,\n    527,\n    526,\n    519,\n    512,\n    505,\n    498,\n    491,\n    484,\n    483,\n    474,\n    466,\n    457,\n    449,\n    440,\n    439,\n    428,\n    418,\n    407,\n    396,\n    395,\n    381,\n    366,\n    352,\n    351,\n    330,\n    308,\n    307,\n    286,\n    264,\n    263,\n    242,\n    220,\n    219,\n    176,\n    175,\n    132,\n    131,\n    88,\n    44,\n    0,\n]\n\nsuper27_timesteps = [\n    999,\n    991,\n    982,\n    974,\n    966,\n    958,\n    950,\n    941,\n    933,\n    925,\n    916,\n    908,\n    900,\n    899,\n    874,\n    850,\n    825,\n    800,\n    799,\n    700,\n    600,\n    500,\n    400,\n    300,\n    200,\n    100,\n    0,\n]\n\nsuper40_timesteps = [\n    999,\n    992,\n    985,\n    978,\n    971,\n    964,\n    957,\n    949,\n    942,\n    935,\n    928,\n    921,\n    914,\n    907,\n    900,\n    899,\n    879,\n    859,\n    840,\n    820,\n    800,\n    799,\n    766,\n    733,\n    700,\n    699,\n    650,\n    600,\n    599,\n    500,\n    499,\n    400,\n    399,\n    300,\n    299,\n    200,\n    199,\n    100,\n    99,\n    0,\n]\n\nsuper100_timesteps = [\n    999,\n    996,\n    992,\n    989,\n    985,\n    982,\n    979,\n    975,\n    972,\n    968,\n    965,\n    961,\n    958,\n    955,\n    951,\n    948,\n    944,\n    941,\n    938,\n    934,\n    931,\n    927,\n    924,\n    920,\n    917,\n    914,\n    910,\n    907,\n    903,\n    900,\n    899,\n    891,\n    884,\n    876,\n    869,\n    861,\n    853,\n    846,\n    838,\n    830,\n    823,\n    815,\n    808,\n    800,\n    799,\n    788,\n    777,\n    766,\n    755,\n    744,\n    733,\n    722,\n    711,\n    700,\n    699,\n    688,\n    677,\n    666,\n    655,\n    644,\n    633,\n    622,\n    611,\n    600,\n    599,\n    585,\n    571,\n    557,\n    542,\n    528,\n    514,\n    500,\n    499,\n    485,\n    471,\n    457,\n    442,\n    428,\n    414,\n    400,\n    399,\n    379,\n    359,\n    340,\n    320,\n    300,\n    299,\n    279,\n    259,\n    240,\n    220,\n    200,\n    199,\n    166,\n    133,\n    100,\n    99,\n    66,\n    33,\n    0,\n]\n"
  },
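These hand-tuned schedules are intended for the `timesteps` argument of the IF pipelines' `__call__`, which (as shown in the pipeline code above) forwards the list to `scheduler.set_timesteps(timesteps=...)` and overrides `num_inference_steps`. A hypothetical sketch, assuming the base IF checkpoint id:

```python
# Hypothetical sketch: using one of the hand-tuned timestep schedules above.
# The checkpoint id is an assumption; `timesteps` must be descending ints.
import torch

from diffusers import IFPipeline
from diffusers.pipelines.deepfloyd_if.timesteps import fast27_timesteps

pipe = IFPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
pipe.enable_model_cpu_offload()

out = pipe(
    prompt="an astronaut riding a horse",
    timesteps=fast27_timesteps,   # 27 denoising steps instead of the default 50
    generator=torch.manual_seed(0),
)
out.images[0].save("if_stage1.png")
```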
  {
    "path": "diffusers/pipelines/deepfloyd_if/watermark.py",
    "content": "from typing import List\n\nimport PIL\nimport torch\nfrom PIL import Image\n\nfrom ...configuration_utils import ConfigMixin\nfrom ...models.modeling_utils import ModelMixin\nfrom ...utils import PIL_INTERPOLATION\n\n\nclass IFWatermarker(ModelMixin, ConfigMixin):\n    def __init__(self):\n        super().__init__()\n\n        self.register_buffer(\"watermark_image\", torch.zeros((62, 62, 4)))\n        self.watermark_image_as_pil = None\n\n    def apply_watermark(self, images: List[PIL.Image.Image], sample_size=None):\n        # copied from https://github.com/deep-floyd/IF/blob/b77482e36ca2031cb94dbca1001fc1e6400bf4ab/deepfloyd_if/modules/base.py#L287\n\n        h = images[0].height\n        w = images[0].width\n\n        sample_size = sample_size or h\n\n        coef = min(h / sample_size, w / sample_size)\n        img_h, img_w = (int(h / coef), int(w / coef)) if coef < 1 else (h, w)\n\n        S1, S2 = 1024**2, img_w * img_h\n        K = (S2 / S1) ** 0.5\n        wm_size, wm_x, wm_y = int(K * 62), img_w - int(14 * K), img_h - int(14 * K)\n\n        if self.watermark_image_as_pil is None:\n            watermark_image = self.watermark_image.to(torch.uint8).cpu().numpy()\n            watermark_image = Image.fromarray(watermark_image, mode=\"RGBA\")\n            self.watermark_image_as_pil = watermark_image\n\n        wm_img = self.watermark_image_as_pil.resize(\n            (wm_size, wm_size), PIL_INTERPOLATION[\"bicubic\"], reducing_gap=None\n        )\n\n        for pil_img in images:\n            pil_img.paste(wm_img, box=(wm_x - wm_size, wm_y - wm_size, wm_x, wm_y), mask=wm_img.split()[-1])\n\n        return images\n"
  },
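`apply_watermark` pastes the 62x62 logo into the bottom-right corner, mutating the PIL images in place (and also returning the list); the pipelines call it as `self.watermarker.apply_watermark(image, self.unet.config.sample_size)`. A minimal hypothetical sketch on an arbitrary image list — the checkpoint id, the `watermarker` subfolder, and the input file are assumptions.

```python
# Hypothetical sketch of IFWatermarker.apply_watermark; checkpoint id, subfolder
# and input image are assumptions. sample_size mirrors what the pipelines pass.
from PIL import Image

from diffusers.pipelines.deepfloyd_if.watermark import IFWatermarker

watermarker = IFWatermarker.from_pretrained("DeepFloyd/IF-I-XL-v1.0", subfolder="watermarker")

images = [Image.open("generated.png").convert("RGB")]
watermarker.apply_watermark(images, sample_size=64)  # pastes the logo in the bottom-right corner
images[0].save("generated_watermarked.png")
```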
  {
    "path": "diffusers/pipelines/dit/__init__.py",
    "content": "from .pipeline_dit import DiTPipeline\n"
  },
  {
    "path": "diffusers/pipelines/dit/pipeline_dit.py",
    "content": "# Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)\n# William Peebles and Saining Xie\n#\n# Copyright (c) 2021 OpenAI\n# MIT License\n#\n# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Dict, List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...models import AutoencoderKL, Transformer2DModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass DiTPipeline(DiffusionPipeline):\n    r\"\"\"\n    This pipeline inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        transformer ([`Transformer2DModel`]):\n            Class conditioned Transformer in Diffusion model to denoise the encoded image latents.\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        scheduler ([`DDIMScheduler`]):\n            A scheduler to be used in combination with `dit` to denoise the encoded image latents.\n    \"\"\"\n\n    def __init__(\n        self,\n        transformer: Transformer2DModel,\n        vae: AutoencoderKL,\n        scheduler: KarrasDiffusionSchedulers,\n        id2label: Optional[Dict[int, str]] = None,\n    ):\n        super().__init__()\n        self.register_modules(transformer=transformer, vae=vae, scheduler=scheduler)\n\n        # create a imagenet -> id dictionary for easier use\n        self.labels = {}\n        if id2label is not None:\n            for key, value in id2label.items():\n                for label in value.split(\",\"):\n                    self.labels[label.lstrip().rstrip()] = int(key)\n            self.labels = dict(sorted(self.labels.items()))\n\n    def get_label_ids(self, label: Union[str, List[str]]) -> List[int]:\n        r\"\"\"\n\n        Map label strings, *e.g.* from ImageNet, to corresponding class ids.\n\n        Parameters:\n            label (`str` or `dict` of `str`): label strings to be mapped to class ids.\n\n        Returns:\n            `list` of `int`: Class ids to be processed by pipeline.\n        \"\"\"\n\n        if not isinstance(label, list):\n            label = list(label)\n\n        for l in label:\n            if l not in self.labels:\n                raise ValueError(\n                    f\"{l} does not exist. 
Please make sure to select one of the following labels: \\n {self.labels}.\"\n                )\n\n        return [self.labels[l] for l in label]\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        class_labels: List[int],\n        guidance_scale: float = 4.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        num_inference_steps: int = 50,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            class_labels (List[int]):\n                List of imagenet class labels for the images to be generated.\n            guidance_scale (`float`, *optional*, defaults to 4.0):\n                Scale of the guidance signal.\n            generator (`torch.Generator`, *optional*):\n                A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation\n                deterministic.\n            num_inference_steps (`int`, *optional*, defaults to 250):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`ImagePipelineOutput`] instead of a plain tuple.\n        \"\"\"\n\n        batch_size = len(class_labels)\n        latent_size = self.transformer.config.sample_size\n        latent_channels = self.transformer.config.in_channels\n\n        latents = randn_tensor(\n            shape=(batch_size, latent_channels, latent_size, latent_size),\n            generator=generator,\n            device=self.device,\n            dtype=self.transformer.dtype,\n        )\n        latent_model_input = torch.cat([latents] * 2) if guidance_scale > 1 else latents\n\n        class_labels = torch.tensor(class_labels, device=self.device).reshape(-1)\n        class_null = torch.tensor([1000] * batch_size, device=self.device)\n        class_labels_input = torch.cat([class_labels, class_null], 0) if guidance_scale > 1 else class_labels\n\n        # set step values\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            if guidance_scale > 1:\n                half = latent_model_input[: len(latent_model_input) // 2]\n                latent_model_input = torch.cat([half, half], dim=0)\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            timesteps = t\n            if not torch.is_tensor(timesteps):\n                # TODO: this requires sync between CPU and GPU. 
So try to pass timesteps as tensors if you can\n                # This would be a good case for the `match` statement (Python 3.10+)\n                is_mps = latent_model_input.device.type == \"mps\"\n                if isinstance(timesteps, float):\n                    dtype = torch.float32 if is_mps else torch.float64\n                else:\n                    dtype = torch.int32 if is_mps else torch.int64\n                timesteps = torch.tensor([timesteps], dtype=dtype, device=latent_model_input.device)\n            elif len(timesteps.shape) == 0:\n                timesteps = timesteps[None].to(latent_model_input.device)\n            # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n            timesteps = timesteps.expand(latent_model_input.shape[0])\n            # predict noise model_output\n            noise_pred = self.transformer(\n                latent_model_input, timestep=timesteps, class_labels=class_labels_input\n            ).sample\n\n            # perform guidance\n            if guidance_scale > 1:\n                eps, rest = noise_pred[:, :latent_channels], noise_pred[:, latent_channels:]\n                cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)\n\n                half_eps = uncond_eps + guidance_scale * (cond_eps - uncond_eps)\n                eps = torch.cat([half_eps, half_eps], dim=0)\n\n                noise_pred = torch.cat([eps, rest], dim=1)\n\n            # learned sigma\n            if self.transformer.config.out_channels // 2 == latent_channels:\n                model_output, _ = torch.split(noise_pred, latent_channels, dim=1)\n            else:\n                model_output = noise_pred\n\n            # compute previous image: x_t -> x_t-1\n            latent_model_input = self.scheduler.step(model_output, t, latent_model_input).prev_sample\n\n        if guidance_scale > 1:\n            latents, _ = latent_model_input.chunk(2, dim=0)\n        else:\n            latents = latent_model_input\n\n        latents = 1 / self.vae.config.scaling_factor * latents\n        samples = self.vae.decode(latents).sample\n\n        samples = (samples / 2 + 0.5).clamp(0, 1)\n\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        samples = samples.cpu().permute(0, 2, 3, 1).float().numpy()\n\n        if output_type == \"pil\":\n            samples = self.numpy_to_pil(samples)\n\n        if not return_dict:\n            return (samples,)\n\n        return ImagePipelineOutput(images=samples)\n"
  },
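`DiTPipeline` conditions on ImageNet class ids rather than text prompts; `get_label_ids` maps human-readable labels to ids via the `id2label` table registered on the config (note that it expects a list of strings, not a bare string). A hypothetical end-to-end sketch, with the checkpoint id as an assumption:

```python
# Hypothetical usage of the DiTPipeline defined above; the checkpoint id is an assumption.
import torch

from diffusers import DiTPipeline

pipe = DiTPipeline.from_pretrained("facebook/DiT-XL-2-256", torch_dtype=torch.float16).to("cuda")

class_ids = pipe.get_label_ids(["white shark", "umbrella"])  # list of labels -> ImageNet class ids
out = pipe(
    class_labels=class_ids,
    guidance_scale=4.0,          # >1 enables the classifier-free guidance branch above
    num_inference_steps=25,
    generator=torch.manual_seed(0),
)
out.images[0].save("dit_white_shark.png")
```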
  {
    "path": "diffusers/pipelines/latent_diffusion/__init__.py",
    "content": "from ...utils import is_transformers_available\nfrom .pipeline_latent_diffusion_superresolution import LDMSuperResolutionPipeline\n\n\nif is_transformers_available():\n    from .pipeline_latent_diffusion import LDMBertModel, LDMTextToImagePipeline\n"
  },
  {
    "path": "diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\nimport torch.nn as nn\nimport torch.utils.checkpoint\nfrom transformers import PretrainedConfig, PreTrainedModel, PreTrainedTokenizer\nfrom transformers.activations import ACT2FN\nfrom transformers.modeling_outputs import BaseModelOutput\nfrom transformers.utils import logging\n\nfrom ...models import AutoencoderKL, UNet2DConditionModel, UNet2DModel, VQModel\nfrom ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass LDMTextToImagePipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqvae ([`VQModel`]):\n            Vector-quantized (VQ) Model to encode and decode images to and from latent representations.\n        bert ([`LDMBertModel`]):\n            Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture.\n        tokenizer (`transformers.BertTokenizer`):\n            Tokenizer of class\n            [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n\n    def __init__(\n        self,\n        vqvae: Union[VQModel, AutoencoderKL],\n        bert: PreTrainedModel,\n        tokenizer: PreTrainedTokenizer,\n        unet: Union[UNet2DModel, UNet2DConditionModel],\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n    ):\n        super().__init__()\n        self.register_modules(vqvae=vqvae, bert=bert, tokenizer=tokenizer, unet=unet, scheduler=scheduler)\n        self.vae_scale_factor = 2 ** (len(self.vqvae.config.block_out_channels) - 1)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 1.0,\n        eta: Optional[float] = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        **kwargs,\n    ) -> Union[Tuple, ImagePipelineOutput]:\n        r\"\"\"\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 1.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt` at\n                the, usually at the expense of lower image quality.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        if isinstance(prompt, str):\n            batch_size = 1\n        elif isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        # get unconditional embeddings for classifier free guidance\n        if guidance_scale != 1.0:\n            uncond_input = self.tokenizer(\n                [\"\"] * batch_size, padding=\"max_length\", max_length=77, truncation=True, return_tensors=\"pt\"\n            )\n            negative_prompt_embeds = self.bert(uncond_input.input_ids.to(self.device))[0]\n\n        # get prompt text embeddings\n        text_input = self.tokenizer(prompt, padding=\"max_length\", max_length=77, truncation=True, return_tensors=\"pt\")\n        prompt_embeds = self.bert(text_input.input_ids.to(self.device))[0]\n\n        # get the initial random noise unless the user supplied it\n        latents_shape = (batch_size, self.unet.config.in_channels, height // 8, width // 8)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(latents_shape, generator=generator, device=self.device, dtype=prompt_embeds.dtype)\n        else:\n            if latents.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n        latents = latents.to(self.device)\n\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n\n        extra_kwargs = {}\n        if accepts_eta:\n            extra_kwargs[\"eta\"] = eta\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            if guidance_scale == 1.0:\n                # guidance_scale of 1 means no guidance\n                latents_input = latents\n                context = prompt_embeds\n            else:\n                # For classifier free guidance, we need to do two forward passes.\n                # Here we concatenate the unconditional and text embeddings into a single batch\n                # to avoid doing two forward passes\n                latents_input = torch.cat([latents] * 2)\n                context = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n            # predict the noise residual\n            noise_pred = self.unet(latents_input, t, encoder_hidden_states=context).sample\n            # perform guidance\n            if guidance_scale != 1.0:\n                noise_pred_uncond, noise_prediction_text = noise_pred.chunk(2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample\n\n        # scale and decode the image latents with vae\n        latents = 1 / self.vqvae.config.scaling_factor * latents\n        image = self.vqvae.decode(latents).sample\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n\n\n################################################################################\n# Code for the text transformer model\n################################################################################\n\"\"\" PyTorch LDMBERT model.\"\"\"\n\n\nlogger = logging.get_logger(__name__)\n\nLDMBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [\n    \"ldm-bert\",\n    # See all LDMBert models at https://huggingface.co/models?filter=ldmbert\n]\n\n\nLDMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {\n    \"ldm-bert\": \"https://huggingface.co/valhalla/ldm-bert/blob/main/config.json\",\n}\n\n\n\"\"\" LDMBERT model configuration\"\"\"\n\n\nclass LDMBertConfig(PretrainedConfig):\n    model_type = \"ldmbert\"\n    keys_to_ignore_at_inference = [\"past_key_values\"]\n    attribute_map = {\"num_attention_heads\": \"encoder_attention_heads\", \"hidden_size\": \"d_model\"}\n\n    def __init__(\n        self,\n        vocab_size=30522,\n        max_position_embeddings=77,\n        encoder_layers=32,\n        encoder_ffn_dim=5120,\n        encoder_attention_heads=8,\n        head_dim=64,\n        
encoder_layerdrop=0.0,\n        activation_function=\"gelu\",\n        d_model=1280,\n        dropout=0.1,\n        attention_dropout=0.0,\n        activation_dropout=0.0,\n        init_std=0.02,\n        classifier_dropout=0.0,\n        scale_embedding=False,\n        use_cache=True,\n        pad_token_id=0,\n        **kwargs,\n    ):\n        self.vocab_size = vocab_size\n        self.max_position_embeddings = max_position_embeddings\n        self.d_model = d_model\n        self.encoder_ffn_dim = encoder_ffn_dim\n        self.encoder_layers = encoder_layers\n        self.encoder_attention_heads = encoder_attention_heads\n        self.head_dim = head_dim\n        self.dropout = dropout\n        self.attention_dropout = attention_dropout\n        self.activation_dropout = activation_dropout\n        self.activation_function = activation_function\n        self.init_std = init_std\n        self.encoder_layerdrop = encoder_layerdrop\n        self.classifier_dropout = classifier_dropout\n        self.use_cache = use_cache\n        self.num_hidden_layers = encoder_layers\n        self.scale_embedding = scale_embedding  # scale factor will be sqrt(d_model) if True\n\n        super().__init__(pad_token_id=pad_token_id, **kwargs)\n\n\ndef _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):\n    \"\"\"\n    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.\n    \"\"\"\n    bsz, src_len = mask.size()\n    tgt_len = tgt_len if tgt_len is not None else src_len\n\n    expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)\n\n    inverted_mask = 1.0 - expanded_mask\n\n    return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min)\n\n\n# Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->LDMBert\nclass LDMBertAttention(nn.Module):\n    \"\"\"Multi-headed attention from 'Attention Is All You Need' paper\"\"\"\n\n    def __init__(\n        self,\n        embed_dim: int,\n        num_heads: int,\n        head_dim: int,\n        dropout: float = 0.0,\n        is_decoder: bool = False,\n        bias: bool = False,\n    ):\n        super().__init__()\n        self.embed_dim = embed_dim\n        self.num_heads = num_heads\n        self.dropout = dropout\n        self.head_dim = head_dim\n        self.inner_dim = head_dim * num_heads\n\n        self.scaling = self.head_dim**-0.5\n        self.is_decoder = is_decoder\n\n        self.k_proj = nn.Linear(embed_dim, self.inner_dim, bias=bias)\n        self.v_proj = nn.Linear(embed_dim, self.inner_dim, bias=bias)\n        self.q_proj = nn.Linear(embed_dim, self.inner_dim, bias=bias)\n        self.out_proj = nn.Linear(self.inner_dim, embed_dim)\n\n    def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):\n        return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()\n\n    def forward(\n        self,\n        hidden_states: torch.Tensor,\n        key_value_states: Optional[torch.Tensor] = None,\n        past_key_value: Optional[Tuple[torch.Tensor]] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        layer_head_mask: Optional[torch.Tensor] = None,\n        output_attentions: bool = False,\n    ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:\n        \"\"\"Input shape: Batch x Time x Channel\"\"\"\n\n        # if key_value_states are provided this layer is used as a cross-attention layer\n        # for the decoder\n        
is_cross_attention = key_value_states is not None\n\n        bsz, tgt_len, _ = hidden_states.size()\n\n        # get query proj\n        query_states = self.q_proj(hidden_states) * self.scaling\n        # get key, value proj\n        if is_cross_attention and past_key_value is not None:\n            # reuse k,v, cross_attentions\n            key_states = past_key_value[0]\n            value_states = past_key_value[1]\n        elif is_cross_attention:\n            # cross_attentions\n            key_states = self._shape(self.k_proj(key_value_states), -1, bsz)\n            value_states = self._shape(self.v_proj(key_value_states), -1, bsz)\n        elif past_key_value is not None:\n            # reuse k, v, self_attention\n            key_states = self._shape(self.k_proj(hidden_states), -1, bsz)\n            value_states = self._shape(self.v_proj(hidden_states), -1, bsz)\n            key_states = torch.cat([past_key_value[0], key_states], dim=2)\n            value_states = torch.cat([past_key_value[1], value_states], dim=2)\n        else:\n            # self_attention\n            key_states = self._shape(self.k_proj(hidden_states), -1, bsz)\n            value_states = self._shape(self.v_proj(hidden_states), -1, bsz)\n\n        if self.is_decoder:\n            # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.\n            # Further calls to cross_attention layer can then reuse all cross-attention\n            # key/value_states (first \"if\" case)\n            # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of\n            # all previous decoder key/value_states. Further calls to uni-directional self-attention\n            # can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)\n            # if encoder bi-directional self-attention `past_key_value` is always `None`\n            past_key_value = (key_states, value_states)\n\n        proj_shape = (bsz * self.num_heads, -1, self.head_dim)\n        query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)\n        key_states = key_states.view(*proj_shape)\n        value_states = value_states.view(*proj_shape)\n\n        src_len = key_states.size(1)\n        attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))\n\n        if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):\n            raise ValueError(\n                f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is\"\n                f\" {attn_weights.size()}\"\n            )\n\n        if attention_mask is not None:\n            if attention_mask.size() != (bsz, 1, tgt_len, src_len):\n                raise ValueError(\n                    f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}\"\n                )\n            attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask\n            attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)\n\n        attn_weights = nn.functional.softmax(attn_weights, dim=-1)\n\n        if layer_head_mask is not None:\n            if layer_head_mask.size() != (self.num_heads,):\n                raise ValueError(\n                    f\"Head mask for a single layer should be of size {(self.num_heads,)}, but is\"\n                    f\" {layer_head_mask.size()}\"\n                )\n            attn_weights = layer_head_mask.view(1, -1, 1, 1) 
* attn_weights.view(bsz, self.num_heads, tgt_len, src_len)\n            attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)\n\n        if output_attentions:\n            # this operation is a bit awkward, but it's required to\n            # make sure that attn_weights keeps its gradient.\n            # In order to do so, attn_weights have to be reshaped\n            # twice and have to be reused in the following\n            attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)\n            attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)\n        else:\n            attn_weights_reshaped = None\n\n        attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)\n\n        attn_output = torch.bmm(attn_probs, value_states)\n\n        if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):\n            raise ValueError(\n                f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is\"\n                f\" {attn_output.size()}\"\n            )\n\n        attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)\n        attn_output = attn_output.transpose(1, 2)\n\n        # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be\n        # partitioned across GPUs when using tensor-parallelism.\n        attn_output = attn_output.reshape(bsz, tgt_len, self.inner_dim)\n\n        attn_output = self.out_proj(attn_output)\n\n        return attn_output, attn_weights_reshaped, past_key_value\n\n\nclass LDMBertEncoderLayer(nn.Module):\n    def __init__(self, config: LDMBertConfig):\n        super().__init__()\n        self.embed_dim = config.d_model\n        self.self_attn = LDMBertAttention(\n            embed_dim=self.embed_dim,\n            num_heads=config.encoder_attention_heads,\n            head_dim=config.head_dim,\n            dropout=config.attention_dropout,\n        )\n        self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)\n        self.dropout = config.dropout\n        self.activation_fn = ACT2FN[config.activation_function]\n        self.activation_dropout = config.activation_dropout\n        self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim)\n        self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim)\n        self.final_layer_norm = nn.LayerNorm(self.embed_dim)\n\n    def forward(\n        self,\n        hidden_states: torch.FloatTensor,\n        attention_mask: torch.FloatTensor,\n        layer_head_mask: torch.FloatTensor,\n        output_attentions: Optional[bool] = False,\n    ) -> Tuple[torch.FloatTensor, Optional[torch.FloatTensor]]:\n        \"\"\"\n        Args:\n            hidden_states (`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)`\n            attention_mask (`torch.FloatTensor`): attention mask of size\n                `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.\n            layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size\n                `(encoder_attention_heads,)`.\n            output_attentions (`bool`, *optional*):\n                Whether or not to return the attentions tensors of all attention layers. 
See `attentions` under\n                returned tensors for more detail.\n        \"\"\"\n        residual = hidden_states\n        hidden_states = self.self_attn_layer_norm(hidden_states)\n        hidden_states, attn_weights, _ = self.self_attn(\n            hidden_states=hidden_states,\n            attention_mask=attention_mask,\n            layer_head_mask=layer_head_mask,\n            output_attentions=output_attentions,\n        )\n        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)\n        hidden_states = residual + hidden_states\n\n        residual = hidden_states\n        hidden_states = self.final_layer_norm(hidden_states)\n        hidden_states = self.activation_fn(self.fc1(hidden_states))\n        hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training)\n        hidden_states = self.fc2(hidden_states)\n        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)\n        hidden_states = residual + hidden_states\n\n        if hidden_states.dtype == torch.float16 and (\n            torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any()\n        ):\n            clamp_value = torch.finfo(hidden_states.dtype).max - 1000\n            hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)\n\n        outputs = (hidden_states,)\n\n        if output_attentions:\n            outputs += (attn_weights,)\n\n        return outputs\n\n\n# Copied from transformers.models.bart.modeling_bart.BartPretrainedModel with Bart->LDMBert\nclass LDMBertPreTrainedModel(PreTrainedModel):\n    config_class = LDMBertConfig\n    base_model_prefix = \"model\"\n    _supports_gradient_checkpointing = True\n    _keys_to_ignore_on_load_unexpected = [r\"encoder\\.version\", r\"decoder\\.version\"]\n\n    def _init_weights(self, module):\n        std = self.config.init_std\n        if isinstance(module, nn.Linear):\n            module.weight.data.normal_(mean=0.0, std=std)\n            if module.bias is not None:\n                module.bias.data.zero_()\n        elif isinstance(module, nn.Embedding):\n            module.weight.data.normal_(mean=0.0, std=std)\n            if module.padding_idx is not None:\n                module.weight.data[module.padding_idx].zero_()\n\n    def _set_gradient_checkpointing(self, module, value=False):\n        if isinstance(module, (LDMBertEncoder,)):\n            module.gradient_checkpointing = value\n\n    @property\n    def dummy_inputs(self):\n        pad_token = self.config.pad_token_id\n        input_ids = torch.tensor([[0, 6, 10, 4, 2], [0, 8, 12, 2, pad_token]], device=self.device)\n        dummy_inputs = {\n            \"attention_mask\": input_ids.ne(pad_token),\n            \"input_ids\": input_ids,\n        }\n        return dummy_inputs\n\n\nclass LDMBertEncoder(LDMBertPreTrainedModel):\n    \"\"\"\n    Transformer encoder consisting of *config.encoder_layers* self attention layers. 
Each layer is a\n    [`LDMBertEncoderLayer`].\n\n    Args:\n        config: LDMBertConfig\n        embed_tokens (nn.Embedding): output embedding\n    \"\"\"\n\n    def __init__(self, config: LDMBertConfig):\n        super().__init__(config)\n\n        self.dropout = config.dropout\n\n        embed_dim = config.d_model\n        self.padding_idx = config.pad_token_id\n        self.max_source_positions = config.max_position_embeddings\n\n        self.embed_tokens = nn.Embedding(config.vocab_size, embed_dim)\n        self.embed_positions = nn.Embedding(config.max_position_embeddings, embed_dim)\n        self.layers = nn.ModuleList([LDMBertEncoderLayer(config) for _ in range(config.encoder_layers)])\n        self.layer_norm = nn.LayerNorm(embed_dim)\n\n        self.gradient_checkpointing = False\n        # Initialize weights and apply final processing\n        self.post_init()\n\n    def get_input_embeddings(self):\n        return self.embed_tokens\n\n    def set_input_embeddings(self, value):\n        self.embed_tokens = value\n\n    def forward(\n        self,\n        input_ids: torch.LongTensor = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        position_ids: Optional[torch.LongTensor] = None,\n        head_mask: Optional[torch.Tensor] = None,\n        inputs_embeds: Optional[torch.FloatTensor] = None,\n        output_attentions: Optional[bool] = None,\n        output_hidden_states: Optional[bool] = None,\n        return_dict: Optional[bool] = None,\n    ) -> Union[Tuple, BaseModelOutput]:\n        r\"\"\"\n        Args:\n            input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):\n                Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you\n                provide it.\n\n                Indices can be obtained using [`BartTokenizer`]. See [`PreTrainedTokenizer.encode`] and\n                [`PreTrainedTokenizer.__call__`] for details.\n\n                [What are input IDs?](../glossary#input-ids)\n            attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):\n                Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:\n\n                - 1 for tokens that are **not masked**,\n                - 0 for tokens that are **masked**.\n\n                [What are attention masks?](../glossary#attention-mask)\n            head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*):\n                Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`:\n\n                - 1 indicates the head is **not masked**,\n                - 0 indicates the head is **masked**.\n\n            inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):\n                Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation.\n                This is useful if you want more control over how to convert `input_ids` indices into associated vectors\n                than the model's internal embedding lookup matrix.\n            output_attentions (`bool`, *optional*):\n                Whether or not to return the attentions tensors of all attention layers. See `attentions` under\n                returned tensors for more detail.\n            output_hidden_states (`bool`, *optional*):\n                Whether or not to return the hidden states of all layers. 
See `hidden_states` under returned tensors\n                for more detail.\n            return_dict (`bool`, *optional*):\n                Whether or not to return a [`~utils.BaseModelOutput`] instead of a plain tuple.\n        \"\"\"\n        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions\n        output_hidden_states = (\n            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states\n        )\n        return_dict = return_dict if return_dict is not None else self.config.use_return_dict\n\n        # retrieve input_ids and inputs_embeds\n        if input_ids is not None and inputs_embeds is not None:\n            raise ValueError(\"You cannot specify both input_ids and inputs_embeds at the same time\")\n        elif input_ids is not None:\n            input_shape = input_ids.size()\n            input_ids = input_ids.view(-1, input_shape[-1])\n        elif inputs_embeds is not None:\n            input_shape = inputs_embeds.size()[:-1]\n        else:\n            raise ValueError(\"You have to specify either input_ids or inputs_embeds\")\n\n        if inputs_embeds is None:\n            inputs_embeds = self.embed_tokens(input_ids)\n\n        seq_len = input_shape[1]\n        if position_ids is None:\n            position_ids = torch.arange(seq_len, dtype=torch.long, device=inputs_embeds.device).expand((1, -1))\n        embed_pos = self.embed_positions(position_ids)\n\n        hidden_states = inputs_embeds + embed_pos\n        hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)\n\n        # expand attention_mask\n        if attention_mask is not None:\n            # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]\n            attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)\n\n        encoder_states = () if output_hidden_states else None\n        all_attentions = () if output_attentions else None\n\n        # check if head_mask has a correct number of layers specified if desired\n        if head_mask is not None:\n            if head_mask.size()[0] != (len(self.layers)):\n                raise ValueError(\n                    f\"The head_mask should be specified for {len(self.layers)} layers, but it is for\"\n                    f\" {head_mask.size()[0]}.\"\n                )\n\n        for idx, encoder_layer in enumerate(self.layers):\n            if output_hidden_states:\n                encoder_states = encoder_states + (hidden_states,)\n            if self.gradient_checkpointing and self.training:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs, output_attentions)\n\n                    return custom_forward\n\n                layer_outputs = torch.utils.checkpoint.checkpoint(\n                    create_custom_forward(encoder_layer),\n                    hidden_states,\n                    attention_mask,\n                    (head_mask[idx] if head_mask is not None else None),\n                )\n            else:\n                layer_outputs = encoder_layer(\n                    hidden_states,\n                    attention_mask,\n                    layer_head_mask=(head_mask[idx] if head_mask is not None else None),\n                    output_attentions=output_attentions,\n                )\n\n            hidden_states = layer_outputs[0]\n\n            if output_attentions:\n                all_attentions = 
all_attentions + (layer_outputs[1],)\n\n        hidden_states = self.layer_norm(hidden_states)\n\n        if output_hidden_states:\n            encoder_states = encoder_states + (hidden_states,)\n\n        if not return_dict:\n            return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)\n        return BaseModelOutput(\n            last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions\n        )\n\n\nclass LDMBertModel(LDMBertPreTrainedModel):\n    _no_split_modules = []\n\n    def __init__(self, config: LDMBertConfig):\n        super().__init__(config)\n        self.model = LDMBertEncoder(config)\n        self.to_logits = nn.Linear(config.hidden_size, config.vocab_size)\n\n    def forward(\n        self,\n        input_ids=None,\n        attention_mask=None,\n        position_ids=None,\n        head_mask=None,\n        inputs_embeds=None,\n        output_attentions=None,\n        output_hidden_states=None,\n        return_dict=None,\n    ):\n        outputs = self.model(\n            input_ids,\n            attention_mask=attention_mask,\n            position_ids=position_ids,\n            head_mask=head_mask,\n            inputs_embeds=inputs_embeds,\n            output_attentions=output_attentions,\n            output_hidden_states=output_hidden_states,\n            return_dict=return_dict,\n        )\n        return outputs\n"
  },
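  {
    "path": "examples_hypothetical/ldm_text2img_usage_sketch.py",
    "content": "# Usage sketch for the LDMTextToImagePipeline defined above. The checkpoint id\n# (\"CompVis/ldm-text2im-large-256\"), the prompt, and the output filename are assumptions for\n# illustration; any compatible latent-diffusion text-to-image checkpoint should work the same way.\nimport torch\n\nfrom diffusers import DiffusionPipeline\n\npipe = DiffusionPipeline.from_pretrained(\"CompVis/ldm-text2im-large-256\")\npipe = pipe.to(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n# guidance_scale > 1 enables the classifier-free guidance branch implemented in __call__ above\ngenerator = torch.Generator().manual_seed(0)\nimage = pipe(\n    \"a painting of a squirrel eating a burger\",\n    num_inference_steps=50,\n    guidance_scale=6.0,\n    generator=generator,\n).images[0]\nimage.save(\"ldm_text2img_sketch.png\")\n"
  },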
  {
    "path": "diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion_superresolution.py",
    "content": "import inspect\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.utils.checkpoint\n\nfrom ...models import UNet2DModel, VQModel\nfrom ...schedulers import (\n    DDIMScheduler,\n    DPMSolverMultistepScheduler,\n    EulerAncestralDiscreteScheduler,\n    EulerDiscreteScheduler,\n    LMSDiscreteScheduler,\n    PNDMScheduler,\n)\nfrom ...utils import PIL_INTERPOLATION, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\ndef preprocess(image):\n    w, h = image.size\n    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n    image = image.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"])\n    image = np.array(image).astype(np.float32) / 255.0\n    image = image[None].transpose(0, 3, 1, 2)\n    image = torch.from_numpy(image)\n    return 2.0 * image - 1.0\n\n\nclass LDMSuperResolutionPipeline(DiffusionPipeline):\n    r\"\"\"\n    A pipeline for image super-resolution using latent diffusion.\n\n    This class inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqvae ([`VQModel`]):\n            Vector-quantized (VQ) VAE Model to encode and decode images to and from latent representations.\n        unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], [`EulerDiscreteScheduler`],\n            [`EulerAncestralDiscreteScheduler`], [`DPMSolverMultistepScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n\n    def __init__(\n        self,\n        vqvae: VQModel,\n        unet: UNet2DModel,\n        scheduler: Union[\n            DDIMScheduler,\n            PNDMScheduler,\n            LMSDiscreteScheduler,\n            EulerDiscreteScheduler,\n            EulerAncestralDiscreteScheduler,\n            DPMSolverMultistepScheduler,\n        ],\n    ):\n        super().__init__()\n        self.register_modules(vqvae=vqvae, unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        image: Union[torch.Tensor, PIL.Image.Image] = None,\n        batch_size: Optional[int] = 1,\n        num_inference_steps: Optional[int] = 100,\n        eta: Optional[float] = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ) -> Union[Tuple, ImagePipelineOutput]:\n        r\"\"\"\n        Args:\n            image (`torch.Tensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            batch_size (`int`, *optional*, defaults to 1):\n                Number of images to generate.\n            num_inference_steps (`int`, *optional*, defaults to 100):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        if isinstance(image, PIL.Image.Image):\n            batch_size = 1\n        elif isinstance(image, torch.Tensor):\n            batch_size = image.shape[0]\n        else:\n            raise ValueError(f\"`image` has to be of type `PIL.Image.Image` or `torch.Tensor` but is {type(image)}\")\n\n        if isinstance(image, PIL.Image.Image):\n            image = preprocess(image)\n\n        height, width = image.shape[-2:]\n\n        # in_channels should be 6: 3 for latents, 3 for low resolution image\n        latents_shape = (batch_size, self.unet.config.in_channels // 2, height, width)\n        latents_dtype = next(self.unet.parameters()).dtype\n\n        latents = randn_tensor(latents_shape, generator=generator, device=self.device, dtype=latents_dtype)\n\n        image = image.to(device=self.device, dtype=latents_dtype)\n\n        # set timesteps and move to the correct device\n        self.scheduler.set_timesteps(num_inference_steps, device=self.device)\n        timesteps_tensor = self.scheduler.timesteps\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature.\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_kwargs = {}\n        if accepts_eta:\n            extra_kwargs[\"eta\"] = eta\n\n        for t in self.progress_bar(timesteps_tensor):\n            # concat latents and low resolution image in the channel dimension.\n            latents_input = torch.cat([latents, image], dim=1)\n            latents_input = self.scheduler.scale_model_input(latents_input, t)\n            # predict the noise residual\n            noise_pred = self.unet(latents_input, t).sample\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_kwargs).prev_sample\n\n        # decode the image latents with the VQVAE\n        image = self.vqvae.decode(latents).sample\n        image = torch.clamp(image, -1.0, 1.0)\n        image = image / 2 + 0.5\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n       
 if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
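  {
    "path": "examples_hypothetical/ldm_superresolution_usage_sketch.py",
    "content": "# Usage sketch for the LDMSuperResolutionPipeline defined above. The checkpoint id\n# (\"CompVis/ldm-super-resolution-4x-openimages\") and the input/output filenames are assumptions for\n# illustration; the pipeline accepts a PIL image or an image tensor already scaled to [-1, 1].\nimport PIL.Image\nimport torch\n\nfrom diffusers import LDMSuperResolutionPipeline\n\npipe = LDMSuperResolutionPipeline.from_pretrained(\"CompVis/ldm-super-resolution-4x-openimages\")\npipe = pipe.to(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n# preprocess() above resizes the input to a multiple of 32, so any reasonably sized RGB image works\nlow_res = PIL.Image.open(\"low_res_input.png\").convert(\"RGB\")\nupscaled = pipe(image=low_res, num_inference_steps=100, eta=1.0).images[0]\nupscaled.save(\"ldm_superresolution_sketch.png\")\n"
  },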
  {
    "path": "diffusers/pipelines/latent_diffusion_uncond/__init__.py",
    "content": "from .pipeline_latent_diffusion_uncond import LDMPipeline\n"
  },
  {
    "path": "diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...models import UNet2DModel, VQModel\nfrom ...schedulers import DDIMScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass LDMPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqvae ([`VQModel`]):\n            Vector-quantized (VQ) Model to encode and decode images to and from latent representations.\n        unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            [`DDIMScheduler`] is to be used in combination with `unet` to denoise the encoded image latents.\n    \"\"\"\n\n    def __init__(self, vqvae: VQModel, unet: UNet2DModel, scheduler: DDIMScheduler):\n        super().__init__()\n        self.register_modules(vqvae=vqvae, unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        eta: float = 0.0,\n        num_inference_steps: int = 50,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        **kwargs,\n    ) -> Union[Tuple, ImagePipelineOutput]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, *optional*, defaults to 1):\n                Number of images to generate.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. 
When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n\n        latents = randn_tensor(\n            (batch_size, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size),\n            generator=generator,\n        )\n        latents = latents.to(self.device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n\n        extra_kwargs = {}\n        if accepts_eta:\n            extra_kwargs[\"eta\"] = eta\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            latent_model_input = self.scheduler.scale_model_input(latents, t)\n            # predict the noise residual\n            noise_prediction = self.unet(latent_model_input, t).sample\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_prediction, t, latents, **extra_kwargs).prev_sample\n\n        # decode the image latents with the VAE\n        image = self.vqvae.decode(latents).sample\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
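  {
    "path": "examples_hypothetical/ldm_uncond_usage_sketch.py",
    "content": "# Usage sketch for the unconditional LDMPipeline defined above. The checkpoint id\n# (\"CompVis/ldm-celebahq-256\") and the output filename are assumptions for illustration.\nimport torch\n\nfrom diffusers import LDMPipeline\n\npipe = LDMPipeline.from_pretrained(\"CompVis/ldm-celebahq-256\")\npipe = pipe.to(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n\n# a seeded generator makes the sample reproducible, as described in the docstring above\ngenerator = torch.Generator().manual_seed(0)\nimage = pipe(batch_size=1, num_inference_steps=200, generator=generator).images[0]\nimage.save(\"ldm_uncond_sketch.png\")\n"
  },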
  {
    "path": "diffusers/pipelines/onnx_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport os\nimport shutil\nfrom pathlib import Path\nfrom typing import Optional, Union\n\nimport numpy as np\nfrom huggingface_hub import hf_hub_download\n\nfrom ..utils import ONNX_EXTERNAL_WEIGHTS_NAME, ONNX_WEIGHTS_NAME, is_onnx_available, logging\n\n\nif is_onnx_available():\n    import onnxruntime as ort\n\n\nlogger = logging.get_logger(__name__)\n\nORT_TO_NP_TYPE = {\n    \"tensor(bool)\": np.bool_,\n    \"tensor(int8)\": np.int8,\n    \"tensor(uint8)\": np.uint8,\n    \"tensor(int16)\": np.int16,\n    \"tensor(uint16)\": np.uint16,\n    \"tensor(int32)\": np.int32,\n    \"tensor(uint32)\": np.uint32,\n    \"tensor(int64)\": np.int64,\n    \"tensor(uint64)\": np.uint64,\n    \"tensor(float16)\": np.float16,\n    \"tensor(float)\": np.float32,\n    \"tensor(double)\": np.float64,\n}\n\n\nclass OnnxRuntimeModel:\n    def __init__(self, model=None, **kwargs):\n        logger.info(\"`diffusers.OnnxRuntimeModel` is experimental and might change in the future.\")\n        self.model = model\n        self.model_save_dir = kwargs.get(\"model_save_dir\", None)\n        self.latest_model_name = kwargs.get(\"latest_model_name\", ONNX_WEIGHTS_NAME)\n\n    def __call__(self, **kwargs):\n        inputs = {k: np.array(v) for k, v in kwargs.items()}\n        return self.model.run(None, inputs)\n\n    @staticmethod\n    def load_model(path: Union[str, Path], provider=None, sess_options=None):\n        \"\"\"\n        Loads an ONNX Inference session with an ExecutionProvider. Default provider is `CPUExecutionProvider`\n\n        Arguments:\n            path (`str` or `Path`):\n                Directory from which to load\n            provider(`str`, *optional*):\n                Onnxruntime execution provider to use for loading the model, defaults to `CPUExecutionProvider`\n        \"\"\"\n        if provider is None:\n            logger.info(\"No onnxruntime provider specified, using CPUExecutionProvider\")\n            provider = \"CPUExecutionProvider\"\n\n        return ort.InferenceSession(path, providers=[provider], sess_options=sess_options)\n\n    def _save_pretrained(self, save_directory: Union[str, Path], file_name: Optional[str] = None, **kwargs):\n        \"\"\"\n        Save a model and its configuration file to a directory, so that it can be re-loaded using the\n        [`~optimum.onnxruntime.modeling_ort.ORTModel.from_pretrained`] class method. It will always save the\n        latest_model_name.\n\n        Arguments:\n            save_directory (`str` or `Path`):\n                Directory where to save the model file.\n            file_name(`str`, *optional*):\n                Overwrites the default model file name from `\"model.onnx\"` to `file_name`. 
This allows you to save the\n                model with a different name.\n        \"\"\"\n        model_file_name = file_name if file_name is not None else ONNX_WEIGHTS_NAME\n\n        src_path = self.model_save_dir.joinpath(self.latest_model_name)\n        dst_path = Path(save_directory).joinpath(model_file_name)\n        try:\n            shutil.copyfile(src_path, dst_path)\n        except shutil.SameFileError:\n            pass\n\n        # copy external weights (for models >2GB)\n        src_path = self.model_save_dir.joinpath(ONNX_EXTERNAL_WEIGHTS_NAME)\n        if src_path.exists():\n            dst_path = Path(save_directory).joinpath(ONNX_EXTERNAL_WEIGHTS_NAME)\n            try:\n                shutil.copyfile(src_path, dst_path)\n            except shutil.SameFileError:\n                pass\n\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        **kwargs,\n    ):\n        \"\"\"\n        Save a model to a directory, so that it can be re-loaded using the [`~OnnxModel.from_pretrained`] class\n        method.:\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. Will be created if it doesn't exist.\n        \"\"\"\n        if os.path.isfile(save_directory):\n            logger.error(f\"Provided path ({save_directory}) should be a directory, not a file\")\n            return\n\n        os.makedirs(save_directory, exist_ok=True)\n\n        # saving model weights/files\n        self._save_pretrained(save_directory, **kwargs)\n\n    @classmethod\n    def _from_pretrained(\n        cls,\n        model_id: Union[str, Path],\n        use_auth_token: Optional[Union[bool, str, None]] = None,\n        revision: Optional[Union[str, None]] = None,\n        force_download: bool = False,\n        cache_dir: Optional[str] = None,\n        file_name: Optional[str] = None,\n        provider: Optional[str] = None,\n        sess_options: Optional[\"ort.SessionOptions\"] = None,\n        **kwargs,\n    ):\n        \"\"\"\n        Load a model from a directory or the HF Hub.\n\n        Arguments:\n            model_id (`str` or `Path`):\n                Directory from which to load\n            use_auth_token (`str` or `bool`):\n                Is needed to load models from a private or gated repository\n            revision (`str`):\n                Revision is the specific model version to use. It can be a branch name, a tag name, or a commit id\n            cache_dir (`Union[str, Path]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            file_name(`str`):\n                Overwrites the default model file name from `\"model.onnx\"` to `file_name`. This allows you to load\n                different model files from the same repository or directory.\n            provider(`str`):\n                The ONNX runtime provider, e.g. 
`CPUExecutionProvider` or `CUDAExecutionProvider`.\n            kwargs (`Dict`, *optional*):\n                kwargs will be passed to the model during initialization\n        \"\"\"\n        model_file_name = file_name if file_name is not None else ONNX_WEIGHTS_NAME\n        # load model from local directory\n        if os.path.isdir(model_id):\n            model = OnnxRuntimeModel.load_model(\n                os.path.join(model_id, model_file_name), provider=provider, sess_options=sess_options\n            )\n            kwargs[\"model_save_dir\"] = Path(model_id)\n        # load model from hub\n        else:\n            # download model\n            model_cache_path = hf_hub_download(\n                repo_id=model_id,\n                filename=model_file_name,\n                use_auth_token=use_auth_token,\n                revision=revision,\n                cache_dir=cache_dir,\n                force_download=force_download,\n            )\n            kwargs[\"model_save_dir\"] = Path(model_cache_path).parent\n            kwargs[\"latest_model_name\"] = Path(model_cache_path).name\n            model = OnnxRuntimeModel.load_model(model_cache_path, provider=provider, sess_options=sess_options)\n        return cls(model=model, **kwargs)\n\n    @classmethod\n    def from_pretrained(\n        cls,\n        model_id: Union[str, Path],\n        force_download: bool = True,\n        use_auth_token: Optional[str] = None,\n        cache_dir: Optional[str] = None,\n        **model_kwargs,\n    ):\n        revision = None\n        if len(str(model_id).split(\"@\")) == 2:\n            model_id, revision = model_id.split(\"@\")\n\n        return cls._from_pretrained(\n            model_id=model_id,\n            revision=revision,\n            cache_dir=cache_dir,\n            force_download=force_download,\n            use_auth_token=use_auth_token,\n            **model_kwargs,\n        )\n"
  },
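  {
    "path": "examples_hypothetical/onnx_runtime_model_usage_sketch.py",
    "content": "# Usage sketch for the OnnxRuntimeModel wrapper defined above. The repository id\n# (\"some-org/some-onnx-model\") and the input name (\"sample\") are placeholders; real input names and\n# shapes depend on the exported ONNX graph, and onnxruntime must be installed for this to run.\nimport numpy as np\n\nfrom diffusers import OnnxRuntimeModel\n\n# from_pretrained() loads from a local directory or the Hub; an \"@revision\" suffix on the id\n# selects a specific branch, tag, or commit (see from_pretrained above)\nmodel = OnnxRuntimeModel.from_pretrained(\"some-org/some-onnx-model\", provider=\"CPUExecutionProvider\")\n\n# __call__ converts keyword arguments to numpy arrays and runs the underlying InferenceSession\noutputs = model(sample=np.zeros((1, 4, 64, 64), dtype=np.float32))\n\n# save_pretrained() copies the .onnx file (and external weights, if any) into the target directory\nmodel.save_pretrained(\"./onnx_model_copy\")\n"
  },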
  {
    "path": "diffusers/pipelines/paint_by_example/__init__.py",
    "content": "from dataclasses import dataclass\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\nfrom PIL import Image\n\nfrom ...utils import is_torch_available, is_transformers_available\n\n\nif is_transformers_available() and is_torch_available():\n    from .image_encoder import PaintByExampleImageEncoder\n    from .pipeline_paint_by_example import PaintByExamplePipeline\n"
  },
  {
    "path": "diffusers/pipelines/paint_by_example/image_encoder.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport torch\nfrom torch import nn\nfrom transformers import CLIPPreTrainedModel, CLIPVisionModel\n\nfrom ...models.attention import BasicTransformerBlock\nfrom ...utils import logging\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass PaintByExampleImageEncoder(CLIPPreTrainedModel):\n    def __init__(self, config, proj_size=768):\n        super().__init__(config)\n        self.proj_size = proj_size\n\n        self.model = CLIPVisionModel(config)\n        self.mapper = PaintByExampleMapper(config)\n        self.final_layer_norm = nn.LayerNorm(config.hidden_size)\n        self.proj_out = nn.Linear(config.hidden_size, self.proj_size)\n\n        # uncondition for scaling\n        self.uncond_vector = nn.Parameter(torch.randn((1, 1, self.proj_size)))\n\n    def forward(self, pixel_values, return_uncond_vector=False):\n        clip_output = self.model(pixel_values=pixel_values)\n        latent_states = clip_output.pooler_output\n        latent_states = self.mapper(latent_states[:, None])\n        latent_states = self.final_layer_norm(latent_states)\n        latent_states = self.proj_out(latent_states)\n        if return_uncond_vector:\n            return latent_states, self.uncond_vector\n\n        return latent_states\n\n\nclass PaintByExampleMapper(nn.Module):\n    def __init__(self, config):\n        super().__init__()\n        num_layers = (config.num_hidden_layers + 1) // 5\n        hid_size = config.hidden_size\n        num_heads = 1\n        self.blocks = nn.ModuleList(\n            [\n                BasicTransformerBlock(hid_size, num_heads, hid_size, activation_fn=\"gelu\", attention_bias=True)\n                for _ in range(num_layers)\n            ]\n        )\n\n    def forward(self, hidden_states):\n        for block in self.blocks:\n            hidden_states = block(hidden_states)\n\n        return hidden_states\n"
  },
  {
    "path": "diffusers/pipelines/paint_by_example/pipeline_paint_by_example.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor\n\nfrom diffusers.utils import is_accelerate_available\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler\nfrom ...utils import logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom ..stable_diffusion import StableDiffusionPipelineOutput\nfrom ..stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom .image_encoder import PaintByExampleImageEncoder\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef prepare_mask_and_masked_image(image, mask):\n    \"\"\"\n    Prepares a pair (image, mask) to be consumed by the Paint by Example pipeline. This means that those inputs will be\n    converted to ``torch.Tensor`` with shapes ``batch x channels x height x width`` where ``channels`` is ``3`` for the\n    ``image`` and ``1`` for the ``mask``.\n\n    The ``image`` will be converted to ``torch.float32`` and normalized to be in ``[-1, 1]``. The ``mask`` will be\n    binarized (``mask > 0.5``) and cast to ``torch.float32`` too.\n\n    Args:\n        image (Union[np.array, PIL.Image, torch.Tensor]): The image to inpaint.\n            It can be a ``PIL.Image``, or a ``height x width x 3`` ``np.array`` or a ``channels x height x width``\n            ``torch.Tensor`` or a ``batch x channels x height x width`` ``torch.Tensor``.\n        mask (_type_): The mask to apply to the image, i.e. regions to inpaint.\n            It can be a ``PIL.Image``, or a ``height x width`` ``np.array`` or a ``1 x height x width``\n            ``torch.Tensor`` or a ``batch x 1 x height x width`` ``torch.Tensor``.\n\n\n    Raises:\n        ValueError: ``torch.Tensor`` images should be in the ``[-1, 1]`` range. ValueError: ``torch.Tensor`` mask\n        should be in the ``[0, 1]`` range. 
ValueError: ``mask`` and ``image`` should have the same spatial dimensions.\n        TypeError: ``mask`` is a ``torch.Tensor`` but ``image`` is not\n            (ot the other way around).\n\n    Returns:\n        tuple[torch.Tensor]: The pair (mask, masked_image) as ``torch.Tensor`` with 4\n            dimensions: ``batch x channels x height x width``.\n    \"\"\"\n    if isinstance(image, torch.Tensor):\n        if not isinstance(mask, torch.Tensor):\n            raise TypeError(f\"`image` is a torch.Tensor but `mask` (type: {type(mask)} is not\")\n\n        # Batch single image\n        if image.ndim == 3:\n            assert image.shape[0] == 3, \"Image outside a batch should be of shape (3, H, W)\"\n            image = image.unsqueeze(0)\n\n        # Batch and add channel dim for single mask\n        if mask.ndim == 2:\n            mask = mask.unsqueeze(0).unsqueeze(0)\n\n        # Batch single mask or add channel dim\n        if mask.ndim == 3:\n            # Batched mask\n            if mask.shape[0] == image.shape[0]:\n                mask = mask.unsqueeze(1)\n            else:\n                mask = mask.unsqueeze(0)\n\n        assert image.ndim == 4 and mask.ndim == 4, \"Image and Mask must have 4 dimensions\"\n        assert image.shape[-2:] == mask.shape[-2:], \"Image and Mask must have the same spatial dimensions\"\n        assert image.shape[0] == mask.shape[0], \"Image and Mask must have the same batch size\"\n        assert mask.shape[1] == 1, \"Mask image must have a single channel\"\n\n        # Check image is in [-1, 1]\n        if image.min() < -1 or image.max() > 1:\n            raise ValueError(\"Image should be in [-1, 1] range\")\n\n        # Check mask is in [0, 1]\n        if mask.min() < 0 or mask.max() > 1:\n            raise ValueError(\"Mask should be in [0, 1] range\")\n\n        # paint-by-example inverses the mask\n        mask = 1 - mask\n\n        # Binarize mask\n        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n\n        # Image as float32\n        image = image.to(dtype=torch.float32)\n    elif isinstance(mask, torch.Tensor):\n        raise TypeError(f\"`mask` is a torch.Tensor but `image` (type: {type(image)} is not\")\n    else:\n        if isinstance(image, PIL.Image.Image):\n            image = [image]\n\n        image = np.concatenate([np.array(i.convert(\"RGB\"))[None, :] for i in image], axis=0)\n        image = image.transpose(0, 3, 1, 2)\n        image = torch.from_numpy(image).to(dtype=torch.float32) / 127.5 - 1.0\n\n        # preprocess mask\n        if isinstance(mask, PIL.Image.Image):\n            mask = [mask]\n\n        mask = np.concatenate([np.array(m.convert(\"L\"))[None, None, :] for m in mask], axis=0)\n        mask = mask.astype(np.float32) / 255.0\n\n        # paint-by-example inverses the mask\n        mask = 1 - mask\n\n        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n        mask = torch.from_numpy(mask)\n\n    masked_image = image * mask\n\n    return mask, masked_image\n\n\nclass PaintByExamplePipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for image-guided image inpainting using Stable Diffusion. *This is an experimental feature*.\n\n    This model inherits from [`DiffusionPipeline`]. 
Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        image_encoder ([`PaintByExampleImageEncoder`]):\n            Encodes the example input image. The unet is conditioned on the example image instead of a text prompt.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    # TODO: feature_extractor is required to encode initial images (if they are in PIL format),\n    # we should give a descriptive message if the pipeline doesn't have one.\n    _optional_components = [\"safety_checker\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        image_encoder: PaintByExampleImageEncoder,\n        unet: UNet2DConditionModel,\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = False,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            vae=vae,\n            image_encoder=image_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, the unet,\n        image_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta')` and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.vae, self.image_encoder]:\n            cpu_offload(cpu_offloaded_model, execution_device=device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from 
diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_image_variation.StableDiffusionImageVariationPipeline.check_inputs\n    def check_inputs(self, image, height, width, callback_steps):\n        if (\n            not isinstance(image, torch.Tensor)\n            and not isinstance(image, PIL.Image.Image)\n            and not isinstance(image, list)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor` or `PIL.Image.Image` or `List[PIL.Image.Image]` but is\"\n                f\" {type(image)}\"\n            )\n\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_inpaint.StableDiffusionInpaintPipeline.prepare_mask_latents\n    def prepare_mask_latents(\n        self, mask, masked_image, batch_size, height, width, dtype, device, generator, do_classifier_free_guidance\n    ):\n        # resize the mask to latents shape as we concatenate the mask to the latents\n        # we do that before converting to dtype to avoid breaking in case we're using cpu_offload\n        # and half precision\n        mask = torch.nn.functional.interpolate(\n            mask, size=(height // self.vae_scale_factor, width // self.vae_scale_factor)\n        )\n        mask = mask.to(device=device, dtype=dtype)\n\n        masked_image = masked_image.to(device=device, dtype=dtype)\n\n        # encode the mask image into latents space so we can concatenate it to the latents\n        if isinstance(generator, list):\n            masked_image_latents = [\n                self.vae.encode(masked_image[i : i + 1]).latent_dist.sample(generator=generator[i])\n                for i in range(batch_size)\n            ]\n            masked_image_latents = torch.cat(masked_image_latents, dim=0)\n        else:\n            masked_image_latents = self.vae.encode(masked_image).latent_dist.sample(generator=generator)\n        masked_image_latents = self.vae.config.scaling_factor * masked_image_latents\n\n        # duplicate mask and masked_image_latents for each generation per prompt, using mps friendly method\n        if mask.shape[0] < batch_size:\n            if not batch_size % mask.shape[0] == 0:\n                raise ValueError(\n                    \"The passed mask and the required batch size don't match. Masks are supposed to be duplicated to\"\n                    f\" a total batch size of {batch_size}, but {mask.shape[0]} masks were passed. Make sure the number\"\n                    \" of masks that you pass is divisible by the total requested batch size.\"\n                )\n            mask = mask.repeat(batch_size // mask.shape[0], 1, 1, 1)\n        if masked_image_latents.shape[0] < batch_size:\n            if not batch_size % masked_image_latents.shape[0] == 0:\n                raise ValueError(\n                    \"The passed images and the required batch size don't match. 
Images are supposed to be duplicated\"\n                    f\" to a total batch size of {batch_size}, but {masked_image_latents.shape[0]} images were passed.\"\n                    \" Make sure the number of images that you pass is divisible by the total requested batch size.\"\n                )\n            masked_image_latents = masked_image_latents.repeat(batch_size // masked_image_latents.shape[0], 1, 1, 1)\n\n        mask = torch.cat([mask] * 2) if do_classifier_free_guidance else mask\n        masked_image_latents = (\n            torch.cat([masked_image_latents] * 2) if do_classifier_free_guidance else masked_image_latents\n        )\n\n        # aligning device to prevent device errors when concating it with the latent model input\n        masked_image_latents = masked_image_latents.to(device=device, dtype=dtype)\n        return mask, masked_image_latents\n\n    def _encode_image(self, image, device, num_images_per_prompt, do_classifier_free_guidance):\n        dtype = next(self.image_encoder.parameters()).dtype\n\n        if not isinstance(image, torch.Tensor):\n            image = self.feature_extractor(images=image, return_tensors=\"pt\").pixel_values\n\n        image = image.to(device=device, dtype=dtype)\n        image_embeddings, negative_prompt_embeds = self.image_encoder(image, return_uncond_vector=True)\n\n        # duplicate image embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = image_embeddings.shape\n        image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1)\n        image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, image_embeddings.shape[0], 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(bs_embed * num_images_per_prompt, 1, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            image_embeddings = torch.cat([negative_prompt_embeds, image_embeddings])\n\n        return image_embeddings\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        example_image: Union[torch.FloatTensor, PIL.Image.Image],\n        image: Union[torch.FloatTensor, PIL.Image.Image],\n        mask_image: Union[torch.FloatTensor, PIL.Image.Image],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 5.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            example_image (`torch.FloatTensor` or `PIL.Image.Image` or `List[PIL.Image.Image]`):\n                The exemplar image to guide the image generation.\n            image (`torch.FloatTensor` or `PIL.Image.Image` or `List[PIL.Image.Image]`):\n                `Image`, or tensor 
representing an image batch which will be inpainted, *i.e.* parts of the image will\n                be masked out with `mask_image` and repainted according to `example_image`.\n            mask_image (`torch.FloatTensor` or `PIL.Image.Image` or `List[PIL.Image.Image]`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                repainted, while black pixels will be preserved. If `mask_image` is a PIL image, it will be converted\n                to a single channel (luminance) before use. If it's a tensor, it should contain one color channel (L)\n                instead of 3, so the expected shape would be `(B, H, W, 1)`.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 5.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2 of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. A higher guidance scale encourages the model to generate images that are closely linked to the\n                `example_image`, usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Define call parameters\n        if isinstance(image, PIL.Image.Image):\n            batch_size = 1\n        elif isinstance(image, list):\n            batch_size = len(image)\n        else:\n            batch_size = image.shape[0]\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 2. Preprocess mask and image\n        mask, masked_image = prepare_mask_and_masked_image(image, mask_image)\n        height, width = masked_image.shape[-2:]\n\n        # 3. Check inputs\n        self.check_inputs(example_image, height, width, callback_steps)\n\n        # 4. Encode input image\n        image_embeddings = self._encode_image(\n            example_image, device, num_images_per_prompt, do_classifier_free_guidance\n        )\n\n        # 5. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.vae.config.latent_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            image_embeddings.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 7. Prepare mask latent variables\n        mask, masked_image_latents = self.prepare_mask_latents(\n            mask,\n            masked_image,\n            batch_size * num_images_per_prompt,\n            height,\n            width,\n            image_embeddings.dtype,\n            device,\n            generator,\n            do_classifier_free_guidance,\n        )\n\n        # 8. 
Check that sizes of mask, masked image and latents match\n        num_channels_mask = mask.shape[1]\n        num_channels_masked_image = masked_image_latents.shape[1]\n        if num_channels_latents + num_channels_mask + num_channels_masked_image != self.unet.config.in_channels:\n            raise ValueError(\n                f\"Incorrect configuration settings! The config of `pipeline.unet`: {self.unet.config} expects\"\n                f\" {self.unet.config.in_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_mask`: {num_channels_mask} + `num_channels_masked_image`: {num_channels_masked_image}\"\n                f\" = {num_channels_latents+num_channels_masked_image+num_channels_mask}. Please verify the config of\"\n                \" `pipeline.unet` or your `mask_image` or `image` input.\"\n            )\n\n        # 9. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 10. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n\n                # concat latents, mask, masked_image_latents in the channel dimension\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                latent_model_input = torch.cat([latent_model_input, masked_image_latents, mask], dim=1)\n\n                # predict the noise residual\n                noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, image_embeddings.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
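  {
    "path": "examples/paint_by_example_usage_sketch.py",
    "content": "# NOTE: hypothetical usage sketch, not part of the diffusers library itself.\n# It shows how the `PaintByExamplePipeline` defined above might be called; the checkpoint id and\n# the local image paths are assumptions, substitute your own.\nimport torch\nfrom PIL import Image\n\nfrom diffusers import PaintByExamplePipeline\n\n\ndef main():\n    # Load the image-guided inpainting pipeline (checkpoint id is an assumption).\n    pipe = PaintByExamplePipeline.from_pretrained(\n        \"Fantasy-Studio/Paint-by-Example\", torch_dtype=torch.float16\n    )\n    pipe = pipe.to(\"cuda\")\n\n    # `image` is the scene to edit, `mask_image` marks the region to repaint (white = repaint),\n    # and `example_image` provides the visual guidance instead of a text prompt.\n    image = Image.open(\"scene.png\").convert(\"RGB\").resize((512, 512))\n    mask_image = Image.open(\"mask.png\").convert(\"L\").resize((512, 512))\n    example_image = Image.open(\"example.png\").convert(\"RGB\").resize((512, 512))\n\n    result = pipe(\n        example_image=example_image,\n        image=image,\n        mask_image=mask_image,\n        num_inference_steps=50,\n        guidance_scale=5.0,\n    ).images[0]\n    result.save(\"paint_by_example_result.png\")\n\n\nif __name__ == \"__main__\":\n    main()\n"
  },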
  {
    "path": "diffusers/pipelines/pipeline_flax_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport importlib\nimport inspect\nimport os\nfrom typing import Any, Dict, List, Optional, Union\n\nimport flax\nimport numpy as np\nimport PIL\nfrom flax.core.frozen_dict import FrozenDict\nfrom huggingface_hub import snapshot_download\nfrom PIL import Image\nfrom tqdm.auto import tqdm\n\nfrom ..configuration_utils import ConfigMixin\nfrom ..models.modeling_flax_utils import FLAX_WEIGHTS_NAME, FlaxModelMixin\nfrom ..schedulers.scheduling_utils_flax import SCHEDULER_CONFIG_NAME, FlaxSchedulerMixin\nfrom ..utils import CONFIG_NAME, DIFFUSERS_CACHE, BaseOutput, http_user_agent, is_transformers_available, logging\n\n\nif is_transformers_available():\n    from transformers import FlaxPreTrainedModel\n\nINDEX_FILE = \"diffusion_flax_model.bin\"\n\n\nlogger = logging.get_logger(__name__)\n\n\nLOADABLE_CLASSES = {\n    \"diffusers\": {\n        \"FlaxModelMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"FlaxSchedulerMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"FlaxDiffusionPipeline\": [\"save_pretrained\", \"from_pretrained\"],\n    },\n    \"transformers\": {\n        \"PreTrainedTokenizer\": [\"save_pretrained\", \"from_pretrained\"],\n        \"PreTrainedTokenizerFast\": [\"save_pretrained\", \"from_pretrained\"],\n        \"FlaxPreTrainedModel\": [\"save_pretrained\", \"from_pretrained\"],\n        \"FeatureExtractionMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"ProcessorMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"ImageProcessingMixin\": [\"save_pretrained\", \"from_pretrained\"],\n    },\n}\n\nALL_IMPORTABLE_CLASSES = {}\nfor library in LOADABLE_CLASSES:\n    ALL_IMPORTABLE_CLASSES.update(LOADABLE_CLASSES[library])\n\n\ndef import_flax_or_no_model(module, class_name):\n    try:\n        # 1. First make sure that if a Flax object is present, import this one\n        class_obj = getattr(module, \"Flax\" + class_name)\n    except AttributeError:\n        # 2. If this doesn't work, it's not a model and we don't append \"Flax\"\n        class_obj = getattr(module, class_name)\n    except AttributeError:\n        raise ValueError(f\"Neither Flax{class_name} nor {class_name} exist in {module}\")\n\n    return class_obj\n\n\n@flax.struct.dataclass\nclass FlaxImagePipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for image pipelines.\n\n    Args:\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. 
PIL images or numpy array present the denoised images of the diffusion pipeline.\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n\n\nclass FlaxDiffusionPipeline(ConfigMixin):\n    r\"\"\"\n    Base class for all models.\n\n    [`FlaxDiffusionPipeline`] takes care of storing all components (models, schedulers, processors) for diffusion\n    pipelines and handles methods for loading, downloading and saving models as well as a few methods common to all\n    pipelines to:\n\n        - enabling/disabling the progress bar for the denoising iteration\n\n    Class attributes:\n\n        - **config_name** ([`str`]) -- name of the config file that will store the class and module names of all\n          components of the diffusion pipeline.\n    \"\"\"\n    config_name = \"model_index.json\"\n\n    def register_modules(self, **kwargs):\n        # import it here to avoid circular import\n        from diffusers import pipelines\n\n        for name, module in kwargs.items():\n            if module is None:\n                register_dict = {name: (None, None)}\n            else:\n                # retrieve library\n                library = module.__module__.split(\".\")[0]\n\n                # check if the module is a pipeline module\n                pipeline_dir = module.__module__.split(\".\")[-2]\n                path = module.__module__.split(\".\")\n                is_pipeline_module = pipeline_dir in path and hasattr(pipelines, pipeline_dir)\n\n                # if library is not in LOADABLE_CLASSES, then it is a custom module.\n                # Or if it's a pipeline module, then the module is inside the pipeline\n                # folder so we set the library to module name.\n                if library not in LOADABLE_CLASSES or is_pipeline_module:\n                    library = pipeline_dir\n\n                # retrieve class_name\n                class_name = module.__class__.__name__\n\n                register_dict = {name: (library, class_name)}\n\n            # save model index config\n            self.register_to_config(**register_dict)\n\n            # set models\n            setattr(self, name, module)\n\n    def save_pretrained(self, save_directory: Union[str, os.PathLike], params: Union[Dict, FrozenDict]):\n        # TODO: handle inference_state\n        \"\"\"\n        Save all variables of the pipeline that can be saved and loaded as well as the pipelines configuration file to\n        a directory. A pipeline variable can be saved and loaded if its class implements both a save and loading\n        method. The pipeline can easily be re-loaded using the `[`~FlaxDiffusionPipeline.from_pretrained`]` class\n        method.\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. 
Will be created if it doesn't exist.\n        \"\"\"\n        self.save_config(save_directory)\n\n        model_index_dict = dict(self.config)\n        model_index_dict.pop(\"_class_name\")\n        model_index_dict.pop(\"_diffusers_version\")\n        model_index_dict.pop(\"_module\", None)\n\n        for pipeline_component_name in model_index_dict.keys():\n            sub_model = getattr(self, pipeline_component_name)\n            if sub_model is None:\n                # edge case for saving a pipeline with safety_checker=None\n                continue\n\n            model_cls = sub_model.__class__\n\n            save_method_name = None\n            # search for the model's base class in LOADABLE_CLASSES\n            for library_name, library_classes in LOADABLE_CLASSES.items():\n                library = importlib.import_module(library_name)\n                for base_class, save_load_methods in library_classes.items():\n                    class_candidate = getattr(library, base_class, None)\n                    if class_candidate is not None and issubclass(model_cls, class_candidate):\n                        # if we found a suitable base class in LOADABLE_CLASSES then grab its save method\n                        save_method_name = save_load_methods[0]\n                        break\n                if save_method_name is not None:\n                    break\n\n            save_method = getattr(sub_model, save_method_name)\n            expects_params = \"params\" in set(inspect.signature(save_method).parameters.keys())\n\n            if expects_params:\n                save_method(\n                    os.path.join(save_directory, pipeline_component_name), params=params[pipeline_component_name]\n                )\n            else:\n                save_method(os.path.join(save_directory, pipeline_component_name))\n\n    @classmethod\n    def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs):\n        r\"\"\"\n        Instantiate a Flax diffusion pipeline from pre-trained pipeline weights.\n\n        The pipeline is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated).\n\n        The warning *Weights from XXX not initialized from pretrained model* means that the weights of XXX do not come\n        pretrained with the rest of the model. It is up to you to train those weights with a downstream fine-tuning\n        task.\n\n        The warning *Weights from XXX not used in YYY* means that the layer XXX is not used by YYY, therefore those\n        weights are discarded.\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n\n                    - A string, the *repo id* of a pretrained pipeline hosted inside a model repo on\n                      https://huggingface.co/ Valid repo ids have to be located under a user or organization name, like\n                      `CompVis/ldm-text2im-large-256`.\n                    - A path to a *directory* containing pipeline weights saved using\n                      [`~FlaxDiffusionPipeline.save_pretrained`], e.g., `./my_pipeline_directory/`.\n            dtype (`str` or `jnp.dtype`, *optional*):\n                Override the default `jnp.dtype` and load the model under this dtype. 
If `\"auto\"` is passed the dtype\n                will be automatically derived from the model's weights.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `huggingface-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information. specify the folder name here.\n\n            kwargs (remaining dictionary of keyword arguments, *optional*):\n                Can be used to overwrite load - and saveable variables - *i.e.* the pipeline components - of the\n                specific pipeline class. The overwritten components are then directly passed to the pipelines\n                `__init__` method. See example below for more information.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models), *e.g.* `\"runwayml/stable-diffusion-v1-5\"`\n\n        </Tip>\n\n        <Tip>\n\n        Activate the special [\"offline-mode\"](https://huggingface.co/diffusers/installation.html#offline-mode) to use\n        this method in a firewalled environment.\n\n        </Tip>\n\n        Examples:\n\n        ```py\n        >>> from diffusers import FlaxDiffusionPipeline\n\n        >>> # Download pipeline from huggingface.co and cache.\n        >>> # Requires to be logged in to Hugging Face hub,\n        >>> # see more in [the documentation](https://huggingface.co/docs/hub/security-tokens)\n        >>> pipeline, params = FlaxDiffusionPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\",\n        ...     revision=\"bf16\",\n        ...     
dtype=jnp.bfloat16,\n        ... )\n\n        >>> # Download pipeline, but use a different scheduler\n        >>> from diffusers import FlaxDPMSolverMultistepScheduler\n\n        >>> model_id = \"runwayml/stable-diffusion-v1-5\"\n        >>> dpmpp, dpmpp_state = FlaxDPMSolverMultistepScheduler.from_pretrained(\n        ...     model_id,\n        ...     subfolder=\"scheduler\",\n        ... )\n\n        >>> dpm_pipe, dpm_params = FlaxStableDiffusionPipeline.from_pretrained(\n        ...     model_id, revision=\"bf16\", dtype=jnp.bfloat16, scheduler=dpmpp\n        ... )\n        >>> dpm_params[\"scheduler\"] = dpmpp_state\n        ```\n        \"\"\"\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", False)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        from_pt = kwargs.pop(\"from_pt\", False)\n        use_memory_efficient_attention = kwargs.pop(\"use_memory_efficient_attention\", False)\n        dtype = kwargs.pop(\"dtype\", None)\n\n        # 1. Download the checkpoints and configs\n        # use snapshot download here to get it working from from_pretrained\n        if not os.path.isdir(pretrained_model_name_or_path):\n            config_dict = cls.load_config(\n                pretrained_model_name_or_path,\n                cache_dir=cache_dir,\n                resume_download=resume_download,\n                proxies=proxies,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n                revision=revision,\n            )\n            # make sure we only download sub-folders and `diffusers` filenames\n            folder_names = [k for k in config_dict.keys() if not k.startswith(\"_\")]\n            allow_patterns = [os.path.join(k, \"*\") for k in folder_names]\n            allow_patterns += [FLAX_WEIGHTS_NAME, SCHEDULER_CONFIG_NAME, CONFIG_NAME, cls.config_name]\n\n            # make sure we don't download PyTorch weights, unless when using from_pt\n            ignore_patterns = \"*.bin\" if not from_pt else []\n\n            if cls != FlaxDiffusionPipeline:\n                requested_pipeline_class = cls.__name__\n            else:\n                requested_pipeline_class = config_dict.get(\"_class_name\", cls.__name__)\n                requested_pipeline_class = (\n                    requested_pipeline_class\n                    if requested_pipeline_class.startswith(\"Flax\")\n                    else \"Flax\" + requested_pipeline_class\n                )\n\n            user_agent = {\"pipeline_class\": requested_pipeline_class}\n            user_agent = http_user_agent(user_agent)\n\n            # download all allow_patterns\n            cached_folder = snapshot_download(\n                pretrained_model_name_or_path,\n                cache_dir=cache_dir,\n                resume_download=resume_download,\n                proxies=proxies,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n                revision=revision,\n                allow_patterns=allow_patterns,\n                ignore_patterns=ignore_patterns,\n                user_agent=user_agent,\n            )\n        else:\n            cached_folder = pretrained_model_name_or_path\n\n        config_dict = cls.load_config(cached_folder)\n\n        # 2. 
Load the pipeline class, if using custom module then load it from the hub\n        # if we load from explicit class, let's use it\n        if cls != FlaxDiffusionPipeline:\n            pipeline_class = cls\n        else:\n            diffusers_module = importlib.import_module(cls.__module__.split(\".\")[0])\n            class_name = (\n                config_dict[\"_class_name\"]\n                if config_dict[\"_class_name\"].startswith(\"Flax\")\n                else \"Flax\" + config_dict[\"_class_name\"]\n            )\n            pipeline_class = getattr(diffusers_module, class_name)\n\n        # some modules can be passed directly to the init\n        # in this case they are already instantiated in `kwargs`\n        # extract them here\n        expected_modules, optional_kwargs = cls._get_signature_keys(pipeline_class)\n        passed_class_obj = {k: kwargs.pop(k) for k in expected_modules if k in kwargs}\n\n        init_dict, _, _ = pipeline_class.extract_init_dict(config_dict, **kwargs)\n\n        init_kwargs = {}\n\n        # inference_params\n        params = {}\n\n        # import it here to avoid circular import\n        from diffusers import pipelines\n\n        # 3. Load each module in the pipeline\n        for name, (library_name, class_name) in init_dict.items():\n            if class_name is None:\n                # edge case for when the pipeline was saved with safety_checker=None\n                init_kwargs[name] = None\n                continue\n\n            is_pipeline_module = hasattr(pipelines, library_name)\n            loaded_sub_model = None\n            sub_model_should_be_defined = True\n\n            # if the model is in a pipeline module, then we load it from the pipeline\n            if name in passed_class_obj:\n                # 1. check that passed_class_obj has correct parent class\n                if not is_pipeline_module:\n                    library = importlib.import_module(library_name)\n                    class_obj = getattr(library, class_name)\n                    importable_classes = LOADABLE_CLASSES[library_name]\n                    class_candidates = {c: getattr(library, c, None) for c in importable_classes.keys()}\n\n                    expected_class_obj = None\n                    for class_name, class_candidate in class_candidates.items():\n                        if class_candidate is not None and issubclass(class_obj, class_candidate):\n                            expected_class_obj = class_candidate\n\n                    if not issubclass(passed_class_obj[name].__class__, expected_class_obj):\n                        raise ValueError(\n                            f\"{passed_class_obj[name]} is of type: {type(passed_class_obj[name])}, but should be\"\n                            f\" {expected_class_obj}\"\n                        )\n                elif passed_class_obj[name] is None:\n                    logger.warning(\n                        f\"You have passed `None` for {name} to disable its functionality in {pipeline_class}. Note\"\n                        f\" that this might lead to problems when using {pipeline_class} and is not recommended.\"\n                    )\n                    sub_model_should_be_defined = False\n                else:\n                    logger.warning(\n                        f\"You have passed a non-standard module {passed_class_obj[name]}. 
We cannot verify whether it\"\n                        \" has the correct type\"\n                    )\n\n                # set passed class object\n                loaded_sub_model = passed_class_obj[name]\n            elif is_pipeline_module:\n                pipeline_module = getattr(pipelines, library_name)\n                class_obj = import_flax_or_no_model(pipeline_module, class_name)\n\n                importable_classes = ALL_IMPORTABLE_CLASSES\n                class_candidates = {c: class_obj for c in importable_classes.keys()}\n            else:\n                # else we just import it from the library.\n                library = importlib.import_module(library_name)\n                class_obj = import_flax_or_no_model(library, class_name)\n\n                importable_classes = LOADABLE_CLASSES[library_name]\n                class_candidates = {c: getattr(library, c, None) for c in importable_classes.keys()}\n\n            if loaded_sub_model is None and sub_model_should_be_defined:\n                load_method_name = None\n                for class_name, class_candidate in class_candidates.items():\n                    if class_candidate is not None and issubclass(class_obj, class_candidate):\n                        load_method_name = importable_classes[class_name][1]\n\n                load_method = getattr(class_obj, load_method_name)\n\n                # check if the module is in a subdirectory\n                if os.path.isdir(os.path.join(cached_folder, name)):\n                    loadable_folder = os.path.join(cached_folder, name)\n                else:\n                    loaded_sub_model = cached_folder\n\n                if issubclass(class_obj, FlaxModelMixin):\n                    loaded_sub_model, loaded_params = load_method(\n                        loadable_folder,\n                        from_pt=from_pt,\n                        use_memory_efficient_attention=use_memory_efficient_attention,\n                        dtype=dtype,\n                    )\n                    params[name] = loaded_params\n                elif is_transformers_available() and issubclass(class_obj, FlaxPreTrainedModel):\n                    if from_pt:\n                        # TODO(Suraj): Fix this in Transformers. We should be able to use `_do_init=False` here\n                        loaded_sub_model = load_method(loadable_folder, from_pt=from_pt)\n                        loaded_params = loaded_sub_model.params\n                        del loaded_sub_model._params\n                    else:\n                        loaded_sub_model, loaded_params = load_method(loadable_folder, _do_init=False)\n                    params[name] = loaded_params\n                elif issubclass(class_obj, FlaxSchedulerMixin):\n                    loaded_sub_model, scheduler_state = load_method(loadable_folder)\n                    params[name] = scheduler_state\n                else:\n                    loaded_sub_model = load_method(loadable_folder)\n\n            init_kwargs[name] = loaded_sub_model  # UNet(...), # DiffusionSchedule(...)\n\n        # 4. 
Potentially add passed objects if expected\n        missing_modules = set(expected_modules) - set(init_kwargs.keys())\n        passed_modules = list(passed_class_obj.keys())\n\n        if len(missing_modules) > 0 and missing_modules <= set(passed_modules):\n            for module in missing_modules:\n                init_kwargs[module] = passed_class_obj.get(module, None)\n        elif len(missing_modules) > 0:\n            passed_modules = set(list(init_kwargs.keys()) + list(passed_class_obj.keys())) - optional_kwargs\n            raise ValueError(\n                f\"Pipeline {pipeline_class} expected {expected_modules}, but only {passed_modules} were passed.\"\n            )\n\n        model = pipeline_class(**init_kwargs, dtype=dtype)\n        return model, params\n\n    @staticmethod\n    def _get_signature_keys(obj):\n        parameters = inspect.signature(obj.__init__).parameters\n        required_parameters = {k: v for k, v in parameters.items() if v.default == inspect._empty}\n        optional_parameters = set({k for k, v in parameters.items() if v.default != inspect._empty})\n        expected_modules = set(required_parameters.keys()) - {\"self\"}\n        return expected_modules, optional_parameters\n\n    @property\n    def components(self) -> Dict[str, Any]:\n        r\"\"\"\n\n        The `self.components` property can be useful to run different pipelines with the same weights and\n        configurations to not have to re-allocate memory.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import (\n        ...     FlaxStableDiffusionPipeline,\n        ...     FlaxStableDiffusionImg2ImgPipeline,\n        ... )\n\n        >>> text2img = FlaxStableDiffusionPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", revision=\"bf16\", dtype=jnp.bfloat16\n        ... )\n        >>> img2img = FlaxStableDiffusionImg2ImgPipeline(**text2img.components)\n        ```\n\n        Returns:\n            A dictionary containing all the modules needed to initialize the pipeline.\n        \"\"\"\n        expected_modules, optional_parameters = self._get_signature_keys(self)\n        components = {\n            k: getattr(self, k) for k in self.config.keys() if not k.startswith(\"_\") and k not in optional_parameters\n        }\n\n        if set(components.keys()) != expected_modules:\n            raise ValueError(\n                f\"{self} has been incorrectly initialized or {self.__class__} is incorrectly implemented. 
Expected\"\n                f\" {expected_modules} to be defined, but {components} are defined.\"\n            )\n\n        return components\n\n    @staticmethod\n    def numpy_to_pil(images):\n        \"\"\"\n        Convert a numpy image or a batch of images to a PIL image.\n        \"\"\"\n        if images.ndim == 3:\n            images = images[None, ...]\n        images = (images * 255).round().astype(\"uint8\")\n        if images.shape[-1] == 1:\n            # special case for grayscale (single channel) images\n            pil_images = [Image.fromarray(image.squeeze(), mode=\"L\") for image in images]\n        else:\n            pil_images = [Image.fromarray(image) for image in images]\n\n        return pil_images\n\n    # TODO: make it compatible with jax.lax\n    def progress_bar(self, iterable):\n        if not hasattr(self, \"_progress_bar_config\"):\n            self._progress_bar_config = {}\n        elif not isinstance(self._progress_bar_config, dict):\n            raise ValueError(\n                f\"`self._progress_bar_config` should be of type `dict`, but is {type(self._progress_bar_config)}.\"\n            )\n\n        return tqdm(iterable, **self._progress_bar_config)\n\n    def set_progress_bar_config(self, **kwargs):\n        self._progress_bar_config = kwargs\n"
  },
  {
    "path": "diffusers/pipelines/pipeline_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n# Copyright (c) 2022, NVIDIA CORPORATION.  All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport fnmatch\nimport importlib\nimport inspect\nimport os\nimport re\nimport sys\nimport warnings\nfrom dataclasses import dataclass\nfrom pathlib import Path\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom huggingface_hub import hf_hub_download, model_info, snapshot_download\nfrom packaging import version\nfrom tqdm.auto import tqdm\n\nimport diffusers\n\nfrom .. import __version__\nfrom ..configuration_utils import ConfigMixin\nfrom ..models.modeling_utils import _LOW_CPU_MEM_USAGE_DEFAULT\nfrom ..schedulers.scheduling_utils import SCHEDULER_CONFIG_NAME\nfrom ..utils import (\n    CONFIG_NAME,\n    DEPRECATED_REVISION_ARGS,\n    DIFFUSERS_CACHE,\n    HF_HUB_OFFLINE,\n    SAFETENSORS_WEIGHTS_NAME,\n    WEIGHTS_NAME,\n    BaseOutput,\n    deprecate,\n    get_class_from_dynamic_module,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_compiled_module,\n    is_safetensors_available,\n    is_torch_version,\n    is_transformers_available,\n    logging,\n    numpy_to_pil,\n)\n\n\nif is_transformers_available():\n    import transformers\n    from transformers import PreTrainedModel\n    from transformers.utils import FLAX_WEIGHTS_NAME as TRANSFORMERS_FLAX_WEIGHTS_NAME\n    from transformers.utils import SAFE_WEIGHTS_NAME as TRANSFORMERS_SAFE_WEIGHTS_NAME\n    from transformers.utils import WEIGHTS_NAME as TRANSFORMERS_WEIGHTS_NAME\n\nfrom ..utils import FLAX_WEIGHTS_NAME, ONNX_EXTERNAL_WEIGHTS_NAME, ONNX_WEIGHTS_NAME\n\n\nif is_accelerate_available():\n    import accelerate\n\n\nINDEX_FILE = \"diffusion_pytorch_model.bin\"\nCUSTOM_PIPELINE_FILE_NAME = \"pipeline.py\"\nDUMMY_MODULES_FOLDER = \"diffusers.utils\"\nTRANSFORMERS_DUMMY_MODULES_FOLDER = \"transformers.utils\"\n\n\nlogger = logging.get_logger(__name__)\n\n\nLOADABLE_CLASSES = {\n    \"diffusers\": {\n        \"ModelMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"SchedulerMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"DiffusionPipeline\": [\"save_pretrained\", \"from_pretrained\"],\n        \"OnnxRuntimeModel\": [\"save_pretrained\", \"from_pretrained\"],\n    },\n    \"transformers\": {\n        \"PreTrainedTokenizer\": [\"save_pretrained\", \"from_pretrained\"],\n        \"PreTrainedTokenizerFast\": [\"save_pretrained\", \"from_pretrained\"],\n        \"PreTrainedModel\": [\"save_pretrained\", \"from_pretrained\"],\n        \"FeatureExtractionMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"ProcessorMixin\": [\"save_pretrained\", \"from_pretrained\"],\n        \"ImageProcessingMixin\": [\"save_pretrained\", \"from_pretrained\"],\n    },\n    \"onnxruntime.training\": {\n        \"ORTModule\": [\"save_pretrained\", \"from_pretrained\"],\n    },\n}\n\nALL_IMPORTABLE_CLASSES = {}\nfor library in 
LOADABLE_CLASSES:\n    ALL_IMPORTABLE_CLASSES.update(LOADABLE_CLASSES[library])\n\n\n@dataclass\nclass ImagePipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for image pipelines.\n\n    Args:\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n\n\n@dataclass\nclass AudioPipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for audio pipelines.\n\n    Args:\n        audios (`np.ndarray`)\n            List of denoised samples of shape `(batch_size, num_channels, sample_rate)`. Numpy array present the\n            denoised audio samples of the diffusion pipeline.\n    \"\"\"\n\n    audios: np.ndarray\n\n\ndef is_safetensors_compatible(filenames, variant=None, passed_components=None) -> bool:\n    \"\"\"\n    Checking for safetensors compatibility:\n    - By default, all models are saved with the default pytorch serialization, so we use the list of default pytorch\n      files to know which safetensors files are needed.\n    - The model is safetensors compatible only if there is a matching safetensors file for every default pytorch file.\n\n    Converting default pytorch serialized filenames to safetensors serialized filenames:\n    - For models from the diffusers library, just replace the \".bin\" extension with \".safetensors\"\n    - For models from the transformers library, the filename changes from \"pytorch_model\" to \"model\", and the \".bin\"\n      extension is replaced with \".safetensors\"\n    \"\"\"\n    pt_filenames = []\n\n    sf_filenames = set()\n\n    passed_components = passed_components or []\n\n    for filename in filenames:\n        _, extension = os.path.splitext(filename)\n\n        if len(filename.split(\"/\")) == 2 and filename.split(\"/\")[0] in passed_components:\n            continue\n\n        if extension == \".bin\":\n            pt_filenames.append(filename)\n        elif extension == \".safetensors\":\n            sf_filenames.add(filename)\n\n    for filename in pt_filenames:\n        #  filename = 'foo/bar/baz.bam' -> path = 'foo/bar', filename = 'baz', extention = '.bam'\n        path, filename = os.path.split(filename)\n        filename, extension = os.path.splitext(filename)\n\n        if filename.startswith(\"pytorch_model\"):\n            filename = filename.replace(\"pytorch_model\", \"model\")\n        else:\n            filename = filename\n\n        expected_sf_filename = os.path.join(path, filename)\n        expected_sf_filename = f\"{expected_sf_filename}.safetensors\"\n\n        if expected_sf_filename not in sf_filenames:\n            logger.warning(f\"{expected_sf_filename} not found\")\n            return False\n\n    return True\n\n\ndef variant_compatible_siblings(filenames, variant=None) -> Union[List[os.PathLike], str]:\n    weight_names = [\n        WEIGHTS_NAME,\n        SAFETENSORS_WEIGHTS_NAME,\n        FLAX_WEIGHTS_NAME,\n        ONNX_WEIGHTS_NAME,\n        ONNX_EXTERNAL_WEIGHTS_NAME,\n    ]\n\n    if is_transformers_available():\n        weight_names += [TRANSFORMERS_WEIGHTS_NAME, TRANSFORMERS_SAFE_WEIGHTS_NAME, TRANSFORMERS_FLAX_WEIGHTS_NAME]\n\n    # model_pytorch, diffusion_model_pytorch, ...\n    weight_prefixes = [w.split(\".\")[0] for w in weight_names]\n    # .bin, .safetensors, ...\n    weight_suffixs = 
[w.split(\".\")[-1] for w in weight_names]\n    # -00001-of-00002\n    transformers_index_format = r\"\\d{5}-of-\\d{5}\"\n\n    if variant is not None:\n        # `diffusion_pytorch_model.fp16.bin` as well as `model.fp16-00001-of-00002.safetenstors`\n        variant_file_re = re.compile(\n            rf\"({'|'.join(weight_prefixes)})\\.({variant}|{variant}-{transformers_index_format})\\.({'|'.join(weight_suffixs)})$\"\n        )\n        # `text_encoder/pytorch_model.bin.index.fp16.json`\n        variant_index_re = re.compile(\n            rf\"({'|'.join(weight_prefixes)})\\.({'|'.join(weight_suffixs)})\\.index\\.{variant}\\.json$\"\n        )\n\n    # `diffusion_pytorch_model.bin` as well as `model-00001-of-00002.safetenstors`\n    non_variant_file_re = re.compile(\n        rf\"({'|'.join(weight_prefixes)})(-{transformers_index_format})?\\.({'|'.join(weight_suffixs)})$\"\n    )\n    # `text_encoder/pytorch_model.bin.index.json`\n    non_variant_index_re = re.compile(rf\"({'|'.join(weight_prefixes)})\\.({'|'.join(weight_suffixs)})\\.index\\.json\")\n\n    if variant is not None:\n        variant_weights = {f for f in filenames if variant_file_re.match(f.split(\"/\")[-1]) is not None}\n        variant_indexes = {f for f in filenames if variant_index_re.match(f.split(\"/\")[-1]) is not None}\n        variant_filenames = variant_weights | variant_indexes\n    else:\n        variant_filenames = set()\n\n    non_variant_weights = {f for f in filenames if non_variant_file_re.match(f.split(\"/\")[-1]) is not None}\n    non_variant_indexes = {f for f in filenames if non_variant_index_re.match(f.split(\"/\")[-1]) is not None}\n    non_variant_filenames = non_variant_weights | non_variant_indexes\n\n    # all variant filenames will be used by default\n    usable_filenames = set(variant_filenames)\n\n    def convert_to_variant(filename):\n        if \"index\" in filename:\n            variant_filename = filename.replace(\"index\", f\"index.{variant}\")\n        elif re.compile(f\"^(.*?){transformers_index_format}\").match(filename) is not None:\n            variant_filename = f\"{filename.split('-')[0]}.{variant}-{'-'.join(filename.split('-')[1:])}\"\n        else:\n            variant_filename = f\"{filename.split('.')[0]}.{variant}.{filename.split('.')[1]}\"\n        return variant_filename\n\n    for f in non_variant_filenames:\n        variant_filename = convert_to_variant(f)\n        if variant_filename not in usable_filenames:\n            usable_filenames.add(f)\n\n    return usable_filenames, variant_filenames\n\n\ndef warn_deprecated_model_variant(pretrained_model_name_or_path, use_auth_token, variant, revision, model_filenames):\n    info = model_info(\n        pretrained_model_name_or_path,\n        use_auth_token=use_auth_token,\n        revision=None,\n    )\n    filenames = {sibling.rfilename for sibling in info.siblings}\n    comp_model_filenames, _ = variant_compatible_siblings(filenames, variant=revision)\n    comp_model_filenames = [\".\".join(f.split(\".\")[:1] + f.split(\".\")[2:]) for f in comp_model_filenames]\n\n    if set(comp_model_filenames) == set(model_filenames):\n        warnings.warn(\n            f\"You are loading the variant {revision} from {pretrained_model_name_or_path} via `revision='{revision}'` even though you can load it via `variant=`{revision}`. Loading model variants via `revision='{revision}'` is deprecated and will be removed in diffusers v1. 
Please use `variant='{revision}'` instead.\",\n            FutureWarning,\n        )\n    else:\n        warnings.warn(\n            f\"You are loading the variant {revision} from {pretrained_model_name_or_path} via `revision='{revision}'`. This behavior is deprecated and will be removed in diffusers v1. One should use `variant='{revision}'` instead. However, it appears that {pretrained_model_name_or_path} currently does not have the required variant filenames in the 'main' branch. \\n The Diffusers team and community would be very grateful if you could open an issue: https://github.com/huggingface/diffusers/issues/new with the title '{pretrained_model_name_or_path} is missing {revision} files' so that the correct variant file can be added.\",\n            FutureWarning,\n        )\n\n\ndef maybe_raise_or_warn(\n    library_name, library, class_name, importable_classes, passed_class_obj, name, is_pipeline_module\n):\n    \"\"\"Simple helper method to raise or warn in case incorrect module has been passed\"\"\"\n    if not is_pipeline_module:\n        library = importlib.import_module(library_name)\n        class_obj = getattr(library, class_name)\n        class_candidates = {c: getattr(library, c, None) for c in importable_classes.keys()}\n\n        expected_class_obj = None\n        for class_name, class_candidate in class_candidates.items():\n            if class_candidate is not None and issubclass(class_obj, class_candidate):\n                expected_class_obj = class_candidate\n\n        # Dynamo wraps the original model in a private class.\n        # I didn't find a public API to get the original class.\n        sub_model = passed_class_obj[name]\n        model_cls = sub_model.__class__\n        if is_compiled_module(sub_model):\n            model_cls = sub_model._orig_mod.__class__\n\n        if not issubclass(model_cls, expected_class_obj):\n            raise ValueError(\n                f\"{passed_class_obj[name]} is of type: {model_cls}, but should be\" f\" {expected_class_obj}\"\n            )\n    else:\n        logger.warning(\n            f\"You have passed a non-standard module {passed_class_obj[name]}. 
We cannot verify whether it\"\n            \" has the correct type\"\n        )\n\n\ndef get_class_obj_and_candidates(library_name, class_name, importable_classes, pipelines, is_pipeline_module):\n    \"\"\"Simple helper method to retrieve class object of module as well as potential parent class objects\"\"\"\n    if is_pipeline_module:\n        pipeline_module = getattr(pipelines, library_name)\n\n        class_obj = getattr(pipeline_module, class_name)\n        class_candidates = {c: class_obj for c in importable_classes.keys()}\n    else:\n        # else we just import it from the library.\n        library = importlib.import_module(library_name)\n\n        class_obj = getattr(library, class_name)\n        class_candidates = {c: getattr(library, c, None) for c in importable_classes.keys()}\n\n    return class_obj, class_candidates\n\n\ndef _get_pipeline_class(class_obj, config, custom_pipeline=None, cache_dir=None, revision=None):\n    if custom_pipeline is not None:\n        if custom_pipeline.endswith(\".py\"):\n            path = Path(custom_pipeline)\n            # decompose into folder & file\n            file_name = path.name\n            custom_pipeline = path.parent.absolute()\n        else:\n            file_name = CUSTOM_PIPELINE_FILE_NAME\n\n        return get_class_from_dynamic_module(\n            custom_pipeline, module_file=file_name, cache_dir=cache_dir, revision=revision\n        )\n\n    if class_obj != DiffusionPipeline:\n        return class_obj\n\n    diffusers_module = importlib.import_module(class_obj.__module__.split(\".\")[0])\n    return getattr(diffusers_module, config[\"_class_name\"])\n\n\ndef load_sub_model(\n    library_name: str,\n    class_name: str,\n    importable_classes: List[Any],\n    pipelines: Any,\n    is_pipeline_module: bool,\n    pipeline_class: Any,\n    torch_dtype: torch.dtype,\n    provider: Any,\n    sess_options: Any,\n    device_map: Optional[Union[Dict[str, torch.device], str]],\n    model_variants: Dict[str, str],\n    name: str,\n    from_flax: bool,\n    variant: str,\n    low_cpu_mem_usage: bool,\n    cached_folder: Union[str, os.PathLike],\n):\n    \"\"\"Helper method to load the module `name` from `library_name` and `class_name`\"\"\"\n    # retrieve class candidates\n    class_obj, class_candidates = get_class_obj_and_candidates(\n        library_name, class_name, importable_classes, pipelines, is_pipeline_module\n    )\n\n    load_method_name = None\n    # retrive load method name\n    for class_name, class_candidate in class_candidates.items():\n        if class_candidate is not None and issubclass(class_obj, class_candidate):\n            load_method_name = importable_classes[class_name][1]\n\n    # if load method name is None, then we have a dummy module -> raise Error\n    if load_method_name is None:\n        none_module = class_obj.__module__\n        is_dummy_path = none_module.startswith(DUMMY_MODULES_FOLDER) or none_module.startswith(\n            TRANSFORMERS_DUMMY_MODULES_FOLDER\n        )\n        if is_dummy_path and \"dummy\" in none_module:\n            # call class_obj for nice error message of missing requirements\n            class_obj()\n\n        raise ValueError(\n            f\"The component {class_obj} of {pipeline_class} cannot be loaded as it does not seem to have\"\n            f\" any of the loading methods defined in {ALL_IMPORTABLE_CLASSES}.\"\n        )\n\n    load_method = getattr(class_obj, load_method_name)\n\n    # add kwargs to loading method\n    loading_kwargs = {}\n    if 
issubclass(class_obj, torch.nn.Module):\n        loading_kwargs[\"torch_dtype\"] = torch_dtype\n    if issubclass(class_obj, diffusers.OnnxRuntimeModel):\n        loading_kwargs[\"provider\"] = provider\n        loading_kwargs[\"sess_options\"] = sess_options\n\n    is_diffusers_model = issubclass(class_obj, diffusers.ModelMixin)\n\n    if is_transformers_available():\n        transformers_version = version.parse(version.parse(transformers.__version__).base_version)\n    else:\n        transformers_version = \"N/A\"\n\n    is_transformers_model = (\n        is_transformers_available()\n        and issubclass(class_obj, PreTrainedModel)\n        and transformers_version >= version.parse(\"4.20.0\")\n    )\n\n    # When loading a transformers model, if the device_map is None, the weights will be initialized as opposed to diffusers.\n    # To make default loading faster we set the `low_cpu_mem_usage=low_cpu_mem_usage` flag which is `True` by default.\n    # This makes sure that the weights won't be initialized which significantly speeds up loading.\n    if is_diffusers_model or is_transformers_model:\n        loading_kwargs[\"device_map\"] = device_map\n        loading_kwargs[\"variant\"] = model_variants.pop(name, None)\n        if from_flax:\n            loading_kwargs[\"from_flax\"] = True\n\n        # the following can be deleted once the minimum required `transformers` version\n        # is higher than 4.27\n        if (\n            is_transformers_model\n            and loading_kwargs[\"variant\"] is not None\n            and transformers_version < version.parse(\"4.27.0\")\n        ):\n            raise ImportError(\n                f\"When passing `variant='{variant}'`, please make sure to upgrade your `transformers` version to at least 4.27.0.dev0\"\n            )\n        elif is_transformers_model and loading_kwargs[\"variant\"] is None:\n            loading_kwargs.pop(\"variant\")\n\n        # if `from_flax` and model is transformer model, can currently not load with `low_cpu_mem_usage`\n        if not (from_flax and is_transformers_model):\n            loading_kwargs[\"low_cpu_mem_usage\"] = low_cpu_mem_usage\n        else:\n            loading_kwargs[\"low_cpu_mem_usage\"] = False\n\n    # check if the module is in a subdirectory\n    if os.path.isdir(os.path.join(cached_folder, name)):\n        loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)\n    else:\n        # else load from the root directory\n        loaded_sub_model = load_method(cached_folder, **loading_kwargs)\n\n    return loaded_sub_model\n\n\nclass DiffusionPipeline(ConfigMixin):\n    r\"\"\"\n    Base class for all models.\n\n    [`DiffusionPipeline`] takes care of storing all components (models, schedulers, processors) for diffusion pipelines\n    and handles methods for loading, downloading and saving models as well as a few methods common to all pipelines to:\n\n        - move all PyTorch modules to the device of your choice\n        - enabling/disabling the progress bar for the denoising iteration\n\n    Class attributes:\n\n        - **config_name** (`str`) -- name of the config file that will store the class and module names of all\n          components of the diffusion pipeline.\n        - **_optional_components** (List[`str`]) -- list of all components that are optional so they don't have to be\n          passed for the pipeline to function (should be overridden by subclasses).\n    \"\"\"\n    config_name = \"model_index.json\"\n    _optional_components = []\n\n    def 
register_modules(self, **kwargs):\n        # import it here to avoid circular import\n        from diffusers import pipelines\n\n        for name, module in kwargs.items():\n            # retrieve library\n            if module is None:\n                register_dict = {name: (None, None)}\n            else:\n                # register the original module, not the dynamo compiled one\n                if is_compiled_module(module):\n                    module = module._orig_mod\n\n                library = module.__module__.split(\".\")[0]\n\n                # check if the module is a pipeline module\n                pipeline_dir = module.__module__.split(\".\")[-2] if len(module.__module__.split(\".\")) > 2 else None\n                path = module.__module__.split(\".\")\n                is_pipeline_module = pipeline_dir in path and hasattr(pipelines, pipeline_dir)\n\n                # if library is not in LOADABLE_CLASSES, then it is a custom module.\n                # Or if it's a pipeline module, then the module is inside the pipeline\n                # folder so we set the library to module name.\n                if library not in LOADABLE_CLASSES or is_pipeline_module:\n                    library = pipeline_dir\n\n                # retrieve class_name\n                class_name = module.__class__.__name__\n\n                register_dict = {name: (library, class_name)}\n\n            # save model index config\n            self.register_to_config(**register_dict)\n\n            # set models\n            setattr(self, name, module)\n\n    def __setattr__(self, name: str, value: Any):\n        if name in self.__dict__ and hasattr(self.config, name):\n            # We need to overwrite the config if name exists in config\n            if isinstance(getattr(self.config, name), (tuple, list)):\n                if value is not None and self.config[name][0] is not None:\n                    class_library_tuple = (value.__module__.split(\".\")[0], value.__class__.__name__)\n                else:\n                    class_library_tuple = (None, None)\n\n                self.register_to_config(**{name: class_library_tuple})\n            else:\n                self.register_to_config(**{name: value})\n\n        super().__setattr__(name, value)\n\n    def save_pretrained(\n        self,\n        save_directory: Union[str, os.PathLike],\n        safe_serialization: bool = False,\n        variant: Optional[str] = None,\n    ):\n        \"\"\"\n        Save all variables of the pipeline that can be saved and loaded as well as the pipelines configuration file to\n        a directory. A pipeline variable can be saved and loaded if its class implements both a save and loading\n        method. The pipeline can easily be re-loaded using the [`~DiffusionPipeline.from_pretrained`] class method.\n\n        Arguments:\n            save_directory (`str` or `os.PathLike`):\n                Directory to which to save. 
Will be created if it doesn't exist.\n            safe_serialization (`bool`, *optional*, defaults to `False`):\n                Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).\n            variant (`str`, *optional*):\n                If specified, weights are saved in the format pytorch_model.<variant>.bin.\n        \"\"\"\n        model_index_dict = dict(self.config)\n        model_index_dict.pop(\"_class_name\", None)\n        model_index_dict.pop(\"_diffusers_version\", None)\n        model_index_dict.pop(\"_module\", None)\n\n        expected_modules, optional_kwargs = self._get_signature_keys(self)\n\n        def is_saveable_module(name, value):\n            if name not in expected_modules:\n                return False\n            if name in self._optional_components and value[0] is None:\n                return False\n            return True\n\n        model_index_dict = {k: v for k, v in model_index_dict.items() if is_saveable_module(k, v)}\n        for pipeline_component_name in model_index_dict.keys():\n            sub_model = getattr(self, pipeline_component_name)\n            model_cls = sub_model.__class__\n\n            # Dynamo wraps the original model in a private class.\n            # I didn't find a public API to get the original class.\n            if is_compiled_module(sub_model):\n                sub_model = sub_model._orig_mod\n                model_cls = sub_model.__class__\n\n            save_method_name = None\n            # search for the model's base class in LOADABLE_CLASSES\n            for library_name, library_classes in LOADABLE_CLASSES.items():\n                if library_name in sys.modules:\n                    library = importlib.import_module(library_name)\n                else:\n                    logger.info(\n                        f\"{library_name} is not installed. 
Cannot save {pipeline_component_name} as {library_classes} from {library_name}\"\n                    )\n\n                for base_class, save_load_methods in library_classes.items():\n                    class_candidate = getattr(library, base_class, None)\n                    if class_candidate is not None and issubclass(model_cls, class_candidate):\n                        # if we found a suitable base class in LOADABLE_CLASSES then grab its save method\n                        save_method_name = save_load_methods[0]\n                        break\n                if save_method_name is not None:\n                    break\n\n            if save_method_name is None:\n                logger.warn(f\"self.{pipeline_component_name}={sub_model} of type {type(sub_model)} cannot be saved.\")\n                # make sure that unsaveable components are not tried to be loaded afterward\n                self.register_to_config(**{pipeline_component_name: (None, None)})\n                continue\n\n            save_method = getattr(sub_model, save_method_name)\n\n            # Call the save method with the argument safe_serialization only if it's supported\n            save_method_signature = inspect.signature(save_method)\n            save_method_accept_safe = \"safe_serialization\" in save_method_signature.parameters\n            save_method_accept_variant = \"variant\" in save_method_signature.parameters\n\n            save_kwargs = {}\n            if save_method_accept_safe:\n                save_kwargs[\"safe_serialization\"] = safe_serialization\n            if save_method_accept_variant:\n                save_kwargs[\"variant\"] = variant\n\n            save_method(os.path.join(save_directory, pipeline_component_name), **save_kwargs)\n\n        # finally save the config\n        self.save_config(save_directory)\n\n    def to(\n        self,\n        torch_device: Optional[Union[str, torch.device]] = None,\n        torch_dtype: Optional[torch.dtype] = None,\n        silence_dtype_warnings: bool = False,\n    ):\n        if torch_device is None and torch_dtype is None:\n            return self\n\n        # throw warning if pipeline is in \"offloaded\"-mode but user tries to manually set to GPU.\n        def module_is_sequentially_offloaded(module):\n            if not is_accelerate_available() or is_accelerate_version(\"<\", \"0.14.0\"):\n                return False\n\n            return hasattr(module, \"_hf_hook\") and not isinstance(\n                module._hf_hook, (accelerate.hooks.CpuOffload, accelerate.hooks.AlignDevicesHook)\n            )\n\n        def module_is_offloaded(module):\n            if not is_accelerate_available() or is_accelerate_version(\"<\", \"0.17.0.dev0\"):\n                return False\n\n            return hasattr(module, \"_hf_hook\") and isinstance(module._hf_hook, accelerate.hooks.CpuOffload)\n\n        # .to(\"cuda\") would raise an error if the pipeline is sequentially offloaded, so we raise our own to make it clearer\n        pipeline_is_sequentially_offloaded = any(\n            module_is_sequentially_offloaded(module) for _, module in self.components.items()\n        )\n        if pipeline_is_sequentially_offloaded and torch.device(torch_device).type == \"cuda\":\n            raise ValueError(\n                \"It seems like you have activated sequential model offloading by calling `enable_sequential_cpu_offload`, but are now attempting to move the pipeline to GPU. This is not compatible with offloading. 
Please, move your pipeline `.to('cpu')` or consider removing the move altogether if you use sequential offloading.\"\n            )\n\n        # Display a warning in this case (the operation succeeds but the benefits are lost)\n        pipeline_is_offloaded = any(module_is_offloaded(module) for _, module in self.components.items())\n        if pipeline_is_offloaded and torch.device(torch_device).type == \"cuda\":\n            logger.warning(\n                f\"It seems like you have activated model offloading by calling `enable_model_cpu_offload`, but are now manually moving the pipeline to GPU. It is strongly recommended against doing so as memory gains from offloading are likely to be lost. Offloading automatically takes care of moving the individual components {', '.join(self.components.keys())} to GPU when needed. To make sure offloading works as expected, you should consider moving the pipeline back to CPU: `pipeline.to('cpu')` or removing the move altogether if you use offloading.\"\n            )\n\n        module_names, _ = self._get_signature_keys(self)\n        modules = [getattr(self, n, None) for n in module_names]\n        modules = [m for m in modules if isinstance(m, torch.nn.Module)]\n\n        is_offloaded = pipeline_is_offloaded or pipeline_is_sequentially_offloaded\n        for module in modules:\n            is_loaded_in_8bit = hasattr(module, \"is_loaded_in_8bit\") and module.is_loaded_in_8bit\n\n            if is_loaded_in_8bit and torch_dtype is not None:\n                logger.warning(\n                    f\"The module '{module.__class__.__name__}' has been loaded in 8bit and conversion to {torch_dtype} is not yet supported. Module is still in 8bit precision.\"\n                )\n\n            if is_loaded_in_8bit and torch_device is not None:\n                logger.warning(\n                    f\"The module '{module.__class__.__name__}' has been loaded in 8bit and moving it to {torch_device} via `.to()` is not yet supported. Module is still on {module.device}.\"\n                )\n            else:\n                module.to(torch_device, torch_dtype)\n\n            if (\n                module.dtype == torch.float16\n                and str(torch_device) in [\"cpu\"]\n                and not silence_dtype_warnings\n                and not is_offloaded\n            ):\n                logger.warning(\n                    \"Pipelines loaded with `torch_dtype=torch.float16` cannot run with `cpu` device. It\"\n                    \" is not recommended to move them to `cpu` as running them will fail. Please make\"\n                    \" sure to use an accelerator to run the pipeline in inference, due to the lack of\"\n                    \" support for `float16` operations on this device in PyTorch. 
Please, remove the\"\n                    \" `torch_dtype=torch.float16` argument, or use another device for inference.\"\n                )\n        return self\n\n    @property\n    def device(self) -> torch.device:\n        r\"\"\"\n        Returns:\n            `torch.device`: The torch device on which the pipeline is located.\n        \"\"\"\n        module_names, _ = self._get_signature_keys(self)\n        modules = [getattr(self, n, None) for n in module_names]\n        modules = [m for m in modules if isinstance(m, torch.nn.Module)]\n\n        for module in modules:\n            return module.device\n\n        return torch.device(\"cpu\")\n\n    @classmethod\n    def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.PathLike]], **kwargs):\n        r\"\"\"\n        Instantiate a PyTorch diffusion pipeline from pre-trained pipeline weights.\n\n        The pipeline is set in evaluation mode by default using `model.eval()` (Dropout modules are deactivated).\n\n        The warning *Weights from XXX not initialized from pretrained model* means that the weights of XXX do not come\n        pretrained with the rest of the model. It is up to you to train those weights with a downstream fine-tuning\n        task.\n\n        The warning *Weights from XXX not used in YYY* means that the layer XXX is not used by YYY, therefore those\n        weights are discarded.\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n\n                    - A string, the *repo id* of a pretrained pipeline hosted inside a model repo on\n                      https://huggingface.co/ Valid repo ids have to be located under a user or organization name, like\n                      `CompVis/ldm-text2im-large-256`.\n                    - A path to a *directory* containing pipeline weights saved using\n                      [`~DiffusionPipeline.save_pretrained`], e.g., `./my_pipeline_directory/`.\n            torch_dtype (`str` or `torch.dtype`, *optional*):\n                Override the default `torch.dtype` and load the model under this dtype. If `\"auto\"` is passed the dtype\n                will be automatically derived from the model's weights.\n            custom_pipeline (`str`, *optional*):\n\n                <Tip warning={true}>\n\n                    This is an experimental feature and is likely to change in the future.\n\n                </Tip>\n\n                Can be either:\n\n                    - A string, the *repo id* of a custom pipeline hosted inside a model repo on\n                      https://huggingface.co/. Valid repo ids have to be located under a user or organization name,\n                      like `hf-internal-testing/diffusers-dummy-pipeline`.\n\n                        <Tip>\n\n                         It is required that the model repo has a file, called `pipeline.py` that defines the custom\n                         pipeline.\n\n                        </Tip>\n\n                    - A string, the *file name* of a community pipeline hosted on GitHub under\n                      https://github.com/huggingface/diffusers/tree/main/examples/community. 
Valid file names have to\n                      match exactly the file name without `.py` located under the above link, *e.g.*\n                      `clip_guided_stable_diffusion`.\n\n                        <Tip>\n\n                         Community pipelines are always loaded from the current `main` branch of GitHub.\n\n                        </Tip>\n\n                    - A path to a *directory* containing a custom pipeline, e.g., `./my_pipeline_directory/`.\n\n                        <Tip>\n\n                         It is required that the directory has a file, called `pipeline.py` that defines the custom\n                         pipeline.\n\n                        </Tip>\n\n                For more information on how to load and create custom pipelines, please have a look at [Loading and\n                Adding Custom\n                Pipelines](https://huggingface.co/docs/diffusers/using-diffusers/custom_pipeline_overview)\n\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `huggingface-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            custom_revision (`str`, *optional*, defaults to `\"main\"` when loading from the Hub and to local version of `diffusers` when loading from GitHub):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id similar to\n                `revision` when loading a custom pipeline from the Hub. It can be a diffusers version when loading a\n                custom pipeline from GitHub.\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. 
Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information. specify the folder name here.\n            device_map (`str` or `Dict[str, Union[int, str, torch.device]]`, *optional*):\n                A map that specifies where each submodule should go. It doesn't need to be refined to each\n                parameter/buffer name, once a given module name is inside, every submodule of it will be sent to the\n                same device.\n\n                To have Accelerate compute the most optimized `device_map` automatically, set `device_map=\"auto\"`. For\n                more information about each option see [designing a device\n                map](https://hf.co/docs/accelerate/main/en/usage_guides/big_modeling#designing-a-device-map).\n            low_cpu_mem_usage (`bool`, *optional*, defaults to `True` if torch version >= 1.9.0 else `False`):\n                Speed up model loading by not initializing the weights and only loading the pre-trained weights. This\n                also tries to not use more than 1x model size in CPU memory (including peak memory) while loading the\n                model. This is only supported when torch version >= 1.9.0. If you are using an older version of torch,\n                setting this argument to `True` will raise an error.\n            use_safetensors (`bool`, *optional*, defaults to `None`):\n                If set to `None`, the pipeline will load the `safetensors` weights if they're available **and** if the\n                `safetensors` library is installed. If set to `True`, the pipeline will forcibly load the models from\n                `safetensors` weights. If set to `False` the pipeline will *not* use `safetensors`.\n            kwargs (remaining dictionary of keyword arguments, *optional*):\n                Can be used to overwrite load - and saveable variables - *i.e.* the pipeline components - of the\n                specific pipeline class. The overwritten components are then directly passed to the pipelines\n                `__init__` method. See example below for more information.\n            variant (`str`, *optional*):\n                If specified load weights from `variant` filename, *e.g.* pytorch_model.<variant>.bin. 
`variant` is\n                ignored when using `from_flax`.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models), *e.g.* `\"runwayml/stable-diffusion-v1-5\"`\n\n        </Tip>\n\n        <Tip>\n\n        Activate the special [\"offline-mode\"](https://huggingface.co/diffusers/installation.html#offline-mode) to use\n        this method in a firewalled environment.\n\n        </Tip>\n\n        Examples:\n\n        ```py\n        >>> from diffusers import DiffusionPipeline\n\n        >>> # Download pipeline from huggingface.co and cache.\n        >>> pipeline = DiffusionPipeline.from_pretrained(\"CompVis/ldm-text2im-large-256\")\n\n        >>> # Download pipeline that requires an authorization token\n        >>> # For more information on access tokens, please refer to this section\n        >>> # of the documentation](https://huggingface.co/docs/hub/security-tokens)\n        >>> pipeline = DiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n\n        >>> # Use a different scheduler\n        >>> from diffusers import LMSDiscreteScheduler\n\n        >>> scheduler = LMSDiscreteScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.scheduler = scheduler\n        ```\n        \"\"\"\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        force_download = kwargs.pop(\"force_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        from_flax = kwargs.pop(\"from_flax\", False)\n        torch_dtype = kwargs.pop(\"torch_dtype\", None)\n        custom_pipeline = kwargs.pop(\"custom_pipeline\", None)\n        custom_revision = kwargs.pop(\"custom_revision\", None)\n        provider = kwargs.pop(\"provider\", None)\n        sess_options = kwargs.pop(\"sess_options\", None)\n        device_map = kwargs.pop(\"device_map\", None)\n        low_cpu_mem_usage = kwargs.pop(\"low_cpu_mem_usage\", _LOW_CPU_MEM_USAGE_DEFAULT)\n        variant = kwargs.pop(\"variant\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None if is_safetensors_available() else False)\n\n        # 1. 
Download the checkpoints and configs\n        # use snapshot download here to get it working from from_pretrained\n        if not os.path.isdir(pretrained_model_name_or_path):\n            cached_folder = cls.download(\n                pretrained_model_name_or_path,\n                cache_dir=cache_dir,\n                resume_download=resume_download,\n                force_download=force_download,\n                proxies=proxies,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n                revision=revision,\n                from_flax=from_flax,\n                use_safetensors=use_safetensors,\n                custom_pipeline=custom_pipeline,\n                custom_revision=custom_revision,\n                variant=variant,\n                **kwargs,\n            )\n        else:\n            cached_folder = pretrained_model_name_or_path\n\n        config_dict = cls.load_config(cached_folder)\n\n        # pop out \"_ignore_files\" as it is only needed for download\n        config_dict.pop(\"_ignore_files\", None)\n\n        # 2. Define which model components should load variants\n        # We retrieve the information by matching whether variant\n        # model checkpoints exist in the subfolders\n        model_variants = {}\n        if variant is not None:\n            for folder in os.listdir(cached_folder):\n                folder_path = os.path.join(cached_folder, folder)\n                is_folder = os.path.isdir(folder_path) and folder in config_dict\n                variant_exists = is_folder and any(\n                    p.split(\".\")[1].startswith(variant) for p in os.listdir(folder_path)\n                )\n                if variant_exists:\n                    model_variants[folder] = variant\n\n        # 3. Load the pipeline class, if using custom module then load it from the hub\n        # if we load from explicit class, let's use it\n        pipeline_class = _get_pipeline_class(\n            cls, config_dict, custom_pipeline=custom_pipeline, cache_dir=cache_dir, revision=custom_revision\n        )\n\n        # DEPRECATED: To be removed in 1.0.0\n        if pipeline_class.__name__ == \"StableDiffusionInpaintPipeline\" and version.parse(\n            version.parse(config_dict[\"_diffusers_version\"]).base_version\n        ) <= version.parse(\"0.5.1\"):\n            from diffusers import StableDiffusionInpaintPipeline, StableDiffusionInpaintPipelineLegacy\n\n            pipeline_class = StableDiffusionInpaintPipelineLegacy\n\n            deprecation_message = (\n                \"You are using a legacy checkpoint for inpainting with Stable Diffusion, therefore we are loading the\"\n                f\" {StableDiffusionInpaintPipelineLegacy} class instead of {StableDiffusionInpaintPipeline}. For\"\n                \" better inpainting results, we strongly suggest using Stable Diffusion's official inpainting\"\n                \" checkpoint: https://huggingface.co/runwayml/stable-diffusion-inpainting instead or adapting your\"\n                f\" checkpoint {pretrained_model_name_or_path} to the format of\"\n                \" https://huggingface.co/runwayml/stable-diffusion-inpainting. Note that we do not actively maintain\"\n                \" the {StableDiffusionInpaintPipelineLegacy} class and will likely remove it in version 1.0.0.\"\n            )\n            deprecate(\"StableDiffusionInpaintPipelineLegacy\", \"1.0.0\", deprecation_message, standard_warn=False)\n\n        # 4. 
Define expected modules given pipeline signature\n        # and define non-None initialized modules (=`init_kwargs`)\n\n        # some modules can be passed directly to the init\n        # in this case they are already instantiated in `kwargs`\n        # extract them here\n        expected_modules, optional_kwargs = cls._get_signature_keys(pipeline_class)\n        passed_class_obj = {k: kwargs.pop(k) for k in expected_modules if k in kwargs}\n        passed_pipe_kwargs = {k: kwargs.pop(k) for k in optional_kwargs if k in kwargs}\n\n        init_dict, unused_kwargs, _ = pipeline_class.extract_init_dict(config_dict, **kwargs)\n\n        # define init kwargs\n        init_kwargs = {k: init_dict.pop(k) for k in optional_kwargs if k in init_dict}\n        init_kwargs = {**init_kwargs, **passed_pipe_kwargs}\n\n        # remove `null` components\n        def load_module(name, value):\n            if value[0] is None:\n                return False\n            if name in passed_class_obj and passed_class_obj[name] is None:\n                return False\n            return True\n\n        init_dict = {k: v for k, v in init_dict.items() if load_module(k, v)}\n\n        # Special case: safety_checker must be loaded separately when using `from_flax`\n        if from_flax and \"safety_checker\" in init_dict and \"safety_checker\" not in passed_class_obj:\n            raise NotImplementedError(\n                \"The safety checker cannot be automatically loaded when loading weights `from_flax`.\"\n                \" Please, pass `safety_checker=None` to `from_pretrained`, and load the safety checker\"\n                \" separately if you need it.\"\n            )\n\n        # 5. Throw nice warnings / errors for fast accelerate loading\n        if len(unused_kwargs) > 0:\n            logger.warning(\n                f\"Keyword arguments {unused_kwargs} are not expected by {pipeline_class.__name__} and will be ignored.\"\n            )\n\n        if low_cpu_mem_usage and not is_accelerate_available():\n            low_cpu_mem_usage = False\n            logger.warning(\n                \"Cannot initialize model with low cpu memory usage because `accelerate` was not found in the\"\n                \" environment. Defaulting to `low_cpu_mem_usage=False`. It is strongly recommended to install\"\n                \" `accelerate` for faster and less memory-intense model loading. You can do so with: \\n```\\npip\"\n                \" install accelerate\\n```\\n.\"\n            )\n\n        if device_map is not None and not is_torch_version(\">=\", \"1.9.0\"):\n            raise NotImplementedError(\n                \"Loading and dispatching requires torch >= 1.9.0. Please either update your PyTorch version or set\"\n                \" `device_map=None`.\"\n            )\n\n        if low_cpu_mem_usage is True and not is_torch_version(\">=\", \"1.9.0\"):\n            raise NotImplementedError(\n                \"Low memory initialization requires torch >= 1.9.0. Please either update your PyTorch version or set\"\n                \" `low_cpu_mem_usage=False`.\"\n            )\n\n        if low_cpu_mem_usage is False and device_map is not None:\n            raise ValueError(\n                f\"You cannot set `low_cpu_mem_usage` to False while using device_map={device_map} for loading and\"\n                \" dispatching. Please make sure to set `low_cpu_mem_usage=True`.\"\n            )\n\n        # import it here to avoid circular import\n        from diffusers import pipelines\n\n        # 6. 
Load each module in the pipeline\n        for name, (library_name, class_name) in init_dict.items():\n            # 6.1 - now that JAX/Flax is an official framework of the library, we might load from Flax names\n            if class_name.startswith(\"Flax\"):\n                class_name = class_name[4:]\n\n            # 6.2 Define all importable classes\n            is_pipeline_module = hasattr(pipelines, library_name)\n            importable_classes = ALL_IMPORTABLE_CLASSES if is_pipeline_module else LOADABLE_CLASSES[library_name]\n            loaded_sub_model = None\n\n            # 6.3 Use passed sub model or load class_name from library_name\n            if name in passed_class_obj:\n                # if the model is in a pipeline module, then we load it from the pipeline\n                # check that passed_class_obj has correct parent class\n                maybe_raise_or_warn(\n                    library_name, library, class_name, importable_classes, passed_class_obj, name, is_pipeline_module\n                )\n\n                loaded_sub_model = passed_class_obj[name]\n            else:\n                # load sub model\n                loaded_sub_model = load_sub_model(\n                    library_name=library_name,\n                    class_name=class_name,\n                    importable_classes=importable_classes,\n                    pipelines=pipelines,\n                    is_pipeline_module=is_pipeline_module,\n                    pipeline_class=pipeline_class,\n                    torch_dtype=torch_dtype,\n                    provider=provider,\n                    sess_options=sess_options,\n                    device_map=device_map,\n                    model_variants=model_variants,\n                    name=name,\n                    from_flax=from_flax,\n                    variant=variant,\n                    low_cpu_mem_usage=low_cpu_mem_usage,\n                    cached_folder=cached_folder,\n                )\n\n            init_kwargs[name] = loaded_sub_model  # UNet(...), # DiffusionSchedule(...)\n\n        # 7. Potentially add passed objects if expected\n        missing_modules = set(expected_modules) - set(init_kwargs.keys())\n        passed_modules = list(passed_class_obj.keys())\n        optional_modules = pipeline_class._optional_components\n        if len(missing_modules) > 0 and missing_modules <= set(passed_modules + optional_modules):\n            for module in missing_modules:\n                init_kwargs[module] = passed_class_obj.get(module, None)\n        elif len(missing_modules) > 0:\n            passed_modules = set(list(init_kwargs.keys()) + list(passed_class_obj.keys())) - optional_kwargs\n            raise ValueError(\n                f\"Pipeline {pipeline_class} expected {expected_modules}, but only {passed_modules} were passed.\"\n            )\n\n        # 8. Instantiate the pipeline\n        model = pipeline_class(**init_kwargs)\n\n        return_cached_folder = kwargs.pop(\"return_cached_folder\", False)\n        if return_cached_folder:\n            message = f\"Passing `return_cached_folder=True` is deprecated and will be removed in `diffusers=0.18.0`. Please do the following instead: \\n 1. Load the cached_folder via `cached_folder={cls}.download({pretrained_model_name_or_path})`. \\n 2. 
Load the pipeline by loading from the cached folder: `pipeline={cls}.from_pretrained(cached_folder)`.\"\n            deprecate(\"return_cached_folder\", \"0.18.0\", message)\n            return model, cached_folder\n\n        return model\n\n    @classmethod\n    def download(cls, pretrained_model_name, **kwargs) -> Union[str, os.PathLike]:\n        r\"\"\"\n        Download and cache a PyTorch diffusion pipeline from pre-trained pipeline weights.\n\n        Parameters:\n            pretrained_model_name (`str` or `os.PathLike`, *optional*):\n                Should be a string, the *repo id* of a pretrained pipeline hosted inside a model repo on\n                https://huggingface.co/ Valid repo ids have to be located under a user or organization name, like\n                `CompVis/ldm-text2im-large-256`.\n            custom_pipeline (`str`, *optional*):\n\n                <Tip warning={true}>\n\n                    This is an experimental feature and is likely to change in the future.\n\n                </Tip>\n\n                Can be either:\n\n                    - A string, the *repo id* of a custom pipeline hosted inside a model repo on\n                      https://huggingface.co/. Valid repo ids have to be located under a user or organization name,\n                      like `hf-internal-testing/diffusers-dummy-pipeline`.\n\n                        <Tip>\n\n                         It is required that the model repo has a file, called `pipeline.py` that defines the custom\n                         pipeline.\n\n                        </Tip>\n\n                    - A string, the *file name* of a community pipeline hosted on GitHub under\n                      https://github.com/huggingface/diffusers/tree/main/examples/community. Valid file names have to\n                      match exactly the file name without `.py` located under the above link, *e.g.*\n                      `clip_guided_stable_diffusion`.\n\n                        <Tip>\n\n                         Community pipelines are always loaded from the current `main` branch of GitHub.\n\n                        </Tip>\n\n                    - A path to a *directory* containing a custom pipeline, e.g., `./my_pipeline_directory/`.\n\n                        <Tip>\n\n                         It is required that the directory has a file, called `pipeline.py` that defines the custom\n                         pipeline.\n\n                        </Tip>\n\n                For more information on how to load and create custom pipelines, please have a look at [Loading and\n                Adding Custom\n                Pipelines](https://huggingface.co/docs/diffusers/using-diffusers/custom_pipeline_overview)\n\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. 
The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `huggingface-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n            custom_revision (`str`, *optional*, defaults to `\"main\"` when loading from the Hub and to local version of\n            `diffusers` when loading from GitHub):\n                The specific model version to use. It can be a branch name, a tag name, or a commit id similar to\n                `revision` when loading a custom pipeline from the Hub. It can be a diffusers version when loading a\n                custom pipeline from GitHub.\n            mirror (`str`, *optional*):\n                Mirror source to accelerate downloads in China. If you are from China and have an accessibility\n                problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety.\n                Please refer to the mirror site for more information. specify the folder name here.\n            variant (`str`, *optional*):\n                If specified load weights from `variant` filename, *e.g.* pytorch_model.<variant>.bin. `variant` is\n                ignored when using `from_flax`.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models)\n\n        </Tip>\n\n        \"\"\"\n        cache_dir = kwargs.pop(\"cache_dir\", DIFFUSERS_CACHE)\n        resume_download = kwargs.pop(\"resume_download\", False)\n        force_download = kwargs.pop(\"force_download\", False)\n        proxies = kwargs.pop(\"proxies\", None)\n        local_files_only = kwargs.pop(\"local_files_only\", HF_HUB_OFFLINE)\n        use_auth_token = kwargs.pop(\"use_auth_token\", None)\n        revision = kwargs.pop(\"revision\", None)\n        from_flax = kwargs.pop(\"from_flax\", False)\n        custom_pipeline = kwargs.pop(\"custom_pipeline\", None)\n        custom_revision = kwargs.pop(\"custom_revision\", None)\n        variant = kwargs.pop(\"variant\", None)\n        use_safetensors = kwargs.pop(\"use_safetensors\", None)\n\n        if use_safetensors and not is_safetensors_available():\n            raise ValueError(\n                \"`use_safetensors`=True but safetensors is not installed. 
Please install safetensors with `pip install safetensors`.\"\n            )\n\n        allow_pickle = False\n        if use_safetensors is None:\n            use_safetensors = is_safetensors_available()\n            allow_pickle = True\n\n        pipeline_is_cached = False\n        allow_patterns = None\n        ignore_patterns = None\n\n        if not local_files_only:\n            config_file = hf_hub_download(\n                pretrained_model_name,\n                cls.config_name,\n                cache_dir=cache_dir,\n                revision=revision,\n                proxies=proxies,\n                force_download=force_download,\n                resume_download=resume_download,\n                use_auth_token=use_auth_token,\n            )\n            info = model_info(\n                pretrained_model_name,\n                use_auth_token=use_auth_token,\n                revision=revision,\n            )\n\n            config_dict = cls._dict_from_json_file(config_file)\n\n            ignore_filenames = config_dict.pop(\"_ignore_files\", [])\n\n            # retrieve all folder_names that contain relevant files\n            folder_names = [k for k, v in config_dict.items() if isinstance(v, list)]\n\n            filenames = {sibling.rfilename for sibling in info.siblings}\n            model_filenames, variant_filenames = variant_compatible_siblings(filenames, variant=variant)\n\n            # remove ignored filenames\n            model_filenames = set(model_filenames) - set(ignore_filenames)\n            variant_filenames = set(variant_filenames) - set(ignore_filenames)\n\n            # if the whole pipeline is cached we don't have to ping the Hub\n            if revision in DEPRECATED_REVISION_ARGS and version.parse(\n                version.parse(__version__).base_version\n            ) >= version.parse(\"0.18.0\"):\n                warn_deprecated_model_variant(\n                    pretrained_model_name, use_auth_token, variant, revision, model_filenames\n                )\n\n            model_folder_names = {os.path.split(f)[0] for f in model_filenames}\n\n            # all filenames compatible with variant will be added\n            allow_patterns = list(model_filenames)\n\n            # allow all patterns from non-model folders\n            # this enables downloading schedulers, tokenizers, ...\n            allow_patterns += [f\"{k}/*\" for k in folder_names if k not in model_folder_names]\n            # also allow downloading config.json files with the model\n            allow_patterns += [os.path.join(k, \"config.json\") for k in model_folder_names]\n\n            allow_patterns += [\n                SCHEDULER_CONFIG_NAME,\n                CONFIG_NAME,\n                cls.config_name,\n                CUSTOM_PIPELINE_FILE_NAME,\n            ]\n\n            # retrieve passed components that should not be downloaded\n            pipeline_class = _get_pipeline_class(\n                cls, config_dict, custom_pipeline=custom_pipeline, cache_dir=cache_dir, revision=custom_revision\n            )\n            expected_components, _ = cls._get_signature_keys(pipeline_class)\n            passed_components = [k for k in expected_components if k in kwargs]\n\n            if (\n                use_safetensors\n                and not allow_pickle\n                and not is_safetensors_compatible(\n                    model_filenames, variant=variant, passed_components=passed_components\n                )\n            ):\n                raise EnvironmentError(\n                 
   f\"Could not found the necessary `safetensors` weights in {model_filenames} (variant={variant})\"\n                )\n            if from_flax:\n                ignore_patterns = [\"*.bin\", \"*.safetensors\", \"*.onnx\", \"*.pb\"]\n            elif use_safetensors and is_safetensors_compatible(\n                model_filenames, variant=variant, passed_components=passed_components\n            ):\n                ignore_patterns = [\"*.bin\", \"*.msgpack\"]\n\n                safetensors_variant_filenames = {f for f in variant_filenames if f.endswith(\".safetensors\")}\n                safetensors_model_filenames = {f for f in model_filenames if f.endswith(\".safetensors\")}\n                if (\n                    len(safetensors_variant_filenames) > 0\n                    and safetensors_model_filenames != safetensors_variant_filenames\n                ):\n                    logger.warn(\n                        f\"\\nA mixture of {variant} and non-{variant} filenames will be loaded.\\nLoaded {variant} filenames:\\n[{', '.join(safetensors_variant_filenames)}]\\nLoaded non-{variant} filenames:\\n[{', '.join(safetensors_model_filenames - safetensors_variant_filenames)}\\nIf this behavior is not expected, please check your folder structure.\"\n                    )\n            else:\n                ignore_patterns = [\"*.safetensors\", \"*.msgpack\"]\n\n                bin_variant_filenames = {f for f in variant_filenames if f.endswith(\".bin\")}\n                bin_model_filenames = {f for f in model_filenames if f.endswith(\".bin\")}\n                if len(bin_variant_filenames) > 0 and bin_model_filenames != bin_variant_filenames:\n                    logger.warn(\n                        f\"\\nA mixture of {variant} and non-{variant} filenames will be loaded.\\nLoaded {variant} filenames:\\n[{', '.join(bin_variant_filenames)}]\\nLoaded non-{variant} filenames:\\n[{', '.join(bin_model_filenames - bin_variant_filenames)}\\nIf this behavior is not expected, please check your folder structure.\"\n                    )\n\n            # Don't download any objects that are passed\n            allow_patterns = [\n                p for p in allow_patterns if not (len(p.split(\"/\")) == 2 and p.split(\"/\")[0] in passed_components)\n            ]\n            # Don't download index files of forbidden patterns either\n            ignore_patterns = ignore_patterns + [f\"{i}.index.*json\" for i in ignore_patterns]\n\n            re_ignore_pattern = [re.compile(fnmatch.translate(p)) for p in ignore_patterns]\n            re_allow_pattern = [re.compile(fnmatch.translate(p)) for p in allow_patterns]\n\n            expected_files = [f for f in filenames if not any(p.match(f) for p in re_ignore_pattern)]\n            expected_files = [f for f in expected_files if any(p.match(f) for p in re_allow_pattern)]\n\n            snapshot_folder = Path(config_file).parent\n            pipeline_is_cached = all((snapshot_folder / f).is_file() for f in expected_files)\n\n            if pipeline_is_cached:\n                # if the pipeline is cached, we can directly return it\n                # else call snapshot_download\n                return snapshot_folder\n\n        user_agent = {\"pipeline_class\": cls.__name__}\n        if custom_pipeline is not None and not custom_pipeline.endswith(\".py\"):\n            user_agent[\"custom_pipeline\"] = custom_pipeline\n\n        # download all allow_patterns - ignore_patterns\n        cached_folder = snapshot_download(\n            pretrained_model_name,\n        
    cache_dir=cache_dir,\n            resume_download=resume_download,\n            proxies=proxies,\n            local_files_only=local_files_only,\n            use_auth_token=use_auth_token,\n            revision=revision,\n            allow_patterns=allow_patterns,\n            ignore_patterns=ignore_patterns,\n            user_agent=user_agent,\n        )\n\n        return cached_folder\n\n    @staticmethod\n    def _get_signature_keys(obj):\n        parameters = inspect.signature(obj.__init__).parameters\n        required_parameters = {k: v for k, v in parameters.items() if v.default == inspect._empty}\n        optional_parameters = set({k for k, v in parameters.items() if v.default != inspect._empty})\n        expected_modules = set(required_parameters.keys()) - {\"self\"}\n        return expected_modules, optional_parameters\n\n    @property\n    def components(self) -> Dict[str, Any]:\n        r\"\"\"\n\n        The `self.components` property can be useful to run different pipelines with the same weights and\n        configurations to not have to re-allocate memory.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import (\n        ...     StableDiffusionPipeline,\n        ...     StableDiffusionImg2ImgPipeline,\n        ...     StableDiffusionInpaintPipeline,\n        ... )\n\n        >>> text2img = StableDiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n        >>> img2img = StableDiffusionImg2ImgPipeline(**text2img.components)\n        >>> inpaint = StableDiffusionInpaintPipeline(**text2img.components)\n        ```\n\n        Returns:\n            A dictionary containing all the modules needed to initialize the pipeline.\n        \"\"\"\n        expected_modules, optional_parameters = self._get_signature_keys(self)\n        components = {\n            k: getattr(self, k) for k in self.config.keys() if not k.startswith(\"_\") and k not in optional_parameters\n        }\n\n        if set(components.keys()) != expected_modules:\n            raise ValueError(\n                f\"{self} has been incorrectly initialized or {self.__class__} is incorrectly implemented. 
Expected\"\n                f\" {expected_modules} to be defined, but {components.keys()} are defined.\"\n            )\n\n        return components\n\n    @staticmethod\n    def numpy_to_pil(images):\n        \"\"\"\n        Convert a numpy image or a batch of images to a PIL image.\n        \"\"\"\n        return numpy_to_pil(images)\n\n    def progress_bar(self, iterable=None, total=None):\n        if not hasattr(self, \"_progress_bar_config\"):\n            self._progress_bar_config = {}\n        elif not isinstance(self._progress_bar_config, dict):\n            raise ValueError(\n                f\"`self._progress_bar_config` should be of type `dict`, but is {type(self._progress_bar_config)}.\"\n            )\n\n        if iterable is not None:\n            return tqdm(iterable, **self._progress_bar_config)\n        elif total is not None:\n            return tqdm(total=total, **self._progress_bar_config)\n        else:\n            raise ValueError(\"Either `total` or `iterable` has to be defined.\")\n\n    def set_progress_bar_config(self, **kwargs):\n        self._progress_bar_config = kwargs\n\n    def enable_xformers_memory_efficient_attention(self, attention_op: Optional[Callable] = None):\n        r\"\"\"\n        Enable memory efficient attention as implemented in xformers.\n\n        When this option is enabled, you should observe lower GPU memory usage and a potential speed up at inference\n        time. Speed up at training time is not guaranteed.\n\n        Warning: When Memory Efficient Attention and Sliced attention are both enabled, the Memory Efficient Attention\n        is used.\n\n        Parameters:\n            attention_op (`Callable`, *optional*):\n                Override the default `None` operator for use as `op` argument to the\n                [`memory_efficient_attention()`](https://facebookresearch.github.io/xformers/components/ops.html#xformers.ops.memory_efficient_attention)\n                function of xFormers.\n\n        Examples:\n\n        ```py\n        >>> import torch\n        >>> from diffusers import DiffusionPipeline\n        >>> from xformers.ops import MemoryEfficientAttentionFlashAttentionOp\n\n        >>> pipe = DiffusionPipeline.from_pretrained(\"stabilityai/stable-diffusion-2-1\", torch_dtype=torch.float16)\n        >>> pipe = pipe.to(\"cuda\")\n        >>> pipe.enable_xformers_memory_efficient_attention(attention_op=MemoryEfficientAttentionFlashAttentionOp)\n        >>> # Workaround for not accepting attention shape using VAE for Flash Attention\n        >>> pipe.vae.enable_xformers_memory_efficient_attention(attention_op=None)\n        ```\n        \"\"\"\n        self.set_use_memory_efficient_attention_xformers(True, attention_op)\n\n    def disable_xformers_memory_efficient_attention(self):\n        r\"\"\"\n        Disable memory efficient attention as implemented in xformers.\n        \"\"\"\n        self.set_use_memory_efficient_attention_xformers(False)\n\n    def set_use_memory_efficient_attention_xformers(\n        self, valid: bool, attention_op: Optional[Callable] = None\n    ) -> None:\n        # Recursively walk through all the children.\n        # Any children which exposes the set_use_memory_efficient_attention_xformers method\n        # gets the message\n        def fn_recursive_set_mem_eff(module: torch.nn.Module):\n            if hasattr(module, \"set_use_memory_efficient_attention_xformers\"):\n                module.set_use_memory_efficient_attention_xformers(valid, attention_op)\n\n            for child in 
module.children():\n                fn_recursive_set_mem_eff(child)\n\n        module_names, _ = self._get_signature_keys(self)\n        modules = [getattr(self, n, None) for n in module_names]\n        modules = [m for m in modules if isinstance(m, torch.nn.Module)]\n\n        for module in modules:\n            fn_recursive_set_mem_eff(module)\n\n    def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = \"auto\"):\n        r\"\"\"\n        Enable sliced attention computation.\n\n        When this option is enabled, the attention module will split the input tensor in slices, to compute attention\n        in several steps. This is useful to save some memory in exchange for a small speed decrease.\n\n        Args:\n            slice_size (`str` or `int`, *optional*, defaults to `\"auto\"`):\n                When `\"auto\"`, halves the input to the attention heads, so attention will be computed in two steps. If\n                `\"max\"`, maximum amount of memory will be saved by running only one slice at a time. If a number is\n                provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim`\n                must be a multiple of `slice_size`.\n        \"\"\"\n        self.set_attention_slice(slice_size)\n\n    def disable_attention_slicing(self):\n        r\"\"\"\n        Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go\n        back to computing attention in one step.\n        \"\"\"\n        # set slice_size = `None` to disable `attention slicing`\n        self.enable_attention_slicing(None)\n\n    def set_attention_slice(self, slice_size: Optional[int]):\n        module_names, _ = self._get_signature_keys(self)\n        modules = [getattr(self, n, None) for n in module_names]\n        modules = [m for m in modules if isinstance(m, torch.nn.Module) and hasattr(m, \"set_attention_slice\")]\n\n        for module in modules:\n            module.set_attention_slice(slice_size)\n"
  },
  {
    "path": "diffusers/pipelines/pndm/__init__.py",
    "content": "from .pipeline_pndm import PNDMPipeline\n"
  },
  {
    "path": "diffusers/pipelines/pndm/pipeline_pndm.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...models import UNet2DModel\nfrom ...schedulers import PNDMScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass PNDMPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        unet (`UNet2DModel`): U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            The `PNDMScheduler` to be used in combination with `unet` to denoise the encoded image.\n    \"\"\"\n\n    unet: UNet2DModel\n    scheduler: PNDMScheduler\n\n    def __init__(self, unet: UNet2DModel, scheduler: PNDMScheduler):\n        super().__init__()\n\n        scheduler = PNDMScheduler.from_config(scheduler.config)\n\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        num_inference_steps: int = 50,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        **kwargs,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, `optional`, defaults to 1): The number of images to generate.\n            num_inference_steps (`int`, `optional`, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            generator (`torch.Generator`, `optional`): A [torch\n                generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation\n                deterministic.\n            output_type (`str`, `optional`, defaults to `\"pil\"`): The output format of the generate image. Choose\n                between [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, `optional`, defaults to `True`): Whether or not to return a\n                [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. 
When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        # For more information on the sampling method you can take a look at Algorithm 2 of\n        # the official paper: https://arxiv.org/pdf/2202.09778.pdf\n\n        # Sample gaussian noise to begin loop\n        image = randn_tensor(\n            (batch_size, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size),\n            generator=generator,\n            device=self.device,\n        )\n\n        self.scheduler.set_timesteps(num_inference_steps)\n        for t in self.progress_bar(self.scheduler.timesteps):\n            model_output = self.unet(image, t).sample\n\n            image = self.scheduler.step(model_output, t, image).prev_sample\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/repaint/__init__.py",
    "content": "from .pipeline_repaint import RePaintPipeline\n"
  },
  {
    "path": "diffusers/pipelines/repaint/pipeline_repaint.py",
    "content": "# Copyright 2023 ETH Zurich Computer Vision Lab and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport PIL\nimport torch\n\nfrom ...models import UNet2DModel\nfrom ...schedulers import RePaintScheduler\nfrom ...utils import PIL_INTERPOLATION, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef _preprocess_image(image: Union[List, PIL.Image.Image, torch.Tensor]):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\ndef _preprocess_mask(mask: Union[List, PIL.Image.Image, torch.Tensor]):\n    if isinstance(mask, torch.Tensor):\n        return mask\n    elif isinstance(mask, PIL.Image.Image):\n        mask = [mask]\n\n    if isinstance(mask[0], PIL.Image.Image):\n        w, h = mask[0].size\n        w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n        mask = [np.array(m.convert(\"L\").resize((w, h), resample=PIL_INTERPOLATION[\"nearest\"]))[None, :] for m in mask]\n        mask = np.concatenate(mask, axis=0)\n        mask = mask.astype(np.float32) / 255.0\n        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n        mask = torch.from_numpy(mask)\n    elif isinstance(mask[0], torch.Tensor):\n        mask = torch.cat(mask, dim=0)\n    return mask\n\n\nclass RePaintPipeline(DiffusionPipeline):\n    unet: UNet2DModel\n    scheduler: RePaintScheduler\n\n    def __init__(self, unet, scheduler):\n        super().__init__()\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        image: Union[torch.Tensor, PIL.Image.Image],\n        mask_image: Union[torch.Tensor, PIL.Image.Image],\n        num_inference_steps: int = 250,\n        eta: float = 0.0,\n        jump_length: int = 10,\n        jump_n_sample: int = 10,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        r\"\"\"\n        Args:\n          
  image (`torch.FloatTensor` or `PIL.Image.Image`):\n                The original image to inpaint on.\n            mask_image (`torch.FloatTensor` or `PIL.Image.Image`):\n                The mask_image where 0.0 values define which part of the original image to inpaint (change).\n            num_inference_steps (`int`, *optional*, defaults to 250):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            eta (`float`):\n                The weight of the added noise in a diffusion step. Its value is between 0.0 and 1.0; 0.0 corresponds to\n                the DDIM scheduler and 1.0 to the DDPM scheduler.\n            jump_length (`int`, *optional*, defaults to 10):\n                The number of steps taken forward in time before going backward in time for a single jump (\"j\" in\n                RePaint paper). Take a look at Figure 9 and 10 in https://arxiv.org/pdf/2201.09865.pdf.\n            jump_n_sample (`int`, *optional*, defaults to 10):\n                The number of times we will make a forward time jump for a given chosen time sample. Take a look at\n                Figure 9 and 10 in https://arxiv.org/pdf/2201.09865.pdf.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n\n        original_image = image\n\n        original_image = _preprocess_image(original_image)\n        original_image = original_image.to(device=self.device, dtype=self.unet.dtype)\n        mask_image = _preprocess_mask(mask_image)\n        mask_image = mask_image.to(device=self.device, dtype=self.unet.dtype)\n\n        batch_size = original_image.shape[0]\n\n        # sample gaussian noise to begin the loop\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        image_shape = original_image.shape\n        image = randn_tensor(image_shape, generator=generator, device=self.device, dtype=self.unet.dtype)\n\n        # set step values\n        self.scheduler.set_timesteps(num_inference_steps, jump_length, jump_n_sample, self.device)\n        self.scheduler.eta = eta\n\n        t_last = self.scheduler.timesteps[0] + 1\n        generator = generator[0] if isinstance(generator, list) else generator\n        for i, t in enumerate(self.progress_bar(self.scheduler.timesteps)):\n            if t < t_last:\n                # predict the noise residual\n                model_output = self.unet(image, t).sample\n                # compute previous image: x_t -> x_t-1\n                image = self.scheduler.step(model_output, t, image, original_image, mask_image, generator).prev_sample\n\n            else:\n                # compute the reverse: x_t-1 -> x_t\n                image = self.scheduler.undo_step(image, t_last, generator)\n            t_last = t\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/score_sde_ve/__init__.py",
    "content": "from .pipeline_score_sde_ve import ScoreSdeVePipeline\n"
  },
  {
    "path": "diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...models import UNet2DModel\nfrom ...schedulers import ScoreSdeVeScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass ScoreSdeVePipeline(DiffusionPipeline):\n    r\"\"\"\n    Parameters:\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n        unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image. scheduler ([`SchedulerMixin`]):\n            The [`ScoreSdeVeScheduler`] scheduler to be used in combination with `unet` to denoise the encoded image.\n    \"\"\"\n    unet: UNet2DModel\n    scheduler: ScoreSdeVeScheduler\n\n    def __init__(self, unet: UNet2DModel, scheduler: ScoreSdeVeScheduler):\n        super().__init__()\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        num_inference_steps: int = 2000,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        **kwargs,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, *optional*, defaults to 1):\n                The number of images to generate.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. 
When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n\n        img_size = self.unet.config.sample_size\n        shape = (batch_size, 3, img_size, img_size)\n\n        model = self.unet\n\n        sample = randn_tensor(shape, generator=generator) * self.scheduler.init_noise_sigma\n        sample = sample.to(self.device)\n\n        self.scheduler.set_timesteps(num_inference_steps)\n        self.scheduler.set_sigmas(num_inference_steps)\n\n        for i, t in enumerate(self.progress_bar(self.scheduler.timesteps)):\n            sigma_t = self.scheduler.sigmas[i] * torch.ones(shape[0], device=self.device)\n\n            # correction step\n            for _ in range(self.scheduler.config.correct_steps):\n                model_output = self.unet(sample, sigma_t).sample\n                sample = self.scheduler.step_correct(model_output, sample, generator=generator).prev_sample\n\n            # prediction step\n            model_output = model(sample, sigma_t).sample\n            output = self.scheduler.step_pred(model_output, t, sample, generator=generator)\n\n            sample, sample_mean = output.prev_sample, output.prev_sample_mean\n\n        sample = sample_mean.clamp(0, 1)\n        sample = sample.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            sample = self.numpy_to_pil(sample)\n\n        if not return_dict:\n            return (sample,)\n\n        return ImagePipelineOutput(images=sample)\n"
  },
  {
    "path": "diffusers/pipelines/semantic_stable_diffusion/__init__.py",
    "content": "from dataclasses import dataclass\nfrom enum import Enum\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\nfrom PIL import Image\n\nfrom ...utils import BaseOutput, is_torch_available, is_transformers_available\n\n\n@dataclass\nclass SemanticStableDiffusionPipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for Stable Diffusion pipelines.\n\n    Args:\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n        nsfw_content_detected (`List[bool]`)\n            List of flags denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, or `None` if safety checking could not be performed.\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n    nsfw_content_detected: Optional[List[bool]]\n\n\nif is_transformers_available() and is_torch_available():\n    from .pipeline_semantic_stable_diffusion import SemanticStableDiffusionPipeline\n"
  },
  {
    "path": "diffusers/pipelines/semantic_stable_diffusion/pipeline_semantic_stable_diffusion.py",
    "content": "import inspect\nimport warnings\nfrom itertools import repeat\nfrom typing import Callable, List, Optional, Union\n\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import SemanticStableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import SemanticStableDiffusionPipeline\n\n        >>> pipe = SemanticStableDiffusionPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", torch_dtype=torch.float16\n        ... )\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> out = pipe(\n        ...     prompt=\"a photo of the face of a woman\",\n        ...     num_images_per_prompt=1,\n        ...     guidance_scale=7,\n        ...     editing_prompt=[\n        ...         \"smiling, smile\",  # Concepts to apply\n        ...         \"glasses, wearing glasses\",\n        ...         \"curls, wavy hair, curly hair\",\n        ...         \"beard, full beard, mustache\",\n        ...     ],\n        ...     reverse_editing_direction=[\n        ...         False,\n        ...         False,\n        ...         False,\n        ...         False,\n        ...     ],  # Direction of guidance i.e. increase all concepts\n        ...     edit_warmup_steps=[10, 10, 10, 10],  # Warmup period for each concept\n        ...     edit_guidance_scale=[4, 5, 5, 5.4],  # Guidance scale for each concept\n        ...     edit_threshold=[\n        ...         0.99,\n        ...         0.975,\n        ...         0.925,\n        ...         0.96,\n        ...     ],  # Threshold for each concept. Threshold equals the percentile of the latent space that will be discarded. I.e. threshold=0.99 uses 1% of the latent dimensions\n        ...     edit_momentum_scale=0.3,  # Momentum scale that will be added to the latent guidance\n        ...     edit_mom_beta=0.6,  # Momentum beta\n        ...     edit_weights=[1, 1, 1, 1, 1],  # Weights of the individual concepts against each other\n        ... )\n        >>> image = out.images[0]\n        ```\n\"\"\"\n\n\nclass SemanticStableDiffusionPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-image generation with latent editing.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    This model builds on the implementation of ['StableDiffusionPipeline']\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`Q16SafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please refer to the [model card](https://huggingface.co/CompVis/stable-diffusion-v1-4) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public-facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: int = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        editing_prompt: Optional[Union[str, List[str]]] = None,\n        editing_prompt_embeddings: Optional[torch.Tensor] = None,\n        reverse_editing_direction: Optional[Union[bool, List[bool]]] = False,\n        edit_guidance_scale: Optional[Union[float, List[float]]] = 5,\n        edit_warmup_steps: Optional[Union[int, List[int]]] = 10,\n        edit_cooldown_steps: Optional[Union[int, List[int]]] = None,\n        edit_threshold: Optional[Union[float, List[float]]] = 0.9,\n        edit_momentum_scale: Optional[float] = 0.1,\n        edit_mom_beta: Optional[float] = 0.4,\n        edit_weights: Optional[List[float]] = None,\n        sem_guidance: Optional[List[torch.Tensor]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The 
width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            editing_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to use for Semantic guidance. Semantic guidance is disabled by setting\n                `editing_prompt = None`. Guidance direction of prompt should be specified via\n                `reverse_editing_direction`.\n            editing_prompt_embeddings (`torch.Tensor>`, *optional*):\n                Pre-computed embeddings to use for semantic guidance. 
Guidance direction of embedding should be\n                specified via `reverse_editing_direction`.\n            reverse_editing_direction (`bool` or `List[bool]`, *optional*, defaults to `False`):\n                Whether the corresponding prompt in `editing_prompt` should be increased or decreased.\n            edit_guidance_scale (`float` or `List[float]`, *optional*, defaults to 5):\n                Guidance scale for semantic guidance. If provided as a list, values should correspond to\n                `editing_prompt`. `edit_guidance_scale` is defined as `s_e` of equation 6 of [SEGA\n                Paper](https://arxiv.org/pdf/2301.12247.pdf).\n            edit_warmup_steps (`int` or `List[int]`, *optional*, defaults to 10):\n                Number of diffusion steps (for each prompt) for which semantic guidance will not be applied. Momentum\n                will still be calculated for those steps and applied once all warmup periods are over.\n                `edit_warmup_steps` is defined as `delta` (δ) of [SEGA Paper](https://arxiv.org/pdf/2301.12247.pdf).\n            edit_cooldown_steps (`int` or `List[int]`, *optional*, defaults to `None`):\n                Number of diffusion steps (for each prompt) after which semantic guidance will no longer be applied.\n            edit_threshold (`float` or `List[float]`, *optional*, defaults to 0.9):\n                Threshold of semantic guidance.\n            edit_momentum_scale (`float`, *optional*, defaults to 0.1):\n                Scale of the momentum to be added to the semantic guidance at each diffusion step. If set to 0.0,\n                momentum will be disabled. Momentum is already built up during warmup, i.e. for diffusion steps smaller\n                than `edit_warmup_steps`. Momentum will only be added to latent guidance once all warmup periods are\n                finished. `edit_momentum_scale` is defined as `s_m` of equation 7 of [SEGA\n                Paper](https://arxiv.org/pdf/2301.12247.pdf).\n            edit_mom_beta (`float`, *optional*, defaults to 0.4):\n                Defines how semantic guidance momentum builds up. `edit_mom_beta` indicates how much of the previous\n                momentum will be kept. Momentum is already built up during warmup, i.e. for diffusion steps smaller\n                than `edit_warmup_steps`. `edit_mom_beta` is defined as `beta_m` (β) of equation 8 of [SEGA\n                Paper](https://arxiv.org/pdf/2301.12247.pdf).\n            edit_weights (`List[float]`, *optional*, defaults to `None`):\n                Indicates how much each individual concept should influence the overall guidance. If no weights are\n                provided, all concepts are applied equally. `edit_weights` is defined as `g_i` of equation 9 of [SEGA\n                Paper](https://arxiv.org/pdf/2301.12247.pdf).\n            sem_guidance (`List[torch.Tensor]`, *optional*):\n                List of pre-generated guidance vectors to be applied at generation. Length of the list has to\n                correspond to `num_inference_steps`.\n\n        Returns:\n            [`~pipelines.semantic_stable_diffusion.SemanticStableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.semantic_stable_diffusion.SemanticStableDiffusionPipelineOutput`] if `return_dict` is True,\n            otherwise a `tuple`. 
When returning a tuple, the first element is a list with the generated images, and the\n            second element is a list of `bool`s denoting whether the corresponding generated image likely represents\n            \"not-safe-for-work\" (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, height, width, callback_steps)\n\n        # 2. Define call parameters\n        batch_size = 1 if isinstance(prompt, str) else len(prompt)\n\n        if editing_prompt:\n            enable_edit_guidance = True\n            if isinstance(editing_prompt, str):\n                editing_prompt = [editing_prompt]\n            enabled_editing_prompts = len(editing_prompt)\n        elif editing_prompt_embeddings is not None:\n            enable_edit_guidance = True\n            enabled_editing_prompts = editing_prompt_embeddings.shape[0]\n        else:\n            enabled_editing_prompts = 0\n            enable_edit_guidance = False\n\n        # get prompt text embeddings\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n\n        if text_input_ids.shape[-1] > self.tokenizer.model_max_length:\n            removed_text = self.tokenizer.batch_decode(text_input_ids[:, self.tokenizer.model_max_length :])\n            logger.warning(\n                \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n            )\n            text_input_ids = text_input_ids[:, : self.tokenizer.model_max_length]\n        text_embeddings = self.text_encoder(text_input_ids.to(self.device))[0]\n\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = text_embeddings.shape\n        text_embeddings = text_embeddings.repeat(1, num_images_per_prompt, 1)\n        text_embeddings = text_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        if enable_edit_guidance:\n            # get safety text embeddings\n            if editing_prompt_embeddings is None:\n                edit_concepts_input = self.tokenizer(\n                    [x for item in editing_prompt for x in repeat(item, batch_size)],\n                    padding=\"max_length\",\n                    max_length=self.tokenizer.model_max_length,\n                    return_tensors=\"pt\",\n                )\n\n                edit_concepts_input_ids = edit_concepts_input.input_ids\n\n                if edit_concepts_input_ids.shape[-1] > self.tokenizer.model_max_length:\n                    removed_text = self.tokenizer.batch_decode(\n                        edit_concepts_input_ids[:, self.tokenizer.model_max_length :]\n                    )\n                    logger.warning(\n                        \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                        f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                    )\n                    edit_concepts_input_ids = 
edit_concepts_input_ids[:, : self.tokenizer.model_max_length]\n                edit_concepts = self.text_encoder(edit_concepts_input_ids.to(self.device))[0]\n            else:\n                edit_concepts = editing_prompt_embeddings.to(self.device).repeat(batch_size, 1, 1)\n\n            # duplicate text embeddings for each generation per prompt, using mps friendly method\n            bs_embed_edit, seq_len_edit, _ = edit_concepts.shape\n            edit_concepts = edit_concepts.repeat(1, num_images_per_prompt, 1)\n            edit_concepts = edit_concepts.view(bs_embed_edit * num_images_per_prompt, seq_len_edit, -1)\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n        # get unconditional embeddings for classifier free guidance\n\n        if do_classifier_free_guidance:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"]\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device))[0]\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = uncond_embeddings.shape[1]\n            uncond_embeddings = uncond_embeddings.repeat(batch_size, num_images_per_prompt, 1)\n            uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            if enable_edit_guidance:\n                text_embeddings = torch.cat([uncond_embeddings, text_embeddings, edit_concepts])\n            else:\n                text_embeddings = torch.cat([uncond_embeddings, text_embeddings])\n        # get the initial random noise unless the user supplied it\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=self.device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. 
Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            text_embeddings.dtype,\n            self.device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs.\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # Initialize edit_momentum to None\n        edit_momentum = None\n\n        self.uncond_estimates = None\n        self.text_estimates = None\n        self.edit_estimates = None\n        self.sem_guidance = None\n\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = (\n                torch.cat([latents] * (2 + enabled_editing_prompts)) if do_classifier_free_guidance else latents\n            )\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=text_embeddings).sample\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_out = noise_pred.chunk(2 + enabled_editing_prompts)  # [b,4, 64, 64]\n                noise_pred_uncond, noise_pred_text = noise_pred_out[0], noise_pred_out[1]\n                noise_pred_edit_concepts = noise_pred_out[2:]\n\n                # default text guidance\n                noise_guidance = guidance_scale * (noise_pred_text - noise_pred_uncond)\n                # noise_guidance = (noise_pred_text - noise_pred_edit_concepts[0])\n\n                if self.uncond_estimates is None:\n                    self.uncond_estimates = torch.zeros((num_inference_steps + 1, *noise_pred_uncond.shape))\n                self.uncond_estimates[i] = noise_pred_uncond.detach().cpu()\n\n                if self.text_estimates is None:\n                    self.text_estimates = torch.zeros((num_inference_steps + 1, *noise_pred_text.shape))\n                self.text_estimates[i] = noise_pred_text.detach().cpu()\n\n                if self.edit_estimates is None and enable_edit_guidance:\n                    self.edit_estimates = torch.zeros(\n                        (num_inference_steps + 1, len(noise_pred_edit_concepts), *noise_pred_edit_concepts[0].shape)\n                    )\n\n                if self.sem_guidance is None:\n                    self.sem_guidance = torch.zeros((num_inference_steps + 1, *noise_pred_text.shape))\n\n                if edit_momentum is None:\n                    edit_momentum = torch.zeros_like(noise_guidance)\n\n                if enable_edit_guidance:\n                    concept_weights = torch.zeros(\n                        (len(noise_pred_edit_concepts), noise_guidance.shape[0]),\n                        device=self.device,\n                        dtype=noise_guidance.dtype,\n                    )\n                    noise_guidance_edit = torch.zeros(\n                        (len(noise_pred_edit_concepts), *noise_guidance.shape),\n                        device=self.device,\n                        dtype=noise_guidance.dtype,\n                    )\n                    # noise_guidance_edit = torch.zeros_like(noise_guidance)\n                    warmup_inds = []\n                    for c, noise_pred_edit_concept in 
enumerate(noise_pred_edit_concepts):\n                        self.edit_estimates[i, c] = noise_pred_edit_concept\n                        if isinstance(edit_guidance_scale, list):\n                            edit_guidance_scale_c = edit_guidance_scale[c]\n                        else:\n                            edit_guidance_scale_c = edit_guidance_scale\n\n                        if isinstance(edit_threshold, list):\n                            edit_threshold_c = edit_threshold[c]\n                        else:\n                            edit_threshold_c = edit_threshold\n                        if isinstance(reverse_editing_direction, list):\n                            reverse_editing_direction_c = reverse_editing_direction[c]\n                        else:\n                            reverse_editing_direction_c = reverse_editing_direction\n                        if edit_weights:\n                            edit_weight_c = edit_weights[c]\n                        else:\n                            edit_weight_c = 1.0\n                        if isinstance(edit_warmup_steps, list):\n                            edit_warmup_steps_c = edit_warmup_steps[c]\n                        else:\n                            edit_warmup_steps_c = edit_warmup_steps\n\n                        if isinstance(edit_cooldown_steps, list):\n                            edit_cooldown_steps_c = edit_cooldown_steps[c]\n                        elif edit_cooldown_steps is None:\n                            edit_cooldown_steps_c = i + 1\n                        else:\n                            edit_cooldown_steps_c = edit_cooldown_steps\n                        if i >= edit_warmup_steps_c:\n                            warmup_inds.append(c)\n                        if i >= edit_cooldown_steps_c:\n                            noise_guidance_edit[c, :, :, :, :] = torch.zeros_like(noise_pred_edit_concept)\n                            continue\n\n                        noise_guidance_edit_tmp = noise_pred_edit_concept - noise_pred_uncond\n                        # tmp_weights = (noise_pred_text - noise_pred_edit_concept).sum(dim=(1, 2, 3))\n                        tmp_weights = (noise_guidance - noise_pred_edit_concept).sum(dim=(1, 2, 3))\n\n                        tmp_weights = torch.full_like(tmp_weights, edit_weight_c)  # * (1 / enabled_editing_prompts)\n                        if reverse_editing_direction_c:\n                            noise_guidance_edit_tmp = noise_guidance_edit_tmp * -1\n                        concept_weights[c, :] = tmp_weights\n\n                        noise_guidance_edit_tmp = noise_guidance_edit_tmp * edit_guidance_scale_c\n\n                        # torch.quantile function expects float32\n                        if noise_guidance_edit_tmp.dtype == torch.float32:\n                            tmp = torch.quantile(\n                                torch.abs(noise_guidance_edit_tmp).flatten(start_dim=2),\n                                edit_threshold_c,\n                                dim=2,\n                                keepdim=False,\n                            )\n                        else:\n                            tmp = torch.quantile(\n                                torch.abs(noise_guidance_edit_tmp).flatten(start_dim=2).to(torch.float32),\n                                edit_threshold_c,\n                                dim=2,\n                                keepdim=False,\n                            ).to(noise_guidance_edit_tmp.dtype)\n\n                     
   noise_guidance_edit_tmp = torch.where(\n                            torch.abs(noise_guidance_edit_tmp) >= tmp[:, :, None, None],\n                            noise_guidance_edit_tmp,\n                            torch.zeros_like(noise_guidance_edit_tmp),\n                        )\n                        noise_guidance_edit[c, :, :, :, :] = noise_guidance_edit_tmp\n\n                        # noise_guidance_edit = noise_guidance_edit + noise_guidance_edit_tmp\n\n                    warmup_inds = torch.tensor(warmup_inds).to(self.device)\n                    if len(noise_pred_edit_concepts) > warmup_inds.shape[0] > 0:\n                        concept_weights = concept_weights.to(\"cpu\")  # Offload to cpu\n                        noise_guidance_edit = noise_guidance_edit.to(\"cpu\")\n\n                        concept_weights_tmp = torch.index_select(concept_weights.to(self.device), 0, warmup_inds)\n                        concept_weights_tmp = torch.where(\n                            concept_weights_tmp < 0, torch.zeros_like(concept_weights_tmp), concept_weights_tmp\n                        )\n                        concept_weights_tmp = concept_weights_tmp / concept_weights_tmp.sum(dim=0)\n                        # concept_weights_tmp = torch.nan_to_num(concept_weights_tmp)\n\n                        noise_guidance_edit_tmp = torch.index_select(\n                            noise_guidance_edit.to(self.device), 0, warmup_inds\n                        )\n                        noise_guidance_edit_tmp = torch.einsum(\n                            \"cb,cbijk->bijk\", concept_weights_tmp, noise_guidance_edit_tmp\n                        )\n                        noise_guidance_edit_tmp = noise_guidance_edit_tmp\n                        noise_guidance = noise_guidance + noise_guidance_edit_tmp\n\n                        self.sem_guidance[i] = noise_guidance_edit_tmp.detach().cpu()\n\n                        del noise_guidance_edit_tmp\n                        del concept_weights_tmp\n                        concept_weights = concept_weights.to(self.device)\n                        noise_guidance_edit = noise_guidance_edit.to(self.device)\n\n                    concept_weights = torch.where(\n                        concept_weights < 0, torch.zeros_like(concept_weights), concept_weights\n                    )\n\n                    concept_weights = torch.nan_to_num(concept_weights)\n\n                    noise_guidance_edit = torch.einsum(\"cb,cbijk->bijk\", concept_weights, noise_guidance_edit)\n\n                    noise_guidance_edit = noise_guidance_edit + edit_momentum_scale * edit_momentum\n\n                    edit_momentum = edit_mom_beta * edit_momentum + (1 - edit_mom_beta) * noise_guidance_edit\n\n                    if warmup_inds.shape[0] == len(noise_pred_edit_concepts):\n                        noise_guidance = noise_guidance + noise_guidance_edit\n                        self.sem_guidance[i] = noise_guidance_edit.detach().cpu()\n\n                if sem_guidance is not None:\n                    edit_guidance = sem_guidance[i].to(self.device)\n                    noise_guidance = noise_guidance + edit_guidance\n\n                noise_pred = noise_pred_uncond + noise_guidance\n\n                # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                
callback(i, t, latents)\n\n        # 8. Post-processing\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, self.device, text_embeddings.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return SemanticStableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
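The quantile step in the semantic-guidance loop above keeps only the largest-magnitude guidance values for each concept before they are blended into the noise prediction. A standalone sketch of just that step, with assumed latent shapes (batch 2, 4 channels, 8×8) and an assumed `edit_threshold` of 0.9; these values are illustrative, not pipeline defaults:

```python
# Minimal sketch of the per-concept quantile thresholding used in the pipeline above.
# The tensor shape and the 0.9 threshold are assumptions for this example only.
import torch

noise_guidance_edit_tmp = torch.randn(2, 4, 8, 8)  # (batch, channels, height, width)
edit_threshold = 0.9

# torch.quantile expects float32; take the quantile of |guidance| per (batch, channel)
tmp = torch.quantile(
    noise_guidance_edit_tmp.abs().flatten(start_dim=2).to(torch.float32),
    edit_threshold,
    dim=2,
    keepdim=False,
)

# Zero out everything below the per-channel quantile, keeping only the strongest values
thresholded = torch.where(
    noise_guidance_edit_tmp.abs() >= tmp[:, :, None, None],
    noise_guidance_edit_tmp,
    torch.zeros_like(noise_guidance_edit_tmp),
)

print(thresholded.ne(0).float().mean())  # roughly 1 - edit_threshold of the entries survive
```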
  {
    "path": "diffusers/pipelines/spectrogram_diffusion/__init__.py",
    "content": "# flake8: noqa\nfrom ...utils import is_note_seq_available, is_transformers_available, is_torch_available\nfrom ...utils import OptionalDependencyNotAvailable\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403\nelse:\n    from .notes_encoder import SpectrogramNotesEncoder\n    from .continous_encoder import SpectrogramContEncoder\n    from .pipeline_spectrogram_diffusion import (\n        SpectrogramContEncoder,\n        SpectrogramDiffusionPipeline,\n        T5FilmDecoder,\n    )\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_note_seq_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_transformers_and_torch_and_note_seq_objects import *  # noqa F403\nelse:\n    from .midi_utils import MidiProcessor\n"
  },
  {
    "path": "diffusers/pipelines/spectrogram_diffusion/continous_encoder.py",
    "content": "# Copyright 2022 The Music Spectrogram Diffusion Authors.\n# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\nimport torch.nn as nn\nfrom transformers.modeling_utils import ModuleUtilsMixin\nfrom transformers.models.t5.modeling_t5 import (\n    T5Block,\n    T5Config,\n    T5LayerNorm,\n)\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...models import ModelMixin\n\n\nclass SpectrogramContEncoder(ModelMixin, ConfigMixin, ModuleUtilsMixin):\n    @register_to_config\n    def __init__(\n        self,\n        input_dims: int,\n        targets_context_length: int,\n        d_model: int,\n        dropout_rate: float,\n        num_layers: int,\n        num_heads: int,\n        d_kv: int,\n        d_ff: int,\n        feed_forward_proj: str,\n        is_decoder: bool = False,\n    ):\n        super().__init__()\n\n        self.input_proj = nn.Linear(input_dims, d_model, bias=False)\n\n        self.position_encoding = nn.Embedding(targets_context_length, d_model)\n        self.position_encoding.weight.requires_grad = False\n\n        self.dropout_pre = nn.Dropout(p=dropout_rate)\n\n        t5config = T5Config(\n            d_model=d_model,\n            num_heads=num_heads,\n            d_kv=d_kv,\n            d_ff=d_ff,\n            feed_forward_proj=feed_forward_proj,\n            dropout_rate=dropout_rate,\n            is_decoder=is_decoder,\n            is_encoder_decoder=False,\n        )\n        self.encoders = nn.ModuleList()\n        for lyr_num in range(num_layers):\n            lyr = T5Block(t5config)\n            self.encoders.append(lyr)\n\n        self.layer_norm = T5LayerNorm(d_model)\n        self.dropout_post = nn.Dropout(p=dropout_rate)\n\n    def forward(self, encoder_inputs, encoder_inputs_mask):\n        x = self.input_proj(encoder_inputs)\n\n        # terminal relative positional encodings\n        max_positions = encoder_inputs.shape[1]\n        input_positions = torch.arange(max_positions, device=encoder_inputs.device)\n\n        seq_lens = encoder_inputs_mask.sum(-1)\n        input_positions = torch.roll(input_positions.unsqueeze(0), tuple(seq_lens.tolist()), dims=0)\n        x += self.position_encoding(input_positions)\n\n        x = self.dropout_pre(x)\n\n        # inverted the attention mask\n        input_shape = encoder_inputs.size()\n        extended_attention_mask = self.get_extended_attention_mask(encoder_inputs_mask, input_shape)\n\n        for lyr in self.encoders:\n            x = lyr(x, extended_attention_mask)[0]\n        x = self.layer_norm(x)\n\n        return self.dropout_post(x), encoder_inputs_mask\n"
  },
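A shape-level sketch of instantiating and calling the continuous (spectrogram) encoder above. The 128 mel bins and 256-frame context mirror the constants used by the pipeline in this directory, but the remaining hyperparameters are assumptions for the example, not the values of any released checkpoint:

```python
# Illustrative only: d_model, layer count, etc. are assumed, not checkpoint values.
import torch
from diffusers.pipelines.spectrogram_diffusion.continous_encoder import SpectrogramContEncoder

encoder = SpectrogramContEncoder(
    input_dims=128,              # mel bins per frame (matches the pipeline's n_dims)
    targets_context_length=256,  # context frames (matches TARGET_FEATURE_LENGTH)
    d_model=64,
    dropout_rate=0.1,
    num_layers=2,
    num_heads=4,
    d_kv=16,
    d_ff=128,
    feed_forward_proj="gated-gelu",
)

mel_context = torch.randn(1, 256, 128)       # (batch, frames, mel bins)
mask = torch.ones(1, 256, dtype=torch.long)  # all frames valid
encoded, mask = encoder(mel_context, mask)
print(encoded.shape)  # torch.Size([1, 256, 64])
```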
  {
    "path": "diffusers/pipelines/spectrogram_diffusion/midi_utils.py",
    "content": "# Copyright 2022 The Music Spectrogram Diffusion Authors.\n# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport dataclasses\nimport math\nimport os\nfrom typing import Any, Callable, List, Mapping, MutableMapping, Optional, Sequence, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\n\nfrom ...utils import is_note_seq_available\nfrom .pipeline_spectrogram_diffusion import TARGET_FEATURE_LENGTH\n\n\nif is_note_seq_available():\n    import note_seq\nelse:\n    raise ImportError(\"Please install note-seq via `pip install note-seq`\")\n\n\nINPUT_FEATURE_LENGTH = 2048\n\nSAMPLE_RATE = 16000\nHOP_SIZE = 320\nFRAME_RATE = int(SAMPLE_RATE // HOP_SIZE)\n\nDEFAULT_STEPS_PER_SECOND = 100\nDEFAULT_MAX_SHIFT_SECONDS = 10\nDEFAULT_NUM_VELOCITY_BINS = 1\n\nSLAKH_CLASS_PROGRAMS = {\n    \"Acoustic Piano\": 0,\n    \"Electric Piano\": 4,\n    \"Chromatic Percussion\": 8,\n    \"Organ\": 16,\n    \"Acoustic Guitar\": 24,\n    \"Clean Electric Guitar\": 26,\n    \"Distorted Electric Guitar\": 29,\n    \"Acoustic Bass\": 32,\n    \"Electric Bass\": 33,\n    \"Violin\": 40,\n    \"Viola\": 41,\n    \"Cello\": 42,\n    \"Contrabass\": 43,\n    \"Orchestral Harp\": 46,\n    \"Timpani\": 47,\n    \"String Ensemble\": 48,\n    \"Synth Strings\": 50,\n    \"Choir and Voice\": 52,\n    \"Orchestral Hit\": 55,\n    \"Trumpet\": 56,\n    \"Trombone\": 57,\n    \"Tuba\": 58,\n    \"French Horn\": 60,\n    \"Brass Section\": 61,\n    \"Soprano/Alto Sax\": 64,\n    \"Tenor Sax\": 66,\n    \"Baritone Sax\": 67,\n    \"Oboe\": 68,\n    \"English Horn\": 69,\n    \"Bassoon\": 70,\n    \"Clarinet\": 71,\n    \"Pipe\": 73,\n    \"Synth Lead\": 80,\n    \"Synth Pad\": 88,\n}\n\n\n@dataclasses.dataclass\nclass NoteRepresentationConfig:\n    \"\"\"Configuration note representations.\"\"\"\n\n    onsets_only: bool\n    include_ties: bool\n\n\n@dataclasses.dataclass\nclass NoteEventData:\n    pitch: int\n    velocity: Optional[int] = None\n    program: Optional[int] = None\n    is_drum: Optional[bool] = None\n    instrument: Optional[int] = None\n\n\n@dataclasses.dataclass\nclass NoteEncodingState:\n    \"\"\"Encoding state for note transcription, keeping track of active pitches.\"\"\"\n\n    # velocity bin for active pitches and programs\n    active_pitches: MutableMapping[Tuple[int, int], int] = dataclasses.field(default_factory=dict)\n\n\n@dataclasses.dataclass\nclass EventRange:\n    type: str\n    min_value: int\n    max_value: int\n\n\n@dataclasses.dataclass\nclass Event:\n    type: str\n    value: int\n\n\nclass Tokenizer:\n    def __init__(self, regular_ids: int):\n        # The special tokens: 0=PAD, 1=EOS, and 2=UNK\n        self._num_special_tokens = 3\n        self._num_regular_tokens = regular_ids\n\n    def encode(self, token_ids):\n        encoded = []\n        for token_id in token_ids:\n            if not 0 <= token_id < self._num_regular_tokens:\n                raise ValueError(\n                  
  f\"token_id {token_id} does not fall within valid range of [0, {self._num_regular_tokens})\"\n                )\n            encoded.append(token_id + self._num_special_tokens)\n\n        # Add EOS token\n        encoded.append(1)\n\n        # Pad to till INPUT_FEATURE_LENGTH\n        encoded = encoded + [0] * (INPUT_FEATURE_LENGTH - len(encoded))\n\n        return encoded\n\n\nclass Codec:\n    \"\"\"Encode and decode events.\n\n    Useful for declaring what certain ranges of a vocabulary should be used for. This is intended to be used from\n    Python before encoding or after decoding with GenericTokenVocabulary. This class is more lightweight and does not\n    include things like EOS or UNK token handling.\n\n    To ensure that 'shift' events are always the first block of the vocab and start at 0, that event type is required\n    and specified separately.\n    \"\"\"\n\n    def __init__(self, max_shift_steps: int, steps_per_second: float, event_ranges: List[EventRange]):\n        \"\"\"Define Codec.\n\n        Args:\n          max_shift_steps: Maximum number of shift steps that can be encoded.\n          steps_per_second: Shift steps will be interpreted as having a duration of\n              1 / steps_per_second.\n          event_ranges: Other supported event types and their ranges.\n        \"\"\"\n        self.steps_per_second = steps_per_second\n        self._shift_range = EventRange(type=\"shift\", min_value=0, max_value=max_shift_steps)\n        self._event_ranges = [self._shift_range] + event_ranges\n        # Ensure all event types have unique names.\n        assert len(self._event_ranges) == len({er.type for er in self._event_ranges})\n\n    @property\n    def num_classes(self) -> int:\n        return sum(er.max_value - er.min_value + 1 for er in self._event_ranges)\n\n    # The next couple methods are simplified special case methods just for shift\n    # events that are intended to be used from within autograph functions.\n\n    def is_shift_event_index(self, index: int) -> bool:\n        return (self._shift_range.min_value <= index) and (index <= self._shift_range.max_value)\n\n    @property\n    def max_shift_steps(self) -> int:\n        return self._shift_range.max_value\n\n    def encode_event(self, event: Event) -> int:\n        \"\"\"Encode an event to an index.\"\"\"\n        offset = 0\n        for er in self._event_ranges:\n            if event.type == er.type:\n                if not er.min_value <= event.value <= er.max_value:\n                    raise ValueError(\n                        f\"Event value {event.value} is not within valid range \"\n                        f\"[{er.min_value}, {er.max_value}] for type {event.type}\"\n                    )\n                return offset + event.value - er.min_value\n            offset += er.max_value - er.min_value + 1\n\n        raise ValueError(f\"Unknown event type: {event.type}\")\n\n    def event_type_range(self, event_type: str) -> Tuple[int, int]:\n        \"\"\"Return [min_id, max_id] for an event type.\"\"\"\n        offset = 0\n        for er in self._event_ranges:\n            if event_type == er.type:\n                return offset, offset + (er.max_value - er.min_value)\n            offset += er.max_value - er.min_value + 1\n\n        raise ValueError(f\"Unknown event type: {event_type}\")\n\n    def decode_event_index(self, index: int) -> Event:\n        \"\"\"Decode an event index to an Event.\"\"\"\n        offset = 0\n        for er in self._event_ranges:\n            if offset <= index <= offset + 
er.max_value - er.min_value:\n                return Event(type=er.type, value=er.min_value + index - offset)\n            offset += er.max_value - er.min_value + 1\n\n        raise ValueError(f\"Unknown event index: {index}\")\n\n\n@dataclasses.dataclass\nclass ProgramGranularity:\n    # both tokens_map_fn and program_map_fn should be idempotent\n    tokens_map_fn: Callable[[Sequence[int], Codec], Sequence[int]]\n    program_map_fn: Callable[[int], int]\n\n\ndef drop_programs(tokens, codec: Codec):\n    \"\"\"Drops program change events from a token sequence.\"\"\"\n    min_program_id, max_program_id = codec.event_type_range(\"program\")\n    return tokens[(tokens < min_program_id) | (tokens > max_program_id)]\n\n\ndef programs_to_midi_classes(tokens, codec):\n    \"\"\"Modifies program events to be the first program in the MIDI class.\"\"\"\n    min_program_id, max_program_id = codec.event_type_range(\"program\")\n    is_program = (tokens >= min_program_id) & (tokens <= max_program_id)\n    return np.where(is_program, min_program_id + 8 * ((tokens - min_program_id) // 8), tokens)\n\n\nPROGRAM_GRANULARITIES = {\n    # \"flat\" granularity; drop program change tokens and set NoteSequence\n    # programs to zero\n    \"flat\": ProgramGranularity(tokens_map_fn=drop_programs, program_map_fn=lambda program: 0),\n    # map each program to the first program in its MIDI class\n    \"midi_class\": ProgramGranularity(\n        tokens_map_fn=programs_to_midi_classes, program_map_fn=lambda program: 8 * (program // 8)\n    ),\n    # leave programs as is\n    \"full\": ProgramGranularity(tokens_map_fn=lambda tokens, codec: tokens, program_map_fn=lambda program: program),\n}\n\n\ndef frame(signal, frame_length, frame_step, pad_end=False, pad_value=0, axis=-1):\n    \"\"\"\n    equivalent of tf.signal.frame\n    \"\"\"\n    signal_length = signal.shape[axis]\n    if pad_end:\n        frames_overlap = frame_length - frame_step\n        rest_samples = np.abs(signal_length - frames_overlap) % np.abs(frame_length - frames_overlap)\n        pad_size = int(frame_length - rest_samples)\n\n        if pad_size != 0:\n            pad_axis = [0] * signal.ndim\n            pad_axis[axis] = pad_size\n            signal = F.pad(signal, pad_axis, \"constant\", pad_value)\n    frames = signal.unfold(axis, frame_length, frame_step)\n    return frames\n\n\ndef program_to_slakh_program(program):\n    # this is done very hackily, probably should use a custom mapping\n    for slakh_program in sorted(SLAKH_CLASS_PROGRAMS.values(), reverse=True):\n        if program >= slakh_program:\n            return slakh_program\n\n\ndef audio_to_frames(\n    samples,\n    hop_size: int,\n    frame_rate: int,\n) -> Tuple[Sequence[Sequence[int]], torch.Tensor]:\n    \"\"\"Convert audio samples to non-overlapping frames and frame times.\"\"\"\n    frame_size = hop_size\n    samples = np.pad(samples, [0, frame_size - len(samples) % frame_size], mode=\"constant\")\n\n    # Split audio into frames.\n    frames = frame(\n        torch.Tensor(samples).unsqueeze(0),\n        frame_length=frame_size,\n        frame_step=frame_size,\n        pad_end=False,  # TODO check why its off by 1 here when True\n    )\n\n    num_frames = len(samples) // frame_size\n\n    times = np.arange(num_frames) / frame_rate\n    return frames, times\n\n\ndef note_sequence_to_onsets_and_offsets_and_programs(\n    ns: note_seq.NoteSequence,\n) -> Tuple[Sequence[float], Sequence[NoteEventData]]:\n    \"\"\"Extract onset & offset times and pitches & programs from a 
NoteSequence.\n\n    The onset & offset times will not necessarily be in sorted order.\n\n    Args:\n      ns: NoteSequence from which to extract onsets and offsets.\n\n    Returns:\n      times: A list of note onset and offset times. values: A list of NoteEventData objects where velocity is zero for\n      note\n          offsets.\n    \"\"\"\n    # Sort by program and pitch and put offsets before onsets as a tiebreaker for\n    # subsequent stable sort.\n    notes = sorted(ns.notes, key=lambda note: (note.is_drum, note.program, note.pitch))\n    times = [note.end_time for note in notes if not note.is_drum] + [note.start_time for note in notes]\n    values = [\n        NoteEventData(pitch=note.pitch, velocity=0, program=note.program, is_drum=False)\n        for note in notes\n        if not note.is_drum\n    ] + [\n        NoteEventData(pitch=note.pitch, velocity=note.velocity, program=note.program, is_drum=note.is_drum)\n        for note in notes\n    ]\n    return times, values\n\n\ndef num_velocity_bins_from_codec(codec: Codec):\n    \"\"\"Get number of velocity bins from event codec.\"\"\"\n    lo, hi = codec.event_type_range(\"velocity\")\n    return hi - lo\n\n\n# segment an array into segments of length n\ndef segment(a, n):\n    return [a[i : i + n] for i in range(0, len(a), n)]\n\n\ndef velocity_to_bin(velocity, num_velocity_bins):\n    if velocity == 0:\n        return 0\n    else:\n        return math.ceil(num_velocity_bins * velocity / note_seq.MAX_MIDI_VELOCITY)\n\n\ndef note_event_data_to_events(\n    state: Optional[NoteEncodingState],\n    value: NoteEventData,\n    codec: Codec,\n) -> Sequence[Event]:\n    \"\"\"Convert note event data to a sequence of events.\"\"\"\n    if value.velocity is None:\n        # onsets only, no program or velocity\n        return [Event(\"pitch\", value.pitch)]\n    else:\n        num_velocity_bins = num_velocity_bins_from_codec(codec)\n        velocity_bin = velocity_to_bin(value.velocity, num_velocity_bins)\n        if value.program is None:\n            # onsets + offsets + velocities only, no programs\n            if state is not None:\n                state.active_pitches[(value.pitch, 0)] = velocity_bin\n            return [Event(\"velocity\", velocity_bin), Event(\"pitch\", value.pitch)]\n        else:\n            if value.is_drum:\n                # drum events use a separate vocabulary\n                return [Event(\"velocity\", velocity_bin), Event(\"drum\", value.pitch)]\n            else:\n                # program + velocity + pitch\n                if state is not None:\n                    state.active_pitches[(value.pitch, value.program)] = velocity_bin\n                return [\n                    Event(\"program\", value.program),\n                    Event(\"velocity\", velocity_bin),\n                    Event(\"pitch\", value.pitch),\n                ]\n\n\ndef note_encoding_state_to_events(state: NoteEncodingState) -> Sequence[Event]:\n    \"\"\"Output program and pitch events for active notes plus a final tie event.\"\"\"\n    events = []\n    for pitch, program in sorted(state.active_pitches.keys(), key=lambda k: k[::-1]):\n        if state.active_pitches[(pitch, program)]:\n            events += [Event(\"program\", program), Event(\"pitch\", pitch)]\n    events.append(Event(\"tie\", 0))\n    return events\n\n\ndef encode_and_index_events(\n    state, event_times, event_values, codec, frame_times, encode_event_fn, encoding_state_to_events_fn=None\n):\n    \"\"\"Encode a sequence of timed events and index to audio 
frame times.\n\n    Encodes time shifts as repeated single step shifts for later run length encoding.\n\n    Optionally, also encodes a sequence of \"state events\", keeping track of the current encoding state at each audio\n    frame. This can be used e.g. to prepend events representing the current state to a targets segment.\n\n    Args:\n      state: Initial event encoding state.\n      event_times: Sequence of event times.\n      event_values: Sequence of event values.\n      encode_event_fn: Function that transforms event value into a sequence of one\n          or more Event objects.\n      codec: An Codec object that maps Event objects to indices.\n      frame_times: Time for every audio frame.\n      encoding_state_to_events_fn: Function that transforms encoding state into a\n          sequence of one or more Event objects.\n\n    Returns:\n      events: Encoded events and shifts. event_start_indices: Corresponding start event index for every audio frame.\n          Note: one event can correspond to multiple audio indices due to sampling rate differences. This makes\n          splitting sequences tricky because the same event can appear at the end of one sequence and the beginning of\n          another.\n      event_end_indices: Corresponding end event index for every audio frame. Used\n          to ensure when slicing that one chunk ends where the next begins. Should always be true that\n          event_end_indices[i] = event_start_indices[i + 1].\n      state_events: Encoded \"state\" events representing the encoding state before\n          each event.\n      state_event_indices: Corresponding state event index for every audio frame.\n    \"\"\"\n    indices = np.argsort(event_times, kind=\"stable\")\n    event_steps = [round(event_times[i] * codec.steps_per_second) for i in indices]\n    event_values = [event_values[i] for i in indices]\n\n    events = []\n    state_events = []\n    event_start_indices = []\n    state_event_indices = []\n\n    cur_step = 0\n    cur_event_idx = 0\n    cur_state_event_idx = 0\n\n    def fill_event_start_indices_to_cur_step():\n        while (\n            len(event_start_indices) < len(frame_times)\n            and frame_times[len(event_start_indices)] < cur_step / codec.steps_per_second\n        ):\n            event_start_indices.append(cur_event_idx)\n            state_event_indices.append(cur_state_event_idx)\n\n    for event_step, event_value in zip(event_steps, event_values):\n        while event_step > cur_step:\n            events.append(codec.encode_event(Event(type=\"shift\", value=1)))\n            cur_step += 1\n            fill_event_start_indices_to_cur_step()\n            cur_event_idx = len(events)\n            cur_state_event_idx = len(state_events)\n        if encoding_state_to_events_fn:\n            # Dump state to state events *before* processing the next event, because\n            # we want to capture the state prior to the occurrence of the event.\n            for e in encoding_state_to_events_fn(state):\n                state_events.append(codec.encode_event(e))\n\n        for e in encode_event_fn(state, event_value, codec):\n            events.append(codec.encode_event(e))\n\n    # After the last event, continue filling out the event_start_indices array.\n    # The inequality is not strict because if our current step lines up exactly\n    # with (the start of) an audio frame, we need to add an additional shift event\n    # to \"cover\" that frame.\n    while cur_step / codec.steps_per_second <= frame_times[-1]:\n        
events.append(codec.encode_event(Event(type=\"shift\", value=1)))\n        cur_step += 1\n        fill_event_start_indices_to_cur_step()\n        cur_event_idx = len(events)\n\n    # Now fill in event_end_indices. We need this extra array to make sure that\n    # when we slice events, each slice ends exactly where the subsequent slice\n    # begins.\n    event_end_indices = event_start_indices[1:] + [len(events)]\n\n    events = np.array(events).astype(np.int32)\n    state_events = np.array(state_events).astype(np.int32)\n    event_start_indices = segment(np.array(event_start_indices).astype(np.int32), TARGET_FEATURE_LENGTH)\n    event_end_indices = segment(np.array(event_end_indices).astype(np.int32), TARGET_FEATURE_LENGTH)\n    state_event_indices = segment(np.array(state_event_indices).astype(np.int32), TARGET_FEATURE_LENGTH)\n\n    outputs = []\n    for start_indices, end_indices, event_indices in zip(event_start_indices, event_end_indices, state_event_indices):\n        outputs.append(\n            {\n                \"inputs\": events,\n                \"event_start_indices\": start_indices,\n                \"event_end_indices\": end_indices,\n                \"state_events\": state_events,\n                \"state_event_indices\": event_indices,\n            }\n        )\n\n    return outputs\n\n\ndef extract_sequence_with_indices(features, state_events_end_token=None, feature_key=\"inputs\"):\n    \"\"\"Extract target sequence corresponding to audio token segment.\"\"\"\n    features = features.copy()\n    start_idx = features[\"event_start_indices\"][0]\n    end_idx = features[\"event_end_indices\"][-1]\n\n    features[feature_key] = features[feature_key][start_idx:end_idx]\n\n    if state_events_end_token is not None:\n        # Extract the state events corresponding to the audio start token, and\n        # prepend them to the targets array.\n        state_event_start_idx = features[\"state_event_indices\"][0]\n        state_event_end_idx = state_event_start_idx + 1\n        while features[\"state_events\"][state_event_end_idx - 1] != state_events_end_token:\n            state_event_end_idx += 1\n        features[feature_key] = np.concatenate(\n            [\n                features[\"state_events\"][state_event_start_idx:state_event_end_idx],\n                features[feature_key],\n            ],\n            axis=0,\n        )\n\n    return features\n\n\ndef map_midi_programs(\n    feature, codec: Codec, granularity_type: str = \"full\", feature_key: str = \"inputs\"\n) -> Mapping[str, Any]:\n    \"\"\"Apply MIDI program map to token sequences.\"\"\"\n    granularity = PROGRAM_GRANULARITIES[granularity_type]\n\n    feature[feature_key] = granularity.tokens_map_fn(feature[feature_key], codec)\n    return feature\n\n\ndef run_length_encode_shifts_fn(\n    features,\n    codec: Codec,\n    feature_key: str = \"inputs\",\n    state_change_event_types: Sequence[str] = (),\n) -> Callable[[Mapping[str, Any]], Mapping[str, Any]]:\n    \"\"\"Return a function that run-length encodes shifts for a given codec.\n\n    Args:\n      codec: The Codec to use for shift events.\n      feature_key: The feature key for which to run-length encode shifts.\n      state_change_event_types: A list of event types that represent state\n          changes; tokens corresponding to these event types will be interpreted as state changes and redundant ones\n          will be removed.\n\n    Returns:\n      A preprocessing function that run-length encodes single-step shifts.\n    \"\"\"\n    
state_change_event_ranges = [codec.event_type_range(event_type) for event_type in state_change_event_types]\n\n    def run_length_encode_shifts(features: MutableMapping[str, Any]) -> Mapping[str, Any]:\n        \"\"\"Combine leading/interior shifts, trim trailing shifts.\n\n        Args:\n          features: Dict of features to process.\n\n        Returns:\n          A dict of features.\n        \"\"\"\n        events = features[feature_key]\n\n        shift_steps = 0\n        total_shift_steps = 0\n        output = np.array([], dtype=np.int32)\n\n        current_state = np.zeros(len(state_change_event_ranges), dtype=np.int32)\n\n        for event in events:\n            if codec.is_shift_event_index(event):\n                shift_steps += 1\n                total_shift_steps += 1\n\n            else:\n                # If this event is a state change and has the same value as the current\n                # state, we can skip it entirely.\n                is_redundant = False\n                for i, (min_index, max_index) in enumerate(state_change_event_ranges):\n                    if (min_index <= event) and (event <= max_index):\n                        if current_state[i] == event:\n                            is_redundant = True\n                        current_state[i] = event\n                if is_redundant:\n                    continue\n\n                # Once we've reached a non-shift event, RLE all previous shift events\n                # before outputting the non-shift event.\n                if shift_steps > 0:\n                    shift_steps = total_shift_steps\n                    while shift_steps > 0:\n                        output_steps = np.minimum(codec.max_shift_steps, shift_steps)\n                        output = np.concatenate([output, [output_steps]], axis=0)\n                        shift_steps -= output_steps\n                output = np.concatenate([output, [event]], axis=0)\n\n        features[feature_key] = output\n        return features\n\n    return run_length_encode_shifts(features)\n\n\ndef note_representation_processor_chain(features, codec: Codec, note_representation_config: NoteRepresentationConfig):\n    tie_token = codec.encode_event(Event(\"tie\", 0))\n    state_events_end_token = tie_token if note_representation_config.include_ties else None\n\n    features = extract_sequence_with_indices(\n        features, state_events_end_token=state_events_end_token, feature_key=\"inputs\"\n    )\n\n    features = map_midi_programs(features, codec)\n\n    features = run_length_encode_shifts_fn(features, codec, state_change_event_types=[\"velocity\", \"program\"])\n\n    return features\n\n\nclass MidiProcessor:\n    def __init__(self):\n        self.codec = Codec(\n            max_shift_steps=DEFAULT_MAX_SHIFT_SECONDS * DEFAULT_STEPS_PER_SECOND,\n            steps_per_second=DEFAULT_STEPS_PER_SECOND,\n            event_ranges=[\n                EventRange(\"pitch\", note_seq.MIN_MIDI_PITCH, note_seq.MAX_MIDI_PITCH),\n                EventRange(\"velocity\", 0, DEFAULT_NUM_VELOCITY_BINS),\n                EventRange(\"tie\", 0, 0),\n                EventRange(\"program\", note_seq.MIN_MIDI_PROGRAM, note_seq.MAX_MIDI_PROGRAM),\n                EventRange(\"drum\", note_seq.MIN_MIDI_PITCH, note_seq.MAX_MIDI_PITCH),\n            ],\n        )\n        self.tokenizer = Tokenizer(self.codec.num_classes)\n        self.note_representation_config = NoteRepresentationConfig(onsets_only=False, include_ties=True)\n\n    def __call__(self, midi: Union[bytes, 
os.PathLike, str]):\n        if not isinstance(midi, bytes):\n            with open(midi, \"rb\") as f:\n                midi = f.read()\n\n        ns = note_seq.midi_to_note_sequence(midi)\n        ns_sus = note_seq.apply_sustain_control_changes(ns)\n\n        for note in ns_sus.notes:\n            if not note.is_drum:\n                note.program = program_to_slakh_program(note.program)\n\n        samples = np.zeros(int(ns_sus.total_time * SAMPLE_RATE))\n\n        _, frame_times = audio_to_frames(samples, HOP_SIZE, FRAME_RATE)\n        times, values = note_sequence_to_onsets_and_offsets_and_programs(ns_sus)\n\n        events = encode_and_index_events(\n            state=NoteEncodingState(),\n            event_times=times,\n            event_values=values,\n            frame_times=frame_times,\n            codec=self.codec,\n            encode_event_fn=note_event_data_to_events,\n            encoding_state_to_events_fn=note_encoding_state_to_events,\n        )\n\n        events = [\n            note_representation_processor_chain(event, self.codec, self.note_representation_config) for event in events\n        ]\n        input_tokens = [self.tokenizer.encode(event[\"inputs\"]) for event in events]\n\n        return input_tokens\n"
  },
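A small round-trip sketch of the event `Codec` defined above: shift events always occupy the first block of the vocabulary, and every other `EventRange` is laid out after it in order. The ranges below are assumptions chosen for readability, not the `MidiProcessor` defaults, and importing `midi_utils` requires `note-seq` to be installed (the module raises `ImportError` otherwise):

```python
# Illustrative Codec round trip; ranges are example assumptions, not MidiProcessor defaults.
from diffusers.pipelines.spectrogram_diffusion.midi_utils import Codec, Event, EventRange

codec = Codec(
    max_shift_steps=100,   # "shift" events occupy indices 0..100
    steps_per_second=100,
    event_ranges=[
        EventRange("pitch", min_value=21, max_value=108),   # piano range
        EventRange("velocity", min_value=0, max_value=1),
    ],
)

idx = codec.encode_event(Event(type="pitch", value=60))  # middle C
print(idx)                             # 101 + (60 - 21) = 140
print(codec.decode_event_index(idx))   # Event(type='pitch', value=60)
print(codec.num_classes)               # 101 shift + 88 pitch + 2 velocity = 191
```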
  {
    "path": "diffusers/pipelines/spectrogram_diffusion/notes_encoder.py",
    "content": "# Copyright 2022 The Music Spectrogram Diffusion Authors.\n# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\nimport torch.nn as nn\nfrom transformers.modeling_utils import ModuleUtilsMixin\nfrom transformers.models.t5.modeling_t5 import T5Block, T5Config, T5LayerNorm\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...models import ModelMixin\n\n\nclass SpectrogramNotesEncoder(ModelMixin, ConfigMixin, ModuleUtilsMixin):\n    @register_to_config\n    def __init__(\n        self,\n        max_length: int,\n        vocab_size: int,\n        d_model: int,\n        dropout_rate: float,\n        num_layers: int,\n        num_heads: int,\n        d_kv: int,\n        d_ff: int,\n        feed_forward_proj: str,\n        is_decoder: bool = False,\n    ):\n        super().__init__()\n\n        self.token_embedder = nn.Embedding(vocab_size, d_model)\n\n        self.position_encoding = nn.Embedding(max_length, d_model)\n        self.position_encoding.weight.requires_grad = False\n\n        self.dropout_pre = nn.Dropout(p=dropout_rate)\n\n        t5config = T5Config(\n            vocab_size=vocab_size,\n            d_model=d_model,\n            num_heads=num_heads,\n            d_kv=d_kv,\n            d_ff=d_ff,\n            dropout_rate=dropout_rate,\n            feed_forward_proj=feed_forward_proj,\n            is_decoder=is_decoder,\n            is_encoder_decoder=False,\n        )\n\n        self.encoders = nn.ModuleList()\n        for lyr_num in range(num_layers):\n            lyr = T5Block(t5config)\n            self.encoders.append(lyr)\n\n        self.layer_norm = T5LayerNorm(d_model)\n        self.dropout_post = nn.Dropout(p=dropout_rate)\n\n    def forward(self, encoder_input_tokens, encoder_inputs_mask):\n        x = self.token_embedder(encoder_input_tokens)\n\n        seq_length = encoder_input_tokens.shape[1]\n        inputs_positions = torch.arange(seq_length, device=encoder_input_tokens.device)\n        x += self.position_encoding(inputs_positions)\n\n        x = self.dropout_pre(x)\n\n        # inverted the attention mask\n        input_shape = encoder_input_tokens.size()\n        extended_attention_mask = self.get_extended_attention_mask(encoder_inputs_mask, input_shape)\n\n        for lyr in self.encoders:\n            x = lyr(x, extended_attention_mask)[0]\n        x = self.layer_norm(x)\n\n        return self.dropout_post(x), encoder_inputs_mask\n"
  },
  {
    "path": "diffusers/pipelines/spectrogram_diffusion/pipeline_spectrogram_diffusion.py",
    "content": "# Copyright 2022 The Music Spectrogram Diffusion Authors.\n# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import Any, Callable, List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ...models import T5FilmDecoder\nfrom ...schedulers import DDPMScheduler\nfrom ...utils import is_onnx_available, logging, randn_tensor\n\n\nif is_onnx_available():\n    from ..onnx_utils import OnnxRuntimeModel\n\nfrom ..pipeline_utils import AudioPipelineOutput, DiffusionPipeline\nfrom .continous_encoder import SpectrogramContEncoder\nfrom .notes_encoder import SpectrogramNotesEncoder\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nTARGET_FEATURE_LENGTH = 256\n\n\nclass SpectrogramDiffusionPipeline(DiffusionPipeline):\n    _optional_components = [\"melgan\"]\n\n    def __init__(\n        self,\n        notes_encoder: SpectrogramNotesEncoder,\n        continuous_encoder: SpectrogramContEncoder,\n        decoder: T5FilmDecoder,\n        scheduler: DDPMScheduler,\n        melgan: OnnxRuntimeModel if is_onnx_available() else Any,\n    ) -> None:\n        super().__init__()\n\n        # From MELGAN\n        self.min_value = math.log(1e-5)  # Matches MelGAN training.\n        self.max_value = 4.0  # Largest value for most examples\n        self.n_dims = 128\n\n        self.register_modules(\n            notes_encoder=notes_encoder,\n            continuous_encoder=continuous_encoder,\n            decoder=decoder,\n            scheduler=scheduler,\n            melgan=melgan,\n        )\n\n    def scale_features(self, features, output_range=(-1.0, 1.0), clip=False):\n        \"\"\"Linearly scale features to network outputs range.\"\"\"\n        min_out, max_out = output_range\n        if clip:\n            features = torch.clip(features, self.min_value, self.max_value)\n        # Scale to [0, 1].\n        zero_one = (features - self.min_value) / (self.max_value - self.min_value)\n        # Scale to [min_out, max_out].\n        return zero_one * (max_out - min_out) + min_out\n\n    def scale_to_features(self, outputs, input_range=(-1.0, 1.0), clip=False):\n        \"\"\"Invert by linearly scaling network outputs to features range.\"\"\"\n        min_out, max_out = input_range\n        outputs = torch.clip(outputs, min_out, max_out) if clip else outputs\n        # Scale to [0, 1].\n        zero_one = (outputs - min_out) / (max_out - min_out)\n        # Scale to [self.min_value, self.max_value].\n        return zero_one * (self.max_value - self.min_value) + self.min_value\n\n    def encode(self, input_tokens, continuous_inputs, continuous_mask):\n        tokens_mask = input_tokens > 0\n        tokens_encoded, tokens_mask = self.notes_encoder(\n            encoder_input_tokens=input_tokens, encoder_inputs_mask=tokens_mask\n        )\n\n        continuous_encoded, continuous_mask = self.continuous_encoder(\n            encoder_inputs=continuous_inputs, 
encoder_inputs_mask=continuous_mask\n        )\n\n        return [(tokens_encoded, tokens_mask), (continuous_encoded, continuous_mask)]\n\n    def decode(self, encodings_and_masks, input_tokens, noise_time):\n        timesteps = noise_time\n        if not torch.is_tensor(timesteps):\n            timesteps = torch.tensor([timesteps], dtype=torch.long, device=input_tokens.device)\n        elif torch.is_tensor(timesteps) and len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(input_tokens.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        timesteps = timesteps * torch.ones(input_tokens.shape[0], dtype=timesteps.dtype, device=timesteps.device)\n\n        logits = self.decoder(\n            encodings_and_masks=encodings_and_masks, decoder_input_tokens=input_tokens, decoder_noise_time=timesteps\n        )\n        return logits\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        input_tokens: List[List[int]],\n        generator: Optional[torch.Generator] = None,\n        num_inference_steps: int = 100,\n        return_dict: bool = True,\n        output_type: str = \"numpy\",\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ) -> Union[AudioPipelineOutput, Tuple]:\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        pred_mel = np.zeros([1, TARGET_FEATURE_LENGTH, self.n_dims], dtype=np.float32)\n        full_pred_mel = np.zeros([1, 0, self.n_dims], np.float32)\n        ones = torch.ones((1, TARGET_FEATURE_LENGTH), dtype=bool, device=self.device)\n\n        for i, encoder_input_tokens in enumerate(input_tokens):\n            if i == 0:\n                encoder_continuous_inputs = torch.from_numpy(pred_mel[:1].copy()).to(\n                    device=self.device, dtype=self.decoder.dtype\n                )\n                # The first chunk has no previous context.\n                encoder_continuous_mask = torch.zeros((1, TARGET_FEATURE_LENGTH), dtype=bool, device=self.device)\n            else:\n                # The full song pipeline does not feed in a context feature, so the mask\n                # will be all 0s after the feature converter. 
Because we know we're\n                # feeding in a full context chunk from the previous prediction, set it\n                # to all 1s.\n                encoder_continuous_mask = ones\n\n            encoder_continuous_inputs = self.scale_features(\n                encoder_continuous_inputs, output_range=[-1.0, 1.0], clip=True\n            )\n\n            encodings_and_masks = self.encode(\n                input_tokens=torch.IntTensor([encoder_input_tokens]).to(device=self.device),\n                continuous_inputs=encoder_continuous_inputs,\n                continuous_mask=encoder_continuous_mask,\n            )\n\n            # Sample encoder_continuous_inputs shaped gaussian noise to begin loop\n            x = randn_tensor(\n                shape=encoder_continuous_inputs.shape,\n                generator=generator,\n                device=self.device,\n                dtype=self.decoder.dtype,\n            )\n\n            # set step values\n            self.scheduler.set_timesteps(num_inference_steps)\n\n            # Denoising diffusion loop\n            for j, t in enumerate(self.progress_bar(self.scheduler.timesteps)):\n                output = self.decode(\n                    encodings_and_masks=encodings_and_masks,\n                    input_tokens=x,\n                    noise_time=t / self.scheduler.config.num_train_timesteps,  # rescale to [0, 1)\n                )\n\n                # Compute previous output: x_t -> x_t-1\n                x = self.scheduler.step(output, t, x, generator=generator).prev_sample\n\n            mel = self.scale_to_features(x, input_range=[-1.0, 1.0])\n            encoder_continuous_inputs = mel[:1]\n            pred_mel = mel.cpu().float().numpy()\n\n            full_pred_mel = np.concatenate([full_pred_mel, pred_mel[:1]], axis=1)\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, full_pred_mel)\n\n            logger.info(\"Generated segment\", i)\n\n        if output_type == \"numpy\" and not is_onnx_available():\n            raise ValueError(\n                \"Cannot return output in 'np' format if ONNX is not available. Make sure to have ONNX installed or set 'output_type' to 'mel'.\"\n            )\n        elif output_type == \"numpy\" and self.melgan is None:\n            raise ValueError(\n                \"Cannot return output in 'np' format if melgan component is not defined. Make sure to define `self.melgan` or set 'output_type' to 'mel'.\"\n            )\n\n        if output_type == \"numpy\":\n            output = self.melgan(input_features=full_pred_mel.astype(np.float32))\n        else:\n            output = full_pred_mel\n\n        if not return_dict:\n            return (output,)\n\n        return AudioPipelineOutput(audios=output)\n"
  },
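An end-to-end sketch of driving the pipeline above from a MIDI file: `MidiProcessor` turns the MIDI into one token list per 256-frame segment, and each segment is denoised with the previous segment's spectrogram as context. The `google/music-spectrogram-diffusion` checkpoint id, the CUDA device, and the local `input.mid` path are assumptions for the example; `note-seq` and `onnxruntime` (for the MelGAN vocoder) need to be installed:

```python
# Sketch only: checkpoint id, device, and MIDI path are assumptions for this example.
import scipy.io.wavfile
from diffusers import SpectrogramDiffusionPipeline
from diffusers.pipelines.spectrogram_diffusion import MidiProcessor

pipe = SpectrogramDiffusionPipeline.from_pretrained("google/music-spectrogram-diffusion").to("cuda")
processor = MidiProcessor()

# One token list per TARGET_FEATURE_LENGTH-frame segment of the piece
input_tokens = processor("input.mid")

output = pipe(input_tokens, num_inference_steps=100)
audio = output.audios[0]

# SAMPLE_RATE in midi_utils is 16 kHz
scipy.io.wavfile.write("output.wav", rate=16000, data=audio)
```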
  {
    "path": "diffusers/pipelines/stable_diffusion/README.md",
    "content": "# Stable Diffusion\n\n## Overview\n\nStable Diffusion was proposed in [Stable Diffusion Announcement](https://stability.ai/blog/stable-diffusion-announcement) by Patrick Esser and Robin Rombach and the Stability AI team.\n\nThe summary of the model is the following:\n\n*Stable Diffusion is a text-to-image model that will empower billions of people to create stunning art within seconds. It is a breakthrough in speed and quality meaning that it can run on consumer GPUs. You can see some of the amazing output that has been created by this model without pre or post-processing on this page. The model itself builds upon the work of the team at CompVis and Runway in their widely used latent diffusion model combined with insights from the conditional diffusion models by our lead generative AI developer Katherine Crowson, Dall-E 2 by Open AI, Imagen by Google Brain and many others. We are delighted that AI media generation is a cooperative field and hope it can continue this way to bring the gift of creativity to all.* \n\n## Tips:\n\n- Stable Diffusion has the same architecture as [Latent Diffusion](https://arxiv.org/abs/2112.10752) but uses a frozen CLIP Text Encoder instead of training the text encoder jointly with the diffusion model.\n- An in-detail explanation of the Stable Diffusion model can be found under [Stable Diffusion with 🧨 Diffusers](https://huggingface.co/blog/stable_diffusion).\n- If you don't want to rely on the Hugging Face Hub and having to pass a authentication token, you can \ndownload the weights with `git lfs install; git clone https://huggingface.co/runwayml/stable-diffusion-v1-5` and instead pass the local path to the cloned folder to `from_pretrained` as shown below.\n- Stable Diffusion can work with a variety of different samplers as is shown below.\n\n## Available Pipelines:\n\n| Pipeline | Tasks | Colab\n|---|---|:---:|\n| [pipeline_stable_diffusion.py](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py) | *Text-to-Image Generation* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/training_example.ipynb)\n| [pipeline_stable_diffusion_img2img](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py) | *Image-to-Image Text-Guided Generation* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/image_2_image_using_diffusers.ipynb)\n| [pipeline_stable_diffusion_inpaint](https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py) | *Text-Guided Image Inpainting* | [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/in_painting_with_stable_diffusion_using_diffusers.ipynb)\n\n## Examples:\n\n### Using Stable Diffusion without being logged into the Hub.\n\nIf you want to download the model weights using a single Python line, you need to be logged in via `huggingface-cli login`. 
\n\n```python\nfrom diffusers import DiffusionPipeline\n\npipeline = DiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\n```\n\nThis however can make it difficult to build applications on top of `diffusers` as you will always have to pass the token around. A potential way to solve this issue is by downloading the weights to a local path `\"./stable-diffusion-v1-5\"`:\n\n```\ngit lfs install\ngit clone https://huggingface.co/runwayml/stable-diffusion-v1-5\n```\n\nand simply passing the local path to `from_pretrained`:\n\n```python\nfrom diffusers import StableDiffusionPipeline\n\npipe = StableDiffusionPipeline.from_pretrained(\"./stable-diffusion-v1-5\")\n```\n\n### Text-to-Image with default PLMS scheduler\n\n```python\n# make sure you're logged in with `huggingface-cli login`\nfrom diffusers import StableDiffusionPipeline\n\npipe = StableDiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\")\npipe = pipe.to(\"cuda\")\n\nprompt = \"a photo of an astronaut riding a horse on mars\"\nimage = pipe(prompt).images[0]  \n    \nimage.save(\"astronaut_rides_horse.png\")\n```\n\n### Text-to-Image with DDIM scheduler\n\n```python\n# make sure you're logged in with `huggingface-cli login`\nfrom diffusers import StableDiffusionPipeline, DDIMScheduler\n\nscheduler =  DDIMScheduler.from_pretrained(\"CompVis/stable-diffusion-v1-4\", subfolder=\"scheduler\")\n\npipe = StableDiffusionPipeline.from_pretrained(\n    \"runwayml/stable-diffusion-v1-5\", \n    scheduler=scheduler,\n).to(\"cuda\")\n\nprompt = \"a photo of an astronaut riding a horse on mars\"\nimage = pipe(prompt).images[0]  \n    \nimage.save(\"astronaut_rides_horse.png\")\n```\n\n### Text-to-Image with K-LMS scheduler\n\n```python\n# make sure you're logged in with `huggingface-cli login`\nfrom diffusers import StableDiffusionPipeline, LMSDiscreteScheduler\n\nlms = LMSDiscreteScheduler.from_pretrained(\"CompVis/stable-diffusion-v1-4\", subfolder=\"scheduler\")\n\npipe = StableDiffusionPipeline.from_pretrained(\n    \"runwayml/stable-diffusion-v1-5\", \n    scheduler=lms,\n).to(\"cuda\")\n\nprompt = \"a photo of an astronaut riding a horse on mars\"\nimage = pipe(prompt).images[0]  \n    \nimage.save(\"astronaut_rides_horse.png\")\n```\n\n### CycleDiffusion using Stable Diffusion and DDIM scheduler\n\n```python\nimport requests\nimport torch\nfrom PIL import Image\nfrom io import BytesIO\n\nfrom diffusers import CycleDiffusionPipeline, DDIMScheduler\n\n\n# load the scheduler. 
CycleDiffusion only supports stochastic schedulers.\n\n# load the pipeline\n# make sure you're logged in with `huggingface-cli login`\nmodel_id_or_path = \"CompVis/stable-diffusion-v1-4\"\nscheduler = DDIMScheduler.from_pretrained(model_id_or_path, subfolder=\"scheduler\")\npipe = CycleDiffusionPipeline.from_pretrained(model_id_or_path, scheduler=scheduler).to(\"cuda\")\n\n# let's download an initial image\nurl = \"https://raw.githubusercontent.com/ChenWu98/cycle-diffusion/main/data/dalle2/An%20astronaut%20riding%20a%20horse.png\"\nresponse = requests.get(url)\ninit_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\ninit_image = init_image.resize((512, 512))\ninit_image.save(\"horse.png\")\n\n# let's specify a prompt\nsource_prompt = \"An astronaut riding a horse\"\nprompt = \"An astronaut riding an elephant\"\n\n# call the pipeline\nimage = pipe(\n    prompt=prompt,\n    source_prompt=source_prompt,\n    image=init_image,\n    num_inference_steps=100,\n    eta=0.1,\n    strength=0.8,\n    guidance_scale=2,\n    source_guidance_scale=1,\n).images[0]\n\nimage.save(\"horse_to_elephant.png\")\n\n# let's try another example\n# See more samples at the original repo: https://github.com/ChenWu98/cycle-diffusion\nurl = \"https://raw.githubusercontent.com/ChenWu98/cycle-diffusion/main/data/dalle2/A%20black%20colored%20car.png\"\nresponse = requests.get(url)\ninit_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\ninit_image = init_image.resize((512, 512))\ninit_image.save(\"black.png\")\n\nsource_prompt = \"A black colored car\"\nprompt = \"A blue colored car\"\n\n# call the pipeline\ntorch.manual_seed(0)\nimage = pipe(\n    prompt=prompt,\n    source_prompt=source_prompt,\n    image=init_image,\n    num_inference_steps=100,\n    eta=0.1,\n    strength=0.85,\n    guidance_scale=3,\n    source_guidance_scale=1,\n).images[0]\n\nimage.save(\"black_to_blue.png\")\n```\n"
  },
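The README above always selects the sampler at load time by passing `scheduler=` to `from_pretrained`. An equivalent pattern is to swap the scheduler on an already-loaded pipeline via `from_config`; a short sketch using `DPMSolverMultistepScheduler` as the example replacement (the 25-step count is an illustrative choice):

```python
# Sketch: swapping schedulers after loading instead of at from_pretrained time.
from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler

pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5").to("cuda")
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)

prompt = "a photo of an astronaut riding a horse on mars"
image = pipe(prompt, num_inference_steps=25).images[0]
image.save("astronaut_rides_horse_dpm.png")
```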
  {
    "path": "diffusers/pipelines/stable_diffusion/__init__.py",
    "content": "from dataclasses import dataclass\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\nfrom PIL import Image\n\nfrom ...utils import (\n    BaseOutput,\n    OptionalDependencyNotAvailable,\n    is_flax_available,\n    is_k_diffusion_available,\n    is_k_diffusion_version,\n    is_onnx_available,\n    is_torch_available,\n    is_transformers_available,\n    is_transformers_version,\n)\n\n\n@dataclass\nclass StableDiffusionPipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for Stable Diffusion pipelines.\n\n    Args:\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n        nsfw_content_detected (`List[bool]`)\n            List of flags denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, or `None` if safety checking could not be performed.\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n    nsfw_content_detected: Optional[List[bool]]\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403\nelse:\n    from .pipeline_cycle_diffusion import CycleDiffusionPipeline\n    from .pipeline_stable_diffusion import StableDiffusionPipeline\n    from .pipeline_stable_diffusion_attend_and_excite import StableDiffusionAttendAndExcitePipeline\n    from .pipeline_stable_diffusion_img2img import StableDiffusionImg2ImgPipeline\n    from .pipeline_stable_diffusion_inpaint import StableDiffusionInpaintPipeline\n    from .pipeline_stable_diffusion_inpaint_legacy import StableDiffusionInpaintPipelineLegacy\n    from .pipeline_stable_diffusion_instruct_pix2pix import StableDiffusionInstructPix2PixPipeline\n    from .pipeline_stable_diffusion_latent_upscale import StableDiffusionLatentUpscalePipeline\n    from .pipeline_stable_diffusion_model_editing import StableDiffusionModelEditingPipeline\n    from .pipeline_stable_diffusion_panorama import StableDiffusionPanoramaPipeline\n    from .pipeline_stable_diffusion_sag import StableDiffusionSAGPipeline\n    from .pipeline_stable_diffusion_upscale import StableDiffusionUpscalePipeline\n    from .pipeline_stable_unclip import StableUnCLIPPipeline\n    from .pipeline_stable_unclip_img2img import StableUnCLIPImg2ImgPipeline\n    from .safety_checker import StableDiffusionSafetyChecker\n    from .stable_unclip_image_normalizer import StableUnCLIPImageNormalizer\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_transformers_version(\">=\", \"4.25.0\")):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import StableDiffusionImageVariationPipeline\nelse:\n    from .pipeline_stable_diffusion_image_variation import StableDiffusionImageVariationPipeline\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_transformers_version(\">=\", \"4.26.0\")):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import (\n        StableDiffusionDepth2ImgPipeline,\n        
StableDiffusionDiffEditPipeline,\n        StableDiffusionPix2PixZeroPipeline,\n    )\nelse:\n    from .pipeline_stable_diffusion_depth2img import StableDiffusionDepth2ImgPipeline\n    from .pipeline_stable_diffusion_diffedit import StableDiffusionDiffEditPipeline\n    from .pipeline_stable_diffusion_pix2pix_zero import StableDiffusionPix2PixZeroPipeline\n\n\ntry:\n    if not (\n        is_torch_available()\n        and is_transformers_available()\n        and is_k_diffusion_available()\n        and is_k_diffusion_version(\">=\", \"0.0.12\")\n    ):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_and_k_diffusion_objects import *  # noqa F403\nelse:\n    from .pipeline_stable_diffusion_k_diffusion import StableDiffusionKDiffusionPipeline\n\ntry:\n    if not (is_transformers_available() and is_onnx_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_onnx_objects import *  # noqa F403\nelse:\n    from .pipeline_onnx_stable_diffusion import OnnxStableDiffusionPipeline, StableDiffusionOnnxPipeline\n    from .pipeline_onnx_stable_diffusion_img2img import OnnxStableDiffusionImg2ImgPipeline\n    from .pipeline_onnx_stable_diffusion_inpaint import OnnxStableDiffusionInpaintPipeline\n    from .pipeline_onnx_stable_diffusion_inpaint_legacy import OnnxStableDiffusionInpaintPipelineLegacy\n    from .pipeline_onnx_stable_diffusion_upscale import OnnxStableDiffusionUpscalePipeline\n\nif is_transformers_available() and is_flax_available():\n    import flax\n\n    @flax.struct.dataclass\n    class FlaxStableDiffusionPipelineOutput(BaseOutput):\n        \"\"\"\n        Output class for Stable Diffusion pipelines.\n\n        Args:\n            images (`np.ndarray`)\n                Array of shape `(batch_size, height, width, num_channels)` with images from the diffusion pipeline.\n            nsfw_content_detected (`List[bool]`)\n                List of flags denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n                (nsfw) content.\n        \"\"\"\n\n        images: np.ndarray\n        nsfw_content_detected: List[bool]\n\n    from ...schedulers.scheduling_pndm_flax import PNDMSchedulerState\n    from .pipeline_flax_stable_diffusion import FlaxStableDiffusionPipeline\n    from .pipeline_flax_stable_diffusion_img2img import FlaxStableDiffusionImg2ImgPipeline\n    from .pipeline_flax_stable_diffusion_inpaint import FlaxStableDiffusionInpaintPipeline\n    from .safety_checker_flax import FlaxStableDiffusionSafetyChecker\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/convert_from_ckpt.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" Conversion script for the Stable Diffusion checkpoints.\"\"\"\n\nimport re\nfrom io import BytesIO\nfrom typing import Optional\n\nimport requests\nimport torch\nfrom transformers import (\n    AutoFeatureExtractor,\n    BertTokenizerFast,\n    CLIPImageProcessor,\n    CLIPTextModel,\n    CLIPTextModelWithProjection,\n    CLIPTokenizer,\n    CLIPVisionConfig,\n    CLIPVisionModelWithProjection,\n)\n\nfrom ...models import (\n    AutoencoderKL,\n    ControlNetModel,\n    PriorTransformer,\n    UNet2DConditionModel,\n)\nfrom ...schedulers import (\n    DDIMScheduler,\n    DDPMScheduler,\n    DPMSolverMultistepScheduler,\n    EulerAncestralDiscreteScheduler,\n    EulerDiscreteScheduler,\n    HeunDiscreteScheduler,\n    LMSDiscreteScheduler,\n    PNDMScheduler,\n    UnCLIPScheduler,\n)\nfrom ...utils import is_omegaconf_available, is_safetensors_available, logging\nfrom ...utils.import_utils import BACKENDS_MAPPING\nfrom ..latent_diffusion.pipeline_latent_diffusion import LDMBertConfig, LDMBertModel\nfrom ..paint_by_example import PaintByExampleImageEncoder\nfrom ..pipeline_utils import DiffusionPipeline\nfrom .safety_checker import StableDiffusionSafetyChecker\nfrom .stable_unclip_image_normalizer import StableUnCLIPImageNormalizer\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef shave_segments(path, n_shave_prefix_segments=1):\n    \"\"\"\n    Removes segments. 
Positive values shave the first segments, negative shave the last segments.\n    \"\"\"\n    if n_shave_prefix_segments >= 0:\n        return \".\".join(path.split(\".\")[n_shave_prefix_segments:])\n    else:\n        return \".\".join(path.split(\".\")[:n_shave_prefix_segments])\n\n\ndef renew_resnet_paths(old_list, n_shave_prefix_segments=0):\n    \"\"\"\n    Updates paths inside resnets to the new naming scheme (local renaming)\n    \"\"\"\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item.replace(\"in_layers.0\", \"norm1\")\n        new_item = new_item.replace(\"in_layers.2\", \"conv1\")\n\n        new_item = new_item.replace(\"out_layers.0\", \"norm2\")\n        new_item = new_item.replace(\"out_layers.3\", \"conv2\")\n\n        new_item = new_item.replace(\"emb_layers.1\", \"time_emb_proj\")\n        new_item = new_item.replace(\"skip_connection\", \"conv_shortcut\")\n\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef renew_vae_resnet_paths(old_list, n_shave_prefix_segments=0):\n    \"\"\"\n    Updates paths inside resnets to the new naming scheme (local renaming)\n    \"\"\"\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item\n\n        new_item = new_item.replace(\"nin_shortcut\", \"conv_shortcut\")\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef renew_attention_paths(old_list, n_shave_prefix_segments=0):\n    \"\"\"\n    Updates paths inside attentions to the new naming scheme (local renaming)\n    \"\"\"\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item\n\n        #         new_item = new_item.replace('norm.weight', 'group_norm.weight')\n        #         new_item = new_item.replace('norm.bias', 'group_norm.bias')\n\n        #         new_item = new_item.replace('proj_out.weight', 'proj_attn.weight')\n        #         new_item = new_item.replace('proj_out.bias', 'proj_attn.bias')\n\n        #         new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef renew_vae_attention_paths(old_list, n_shave_prefix_segments=0):\n    \"\"\"\n    Updates paths inside attentions to the new naming scheme (local renaming)\n    \"\"\"\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item\n\n        new_item = new_item.replace(\"norm.weight\", \"group_norm.weight\")\n        new_item = new_item.replace(\"norm.bias\", \"group_norm.bias\")\n\n        new_item = new_item.replace(\"q.weight\", \"to_q.weight\")\n        new_item = new_item.replace(\"q.bias\", \"to_q.bias\")\n\n        new_item = new_item.replace(\"k.weight\", \"to_k.weight\")\n        new_item = new_item.replace(\"k.bias\", \"to_k.bias\")\n\n        new_item = new_item.replace(\"v.weight\", \"to_v.weight\")\n        new_item = new_item.replace(\"v.bias\", \"to_v.bias\")\n\n        new_item = new_item.replace(\"proj_out.weight\", \"to_out.0.weight\")\n        new_item = new_item.replace(\"proj_out.bias\", \"to_out.0.bias\")\n\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef 
assign_to_checkpoint(\n    paths, checkpoint, old_checkpoint, attention_paths_to_split=None, additional_replacements=None, config=None\n):\n    \"\"\"\n    This does the final conversion step: take locally converted weights and apply a global renaming to them. It splits\n    attention layers, and takes into account additional replacements that may arise.\n\n    Assigns the weights to the new checkpoint.\n    \"\"\"\n    assert isinstance(paths, list), \"Paths should be a list of dicts containing 'old' and 'new' keys.\"\n\n    # Splits the attention layers into three variables.\n    if attention_paths_to_split is not None:\n        for path, path_map in attention_paths_to_split.items():\n            old_tensor = old_checkpoint[path]\n            channels = old_tensor.shape[0] // 3\n\n            target_shape = (-1, channels) if len(old_tensor.shape) == 3 else (-1)\n\n            num_heads = old_tensor.shape[0] // config[\"num_head_channels\"] // 3\n\n            old_tensor = old_tensor.reshape((num_heads, 3 * channels // num_heads) + old_tensor.shape[1:])\n            query, key, value = old_tensor.split(channels // num_heads, dim=1)\n\n            checkpoint[path_map[\"query\"]] = query.reshape(target_shape)\n            checkpoint[path_map[\"key\"]] = key.reshape(target_shape)\n            checkpoint[path_map[\"value\"]] = value.reshape(target_shape)\n\n    for path in paths:\n        new_path = path[\"new\"]\n\n        # These have already been assigned\n        if attention_paths_to_split is not None and new_path in attention_paths_to_split:\n            continue\n\n        # Global renaming happens here\n        new_path = new_path.replace(\"middle_block.0\", \"mid_block.resnets.0\")\n        new_path = new_path.replace(\"middle_block.1\", \"mid_block.attentions.0\")\n        new_path = new_path.replace(\"middle_block.2\", \"mid_block.resnets.1\")\n\n        if additional_replacements is not None:\n            for replacement in additional_replacements:\n                new_path = new_path.replace(replacement[\"old\"], replacement[\"new\"])\n\n        # proj_attn.weight has to be converted from conv 1D to linear\n        is_attn_weight = \"proj_attn.weight\" in new_path or (\"attentions\" in new_path and \"to_\" in new_path)\n        shape = old_checkpoint[path[\"old\"]].shape\n        if is_attn_weight and len(shape) == 3:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]][:, :, 0]\n        elif is_attn_weight and len(shape) == 4:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]][:, :, 0, 0]\n        else:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]]\n\n\ndef conv_attn_to_linear(checkpoint):\n    keys = list(checkpoint.keys())\n    attn_keys = [\"query.weight\", \"key.weight\", \"value.weight\"]\n    for key in keys:\n        if \".\".join(key.split(\".\")[-2:]) in attn_keys:\n            if checkpoint[key].ndim > 2:\n                checkpoint[key] = checkpoint[key][:, :, 0, 0]\n        elif \"proj_attn.weight\" in key:\n            if checkpoint[key].ndim > 2:\n                checkpoint[key] = checkpoint[key][:, :, 0]\n\n\ndef create_unet_diffusers_config(original_config, image_size: int, controlnet=False):\n    \"\"\"\n    Creates a config for the diffusers based on the config of the LDM model.\n    \"\"\"\n    if controlnet:\n        unet_params = original_config.model.params.control_stage_config.params\n    else:\n        unet_params = original_config.model.params.unet_config.params\n\n    vae_params = 
original_config.model.params.first_stage_config.params.ddconfig\n\n    block_out_channels = [unet_params.model_channels * mult for mult in unet_params.channel_mult]\n\n    down_block_types = []\n    resolution = 1\n    for i in range(len(block_out_channels)):\n        block_type = \"CrossAttnDownBlock2D\" if resolution in unet_params.attention_resolutions else \"DownBlock2D\"\n        down_block_types.append(block_type)\n        if i != len(block_out_channels) - 1:\n            resolution *= 2\n\n    up_block_types = []\n    for i in range(len(block_out_channels)):\n        block_type = \"CrossAttnUpBlock2D\" if resolution in unet_params.attention_resolutions else \"UpBlock2D\"\n        up_block_types.append(block_type)\n        resolution //= 2\n\n    vae_scale_factor = 2 ** (len(vae_params.ch_mult) - 1)\n\n    head_dim = unet_params.num_heads if \"num_heads\" in unet_params else None\n    use_linear_projection = (\n        unet_params.use_linear_in_transformer if \"use_linear_in_transformer\" in unet_params else False\n    )\n    if use_linear_projection:\n        # stable diffusion 2-base-512 and 2-768\n        if head_dim is None:\n            head_dim = [5, 10, 20, 20]\n\n    class_embed_type = None\n    projection_class_embeddings_input_dim = None\n\n    if \"num_classes\" in unet_params:\n        if unet_params.num_classes == \"sequential\":\n            class_embed_type = \"projection\"\n            assert \"adm_in_channels\" in unet_params\n            projection_class_embeddings_input_dim = unet_params.adm_in_channels\n        else:\n            raise NotImplementedError(f\"Unknown conditional unet num_classes config: {unet_params.num_classes}\")\n\n    config = {\n        \"sample_size\": image_size // vae_scale_factor,\n        \"in_channels\": unet_params.in_channels,\n        \"down_block_types\": tuple(down_block_types),\n        \"block_out_channels\": tuple(block_out_channels),\n        \"layers_per_block\": unet_params.num_res_blocks,\n        \"cross_attention_dim\": unet_params.context_dim,\n        \"attention_head_dim\": head_dim,\n        \"use_linear_projection\": use_linear_projection,\n        \"class_embed_type\": class_embed_type,\n        \"projection_class_embeddings_input_dim\": projection_class_embeddings_input_dim,\n    }\n\n    if not controlnet:\n        config[\"out_channels\"] = unet_params.out_channels\n        config[\"up_block_types\"] = tuple(up_block_types)\n\n    return config\n\n\ndef create_vae_diffusers_config(original_config, image_size: int):\n    \"\"\"\n    Creates a config for the diffusers based on the config of the LDM model.\n    \"\"\"\n    vae_params = original_config.model.params.first_stage_config.params.ddconfig\n    _ = original_config.model.params.first_stage_config.params.embed_dim\n\n    block_out_channels = [vae_params.ch * mult for mult in vae_params.ch_mult]\n    down_block_types = [\"DownEncoderBlock2D\"] * len(block_out_channels)\n    up_block_types = [\"UpDecoderBlock2D\"] * len(block_out_channels)\n\n    config = {\n        \"sample_size\": image_size,\n        \"in_channels\": vae_params.in_channels,\n        \"out_channels\": vae_params.out_ch,\n        \"down_block_types\": tuple(down_block_types),\n        \"up_block_types\": tuple(up_block_types),\n        \"block_out_channels\": tuple(block_out_channels),\n        \"latent_channels\": vae_params.z_channels,\n        \"layers_per_block\": vae_params.num_res_blocks,\n    }\n    return config\n\n\ndef create_diffusers_schedular(original_config):\n    schedular = 
DDIMScheduler(\n        num_train_timesteps=original_config.model.params.timesteps,\n        beta_start=original_config.model.params.linear_start,\n        beta_end=original_config.model.params.linear_end,\n        beta_schedule=\"scaled_linear\",\n    )\n    return schedular\n\n\ndef create_ldm_bert_config(original_config):\n    bert_params = original_config.model.params.cond_stage_config.params\n    config = LDMBertConfig(\n        d_model=bert_params.n_embed,\n        encoder_layers=bert_params.n_layer,\n        encoder_ffn_dim=bert_params.n_embed * 4,\n    )\n    return config\n\n\ndef convert_ldm_unet_checkpoint(checkpoint, config, path=None, extract_ema=False, controlnet=False):\n    \"\"\"\n    Takes a state dict and a config, and returns a converted checkpoint.\n    \"\"\"\n\n    # extract state_dict for UNet\n    unet_state_dict = {}\n    keys = list(checkpoint.keys())\n\n    if controlnet:\n        unet_key = \"control_model.\"\n    else:\n        unet_key = \"model.diffusion_model.\"\n\n    # at least 100 parameters have to start with `model_ema` in order for the checkpoint to be EMA\n    if sum(k.startswith(\"model_ema\") for k in keys) > 100 and extract_ema:\n        print(f\"Checkpoint {path} has both EMA and non-EMA weights.\")\n        print(\n            \"In this conversion only the EMA weights are extracted. If you want to instead extract the non-EMA\"\n            \" weights (useful to continue fine-tuning), please make sure to remove the `--extract_ema` flag.\"\n        )\n        for key in keys:\n            if key.startswith(\"model.diffusion_model\"):\n                flat_ema_key = \"model_ema.\" + \"\".join(key.split(\".\")[1:])\n                unet_state_dict[key.replace(unet_key, \"\")] = checkpoint.pop(flat_ema_key)\n    else:\n        if sum(k.startswith(\"model_ema\") for k in keys) > 100:\n            print(\n                \"In this conversion only the non-EMA weights are extracted. 
If you want to instead extract the EMA\"\n                \" weights (usually better for inference), please make sure to add the `--extract_ema` flag.\"\n            )\n\n        for key in keys:\n            if key.startswith(unet_key):\n                unet_state_dict[key.replace(unet_key, \"\")] = checkpoint.pop(key)\n\n    new_checkpoint = {}\n\n    new_checkpoint[\"time_embedding.linear_1.weight\"] = unet_state_dict[\"time_embed.0.weight\"]\n    new_checkpoint[\"time_embedding.linear_1.bias\"] = unet_state_dict[\"time_embed.0.bias\"]\n    new_checkpoint[\"time_embedding.linear_2.weight\"] = unet_state_dict[\"time_embed.2.weight\"]\n    new_checkpoint[\"time_embedding.linear_2.bias\"] = unet_state_dict[\"time_embed.2.bias\"]\n\n    if config[\"class_embed_type\"] is None:\n        # No parameters to port\n        ...\n    elif config[\"class_embed_type\"] == \"timestep\" or config[\"class_embed_type\"] == \"projection\":\n        new_checkpoint[\"class_embedding.linear_1.weight\"] = unet_state_dict[\"label_emb.0.0.weight\"]\n        new_checkpoint[\"class_embedding.linear_1.bias\"] = unet_state_dict[\"label_emb.0.0.bias\"]\n        new_checkpoint[\"class_embedding.linear_2.weight\"] = unet_state_dict[\"label_emb.0.2.weight\"]\n        new_checkpoint[\"class_embedding.linear_2.bias\"] = unet_state_dict[\"label_emb.0.2.bias\"]\n    else:\n        raise NotImplementedError(f\"Not implemented `class_embed_type`: {config['class_embed_type']}\")\n\n    new_checkpoint[\"conv_in.weight\"] = unet_state_dict[\"input_blocks.0.0.weight\"]\n    new_checkpoint[\"conv_in.bias\"] = unet_state_dict[\"input_blocks.0.0.bias\"]\n\n    if not controlnet:\n        new_checkpoint[\"conv_norm_out.weight\"] = unet_state_dict[\"out.0.weight\"]\n        new_checkpoint[\"conv_norm_out.bias\"] = unet_state_dict[\"out.0.bias\"]\n        new_checkpoint[\"conv_out.weight\"] = unet_state_dict[\"out.2.weight\"]\n        new_checkpoint[\"conv_out.bias\"] = unet_state_dict[\"out.2.bias\"]\n\n    # Retrieves the keys for the input blocks only\n    num_input_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in unet_state_dict if \"input_blocks\" in layer})\n    input_blocks = {\n        layer_id: [key for key in unet_state_dict if f\"input_blocks.{layer_id}\" in key]\n        for layer_id in range(num_input_blocks)\n    }\n\n    # Retrieves the keys for the middle blocks only\n    num_middle_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in unet_state_dict if \"middle_block\" in layer})\n    middle_blocks = {\n        layer_id: [key for key in unet_state_dict if f\"middle_block.{layer_id}\" in key]\n        for layer_id in range(num_middle_blocks)\n    }\n\n    # Retrieves the keys for the output blocks only\n    num_output_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in unet_state_dict if \"output_blocks\" in layer})\n    output_blocks = {\n        layer_id: [key for key in unet_state_dict if f\"output_blocks.{layer_id}\" in key]\n        for layer_id in range(num_output_blocks)\n    }\n\n    for i in range(1, num_input_blocks):\n        block_id = (i - 1) // (config[\"layers_per_block\"] + 1)\n        layer_in_block_id = (i - 1) % (config[\"layers_per_block\"] + 1)\n\n        resnets = [\n            key for key in input_blocks[i] if f\"input_blocks.{i}.0\" in key and f\"input_blocks.{i}.0.op\" not in key\n        ]\n        attentions = [key for key in input_blocks[i] if f\"input_blocks.{i}.1\" in key]\n\n        if f\"input_blocks.{i}.0.op.weight\" in unet_state_dict:\n            
new_checkpoint[f\"down_blocks.{block_id}.downsamplers.0.conv.weight\"] = unet_state_dict.pop(\n                f\"input_blocks.{i}.0.op.weight\"\n            )\n            new_checkpoint[f\"down_blocks.{block_id}.downsamplers.0.conv.bias\"] = unet_state_dict.pop(\n                f\"input_blocks.{i}.0.op.bias\"\n            )\n\n        paths = renew_resnet_paths(resnets)\n        meta_path = {\"old\": f\"input_blocks.{i}.0\", \"new\": f\"down_blocks.{block_id}.resnets.{layer_in_block_id}\"}\n        assign_to_checkpoint(\n            paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config\n        )\n\n        if len(attentions):\n            paths = renew_attention_paths(attentions)\n            meta_path = {\"old\": f\"input_blocks.{i}.1\", \"new\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}\"}\n            assign_to_checkpoint(\n                paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config\n            )\n\n    resnet_0 = middle_blocks[0]\n    attentions = middle_blocks[1]\n    resnet_1 = middle_blocks[2]\n\n    resnet_0_paths = renew_resnet_paths(resnet_0)\n    assign_to_checkpoint(resnet_0_paths, new_checkpoint, unet_state_dict, config=config)\n\n    resnet_1_paths = renew_resnet_paths(resnet_1)\n    assign_to_checkpoint(resnet_1_paths, new_checkpoint, unet_state_dict, config=config)\n\n    attentions_paths = renew_attention_paths(attentions)\n    meta_path = {\"old\": \"middle_block.1\", \"new\": \"mid_block.attentions.0\"}\n    assign_to_checkpoint(\n        attentions_paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config\n    )\n\n    for i in range(num_output_blocks):\n        block_id = i // (config[\"layers_per_block\"] + 1)\n        layer_in_block_id = i % (config[\"layers_per_block\"] + 1)\n        output_block_layers = [shave_segments(name, 2) for name in output_blocks[i]]\n        output_block_list = {}\n\n        for layer in output_block_layers:\n            layer_id, layer_name = layer.split(\".\")[0], shave_segments(layer, 1)\n            if layer_id in output_block_list:\n                output_block_list[layer_id].append(layer_name)\n            else:\n                output_block_list[layer_id] = [layer_name]\n\n        if len(output_block_list) > 1:\n            resnets = [key for key in output_blocks[i] if f\"output_blocks.{i}.0\" in key]\n            attentions = [key for key in output_blocks[i] if f\"output_blocks.{i}.1\" in key]\n\n            resnet_0_paths = renew_resnet_paths(resnets)\n            paths = renew_resnet_paths(resnets)\n\n            meta_path = {\"old\": f\"output_blocks.{i}.0\", \"new\": f\"up_blocks.{block_id}.resnets.{layer_in_block_id}\"}\n            assign_to_checkpoint(\n                paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config\n            )\n\n            output_block_list = {k: sorted(v) for k, v in output_block_list.items()}\n            if [\"conv.bias\", \"conv.weight\"] in output_block_list.values():\n                index = list(output_block_list.values()).index([\"conv.bias\", \"conv.weight\"])\n                new_checkpoint[f\"up_blocks.{block_id}.upsamplers.0.conv.weight\"] = unet_state_dict[\n                    f\"output_blocks.{i}.{index}.conv.weight\"\n                ]\n                new_checkpoint[f\"up_blocks.{block_id}.upsamplers.0.conv.bias\"] = unet_state_dict[\n                    f\"output_blocks.{i}.{index}.conv.bias\"\n              
  ]\n\n                # Clear attentions as they have been attributed above.\n                if len(attentions) == 2:\n                    attentions = []\n\n            if len(attentions):\n                paths = renew_attention_paths(attentions)\n                meta_path = {\n                    \"old\": f\"output_blocks.{i}.1\",\n                    \"new\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}\",\n                }\n                assign_to_checkpoint(\n                    paths, new_checkpoint, unet_state_dict, additional_replacements=[meta_path], config=config\n                )\n        else:\n            resnet_0_paths = renew_resnet_paths(output_block_layers, n_shave_prefix_segments=1)\n            for path in resnet_0_paths:\n                old_path = \".\".join([\"output_blocks\", str(i), path[\"old\"]])\n                new_path = \".\".join([\"up_blocks\", str(block_id), \"resnets\", str(layer_in_block_id), path[\"new\"]])\n\n                new_checkpoint[new_path] = unet_state_dict[old_path]\n\n    if controlnet:\n        # conditioning embedding\n\n        orig_index = 0\n\n        new_checkpoint[\"controlnet_cond_embedding.conv_in.weight\"] = unet_state_dict.pop(\n            f\"input_hint_block.{orig_index}.weight\"\n        )\n        new_checkpoint[\"controlnet_cond_embedding.conv_in.bias\"] = unet_state_dict.pop(\n            f\"input_hint_block.{orig_index}.bias\"\n        )\n\n        orig_index += 2\n\n        diffusers_index = 0\n\n        while diffusers_index < 6:\n            new_checkpoint[f\"controlnet_cond_embedding.blocks.{diffusers_index}.weight\"] = unet_state_dict.pop(\n                f\"input_hint_block.{orig_index}.weight\"\n            )\n            new_checkpoint[f\"controlnet_cond_embedding.blocks.{diffusers_index}.bias\"] = unet_state_dict.pop(\n                f\"input_hint_block.{orig_index}.bias\"\n            )\n            diffusers_index += 1\n            orig_index += 2\n\n        new_checkpoint[\"controlnet_cond_embedding.conv_out.weight\"] = unet_state_dict.pop(\n            f\"input_hint_block.{orig_index}.weight\"\n        )\n        new_checkpoint[\"controlnet_cond_embedding.conv_out.bias\"] = unet_state_dict.pop(\n            f\"input_hint_block.{orig_index}.bias\"\n        )\n\n        # down blocks\n        for i in range(num_input_blocks):\n            new_checkpoint[f\"controlnet_down_blocks.{i}.weight\"] = unet_state_dict.pop(f\"zero_convs.{i}.0.weight\")\n            new_checkpoint[f\"controlnet_down_blocks.{i}.bias\"] = unet_state_dict.pop(f\"zero_convs.{i}.0.bias\")\n\n        # mid block\n        new_checkpoint[\"controlnet_mid_block.weight\"] = unet_state_dict.pop(\"middle_block_out.0.weight\")\n        new_checkpoint[\"controlnet_mid_block.bias\"] = unet_state_dict.pop(\"middle_block_out.0.bias\")\n\n    return new_checkpoint\n\n\ndef convert_ldm_vae_checkpoint(checkpoint, config):\n    # extract state dict for VAE\n    vae_state_dict = {}\n    vae_key = \"first_stage_model.\"\n    keys = list(checkpoint.keys())\n    for key in keys:\n        if key.startswith(vae_key):\n            vae_state_dict[key.replace(vae_key, \"\")] = checkpoint.get(key)\n\n    new_checkpoint = {}\n\n    new_checkpoint[\"encoder.conv_in.weight\"] = vae_state_dict[\"encoder.conv_in.weight\"]\n    new_checkpoint[\"encoder.conv_in.bias\"] = vae_state_dict[\"encoder.conv_in.bias\"]\n    new_checkpoint[\"encoder.conv_out.weight\"] = vae_state_dict[\"encoder.conv_out.weight\"]\n    new_checkpoint[\"encoder.conv_out.bias\"] = 
vae_state_dict[\"encoder.conv_out.bias\"]\n    new_checkpoint[\"encoder.conv_norm_out.weight\"] = vae_state_dict[\"encoder.norm_out.weight\"]\n    new_checkpoint[\"encoder.conv_norm_out.bias\"] = vae_state_dict[\"encoder.norm_out.bias\"]\n\n    new_checkpoint[\"decoder.conv_in.weight\"] = vae_state_dict[\"decoder.conv_in.weight\"]\n    new_checkpoint[\"decoder.conv_in.bias\"] = vae_state_dict[\"decoder.conv_in.bias\"]\n    new_checkpoint[\"decoder.conv_out.weight\"] = vae_state_dict[\"decoder.conv_out.weight\"]\n    new_checkpoint[\"decoder.conv_out.bias\"] = vae_state_dict[\"decoder.conv_out.bias\"]\n    new_checkpoint[\"decoder.conv_norm_out.weight\"] = vae_state_dict[\"decoder.norm_out.weight\"]\n    new_checkpoint[\"decoder.conv_norm_out.bias\"] = vae_state_dict[\"decoder.norm_out.bias\"]\n\n    new_checkpoint[\"quant_conv.weight\"] = vae_state_dict[\"quant_conv.weight\"]\n    new_checkpoint[\"quant_conv.bias\"] = vae_state_dict[\"quant_conv.bias\"]\n    new_checkpoint[\"post_quant_conv.weight\"] = vae_state_dict[\"post_quant_conv.weight\"]\n    new_checkpoint[\"post_quant_conv.bias\"] = vae_state_dict[\"post_quant_conv.bias\"]\n\n    # Retrieves the keys for the encoder down blocks only\n    num_down_blocks = len({\".\".join(layer.split(\".\")[:3]) for layer in vae_state_dict if \"encoder.down\" in layer})\n    down_blocks = {\n        layer_id: [key for key in vae_state_dict if f\"down.{layer_id}\" in key] for layer_id in range(num_down_blocks)\n    }\n\n    # Retrieves the keys for the decoder up blocks only\n    num_up_blocks = len({\".\".join(layer.split(\".\")[:3]) for layer in vae_state_dict if \"decoder.up\" in layer})\n    up_blocks = {\n        layer_id: [key for key in vae_state_dict if f\"up.{layer_id}\" in key] for layer_id in range(num_up_blocks)\n    }\n\n    for i in range(num_down_blocks):\n        resnets = [key for key in down_blocks[i] if f\"down.{i}\" in key and f\"down.{i}.downsample\" not in key]\n\n        if f\"encoder.down.{i}.downsample.conv.weight\" in vae_state_dict:\n            new_checkpoint[f\"encoder.down_blocks.{i}.downsamplers.0.conv.weight\"] = vae_state_dict.pop(\n                f\"encoder.down.{i}.downsample.conv.weight\"\n            )\n            new_checkpoint[f\"encoder.down_blocks.{i}.downsamplers.0.conv.bias\"] = vae_state_dict.pop(\n                f\"encoder.down.{i}.downsample.conv.bias\"\n            )\n\n        paths = renew_vae_resnet_paths(resnets)\n        meta_path = {\"old\": f\"down.{i}.block\", \"new\": f\"down_blocks.{i}.resnets\"}\n        assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)\n\n    mid_resnets = [key for key in vae_state_dict if \"encoder.mid.block\" in key]\n    num_mid_res_blocks = 2\n    for i in range(1, num_mid_res_blocks + 1):\n        resnets = [key for key in mid_resnets if f\"encoder.mid.block_{i}\" in key]\n\n        paths = renew_vae_resnet_paths(resnets)\n        meta_path = {\"old\": f\"mid.block_{i}\", \"new\": f\"mid_block.resnets.{i - 1}\"}\n        assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)\n\n    mid_attentions = [key for key in vae_state_dict if \"encoder.mid.attn\" in key]\n    paths = renew_vae_attention_paths(mid_attentions)\n    meta_path = {\"old\": \"mid.attn_1\", \"new\": \"mid_block.attentions.0\"}\n    assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)\n    
conv_attn_to_linear(new_checkpoint)\n\n    for i in range(num_up_blocks):\n        block_id = num_up_blocks - 1 - i\n        resnets = [\n            key for key in up_blocks[block_id] if f\"up.{block_id}\" in key and f\"up.{block_id}.upsample\" not in key\n        ]\n\n        if f\"decoder.up.{block_id}.upsample.conv.weight\" in vae_state_dict:\n            new_checkpoint[f\"decoder.up_blocks.{i}.upsamplers.0.conv.weight\"] = vae_state_dict[\n                f\"decoder.up.{block_id}.upsample.conv.weight\"\n            ]\n            new_checkpoint[f\"decoder.up_blocks.{i}.upsamplers.0.conv.bias\"] = vae_state_dict[\n                f\"decoder.up.{block_id}.upsample.conv.bias\"\n            ]\n\n        paths = renew_vae_resnet_paths(resnets)\n        meta_path = {\"old\": f\"up.{block_id}.block\", \"new\": f\"up_blocks.{i}.resnets\"}\n        assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)\n\n    mid_resnets = [key for key in vae_state_dict if \"decoder.mid.block\" in key]\n    num_mid_res_blocks = 2\n    for i in range(1, num_mid_res_blocks + 1):\n        resnets = [key for key in mid_resnets if f\"decoder.mid.block_{i}\" in key]\n\n        paths = renew_vae_resnet_paths(resnets)\n        meta_path = {\"old\": f\"mid.block_{i}\", \"new\": f\"mid_block.resnets.{i - 1}\"}\n        assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)\n\n    mid_attentions = [key for key in vae_state_dict if \"decoder.mid.attn\" in key]\n    paths = renew_vae_attention_paths(mid_attentions)\n    meta_path = {\"old\": \"mid.attn_1\", \"new\": \"mid_block.attentions.0\"}\n    assign_to_checkpoint(paths, new_checkpoint, vae_state_dict, additional_replacements=[meta_path], config=config)\n    conv_attn_to_linear(new_checkpoint)\n    return new_checkpoint\n\n\ndef convert_ldm_bert_checkpoint(checkpoint, config):\n    def _copy_attn_layer(hf_attn_layer, pt_attn_layer):\n        hf_attn_layer.q_proj.weight.data = pt_attn_layer.to_q.weight\n        hf_attn_layer.k_proj.weight.data = pt_attn_layer.to_k.weight\n        hf_attn_layer.v_proj.weight.data = pt_attn_layer.to_v.weight\n\n        hf_attn_layer.out_proj.weight = pt_attn_layer.to_out.weight\n        hf_attn_layer.out_proj.bias = pt_attn_layer.to_out.bias\n\n    def _copy_linear(hf_linear, pt_linear):\n        hf_linear.weight = pt_linear.weight\n        hf_linear.bias = pt_linear.bias\n\n    def _copy_layer(hf_layer, pt_layer):\n        # copy layer norms\n        _copy_linear(hf_layer.self_attn_layer_norm, pt_layer[0][0])\n        _copy_linear(hf_layer.final_layer_norm, pt_layer[1][0])\n\n        # copy attn\n        _copy_attn_layer(hf_layer.self_attn, pt_layer[0][1])\n\n        # copy MLP\n        pt_mlp = pt_layer[1][1]\n        _copy_linear(hf_layer.fc1, pt_mlp.net[0][0])\n        _copy_linear(hf_layer.fc2, pt_mlp.net[2])\n\n    def _copy_layers(hf_layers, pt_layers):\n        for i, hf_layer in enumerate(hf_layers):\n            if i != 0:\n                i += i\n            pt_layer = pt_layers[i : i + 2]\n            _copy_layer(hf_layer, pt_layer)\n\n    hf_model = LDMBertModel(config).eval()\n\n    # copy  embeds\n    hf_model.model.embed_tokens.weight = checkpoint.transformer.token_emb.weight\n    hf_model.model.embed_positions.weight.data = checkpoint.transformer.pos_emb.emb.weight\n\n    # copy layer norm\n    _copy_linear(hf_model.model.layer_norm, checkpoint.transformer.norm)\n\n    # copy hidden layers\n    
_copy_layers(hf_model.model.layers, checkpoint.transformer.attn_layers.layers)\n\n    _copy_linear(hf_model.to_logits, checkpoint.transformer.to_logits)\n\n    return hf_model\n\n\ndef convert_ldm_clip_checkpoint(checkpoint):\n    text_model = CLIPTextModel.from_pretrained(\"openai/clip-vit-large-patch14\")\n\n    keys = list(checkpoint.keys())\n\n    text_model_dict = {}\n\n    for key in keys:\n        if key.startswith(\"cond_stage_model.transformer\"):\n            text_model_dict[key[len(\"cond_stage_model.transformer.\") :]] = checkpoint[key]\n\n    text_model.load_state_dict(text_model_dict)\n\n    return text_model\n\n\ntextenc_conversion_lst = [\n    (\"cond_stage_model.model.positional_embedding\", \"text_model.embeddings.position_embedding.weight\"),\n    (\"cond_stage_model.model.token_embedding.weight\", \"text_model.embeddings.token_embedding.weight\"),\n    (\"cond_stage_model.model.ln_final.weight\", \"text_model.final_layer_norm.weight\"),\n    (\"cond_stage_model.model.ln_final.bias\", \"text_model.final_layer_norm.bias\"),\n]\ntextenc_conversion_map = {x[0]: x[1] for x in textenc_conversion_lst}\n\ntextenc_transformer_conversion_lst = [\n    # (stable-diffusion, HF Diffusers)\n    (\"resblocks.\", \"text_model.encoder.layers.\"),\n    (\"ln_1\", \"layer_norm1\"),\n    (\"ln_2\", \"layer_norm2\"),\n    (\".c_fc.\", \".fc1.\"),\n    (\".c_proj.\", \".fc2.\"),\n    (\".attn\", \".self_attn\"),\n    (\"ln_final.\", \"transformer.text_model.final_layer_norm.\"),\n    (\"token_embedding.weight\", \"transformer.text_model.embeddings.token_embedding.weight\"),\n    (\"positional_embedding\", \"transformer.text_model.embeddings.position_embedding.weight\"),\n]\nprotected = {re.escape(x[0]): x[1] for x in textenc_transformer_conversion_lst}\ntextenc_pattern = re.compile(\"|\".join(protected.keys()))\n\n\ndef convert_paint_by_example_checkpoint(checkpoint):\n    config = CLIPVisionConfig.from_pretrained(\"openai/clip-vit-large-patch14\")\n    model = PaintByExampleImageEncoder(config)\n\n    keys = list(checkpoint.keys())\n\n    text_model_dict = {}\n\n    for key in keys:\n        if key.startswith(\"cond_stage_model.transformer\"):\n            text_model_dict[key[len(\"cond_stage_model.transformer.\") :]] = checkpoint[key]\n\n    # load clip vision\n    model.model.load_state_dict(text_model_dict)\n\n    # load mapper\n    keys_mapper = {\n        k[len(\"cond_stage_model.mapper.res\") :]: v\n        for k, v in checkpoint.items()\n        if k.startswith(\"cond_stage_model.mapper\")\n    }\n\n    MAPPING = {\n        \"attn.c_qkv\": [\"attn1.to_q\", \"attn1.to_k\", \"attn1.to_v\"],\n        \"attn.c_proj\": [\"attn1.to_out.0\"],\n        \"ln_1\": [\"norm1\"],\n        \"ln_2\": [\"norm3\"],\n        \"mlp.c_fc\": [\"ff.net.0.proj\"],\n        \"mlp.c_proj\": [\"ff.net.2\"],\n    }\n\n    mapped_weights = {}\n    for key, value in keys_mapper.items():\n        prefix = key[: len(\"blocks.i\")]\n        suffix = key.split(prefix)[-1].split(\".\")[-1]\n        name = key.split(prefix)[-1].split(suffix)[0][1:-1]\n        mapped_names = MAPPING[name]\n\n        num_splits = len(mapped_names)\n        for i, mapped_name in enumerate(mapped_names):\n            new_name = \".\".join([prefix, mapped_name, suffix])\n            shape = value.shape[0] // num_splits\n            mapped_weights[new_name] = value[i * shape : (i + 1) * shape]\n\n    model.mapper.load_state_dict(mapped_weights)\n\n    # load final layer norm\n    model.final_layer_norm.load_state_dict(\n        {\n           
 \"bias\": checkpoint[\"cond_stage_model.final_ln.bias\"],\n            \"weight\": checkpoint[\"cond_stage_model.final_ln.weight\"],\n        }\n    )\n\n    # load final proj\n    model.proj_out.load_state_dict(\n        {\n            \"bias\": checkpoint[\"proj_out.bias\"],\n            \"weight\": checkpoint[\"proj_out.weight\"],\n        }\n    )\n\n    # load uncond vector\n    model.uncond_vector.data = torch.nn.Parameter(checkpoint[\"learnable_vector\"])\n    return model\n\n\ndef convert_open_clip_checkpoint(checkpoint):\n    text_model = CLIPTextModel.from_pretrained(\"stabilityai/stable-diffusion-2\", subfolder=\"text_encoder\")\n\n    keys = list(checkpoint.keys())\n\n    text_model_dict = {}\n\n    if \"cond_stage_model.model.text_projection\" in checkpoint:\n        d_model = int(checkpoint[\"cond_stage_model.model.text_projection\"].shape[0])\n    else:\n        d_model = 1024\n\n    text_model_dict[\"text_model.embeddings.position_ids\"] = text_model.text_model.embeddings.get_buffer(\"position_ids\")\n\n    for key in keys:\n        if \"resblocks.23\" in key:  # Diffusers drops the final layer and only uses the penultimate layer\n            continue\n        if key in textenc_conversion_map:\n            text_model_dict[textenc_conversion_map[key]] = checkpoint[key]\n        if key.startswith(\"cond_stage_model.model.transformer.\"):\n            new_key = key[len(\"cond_stage_model.model.transformer.\") :]\n            if new_key.endswith(\".in_proj_weight\"):\n                new_key = new_key[: -len(\".in_proj_weight\")]\n                new_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], new_key)\n                text_model_dict[new_key + \".q_proj.weight\"] = checkpoint[key][:d_model, :]\n                text_model_dict[new_key + \".k_proj.weight\"] = checkpoint[key][d_model : d_model * 2, :]\n                text_model_dict[new_key + \".v_proj.weight\"] = checkpoint[key][d_model * 2 :, :]\n            elif new_key.endswith(\".in_proj_bias\"):\n                new_key = new_key[: -len(\".in_proj_bias\")]\n                new_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], new_key)\n                text_model_dict[new_key + \".q_proj.bias\"] = checkpoint[key][:d_model]\n                text_model_dict[new_key + \".k_proj.bias\"] = checkpoint[key][d_model : d_model * 2]\n                text_model_dict[new_key + \".v_proj.bias\"] = checkpoint[key][d_model * 2 :]\n            else:\n                new_key = textenc_pattern.sub(lambda m: protected[re.escape(m.group(0))], new_key)\n\n                text_model_dict[new_key] = checkpoint[key]\n\n    text_model.load_state_dict(text_model_dict)\n\n    return text_model\n\n\ndef stable_unclip_image_encoder(original_config):\n    \"\"\"\n    Returns the image processor and clip image encoder for the img2img unclip pipeline.\n\n    We currently know of two types of stable unclip models which separately use the clip and the openclip image\n    encoders.\n    \"\"\"\n\n    image_embedder_config = original_config.model.params.embedder_config\n\n    sd_clip_image_embedder_class = image_embedder_config.target\n    sd_clip_image_embedder_class = sd_clip_image_embedder_class.split(\".\")[-1]\n\n    if sd_clip_image_embedder_class == \"ClipImageEmbedder\":\n        clip_model_name = image_embedder_config.params.model\n\n        if clip_model_name == \"ViT-L/14\":\n            feature_extractor = CLIPImageProcessor()\n            image_encoder = 
CLIPVisionModelWithProjection.from_pretrained(\"openai/clip-vit-large-patch14\")\n        else:\n            raise NotImplementedError(f\"Unknown CLIP checkpoint name in stable diffusion checkpoint {clip_model_name}\")\n\n    elif sd_clip_image_embedder_class == \"FrozenOpenCLIPImageEmbedder\":\n        feature_extractor = CLIPImageProcessor()\n        image_encoder = CLIPVisionModelWithProjection.from_pretrained(\"laion/CLIP-ViT-H-14-laion2B-s32B-b79K\")\n    else:\n        raise NotImplementedError(\n            f\"Unknown CLIP image embedder class in stable diffusion checkpoint {sd_clip_image_embedder_class}\"\n        )\n\n    return feature_extractor, image_encoder\n\n\ndef stable_unclip_image_noising_components(\n    original_config, clip_stats_path: Optional[str] = None, device: Optional[str] = None\n):\n    \"\"\"\n    Returns the noising components for the img2img and txt2img unclip pipelines.\n\n    Converts the stability noise augmentor into\n    1. a `StableUnCLIPImageNormalizer` for holding the CLIP stats\n    2. a `DDPMScheduler` for holding the noise schedule\n\n    If the noise augmentor config specifies a clip stats path, the `clip_stats_path` must be provided.\n    \"\"\"\n    noise_aug_config = original_config.model.params.noise_aug_config\n    noise_aug_class = noise_aug_config.target\n    noise_aug_class = noise_aug_class.split(\".\")[-1]\n\n    if noise_aug_class == \"CLIPEmbeddingNoiseAugmentation\":\n        noise_aug_config = noise_aug_config.params\n        embedding_dim = noise_aug_config.timestep_dim\n        max_noise_level = noise_aug_config.noise_schedule_config.timesteps\n        beta_schedule = noise_aug_config.noise_schedule_config.beta_schedule\n\n        image_normalizer = StableUnCLIPImageNormalizer(embedding_dim=embedding_dim)\n        image_noising_scheduler = DDPMScheduler(num_train_timesteps=max_noise_level, beta_schedule=beta_schedule)\n\n        if \"clip_stats_path\" in noise_aug_config:\n            if clip_stats_path is None:\n                raise ValueError(\"This stable unclip config requires a `clip_stats_path`\")\n\n            clip_mean, clip_std = torch.load(clip_stats_path, map_location=device)\n            clip_mean = clip_mean[None, :]\n            clip_std = clip_std[None, :]\n\n            clip_stats_state_dict = {\n                \"mean\": clip_mean,\n                \"std\": clip_std,\n            }\n\n            image_normalizer.load_state_dict(clip_stats_state_dict)\n    else:\n        raise NotImplementedError(f\"Unknown noise augmentor class: {noise_aug_class}\")\n\n    return image_normalizer, image_noising_scheduler\n\n\ndef convert_controlnet_checkpoint(\n    checkpoint, original_config, checkpoint_path, image_size, upcast_attention, extract_ema\n):\n    ctrlnet_config = create_unet_diffusers_config(original_config, image_size=image_size, controlnet=True)\n    ctrlnet_config[\"upcast_attention\"] = upcast_attention\n\n    ctrlnet_config.pop(\"sample_size\")\n\n    controlnet_model = ControlNetModel(**ctrlnet_config)\n\n    converted_ctrl_checkpoint = convert_ldm_unet_checkpoint(\n        checkpoint, ctrlnet_config, path=checkpoint_path, extract_ema=extract_ema, controlnet=True\n    )\n\n    controlnet_model.load_state_dict(converted_ctrl_checkpoint)\n\n    return controlnet_model\n\n\ndef download_from_original_stable_diffusion_ckpt(\n    checkpoint_path: str,\n    original_config_file: str = None,\n    image_size: int = 512,\n    prediction_type: str = None,\n    model_type: str = None,\n    extract_ema: bool = False,\n  
  scheduler_type: str = \"pndm\",\n    num_in_channels: Optional[int] = None,\n    upcast_attention: Optional[bool] = None,\n    device: str = None,\n    from_safetensors: bool = False,\n    stable_unclip: Optional[str] = None,\n    stable_unclip_prior: Optional[str] = None,\n    clip_stats_path: Optional[str] = None,\n    controlnet: Optional[bool] = None,\n    load_safety_checker: bool = True,\n    pipeline_class: DiffusionPipeline = None,\n) -> DiffusionPipeline:\n    \"\"\"\n    Load a Stable Diffusion pipeline object from a CompVis-style `.ckpt`/`.safetensors` file and (ideally) a `.yaml`\n    config file.\n\n    Although many of the arguments can be automatically inferred, some of these rely on brittle checks against the\n    global step count, which will likely fail for models that have undergone further fine-tuning. Therefore, it is\n    recommended that you override the default values and/or supply an `original_config_file` wherever possible.\n\n    Args:\n        checkpoint_path (`str`): Path to `.ckpt` file.\n        original_config_file (`str`):\n            Path to `.yaml` config file corresponding to the original architecture. If `None`, will be automatically\n            inferred by looking for a key that only exists in SD2.0 models.\n        image_size (`int`, *optional*, defaults to 512):\n            The image size that the model was trained on. Use 512 for Stable Diffusion v1.X and Stable Diffusion v2\n            Base. Use 768 for Stable Diffusion v2.\n        prediction_type (`str`, *optional*):\n            The prediction type that the model was trained on. Use `'epsilon'` for Stable Diffusion v1.X and Stable\n            Diffusion v2 Base. Use `'v_prediction'` for Stable Diffusion v2.\n        num_in_channels (`int`, *optional*, defaults to None):\n            The number of input channels. If `None`, it will be automatically inferred.\n        scheduler_type (`str`, *optional*, defaults to 'pndm'):\n            Type of scheduler to use. Should be one of `[\"pndm\", \"lms\", \"heun\", \"euler\", \"euler-ancestral\", \"dpm\",\n            \"ddim\"]`.\n        model_type (`str`, *optional*, defaults to `None`):\n            The pipeline type. `None` to automatically infer, or one of `[\"FrozenOpenCLIPEmbedder\",\n            \"FrozenCLIPEmbedder\", \"PaintByExample\"]`.\n        is_img2img (`bool`, *optional*, defaults to `False`):\n            Whether the model should be loaded as an img2img pipeline.\n        extract_ema (`bool`, *optional*, defaults to `False`): Only relevant for\n            checkpoints that have both EMA and non-EMA weights. Whether to extract the EMA weights or not. Defaults to\n            `False`. Pass `True` to extract the EMA weights. EMA weights usually yield higher quality images for\n            inference. Non-EMA weights are usually better to continue fine-tuning.\n        upcast_attention (`bool`, *optional*, defaults to `None`):\n            Whether the attention computation should always be upcasted. This is necessary when running stable\n            diffusion 2.1.\n        device (`str`, *optional*, defaults to `None`):\n            The device to use. Pass `None` to determine automatically.\n        from_safetensors (`str`, *optional*, defaults to `False`):\n            If `checkpoint_path` is in `safetensors` format, load checkpoint with safetensors instead of PyTorch.\n        load_safety_checker (`bool`, *optional*, defaults to `True`):\n            Whether to load the safety checker or not. 
Defaults to `True`.\n        pipeline_class (`str`, *optional*, defaults to `None`):\n            The pipeline class to use. Pass `None` to determine automatically.\n        return: A StableDiffusionPipeline object representing the passed-in `.ckpt`/`.safetensors` file.\n    \"\"\"\n\n    # import pipelines here to avoid circular import error when using from_ckpt method\n    from diffusers import (\n        LDMTextToImagePipeline,\n        PaintByExamplePipeline,\n        StableDiffusionControlNetPipeline,\n        StableDiffusionPipeline,\n        StableUnCLIPImg2ImgPipeline,\n        StableUnCLIPPipeline,\n    )\n\n    if pipeline_class is None:\n        pipeline_class = StableDiffusionPipeline\n\n    if prediction_type == \"v-prediction\":\n        prediction_type = \"v_prediction\"\n\n    if not is_omegaconf_available():\n        raise ValueError(BACKENDS_MAPPING[\"omegaconf\"][1])\n\n    from omegaconf import OmegaConf\n\n    if from_safetensors:\n        if not is_safetensors_available():\n            raise ValueError(BACKENDS_MAPPING[\"safetensors\"][1])\n\n        from safetensors import safe_open\n\n        checkpoint = {}\n        with safe_open(checkpoint_path, framework=\"pt\", device=\"cpu\") as f:\n            for key in f.keys():\n                checkpoint[key] = f.get_tensor(key)\n    else:\n        if device is None:\n            device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n            checkpoint = torch.load(checkpoint_path, map_location=device)\n        else:\n            checkpoint = torch.load(checkpoint_path, map_location=device)\n\n    # Sometimes models don't have the global_step item\n    if \"global_step\" in checkpoint:\n        global_step = checkpoint[\"global_step\"]\n    else:\n        print(\"global_step key not found in model\")\n        global_step = None\n\n    # NOTE: this while loop isn't great but this controlnet checkpoint has one additional\n    # \"state_dict\" key https://huggingface.co/thibaud/controlnet-canny-sd21\n    while \"state_dict\" in checkpoint:\n        checkpoint = checkpoint[\"state_dict\"]\n\n    if original_config_file is None:\n        key_name = \"model.diffusion_model.input_blocks.2.1.transformer_blocks.0.attn2.to_k.weight\"\n\n        # model_type = \"v1\"\n        config_url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/configs/stable-diffusion/v1-inference.yaml\"\n\n        if key_name in checkpoint and checkpoint[key_name].shape[-1] == 1024:\n            # model_type = \"v2\"\n            config_url = \"https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml\"\n\n            if global_step == 110000:\n                # v2.1 needs to upcast attention\n                upcast_attention = True\n\n        original_config_file = BytesIO(requests.get(config_url).content)\n\n    original_config = OmegaConf.load(original_config_file)\n\n    if num_in_channels is not None:\n        original_config[\"model\"][\"params\"][\"unet_config\"][\"params\"][\"in_channels\"] = num_in_channels\n\n    if (\n        \"parameterization\" in original_config[\"model\"][\"params\"]\n        and original_config[\"model\"][\"params\"][\"parameterization\"] == \"v\"\n    ):\n        if prediction_type is None:\n            # NOTE: For stable diffusion 2 base it is recommended to pass `prediction_type==\"epsilon\"`\n            # as it relies on a brittle global step parameter here\n            prediction_type = \"epsilon\" if global_step == 875000 else 
\"v_prediction\"\n        if image_size is None:\n            # NOTE: For stable diffusion 2 base one has to pass `image_size==512`\n            # as it relies on a brittle global step parameter here\n            image_size = 512 if global_step == 875000 else 768\n    else:\n        if prediction_type is None:\n            prediction_type = \"epsilon\"\n        if image_size is None:\n            image_size = 512\n\n    if controlnet is None:\n        controlnet = \"control_stage_config\" in original_config.model.params\n\n    if controlnet:\n        controlnet_model = convert_controlnet_checkpoint(\n            checkpoint, original_config, checkpoint_path, image_size, upcast_attention, extract_ema\n        )\n\n    num_train_timesteps = original_config.model.params.timesteps\n    beta_start = original_config.model.params.linear_start\n    beta_end = original_config.model.params.linear_end\n\n    scheduler = DDIMScheduler(\n        beta_end=beta_end,\n        beta_schedule=\"scaled_linear\",\n        beta_start=beta_start,\n        num_train_timesteps=num_train_timesteps,\n        steps_offset=1,\n        clip_sample=False,\n        set_alpha_to_one=False,\n        prediction_type=prediction_type,\n    )\n    # make sure scheduler works correctly with DDIM\n    scheduler.register_to_config(clip_sample=False)\n\n    if scheduler_type == \"pndm\":\n        config = dict(scheduler.config)\n        config[\"skip_prk_steps\"] = True\n        scheduler = PNDMScheduler.from_config(config)\n    elif scheduler_type == \"lms\":\n        scheduler = LMSDiscreteScheduler.from_config(scheduler.config)\n    elif scheduler_type == \"heun\":\n        scheduler = HeunDiscreteScheduler.from_config(scheduler.config)\n    elif scheduler_type == \"euler\":\n        scheduler = EulerDiscreteScheduler.from_config(scheduler.config)\n    elif scheduler_type == \"euler-ancestral\":\n        scheduler = EulerAncestralDiscreteScheduler.from_config(scheduler.config)\n    elif scheduler_type == \"dpm\":\n        scheduler = DPMSolverMultistepScheduler.from_config(scheduler.config)\n    elif scheduler_type == \"ddim\":\n        scheduler = scheduler\n    else:\n        raise ValueError(f\"Scheduler of type {scheduler_type} doesn't exist!\")\n\n    # Convert the UNet2DConditionModel model.\n    unet_config = create_unet_diffusers_config(original_config, image_size=image_size)\n    unet_config[\"upcast_attention\"] = upcast_attention\n    unet = UNet2DConditionModel(**unet_config)\n\n    converted_unet_checkpoint = convert_ldm_unet_checkpoint(\n        checkpoint, unet_config, path=checkpoint_path, extract_ema=extract_ema\n    )\n\n    unet.load_state_dict(converted_unet_checkpoint)\n\n    # Convert the VAE model.\n    vae_config = create_vae_diffusers_config(original_config, image_size=image_size)\n    converted_vae_checkpoint = convert_ldm_vae_checkpoint(checkpoint, vae_config)\n\n    vae = AutoencoderKL(**vae_config)\n    vae.load_state_dict(converted_vae_checkpoint)\n\n    # Convert the text model.\n    if model_type is None:\n        model_type = original_config.model.params.cond_stage_config.target.split(\".\")[-1]\n        logger.debug(f\"no `model_type` given, `model_type` inferred as: {model_type}\")\n\n    if model_type == \"FrozenOpenCLIPEmbedder\":\n        text_model = convert_open_clip_checkpoint(checkpoint)\n        tokenizer = CLIPTokenizer.from_pretrained(\"stabilityai/stable-diffusion-2\", subfolder=\"tokenizer\")\n\n        if stable_unclip is None:\n            if controlnet:\n                pipe = 
StableDiffusionControlNetPipeline(\n                    vae=vae,\n                    text_encoder=text_model,\n                    tokenizer=tokenizer,\n                    unet=unet,\n                    scheduler=scheduler,\n                    controlnet=controlnet_model,\n                    safety_checker=None,\n                    feature_extractor=None,\n                    requires_safety_checker=False,\n                )\n            else:\n                pipe = pipeline_class(\n                    vae=vae,\n                    text_encoder=text_model,\n                    tokenizer=tokenizer,\n                    unet=unet,\n                    scheduler=scheduler,\n                    safety_checker=None,\n                    feature_extractor=None,\n                    requires_safety_checker=False,\n                )\n        else:\n            image_normalizer, image_noising_scheduler = stable_unclip_image_noising_components(\n                original_config, clip_stats_path=clip_stats_path, device=device\n            )\n\n            if stable_unclip == \"img2img\":\n                feature_extractor, image_encoder = stable_unclip_image_encoder(original_config)\n\n                pipe = StableUnCLIPImg2ImgPipeline(\n                    # image encoding components\n                    feature_extractor=feature_extractor,\n                    image_encoder=image_encoder,\n                    # image noising components\n                    image_normalizer=image_normalizer,\n                    image_noising_scheduler=image_noising_scheduler,\n                    # regular denoising components\n                    tokenizer=tokenizer,\n                    text_encoder=text_model,\n                    unet=unet,\n                    scheduler=scheduler,\n                    # vae\n                    vae=vae,\n                )\n            elif stable_unclip == \"txt2img\":\n                if stable_unclip_prior is None or stable_unclip_prior == \"karlo\":\n                    karlo_model = \"kakaobrain/karlo-v1-alpha\"\n                    prior = PriorTransformer.from_pretrained(karlo_model, subfolder=\"prior\")\n\n                    prior_tokenizer = CLIPTokenizer.from_pretrained(\"openai/clip-vit-large-patch14\")\n                    prior_text_model = CLIPTextModelWithProjection.from_pretrained(\"openai/clip-vit-large-patch14\")\n\n                    prior_scheduler = UnCLIPScheduler.from_pretrained(karlo_model, subfolder=\"prior_scheduler\")\n                    prior_scheduler = DDPMScheduler.from_config(prior_scheduler.config)\n                else:\n                    raise NotImplementedError(f\"unknown prior for stable unclip model: {stable_unclip_prior}\")\n\n                pipe = StableUnCLIPPipeline(\n                    # prior components\n                    prior_tokenizer=prior_tokenizer,\n                    prior_text_encoder=prior_text_model,\n                    prior=prior,\n                    prior_scheduler=prior_scheduler,\n                    # image noising components\n                    image_normalizer=image_normalizer,\n                    image_noising_scheduler=image_noising_scheduler,\n                    # regular denoising components\n                    tokenizer=tokenizer,\n                    text_encoder=text_model,\n                    unet=unet,\n                    scheduler=scheduler,\n                    # vae\n                    vae=vae,\n                )\n            else:\n                raise 
NotImplementedError(f\"unknown `stable_unclip` type: {stable_unclip}\")\n    elif model_type == \"PaintByExample\":\n        vision_model = convert_paint_by_example_checkpoint(checkpoint)\n        tokenizer = CLIPTokenizer.from_pretrained(\"openai/clip-vit-large-patch14\")\n        feature_extractor = AutoFeatureExtractor.from_pretrained(\"CompVis/stable-diffusion-safety-checker\")\n        pipe = PaintByExamplePipeline(\n            vae=vae,\n            image_encoder=vision_model,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=None,\n            feature_extractor=feature_extractor,\n        )\n    elif model_type == \"FrozenCLIPEmbedder\":\n        text_model = convert_ldm_clip_checkpoint(checkpoint)\n        tokenizer = CLIPTokenizer.from_pretrained(\"openai/clip-vit-large-patch14\")\n\n        if load_safety_checker:\n            safety_checker = StableDiffusionSafetyChecker.from_pretrained(\"CompVis/stable-diffusion-safety-checker\")\n            feature_extractor = AutoFeatureExtractor.from_pretrained(\"CompVis/stable-diffusion-safety-checker\")\n        else:\n            safety_checker = None\n            feature_extractor = None\n\n        if controlnet:\n            pipe = StableDiffusionControlNetPipeline(\n                vae=vae,\n                text_encoder=text_model,\n                tokenizer=tokenizer,\n                unet=unet,\n                controlnet=controlnet_model,\n                scheduler=scheduler,\n                safety_checker=safety_checker,\n                feature_extractor=feature_extractor,\n            )\n        else:\n            pipe = pipeline_class(\n                vae=vae,\n                text_encoder=text_model,\n                tokenizer=tokenizer,\n                unet=unet,\n                scheduler=scheduler,\n                safety_checker=safety_checker,\n                feature_extractor=feature_extractor,\n            )\n    else:\n        text_config = create_ldm_bert_config(original_config)\n        text_model = convert_ldm_bert_checkpoint(checkpoint, text_config)\n        tokenizer = BertTokenizerFast.from_pretrained(\"bert-base-uncased\")\n        pipe = LDMTextToImagePipeline(vqvae=vae, bert=text_model, tokenizer=tokenizer, unet=unet, scheduler=scheduler)\n\n    return pipe\n\n\ndef download_controlnet_from_original_ckpt(\n    checkpoint_path: str,\n    original_config_file: str,\n    image_size: int = 512,\n    extract_ema: bool = False,\n    num_in_channels: Optional[int] = None,\n    upcast_attention: Optional[bool] = None,\n    device: str = None,\n    from_safetensors: bool = False,\n) -> DiffusionPipeline:\n    if not is_omegaconf_available():\n        raise ValueError(BACKENDS_MAPPING[\"omegaconf\"][1])\n\n    from omegaconf import OmegaConf\n\n    if from_safetensors:\n        if not is_safetensors_available():\n            raise ValueError(BACKENDS_MAPPING[\"safetensors\"][1])\n\n        from safetensors import safe_open\n\n        checkpoint = {}\n        with safe_open(checkpoint_path, framework=\"pt\", device=\"cpu\") as f:\n            for key in f.keys():\n                checkpoint[key] = f.get_tensor(key)\n    else:\n        if device is None:\n            device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n            checkpoint = torch.load(checkpoint_path, map_location=device)\n        else:\n            checkpoint = torch.load(checkpoint_path, map_location=device)\n\n    # NOTE: this while loop isn't great but this controlnet checkpoint has one additional\n   
 # \"state_dict\" key https://huggingface.co/thibaud/controlnet-canny-sd21\n    while \"state_dict\" in checkpoint:\n        checkpoint = checkpoint[\"state_dict\"]\n\n    original_config = OmegaConf.load(original_config_file)\n\n    if num_in_channels is not None:\n        original_config[\"model\"][\"params\"][\"unet_config\"][\"params\"][\"in_channels\"] = num_in_channels\n\n    if \"control_stage_config\" not in original_config.model.params:\n        raise ValueError(\"`control_stage_config` not present in original config\")\n\n    controlnet_model = convert_controlnet_checkpoint(\n        checkpoint, original_config, checkpoint_path, image_size, upcast_attention, extract_ema\n    )\n\n    return controlnet_model\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_cycle_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom diffusers.utils import is_accelerate_available, is_accelerate_version\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import DDIMScheduler\nfrom ...utils import PIL_INTERPOLATION, deprecate, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\ndef posterior_sample(scheduler, latents, timestep, clean_latents, generator, eta):\n    # 1. get previous step value (=t-1)\n    prev_timestep = timestep - scheduler.config.num_train_timesteps // scheduler.num_inference_steps\n\n    if prev_timestep <= 0:\n        return clean_latents\n\n    # 2. compute alphas, betas\n    alpha_prod_t = scheduler.alphas_cumprod[timestep]\n    alpha_prod_t_prev = (\n        scheduler.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else scheduler.final_alpha_cumprod\n    )\n\n    variance = scheduler._get_variance(timestep, prev_timestep)\n    std_dev_t = eta * variance ** (0.5)\n\n    # direction pointing to x_t\n    e_t = (latents - alpha_prod_t ** (0.5) * clean_latents) / (1 - alpha_prod_t) ** (0.5)\n    dir_xt = (1.0 - alpha_prod_t_prev - std_dev_t**2) ** (0.5) * e_t\n    noise = std_dev_t * randn_tensor(\n        clean_latents.shape, dtype=clean_latents.dtype, device=clean_latents.device, generator=generator\n    )\n    prev_latents = alpha_prod_t_prev ** (0.5) * clean_latents + dir_xt + noise\n\n    return prev_latents\n\n\ndef compute_noise(scheduler, prev_latents, latents, timestep, noise_pred, eta):\n    # 1. 
get previous step value (=t-1)\n    prev_timestep = timestep - scheduler.config.num_train_timesteps // scheduler.num_inference_steps\n\n    # 2. compute alphas, betas\n    alpha_prod_t = scheduler.alphas_cumprod[timestep]\n    alpha_prod_t_prev = (\n        scheduler.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else scheduler.final_alpha_cumprod\n    )\n\n    beta_prod_t = 1 - alpha_prod_t\n\n    # 3. compute predicted original sample from predicted noise also called\n    # \"predicted x_0\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n    pred_original_sample = (latents - beta_prod_t ** (0.5) * noise_pred) / alpha_prod_t ** (0.5)\n\n    # 4. Clip \"predicted x_0\"\n    if scheduler.config.clip_sample:\n        pred_original_sample = torch.clamp(pred_original_sample, -1, 1)\n\n    # 5. compute variance: \"sigma_t(η)\" -> see formula (16)\n    # σ_t = sqrt((1 − α_t−1)/(1 − α_t)) * sqrt(1 − α_t/α_t−1)\n    variance = scheduler._get_variance(timestep, prev_timestep)\n    std_dev_t = eta * variance ** (0.5)\n\n    # 6. compute \"direction pointing to x_t\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n    pred_sample_direction = (1 - alpha_prod_t_prev - std_dev_t**2) ** (0.5) * noise_pred\n\n    noise = (prev_latents - (alpha_prod_t_prev ** (0.5) * pred_original_sample + pred_sample_direction)) / (\n        variance ** (0.5) * eta\n    )\n    return noise\n\n\nclass CycleDiffusionPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image to image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/CompVis/stable-diffusion-v1-4) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: DDIMScheduler,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might lead to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta')` and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.check_inputs\n    def check_inputs(\n        self, prompt, strength, callback_steps, negative_prompt=None, prompt_embeds=None, negative_prompt_embeds=None\n    ):\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should be in [0.0, 1.0] but is {strength}\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. 
Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):\n        image = image.to(device=device, dtype=dtype)\n\n        batch_size = image.shape[0]\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            init_latents = [\n                self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)\n            ]\n            init_latents = torch.cat(init_latents, dim=0)\n        else:\n            init_latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0:\n            # expand init_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {batch_size} text prompts (`prompt`), but only {init_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = batch_size // init_latents.shape[0]\n            init_latents = torch.cat([init_latents] * additional_image_per_prompt * num_images_per_prompt, dim=0)\n        elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts.\"\n            )\n        else:\n            init_latents = torch.cat([init_latents] * num_images_per_prompt, dim=0)\n\n        # add noise to latents using the timestep\n        shape = init_latents.shape\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # get latents\n        clean_latents = init_latents\n        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)\n        latents = init_latents\n\n        return latents, clean_latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        source_prompt: Union[str, List[str]],\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        source_guidance_scale: Optional[float] = 1,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 0.1,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. 
This parameter will be modulated by `strength`.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            source_guidance_scale (`float`, *optional*, defaults to 1):\n                Guidance scale for the source prompt. This is useful to control the amount of influence the source\n                prompt for encoding.\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.1):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs\n        self.check_inputs(prompt, strength, callback_steps)\n\n        # 2. Define call parameters\n        batch_size = 1 if isinstance(prompt, str) else len(prompt)\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            prompt_embeds=prompt_embeds,\n        )\n        source_prompt_embeds = self._encode_prompt(\n            source_prompt, device, num_images_per_prompt, do_classifier_free_guidance, None\n        )\n\n        # 4. Preprocess image\n        image = preprocess(image)\n\n        # 5. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n\n        # 6. Prepare latent variables\n        latents, clean_latents = self.prepare_latents(\n            image, latent_timestep, batch_size, num_images_per_prompt, prompt_embeds.dtype, device, generator\n        )\n        source_latents = latents\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n        generator = extra_step_kwargs.pop(\"generator\", None)\n\n        # 8. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2)\n                source_latent_model_input = torch.cat([source_latents] * 2)\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                source_latent_model_input = self.scheduler.scale_model_input(source_latent_model_input, t)\n\n                # predict the noise residual\n                concat_latent_model_input = torch.stack(\n                    [\n                        source_latent_model_input[0],\n                        latent_model_input[0],\n                        source_latent_model_input[1],\n                        latent_model_input[1],\n                    ],\n                    dim=0,\n                )\n                concat_prompt_embeds = torch.stack(\n                    [\n                        source_prompt_embeds[0],\n                        prompt_embeds[0],\n                        source_prompt_embeds[1],\n                        prompt_embeds[1],\n                    ],\n                    dim=0,\n                )\n                concat_noise_pred = self.unet(\n                    concat_latent_model_input, t, encoder_hidden_states=concat_prompt_embeds\n                ).sample\n\n                # perform guidance\n                (\n                    source_noise_pred_uncond,\n                    noise_pred_uncond,\n                    source_noise_pred_text,\n                    noise_pred_text,\n                ) = concat_noise_pred.chunk(4, dim=0)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n                source_noise_pred = source_noise_pred_uncond + source_guidance_scale * (\n                    source_noise_pred_text - source_noise_pred_uncond\n                )\n\n                # Sample source_latents from the posterior distribution.\n                prev_source_latents = posterior_sample(\n                    self.scheduler, source_latents, t, clean_latents, generator=generator, **extra_step_kwargs\n                )\n                # Compute noise.\n                noise = compute_noise(\n                    self.scheduler, prev_source_latents, source_latents, t, source_noise_pred, **extra_step_kwargs\n                )\n                source_latents = prev_source_latents\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(\n                    noise_pred, t, latents, variance_noise=noise, **extra_step_kwargs\n                ).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 9. 
Post-processing\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_flax_stable_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom functools import partial\nfrom typing import Dict, List, Optional, Union\n\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom flax.core.frozen_dict import FrozenDict\nfrom flax.jax_utils import unreplicate\nfrom flax.training.common_utils import shard\nfrom packaging import version\nfrom PIL import Image\nfrom transformers import CLIPImageProcessor, CLIPTokenizer, FlaxCLIPTextModel\n\nfrom ...models import FlaxAutoencoderKL, FlaxUNet2DConditionModel\nfrom ...schedulers import (\n    FlaxDDIMScheduler,\n    FlaxDPMSolverMultistepScheduler,\n    FlaxLMSDiscreteScheduler,\n    FlaxPNDMScheduler,\n)\nfrom ...utils import deprecate, logging, replace_example_docstring\nfrom ..pipeline_flax_utils import FlaxDiffusionPipeline\nfrom . import FlaxStableDiffusionPipelineOutput\nfrom .safety_checker_flax import FlaxStableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n# Set to True to use python for loop instead of jax.fori_loop for easier debugging\nDEBUG = False\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import jax\n        >>> import numpy as np\n        >>> from flax.jax_utils import replicate\n        >>> from flax.training.common_utils import shard\n\n        >>> from diffusers import FlaxStableDiffusionPipeline\n\n        >>> pipeline, params = FlaxStableDiffusionPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", revision=\"bf16\", dtype=jax.numpy.bfloat16\n        ... )\n\n        >>> prompt = \"a photo of an astronaut riding a horse on mars\"\n\n        >>> prng_seed = jax.random.PRNGKey(0)\n        >>> num_inference_steps = 50\n\n        >>> num_samples = jax.device_count()\n        >>> prompt = num_samples * [prompt]\n        >>> prompt_ids = pipeline.prepare_inputs(prompt)\n        # shard inputs and rng\n\n        >>> params = replicate(params)\n        >>> prng_seed = jax.random.split(prng_seed, jax.device_count())\n        >>> prompt_ids = shard(prompt_ids)\n\n        >>> images = pipeline(prompt_ids, params, prng_seed, num_inference_steps, jit=True).images\n        >>> images = pipeline.numpy_to_pil(np.asarray(images.reshape((num_samples,) + images.shape[-3:])))\n        ```\n\"\"\"\n\n\nclass FlaxStableDiffusionPipeline(FlaxDiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion.\n\n    This model inherits from [`FlaxDiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`FlaxAutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`FlaxCLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.FlaxCLIPTextModel),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`FlaxUNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`FlaxDDIMScheduler`], [`FlaxLMSDiscreteScheduler`], [`FlaxPNDMScheduler`], or\n            [`FlaxDPMSolverMultistepScheduler`].\n        safety_checker ([`FlaxStableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: FlaxAutoencoderKL,\n        text_encoder: FlaxCLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: FlaxUNet2DConditionModel,\n        scheduler: Union[\n            FlaxDDIMScheduler, FlaxPNDMScheduler, FlaxLMSDiscreteScheduler, FlaxDPMSolverMultistepScheduler\n        ],\n        safety_checker: FlaxStableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        super().__init__()\n        self.dtype = dtype\n\n        if safety_checker is None:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. 
For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    def prepare_inputs(self, prompt: Union[str, List[str]]):\n        if not isinstance(prompt, (str, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        text_input = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"np\",\n        )\n        return text_input.input_ids\n\n    def _get_has_nsfw_concepts(self, features, params):\n        has_nsfw_concepts = self.safety_checker(features, params)\n        return has_nsfw_concepts\n\n    def _run_safety_checker(self, images, safety_model_params, jit=False):\n        # safety_model_params should already be replicated when jit is True\n        pil_images = [Image.fromarray(image) for image in images]\n        features = self.feature_extractor(pil_images, return_tensors=\"np\").pixel_values\n\n        if jit:\n            features = shard(features)\n            has_nsfw_concepts = _p_get_has_nsfw_concepts(self, features, safety_model_params)\n            has_nsfw_concepts = unshard(has_nsfw_concepts)\n            safety_model_params = unreplicate(safety_model_params)\n        else:\n            has_nsfw_concepts = self._get_has_nsfw_concepts(features, safety_model_params)\n\n        images_was_copied = 
False\n        for idx, has_nsfw_concept in enumerate(has_nsfw_concepts):\n            if has_nsfw_concept:\n                if not images_was_copied:\n                    images_was_copied = True\n                    images = images.copy()\n\n                images[idx] = np.zeros(images[idx].shape, dtype=np.uint8)  # black image\n\n            if any(has_nsfw_concepts):\n                warnings.warn(\n                    \"Potential NSFW content was detected in one or more images. A black image will be returned\"\n                    \" instead. Try again with a different prompt and/or seed.\"\n                )\n\n        return images, has_nsfw_concepts\n\n    def _generate(\n        self,\n        prompt_ids: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        num_inference_steps: int,\n        height: int,\n        width: int,\n        guidance_scale: float,\n        latents: Optional[jnp.array] = None,\n        neg_prompt_ids: Optional[jnp.array] = None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        # get prompt text embeddings\n        prompt_embeds = self.text_encoder(prompt_ids, params=params[\"text_encoder\"])[0]\n\n        # TODO: currently it is assumed `do_classifier_free_guidance = guidance_scale > 1.0`\n        # implement this conditional `do_classifier_free_guidance = guidance_scale > 1.0`\n        batch_size = prompt_ids.shape[0]\n\n        max_length = prompt_ids.shape[-1]\n\n        if neg_prompt_ids is None:\n            uncond_input = self.tokenizer(\n                [\"\"] * batch_size, padding=\"max_length\", max_length=max_length, return_tensors=\"np\"\n            ).input_ids\n        else:\n            uncond_input = neg_prompt_ids\n        negative_prompt_embeds = self.text_encoder(uncond_input, params=params[\"text_encoder\"])[0]\n        context = jnp.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        # Ensure model output will be `float32` before going into the scheduler\n        guidance_scale = jnp.array([guidance_scale], dtype=jnp.float32)\n\n        latents_shape = (\n            batch_size,\n            self.unet.config.in_channels,\n            height // self.vae_scale_factor,\n            width // self.vae_scale_factor,\n        )\n        if latents is None:\n            latents = jax.random.normal(prng_seed, shape=latents_shape, dtype=jnp.float32)\n        else:\n            if latents.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n\n        def loop_body(step, args):\n            latents, scheduler_state = args\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            latents_input = jnp.concatenate([latents] * 2)\n\n            t = jnp.array(scheduler_state.timesteps, dtype=jnp.int32)[step]\n            timestep = jnp.broadcast_to(t, latents_input.shape[0])\n\n            latents_input = self.scheduler.scale_model_input(scheduler_state, latents_input, t)\n\n            # predict the noise residual\n            noise_pred = self.unet.apply(\n                {\"params\": params[\"unet\"]},\n                jnp.array(latents_input),\n                jnp.array(timestep, dtype=jnp.int32),\n        
        encoder_hidden_states=context,\n            ).sample\n            # perform guidance\n            noise_pred_uncond, noise_prediction_text = jnp.split(noise_pred, 2, axis=0)\n            noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents, scheduler_state = self.scheduler.step(scheduler_state, noise_pred, t, latents).to_tuple()\n            return latents, scheduler_state\n\n        scheduler_state = self.scheduler.set_timesteps(\n            params[\"scheduler\"], num_inference_steps=num_inference_steps, shape=latents.shape\n        )\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * params[\"scheduler\"].init_noise_sigma\n\n        if DEBUG:\n            # run with python for loop\n            for i in range(num_inference_steps):\n                latents, scheduler_state = loop_body(i, (latents, scheduler_state))\n        else:\n            latents, _ = jax.lax.fori_loop(0, num_inference_steps, loop_body, (latents, scheduler_state))\n\n        # scale and decode the image latents with vae\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.apply({\"params\": params[\"vae\"]}, latents, method=self.vae.decode).sample\n\n        image = (image / 2 + 0.5).clip(0, 1).transpose(0, 2, 3, 1)\n        return image\n\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt_ids: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        num_inference_steps: int = 50,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        guidance_scale: Union[float, jnp.array] = 7.5,\n        latents: jnp.array = None,\n        neg_prompt_ids: jnp.array = None,\n        return_dict: bool = True,\n        jit: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            latents (`jnp.array`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. 
Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            jit (`bool`, defaults to `False`):\n                Whether to run `pmap` versions of the generation and safety scoring functions. NOTE: This argument\n                exists because `__call__` is not yet end-to-end pmap-able. It will be removed in a future release.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] instead of\n                a plain tuple.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is a list with the generated images, and the second\n            element is a list of `bool`s denoting whether the corresponding generated image likely represents\n            \"not-safe-for-work\" (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        if isinstance(guidance_scale, float):\n            # Convert to a tensor so each device gets a copy. Follow the prompt_ids for\n            # shape information, as they may be sharded (when `jit` is `True`), or not.\n            guidance_scale = jnp.array([guidance_scale] * prompt_ids.shape[0])\n            if len(prompt_ids.shape) > 2:\n                # Assume sharded\n                guidance_scale = guidance_scale[:, None]\n\n        if jit:\n            images = _p_generate(\n                self,\n                prompt_ids,\n                params,\n                prng_seed,\n                num_inference_steps,\n                height,\n                width,\n                guidance_scale,\n                latents,\n                neg_prompt_ids,\n            )\n        else:\n            images = self._generate(\n                prompt_ids,\n                params,\n                prng_seed,\n                num_inference_steps,\n                height,\n                width,\n                guidance_scale,\n                latents,\n                neg_prompt_ids,\n            )\n\n        if self.safety_checker is not None:\n            safety_params = params[\"safety_checker\"]\n            images_uint8_casted = (images * 255).round().astype(\"uint8\")\n            num_devices, batch_size = images.shape[:2]\n\n            images_uint8_casted = np.asarray(images_uint8_casted).reshape(num_devices * batch_size, height, width, 3)\n            images_uint8_casted, has_nsfw_concept = self._run_safety_checker(images_uint8_casted, safety_params, jit)\n            images = np.asarray(images)\n\n            # block images\n            if any(has_nsfw_concept):\n                for i, is_nsfw in enumerate(has_nsfw_concept):\n                    if is_nsfw:\n                        images[i] = np.asarray(images_uint8_casted[i])\n\n            images = images.reshape(num_devices, batch_size, height, width, 3)\n        else:\n            images = np.asarray(images)\n            has_nsfw_concept = False\n\n        if not return_dict:\n            return 
(images, has_nsfw_concept)\n\n        return FlaxStableDiffusionPipelineOutput(images=images, nsfw_content_detected=has_nsfw_concept)\n\n\n# Static argnums are pipe, num_inference_steps, height, width. A change would trigger recompilation.\n# Non-static args are (sharded) input tensors mapped over their first dimension (hence, `0`).\n@partial(\n    jax.pmap,\n    in_axes=(None, 0, 0, 0, None, None, None, 0, 0, 0),\n    static_broadcasted_argnums=(0, 4, 5, 6),\n)\ndef _p_generate(\n    pipe,\n    prompt_ids,\n    params,\n    prng_seed,\n    num_inference_steps,\n    height,\n    width,\n    guidance_scale,\n    latents,\n    neg_prompt_ids,\n):\n    return pipe._generate(\n        prompt_ids,\n        params,\n        prng_seed,\n        num_inference_steps,\n        height,\n        width,\n        guidance_scale,\n        latents,\n        neg_prompt_ids,\n    )\n\n\n@partial(jax.pmap, static_broadcasted_argnums=(0,))\ndef _p_get_has_nsfw_concepts(pipe, features, params):\n    return pipe._get_has_nsfw_concepts(features, params)\n\n\ndef unshard(x: jnp.ndarray):\n    # einops.rearrange(x, 'd b ... -> (d b) ...')\n    num_devices, batch_size = x.shape[:2]\n    rest = x.shape[2:]\n    return x.reshape(num_devices * batch_size, *rest)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_flax_stable_diffusion_controlnet.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# NOTE: This file is deprecated and will be removed in a future version.\n# It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works\n\nfrom ...utils import deprecate\nfrom ..controlnet.pipeline_flax_controlnet import FlaxStableDiffusionControlNetPipeline  # noqa: F401\n\n\ndeprecate(\n    \"stable diffusion controlnet\",\n    \"0.22.0\",\n    \"Importing `FlaxStableDiffusionControlNetPipeline` from diffusers.pipelines.stable_diffusion.flax_pipeline_stable_diffusion_controlnet is deprecated. Please import `from diffusers import FlaxStableDiffusionControlNetPipeline` instead.\",\n    standard_warn=False,\n    stacklevel=3,\n)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_flax_stable_diffusion_img2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom functools import partial\nfrom typing import Dict, List, Optional, Union\n\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom flax.core.frozen_dict import FrozenDict\nfrom flax.jax_utils import unreplicate\nfrom flax.training.common_utils import shard\nfrom PIL import Image\nfrom transformers import CLIPImageProcessor, CLIPTokenizer, FlaxCLIPTextModel\n\nfrom ...models import FlaxAutoencoderKL, FlaxUNet2DConditionModel\nfrom ...schedulers import (\n    FlaxDDIMScheduler,\n    FlaxDPMSolverMultistepScheduler,\n    FlaxLMSDiscreteScheduler,\n    FlaxPNDMScheduler,\n)\nfrom ...utils import PIL_INTERPOLATION, logging, replace_example_docstring\nfrom ..pipeline_flax_utils import FlaxDiffusionPipeline\nfrom . import FlaxStableDiffusionPipelineOutput\nfrom .safety_checker_flax import FlaxStableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n# Set to True to use python for loop instead of jax.fori_loop for easier debugging\nDEBUG = False\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import jax\n        >>> import numpy as np\n        >>> import jax.numpy as jnp\n        >>> from flax.jax_utils import replicate\n        >>> from flax.training.common_utils import shard\n        >>> import requests\n        >>> from io import BytesIO\n        >>> from PIL import Image\n        >>> from diffusers import FlaxStableDiffusionImg2ImgPipeline\n\n\n        >>> def create_key(seed=0):\n        ...     return jax.random.PRNGKey(seed)\n\n\n        >>> rng = create_key(0)\n\n        >>> url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n        >>> response = requests.get(url)\n        >>> init_img = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> init_img = init_img.resize((768, 512))\n\n        >>> prompts = \"A fantasy landscape, trending on artstation\"\n\n        >>> pipeline, params = FlaxStableDiffusionImg2ImgPipeline.from_pretrained(\n        ...     \"CompVis/stable-diffusion-v1-4\",\n        ...     revision=\"flax\",\n        ...     dtype=jnp.bfloat16,\n        ... )\n\n        >>> num_samples = jax.device_count()\n        >>> rng = jax.random.split(rng, jax.device_count())\n        >>> prompt_ids, processed_image = pipeline.prepare_inputs(\n        ...     prompt=[prompts] * num_samples, image=[init_img] * num_samples\n        ... )\n        >>> p_params = replicate(params)\n        >>> prompt_ids = shard(prompt_ids)\n        >>> processed_image = shard(processed_image)\n\n        >>> output = pipeline(\n        ...     prompt_ids=prompt_ids,\n        ...     image=processed_image,\n        ...     params=p_params,\n        ...     prng_seed=rng,\n        ...     strength=0.75,\n        ...     num_inference_steps=50,\n        ...     jit=True,\n        ...  
   height=512,\n        ...     width=768,\n        ... ).images\n\n        >>> output_images = pipeline.numpy_to_pil(np.asarray(output.reshape((num_samples,) + output.shape[-3:])))\n        ```\n\"\"\"\n\n\nclass FlaxStableDiffusionImg2ImgPipeline(FlaxDiffusionPipeline):\n    r\"\"\"\n    Pipeline for image-to-image generation using Stable Diffusion.\n\n    This model inherits from [`FlaxDiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`FlaxAutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`FlaxCLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.FlaxCLIPTextModel),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`FlaxUNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`FlaxDDIMScheduler`], [`FlaxLMSDiscreteScheduler`], [`FlaxPNDMScheduler`], or\n            [`FlaxDPMSolverMultistepScheduler`].\n        safety_checker ([`FlaxStableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: FlaxAutoencoderKL,\n        text_encoder: FlaxCLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: FlaxUNet2DConditionModel,\n        scheduler: Union[\n            FlaxDDIMScheduler, FlaxPNDMScheduler, FlaxLMSDiscreteScheduler, FlaxDPMSolverMultistepScheduler\n        ],\n        safety_checker: FlaxStableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        super().__init__()\n        self.dtype = dtype\n\n        if safety_checker is None:\n            logger.warn(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. 
For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    def prepare_inputs(self, prompt: Union[str, List[str]], image: Union[Image.Image, List[Image.Image]]):\n        if not isinstance(prompt, (str, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if not isinstance(image, (Image.Image, list)):\n            raise ValueError(f\"image has to be of type `PIL.Image.Image` or list but is {type(image)}\")\n\n        if isinstance(image, Image.Image):\n            image = [image]\n\n        processed_images = jnp.concatenate([preprocess(img, jnp.float32) for img in image])\n\n        text_input = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"np\",\n        )\n        return text_input.input_ids, processed_images\n\n    def _get_has_nsfw_concepts(self, features, params):\n        has_nsfw_concepts = self.safety_checker(features, params)\n        return has_nsfw_concepts\n\n    def _run_safety_checker(self, images, safety_model_params, jit=False):\n        # safety_model_params should already be replicated when jit is True\n        pil_images = [Image.fromarray(image) for image in images]\n        features = self.feature_extractor(pil_images, return_tensors=\"np\").pixel_values\n\n        if jit:\n            features = shard(features)\n            has_nsfw_concepts = _p_get_has_nsfw_concepts(self, features, safety_model_params)\n            has_nsfw_concepts = unshard(has_nsfw_concepts)\n            safety_model_params = unreplicate(safety_model_params)\n        else:\n            has_nsfw_concepts = self._get_has_nsfw_concepts(features, safety_model_params)\n\n        images_was_copied = False\n        for idx, has_nsfw_concept in enumerate(has_nsfw_concepts):\n            if has_nsfw_concept:\n                if not images_was_copied:\n                    images_was_copied = True\n                    images = images.copy()\n\n                images[idx] = np.zeros(images[idx].shape, dtype=np.uint8)  # black image\n\n            if any(has_nsfw_concepts):\n                warnings.warn(\n                    \"Potential NSFW content was detected in one or more images. A black image will be returned\"\n                    \" instead. 
Try again with a different prompt and/or seed.\"\n                )\n\n        return images, has_nsfw_concepts\n\n    def get_timestep_start(self, num_inference_steps, strength):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n\n        return t_start\n\n    def _generate(\n        self,\n        prompt_ids: jnp.array,\n        image: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        start_timestep: int,\n        num_inference_steps: int,\n        height: int,\n        width: int,\n        guidance_scale: float,\n        noise: Optional[jnp.array] = None,\n        neg_prompt_ids: Optional[jnp.array] = None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        # get prompt text embeddings\n        prompt_embeds = self.text_encoder(prompt_ids, params=params[\"text_encoder\"])[0]\n\n        # TODO: currently it is assumed `do_classifier_free_guidance = guidance_scale > 1.0`\n        # implement this conditional `do_classifier_free_guidance = guidance_scale > 1.0`\n        batch_size = prompt_ids.shape[0]\n\n        max_length = prompt_ids.shape[-1]\n\n        if neg_prompt_ids is None:\n            uncond_input = self.tokenizer(\n                [\"\"] * batch_size, padding=\"max_length\", max_length=max_length, return_tensors=\"np\"\n            ).input_ids\n        else:\n            uncond_input = neg_prompt_ids\n        negative_prompt_embeds = self.text_encoder(uncond_input, params=params[\"text_encoder\"])[0]\n        context = jnp.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        latents_shape = (\n            batch_size,\n            self.unet.config.in_channels,\n            height // self.vae_scale_factor,\n            width // self.vae_scale_factor,\n        )\n        if noise is None:\n            noise = jax.random.normal(prng_seed, shape=latents_shape, dtype=jnp.float32)\n        else:\n            if noise.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {noise.shape}, expected {latents_shape}\")\n\n        # Create init_latents\n        init_latent_dist = self.vae.apply({\"params\": params[\"vae\"]}, image, method=self.vae.encode).latent_dist\n        init_latents = init_latent_dist.sample(key=prng_seed).transpose((0, 3, 1, 2))\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        def loop_body(step, args):\n            latents, scheduler_state = args\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            latents_input = jnp.concatenate([latents] * 2)\n\n            t = jnp.array(scheduler_state.timesteps, dtype=jnp.int32)[step]\n            timestep = jnp.broadcast_to(t, latents_input.shape[0])\n\n            latents_input = self.scheduler.scale_model_input(scheduler_state, latents_input, t)\n\n            # predict the noise residual\n            noise_pred = self.unet.apply(\n                {\"params\": params[\"unet\"]},\n                jnp.array(latents_input),\n                jnp.array(timestep, dtype=jnp.int32),\n                encoder_hidden_states=context,\n            
).sample\n            # perform guidance\n            noise_pred_uncond, noise_prediction_text = jnp.split(noise_pred, 2, axis=0)\n            noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents, scheduler_state = self.scheduler.step(scheduler_state, noise_pred, t, latents).to_tuple()\n            return latents, scheduler_state\n\n        scheduler_state = self.scheduler.set_timesteps(\n            params[\"scheduler\"], num_inference_steps=num_inference_steps, shape=latents_shape\n        )\n\n        latent_timestep = scheduler_state.timesteps[start_timestep : start_timestep + 1].repeat(batch_size)\n\n        latents = self.scheduler.add_noise(params[\"scheduler\"], init_latents, noise, latent_timestep)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * params[\"scheduler\"].init_noise_sigma\n\n        if DEBUG:\n            # run with python for loop\n            for i in range(start_timestep, num_inference_steps):\n                latents, scheduler_state = loop_body(i, (latents, scheduler_state))\n        else:\n            latents, _ = jax.lax.fori_loop(start_timestep, num_inference_steps, loop_body, (latents, scheduler_state))\n\n        # scale and decode the image latents with vae\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.apply({\"params\": params[\"vae\"]}, latents, method=self.vae.decode).sample\n\n        image = (image / 2 + 0.5).clip(0, 1).transpose(0, 2, 3, 1)\n        return image\n\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt_ids: jnp.array,\n        image: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        strength: float = 0.8,\n        num_inference_steps: int = 50,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        guidance_scale: Union[float, jnp.array] = 7.5,\n        noise: jnp.array = None,\n        neg_prompt_ids: jnp.array = None,\n        return_dict: bool = True,\n        jit: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt_ids (`jnp.array`):\n                The prompt or prompts to guide the image generation.\n            image (`jnp.array`):\n                Array representing an image batch that will be used as the starting point for the process.\n            params (`Dict` or `FrozenDict`): Dictionary containing the model parameters/weights\n            prng_seed (`jax.random.KeyArray` or `jax.Array`): Array containing random number generator key\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            noise (`jnp.array`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] instead of\n                a plain tuple.\n            jit (`bool`, defaults to `False`):\n                Whether to run `pmap` versions of the generation and safety scoring functions. NOTE: This argument\n                exists because `__call__` is not yet end-to-end pmap-able. It will be removed in a future release.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is a list with the generated images, and the second\n            element is a list of `bool`s denoting whether the corresponding generated image likely represents\n            \"not-safe-for-work\" (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        if isinstance(guidance_scale, float):\n            # Convert to a tensor so each device gets a copy. 
Follow the prompt_ids for\n            # shape information, as they may be sharded (when `jit` is `True`), or not.\n            guidance_scale = jnp.array([guidance_scale] * prompt_ids.shape[0])\n            if len(prompt_ids.shape) > 2:\n                # Assume sharded\n                guidance_scale = guidance_scale[:, None]\n\n        start_timestep = self.get_timestep_start(num_inference_steps, strength)\n\n        if jit:\n            images = _p_generate(\n                self,\n                prompt_ids,\n                image,\n                params,\n                prng_seed,\n                start_timestep,\n                num_inference_steps,\n                height,\n                width,\n                guidance_scale,\n                noise,\n                neg_prompt_ids,\n            )\n        else:\n            images = self._generate(\n                prompt_ids,\n                image,\n                params,\n                prng_seed,\n                start_timestep,\n                num_inference_steps,\n                height,\n                width,\n                guidance_scale,\n                noise,\n                neg_prompt_ids,\n            )\n\n        if self.safety_checker is not None:\n            safety_params = params[\"safety_checker\"]\n            images_uint8_casted = (images * 255).round().astype(\"uint8\")\n            num_devices, batch_size = images.shape[:2]\n\n            images_uint8_casted = np.asarray(images_uint8_casted).reshape(num_devices * batch_size, height, width, 3)\n            images_uint8_casted, has_nsfw_concept = self._run_safety_checker(images_uint8_casted, safety_params, jit)\n            images = np.asarray(images)\n\n            # block images\n            if any(has_nsfw_concept):\n                for i, is_nsfw in enumerate(has_nsfw_concept):\n                    if is_nsfw:\n                        images[i] = np.asarray(images_uint8_casted[i])\n\n            images = images.reshape(num_devices, batch_size, height, width, 3)\n        else:\n            images = np.asarray(images)\n            has_nsfw_concept = False\n\n        if not return_dict:\n            return (images, has_nsfw_concept)\n\n        return FlaxStableDiffusionPipelineOutput(images=images, nsfw_content_detected=has_nsfw_concept)\n\n\n# Static argnums are pipe, start_timestep, num_inference_steps, height, width. A change would trigger recompilation.\n# Non-static args are (sharded) input tensors mapped over their first dimension (hence, `0`).\n@partial(\n    jax.pmap,\n    in_axes=(None, 0, 0, 0, 0, None, None, None, None, 0, 0, 0),\n    static_broadcasted_argnums=(0, 5, 6, 7, 8),\n)\ndef _p_generate(\n    pipe,\n    prompt_ids,\n    image,\n    params,\n    prng_seed,\n    start_timestep,\n    num_inference_steps,\n    height,\n    width,\n    guidance_scale,\n    noise,\n    neg_prompt_ids,\n):\n    return pipe._generate(\n        prompt_ids,\n        image,\n        params,\n        prng_seed,\n        start_timestep,\n        num_inference_steps,\n        height,\n        width,\n        guidance_scale,\n        noise,\n        neg_prompt_ids,\n    )\n\n\n@partial(jax.pmap, static_broadcasted_argnums=(0,))\ndef _p_get_has_nsfw_concepts(pipe, features, params):\n    return pipe._get_has_nsfw_concepts(features, params)\n\n\ndef unshard(x: jnp.ndarray):\n    # einops.rearrange(x, 'd b ... 
-> (d b) ...')\n    num_devices, batch_size = x.shape[:2]\n    rest = x.shape[2:]\n    return x.reshape(num_devices * batch_size, *rest)\n\n\ndef preprocess(image, dtype):\n    w, h = image.size\n    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n    image = image.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"])\n    image = jnp.array(image).astype(dtype) / 255.0\n    image = image[None].transpose(0, 3, 1, 2)\n    return 2.0 * image - 1.0\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_flax_stable_diffusion_inpaint.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom functools import partial\nfrom typing import Dict, List, Optional, Union\n\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom flax.core.frozen_dict import FrozenDict\nfrom flax.jax_utils import unreplicate\nfrom flax.training.common_utils import shard\nfrom packaging import version\nfrom PIL import Image\nfrom transformers import CLIPImageProcessor, CLIPTokenizer, FlaxCLIPTextModel\n\nfrom ...models import FlaxAutoencoderKL, FlaxUNet2DConditionModel\nfrom ...schedulers import (\n    FlaxDDIMScheduler,\n    FlaxDPMSolverMultistepScheduler,\n    FlaxLMSDiscreteScheduler,\n    FlaxPNDMScheduler,\n)\nfrom ...utils import PIL_INTERPOLATION, deprecate, logging, replace_example_docstring\nfrom ..pipeline_flax_utils import FlaxDiffusionPipeline\nfrom . import FlaxStableDiffusionPipelineOutput\nfrom .safety_checker_flax import FlaxStableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n# Set to True to use python for loop instead of jax.fori_loop for easier debugging\nDEBUG = False\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import jax\n        >>> import numpy as np\n        >>> from flax.jax_utils import replicate\n        >>> from flax.training.common_utils import shard\n        >>> import PIL\n        >>> import requests\n        >>> from io import BytesIO\n        >>> from diffusers import FlaxStableDiffusionInpaintPipeline\n\n\n        >>> def download_image(url):\n        ...     response = requests.get(url)\n        ...     return PIL.Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n\n        >>> img_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png\"\n        >>> mask_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png\"\n\n        >>> init_image = download_image(img_url).resize((512, 512))\n        >>> mask_image = download_image(mask_url).resize((512, 512))\n\n        >>> pipeline, params = FlaxStableDiffusionInpaintPipeline.from_pretrained(\n        ...     \"xvjiarui/stable-diffusion-2-inpainting\"\n        ... )\n\n        >>> prompt = \"Face of a yellow cat, high resolution, sitting on a park bench\"\n        >>> prng_seed = jax.random.PRNGKey(0)\n        >>> num_inference_steps = 50\n\n        >>> num_samples = jax.device_count()\n        >>> prompt = num_samples * [prompt]\n        >>> init_image = num_samples * [init_image]\n        >>> mask_image = num_samples * [mask_image]\n        >>> prompt_ids, processed_masked_images, processed_masks = pipeline.prepare_inputs(\n        ...     prompt, init_image, mask_image\n        ... 
)\n        # shard inputs and rng\n\n        >>> params = replicate(params)\n        >>> prng_seed = jax.random.split(prng_seed, jax.device_count())\n        >>> prompt_ids = shard(prompt_ids)\n        >>> processed_masked_images = shard(processed_masked_images)\n        >>> processed_masks = shard(processed_masks)\n\n        >>> images = pipeline(\n        ...     prompt_ids, processed_masks, processed_masked_images, params, prng_seed, num_inference_steps, jit=True\n        ... ).images\n        >>> images = pipeline.numpy_to_pil(np.asarray(images.reshape((num_samples,) + images.shape[-3:])))\n        ```\n\"\"\"\n\n\nclass FlaxStableDiffusionInpaintPipeline(FlaxDiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion. *This is an experimental feature*.\n\n    This model inherits from [`FlaxDiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`FlaxAutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`FlaxCLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.FlaxCLIPTextModel),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`FlaxUNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`FlaxDDIMScheduler`], [`FlaxLMSDiscreteScheduler`], [`FlaxPNDMScheduler`], or\n            [`FlaxDPMSolverMultistepScheduler`].\n        safety_checker ([`FlaxStableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: FlaxAutoencoderKL,\n        text_encoder: FlaxCLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: FlaxUNet2DConditionModel,\n        scheduler: Union[\n            FlaxDDIMScheduler, FlaxPNDMScheduler, FlaxLMSDiscreteScheduler, FlaxDPMSolverMultistepScheduler\n        ],\n        safety_checker: FlaxStableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        super().__init__()\n        self.dtype = dtype\n\n        if safety_checker is None:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. 
Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. 
If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    def prepare_inputs(\n        self,\n        prompt: Union[str, List[str]],\n        image: Union[Image.Image, List[Image.Image]],\n        mask: Union[Image.Image, List[Image.Image]],\n    ):\n        if not isinstance(prompt, (str, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if not isinstance(image, (Image.Image, list)):\n            raise ValueError(f\"image has to be of type `PIL.Image.Image` or list but is {type(image)}\")\n\n        if isinstance(image, Image.Image):\n            image = [image]\n\n        if not isinstance(mask, (Image.Image, list)):\n            raise ValueError(f\"image has to be of type `PIL.Image.Image` or list but is {type(image)}\")\n\n        if isinstance(mask, Image.Image):\n            mask = [mask]\n\n        processed_images = jnp.concatenate([preprocess_image(img, jnp.float32) for img in image])\n        processed_masks = jnp.concatenate([preprocess_mask(m, jnp.float32) for m in mask])\n        # processed_masks[processed_masks < 0.5] = 0\n        processed_masks = processed_masks.at[processed_masks < 0.5].set(0)\n        # processed_masks[processed_masks >= 0.5] = 1\n        processed_masks = processed_masks.at[processed_masks >= 0.5].set(1)\n\n        processed_masked_images = processed_images * (processed_masks < 0.5)\n\n        text_input = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"np\",\n        )\n        return text_input.input_ids, processed_masked_images, processed_masks\n\n    def _get_has_nsfw_concepts(self, features, params):\n        has_nsfw_concepts = self.safety_checker(features, params)\n        return has_nsfw_concepts\n\n    def _run_safety_checker(self, images, safety_model_params, jit=False):\n        # safety_model_params should already be replicated when jit is True\n        pil_images = [Image.fromarray(image) for image in images]\n        features = self.feature_extractor(pil_images, return_tensors=\"np\").pixel_values\n\n        if jit:\n            features = shard(features)\n            has_nsfw_concepts = _p_get_has_nsfw_concepts(self, features, safety_model_params)\n            has_nsfw_concepts = unshard(has_nsfw_concepts)\n            safety_model_params = unreplicate(safety_model_params)\n        else:\n            has_nsfw_concepts = self._get_has_nsfw_concepts(features, safety_model_params)\n\n        images_was_copied = False\n        for idx, has_nsfw_concept in enumerate(has_nsfw_concepts):\n            if has_nsfw_concept:\n                if not images_was_copied:\n    
                images_was_copied = True\n                    images = images.copy()\n\n                images[idx] = np.zeros(images[idx].shape, dtype=np.uint8)  # black image\n\n            if any(has_nsfw_concepts):\n                warnings.warn(\n                    \"Potential NSFW content was detected in one or more images. A black image will be returned\"\n                    \" instead. Try again with a different prompt and/or seed.\"\n                )\n\n        return images, has_nsfw_concepts\n\n    def _generate(\n        self,\n        prompt_ids: jnp.array,\n        mask: jnp.array,\n        masked_image: jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        num_inference_steps: int,\n        height: int,\n        width: int,\n        guidance_scale: float,\n        latents: Optional[jnp.array] = None,\n        neg_prompt_ids: Optional[jnp.array] = None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        # get prompt text embeddings\n        prompt_embeds = self.text_encoder(prompt_ids, params=params[\"text_encoder\"])[0]\n\n        # TODO: currently it is assumed `do_classifier_free_guidance = guidance_scale > 1.0`\n        # implement this conditional `do_classifier_free_guidance = guidance_scale > 1.0`\n        batch_size = prompt_ids.shape[0]\n\n        max_length = prompt_ids.shape[-1]\n\n        if neg_prompt_ids is None:\n            uncond_input = self.tokenizer(\n                [\"\"] * batch_size, padding=\"max_length\", max_length=max_length, return_tensors=\"np\"\n            ).input_ids\n        else:\n            uncond_input = neg_prompt_ids\n        negative_prompt_embeds = self.text_encoder(uncond_input, params=params[\"text_encoder\"])[0]\n        context = jnp.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        latents_shape = (\n            batch_size,\n            self.vae.config.latent_channels,\n            height // self.vae_scale_factor,\n            width // self.vae_scale_factor,\n        )\n        if latents is None:\n            latents = jax.random.normal(prng_seed, shape=latents_shape, dtype=self.dtype)\n        else:\n            if latents.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n\n        prng_seed, mask_prng_seed = jax.random.split(prng_seed)\n\n        masked_image_latent_dist = self.vae.apply(\n            {\"params\": params[\"vae\"]}, masked_image, method=self.vae.encode\n        ).latent_dist\n        masked_image_latents = masked_image_latent_dist.sample(key=mask_prng_seed).transpose((0, 3, 1, 2))\n        masked_image_latents = self.vae.config.scaling_factor * masked_image_latents\n        del mask_prng_seed\n\n        mask = jax.image.resize(mask, (*mask.shape[:-2], *masked_image_latents.shape[-2:]), method=\"nearest\")\n\n        # 8. Check that sizes of mask, masked image and latents match\n        num_channels_latents = self.vae.config.latent_channels\n        num_channels_mask = mask.shape[1]\n        num_channels_masked_image = masked_image_latents.shape[1]\n        if num_channels_latents + num_channels_mask + num_channels_masked_image != self.unet.config.in_channels:\n            raise ValueError(\n                f\"Incorrect configuration settings! 
The config of `pipeline.unet`: {self.unet.config} expects\"\n                f\" {self.unet.config.in_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_mask`: {num_channels_mask} + `num_channels_masked_image`: {num_channels_masked_image}\"\n                f\" = {num_channels_latents+num_channels_masked_image+num_channels_mask}. Please verify the config of\"\n                \" `pipeline.unet` or your `mask_image` or `image` input.\"\n            )\n\n        def loop_body(step, args):\n            latents, mask, masked_image_latents, scheduler_state = args\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            latents_input = jnp.concatenate([latents] * 2)\n            mask_input = jnp.concatenate([mask] * 2)\n            masked_image_latents_input = jnp.concatenate([masked_image_latents] * 2)\n\n            t = jnp.array(scheduler_state.timesteps, dtype=jnp.int32)[step]\n            timestep = jnp.broadcast_to(t, latents_input.shape[0])\n\n            latents_input = self.scheduler.scale_model_input(scheduler_state, latents_input, t)\n            # concat latents, mask, masked_image_latents in the channel dimension\n            latents_input = jnp.concatenate([latents_input, mask_input, masked_image_latents_input], axis=1)\n\n            # predict the noise residual\n            noise_pred = self.unet.apply(\n                {\"params\": params[\"unet\"]},\n                jnp.array(latents_input),\n                jnp.array(timestep, dtype=jnp.int32),\n                encoder_hidden_states=context,\n            ).sample\n            # perform guidance\n            noise_pred_uncond, noise_prediction_text = jnp.split(noise_pred, 2, axis=0)\n            noise_pred = noise_pred_uncond + guidance_scale * (noise_prediction_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents, scheduler_state = self.scheduler.step(scheduler_state, noise_pred, t, latents).to_tuple()\n            return latents, mask, masked_image_latents, scheduler_state\n\n        scheduler_state = self.scheduler.set_timesteps(\n            params[\"scheduler\"], num_inference_steps=num_inference_steps, shape=latents.shape\n        )\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * params[\"scheduler\"].init_noise_sigma\n\n        if DEBUG:\n            # run with python for loop\n            for i in range(num_inference_steps):\n                latents, mask, masked_image_latents, scheduler_state = loop_body(\n                    i, (latents, mask, masked_image_latents, scheduler_state)\n                )\n        else:\n            latents, _, _, _ = jax.lax.fori_loop(\n                0, num_inference_steps, loop_body, (latents, mask, masked_image_latents, scheduler_state)\n            )\n\n        # scale and decode the image latents with vae\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.apply({\"params\": params[\"vae\"]}, latents, method=self.vae.decode).sample\n\n        image = (image / 2 + 0.5).clip(0, 1).transpose(0, 2, 3, 1)\n        return image\n\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt_ids: jnp.array,\n        mask: jnp.array,\n        masked_image: 
jnp.array,\n        params: Union[Dict, FrozenDict],\n        prng_seed: jax.random.KeyArray,\n        num_inference_steps: int = 50,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        guidance_scale: Union[float, jnp.array] = 7.5,\n        latents: jnp.array = None,\n        neg_prompt_ids: jnp.array = None,\n        return_dict: bool = True,\n        jit: bool = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            latents (`jnp.array`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            jit (`bool`, defaults to `False`):\n                Whether to run `pmap` versions of the generation and safety scoring functions. NOTE: This argument\n                exists because `__call__` is not yet end-to-end pmap-able. It will be removed in a future release.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] instead of\n                a plain tuple.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.FlaxStableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is a list with the generated images, and the second\n            element is a list of `bool`s denoting whether the corresponding generated image likely represents\n            \"not-safe-for-work\" (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. 
Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        masked_image = jax.image.resize(masked_image, (*masked_image.shape[:-2], height, width), method=\"bicubic\")\n        mask = jax.image.resize(mask, (*mask.shape[:-2], height, width), method=\"nearest\")\n\n        if isinstance(guidance_scale, float):\n            # Convert to a tensor so each device gets a copy. Follow the prompt_ids for\n            # shape information, as they may be sharded (when `jit` is `True`), or not.\n            guidance_scale = jnp.array([guidance_scale] * prompt_ids.shape[0])\n            if len(prompt_ids.shape) > 2:\n                # Assume sharded\n                guidance_scale = guidance_scale[:, None]\n\n        if jit:\n            images = _p_generate(\n                self,\n                prompt_ids,\n                mask,\n                masked_image,\n                params,\n                prng_seed,\n                num_inference_steps,\n                height,\n                width,\n                guidance_scale,\n                latents,\n                neg_prompt_ids,\n            )\n        else:\n            images = self._generate(\n                prompt_ids,\n                mask,\n                masked_image,\n                params,\n                prng_seed,\n                num_inference_steps,\n                height,\n                width,\n                guidance_scale,\n                latents,\n                neg_prompt_ids,\n            )\n\n        if self.safety_checker is not None:\n            safety_params = params[\"safety_checker\"]\n            images_uint8_casted = (images * 255).round().astype(\"uint8\")\n            num_devices, batch_size = images.shape[:2]\n\n            images_uint8_casted = np.asarray(images_uint8_casted).reshape(num_devices * batch_size, height, width, 3)\n            images_uint8_casted, has_nsfw_concept = self._run_safety_checker(images_uint8_casted, safety_params, jit)\n            images = np.asarray(images)\n\n            # block images\n            if any(has_nsfw_concept):\n                for i, is_nsfw in enumerate(has_nsfw_concept):\n                    if is_nsfw:\n                        images[i] = np.asarray(images_uint8_casted[i])\n\n            images = images.reshape(num_devices, batch_size, height, width, 3)\n        else:\n            images = np.asarray(images)\n            has_nsfw_concept = False\n\n        if not return_dict:\n            return (images, has_nsfw_concept)\n\n        return FlaxStableDiffusionPipelineOutput(images=images, nsfw_content_detected=has_nsfw_concept)\n\n\n# Static argnums are pipe, num_inference_steps, height, width. 
A change would trigger recompilation.\n# Non-static args are (sharded) input tensors mapped over their first dimension (hence, `0`).\n@partial(\n    jax.pmap,\n    in_axes=(None, 0, 0, 0, 0, 0, None, None, None, 0, 0, 0),\n    static_broadcasted_argnums=(0, 6, 7, 8),\n)\ndef _p_generate(\n    pipe,\n    prompt_ids,\n    mask,\n    masked_image,\n    params,\n    prng_seed,\n    num_inference_steps,\n    height,\n    width,\n    guidance_scale,\n    latents,\n    neg_prompt_ids,\n):\n    return pipe._generate(\n        prompt_ids,\n        mask,\n        masked_image,\n        params,\n        prng_seed,\n        num_inference_steps,\n        height,\n        width,\n        guidance_scale,\n        latents,\n        neg_prompt_ids,\n    )\n\n\n@partial(jax.pmap, static_broadcasted_argnums=(0,))\ndef _p_get_has_nsfw_concepts(pipe, features, params):\n    return pipe._get_has_nsfw_concepts(features, params)\n\n\ndef unshard(x: jnp.ndarray):\n    # einops.rearrange(x, 'd b ... -> (d b) ...')\n    num_devices, batch_size = x.shape[:2]\n    rest = x.shape[2:]\n    return x.reshape(num_devices * batch_size, *rest)\n\n\ndef preprocess_image(image, dtype):\n    w, h = image.size\n    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n    image = image.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"])\n    image = jnp.array(image).astype(dtype) / 255.0\n    image = image[None].transpose(0, 3, 1, 2)\n    return 2.0 * image - 1.0\n\n\ndef preprocess_mask(mask, dtype):\n    w, h = mask.size\n    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n    mask = mask.resize((w, h))\n    mask = jnp.array(mask.convert(\"L\")).astype(dtype) / 255.0\n    mask = jnp.expand_dims(mask, axis=(0, 1))\n\n    return mask\n"
  },
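The Flax inpaint pipeline above is meant to be driven across all available accelerator devices: the weights are replicated, the per-example tensors are sharded along a leading device axis, and `jit=True` dispatches to the pmap-wrapped `_p_generate` defined in the file. The sketch below illustrates that wiring. It is a hedged usage sketch, not part of the repository: the checkpoint id is a placeholder, `init_image` and `mask_image` stand for PIL images loaded elsewhere, and `prepare_inputs` is assumed (from the portion of the file preceding this excerpt and the public diffusers API) to tokenize the prompts and apply the `preprocess_image`/`preprocess_mask` helpers shown above.

```python
# Hedged usage sketch -- not code from pipeline_stable_diffusion_inpaint_flax.py.
# Checkpoint id is a placeholder; `init_image`/`mask_image` are PIL images loaded
# elsewhere; `prepare_inputs` is assumed to tokenize prompts and preprocess inputs.
import jax
import jax.numpy as jnp
from flax.jax_utils import replicate
from flax.training.common_utils import shard
from diffusers import FlaxStableDiffusionInpaintPipeline

pipeline, params = FlaxStableDiffusionInpaintPipeline.from_pretrained(
    "runwayml/stable-diffusion-inpainting", dtype=jnp.bfloat16  # placeholder id
)

num_devices = jax.device_count()
prompts = ["a white cat sitting on a park bench"] * num_devices
prompt_ids, processed_image, processed_mask = pipeline.prepare_inputs(
    prompt=prompts,
    image=[init_image] * num_devices,
    mask=[mask_image] * num_devices,
)

# Replicate the weights and shard the per-example inputs over the device axis,
# matching the `in_axes` declared on `_p_generate`.
params = replicate(params)
prompt_ids = shard(prompt_ids)
processed_image = shard(processed_image)
processed_mask = shard(processed_mask)
rng = jax.random.split(jax.random.PRNGKey(0), num_devices)

output = pipeline(
    prompt_ids=prompt_ids,
    mask=processed_mask,
    masked_image=processed_image,
    params=params,
    prng_seed=rng,
    num_inference_steps=50,
    jit=True,  # route generation through the pmap'd `_p_generate` path
)
# Collapse the (devices, batch, H, W, C) output back into a flat batch of PIL images.
images = pipeline.numpy_to_pil(output.images.reshape((-1,) + output.images.shape[-3:]))
```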
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler\nfrom ...utils import deprecate, logging\nfrom ..onnx_utils import ORT_TO_NP_TYPE, OnnxRuntimeModel\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)\n\n\nclass OnnxStableDiffusionPipeline(DiffusionPipeline):\n    vae_encoder: OnnxRuntimeModel\n    vae_decoder: OnnxRuntimeModel\n    text_encoder: OnnxRuntimeModel\n    tokenizer: CLIPTokenizer\n    unet: OnnxRuntimeModel\n    scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler]\n    safety_checker: OnnxRuntimeModel\n    feature_extractor: CLIPImageProcessor\n\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae_encoder: OnnxRuntimeModel,\n        vae_decoder: OnnxRuntimeModel,\n        text_encoder: OnnxRuntimeModel,\n        tokenizer: CLIPTokenizer,\n        unet: OnnxRuntimeModel,\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n        safety_checker: OnnxRuntimeModel,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. 
Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae_encoder=vae_encoder,\n            vae_decoder=vae_decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def _encode_prompt(\n        self,\n        prompt: Union[str, List[str]],\n        num_images_per_prompt: Optional[int],\n        do_classifier_free_guidance: bool,\n        negative_prompt: Optional[str],\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # get prompt text embeddings\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"np\").input_ids\n\n            if not np.array_equal(text_input_ids, untruncated_ids):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            prompt_embeds = self.text_encoder(input_ids=text_input_ids.astype(np.int32))[0]\n\n        prompt_embeds = np.repeat(prompt_embeds, num_images_per_prompt, axis=0)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt] * batch_size\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            negative_prompt_embeds = self.text_encoder(input_ids=uncond_input.input_ids.astype(np.int32))[0]\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = np.repeat(negative_prompt_embeds, num_images_per_prompt, axis=0)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = np.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def check_inputs(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int],\n        width: Optional[int],\n        callback_steps: int,\n        negative_prompt: Optional[str] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = 512,\n        width: Optional[int] = 512,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 0.0,\n        generator: Optional[np.random.RandomState] = None,\n        latents: Optional[np.ndarray] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, np.ndarray], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`PIL.Image.Image` or List[`PIL.Image.Image`] or `torch.FloatTensor`):\n                `Image`, or tensor representing an image batch which will be upscaled. *\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`np.random.RandomState`, *optional*):\n                One or a list of [numpy generator(s)](TODO) to make generation deterministic.\n            latents (`np.ndarray`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if generator is None:\n            generator = np.random\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . 
`guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # get the initial random noise unless the user supplied it\n        latents_dtype = prompt_embeds.dtype\n        latents_shape = (batch_size * num_images_per_prompt, 4, height // 8, width // 8)\n        if latents is None:\n            latents = generator.randn(*latents_shape).astype(latents_dtype)\n        elif latents.shape != latents_shape:\n            raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n\n        # set timesteps\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        latents = latents * np.float64(self.scheduler.init_noise_sigma)\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        timestep_dtype = next(\n            (input.type for input in self.unet.model.get_inputs() if input.name == \"timestep\"), \"tensor(float)\"\n        )\n        timestep_dtype = ORT_TO_NP_TYPE[timestep_dtype]\n\n        for i, t in enumerate(self.progress_bar(self.scheduler.timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = np.concatenate([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(torch.from_numpy(latent_model_input), t)\n            latent_model_input = latent_model_input.cpu().numpy()\n\n            # predict the noise residual\n            timestep = np.array([t], dtype=timestep_dtype)\n            noise_pred = self.unet(sample=latent_model_input, timestep=timestep, encoder_hidden_states=prompt_embeds)\n            noise_pred = noise_pred[0]\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = np.split(noise_pred, 2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            scheduler_output = self.scheduler.step(\n                torch.from_numpy(noise_pred), t, torch.from_numpy(latents), **extra_step_kwargs\n            )\n            latents = scheduler_output.prev_sample.numpy()\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        latents = 1 / 0.18215 * latents\n        # image = self.vae_decoder(latent_sample=latents)[0]\n        # it seems likes there is a strange result for using half-precision vae decoder if batchsize>1\n        image = np.concatenate(\n            [self.vae_decoder(latent_sample=latents[i : i + 1])[0] for i in range(latents.shape[0])]\n    
    )\n\n        image = np.clip(image / 2 + 0.5, 0, 1)\n        image = image.transpose((0, 2, 3, 1))\n\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(\n                self.numpy_to_pil(image), return_tensors=\"np\"\n            ).pixel_values.astype(image.dtype)\n\n            images, has_nsfw_concept = [], []\n            for i in range(image.shape[0]):\n                image_i, has_nsfw_concept_i = self.safety_checker(\n                    clip_input=safety_checker_input[i : i + 1], images=image[i : i + 1]\n                )\n                images.append(image_i)\n                has_nsfw_concept.append(has_nsfw_concept_i[0])\n            image = np.concatenate(images)\n        else:\n            has_nsfw_concept = None\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n\n\nclass StableDiffusionOnnxPipeline(OnnxStableDiffusionPipeline):\n    def __init__(\n        self,\n        vae_encoder: OnnxRuntimeModel,\n        vae_decoder: OnnxRuntimeModel,\n        text_encoder: OnnxRuntimeModel,\n        tokenizer: CLIPTokenizer,\n        unet: OnnxRuntimeModel,\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n        safety_checker: OnnxRuntimeModel,\n        feature_extractor: CLIPImageProcessor,\n    ):\n        deprecation_message = \"Please use `OnnxStableDiffusionPipeline` instead of `StableDiffusionOnnxPipeline`.\"\n        deprecate(\"StableDiffusionOnnxPipeline\", \"1.0.0\", deprecation_message)\n        super().__init__(\n            vae_encoder=vae_encoder,\n            vae_decoder=vae_decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n"
  },
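For orientation, here is a short, hedged usage sketch of the `OnnxStableDiffusionPipeline` defined above. The model id, `revision="onnx"`, and the execution provider are assumptions drawn from the public diffusers documentation rather than anything stated in this file; substitute whatever ONNX export you actually have. The fixed `np.random.RandomState` simply makes the initial latents reproducible, since `__call__` falls back to the global `np.random` module when no generator is supplied.

```python
# Hedged usage sketch -- not code from pipeline_onnx_stable_diffusion.py.
# Model id, revision, and provider are assumptions; adjust to your export.
import numpy as np
from diffusers import OnnxStableDiffusionPipeline

pipe = OnnxStableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",   # placeholder model id with an ONNX export
    revision="onnx",
    provider="CPUExecutionProvider",    # or e.g. "CUDAExecutionProvider"
)

# A seeded RandomState pins down the Gaussian latents drawn in `__call__`.
generator = np.random.RandomState(0)
image = pipe(
    "a photo of an astronaut riding a horse",
    num_inference_steps=50,
    guidance_scale=7.5,   # > 1 enables the classifier-free guidance branch above
    generator=generator,
).images[0]
image.save("astronaut.png")
```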
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion_img2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler\nfrom ...utils import PIL_INTERPOLATION, deprecate, logging\nfrom ..onnx_utils import ORT_TO_NP_TYPE, OnnxRuntimeModel\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess with 8->64\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 64 for x in (w, h))  # resize to integer multiple of 64\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\nclass OnnxStableDiffusionImg2ImgPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-guided image to image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    vae_encoder: OnnxRuntimeModel\n    vae_decoder: OnnxRuntimeModel\n    text_encoder: OnnxRuntimeModel\n    tokenizer: CLIPTokenizer\n    unet: OnnxRuntimeModel\n    scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler]\n    safety_checker: OnnxRuntimeModel\n    feature_extractor: CLIPImageProcessor\n\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae_encoder: OnnxRuntimeModel,\n        vae_decoder: OnnxRuntimeModel,\n        text_encoder: OnnxRuntimeModel,\n        tokenizer: CLIPTokenizer,\n        unet: OnnxRuntimeModel,\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n        safety_checker: OnnxRuntimeModel,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae_encoder=vae_encoder,\n            vae_decoder=vae_decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_onnx_stable_diffusion.OnnxStableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt: Union[str, List[str]],\n        num_images_per_prompt: Optional[int],\n        do_classifier_free_guidance: bool,\n        negative_prompt: Optional[str],\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # get prompt text embeddings\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"np\").input_ids\n\n            if not np.array_equal(text_input_ids, untruncated_ids):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            prompt_embeds = self.text_encoder(input_ids=text_input_ids.astype(np.int32))[0]\n\n        prompt_embeds = np.repeat(prompt_embeds, num_images_per_prompt, axis=0)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt] * batch_size\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            negative_prompt_embeds = self.text_encoder(input_ids=uncond_input.input_ids.astype(np.int32))[0]\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = np.repeat(negative_prompt_embeds, num_images_per_prompt, axis=0)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = np.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def check_inputs(\n        self,\n        prompt: Union[str, List[str]],\n        callback_steps: int,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        image: Union[np.ndarray, PIL.Image.Image] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 0.0,\n        generator: Optional[np.random.RandomState] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, np.ndarray], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            image (`np.ndarray` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. This parameter will be modulated by `strength`.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. 
Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`np.random.RandomState`, *optional*):\n                A np.random.RandomState to make generation deterministic.\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: np.ndarray)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # check inputs. 
Raise error if not correct\n        self.check_inputs(prompt, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        # define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should in [0.0, 1.0] but is {strength}\")\n\n        if generator is None:\n            generator = np.random\n\n        # set timesteps\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        image = preprocess(image).cpu().numpy()\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        latents_dtype = prompt_embeds.dtype\n        image = image.astype(latents_dtype)\n        # encode the init image into latents and scale the latents\n        init_latents = self.vae_encoder(sample=image)[0]\n        init_latents = 0.18215 * init_latents\n\n        if isinstance(prompt, str):\n            prompt = [prompt]\n        if len(prompt) > init_latents.shape[0] and len(prompt) % init_latents.shape[0] == 0:\n            # expand init_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {len(prompt)} text prompts (`prompt`), but only {init_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = len(prompt) // init_latents.shape[0]\n            init_latents = np.concatenate([init_latents] * additional_image_per_prompt * num_images_per_prompt, axis=0)\n        elif len(prompt) > init_latents.shape[0] and len(prompt) % init_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {len(prompt)} text prompts.\"\n            )\n        else:\n            init_latents = np.concatenate([init_latents] * num_images_per_prompt, axis=0)\n\n        # get the original timestep using init_timestep\n        offset = self.scheduler.config.get(\"steps_offset\", 0)\n        init_timestep = int(num_inference_steps * strength) + offset\n        init_timestep = min(init_timestep, num_inference_steps)\n\n        timesteps = self.scheduler.timesteps.numpy()[-init_timestep]\n        timesteps = np.array([timesteps] * batch_size * num_images_per_prompt)\n\n        # add noise to latents using the timesteps\n        noise = generator.randn(*init_latents.shape).astype(latents_dtype)\n        init_latents = self.scheduler.add_noise(\n            torch.from_numpy(init_latents), torch.from_numpy(noise), torch.from_numpy(timesteps)\n        )\n        init_latents = init_latents.numpy()\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        latents = init_latents\n\n        t_start = max(num_inference_steps - init_timestep + offset, 0)\n        timesteps = self.scheduler.timesteps[t_start:].numpy()\n\n        timestep_dtype = next(\n            (input.type for input in self.unet.model.get_inputs() if input.name == \"timestep\"), \"tensor(float)\"\n        )\n        timestep_dtype = ORT_TO_NP_TYPE[timestep_dtype]\n\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = np.concatenate([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(torch.from_numpy(latent_model_input), t)\n            latent_model_input = latent_model_input.cpu().numpy()\n\n            # predict the noise residual\n            timestep = np.array([t], dtype=timestep_dtype)\n            noise_pred = self.unet(sample=latent_model_input, timestep=timestep, encoder_hidden_states=prompt_embeds)[\n                0\n            ]\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = np.split(noise_pred, 2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            scheduler_output = self.scheduler.step(\n             
   torch.from_numpy(noise_pred), t, torch.from_numpy(latents), **extra_step_kwargs\n            )\n            latents = scheduler_output.prev_sample.numpy()\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        latents = 1 / 0.18215 * latents\n        # image = self.vae_decoder(latent_sample=latents)[0]\n        # it seems likes there is a strange result for using half-precision vae decoder if batchsize>1\n        image = np.concatenate(\n            [self.vae_decoder(latent_sample=latents[i : i + 1])[0] for i in range(latents.shape[0])]\n        )\n\n        image = np.clip(image / 2 + 0.5, 0, 1)\n        image = image.transpose((0, 2, 3, 1))\n\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(\n                self.numpy_to_pil(image), return_tensors=\"np\"\n            ).pixel_values.astype(image.dtype)\n            # safety_checker does not support batched inputs yet\n            images, has_nsfw_concept = [], []\n            for i in range(image.shape[0]):\n                image_i, has_nsfw_concept_i = self.safety_checker(\n                    clip_input=safety_checker_input[i : i + 1], images=image[i : i + 1]\n                )\n                images.append(image_i)\n                has_nsfw_concept.append(has_nsfw_concept_i[0])\n            image = np.concatenate(images)\n        else:\n            has_nsfw_concept = None\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
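One part of the img2img pipeline above that is easy to misread is the `strength` bookkeeping: `init_timestep` decides how far into the noise schedule the encoded init image is pushed, while `t_start` decides how many of the scheduled timesteps are actually denoised. The snippet below just replays that arithmetic with illustrative numbers (a 50-step schedule, `strength=0.8`, and a scheduler whose `steps_offset` is 1); it is a standalone sketch, not code from the repository.

```python
# Standalone sketch of the strength -> timestep arithmetic used in __call__ above.
num_inference_steps = 50
strength = 0.8
steps_offset = 1  # scheduler.config.get("steps_offset", 0) for typical SD schedulers

init_timestep = min(int(num_inference_steps * strength) + steps_offset, num_inference_steps)
t_start = max(num_inference_steps - init_timestep + steps_offset, 0)

print(init_timestep)  # 41 -> init latents are noised to scheduler.timesteps[-41]
print(t_start)        # 10 -> the loop runs over timesteps[10:], i.e. 40 denoising steps
```

So with `strength=0.8`, roughly the last 80% of the schedule is executed, which is why a `strength` of 1 discards the init image entirely (full-length denoising from pure noise) and a `strength` near 0 barely changes it.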
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion_inpaint.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler\nfrom ...utils import PIL_INTERPOLATION, deprecate, logging\nfrom ..onnx_utils import ORT_TO_NP_TYPE, OnnxRuntimeModel\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nNUM_UNET_INPUT_CHANNELS = 9\nNUM_LATENT_CHANNELS = 4\n\n\ndef prepare_mask_and_masked_image(image, mask, latents_shape):\n    image = np.array(image.convert(\"RGB\").resize((latents_shape[1] * 8, latents_shape[0] * 8)))\n    image = image[None].transpose(0, 3, 1, 2)\n    image = image.astype(np.float32) / 127.5 - 1.0\n\n    image_mask = np.array(mask.convert(\"L\").resize((latents_shape[1] * 8, latents_shape[0] * 8)))\n    masked_image = image * (image_mask < 127.5)\n\n    mask = mask.resize((latents_shape[1], latents_shape[0]), PIL_INTERPOLATION[\"nearest\"])\n    mask = np.array(mask.convert(\"L\"))\n    mask = mask.astype(np.float32) / 255.0\n    mask = mask[None, None]\n    mask[mask < 0.5] = 0\n    mask[mask >= 0.5] = 1\n\n    return mask, masked_image\n\n\nclass OnnxStableDiffusionInpaintPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion. *This is an experimental feature*.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    vae_encoder: OnnxRuntimeModel\n    vae_decoder: OnnxRuntimeModel\n    text_encoder: OnnxRuntimeModel\n    tokenizer: CLIPTokenizer\n    unet: OnnxRuntimeModel\n    scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler]\n    safety_checker: OnnxRuntimeModel\n    feature_extractor: CLIPImageProcessor\n\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae_encoder: OnnxRuntimeModel,\n        vae_decoder: OnnxRuntimeModel,\n        text_encoder: OnnxRuntimeModel,\n        tokenizer: CLIPTokenizer,\n        unet: OnnxRuntimeModel,\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n        safety_checker: OnnxRuntimeModel,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n        logger.info(\"`OnnxStableDiffusionInpaintPipeline` is experimental and will very likely change in the future.\")\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae_encoder=vae_encoder,\n            vae_decoder=vae_decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_onnx_stable_diffusion.OnnxStableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt: Union[str, List[str]],\n        num_images_per_prompt: Optional[int],\n        do_classifier_free_guidance: bool,\n        negative_prompt: Optional[str],\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # get prompt text embeddings\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"np\").input_ids\n\n            if not np.array_equal(text_input_ids, untruncated_ids):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            prompt_embeds = self.text_encoder(input_ids=text_input_ids.astype(np.int32))[0]\n\n        prompt_embeds = np.repeat(prompt_embeds, num_images_per_prompt, axis=0)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt] * batch_size\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            negative_prompt_embeds = self.text_encoder(input_ids=uncond_input.input_ids.astype(np.int32))[0]\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = np.repeat(negative_prompt_embeds, num_images_per_prompt, axis=0)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = np.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_onnx_stable_diffusion.OnnxStableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int],\n        width: Optional[int],\n        callback_steps: int,\n        negative_prompt: Optional[str] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        image: PIL.Image.Image,\n        mask_image: PIL.Image.Image,\n        height: Optional[int] = 512,\n        width: Optional[int] = 512,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[np.random.RandomState] = None,\n        latents: Optional[np.ndarray] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, np.ndarray], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch which will be inpainted, *i.e.* parts of the image will\n                be masked out with `mask_image` and repainted according to `prompt`.\n            mask_image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                repainted, while black pixels will be preserved. If `mask_image` is a PIL image, it will be converted\n                to a single channel (luminance) before use. If it's a tensor, it should contain one color channel (L)\n                instead of 3, so the expected shape would be `(B, H, W, 1)`.\n            height (`int`, *optional*, defaults to 512):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to 512):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. 
Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`np.random.RandomState`, *optional*):\n                A np.random.RandomState to make generation deterministic.\n            latents (`np.ndarray`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: np.ndarray)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # check inputs. 
Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if generator is None:\n            generator = np.random\n\n        # set timesteps\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        num_channels_latents = NUM_LATENT_CHANNELS\n        latents_shape = (batch_size * num_images_per_prompt, num_channels_latents, height // 8, width // 8)\n        latents_dtype = prompt_embeds.dtype\n        if latents is None:\n            latents = generator.randn(*latents_shape).astype(latents_dtype)\n        else:\n            if latents.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n\n        # prepare mask and masked_image\n        mask, masked_image = prepare_mask_and_masked_image(image, mask_image, latents_shape[-2:])\n        mask = mask.astype(latents.dtype)\n        masked_image = masked_image.astype(latents.dtype)\n\n        masked_image_latents = self.vae_encoder(sample=masked_image)[0]\n        masked_image_latents = 0.18215 * masked_image_latents\n\n        # duplicate mask and masked_image_latents for each generation per prompt\n        mask = mask.repeat(batch_size * num_images_per_prompt, 0)\n        masked_image_latents = masked_image_latents.repeat(batch_size * num_images_per_prompt, 0)\n\n        mask = np.concatenate([mask] * 2) if do_classifier_free_guidance else mask\n        masked_image_latents = (\n            np.concatenate([masked_image_latents] * 2) if do_classifier_free_guidance else masked_image_latents\n        )\n\n        num_channels_mask = mask.shape[1]\n        num_channels_masked_image = masked_image_latents.shape[1]\n\n        unet_input_channels = NUM_UNET_INPUT_CHANNELS\n        if num_channels_latents + num_channels_mask + num_channels_masked_image != unet_input_channels:\n            raise ValueError(\n                \"Incorrect configuration settings! The config of `pipeline.unet` expects\"\n                f\" {unet_input_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_mask`: {num_channels_mask} + `num_channels_masked_image`: {num_channels_masked_image}\"\n                f\" = {num_channels_latents+num_channels_masked_image+num_channels_mask}. 
Please verify the config of\"\n                \" `pipeline.unet` or your `mask_image` or `image` input.\"\n            )\n\n        # set timesteps\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * np.float64(self.scheduler.init_noise_sigma)\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        timestep_dtype = next(\n            (input.type for input in self.unet.model.get_inputs() if input.name == \"timestep\"), \"tensor(float)\"\n        )\n        timestep_dtype = ORT_TO_NP_TYPE[timestep_dtype]\n\n        for i, t in enumerate(self.progress_bar(self.scheduler.timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = np.concatenate([latents] * 2) if do_classifier_free_guidance else latents\n            # concat latents, mask, masked_image_latnets in the channel dimension\n            latent_model_input = self.scheduler.scale_model_input(torch.from_numpy(latent_model_input), t)\n            latent_model_input = latent_model_input.cpu().numpy()\n            latent_model_input = np.concatenate([latent_model_input, mask, masked_image_latents], axis=1)\n\n            # predict the noise residual\n            timestep = np.array([t], dtype=timestep_dtype)\n            noise_pred = self.unet(sample=latent_model_input, timestep=timestep, encoder_hidden_states=prompt_embeds)[\n                0\n            ]\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = np.split(noise_pred, 2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            scheduler_output = self.scheduler.step(\n                torch.from_numpy(noise_pred), t, torch.from_numpy(latents), **extra_step_kwargs\n            )\n            latents = scheduler_output.prev_sample.numpy()\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        latents = 1 / 0.18215 * latents\n        # image = self.vae_decoder(latent_sample=latents)[0]\n        # it seems likes there is a strange result for using half-precision vae decoder if batchsize>1\n        image = np.concatenate(\n            [self.vae_decoder(latent_sample=latents[i : i + 1])[0] for i in range(latents.shape[0])]\n        )\n\n        image = np.clip(image / 2 + 0.5, 0, 1)\n        image = image.transpose((0, 2, 3, 1))\n\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(\n                self.numpy_to_pil(image), return_tensors=\"np\"\n            ).pixel_values.astype(image.dtype)\n            # safety_checker does not support batched inputs yet\n            images, has_nsfw_concept = [], []\n            for i in range(image.shape[0]):\n                
image_i, has_nsfw_concept_i = self.safety_checker(\n                    clip_input=safety_checker_input[i : i + 1], images=image[i : i + 1]\n                )\n                images.append(image_i)\n                has_nsfw_concept.append(has_nsfw_concept_i[0])\n            image = np.concatenate(images)\n        else:\n            has_nsfw_concept = None\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion_inpaint_legacy.py",
    "content": "import inspect\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler\nfrom ...utils import deprecate, logging\nfrom ..onnx_utils import ORT_TO_NP_TYPE, OnnxRuntimeModel\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef preprocess(image):\n    w, h = image.size\n    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n    image = image.resize((w, h), resample=PIL.Image.LANCZOS)\n    image = np.array(image).astype(np.float32) / 255.0\n    image = image[None].transpose(0, 3, 1, 2)\n    return 2.0 * image - 1.0\n\n\ndef preprocess_mask(mask, scale_factor=8):\n    mask = mask.convert(\"L\")\n    w, h = mask.size\n    w, h = (x - x % 32 for x in (w, h))  # resize to integer multiple of 32\n    mask = mask.resize((w // scale_factor, h // scale_factor), resample=PIL.Image.NEAREST)\n    mask = np.array(mask).astype(np.float32) / 255.0\n    mask = np.tile(mask, (4, 1, 1))\n    mask = mask[None].transpose(0, 1, 2, 3)  # what does this step do?\n    mask = 1 - mask  # repaint white, keep black\n    return mask\n\n\nclass OnnxStableDiffusionInpaintPipelineLegacy(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion. This is a *legacy feature* for Onnx pipelines to\n    provide compatibility with StableDiffusionInpaintPipelineLegacy and may be removed in the future.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    vae_encoder: OnnxRuntimeModel\n    vae_decoder: OnnxRuntimeModel\n    text_encoder: OnnxRuntimeModel\n    tokenizer: CLIPTokenizer\n    unet: OnnxRuntimeModel\n    scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler]\n    safety_checker: OnnxRuntimeModel\n    feature_extractor: CLIPImageProcessor\n\n    def __init__(\n        self,\n        vae_encoder: OnnxRuntimeModel,\n        vae_decoder: OnnxRuntimeModel,\n        text_encoder: OnnxRuntimeModel,\n        tokenizer: CLIPTokenizer,\n        unet: OnnxRuntimeModel,\n        scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],\n        safety_checker: OnnxRuntimeModel,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae_encoder=vae_encoder,\n            vae_decoder=vae_decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_onnx_stable_diffusion.OnnxStableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt: Union[str, List[str]],\n        num_images_per_prompt: Optional[int],\n        do_classifier_free_guidance: bool,\n        negative_prompt: Optional[str],\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # get prompt text embeddings\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"np\").input_ids\n\n            if not np.array_equal(text_input_ids, untruncated_ids):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            prompt_embeds = self.text_encoder(input_ids=text_input_ids.astype(np.int32))[0]\n\n        prompt_embeds = np.repeat(prompt_embeds, num_images_per_prompt, axis=0)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt] * batch_size\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"np\",\n            )\n            negative_prompt_embeds = self.text_encoder(input_ids=uncond_input.input_ids.astype(np.int32))[0]\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = np.repeat(negative_prompt_embeds, num_images_per_prompt, axis=0)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = np.concatenate([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def check_inputs(\n        self,\n        prompt,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        image: Union[np.ndarray, PIL.Image.Image] = None,\n        mask_image: Union[np.ndarray, PIL.Image.Image] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 0.0,\n        generator: Optional[np.random.RandomState] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, np.ndarray], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            image (`np.ndarray` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process. This is the image whose masked region will be inpainted.\n            mask_image (`np.ndarray` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                replaced by noise and therefore repainted, while black pixels will be preserved. If `mask_image` is a\n                PIL image, it will be converted to a single channel (luminance) before use. If it's a tensor, it should\n                contain one color channel (L) instead of 3, so the expected shape would be `(B, H, W, 1)`.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. This parameter will be modulated by `strength`.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. 
of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages the model to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`np.random.RandomState`, *optional*):\n                A np.random.RandomState to make generation deterministic.\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: np.ndarray)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # check inputs. 
Raise error if not correct\n        self.check_inputs(prompt, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        # define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should be in [0.0, 1.0] but is {strength}\")\n\n        if generator is None:\n            generator = np.random\n\n        # set timesteps\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        if isinstance(image, PIL.Image.Image):\n            image = preprocess(image)\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        latents_dtype = prompt_embeds.dtype\n        image = image.astype(latents_dtype)\n\n        # encode the init image into latents and scale the latents\n        init_latents = self.vae_encoder(sample=image)[0]\n        init_latents = 0.18215 * init_latents\n\n        # Expand init_latents for batch_size and num_images_per_prompt\n        init_latents = np.concatenate([init_latents] * num_images_per_prompt, axis=0)\n        init_latents_orig = init_latents\n\n        # preprocess mask\n        if not isinstance(mask_image, np.ndarray):\n            mask_image = preprocess_mask(mask_image, 8)\n        mask_image = mask_image.astype(latents_dtype)\n        mask = np.concatenate([mask_image] * num_images_per_prompt, axis=0)\n\n        # check sizes\n        if not mask.shape == init_latents.shape:\n            raise ValueError(\"The mask and image should be the same size!\")\n\n        # get the original timestep using init_timestep\n        offset = self.scheduler.config.get(\"steps_offset\", 0)\n        init_timestep = int(num_inference_steps * strength) + offset\n        init_timestep = min(init_timestep, num_inference_steps)\n\n        timesteps = self.scheduler.timesteps.numpy()[-init_timestep]\n        timesteps = np.array([timesteps] * batch_size * num_images_per_prompt)\n\n        # add noise to latents using the timesteps\n        noise = generator.randn(*init_latents.shape).astype(latents_dtype)\n        init_latents = self.scheduler.add_noise(\n            torch.from_numpy(init_latents), torch.from_numpy(noise), torch.from_numpy(timesteps)\n        )\n        init_latents = init_latents.numpy()\n\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η 
in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        latents = init_latents\n\n        t_start = max(num_inference_steps - init_timestep + offset, 0)\n        timesteps = self.scheduler.timesteps[t_start:].numpy()\n        timestep_dtype = next(\n            (input.type for input in self.unet.model.get_inputs() if input.name == \"timestep\"), \"tensor(float)\"\n        )\n        timestep_dtype = ORT_TO_NP_TYPE[timestep_dtype]\n\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = np.concatenate([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            timestep = np.array([t], dtype=timestep_dtype)\n            noise_pred = self.unet(sample=latent_model_input, timestep=timestep, encoder_hidden_states=prompt_embeds)[\n                0\n            ]\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = np.split(noise_pred, 2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(\n                torch.from_numpy(noise_pred), t, torch.from_numpy(latents), **extra_step_kwargs\n            ).prev_sample\n\n            latents = latents.numpy()\n\n            init_latents_proper = self.scheduler.add_noise(\n                torch.from_numpy(init_latents_orig), torch.from_numpy(noise), torch.from_numpy(np.array([t]))\n            )\n\n            init_latents_proper = init_latents_proper.numpy()\n\n            latents = (init_latents_proper * mask) + (latents * (1 - mask))\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        latents = 1 / 0.18215 * latents\n        # image = self.vae_decoder(latent_sample=latents)[0]\n        # it seems likes there is a strange result for using half-precision vae decoder if batchsize>1\n        image = np.concatenate(\n            [self.vae_decoder(latent_sample=latents[i : i + 1])[0] for i in range(latents.shape[0])]\n        )\n\n        image = np.clip(image / 2 + 0.5, 0, 1)\n        image = image.transpose((0, 2, 3, 1))\n\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(\n                self.numpy_to_pil(image), return_tensors=\"np\"\n            ).pixel_values.astype(image.dtype)\n            # There will throw an error if use safety_checker batchsize>1\n            images, has_nsfw_concept = [], []\n            for i in range(image.shape[0]):\n                image_i, has_nsfw_concept_i = self.safety_checker(\n                    clip_input=safety_checker_input[i : i + 1], images=image[i : i + 1]\n                )\n                images.append(image_i)\n                has_nsfw_concept.append(has_nsfw_concept_i[0])\n            image = np.concatenate(images)\n        else:\n            has_nsfw_concept = None\n\n        if output_type == \"pil\":\n       
     image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_onnx_stable_diffusion_upscale.py",
    "content": "from logging import getLogger\nfrom typing import Any, Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\n\nfrom ...schedulers import DDPMScheduler\nfrom ..onnx_utils import ORT_TO_NP_TYPE, OnnxRuntimeModel\nfrom ..pipeline_utils import ImagePipelineOutput\nfrom . import StableDiffusionUpscalePipeline\n\n\nlogger = getLogger(__name__)\n\n\nNUM_LATENT_CHANNELS = 4\nNUM_UNET_INPUT_CHANNELS = 7\n\nORT_TO_PT_TYPE = {\n    \"float16\": torch.float16,\n    \"float32\": torch.float32,\n}\n\n\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 64 for x in (w, h))  # resize to integer multiple of 32\n\n        image = [np.array(i.resize((w, h)))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n\n    return image\n\n\nclass OnnxStableDiffusionUpscalePipeline(StableDiffusionUpscalePipeline):\n    def __init__(\n        self,\n        vae: OnnxRuntimeModel,\n        text_encoder: OnnxRuntimeModel,\n        tokenizer: Any,\n        unet: OnnxRuntimeModel,\n        low_res_scheduler: DDPMScheduler,\n        scheduler: Any,\n        max_noise_level: int = 350,\n    ):\n        super().__init__(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            low_res_scheduler=low_res_scheduler,\n            scheduler=scheduler,\n            safety_checker=None,\n            feature_extractor=None,\n            watermarker=None,\n            max_noise_level=max_noise_level,\n        )\n\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        image: Union[torch.FloatTensor, PIL.Image.Image, List[PIL.Image.Image]],\n        num_inference_steps: int = 75,\n        guidance_scale: float = 9.0,\n        noise_level: int = 20,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[np.ndarray] = None,\n        negative_prompt_embeds: Optional[np.ndarray] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            image (`np.ndarray` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. 
\n            guidance_scale (`float`, *optional*, defaults to 9.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            noise_level (`int`, *optional*, defaults to 20):\n                The amount of noise added to the low-resolution conditioning `image` (via `low_res_scheduler`) before\n                denoising. Must not be larger than `max_noise_level`.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`np.ndarray`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`:\n            [`~pipelines.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple`. When returning a tuple,\n            the first element is a list with the generated images.\n        \"\"\"\n\n        # 1. Check inputs\n        self.check_inputs(prompt, image, noise_level, callback_steps)\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        text_embeddings = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        latents_dtype = ORT_TO_PT_TYPE[str(text_embeddings.dtype)]\n\n        # 4. Preprocess image\n        image = preprocess(image)\n        image = image.cpu()\n\n        # 5. Set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 6. Add noise to image\n        noise_level = torch.tensor([noise_level], dtype=torch.long, device=device)\n        noise = torch.randn(image.shape, generator=generator, device=device, dtype=latents_dtype)\n        image = self.low_res_scheduler.add_noise(image, noise, noise_level)\n\n        batch_multiplier = 2 if do_classifier_free_guidance else 1\n        image = np.concatenate([image] * batch_multiplier * num_images_per_prompt)\n        noise_level = np.concatenate([noise_level] * image.shape[0])\n\n        # 7. Prepare latent variables\n        height, width = image.shape[2:]\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            NUM_LATENT_CHANNELS,\n            height,\n            width,\n            latents_dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 8. Check that sizes of image and latents match\n        num_channels_image = image.shape[1]\n        if NUM_LATENT_CHANNELS + num_channels_image != NUM_UNET_INPUT_CHANNELS:\n            raise ValueError(\n                \"Incorrect configuration settings! The config of `pipeline.unet` expects\"\n                f\" {NUM_UNET_INPUT_CHANNELS} but received `num_channels_latents`: {NUM_LATENT_CHANNELS} +\"\n                f\" `num_channels_image`: {num_channels_image} \"\n                f\" = {NUM_LATENT_CHANNELS+num_channels_image}. 
Please verify the config of\"\n                \" `pipeline.unet` or your `image` input.\"\n            )\n\n        # 9. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        timestep_dtype = next(\n            (input.type for input in self.unet.model.get_inputs() if input.name == \"timestep\"), \"tensor(float)\"\n        )\n        timestep_dtype = ORT_TO_NP_TYPE[timestep_dtype]\n\n        # 10. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = np.concatenate([latents] * 2) if do_classifier_free_guidance else latents\n\n                # concat latents and the noised low-resolution image in the channel dimension\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                latent_model_input = np.concatenate([latent_model_input, image], axis=1)\n\n                # timestep to tensor\n                timestep = np.array([t], dtype=timestep_dtype)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    sample=latent_model_input,\n                    timestep=timestep,\n                    encoder_hidden_states=text_embeddings,\n                    class_labels=noise_level.astype(np.int64),\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = np.split(noise_pred, 2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(\n                    torch.from_numpy(noise_pred), t, latents, **extra_step_kwargs\n                ).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 11. Post-processing\n        image = self.decode_latents(latents.float())\n\n        # 12. 
Convert to PIL\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n\n    def decode_latents(self, latents):\n        latents = 1 / 0.08333 * latents\n        image = self.vae(latent_sample=latents)[0]\n        image = np.clip(image / 2 + 0.5, 0, 1)\n        image = image.transpose((0, 2, 3, 1))\n        return image\n\n    def _encode_prompt(\n        self,\n        prompt: Union[str, List[str]],\n        device,\n        num_images_per_prompt: Optional[int],\n        do_classifier_free_guidance: bool,\n        negative_prompt: Optional[str],\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            # no positional arguments to text_encoder\n            prompt_embeds = self.text_encoder(\n                input_ids=text_input_ids.int().to(device),\n                # attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt\n        prompt_embeds = np.repeat(prompt_embeds, num_images_per_prompt, axis=0)\n        prompt_embeds = prompt_embeds.reshape(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type as `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but 
`prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            # if hasattr(uncond_input, \"attention_mask\"):\n            #     attention_mask = uncond_input.attention_mask.to(device)\n            # else:\n            #     attention_mask = None\n\n            uncond_embeddings = self.text_encoder(\n                input_ids=uncond_input.input_ids.int().to(device),\n                # attention_mask=attention_mask,\n            )\n            uncond_embeddings = uncond_embeddings[0]\n\n        if do_classifier_free_guidance:\n            seq_len = uncond_embeddings.shape[1]\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt)\n            uncond_embeddings = uncond_embeddings.reshape(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = np.concatenate([uncond_embeddings, prompt_embeds])\n\n        return prompt_embeds\n"
  },
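A minimal usage sketch for the ONNX upscaler defined above may help here. It is illustrative only: the checkpoint id `ssube/stable-diffusion-x4-upscaler-onnx`, the file names, the top-level import path, and the `provider` keyword (used the same way as by the other ONNX pipelines) are assumptions rather than guarantees of this repository; any ONNX export of the Stable Diffusion x4 upscaler with the usual `vae`/`text_encoder`/`unet` subfolders should load the same way through `from_pretrained`.

```py
# Hypothetical usage sketch; checkpoint id, file names, and provider are assumptions.
from PIL import Image

from diffusers import OnnxStableDiffusionUpscalePipeline

pipe = OnnxStableDiffusionUpscalePipeline.from_pretrained(
    "ssube/stable-diffusion-x4-upscaler-onnx",  # assumed ONNX export of the x4 upscaler
    provider="CPUExecutionProvider",  # or "CUDAExecutionProvider" with onnxruntime-gpu
)

# preprocess() above snaps sizes down to a multiple of 64, so 128x128 passes through unchanged
low_res = Image.open("low_res_cat.png").convert("RGB").resize((128, 128))

result = pipe(
    prompt="a white cat",
    image=low_res,
    num_inference_steps=75,  # default from the __call__ signature
    guidance_scale=9.0,      # > 1.0 enables classifier-free guidance
    noise_level=20,          # noise added to the low-res image via low_res_scheduler
)
result.images[0].save("upscaled_cat.png")
```

Unlike the torch upscale pipeline, this class returns an `ImagePipelineOutput`, so the upscaled images live under `.images` and there is no `nsfw_content_detected` field, since the safety checker is dropped in `__init__`.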
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import FromCkptMixin, LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import StableDiffusionPipeline\n\n        >>> pipe = StableDiffusionPipeline.from_pretrained(\"runwayml/stable-diffusion-v1-5\", torch_dtype=torch.float16)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"a photo of an astronaut riding a horse on mars\"\n        >>> image = pipe(prompt).images[0]\n        ```\n\"\"\"\n\n\nclass StableDiffusionPipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin, FromCkptMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n        - *Ckpt*: [`loaders.FromCkptMixin.from_ckpt`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. 
If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
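The docstrings in `pipeline_stable_diffusion.py` above describe negative prompts, deterministic generators, step callbacks, and the CPU-offload modes separately; the sketch below ties them together. It relies only on methods defined in that file plus `from_pretrained`, reuses the model id from its example docstring, and assumes `accelerate>=0.17.0` is installed (which `enable_model_cpu_offload` itself checks for); treat it as an illustration rather than a canonical recipe.

```py
import torch

from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
)
# Move one whole sub-model at a time onto the GPU instead of pipe.to("cuda"):
# lower peak memory than keeping everything resident, faster than sequential offload.
pipe.enable_model_cpu_offload()


def on_step(step: int, timestep: int, latents: torch.FloatTensor) -> None:
    # Called every `callback_steps` steps with the current (still noisy) latents.
    print(f"step {step:03d}, timestep {int(timestep)}, latents {tuple(latents.shape)}")


image = pipe(
    prompt="a photo of an astronaut riding a horse on mars",
    negative_prompt="blurry, low quality, deformed",
    num_inference_steps=50,
    guidance_scale=7.5,  # > 1.0 turns on classifier-free guidance
    generator=torch.Generator("cpu").manual_seed(0),
    callback=on_step,
    callback_steps=10,
).images[0]
image.save("astronaut.png")
```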
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_attend_and_excite.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport math\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\nfrom torch.nn import functional as F\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...models.attention_processor import Attention\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, is_accelerate_version, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import StableDiffusionAttendAndExcitePipeline\n\n        >>> pipe = StableDiffusionAttendAndExcitePipeline.from_pretrained(\n        ...     \"CompVis/stable-diffusion-v1-4\", torch_dtype=torch.float16\n        ... ).to(\"cuda\")\n\n\n        >>> prompt = \"a cat and a frog\"\n\n        >>> # use get_indices function to find out indices of the tokens you want to alter\n        >>> pipe.get_indices(prompt)\n        {0: '<|startoftext|>', 1: 'a</w>', 2: 'cat</w>', 3: 'and</w>', 4: 'a</w>', 5: 'frog</w>', 6: '<|endoftext|>'}\n\n        >>> token_indices = [2, 5]\n        >>> seed = 6141\n        >>> generator = torch.Generator(\"cuda\").manual_seed(seed)\n\n        >>> images = pipe(\n        ...     prompt=prompt,\n        ...     token_indices=token_indices,\n        ...     guidance_scale=7.5,\n        ...     generator=generator,\n        ...     num_inference_steps=50,\n        ...     max_iter_to_alter=25,\n        ... 
).images\n\n        >>> image = images[0]\n        >>> image.save(f\"../images/{prompt}_{seed}.png\")\n        ```\n\"\"\"\n\n\nclass AttentionStore:\n    @staticmethod\n    def get_empty_store():\n        return {\"down\": [], \"mid\": [], \"up\": []}\n\n    def __call__(self, attn, is_cross: bool, place_in_unet: str):\n        if self.cur_att_layer >= 0 and is_cross:\n            if attn.shape[1] == np.prod(self.attn_res):\n                self.step_store[place_in_unet].append(attn)\n\n        self.cur_att_layer += 1\n        if self.cur_att_layer == self.num_att_layers:\n            self.cur_att_layer = 0\n            self.between_steps()\n\n    def between_steps(self):\n        self.attention_store = self.step_store\n        self.step_store = self.get_empty_store()\n\n    def get_average_attention(self):\n        average_attention = self.attention_store\n        return average_attention\n\n    def aggregate_attention(self, from_where: List[str]) -> torch.Tensor:\n        \"\"\"Aggregates the attention across the different layers and heads at the specified resolution.\"\"\"\n        out = []\n        attention_maps = self.get_average_attention()\n        for location in from_where:\n            for item in attention_maps[location]:\n                cross_maps = item.reshape(-1, self.attn_res[0], self.attn_res[1], item.shape[-1])\n                out.append(cross_maps)\n        out = torch.cat(out, dim=0)\n        out = out.sum(0) / out.shape[0]\n        return out\n\n    def reset(self):\n        self.cur_att_layer = 0\n        self.step_store = self.get_empty_store()\n        self.attention_store = {}\n\n    def __init__(self, attn_res):\n        \"\"\"\n        Initialize an empty AttentionStore :param step_index: used to visualize only a specific step in the diffusion\n        process\n        \"\"\"\n        self.num_att_layers = -1\n        self.cur_att_layer = 0\n        self.step_store = self.get_empty_store()\n        self.attention_store = {}\n        self.curr_step_index = 0\n        self.attn_res = attn_res\n\n\nclass AttendExciteAttnProcessor:\n    def __init__(self, attnstore, place_in_unet):\n        super().__init__()\n        self.attnstore = attnstore\n        self.place_in_unet = place_in_unet\n\n    def __call__(self, attn: Attention, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        batch_size, sequence_length, _ = hidden_states.shape\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n\n        query = attn.to_q(hidden_states)\n\n        is_cross = encoder_hidden_states is not None\n        encoder_hidden_states = encoder_hidden_states if encoder_hidden_states is not None else hidden_states\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        query = attn.head_to_batch_dim(query)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n\n        # only need to store attention maps during the Attend and Excite process\n        if attention_probs.requires_grad:\n            self.attnstore(attention_probs, is_cross, self.place_in_unet)\n\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        return 
hidden_states\n\n\nclass StableDiffusionAttendAndExcitePipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion and Attend and Excite.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        indices,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        indices_is_list_ints = isinstance(indices, list) and isinstance(indices[0], int)\n        indices_is_list_list_ints = (\n            isinstance(indices, list) and isinstance(indices[0], list) and isinstance(indices[0][0], int)\n        )\n\n        if not indices_is_list_ints and not indices_is_list_list_ints:\n            raise TypeError(\"`indices` must be a list of ints or a list of a list of ints\")\n\n        if indices_is_list_ints:\n            indices_batch_size = 1\n        elif indices_is_list_list_ints:\n            indices_batch_size = len(indices)\n\n        if prompt is not None and isinstance(prompt, str):\n            prompt_batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            prompt_batch_size = len(prompt)\n        elif prompt_embeds is not None:\n            prompt_batch_size = prompt_embeds.shape[0]\n\n        if indices_batch_size != prompt_batch_size:\n            raise ValueError(\n                f\"indices batch size must be same as prompt batch size. indices batch size: {indices_batch_size}, prompt batch size: {prompt_batch_size}\"\n            )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @staticmethod\n    def _compute_max_attention_per_index(\n        attention_maps: torch.Tensor,\n        indices: List[int],\n    ) -> List[torch.Tensor]:\n        \"\"\"Computes the maximum attention value for each of the tokens we wish to alter.\"\"\"\n        attention_for_text = attention_maps[:, :, 1:-1]\n        attention_for_text *= 100\n        attention_for_text = torch.nn.functional.softmax(attention_for_text, dim=-1)\n\n        # Shift indices since we removed the first token\n        indices = [index - 1 for index in indices]\n\n        # Extract the maximum values\n        max_indices_list = []\n        for i in indices:\n            image = attention_for_text[:, :, i]\n            smoothing = GaussianSmoothing().to(attention_maps.device)\n            input = F.pad(image.unsqueeze(0).unsqueeze(0), (1, 1, 1, 1), mode=\"reflect\")\n            image = smoothing(input).squeeze(0).squeeze(0)\n            max_indices_list.append(image.max())\n        return max_indices_list\n\n    def _aggregate_and_get_max_attention_per_token(\n        self,\n        indices: List[int],\n    ):\n        \"\"\"Aggregates the attention for each token and computes the max activation value for each token to alter.\"\"\"\n        attention_maps = self.attention_store.aggregate_attention(\n            from_where=(\"up\", \"down\", \"mid\"),\n        )\n        max_attention_per_index = self._compute_max_attention_per_index(\n            attention_maps=attention_maps,\n            indices=indices,\n        )\n        return max_attention_per_index\n\n    @staticmethod\n    def _compute_loss(max_attention_per_index: List[torch.Tensor]) -> torch.Tensor:\n        \"\"\"Computes the attend-and-excite loss using the maximum attention value for each token.\"\"\"\n        losses = [max(0, 1.0 - curr_max) for curr_max in max_attention_per_index]\n        loss = max(losses)\n        return loss\n\n    @staticmethod\n    def _update_latent(latents: torch.Tensor, loss: torch.Tensor, step_size: float) -> torch.Tensor:\n        \"\"\"Update the latent according to the computed loss.\"\"\"\n        grad_cond = torch.autograd.grad(loss.requires_grad_(True), [latents], retain_graph=True)[0]\n        latents = latents - step_size * grad_cond\n        return latents\n\n    def _perform_iterative_refinement_step(\n        self,\n        latents: torch.Tensor,\n        indices: List[int],\n        loss: torch.Tensor,\n        threshold: float,\n        text_embeddings: torch.Tensor,\n        step_size: float,\n        t: int,\n        max_refinement_steps: int = 20,\n    ):\n        \"\"\"\n        Performs the iterative latent refinement introduced in the paper. 
Here, we continuously update the latent code\n        according to our loss objective until the given threshold is reached for all tokens.\n        \"\"\"\n        iteration = 0\n        target_loss = max(0, 1.0 - threshold)\n        while loss > target_loss:\n            iteration += 1\n\n            latents = latents.clone().detach().requires_grad_(True)\n            self.unet(latents, t, encoder_hidden_states=text_embeddings).sample\n            self.unet.zero_grad()\n\n            # Get max activation value for each subject token\n            max_attention_per_index = self._aggregate_and_get_max_attention_per_token(\n                indices=indices,\n            )\n\n            loss = self._compute_loss(max_attention_per_index)\n\n            if loss != 0:\n                latents = self._update_latent(latents, loss, step_size)\n\n            logger.info(f\"\\t Try {iteration}. loss: {loss}\")\n\n            if iteration >= max_refinement_steps:\n                logger.info(f\"\\t Exceeded max number of iterations ({max_refinement_steps})! \")\n                break\n\n        # Run one more time but don't compute gradients and update the latents.\n        # We just need to compute the new loss - the grad update will occur below\n        latents = latents.clone().detach().requires_grad_(True)\n        _ = self.unet(latents, t, encoder_hidden_states=text_embeddings).sample\n        self.unet.zero_grad()\n\n        # Get max activation value for each subject token\n        max_attention_per_index = self._aggregate_and_get_max_attention_per_token(\n            indices=indices,\n        )\n        loss = self._compute_loss(max_attention_per_index)\n        logger.info(f\"\\t Finished with loss of: {loss}\")\n        return loss, latents, max_attention_per_index\n\n    def register_attention_control(self):\n        attn_procs = {}\n        cross_att_count = 0\n        for name in self.unet.attn_processors.keys():\n            if name.startswith(\"mid_block\"):\n                place_in_unet = \"mid\"\n            elif name.startswith(\"up_blocks\"):\n                place_in_unet = \"up\"\n            elif name.startswith(\"down_blocks\"):\n                place_in_unet = \"down\"\n            else:\n                continue\n\n            cross_att_count += 1\n            attn_procs[name] = AttendExciteAttnProcessor(attnstore=self.attention_store, place_in_unet=place_in_unet)\n\n        self.unet.set_attn_processor(attn_procs)\n        self.attention_store.num_att_layers = cross_att_count\n\n    def get_indices(self, prompt: str) -> Dict[str, int]:\n        \"\"\"Utility function to list the indices of the tokens you wish to alte\"\"\"\n        ids = self.tokenizer(prompt).input_ids\n        indices = {i: tok for tok, i in zip(self.tokenizer.convert_ids_to_tokens(ids), range(len(ids)))}\n        return indices\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        token_indices: Union[List[int], List[List[int]]],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: int = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = 
None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        max_iter_to_alter: int = 25,\n        thresholds: dict = {0: 0.05, 10: 0.5, 20: 0.8},\n        scale_factor: int = 20,\n        attn_res: Optional[Tuple[int]] = (16, 16),\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            token_indices (`List[int]`):\n                The token indices to alter with attend-and-excite.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. 
Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            max_iter_to_alter (`int`, *optional*, defaults to `25`):\n                Number of denoising steps to apply attend-and-excite. The first `max_iter_to_alter` denoising steps are\n                where the attend-and-excite is applied. I.e. if `max_iter_to_alter` is 25 and there are a total of `30`\n                denoising steps, the first 25 denoising steps will apply attend-and-excite and the last 5 will not\n                apply attend-and-excite.\n            thresholds (`dict`, *optional*, defaults to `{0: 0.05, 10: 0.5, 20: 0.8}`):\n                Dictionary defining the iterations and desired thresholds to apply iterative latent refinement in.\n            scale_factor (`int`, *optional*, defaults to 20):\n                Scale factor that controls the step size of each Attend and Excite update.\n            attn_res (`tuple`, *optional*, defaults to a value computed from `width` and `height`):\n                The 2D resolution of the semantic attention map.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # 0. 
Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt,\n            token_indices,\n            height,\n            width,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        if attn_res is None:\n            attn_res = int(np.ceil(width / 32)), int(np.ceil(height / 32))\n        self.attention_store = AttentionStore(attn_res)\n        self.register_attention_control()\n\n        # default config for step size from original repo\n        scale_range = np.linspace(1.0, 0.5, len(self.scheduler.timesteps))\n        step_size = scale_factor * np.sqrt(scale_range)\n\n        text_embeddings = (\n            prompt_embeds[batch_size * num_images_per_prompt :] if do_classifier_free_guidance else prompt_embeds\n        )\n\n        if isinstance(token_indices[0], int):\n            token_indices = [token_indices]\n\n        indices = []\n\n        for ind in token_indices:\n            indices = indices + [ind] * num_images_per_prompt\n\n        # 7. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # Attend and excite process\n                with torch.enable_grad():\n                    latents = latents.clone().detach().requires_grad_(True)\n                    updated_latents = []\n                    for latent, index, text_embedding in zip(latents, indices, text_embeddings):\n                        # Forward pass of denoising with text conditioning\n                        latent = latent.unsqueeze(0)\n                        text_embedding = text_embedding.unsqueeze(0)\n\n                        self.unet(\n                            latent,\n                            t,\n                            encoder_hidden_states=text_embedding,\n                            cross_attention_kwargs=cross_attention_kwargs,\n                        ).sample\n                        self.unet.zero_grad()\n\n                        # Get max activation value for each subject token\n                        max_attention_per_index = self._aggregate_and_get_max_attention_per_token(\n                            indices=index,\n                        )\n\n                        loss = self._compute_loss(max_attention_per_index=max_attention_per_index)\n\n                        # If this is an iterative refinement step, verify we have reached the desired threshold for all\n                        if i in thresholds.keys() and loss > 1.0 - thresholds[i]:\n                            loss, latent, max_attention_per_index = self._perform_iterative_refinement_step(\n                                latents=latent,\n                                indices=index,\n                                loss=loss,\n                                threshold=thresholds[i],\n                                text_embeddings=text_embedding,\n                                step_size=step_size[i],\n                                t=t,\n                            )\n\n                        # Perform gradient update\n                        if i < max_iter_to_alter:\n                            if loss != 0:\n                                latent = self._update_latent(\n                                    latents=latent,\n                                    loss=loss,\n                                    step_size=step_size[i],\n                                )\n                            logger.info(f\"Iteration {i} | Loss: {loss:0.4f}\")\n\n                        updated_latents.append(latent)\n\n                    latents = torch.cat(updated_latents, dim=0)\n\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * 
(noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 8. Post-processing\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n\n\nclass GaussianSmoothing(torch.nn.Module):\n    \"\"\"\n    Apply gaussian smoothing on a 1d, 2d or 3d tensor. Filtering is performed separately for each channel in the input\n    using a depthwise convolution.\n\n    Arguments:\n        channels (int, sequence): Number of channels of the input tensors. Output will\n            have this number of channels as well.\n        kernel_size (int, sequence): Size of the gaussian kernel.\n        sigma (float, sequence): Standard deviation of the gaussian kernel.\n        dim (int, optional): The number of dimensions of the data.\n            Default value is 2 (spatial).\n    \"\"\"\n\n    # channels=1, kernel_size=kernel_size, sigma=sigma, dim=2\n    def __init__(\n        self,\n        channels: int = 1,\n        kernel_size: int = 3,\n        sigma: float = 0.5,\n        dim: int = 2,\n    ):\n        super().__init__()\n\n        if isinstance(kernel_size, int):\n            kernel_size = [kernel_size] * dim\n        if isinstance(sigma, float):\n            sigma = [sigma] * dim\n\n        # The gaussian kernel is the product of the\n        # gaussian function of each dimension.\n        kernel = 1\n        meshgrids = torch.meshgrid([torch.arange(size, dtype=torch.float32) for size in kernel_size])\n        for size, std, mgrid in zip(kernel_size, sigma, meshgrids):\n            mean = (size - 1) / 2\n            kernel *= 1 / (std * math.sqrt(2 * math.pi)) * torch.exp(-(((mgrid - mean) / (2 * std)) ** 2))\n\n        # Make sure sum of values in gaussian kernel equals 1.\n        kernel = kernel / torch.sum(kernel)\n\n        # Reshape to depthwise convolutional weight\n        kernel = kernel.view(1, 1, *kernel.size())\n        kernel = kernel.repeat(channels, *[1] * (kernel.dim() - 1))\n\n        self.register_buffer(\"weight\", kernel)\n        self.groups = channels\n\n        if dim == 1:\n            self.conv = F.conv1d\n        elif dim == 2:\n            self.conv = F.conv2d\n        elif dim == 3:\n            self.conv = F.conv3d\n        else:\n            raise RuntimeError(\"Only 1, 2 and 3 dimensions are supported. 
Received {}.\".format(dim))\n\n    def forward(self, input):\n        \"\"\"\n        Arguments:\n        Apply gaussian filter to input.\n            input (torch.Tensor): Input to apply gaussian filter on.\n        Returns:\n            filtered (torch.Tensor): Filtered output.\n        \"\"\"\n        return self.conv(input, weight=self.weight.to(input.dtype), groups=self.groups)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_controlnet.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# NOTE: This file is deprecated and will be removed in a future version.\n# It only exists so that temporarely `from diffusers.pipelines import DiffusionPipeline` works\nfrom ...utils import deprecate\nfrom ..controlnet.multicontrolnet import MultiControlNetModel  # noqa: F401\nfrom ..controlnet.pipeline_controlnet import StableDiffusionControlNetPipeline  # noqa: F401\n\n\ndeprecate(\n    \"stable diffusion controlnet\",\n    \"0.22.0\",\n    \"Importing `StableDiffusionControlNetPipeline` or `MultiControlNetModel` from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_controlnet is deprecated. Please import `from diffusers import StableDiffusionControlNetPipeline` instead.\",\n    standard_warn=False,\n    stacklevel=3,\n)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_depth2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport contextlib\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPTextModel, CLIPTokenizer, DPTFeatureExtractor, DPTForDepthEstimation\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import PIL_INTERPOLATION, deprecate, is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\nclass StableDiffusionDepth2ImgPipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image to image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        depth_estimator: DPTForDepthEstimation,\n        feature_extractor: DPTFeatureExtractor,\n    ):\n        super().__init__()\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            depth_estimator=depth_estimator,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.depth_estimator]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.check_inputs\n    def check_inputs(\n        self, prompt, strength, callback_steps, negative_prompt=None, prompt_embeds=None, negative_prompt_embeds=None\n    ):\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should in [0.0, 1.0] but is {strength}\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.prepare_latents\n    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        batch_size = batch_size * num_images_per_prompt\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            init_latents = [\n                self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)\n            ]\n            init_latents = torch.cat(init_latents, dim=0)\n        else:\n            init_latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0:\n            # expand init_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {batch_size} text prompts (`prompt`), but only {init_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = batch_size // init_latents.shape[0]\n            init_latents = torch.cat([init_latents] * additional_image_per_prompt, dim=0)\n        elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts.\"\n            )\n        else:\n            init_latents = torch.cat([init_latents], dim=0)\n\n        shape = init_latents.shape\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # get latents\n        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)\n        latents = init_latents\n\n        return latents\n\n    def prepare_depth_map(self, image, depth_map, batch_size, do_classifier_free_guidance, dtype, device):\n        if isinstance(image, PIL.Image.Image):\n            image = [image]\n        else:\n            image = list(image)\n\n        if isinstance(image[0], PIL.Image.Image):\n            width, height = image[0].size\n        else:\n            height, width = image[0].shape[-2:]\n\n        if depth_map is None:\n            pixel_values = self.feature_extractor(images=image, return_tensors=\"pt\").pixel_values\n            pixel_values = pixel_values.to(device=device)\n            # The DPT-Hybrid model uses batch-norm layers which are not compatible with fp16.\n            # So we use `torch.autocast` here for half precision inference.\n            context_manger = torch.autocast(\"cuda\", dtype=dtype) if device.type == \"cuda\" else contextlib.nullcontext()\n            with context_manger:\n                depth_map = self.depth_estimator(pixel_values).predicted_depth\n        else:\n            depth_map = depth_map.to(device=device, dtype=dtype)\n\n        depth_map = torch.nn.functional.interpolate(\n            depth_map.unsqueeze(1),\n            size=(height // self.vae_scale_factor, width // self.vae_scale_factor),\n            mode=\"bicubic\",\n            align_corners=False,\n        )\n\n        depth_min = torch.amin(depth_map, dim=[1, 2, 3], keepdim=True)\n        depth_max = torch.amax(depth_map, dim=[1, 2, 3], keepdim=True)\n        depth_map = 2.0 * (depth_map - depth_min) / (depth_max - depth_min) - 1.0\n        depth_map = depth_map.to(dtype)\n\n        # duplicate mask and masked_image_latents for each generation per prompt, using mps friendly method\n        if depth_map.shape[0] < batch_size:\n            repeat_by = batch_size // depth_map.shape[0]\n            depth_map = depth_map.repeat(repeat_by, 1, 1, 1)\n\n        depth_map = torch.cat([depth_map] * 2) if do_classifier_free_guidance else depth_map\n        return depth_map\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        depth_map: Optional[torch.FloatTensor] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 
0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. This parameter will be modulated by `strength`.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> import torch\n        >>> import requests\n        >>> from PIL import Image\n\n        >>> from diffusers import StableDiffusionDepth2ImgPipeline\n\n        >>> pipe = StableDiffusionDepth2ImgPipeline.from_pretrained(\n        ...     \"stabilityai/stable-diffusion-2-depth\",\n        ...     torch_dtype=torch.float16,\n        ... )\n        >>> pipe.to(\"cuda\")\n\n\n        >>> url = \"http://images.cocodataset.org/val2017/000000039769.jpg\"\n        >>> init_image = Image.open(requests.get(url, stream=True).raw)\n        >>> prompt = \"two tigers\"\n        >>> n_propmt = \"bad, deformed, ugly, bad anotomy\"\n        >>> image = pipe(prompt=prompt, image=init_image, negative_prompt=n_propmt, strength=0.7).images[0]\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs\n        self.check_inputs(\n            prompt,\n            strength,\n            callback_steps,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        if image is None:\n            raise ValueError(\"`image` input cannot be undefined.\")\n\n        # 2. 
Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare depth mask\n        depth_mask = self.prepare_depth_map(\n            image,\n            depth_map,\n            batch_size * num_images_per_prompt,\n            do_classifier_free_guidance,\n            prompt_embeds.dtype,\n            device,\n        )\n\n        # 5. Preprocess image\n        image = preprocess(image)\n\n        # 6. Set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n\n        # 7. Prepare latent variables\n        latents = self.prepare_latents(\n            image, latent_timestep, batch_size, num_images_per_prompt, prompt_embeds.dtype, device, generator\n        )\n\n        # 8. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 9. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                latent_model_input = torch.cat([latent_model_input, depth_mask], dim=1)\n\n                # predict the noise residual\n                noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=prompt_embeds, return_dict=False)[\n                    0\n                ]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_diffedit.py",
    "content": "# Copyright 2023 DiffEdit Authors and Pix2Pix Zero Authors and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom dataclasses import dataclass\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import DDIMInverseScheduler, KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    BaseOutput,\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\nclass DiffEditInversionPipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for Stable Diffusion pipelines.\n\n    Args:\n        latents (`torch.FloatTensor`)\n            inverted latents tensor\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `num_timesteps * batch_size` or numpy array of shape `(num_timesteps,\n            batch_size, height, width, num_channels)`. PIL images or numpy array present the denoised images of the\n            diffusion pipeline.\n    \"\"\"\n\n    latents: torch.FloatTensor\n    images: Union[List[PIL.Image.Image], np.ndarray]\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n\n        ```py\n        >>> import PIL\n        >>> import requests\n        >>> import torch\n        >>> from io import BytesIO\n\n        >>> from diffusers import StableDiffusionDiffEditPipeline\n\n\n        >>> def download_image(url):\n        ...     response = requests.get(url)\n        ...     return PIL.Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n\n        >>> img_url = \"https://github.com/Xiang-cd/DiffEdit-stable-diffusion/raw/main/assets/origin.png\"\n\n        >>> init_image = download_image(img_url).resize((768, 768))\n\n        >>> pipe = StableDiffusionDiffEditPipeline.from_pretrained(\n        ...     \"stabilityai/stable-diffusion-2-1\", torch_dtype=torch.float16\n        ... 
)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> pipeline.scheduler = DDIMScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.inverse_scheduler = DDIMInverseScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.enable_model_cpu_offload()\n\n        >>> mask_prompt = \"A bowl of fruits\"\n        >>> prompt = \"A bowl of pears\"\n\n        >>> mask_image = pipe.generate_mask(image=init_image, source_prompt=prompt, target_prompt=mask_prompt)\n        >>> image_latents = pipe.invert(image=init_image, prompt=mask_prompt).latents\n        >>> image = pipe(prompt=prompt, mask_image=mask_image, image_latents=image_latents).images[0]\n        ```\n\"\"\"\n\nEXAMPLE_INVERT_DOC_STRING = \"\"\"\n        ```py\n        >>> import PIL\n        >>> import requests\n        >>> import torch\n        >>> from io import BytesIO\n\n        >>> from diffusers import StableDiffusionDiffEditPipeline\n\n\n        >>> def download_image(url):\n        ...     response = requests.get(url)\n        ...     return PIL.Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n\n        >>> img_url = \"https://github.com/Xiang-cd/DiffEdit-stable-diffusion/raw/main/assets/origin.png\"\n\n        >>> init_image = download_image(img_url).resize((768, 768))\n\n        >>> pipe = StableDiffusionDiffEditPipeline.from_pretrained(\n        ...     \"stabilityai/stable-diffusion-2-1\", torch_dtype=torch.float16\n        ... )\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> pipeline.scheduler = DDIMScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.inverse_scheduler = DDIMInverseScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.enable_model_cpu_offload()\n\n        >>> prompt = \"A bowl of fruits\"\n\n        >>> inverted_latents = pipe.invert(image=init_image, prompt=prompt).latents\n        ```\n\"\"\"\n\n\ndef auto_corr_loss(hidden_states, generator=None):\n    reg_loss = 0.0\n    for i in range(hidden_states.shape[0]):\n        for j in range(hidden_states.shape[1]):\n            noise = hidden_states[i : i + 1, j : j + 1, :, :]\n            while True:\n                roll_amount = torch.randint(noise.shape[2] // 2, (1,), generator=generator).item()\n                reg_loss += (noise * torch.roll(noise, shifts=roll_amount, dims=2)).mean() ** 2\n                reg_loss += (noise * torch.roll(noise, shifts=roll_amount, dims=3)).mean() ** 2\n\n                if noise.shape[2] <= 8:\n                    break\n                noise = torch.nn.functional.avg_pool2d(noise, kernel_size=2)\n    return reg_loss\n\n\ndef kl_divergence(hidden_states):\n    return hidden_states.var() + hidden_states.mean() ** 2 - 1 - torch.log(hidden_states.var() + 1e-7)\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], 
torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\ndef preprocess_mask(mask, batch_size: int = 1):\n    if not isinstance(mask, torch.Tensor):\n        # preprocess mask\n        if isinstance(mask, PIL.Image.Image) or isinstance(mask, np.ndarray):\n            mask = [mask]\n\n        if isinstance(mask, list):\n            if isinstance(mask[0], PIL.Image.Image):\n                mask = [np.array(m.convert(\"L\")).astype(np.float32) / 255.0 for m in mask]\n            if isinstance(mask[0], np.ndarray):\n                mask = np.stack(mask, axis=0) if mask[0].ndim < 3 else np.concatenate(mask, axis=0)\n                mask = torch.from_numpy(mask)\n            elif isinstance(mask[0], torch.Tensor):\n                mask = torch.stack(mask, dim=0) if mask[0].ndim < 3 else torch.cat(mask, dim=0)\n\n    # Batch and add channel dim for single mask\n    if mask.ndim == 2:\n        mask = mask.unsqueeze(0).unsqueeze(0)\n\n    # Batch single mask or add channel dim\n    if mask.ndim == 3:\n        # Single batched mask, no channel dim or single mask not batched but channel dim\n        if mask.shape[0] == 1:\n            mask = mask.unsqueeze(0)\n\n        # Batched masks no channel dim\n        else:\n            mask = mask.unsqueeze(1)\n\n    # Check mask shape\n    if batch_size > 1:\n        if mask.shape[0] == 1:\n            mask = torch.cat([mask] * batch_size)\n        elif mask.shape[0] > 1 and mask.shape[0] != batch_size:\n            raise ValueError(\n                f\"`mask_image` with batch size {mask.shape[0]} cannot be broadcasted to batch size {batch_size} \"\n                f\"inferred by prompt inputs\"\n            )\n\n    if mask.shape[1] != 1:\n        raise ValueError(f\"`mask_image` must have 1 channel, but has {mask.shape[1]} channels\")\n\n    # Check mask is in [0, 1]\n    if mask.min() < 0 or mask.max() > 1:\n        raise ValueError(\"`mask_image` should be in [0, 1] range\")\n\n    # Binarize mask\n    mask[mask < 0.5] = 0\n    mask[mask >= 0.5] = 1\n\n    return mask\n\n\nclass StableDiffusionDiffEditPipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion using DiffEdit. *This is an experimental feature*.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents.\n        inverse_scheduler (`[DDIMInverseScheduler]`):\n            A scheduler to be used in combination with `unet` to fill in the unmasked part of the input latents\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\", \"inverse_scheduler\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        inverse_scheduler: DDIMInverseScheduler,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"skip_prk_steps\") and scheduler.config.skip_prk_steps is False:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration\"\n                \" `skip_prk_steps`. `skip_prk_steps` should be set to True in the configuration file. Please make\"\n                \" sure to update the config accordingly as not setting `skip_prk_steps` in the config might lead to\"\n                \" incorrect results in future versions. 
If you have downloaded this checkpoint from the Hugging Face\"\n                \" Hub, it would be very nice if you could open a Pull request for the\"\n                \" `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"skip_prk_steps not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"skip_prk_steps\"] = True\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. 
If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            inverse_scheduler=inverse_scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        
extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def check_inputs(\n        self,\n        prompt,\n        strength,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (strength is None) or (strength is not None and (strength < 0 or strength > 1)):\n            raise ValueError(\n                f\"The value of `strength` should be in [0.0, 1.0], but is {strength} of type {type(strength)}.\"\n            )\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def check_source_inputs(\n        self,\n        source_prompt=None,\n        source_negative_prompt=None,\n        source_prompt_embeds=None,\n        source_negative_prompt_embeds=None,\n    ):\n        if source_prompt is not None and source_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `source_prompt`: {source_prompt} and `source_prompt_embeds`: {source_prompt_embeds}.\"\n                \" Please make sure to only forward one of the two.\"\n            )\n        elif source_prompt is None and source_prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `source_prompt` or `source_prompt_embeds`. Cannot leave both of the arguments undefined.\"\n            )\n        elif source_prompt is not None and (\n            not isinstance(source_prompt, str) and not isinstance(source_prompt, list)\n        ):\n            raise ValueError(f\"`source_prompt` has to be of type `str` or `list` but is {type(source_prompt)}\")\n\n        if source_negative_prompt is not None and source_negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `source_negative_prompt`: {source_negative_prompt} and `source_negative_prompt_embeds`:\"\n                f\" {source_negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if source_prompt_embeds is not None and source_negative_prompt_embeds is not None:\n            if source_prompt_embeds.shape != source_negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`source_prompt_embeds` and `source_negative_prompt_embeds` must have the same shape when passed\"\n                    f\" directly, but got: `source_prompt_embeds` {source_prompt_embeds.shape} !=\"\n                    f\" `source_negative_prompt_embeds` {source_negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    def get_inverse_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n\n        # safety for t_start overflow to prevent empty timesteps slice\n        if t_start == 0:\n            return self.inverse_scheduler.timesteps, num_inference_steps\n        timesteps = self.inverse_scheduler.timesteps[:-t_start]\n\n        return timesteps, num_inference_steps - t_start\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_pix2pix_zero.StableDiffusionPix2PixZeroPipeline.prepare_image_latents\n    def prepare_image_latents(self, image, batch_size, dtype, device, generator=None):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            latents = [self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)]\n            latents = torch.cat(latents, dim=0)\n        else:\n            latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        latents = self.vae.config.scaling_factor * latents\n\n        if batch_size != latents.shape[0]:\n            if batch_size % latents.shape[0] == 0:\n                # expand image_latents for batch_size\n                deprecation_message = (\n                    f\"You have passed {batch_size} text prompts (`prompt`), but only {latents.shape[0]} initial\"\n                    \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                    \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                    \" your script to pass as many initial images as text prompts to suppress this warning.\"\n                )\n                deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n                additional_latents_per_image = batch_size // latents.shape[0]\n                latents = torch.cat([latents] * additional_latents_per_image, dim=0)\n            else:\n                raise ValueError(\n                    f\"Cannot duplicate `image` of batch size {latents.shape[0]} to {batch_size} text prompts.\"\n                )\n        else:\n            latents = torch.cat([latents], dim=0)\n\n        return latents\n\n    def get_epsilon(self, model_output: torch.Tensor, sample: torch.Tensor, timestep: int):\n        pred_type = self.inverse_scheduler.config.prediction_type\n        alpha_prod_t = self.inverse_scheduler.alphas_cumprod[timestep]\n\n        beta_prod_t = 1 - alpha_prod_t\n\n        if pred_type == \"epsilon\":\n            return model_output\n        elif pred_type == \"sample\":\n            return (sample - alpha_prod_t ** (0.5) * model_output) / beta_prod_t ** (0.5)\n        elif pred_type == \"v_prediction\":\n            return (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        else:\n            raise ValueError(\n                f\"prediction_type given as {pred_type} must be one of `epsilon`, `sample`, or `v_prediction`\"\n            )\n\n    @torch.no_grad()\n    def generate_mask(\n        self,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        target_prompt: Optional[Union[str, List[str]]] = None,\n        target_negative_prompt: Optional[Union[str, List[str]]] = None,\n        target_prompt_embeds: Optional[torch.FloatTensor] = None,\n        target_negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        source_prompt: Optional[Union[str, List[str]]] = None,\n        source_negative_prompt: Optional[Union[str, List[str]]] = None,\n        source_prompt_embeds: Optional[torch.FloatTensor] = None,\n        source_negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        num_maps_per_mask: Optional[int] = 10,\n        mask_encode_strength: Optional[float] = 0.5,\n        mask_thresholding_ratio: Optional[float] = 3.0,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        output_type: Optional[str] = \"np\",\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function used to generate a latent mask given a mask prompt, a target prompt, and an image.\n\n        Args:\n            image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch which will be used for computing the mask.\n            target_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the semantic mask generation. If not defined, one has to pass\n                `prompt_embeds`. instead.\n            target_negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            target_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            target_negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            source_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the semantic mask generation using the method in [DiffEdit:\n                Diffusion-Based Semantic Image Editing with Mask Guidance](https://arxiv.org/pdf/2210.11427.pdf). If\n                not defined, one has to pass `source_prompt_embeds` or `source_image` instead.\n            source_negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the semantic mask generation away from using the method in [DiffEdit:\n                Diffusion-Based Semantic Image Editing with Mask Guidance](https://arxiv.org/pdf/2210.11427.pdf). If\n                not defined, one has to pass `source_negative_prompt_embeds` or `source_image` instead.\n            source_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings to guide the semantic mask generation. Can be used to easily tweak text\n                inputs, *e.g.* prompt weighting. If not provided, text embeddings will be generated from\n                `source_prompt` input argument.\n            source_negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings to negatively guide the semantic mask generation. Can be used to easily\n                tweak text inputs, *e.g.* prompt weighting. If not provided, text embeddings will be generated from\n                `source_negative_prompt` input argument.\n            num_maps_per_mask (`int`, *optional*, defaults to 10):\n                The number of noise maps sampled to generate the semantic mask using the method in [DiffEdit:\n                Diffusion-Based Semantic Image Editing with Mask Guidance](https://arxiv.org/pdf/2210.11427.pdf).\n            mask_encode_strength (`float`, *optional*, defaults to 0.5):\n                Conceptually, the strength of the noise maps sampled to generate the semantic mask using the method in\n                [DiffEdit: Diffusion-Based Semantic Image Editing with Mask Guidance](\n                https://arxiv.org/pdf/2210.11427.pdf). Must be between 0 and 1.\n            mask_thresholding_ratio (`float`, *optional*, defaults to 3.0):\n                The maximum multiple of the mean absolute difference used to clamp the semantic guidance map before\n                mask binarization.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            `List[PIL.Image.Image]` or `np.array`: `List[PIL.Image.Image]` if `output_type` is `\"pil\"`, otherwise a\n            `np.array`. When returning a `List[PIL.Image.Image]`, the list will consist of a batch of single-channel\n            binary image with dimensions `(height // self.vae_scale_factor, width // self.vae_scale_factor)`, otherwise\n            the `np.array` will have shape `(batch_size, height // self.vae_scale_factor, width //\n            self.vae_scale_factor)`.\n        \"\"\"\n\n        # 1. Check inputs (Provide dummy argument for callback_steps)\n        self.check_inputs(\n            target_prompt,\n            mask_encode_strength,\n            1,\n            target_negative_prompt,\n            target_prompt_embeds,\n            target_negative_prompt_embeds,\n        )\n\n        self.check_source_inputs(\n            source_prompt,\n            source_negative_prompt,\n            source_prompt_embeds,\n            source_negative_prompt_embeds,\n        )\n\n        if (num_maps_per_mask is None) or (\n            num_maps_per_mask is not None and (not isinstance(num_maps_per_mask, int) or num_maps_per_mask <= 0)\n        ):\n            raise ValueError(\n                f\"`num_maps_per_mask` has to be a positive integer but is {num_maps_per_mask} of type\"\n                f\" {type(num_maps_per_mask)}.\"\n            )\n\n        if mask_thresholding_ratio is None or mask_thresholding_ratio <= 0:\n            raise ValueError(\n                f\"`mask_thresholding_ratio` has to be positive but is {mask_thresholding_ratio} of type\"\n                f\" {type(mask_thresholding_ratio)}.\"\n            )\n\n        # 2. 
Define call parameters\n        if target_prompt is not None and isinstance(target_prompt, str):\n            batch_size = 1\n        elif target_prompt is not None and isinstance(target_prompt, list):\n            batch_size = len(target_prompt)\n        else:\n            batch_size = target_prompt_embeds.shape[0]\n        if cross_attention_kwargs is None:\n            cross_attention_kwargs = {}\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompts\n        target_prompt_embeds = self._encode_prompt(\n            target_prompt,\n            device,\n            num_maps_per_mask,\n            do_classifier_free_guidance,\n            target_negative_prompt,\n            prompt_embeds=target_prompt_embeds,\n            negative_prompt_embeds=target_negative_prompt_embeds,\n        )\n\n        source_prompt_embeds = self._encode_prompt(\n            source_prompt,\n            device,\n            num_maps_per_mask,\n            do_classifier_free_guidance,\n            source_negative_prompt,\n            prompt_embeds=source_prompt_embeds,\n            negative_prompt_embeds=source_negative_prompt_embeds,\n        )\n\n        # 4. Preprocess image\n        image = preprocess(image).repeat_interleave(num_maps_per_mask, dim=0)\n\n        # 5. Set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, _ = self.get_timesteps(num_inference_steps, mask_encode_strength, device)\n        encode_timestep = timesteps[0]\n\n        # 6. Prepare image latents and add noise with specified strength\n        image_latents = self.prepare_image_latents(\n            image, batch_size * num_maps_per_mask, self.vae.dtype, device, generator\n        )\n        noise = randn_tensor(image_latents.shape, generator=generator, device=device, dtype=self.vae.dtype)\n        image_latents = self.scheduler.add_noise(image_latents, noise, encode_timestep)\n\n        latent_model_input = torch.cat([image_latents] * (4 if do_classifier_free_guidance else 2))\n        latent_model_input = self.scheduler.scale_model_input(latent_model_input, encode_timestep)\n\n        # 7. Predict the noise residual\n        prompt_embeds = torch.cat([source_prompt_embeds, target_prompt_embeds])\n        noise_pred = self.unet(\n            latent_model_input,\n            encode_timestep,\n            encoder_hidden_states=prompt_embeds,\n            cross_attention_kwargs=cross_attention_kwargs,\n        ).sample\n\n        if do_classifier_free_guidance:\n            noise_pred_neg_src, noise_pred_source, noise_pred_uncond, noise_pred_target = noise_pred.chunk(4)\n            noise_pred_source = noise_pred_neg_src + guidance_scale * (noise_pred_source - noise_pred_neg_src)\n            noise_pred_target = noise_pred_uncond + guidance_scale * (noise_pred_target - noise_pred_uncond)\n        else:\n            noise_pred_source, noise_pred_target = noise_pred.chunk(2)\n\n        # 8. 
Compute the mask from the absolute difference of predicted noise residuals\n        # TODO: Consider smoothing mask guidance map\n        mask_guidance_map = (\n            torch.abs(noise_pred_target - noise_pred_source)\n            .reshape(batch_size, num_maps_per_mask, *noise_pred_target.shape[-3:])\n            .mean([1, 2])\n        )\n        clamp_magnitude = mask_guidance_map.mean() * mask_thresholding_ratio\n        semantic_mask_image = mask_guidance_map.clamp(0, clamp_magnitude) / clamp_magnitude\n        semantic_mask_image = torch.where(semantic_mask_image <= 0.5, 0, 1)\n        mask_image = semantic_mask_image.cpu().numpy()\n\n        # 9. Convert to Numpy array or PIL.\n        if output_type == \"pil\":\n            mask_image = self.image_processor.numpy_to_pil(mask_image)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        return mask_image\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_INVERT_DOC_STRING)\n    def invert(\n        self,\n        prompt: Optional[Union[str, List[str]]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        num_inference_steps: int = 50,\n        inpaint_strength: float = 0.8,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        decode_latents: bool = False,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        lambda_auto_corr: float = 20.0,\n        lambda_kl: float = 20.0,\n        num_reg_steps: int = 0,\n        num_auto_corr_rolls: int = 5,\n    ):\n        r\"\"\"\n        Function used to generate inverted latents given a prompt and image.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch to produce the inverted latents, guided by `prompt`.\n            inpaint_strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how far into the noising process to run latent inversion. Must be between 0 and\n                1. When `strength` is 1, the inversion process will be run for the full number of iterations specified\n                in `num_inference_steps`. `image` will be used as a reference for the inversion process, adding more\n                noise the larger the `strength`. If `strength` is 0, no inpainting will occur.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            decode_latents (`bool`, *optional*, defaults to `False`):\n                Whether or not to decode the inverted latents into a generated image. Setting this argument to `True`\n                will decode all inverted latents for each timestep into a list of generated images.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.DiffEditInversionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            lambda_auto_corr (`float`, *optional*, defaults to 20.0):\n                Lambda parameter to control auto correction\n            lambda_kl (`float`, *optional*, defaults to 20.0):\n                Lambda parameter to control Kullback–Leibler divergence output\n            num_reg_steps (`int`, *optional*, defaults to 0):\n                Number of regularization loss steps\n            num_auto_corr_rolls (`int`, *optional*, defaults to 5):\n                Number of auto correction roll steps\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.pipeline_stable_diffusion_diffedit.DiffEditInversionPipelineOutput`] or\n            `tuple`: [`~pipelines.stable_diffusion.pipeline_stable_diffusion_diffedit.DiffEditInversionPipelineOutput`]\n            if `return_dict` is `True`, otherwise a `tuple`. When returning a tuple, the first element is the inverted\n            latents tensors ordered by increasing noise, and then second is the corresponding decoded images if\n            `decode_latents` is `True`, otherwise `None`.\n        \"\"\"\n\n        # 1. Check inputs\n        self.check_inputs(\n            prompt,\n            inpaint_strength,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        if image is None:\n            raise ValueError(\"`image` input cannot be undefined.\")\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n        if cross_attention_kwargs is None:\n            cross_attention_kwargs = {}\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Preprocess image\n        image = preprocess(image)\n\n        # 4. Prepare latent variables\n        num_images_per_prompt = 1\n        latents = self.prepare_image_latents(\n            image, batch_size * num_images_per_prompt, self.vae.dtype, device, generator\n        )\n\n        # 5. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 6. Prepare timesteps\n        self.inverse_scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_inverse_timesteps(num_inference_steps, inpaint_strength, device)\n\n        # 7. 
Noising loop where we obtain the intermediate noised latent image for each timestep.\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.inverse_scheduler.order\n        inverted_latents = [latents.detach().clone()]\n        with self.progress_bar(total=num_inference_steps - 1) as progress_bar:\n            for i, t in enumerate(timesteps[:-1]):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.inverse_scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # regularization of the noise prediction (not in original code or paper but borrowed from Pix2PixZero)\n                if num_reg_steps > 0:\n                    with torch.enable_grad():\n                        for _ in range(num_reg_steps):\n                            if lambda_auto_corr > 0:\n                                for _ in range(num_auto_corr_rolls):\n                                    var = torch.autograd.Variable(noise_pred.detach().clone(), requires_grad=True)\n\n                                    # Derive epsilon from model output before regularizing to IID standard normal\n                                    var_epsilon = self.get_epsilon(var, latent_model_input.detach(), t)\n\n                                    l_ac = auto_corr_loss(var_epsilon, generator=generator)\n                                    l_ac.backward()\n\n                                    grad = var.grad.detach() / num_auto_corr_rolls\n                                    noise_pred = noise_pred - lambda_auto_corr * grad\n\n                            if lambda_kl > 0:\n                                var = torch.autograd.Variable(noise_pred.detach().clone(), requires_grad=True)\n\n                                # Derive epsilon from model output before regularizing to IID standard normal\n                                var_epsilon = self.get_epsilon(var, latent_model_input.detach(), t)\n\n                                l_kld = kl_divergence(var_epsilon)\n                                l_kld.backward()\n\n                                grad = var.grad.detach()\n                                noise_pred = noise_pred - lambda_kl * grad\n\n                            noise_pred = noise_pred.detach()\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.inverse_scheduler.step(noise_pred, t, latents).prev_sample\n                inverted_latents.append(latents.detach().clone())\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or (\n                    (i + 1) > num_warmup_steps and (i + 1) % self.inverse_scheduler.order == 0\n                ):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                
        callback(i, t, latents)\n\n        assert len(inverted_latents) == len(timesteps)\n        latents = torch.stack(list(reversed(inverted_latents)), 1)\n\n        # 8. Post-processing\n        image = None\n        if decode_latents:\n            image = self.decode_latents(latents.flatten(0, 1).detach())\n\n        # 9. Convert to PIL.\n        if decode_latents and output_type == \"pil\":\n            image = self.image_processor.numpy_to_pil(image)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (latents, image)\n\n        return DiffEditInversionPipelineOutput(latents=latents, images=image)\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Optional[Union[str, List[str]]] = None,\n        mask_image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        image_latents: torch.FloatTensor = None,\n        inpaint_strength: Optional[float] = 0.8,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            mask_image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask the generated image. White pixels in the mask\n                will be repainted, while black pixels will be preserved. If `mask_image` is a PIL image, it will be\n                converted to a single channel (luminance) before use. If it's a tensor, it should contain one color\n                channel (L) instead of 3, so the expected shape would be `(B, 1, H, W)`.\n            image_latents (`PIL.Image.Image` or `torch.FloatTensor`):\n                Partially noised image latents from the inversion process to be used as inputs for image generation.\n            inpaint_strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to inpaint the masked area. Must be between 0 and 1. When `strength`\n                is 1, the denoising process will be run on the masked area for the full number of iterations specified\n                in `num_inference_steps`. `image_latents` will be used as a reference for the masked area, adding more\n                noise to that region the larger the `strength`. If `strength` is 0, no inpainting will occur.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # 1. Check inputs\n        self.check_inputs(\n            prompt,\n            inpaint_strength,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        if mask_image is None:\n            raise ValueError(\n                \"`mask_image` input cannot be undefined. Use `generate_mask()` to compute `mask_image` from text prompts.\"\n            )\n        if image_latents is None:\n            raise ValueError(\n                \"`image_latents` input cannot be undefined. Use `invert()` to compute `image_latents` from input images.\"\n            )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n        if cross_attention_kwargs is None:\n            cross_attention_kwargs = {}\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Preprocess mask\n        mask_image = preprocess_mask(mask_image, batch_size)\n        latent_height, latent_width = mask_image.shape[-2:]\n        mask_image = torch.cat([mask_image] * num_images_per_prompt)\n        mask_image = mask_image.to(device=device, dtype=prompt_embeds.dtype)\n\n        # 5. Set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, inpaint_strength, device)\n\n        # 6. 
Preprocess image latents\n        image_latents = preprocess(image_latents)\n        latent_shape = (self.vae.config.latent_channels, latent_height, latent_width)\n        if image_latents.shape[-3:] != latent_shape:\n            raise ValueError(\n                f\"Each latent image in `image_latents` must have shape {latent_shape}, \"\n                f\"but has shape {image_latents.shape[-3:]}\"\n            )\n        if image_latents.ndim == 4:\n            image_latents = image_latents.reshape(batch_size, len(timesteps), *latent_shape)\n        if image_latents.shape[:2] != (batch_size, len(timesteps)):\n            raise ValueError(\n                f\"`image_latents` must have batch size {batch_size} with latent images from {len(timesteps)} timesteps, \"\n                f\"but has batch size {image_latents.shape[0]} with latent images from {image_latents.shape[1]} timesteps.\"\n            )\n        image_latents = image_latents.transpose(0, 1).repeat_interleave(num_images_per_prompt, dim=1)\n        image_latents = image_latents.to(device=device, dtype=prompt_embeds.dtype)\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. Denoising loop\n        latents = image_latents[0].detach().clone()\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # mask with inverted latents from appropriate timestep - use original image latent for last step\n                latents = latents * mask_image + image_latents[i] * (1 - mask_image)\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * 
image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
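The file above wires together three entry points — `generate_mask`, `invert`, and `__call__` — and the following is a minimal usage sketch, not part of the serialized library source. It assumes the class defined in this file is exported as `StableDiffusionDiffEditPipeline`, that the `stabilityai/stable-diffusion-2-1` checkpoint and the sample image URL are reachable, and that `DDIMScheduler`/`DDIMInverseScheduler` are reasonable choices for `scheduler` and `inverse_scheduler`.

import torch

from diffusers import DDIMInverseScheduler, DDIMScheduler, StableDiffusionDiffEditPipeline
from diffusers.utils import load_image

# Hypothetical end-to-end sketch: mask generation -> inversion -> masked inpainting.
pipe = StableDiffusionDiffEditPipeline.from_pretrained(
    "stabilityai/stable-diffusion-2-1", torch_dtype=torch.float16
).to("cuda")
pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)
pipe.inverse_scheduler = DDIMInverseScheduler.from_config(pipe.scheduler.config)

# Any RGB image works here; this URL is only an example input.
init_image = load_image(
    "https://github.com/Xiang-cd/DiffEdit-stable-diffusion/raw/main/assets/origin.png"
).resize((768, 768))

source_prompt = "a bowl of fruits"
target_prompt = "a basket of fruits"

# 1) latent-space edit mask from the difference of source/target noise estimates
mask_image = pipe.generate_mask(image=init_image, source_prompt=source_prompt, target_prompt=target_prompt)
# 2) partially noised latents for every inversion timestep
image_latents = pipe.invert(image=init_image, prompt=source_prompt).latents
# 3) denoise only inside the mask, keeping the inverted latents elsewhere
edited = pipe(prompt=target_prompt, mask_image=mask_image, image_latents=image_latents).images[0]
edited.save("edited.png")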
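Step 8 of `generate_mask` reduces to a few tensor operations. The snippet below replays that arithmetic on random stand-in tensors (all sizes are made up for illustration) to show how the binary mask falls out of the mean absolute difference between the two guided noise predictions.

import torch

batch_size, num_maps_per_mask = 1, 10
latent_channels, latent_height, latent_width = 4, 96, 96
mask_thresholding_ratio = 3.0

# stand-ins for the guided noise predictions under the source and target prompts
noise_pred_source = torch.randn(batch_size * num_maps_per_mask, latent_channels, latent_height, latent_width)
noise_pred_target = torch.randn_like(noise_pred_source)

# average the absolute difference over the sampled noise maps and the channel dim
mask_guidance_map = (
    torch.abs(noise_pred_target - noise_pred_source)
    .reshape(batch_size, num_maps_per_mask, latent_channels, latent_height, latent_width)
    .mean([1, 2])
)

# clamp outliers relative to the mean, rescale to [0, 1], then binarize at 0.5
clamp_magnitude = mask_guidance_map.mean() * mask_thresholding_ratio
semantic_mask = (mask_guidance_map.clamp(0, clamp_magnitude) / clamp_magnitude > 0.5).long()
print(semantic_mask.shape)  # (batch_size, latent_height, latent_width)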
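`prepare_extra_step_kwargs` relies on a small introspection trick: `eta` and `generator` are only forwarded when the scheduler's `step` signature accepts them. A standalone sketch of the same pattern, with a hypothetical `filter_step_kwargs` helper and dummy step functions standing in for real schedulers:

import inspect

def filter_step_kwargs(step_fn, eta=0.0, generator=None):
    # Same idea as prepare_extra_step_kwargs above: inspect the step() signature
    # and only pass the optional kwargs it actually accepts.
    params = set(inspect.signature(step_fn).parameters)
    extra = {}
    if "eta" in params:
        extra["eta"] = eta
    if "generator" in params:
        extra["generator"] = generator
    return extra

def ddim_like_step(model_output, timestep, sample, eta=0.0, generator=None):
    ...  # stand-in for a scheduler whose step() takes both kwargs

def euler_like_step(model_output, timestep, sample):
    ...  # stand-in for a scheduler whose step() takes neither kwarg

print(filter_step_kwargs(ddim_like_step, eta=0.5))   # {'eta': 0.5, 'generator': None}
print(filter_step_kwargs(euler_like_step, eta=0.5))  # {}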
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_image_variation.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPVisionModelWithProjection\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import deprecate, is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass StableDiffusionImageVariationPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline to generate variations from an input image using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        image_encoder ([`CLIPVisionModelWithProjection`]):\n            Frozen CLIP image-encoder. Stable Diffusion Image Variation uses the vision portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPVisionModelWithProjection),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    # TODO: feature_extractor is required to encode images (if they are in PIL format),\n    # we should give a descriptive message if the pipeline doesn't have one.\n    _optional_components = [\"safety_checker\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        image_encoder: CLIPVisionModelWithProjection,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warn(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely .If you're checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. 
If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            image_encoder=image_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.image_encoder, self.vae, self.safety_checker]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_image(self, image, device, num_images_per_prompt, do_classifier_free_guidance):\n        dtype = next(self.image_encoder.parameters()).dtype\n\n        if not isinstance(image, torch.Tensor):\n            image = self.feature_extractor(images=image, return_tensors=\"pt\").pixel_values\n\n        image = image.to(device=device, dtype=dtype)\n        image_embeddings = self.image_encoder(image).image_embeds\n        image_embeddings = image_embeddings.unsqueeze(1)\n\n        # duplicate image embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = image_embeddings.shape\n        image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1)\n        image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = torch.zeros_like(image_embeddings)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            image_embeddings = torch.cat([negative_prompt_embeds, image_embeddings])\n\n        return image_embeddings\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(self, image, height, width, callback_steps):\n        if (\n            not isinstance(image, torch.Tensor)\n            and not isinstance(image, PIL.Image.Image)\n            and not isinstance(image, list)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor` or `PIL.Image.Image` or `List[PIL.Image.Image]` but is\"\n                f\" {type(image)}\"\n            )\n\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        image: Union[PIL.Image.Image, List[PIL.Image.Image], torch.FloatTensor],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            image (`PIL.Image.Image` or `List[PIL.Image.Image]` or `torch.FloatTensor`):\n                The image or images to guide the image generation. If you provide a tensor, it needs to comply with the\n                configuration of\n                [this](https://huggingface.co/lambdalabs/sd-image-variations-diffusers/blob/main/feature_extractor/preprocessor_config.json)\n                `CLIPImageProcessor`\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(image, height, width, callback_steps)\n\n        # 2. Define call parameters\n        if isinstance(image, PIL.Image.Image):\n            batch_size = 1\n        elif isinstance(image, list):\n            batch_size = len(image)\n        else:\n            batch_size = image.shape[0]\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input image\n        image_embeddings = self._encode_image(image, device, num_images_per_prompt, do_classifier_free_guidance)\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. 
Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            image_embeddings.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, image_embeddings.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_img2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import FromCkptMixin, LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import requests\n        >>> import torch\n        >>> from PIL import Image\n        >>> from io import BytesIO\n\n        >>> from diffusers import StableDiffusionImg2ImgPipeline\n\n        >>> device = \"cuda\"\n        >>> model_id_or_path = \"runwayml/stable-diffusion-v1-5\"\n        >>> pipe = StableDiffusionImg2ImgPipeline.from_pretrained(model_id_or_path, torch_dtype=torch.float16)\n        >>> pipe = pipe.to(device)\n\n        >>> url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n\n        >>> response = requests.get(url)\n        >>> init_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> init_image = init_image.resize((768, 512))\n\n        >>> prompt = \"A fantasy landscape, trending on artstation\"\n\n        >>> images = pipe(prompt=prompt, image=init_image, strength=0.75, guidance_scale=7.5).images\n        >>> images[0].save(\"fantasy_landscape.png\")\n        ```\n\"\"\"\n\n\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\nclass StableDiffusionImg2ImgPipeline(DiffusionPipeline, TextualInversionLoaderMixin, 
LoraLoaderMixin, FromCkptMixin):\n    r\"\"\"\n    Pipeline for text-guided image to image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n        - *Ckpt*: [`loaders.FromCkptMixin.from_ckpt`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. 
If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. 
Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type as 
`prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self, prompt, strength, callback_steps, negative_prompt=None, prompt_embeds=None, negative_prompt_embeds=None\n    ):\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should in [0.0, 1.0] but is {strength}\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        batch_size = batch_size * num_images_per_prompt\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            init_latents = [\n                self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)\n            ]\n            init_latents = torch.cat(init_latents, dim=0)\n        else:\n            init_latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        if batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] == 0:\n            # expand init_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {batch_size} text prompts (`prompt`), but only {init_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = batch_size // init_latents.shape[0]\n            init_latents = torch.cat([init_latents] * additional_image_per_prompt, dim=0)\n        elif batch_size > init_latents.shape[0] and batch_size % init_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {init_latents.shape[0]} to {batch_size} text prompts.\"\n            )\n        else:\n            init_latents = torch.cat([init_latents], dim=0)\n\n        shape = init_latents.shape\n        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n\n        # get latents\n        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)\n        latents = init_latents\n\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: Optional[float] = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to transform the reference `image`. Must be between 0 and 1. `image`\n                will be used as a starting point, adding more noise to it the larger the `strength`. The number of\n                denoising steps depends on the amount of noise initially added. When `strength` is 1, added noise will\n                be maximum and the denoising process will run for the full number of iterations specified in\n                `num_inference_steps`. A value of 1, therefore, essentially ignores `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference. 
This parameter will be modulated by `strength`.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, strength, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Preprocess image\n        image = self.image_processor.preprocess(image)\n\n        # 5. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n\n        # 6. Prepare latent variables\n        latents = self.prepare_latents(\n            image, latent_timestep, batch_size, num_images_per_prompt, prompt_embeds.dtype, device, generator\n        )\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
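  {
    "path": "examples/img2img_usage_sketch.py",
    "content": "# NOTE: editorial usage sketch, not a file from the upstream repository. The img2img pipeline's\n# `__call__` docstring in this repo leaves its `Examples:` section empty, so this mirrors the call\n# pattern shown in the inpainting pipeline's docstring. The file name, the input-image URL and the\n# output file name are assumptions; the pipeline class, its arguments and the checkpoint id come\n# from the pipeline sources in this repository.\nfrom io import BytesIO\n\nimport requests\nimport torch\nfrom PIL import Image\n\nfrom diffusers import StableDiffusionImg2ImgPipeline\n\n\ndef download_image(url):\n    response = requests.get(url)\n    return Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n\n# Any RGB image can serve as the starting point; height and width should be multiples of 8.\nimg_url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\ninit_image = download_image(img_url).resize((768, 512))\n\npipe = StableDiffusionImg2ImgPipeline.from_pretrained(\n    \"runwayml/stable-diffusion-v1-5\", torch_dtype=torch.float16\n)\npipe = pipe.to(\"cuda\")\n\nprompt = \"A fantasy landscape, trending on artstation\"\n# `strength` in [0, 1]: larger values add more noise to `init_image`, so the result departs further from it.\nimage = pipe(prompt=prompt, image=init_image, strength=0.75, guidance_scale=7.5).images[0]\nimage.save(\"fantasy_landscape.png\")\n"
  },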
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import deprecate, is_accelerate_available, is_accelerate_version, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef prepare_mask_and_masked_image(image, mask, height, width, return_image: bool = False):\n    \"\"\"\n    Prepares a pair (image, mask) to be consumed by the Stable Diffusion pipeline. This means that those inputs will be\n    converted to ``torch.Tensor`` with shapes ``batch x channels x height x width`` where ``channels`` is ``3`` for the\n    ``image`` and ``1`` for the ``mask``.\n\n    The ``image`` will be converted to ``torch.float32`` and normalized to be in ``[-1, 1]``. The ``mask`` will be\n    binarized (``mask > 0.5``) and cast to ``torch.float32`` too.\n\n    Args:\n        image (Union[np.array, PIL.Image, torch.Tensor]): The image to inpaint.\n            It can be a ``PIL.Image``, or a ``height x width x 3`` ``np.array`` or a ``channels x height x width``\n            ``torch.Tensor`` or a ``batch x channels x height x width`` ``torch.Tensor``.\n        mask (_type_): The mask to apply to the image, i.e. regions to inpaint.\n            It can be a ``PIL.Image``, or a ``height x width`` ``np.array`` or a ``1 x height x width``\n            ``torch.Tensor`` or a ``batch x 1 x height x width`` ``torch.Tensor``.\n\n\n    Raises:\n        ValueError: ``torch.Tensor`` images should be in the ``[-1, 1]`` range. ValueError: ``torch.Tensor`` mask\n        should be in the ``[0, 1]`` range. 
ValueError: ``mask`` and ``image`` should have the same spatial dimensions.\n        TypeError: ``mask`` is a ``torch.Tensor`` but ``image`` is not\n            (ot the other way around).\n\n    Returns:\n        tuple[torch.Tensor]: The pair (mask, masked_image) as ``torch.Tensor`` with 4\n            dimensions: ``batch x channels x height x width``.\n    \"\"\"\n\n    if image is None:\n        raise ValueError(\"`image` input cannot be undefined.\")\n\n    if mask is None:\n        raise ValueError(\"`mask_image` input cannot be undefined.\")\n\n    if isinstance(image, torch.Tensor):\n        if not isinstance(mask, torch.Tensor):\n            raise TypeError(f\"`image` is a torch.Tensor but `mask` (type: {type(mask)} is not\")\n\n        # Batch single image\n        if image.ndim == 3:\n            assert image.shape[0] == 3, \"Image outside a batch should be of shape (3, H, W)\"\n            image = image.unsqueeze(0)\n\n        # Batch and add channel dim for single mask\n        if mask.ndim == 2:\n            mask = mask.unsqueeze(0).unsqueeze(0)\n\n        # Batch single mask or add channel dim\n        if mask.ndim == 3:\n            # Single batched mask, no channel dim or single mask not batched but channel dim\n            if mask.shape[0] == 1:\n                mask = mask.unsqueeze(0)\n\n            # Batched masks no channel dim\n            else:\n                mask = mask.unsqueeze(1)\n\n        assert image.ndim == 4 and mask.ndim == 4, \"Image and Mask must have 4 dimensions\"\n        assert image.shape[-2:] == mask.shape[-2:], \"Image and Mask must have the same spatial dimensions\"\n        assert image.shape[0] == mask.shape[0], \"Image and Mask must have the same batch size\"\n\n        # Check image is in [-1, 1]\n        if image.min() < -1 or image.max() > 1:\n            raise ValueError(\"Image should be in [-1, 1] range\")\n\n        # Check mask is in [0, 1]\n        if mask.min() < 0 or mask.max() > 1:\n            raise ValueError(\"Mask should be in [0, 1] range\")\n\n        # Binarize mask\n        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n\n        # Image as float32\n        image = image.to(dtype=torch.float32)\n    elif isinstance(mask, torch.Tensor):\n        raise TypeError(f\"`mask` is a torch.Tensor but `image` (type: {type(image)} is not\")\n    else:\n        # preprocess image\n        if isinstance(image, (PIL.Image.Image, np.ndarray)):\n            image = [image]\n        if isinstance(image, list) and isinstance(image[0], PIL.Image.Image):\n            # resize all images w.r.t passed height an width\n            image = [i.resize((width, height), resample=PIL.Image.LANCZOS) for i in image]\n            image = [np.array(i.convert(\"RGB\"))[None, :] for i in image]\n            image = np.concatenate(image, axis=0)\n        elif isinstance(image, list) and isinstance(image[0], np.ndarray):\n            image = np.concatenate([i[None, :] for i in image], axis=0)\n\n        image = image.transpose(0, 3, 1, 2)\n        image = torch.from_numpy(image).to(dtype=torch.float32) / 127.5 - 1.0\n\n        # preprocess mask\n        if isinstance(mask, (PIL.Image.Image, np.ndarray)):\n            mask = [mask]\n\n        if isinstance(mask, list) and isinstance(mask[0], PIL.Image.Image):\n            mask = [i.resize((width, height), resample=PIL.Image.LANCZOS) for i in mask]\n            mask = np.concatenate([np.array(m.convert(\"L\"))[None, None, :] for m in mask], axis=0)\n            mask = mask.astype(np.float32) / 255.0\n      
  elif isinstance(mask, list) and isinstance(mask[0], np.ndarray):\n            mask = np.concatenate([m[None, None, :] for m in mask], axis=0)\n\n        mask[mask < 0.5] = 0\n        mask[mask >= 0.5] = 1\n        mask = torch.from_numpy(mask)\n\n    masked_image = image * (mask < 0.5)\n\n    # n.b. ensure backwards compatibility as old function does not return image\n    if return_image:\n        return mask, masked_image, image\n\n    return mask, masked_image\n\n\nclass StableDiffusionInpaintPipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion. *This is an experimental feature*.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. 
`steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might lead to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"skip_prk_steps\") and scheduler.config.skip_prk_steps is False:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration\"\n                \" `skip_prk_steps`. `skip_prk_steps` should be set to True in the configuration file. Please make\"\n                \" sure to update the config accordingly as not setting `skip_prk_steps` in the config might lead to\"\n                \" incorrect results in future versions. If you have downloaded this checkpoint from the Hugging Face\"\n                \" Hub, it would be very nice if you could open a Pull request for the\"\n                \" `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"skip_prk_steps not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"skip_prk_steps\"] = True\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n        # Check shapes, assume num_channels_latents == 4, num_channels_mask == 1, num_channels_masked == 4\n        if unet.config.in_channels != 9:\n            logger.warning(\n                f\"You have loaded a UNet with {unet.config.in_channels} input channels, whereas by default,\"\n                f\" {self.__class__} assumes that `pipeline.unet` has 9 input channels: 4 for `num_channels_latents`,\"\n                \" 1 for `num_channels_mask`, and 4 for `num_channels_masked_image`. If you did not intend to modify\"\n                \" this behavior, please check whether you have loaded the right checkpoint.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        
extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        strength,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should in [0.0, 1.0] but is {strength}\")\n\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def prepare_latents(\n        self,\n        batch_size,\n        num_channels_latents,\n        height,\n        width,\n        dtype,\n        device,\n        generator,\n        latents=None,\n        image=None,\n        timestep=None,\n        is_strength_max=True,\n    ):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if (image is None or timestep is None) and not is_strength_max:\n            raise ValueError(\n                \"Since strength < 1. initial latents are to be initialised as a combination of Image + Noise.\"\n                \"However, either the image or the noise timestep has not been provided.\"\n            )\n\n        if latents is None:\n            noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n            if is_strength_max:\n                # if strength is 100% then simply initialise the latents to noise\n                latents = noise\n            else:\n                # otherwise initialise latents as init image + noise\n                image = image.to(device=device, dtype=dtype)\n                if isinstance(generator, list):\n                    image_latents = [\n                        self.vae.encode(image[i : i + 1]).latent_dist.sample(generator=generator[i])\n                        for i in range(batch_size)\n                    ]\n                else:\n                    image_latents = self.vae.encode(image).latent_dist.sample(generator=generator)\n\n                image_latents = self.vae.config.scaling_factor * image_latents\n\n                latents = self.scheduler.add_noise(image_latents, noise, timestep)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n\n        return latents\n\n    def prepare_mask_latents(\n        self, mask, masked_image, batch_size, height, width, dtype, device, generator, do_classifier_free_guidance\n    ):\n        # resize the mask to latents shape as we concatenate the mask to the latents\n        # we do that before converting to dtype to avoid breaking in case we're using cpu_offload\n        # and half precision\n        mask = torch.nn.functional.interpolate(\n            mask, size=(height // self.vae_scale_factor, width // self.vae_scale_factor)\n        )\n        mask = mask.to(device=device, dtype=dtype)\n\n        masked_image = masked_image.to(device=device, dtype=dtype)\n\n        # encode the mask image into latents space so we can 
concatenate it to the latents\n        if isinstance(generator, list):\n            masked_image_latents = [\n                self.vae.encode(masked_image[i : i + 1]).latent_dist.sample(generator=generator[i])\n                for i in range(batch_size)\n            ]\n            masked_image_latents = torch.cat(masked_image_latents, dim=0)\n        else:\n            masked_image_latents = self.vae.encode(masked_image).latent_dist.sample(generator=generator)\n        masked_image_latents = self.vae.config.scaling_factor * masked_image_latents\n\n        # duplicate mask and masked_image_latents for each generation per prompt, using mps friendly method\n        if mask.shape[0] < batch_size:\n            if not batch_size % mask.shape[0] == 0:\n                raise ValueError(\n                    \"The passed mask and the required batch size don't match. Masks are supposed to be duplicated to\"\n                    f\" a total batch size of {batch_size}, but {mask.shape[0]} masks were passed. Make sure the number\"\n                    \" of masks that you pass is divisible by the total requested batch size.\"\n                )\n            mask = mask.repeat(batch_size // mask.shape[0], 1, 1, 1)\n        if masked_image_latents.shape[0] < batch_size:\n            if not batch_size % masked_image_latents.shape[0] == 0:\n                raise ValueError(\n                    \"The passed images and the required batch size don't match. Images are supposed to be duplicated\"\n                    f\" to a total batch size of {batch_size}, but {masked_image_latents.shape[0]} images were passed.\"\n                    \" Make sure the number of images that you pass is divisible by the total requested batch size.\"\n                )\n            masked_image_latents = masked_image_latents.repeat(batch_size // masked_image_latents.shape[0], 1, 1, 1)\n\n        mask = torch.cat([mask] * 2) if do_classifier_free_guidance else mask\n        masked_image_latents = (\n            torch.cat([masked_image_latents] * 2) if do_classifier_free_guidance else masked_image_latents\n        )\n\n        # aligning device to prevent device errors when concating it with the latent model input\n        masked_image_latents = masked_image_latents.to(device=device, dtype=dtype)\n        return mask, masked_image_latents\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        mask_image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        strength: float = 1.0,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n   
     latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch which will be inpainted, *i.e.* parts of the image will\n                be masked out with `mask_image` and repainted according to `prompt`.\n            mask_image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                repainted, while black pixels will be preserved. If `mask_image` is a PIL image, it will be converted\n                to a single channel (luminance) before use. If it's a tensor, it should contain one color channel (L)\n                instead of 3, so the expected shape would be `(B, H, W, 1)`.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            strength (`float`, *optional*, defaults to 1.):\n                Conceptually, indicates how much to transform the masked portion of the reference `image`. Must be\n                between 0 and 1. `image` will be used as a starting point, adding more noise to it the larger the\n                `strength`. The number of denoising steps depends on the amount of noise initially added. When\n                `strength` is 1, added noise will be maximum and the denoising process will run for the full number of\n                iterations specified in `num_inference_steps`. A value of 1, therefore, essentially ignores the masked\n                portion of the reference `image`.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n        Examples:\n\n        ```py\n        >>> import PIL\n        >>> import requests\n        >>> import torch\n        >>> from io import BytesIO\n\n        >>> from diffusers import StableDiffusionInpaintPipeline\n\n\n        >>> def download_image(url):\n        ...     response = requests.get(url)\n        ...     
return PIL.Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n\n        >>> img_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo.png\"\n        >>> mask_url = \"https://raw.githubusercontent.com/CompVis/latent-diffusion/main/data/inpainting_examples/overture-creations-5sI6fQgYIuo_mask.png\"\n\n        >>> init_image = download_image(img_url).resize((512, 512))\n        >>> mask_image = download_image(mask_url).resize((512, 512))\n\n        >>> pipe = StableDiffusionInpaintPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-inpainting\", torch_dtype=torch.float16\n        ... )\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"Face of a yellow cat, high resolution, sitting on a park bench\"\n        >>> image = pipe(prompt=prompt, image=init_image, mask_image=mask_image).images[0]\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs\n        self.check_inputs(\n            prompt,\n            height,\n            width,\n            strength,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(\n            num_inference_steps=num_inference_steps, strength=strength, device=device\n        )\n        # at which timestep to set the initial noise (n.b. 50% if strength is 0.5)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n        # create a boolean to check if the strength is set to 1. if so then initialise the latents with pure noise\n        is_strength_max = strength == 1.0\n\n        # 5. 
Preprocess mask and image\n        mask, masked_image, init_image = prepare_mask_and_masked_image(\n            image, mask_image, height, width, return_image=True\n        )\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.vae.config.latent_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n            image=init_image,\n            timestep=latent_timestep,\n            is_strength_max=is_strength_max,\n        )\n\n        # 7. Prepare mask latent variables\n        mask, masked_image_latents = self.prepare_mask_latents(\n            mask,\n            masked_image,\n            batch_size * num_images_per_prompt,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            do_classifier_free_guidance,\n        )\n\n        # 8. Check that sizes of mask, masked image and latents match\n        num_channels_mask = mask.shape[1]\n        num_channels_masked_image = masked_image_latents.shape[1]\n        if num_channels_latents + num_channels_mask + num_channels_masked_image != self.unet.config.in_channels:\n            raise ValueError(\n                f\"Incorrect configuration settings! The config of `pipeline.unet`: {self.unet.config} expects\"\n                f\" {self.unet.config.in_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_mask`: {num_channels_mask} + `num_channels_masked_image`: {num_channels_masked_image}\"\n                f\" = {num_channels_latents+num_channels_masked_image+num_channels_mask}. Please verify the config of\"\n                \" `pipeline.unet` or your `mask_image` or `image` input.\"\n            )\n\n        # 9. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 10. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n\n                # concat latents, mask, masked_image_latents in the channel dimension\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                latent_model_input = torch.cat([latent_model_input, mask, masked_image_latents], dim=1)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
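  {
    "path": "examples/inpaint_mask_preprocessing_sketch.py",
    "content": "# NOTE: editorial sketch, not a file from the upstream repository. It exercises\n# `prepare_mask_and_masked_image` from pipeline_stable_diffusion_inpaint.py to show the contract\n# documented in its docstring: a binarized float mask of shape (B, 1, H, W), a masked image of\n# shape (B, 3, H, W) and, with `return_image=True`, the preprocessed source image normalized to\n# [-1, 1]. The file name and the synthetic inputs are assumptions; the function and its signature\n# come from this repository.\nimport numpy as np\nfrom PIL import Image\n\nfrom diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_inpaint import (\n    prepare_mask_and_masked_image,\n)\n\n# Synthetic 512x512 grey image and a mask whose white square marks the region to repaint.\nimage = Image.fromarray(np.full((512, 512, 3), 127, dtype=np.uint8))\nmask_array = np.zeros((512, 512), dtype=np.uint8)\nmask_array[128:384, 128:384] = 255\nmask = Image.fromarray(mask_array)\n\nmask_t, masked_image_t, init_image_t = prepare_mask_and_masked_image(\n    image, mask, height=512, width=512, return_image=True\n)\nprint(mask_t.shape)          # torch.Size([1, 1, 512, 512]), values binarized to {0., 1.}\nprint(masked_image_t.shape)  # torch.Size([1, 3, 512, 512]), masked region zeroed out\nprint(init_image_t.shape)    # torch.Size([1, 3, 512, 512]), values in [-1, 1]\n"
  },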
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_inpaint_legacy.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import FromCkptMixin, LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)\n\n\ndef preprocess_image(image, batch_size):\n    w, h = image.size\n    w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n    image = image.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"])\n    image = np.array(image).astype(np.float32) / 255.0\n    image = np.vstack([image[None].transpose(0, 3, 1, 2)] * batch_size)\n    image = torch.from_numpy(image)\n    return 2.0 * image - 1.0\n\n\ndef preprocess_mask(mask, batch_size, scale_factor=8):\n    if not isinstance(mask, torch.FloatTensor):\n        mask = mask.convert(\"L\")\n        w, h = mask.size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n        mask = mask.resize((w // scale_factor, h // scale_factor), resample=PIL_INTERPOLATION[\"nearest\"])\n        mask = np.array(mask).astype(np.float32) / 255.0\n        mask = np.tile(mask, (4, 1, 1))\n        mask = np.vstack([mask[None]] * batch_size)\n        mask = 1 - mask  # repaint white, keep black\n        mask = torch.from_numpy(mask)\n        return mask\n\n    else:\n        valid_mask_channel_sizes = [1, 3]\n        # if mask channel is fourth tensor dimension, permute dimensions to pytorch standard (B, C, H, W)\n        if mask.shape[3] in valid_mask_channel_sizes:\n            mask = mask.permute(0, 3, 1, 2)\n        elif mask.shape[1] not in valid_mask_channel_sizes:\n            raise ValueError(\n                f\"Mask channel dimension of size in {valid_mask_channel_sizes} should be second or fourth dimension,\"\n                f\" but received mask of shape {tuple(mask.shape)}\"\n            )\n        # (potentially) reduce mask channel dimension from 3 to 1 for broadcasting to latent shape\n        mask = mask.mean(dim=1, keepdim=True)\n        h, w = mask.shape[-2:]\n        h, w = (x - x % 8 for x in (h, w))  # resize to integer multiple of 8\n        mask = torch.nn.functional.interpolate(mask, (h // scale_factor, w // scale_factor))\n        return mask\n\n\nclass StableDiffusionInpaintPipelineLegacy(\n    
DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin, FromCkptMixin\n):\n    r\"\"\"\n    Pipeline for text-guided image inpainting using Stable Diffusion. *This is an experimental feature*.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n        - *Ckpt*: [`loaders.FromCkptMixin.from_ckpt`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"feature_extractor\"]\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.__init__\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. 
If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. 
Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type as 
`prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def 
decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.check_inputs\n    def check_inputs(\n        self, prompt, strength, callback_steps, negative_prompt=None, prompt_embeds=None, negative_prompt_embeds=None\n    ):\n        if strength < 0 or strength > 1:\n            raise ValueError(f\"The value of strength should be in [0.0, 1.0] but is {strength}\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.StableDiffusionImg2ImgPipeline.get_timesteps\n    def get_timesteps(self, num_inference_steps, strength, device):\n        # get the original timestep using init_timestep\n        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)\n\n        t_start = max(num_inference_steps - init_timestep, 0)\n        timesteps = self.scheduler.timesteps[t_start * self.scheduler.order :]\n\n        return timesteps, num_inference_steps - t_start\n\n    def prepare_latents(self, image, timestep, num_images_per_prompt, dtype, device, generator):\n        image = image.to(device=self.device, dtype=dtype)\n        init_latent_dist = self.vae.encode(image).latent_dist\n        init_latents = init_latent_dist.sample(generator=generator)\n        init_latents = self.vae.config.scaling_factor * init_latents\n\n        # Expand init_latents for batch_size and num_images_per_prompt\n        init_latents = torch.cat([init_latents] * num_images_per_prompt, dim=0)\n        init_latents_orig = init_latents\n\n        # add noise to latents using the timesteps\n        noise = randn_tensor(init_latents.shape, generator=generator, device=self.device, dtype=dtype)\n        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)\n        latents = init_latents\n        return latents, init_latents_orig, noise\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        mask_image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        strength: float = 0.8,\n        num_inference_steps: Optional[int] = 50,\n        guidance_scale: Optional[float] = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        add_predicted_noise: Optional[bool] = False,\n        eta: Optional[float] = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, that will be used as the starting point for the\n                process. 
This is the image whose masked region will be inpainted.\n            mask_image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch, to mask `image`. White pixels in the mask will be\n                replaced by noise and therefore repainted, while black pixels will be preserved. If `mask_image` is a\n                PIL image, it will be converted to a single channel (luminance) before use. If mask is a tensor, the\n                expected shape should be either `(B, H, W, C)` or `(B, C, H, W)`, where C is 1 or 3.\n            strength (`float`, *optional*, defaults to 0.8):\n                Conceptually, indicates how much to inpaint the masked area. Must be between 0 and 1. When `strength`\n                is 1, the denoising process will be run on the masked area for the full number of iterations specified\n                in `num_inference_steps`. `image` will be used as a reference for the masked area, adding more noise to\n                that region the larger the `strength`. If `strength` is 0, no inpainting will occur.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The reference number of denoising steps. More denoising steps usually lead to a higher quality image at\n                the expense of slower inference. This parameter will be modulated by `strength`, as explained above.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            add_predicted_noise (`bool`, *optional*, defaults to True):\n                Use predicted noise instead of random noise when constructing noisy versions of the original image in\n                the reverse diffusion process\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. 
Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 1. Check inputs\n        self.check_inputs(prompt, strength, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Preprocess image and mask\n        if not isinstance(image, torch.FloatTensor):\n            image = preprocess_image(image, batch_size)\n\n        mask_image = preprocess_mask(mask_image, batch_size, self.vae_scale_factor)\n\n        # 5. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)\n        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)\n\n        # 6. 
Prepare latent variables\n        # encode the init image into latents and scale the latents\n        latents, init_latents_orig, noise = self.prepare_latents(\n            image, latent_timestep, num_images_per_prompt, prompt_embeds.dtype, device, generator\n        )\n\n        # 7. Prepare mask latent\n        mask = mask_image.to(device=self.device, dtype=latents.dtype)\n        mask = torch.cat([mask] * num_images_per_prompt)\n\n        # 8. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 9. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=prompt_embeds, return_dict=False)[\n                    0\n                ]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n                # masking\n                if add_predicted_noise:\n                    init_latents_proper = self.scheduler.add_noise(\n                        init_latents_orig, noise_pred_uncond, torch.tensor([t])\n                    )\n                else:\n                    init_latents_proper = self.scheduler.add_noise(init_latents_orig, noise, torch.tensor([t]))\n\n                latents = (init_latents_proper * mask) + (latents * (1 - mask))\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # use original latents corresponding to unmasked portions of the image\n        latents = (init_latents_orig * mask) + (latents * (1 - mask))\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            
self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_instruct_pix2pix.py",
    "content": "# Copyright 2023 The InstructPix2Pix Authors and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import LoraLoaderMixin, TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\nclass StableDiffusionInstructPix2PixPipeline(DiffusionPipeline, TextualInversionLoaderMixin, LoraLoaderMixin):\n    r\"\"\"\n    Pipeline for pixel-level image editing by following text instructions. Based on Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    In addition the pipeline inherits the following loading methods:\n        - *Textual-Inversion*: [`loaders.TextualInversionLoaderMixin.load_textual_inversion`]\n        - *LoRA*: [`loaders.LoraLoaderMixin.load_lora_weights`]\n\n    as well as the following saving methods:\n        - *LoRA*: [`loaders.LoraLoaderMixin.save_lora_weights`]\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        num_inference_steps: int = 100,\n        guidance_scale: float = 7.5,\n        image_guidance_scale: float = 1.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            image (`PIL.Image.Image`):\n                `Image`, or tensor representing an image batch which will be repainted according to `prompt`.\n            num_inference_steps (`int`, *optional*, defaults to 100):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality. This pipeline requires a value of at least `1`.\n            image_guidance_scale (`float`, *optional*, defaults to 1.5):\n                Image guidance scale is used to push the generated image towards the initial image `image`. Image guidance\n                scale is enabled by setting `image_guidance_scale > 1`. Higher image guidance scale encourages to\n                generate images that are closely linked to the source image `image`, usually at the expense of lower\n                image quality. This pipeline requires a value of at least `1`.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. 
If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> import PIL\n        >>> import requests\n        >>> import torch\n        >>> from io import BytesIO\n\n        >>> from diffusers import StableDiffusionInstructPix2PixPipeline\n\n\n        >>> def download_image(url):\n        ...     response = requests.get(url)\n        ...     return PIL.Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n\n        >>> img_url = \"https://huggingface.co/datasets/diffusers/diffusers-images-docs/resolve/main/mountain.png\"\n\n        >>> image = download_image(img_url).resize((512, 512))\n\n        >>> pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(\n        ...     \"timbrooks/instruct-pix2pix\", torch_dtype=torch.float16\n        ... 
)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"make the mountains snowy\"\n        >>> image = pipe(prompt=prompt, image=image).images[0]\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Check inputs\n        self.check_inputs(prompt, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds)\n\n        if image is None:\n            raise ValueError(\"`image` input cannot be undefined.\")\n\n        # 1. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0 and image_guidance_scale >= 1.0\n        # check if scheduler is in sigmas space\n        scheduler_is_in_sigma_space = hasattr(self.scheduler, \"sigmas\")\n\n        # 2. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 3. Preprocess image\n        image = preprocess(image)\n        height, width = image.shape[-2:]\n\n        # 4. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare Image latents\n        image_latents = self.prepare_image_latents(\n            image,\n            batch_size,\n            num_images_per_prompt,\n            prompt_embeds.dtype,\n            device,\n            do_classifier_free_guidance,\n            generator,\n        )\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.vae.config.latent_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 7. Check that shapes of latents and image match the UNet channels\n        num_channels_image = image_latents.shape[1]\n        if num_channels_latents + num_channels_image != self.unet.config.in_channels:\n            raise ValueError(\n                f\"Incorrect configuration settings! 
The config of `pipeline.unet`: {self.unet.config} expects\"\n                f\" {self.unet.config.in_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_image`: {num_channels_image} \"\n                f\" = {num_channels_latents+num_channels_image}. Please verify the config of\"\n                \" `pipeline.unet` or your `image` input.\"\n            )\n\n        # 8. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 9. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # Expand the latents if we are doing classifier free guidance.\n                # The latents are expanded 3 times because for pix2pix the guidance\n                # is applied for both the text and the input image.\n                latent_model_input = torch.cat([latents] * 3) if do_classifier_free_guidance else latents\n\n                # concat latents, image_latents in the channel dimension\n                scaled_latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                scaled_latent_model_input = torch.cat([scaled_latent_model_input, image_latents], dim=1)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    scaled_latent_model_input, t, encoder_hidden_states=prompt_embeds, return_dict=False\n                )[0]\n\n                # Hack:\n                # For karras style schedulers the model does classifier free guidance using the\n                # predicted_original_sample instead of the noise_pred. So we need to compute the\n                # predicted_original_sample here if we are using a karras style scheduler.\n                if scheduler_is_in_sigma_space:\n                    step_index = (self.scheduler.timesteps == t).nonzero().item()\n                    sigma = self.scheduler.sigmas[step_index]\n                    noise_pred = latent_model_input - sigma * noise_pred\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_text, noise_pred_image, noise_pred_uncond = noise_pred.chunk(3)\n                    noise_pred = (\n                        noise_pred_uncond\n                        + guidance_scale * (noise_pred_text - noise_pred_image)\n                        + image_guidance_scale * (noise_pred_image - noise_pred_uncond)\n                    )\n\n                # Hack:\n                # For karras style schedulers the model does classifier free guidance using the\n                # predicted_original_sample instead of the noise_pred. But the scheduler.step function\n                # expects the noise_pred and computes the predicted_original_sample internally. 
So we\n                # need to overwrite the noise_pred here such that the value of the computed\n                # predicted_original_sample is correct.\n                if scheduler_is_in_sigma_space:\n                    noise_pred = (noise_pred - latents) / (-sigma)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_ prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            # pix2pix has two  negative embeddings, and unlike in other pipelines latents are ordered [prompt_embeds, negative_prompt_embeds, negative_prompt_embeds]\n            prompt_embeds = torch.cat([prompt_embeds, negative_prompt_embeds, negative_prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM 
paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def check_inputs(\n        self, prompt, callback_steps, negative_prompt=None, prompt_embeds=None, negative_prompt_embeds=None\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    def prepare_image_latents(\n        self, image, batch_size, num_images_per_prompt, dtype, device, do_classifier_free_guidance, generator=None\n    ):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        batch_size = batch_size * num_images_per_prompt\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            image_latents = [self.vae.encode(image[i : i + 1]).latent_dist.mode() for i in range(batch_size)]\n            image_latents = torch.cat(image_latents, dim=0)\n        else:\n            image_latents = self.vae.encode(image).latent_dist.mode()\n\n        if batch_size > image_latents.shape[0] and batch_size % image_latents.shape[0] == 0:\n            # expand image_latents for batch_size\n            deprecation_message = (\n                f\"You have passed {batch_size} text prompts (`prompt`), but only {image_latents.shape[0]} initial\"\n                \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                \" that this behavior is deprecated and will be removed in a version 1.0.0. 
Please make sure to update\"\n                \" your script to pass as many initial images as text prompts to suppress this warning.\"\n            )\n            deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n            additional_image_per_prompt = batch_size // image_latents.shape[0]\n            image_latents = torch.cat([image_latents] * additional_image_per_prompt, dim=0)\n        elif batch_size > image_latents.shape[0] and batch_size % image_latents.shape[0] != 0:\n            raise ValueError(\n                f\"Cannot duplicate `image` of batch size {image_latents.shape[0]} to {batch_size} text prompts.\"\n            )\n        else:\n            image_latents = torch.cat([image_latents], dim=0)\n\n        if do_classifier_free_guidance:\n            uncond_image_latents = torch.zeros_like(image_latents)\n            image_latents = torch.cat([image_latents, image_latents, uncond_image_latents], dim=0)\n\n        return image_latents\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_k_diffusion.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport importlib\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport torch\nfrom k_diffusion.external import CompVisDenoiser, CompVisVDenoiser\nfrom k_diffusion.sampling import get_sigmas_karras\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...pipelines import DiffusionPipeline\nfrom ...schedulers import LMSDiscreteScheduler\nfrom ...utils import is_accelerate_available, is_accelerate_version, logging, randn_tensor\nfrom . import StableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass ModelWrapper:\n    def __init__(self, model, alphas_cumprod):\n        self.model = model\n        self.alphas_cumprod = alphas_cumprod\n\n    def apply_model(self, *args, **kwargs):\n        if len(args) == 3:\n            encoder_hidden_states = args[-1]\n            args = args[:2]\n        if kwargs.get(\"cond\", None) is not None:\n            encoder_hidden_states = kwargs.pop(\"cond\")\n        return self.model(*args, encoder_hidden_states=encoder_hidden_states, **kwargs).sample\n\n\nclass StableDiffusionKDiffusionPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    <Tip warning={true}>\n\n        This is an experimental pipeline and is likely to change in the future.\n\n    </Tip>\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae,\n        text_encoder,\n        tokenizer,\n        unet,\n        scheduler,\n        safety_checker,\n        feature_extractor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        logger.info(\n            f\"{self.__class__} is an experimntal pipeline and is likely to change in the future. We recommend to use\"\n            \" this pipeline for fast experimentation / iteration if needed, but advice to rely on existing pipelines\"\n            \" as defined in https://huggingface.co/docs/diffusers/api/schedulers#implemented-schedulers for\"\n            \" production settings.\"\n        )\n\n        # get correct sigmas from LMS\n        scheduler = LMSDiscreteScheduler.from_config(scheduler.config)\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n        model = ModelWrapper(unet, scheduler.alphas_cumprod)\n        if scheduler.config.prediction_type == \"v_prediction\":\n            self.k_diffusion_model = CompVisVDenoiser(model)\n        else:\n            self.k_diffusion_model = CompVisDenoiser(model)\n\n    def set_scheduler(self, scheduler_type: str):\n        library = importlib.import_module(\"k_diffusion\")\n        sampling = getattr(library, \"sampling\")\n        self.sampler = getattr(sampling, scheduler_type)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_model_cpu_offload\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            if latents.shape != shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {shape}\")\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        use_karras_sigmas: Optional[bool] = False,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. 
Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds`. instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n                Use karras sigmas. For example, specifying `sample_dpmpp_2m` to `set_scheduler` will be equivalent to\n                `DPM++2M` in stable-diffusion-webui. 
On top of that, setting this option to True will make it `DPM++2M\n                Karras`.\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = True\n        if guidance_scale <= 1.0:\n            raise ValueError(\"has to use guidance_scale\")\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=prompt_embeds.device)\n\n        # 5. Prepare sigmas\n        if use_karras_sigmas:\n            sigma_min: float = self.k_diffusion_model.sigmas[0].item()\n            sigma_max: float = self.k_diffusion_model.sigmas[-1].item()\n            sigmas = get_sigmas_karras(n=num_inference_steps, sigma_min=sigma_min, sigma_max=sigma_max)\n            sigmas = sigmas.to(device)\n        else:\n            sigmas = self.scheduler.sigmas\n        sigmas = sigmas.to(prompt_embeds.dtype)\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n        latents = latents * sigmas[0]\n        self.k_diffusion_model.sigmas = self.k_diffusion_model.sigmas.to(latents.device)\n        self.k_diffusion_model.log_sigmas = self.k_diffusion_model.log_sigmas.to(latents.device)\n\n        # 7. 
Define model function\n        def model_fn(x, t):\n            latent_model_input = torch.cat([x] * 2)\n            t = torch.cat([t] * 2)\n\n            noise_pred = self.k_diffusion_model(latent_model_input, t, cond=prompt_embeds)\n\n            noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n            noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n            return noise_pred\n\n        # 8. Run k-diffusion solver\n        latents = self.sampler(model_fn, latents, sigmas)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_latent_upscale.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import EulerDiscreteScheduler\nfrom ...utils import is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_upscale.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 64 for x in (w, h))  # resize to integer multiple of 64\n\n        image = [np.array(i.resize((w, h)))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\nclass StableDiffusionLatentUpscalePipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline to upscale the resolution of Stable Diffusion output images by a factor of 2.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/main/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`EulerDiscreteScheduler`].\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: EulerDiscreteScheduler,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(self, prompt, device, do_classifier_free_guidance, negative_prompt):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `list(int)`):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. 
Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n        \"\"\"\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_length=True,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n\n        untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n        if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(text_input_ids, untruncated_ids):\n            removed_text = self.tokenizer.batch_decode(untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1])\n            logger.warning(\n                \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n            )\n\n        text_encoder_out = self.text_encoder(\n            text_input_ids.to(device),\n            output_hidden_states=True,\n        )\n        text_embeddings = text_encoder_out.hidden_states[-1]\n        text_pooler_out = text_encoder_out.pooler_output\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_length=True,\n                return_tensors=\"pt\",\n            )\n\n            uncond_encoder_out = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                output_hidden_states=True,\n            )\n\n            uncond_embeddings = uncond_encoder_out.hidden_states[-1]\n            uncond_pooler_out = uncond_encoder_out.pooler_output\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            text_embeddings = torch.cat([uncond_embeddings, text_embeddings])\n            text_pooler_out = torch.cat([uncond_pooler_out, text_pooler_out])\n\n        return text_embeddings, text_pooler_out\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def check_inputs(self, prompt, image, callback_steps):\n        if not isinstance(prompt, str) and not isinstance(prompt, list):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if (\n            not isinstance(image, torch.Tensor)\n            and not isinstance(image, PIL.Image.Image)\n            and not isinstance(image, list)\n        ):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or `list` but is {type(image)}\"\n            )\n\n        # verify batch size of prompt and image are same if image is a list or tensor\n        if isinstance(image, list) or isinstance(image, torch.Tensor):\n            if isinstance(prompt, str):\n                batch_size = 1\n            else:\n                batch_size = len(prompt)\n            if isinstance(image, list):\n                image_batch_size = len(image)\n            else:\n                image_batch_size = image.shape[0] if image.ndim == 4 else 1\n            if batch_size != image_batch_size:\n                raise ValueError(\n                    f\"`prompt` has batch size {batch_size} and `image` has batch size {image_batch_size}.\"\n                    \" Please make sure that passed `prompt` matches the batch size of `image`.\"\n                )\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n    
        raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_upscale.StableDiffusionUpscalePipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height, width)\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            if latents.shape != shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {shape}\")\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        image: Union[torch.FloatTensor, PIL.Image.Image, List[PIL.Image.Image]],\n        num_inference_steps: int = 75,\n        guidance_scale: float = 9.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image upscaling.\n            image (`PIL.Image.Image` or List[`PIL.Image.Image`] or `torch.FloatTensor`):\n                `Image`, or tensor representing an image batch which will be upscaled. If it's a tensor, it can be\n                either a latent output from a stable diffusion model, or an image tensor in the range `[-1, 1]`. It\n                will be considered a `latent` if `image.shape[1]` is `4`; otherwise, it will be considered to be an\n                image representation and encoded using this pipeline's `vae` encoder.\n            num_inference_steps (`int`, *optional*, defaults to 75):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 9.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. 
Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n        ```py\n        >>> from diffusers import StableDiffusionLatentUpscalePipeline, StableDiffusionPipeline\n        >>> import torch\n\n\n        >>> pipeline = StableDiffusionPipeline.from_pretrained(\n        ...     \"CompVis/stable-diffusion-v1-4\", torch_dtype=torch.float16\n        ... )\n        >>> pipeline.to(\"cuda\")\n\n        >>> model_id = \"stabilityai/sd-x2-latent-upscaler\"\n        >>> upscaler = StableDiffusionLatentUpscalePipeline.from_pretrained(model_id, torch_dtype=torch.float16)\n        >>> upscaler.to(\"cuda\")\n\n        >>> prompt = \"a photo of an astronaut high resolution, unreal engine, ultra realistic\"\n        >>> generator = torch.manual_seed(33)\n\n        >>> low_res_latents = pipeline(prompt, generator=generator, output_type=\"latent\").images\n\n        >>> with torch.no_grad():\n        ...     image = pipeline.decode_latents(low_res_latents)\n        >>> image = pipeline.numpy_to_pil(image)[0]\n\n        >>> image.save(\"../images/a1.png\")\n\n        >>> upscaled_image = upscaler(\n        ...     prompt=prompt,\n        ...     image=low_res_latents,\n        ...     num_inference_steps=20,\n        ...     guidance_scale=0,\n        ...     generator=generator,\n        ... 
).images[0]\n\n        >>> upscaled_image.save(\"../images/a2.png\")\n        ```\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`:\n            [`~pipelines.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple`. When returning a tuple,\n            the first element is a list with the generated images.\n        \"\"\"\n\n        # 1. Check inputs\n        self.check_inputs(prompt, image, callback_steps)\n\n        # 2. Define call parameters\n        batch_size = 1 if isinstance(prompt, str) else len(prompt)\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        if guidance_scale == 0:\n            prompt = [\"\"] * batch_size\n\n        # 3. Encode input prompt\n        text_embeddings, text_pooler_out = self._encode_prompt(\n            prompt, device, do_classifier_free_guidance, negative_prompt\n        )\n\n        # 4. Preprocess image\n        image = preprocess(image)\n        image = image.to(dtype=text_embeddings.dtype, device=device)\n        if image.shape[1] == 3:\n            # encode image if not in latent-space yet\n            image = self.vae.encode(image).latent_dist.sample() * self.vae.config.scaling_factor\n\n        # 5. Set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        batch_multiplier = 2 if do_classifier_free_guidance else 1\n        image = image[None, :] if image.ndim == 3 else image\n        image = torch.cat([image] * batch_multiplier)\n\n        # 6. Add noise to image (set to be 0):\n        # (see the note from the author below):\n        # \"This step theoretically can make the model work better on out-of-distribution inputs, but mostly just seems to make it match the input less, so it's turned off by default.\"\n        noise_level = torch.tensor([0.0], dtype=torch.float32, device=device)\n        noise_level = torch.cat([noise_level] * image.shape[0])\n        inv_noise_level = (noise_level**2 + 1) ** (-0.5)\n\n        image_cond = F.interpolate(image, scale_factor=2, mode=\"nearest\") * inv_noise_level[:, None, None, None]\n        image_cond = image_cond.to(text_embeddings.dtype)\n\n        noise_level_embed = torch.cat(\n            [\n                torch.ones(text_pooler_out.shape[0], 64, dtype=text_pooler_out.dtype, device=device),\n                torch.zeros(text_pooler_out.shape[0], 64, dtype=text_pooler_out.dtype, device=device),\n            ],\n            dim=1,\n        )\n\n        timestep_condition = torch.cat([noise_level_embed, text_pooler_out], dim=1)\n\n        # 7. 
Prepare latent variables\n        height, width = image.shape[2:]\n        num_channels_latents = self.vae.config.latent_channels\n        latents = self.prepare_latents(\n            batch_size,\n            num_channels_latents,\n            height * 2,  # 2x upscale\n            width * 2,\n            text_embeddings.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 8. Check that sizes of image and latents match\n        num_channels_image = image.shape[1]\n        if num_channels_latents + num_channels_image != self.unet.config.in_channels:\n            raise ValueError(\n                f\"Incorrect configuration settings! The config of `pipeline.unet`: {self.unet.config} expects\"\n                f\" {self.unet.config.in_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_image`: {num_channels_image} \"\n                f\" = {num_channels_latents+num_channels_image}. Please verify the config of\"\n                \" `pipeline.unet` or your `image` input.\"\n            )\n\n        # 9. Denoising loop\n        num_warmup_steps = 0\n\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                sigma = self.scheduler.sigmas[i]\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                scaled_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                scaled_model_input = torch.cat([scaled_model_input, image_cond], dim=1)\n                # preconditioning parameter based on Karras et al. (2022) (table 1)\n                timestep = torch.log(sigma) * 0.25\n\n                noise_pred = self.unet(\n                    scaled_model_input,\n                    timestep,\n                    encoder_hidden_states=text_embeddings,\n                    timestep_cond=timestep_condition,\n                ).sample\n\n                # in original repo, the output contains a variance channel that's not used\n                noise_pred = noise_pred[:, :-1]\n\n                # apply preconditioning, based on table 1 in Karras et al. 
(2022)\n                inv_sigma = 1 / (sigma**2 + 1)\n                noise_pred = inv_sigma * latent_model_input + self.scheduler.scale_model_input(sigma, t) * noise_pred\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
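  {
    "path": "examples/community_sketches/latent_upscaler_preconditioning_sketch.py",
    "content": "# Illustrative sketch only -- this file is not part of diffusers. It restates the\n# Karras et al. (2022), Table 1 preconditioning that the latent upscaler's denoising loop\n# applies around its UNet, written out explicitly (assuming sigma_data = 1, which is what\n# the pipeline's coefficients imply):\n#   c_in    = 1 / sqrt(sigma**2 + 1)     -> scheduler.scale_model_input(latents, t)\n#   c_noise = log(sigma) / 4             -> timestep = torch.log(sigma) * 0.25\n#   c_skip  = 1 / (sigma**2 + 1)         -> inv_sigma\n#   c_out   = sigma / sqrt(sigma**2 + 1) -> scheduler.scale_model_input(sigma, t)\n# The denoised estimate is then D(x) = c_skip * x + c_out * F(c_in * x, c_noise).\n# `toy_unet` is a placeholder (an assumption for this sketch) for the real UNet call.\nimport torch\n\n\ndef precondition(sigma: torch.Tensor):\n    c_in = 1.0 / (sigma**2 + 1.0) ** 0.5\n    c_noise = torch.log(sigma) * 0.25\n    c_skip = 1.0 / (sigma**2 + 1.0)\n    c_out = sigma / (sigma**2 + 1.0) ** 0.5\n    return c_in, c_noise, c_skip, c_out\n\n\ndef toy_unet(x, t):\n    # Placeholder network; the pipeline calls self.unet(scaled_model_input, timestep, ...) here.\n    return torch.tanh(x) * 0.1\n\n\ndef denoise(x, sigma):\n    c_in, c_noise, c_skip, c_out = precondition(sigma)\n    model_out = toy_unet(c_in * x, c_noise)\n    return c_skip * x + c_out * model_out\n\n\nif __name__ == \"__main__\":\n    x = torch.randn(1, 4, 128, 128)\n    sigma = torch.tensor(2.5)\n    print(denoise(x, sigma).shape)  # torch.Size([1, 4, 128, 128])\n"
  },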
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_model_editing.py",
    "content": "# Copyright 2023 TIME Authors and The HuggingFace Team. All rights reserved.\"\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport torch\nfrom transformers import CLIPFeatureExtractor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import PNDMScheduler\nfrom ...schedulers.scheduling_utils import SchedulerMixin\nfrom ...utils import is_accelerate_available, is_accelerate_version, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nAUGS_CONST = [\"A photo of \", \"An image of \", \"A picture of \"]\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import StableDiffusionModelEditingPipeline\n\n        >>> model_ckpt = \"CompVis/stable-diffusion-v1-4\"\n        >>> pipe = StableDiffusionModelEditingPipeline.from_pretrained(model_ckpt)\n\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> source_prompt = \"A pack of roses\"\n        >>> destination_prompt = \"A pack of blue roses\"\n        >>> pipe.edit_model(source_prompt, destination_prompt)\n\n        >>> prompt = \"A field of roses\"\n        >>> image = pipe(prompt).images[0]\n        ```\n\"\"\"\n\n\nclass StableDiffusionModelEditingPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image model editing using \"Editing Implicit Assumptions in Text-to-Image Diffusion Models\".\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.).\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents.\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPFeatureExtractor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n        with_to_k ([`bool`]):\n            Whether to edit the key projection matrices along with the value projection matrices.\n        with_augs ([`list`]):\n            Textual augmentations to apply while editing the text-to-image model. Set to [] for no augmentations.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: SchedulerMixin,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPFeatureExtractor,\n        requires_safety_checker: bool = True,\n        with_to_k: bool = True,\n        with_augs: list = AUGS_CONST,\n    ):\n        super().__init__()\n\n        if isinstance(scheduler, PNDMScheduler):\n            logger.error(\"PNDMScheduler for this pipeline is currently not supported.\")\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n        self.with_to_k = with_to_k\n        self.with_augs = with_augs\n\n        # get cross-attention layers\n        ca_layers = []\n\n        def append_ca(net_):\n            if net_.__class__.__name__ == \"CrossAttention\":\n                ca_layers.append(net_)\n            elif hasattr(net_, \"children\"):\n                for net__ in net_.children():\n                    append_ca(net__)\n\n        # recursively find all cross-attention layers in unet\n        for net in self.unet.named_children():\n            if \"down\" in net[0]:\n                append_ca(net[1])\n            elif \"up\" in net[0]:\n                append_ca(net[1])\n            elif \"mid\" in net[0]:\n                append_ca(net[1])\n\n        # get projection matrices\n        self.ca_clip_layers = [l for l in ca_layers if l.to_v.in_features == 768]\n        self.projection_matrices = [l.to_v for l in self.ca_clip_layers]\n        self.og_matrices = [copy.deepcopy(l.to_v) for l in self.ca_clip_layers]\n        if self.with_to_k:\n            self.projection_matrices = self.projection_matrices + [l.to_k for l in self.ca_clip_layers]\n            self.og_matrices = self.og_matrices + [copy.deepcopy(l.to_k) for l in self.ca_clip_layers]\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def edit_model(\n        self,\n        source_prompt: str,\n        destination_prompt: str,\n        lamb: float = 0.1,\n        restart_params: bool = True,\n    ):\n        r\"\"\"\n        Apply model editing via closed-form solution (see Eq. 5 in the TIME paper https://arxiv.org/abs/2303.08084)\n\n        Args:\n            source_prompt (`str`):\n                The source prompt containing the concept to be edited.\n            destination_prompt (`str`):\n                The destination prompt. Must contain all words from source_prompt with additional ones to specify the\n                target edit.\n            lamb (`float`, *optional*, defaults to 0.1):\n                The lambda parameter specifying the regularization intesity. Smaller values increase the editing power.\n            restart_params (`bool`, *optional*, defaults to True):\n                Restart the model parameters to their pre-trained version before editing. This is done to avoid edit\n                compounding. 
When it is False, edits accumulate.\n        \"\"\"\n\n        # restart LDM parameters\n        if restart_params:\n            num_ca_clip_layers = len(self.ca_clip_layers)\n            for idx_, l in enumerate(self.ca_clip_layers):\n                l.to_v = copy.deepcopy(self.og_matrices[idx_])\n                self.projection_matrices[idx_] = l.to_v\n                if self.with_to_k:\n                    l.to_k = copy.deepcopy(self.og_matrices[num_ca_clip_layers + idx_])\n                    self.projection_matrices[num_ca_clip_layers + idx_] = l.to_k\n\n        # set up sentences\n        old_texts = [source_prompt]\n        new_texts = [destination_prompt]\n        # add augmentations\n        base = old_texts[0] if old_texts[0][0:1] != \"A\" else \"a\" + old_texts[0][1:]\n        for aug in self.with_augs:\n            old_texts.append(aug + base)\n        base = new_texts[0] if new_texts[0][0:1] != \"A\" else \"a\" + new_texts[0][1:]\n        for aug in self.with_augs:\n            new_texts.append(aug + base)\n\n        # prepare input k* and v*\n        old_embs, new_embs = [], []\n        for old_text, new_text in zip(old_texts, new_texts):\n            text_input = self.tokenizer(\n                [old_text, new_text],\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_embeddings = self.text_encoder(text_input.input_ids.to(self.device))[0]\n            old_emb, new_emb = text_embeddings\n            old_embs.append(old_emb)\n            new_embs.append(new_emb)\n\n        # identify corresponding destinations for each token in old_emb\n        idxs_replaces = []\n        for old_text, new_text in zip(old_texts, new_texts):\n            tokens_a = self.tokenizer(old_text).input_ids\n            tokens_b = self.tokenizer(new_text).input_ids\n            tokens_a = [self.tokenizer.encode(\"a \")[1] if self.tokenizer.decode(t) == \"an\" else t for t in tokens_a]\n            tokens_b = [self.tokenizer.encode(\"a \")[1] if self.tokenizer.decode(t) == \"an\" else t for t in tokens_b]\n            num_orig_tokens = len(tokens_a)\n            idxs_replace = []\n            j = 0\n            for i in range(num_orig_tokens):\n                curr_token = tokens_a[i]\n                while tokens_b[j] != curr_token:\n                    j += 1\n                idxs_replace.append(j)\n                j += 1\n            while j < 77:\n                idxs_replace.append(j)\n                j += 1\n            while len(idxs_replace) < 77:\n                idxs_replace.append(76)\n            idxs_replaces.append(idxs_replace)\n\n        # prepare batch: for each pair of setences, old context and new values\n        contexts, valuess = [], []\n        for old_emb, new_emb, idxs_replace in zip(old_embs, new_embs, idxs_replaces):\n            context = old_emb.detach()\n            values = []\n            with torch.no_grad():\n                for layer in self.projection_matrices:\n                    values.append(layer(new_emb[idxs_replace]).detach())\n            contexts.append(context)\n            valuess.append(values)\n\n        # edit the model\n        for layer_num in range(len(self.projection_matrices)):\n            # mat1 = \\lambda W + \\sum{v k^T}\n            mat1 = lamb * self.projection_matrices[layer_num].weight\n\n            # mat2 = \\lambda I + \\sum{k k^T}\n            mat2 = lamb * torch.eye(\n               
 self.projection_matrices[layer_num].weight.shape[1],\n                device=self.projection_matrices[layer_num].weight.device,\n            )\n\n            # aggregate sums for mat1, mat2\n            for context, values in zip(contexts, valuess):\n                context_vector = context.reshape(context.shape[0], context.shape[1], 1)\n                context_vector_T = context.reshape(context.shape[0], 1, context.shape[1])\n                value_vector = values[layer_num].reshape(values[layer_num].shape[0], values[layer_num].shape[1], 1)\n                for_mat1 = (value_vector @ context_vector_T).sum(dim=0)\n                for_mat2 = (context_vector @ context_vector_T).sum(dim=0)\n                mat1 += for_mat1\n                mat2 += for_mat2\n\n            # update projection matrix\n            self.projection_matrices[layer_num].weight = torch.nn.Parameter(mat1 @ torch.inverse(mat2))\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_panorama.py",
    "content": "# Copyright 2023 MultiDiffusion Authors and The HuggingFace Team. All rights reserved.\"\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import DDIMScheduler, PNDMScheduler\nfrom ...utils import is_accelerate_available, is_accelerate_version, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import StableDiffusionPanoramaPipeline, DDIMScheduler\n\n        >>> model_ckpt = \"stabilityai/stable-diffusion-2-base\"\n        >>> scheduler = DDIMScheduler.from_pretrained(model_ckpt, subfolder=\"scheduler\")\n        >>> pipe = StableDiffusionPanoramaPipeline.from_pretrained(\n        ...     model_ckpt, scheduler=scheduler, torch_dtype=torch.float16\n        ... )\n\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"a photo of the dolomites\"\n        >>> image = pipe(prompt).images[0]\n        ```\n\"\"\"\n\n\nclass StableDiffusionPanoramaPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation using \"MultiDiffusion: Fusing Diffusion Paths for Controlled Image\n    Generation\".\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.).\n\n    To generate panorama-like images, be sure to pass the `width` parameter accordingly when using the pipeline. Our\n    recommendation for the `width` value is 2048. This is the default value of the `width` parameter for this pipeline.\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. The original work\n            on MultiDiffusion used the [`DDIMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: DDIMScheduler,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if isinstance(scheduler, PNDMScheduler):\n            logger.error(\"PNDMScheduler for this pipeline is currently not supported.\")\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. 
If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    def get_views(self, panorama_height, panorama_width, window_size=64, stride=8):\n        # Here, we define the mappings F_i (see Eq. 7 in the MultiDiffusion paper https://arxiv.org/abs/2302.08113)\n        panorama_height /= 8\n        panorama_width /= 8\n        num_blocks_height = (panorama_height - window_size) // stride + 1\n        num_blocks_width = (panorama_width - window_size) // stride + 1\n        total_num_blocks = int(num_blocks_height * num_blocks_width)\n        views = []\n        for i in range(total_num_blocks):\n            h_start = int((i // num_blocks_width) * stride)\n            h_end = h_start + window_size\n            w_start = int((i % num_blocks_width) * stride)\n            w_end = w_start + window_size\n            views.append((h_start, h_end, w_start, w_end))\n        return views\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = 512,\n        width: Optional[int] = 2048,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n   
         prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            height (`int`, *optional*, defaults to 512):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to 2048):\n                The width in pixels of the generated image. The width is kept to a high number because the\n                    pipeline is supposed to be used for generating panorama-like images.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. 
Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Define panorama grid and initialize views for synthesis.\n        views = self.get_views(height, width)\n        count = torch.zeros_like(latents)\n        value = torch.zeros_like(latents)\n\n        # 7. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. Denoising loop\n        # Each denoising step also includes refinement of the latents with respect to the\n        # views.\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                count.zero_()\n                value.zero_()\n\n                # generate views\n                # Here, we iterate through different spatial crops of the latents and denoise them. These\n                # denoised (latent) crops are then averaged to produce the final latent\n                # for the current timestep via MultiDiffusion. Please see Sec. 4.1 in the\n                # MultiDiffusion paper for more details: https://arxiv.org/abs/2302.08113\n                for h_start, h_end, w_start, w_end in views:\n                    # get the latents corresponding to the current view coordinates\n                    latents_for_view = latents[:, :, h_start:h_end, w_start:w_end]\n\n                    # expand the latents if we are doing classifier free guidance\n                    latent_model_input = (\n                        torch.cat([latents_for_view] * 2) if do_classifier_free_guidance else latents_for_view\n                    )\n                    latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                    # predict the noise residual\n                    noise_pred = self.unet(\n                        latent_model_input,\n                        t,\n                        encoder_hidden_states=prompt_embeds,\n                        cross_attention_kwargs=cross_attention_kwargs,\n                    ).sample\n\n                    # perform guidance\n                    if do_classifier_free_guidance:\n                        noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                        noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                    # compute the previous noisy sample x_t -> x_t-1\n                    latents_view_denoised = self.scheduler.step(\n                        noise_pred, t, latents_for_view, **extra_step_kwargs\n                    ).prev_sample\n                    value[:, :, h_start:h_end, w_start:w_end] += latents_view_denoised\n                    count[:, :, h_start:h_end, w_start:w_end] += 1\n\n                # take the MultiDiffusion step. Eq. 
5 in MultiDiffusion paper: https://arxiv.org/abs/2302.08113\n                latents = torch.where(count > 0, value / count, value)\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_pix2pix_zero.py",
    "content": "# Copyright 2023 Pix2Pix Zero Authors and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom dataclasses import dataclass\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.nn.functional as F\nfrom transformers import (\n    BlipForConditionalGeneration,\n    BlipProcessor,\n    CLIPImageProcessor,\n    CLIPTextModel,\n    CLIPTokenizer,\n)\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...models.attention_processor import Attention\nfrom ...schedulers import DDIMScheduler, DDPMScheduler, EulerAncestralDiscreteScheduler, LMSDiscreteScheduler\nfrom ...schedulers.scheduling_ddim_inverse import DDIMInverseScheduler\nfrom ...utils import (\n    PIL_INTERPOLATION,\n    BaseOutput,\n    deprecate,\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\nclass Pix2PixInversionPipelineOutput(BaseOutput, TextualInversionLoaderMixin):\n    \"\"\"\n    Output class for Stable Diffusion pipelines.\n\n    Args:\n        latents (`torch.FloatTensor`)\n            inverted latents tensor\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n    \"\"\"\n\n    latents: torch.FloatTensor\n    images: Union[List[PIL.Image.Image], np.ndarray]\n\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import requests\n        >>> import torch\n\n        >>> from diffusers import DDIMScheduler, StableDiffusionPix2PixZeroPipeline\n\n\n        >>> def download(embedding_url, local_filepath):\n        ...     r = requests.get(embedding_url)\n        ...     with open(local_filepath, \"wb\") as f:\n        ...         
f.write(r.content)\n\n\n        >>> model_ckpt = \"CompVis/stable-diffusion-v1-4\"\n        >>> pipeline = StableDiffusionPix2PixZeroPipeline.from_pretrained(model_ckpt, torch_dtype=torch.float16)\n        >>> pipeline.scheduler = DDIMScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.to(\"cuda\")\n\n        >>> prompt = \"a high resolution painting of a cat in the style of van gough\"\n        >>> source_emb_url = \"https://hf.co/datasets/sayakpaul/sample-datasets/resolve/main/cat.pt\"\n        >>> target_emb_url = \"https://hf.co/datasets/sayakpaul/sample-datasets/resolve/main/dog.pt\"\n\n        >>> for url in [source_emb_url, target_emb_url]:\n        ...     download(url, url.split(\"/\")[-1])\n\n        >>> src_embeds = torch.load(source_emb_url.split(\"/\")[-1])\n        >>> target_embeds = torch.load(target_emb_url.split(\"/\")[-1])\n        >>> images = pipeline(\n        ...     prompt,\n        ...     source_embeds=src_embeds,\n        ...     target_embeds=target_embeds,\n        ...     num_inference_steps=50,\n        ...     cross_attention_guidance_amount=0.15,\n        ... ).images\n\n        >>> images[0].save(\"edited_image_dog.png\")\n        ```\n\"\"\"\n\nEXAMPLE_INVERT_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from transformers import BlipForConditionalGeneration, BlipProcessor\n        >>> from diffusers import DDIMScheduler, DDIMInverseScheduler, StableDiffusionPix2PixZeroPipeline\n\n        >>> import requests\n        >>> from PIL import Image\n\n        >>> captioner_id = \"Salesforce/blip-image-captioning-base\"\n        >>> processor = BlipProcessor.from_pretrained(captioner_id)\n        >>> model = BlipForConditionalGeneration.from_pretrained(\n        ...     captioner_id, torch_dtype=torch.float16, low_cpu_mem_usage=True\n        ... )\n\n        >>> sd_model_ckpt = \"CompVis/stable-diffusion-v1-4\"\n        >>> pipeline = StableDiffusionPix2PixZeroPipeline.from_pretrained(\n        ...     sd_model_ckpt,\n        ...     caption_generator=model,\n        ...     caption_processor=processor,\n        ...     torch_dtype=torch.float16,\n        ...     safety_checker=None,\n        ... 
)\n\n        >>> pipeline.scheduler = DDIMScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.inverse_scheduler = DDIMInverseScheduler.from_config(pipeline.scheduler.config)\n        >>> pipeline.enable_model_cpu_offload()\n\n        >>> img_url = \"https://github.com/pix2pixzero/pix2pix-zero/raw/main/assets/test_images/cats/cat_6.png\"\n\n        >>> raw_image = Image.open(requests.get(img_url, stream=True).raw).convert(\"RGB\").resize((512, 512))\n        >>> # generate caption\n        >>> caption = pipeline.generate_caption(raw_image)\n\n        >>> # \"a photography of a cat with flowers and dai dai daie - daie - daie kasaii\"\n        >>> inv_latents = pipeline.invert(caption, image=raw_image).latents\n        >>> # we need to generate source and target embeds\n\n        >>> source_prompts = [\"a cat sitting on the street\", \"a cat playing in the field\", \"a face of a cat\"]\n\n        >>> target_prompts = [\"a dog sitting on the street\", \"a dog playing in the field\", \"a face of a dog\"]\n\n        >>> source_embeds = pipeline.get_embeds(source_prompts)\n        >>> target_embeds = pipeline.get_embeds(target_prompts)\n        >>> # the latents can then be used to edit a real image\n        >>> # when using Stable Diffusion 2 or other models that use v-prediction\n        >>> # set `cross_attention_guidance_amount` to 0.01 or less to avoid input latent gradient explosion\n\n        >>> image = pipeline(\n        ...     caption,\n        ...     source_embeds=source_embeds,\n        ...     target_embeds=target_embeds,\n        ...     num_inference_steps=50,\n        ...     cross_attention_guidance_amount=0.15,\n        ...     generator=generator,\n        ...     latents=inv_latents,\n        ...     negative_prompt=caption,\n        ... 
).images[0]\n        >>> image.save(\"edited_image.png\")\n        ```\n\"\"\"\n\n\n# Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_img2img.preprocess\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n\n        image = [np.array(i.resize((w, h), resample=PIL_INTERPOLATION[\"lanczos\"]))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\ndef prepare_unet(unet: UNet2DConditionModel):\n    \"\"\"Modifies the UNet (`unet`) to perform Pix2Pix Zero optimizations.\"\"\"\n    pix2pix_zero_attn_procs = {}\n    for name in unet.attn_processors.keys():\n        module_name = name.replace(\".processor\", \"\")\n        module = unet.get_submodule(module_name)\n        if \"attn2\" in name:\n            pix2pix_zero_attn_procs[name] = Pix2PixZeroAttnProcessor(is_pix2pix_zero=True)\n            module.requires_grad_(True)\n        else:\n            pix2pix_zero_attn_procs[name] = Pix2PixZeroAttnProcessor(is_pix2pix_zero=False)\n            module.requires_grad_(False)\n\n    unet.set_attn_processor(pix2pix_zero_attn_procs)\n    return unet\n\n\nclass Pix2PixZeroL2Loss:\n    def __init__(self):\n        self.loss = 0.0\n\n    def compute_loss(self, predictions, targets):\n        self.loss += ((predictions - targets) ** 2).sum((1, 2)).mean(0)\n\n\nclass Pix2PixZeroAttnProcessor:\n    \"\"\"An attention processor class to store the attention weights.\n    In Pix2Pix Zero, it happens during computations in the cross-attention blocks.\"\"\"\n\n    def __init__(self, is_pix2pix_zero=False):\n        self.is_pix2pix_zero = is_pix2pix_zero\n        if self.is_pix2pix_zero:\n            self.reference_cross_attn_map = {}\n\n    def __call__(\n        self,\n        attn: Attention,\n        hidden_states,\n        encoder_hidden_states=None,\n        attention_mask=None,\n        timestep=None,\n        loss=None,\n    ):\n        batch_size, sequence_length, _ = hidden_states.shape\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n        query = attn.to_q(hidden_states)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        query = attn.head_to_batch_dim(query)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        if self.is_pix2pix_zero and timestep is not None:\n            # new bookkeeping to save the attention weights.\n            if loss is None:\n                self.reference_cross_attn_map[timestep.item()] = attention_probs.detach().cpu()\n            # compute loss\n            elif loss is not None:\n                prev_attn_probs = 
self.reference_cross_attn_map.pop(timestep.item())\n                loss.compute_loss(attention_probs, prev_attn_probs.to(attention_probs.device))\n\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        return hidden_states\n\n\nclass StableDiffusionPix2PixZeroPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for pixel-level image editing using Pix2Pix Zero. Based on Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], [`EulerAncestralDiscreteScheduler`], or [`DDPMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n        requires_safety_checker (bool):\n            Whether the pipeline requires a safety checker. 
We recommend setting it to True if you're using the\n            pipeline publicly.\n    \"\"\"\n    _optional_components = [\n        \"safety_checker\",\n        \"feature_extractor\",\n        \"caption_generator\",\n        \"caption_processor\",\n        \"inverse_scheduler\",\n    ]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: Union[DDPMScheduler, DDIMScheduler, EulerAncestralDiscreteScheduler, LMSDiscreteScheduler],\n        feature_extractor: CLIPImageProcessor,\n        safety_checker: StableDiffusionSafetyChecker,\n        inverse_scheduler: DDIMInverseScheduler,\n        caption_generator: BlipForConditionalGeneration,\n        caption_processor: BlipProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                \"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead.\"\n            )\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n            caption_processor=caption_processor,\n            caption_generator=caption_generator,\n            inverse_scheduler=inverse_scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. 
Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        hook = None\n        for cpu_offloaded_model in [self.vae, self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        if self.safety_checker is not None:\n            _, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        source_embeds,\n        target_embeds,\n        callback_steps,\n        prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n        if source_embeds is None and target_embeds is None:\n            raise ValueError(\"`source_embeds` and `target_embeds` cannot be undefined.\")\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n    #  Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def generate_caption(self, images):\n        \"\"\"Generates caption for a given image.\"\"\"\n        text = \"a photography of\"\n\n        prev_device = self.caption_generator.device\n\n        device = self._execution_device\n        inputs = self.caption_processor(images, text, return_tensors=\"pt\").to(\n            device=device, dtype=self.caption_generator.dtype\n        )\n        self.caption_generator.to(device)\n        outputs = self.caption_generator.generate(**inputs, max_new_tokens=128)\n\n        # offload caption generator\n        self.caption_generator.to(prev_device)\n\n        caption = self.caption_processor.batch_decode(outputs, skip_special_tokens=True)[0]\n        return caption\n\n    def construct_direction(self, embs_source: torch.Tensor, embs_target: torch.Tensor):\n        \"\"\"Constructs the edit direction to steer the image generation process semantically.\"\"\"\n        return (embs_target.mean(0) - embs_source.mean(0)).unsqueeze(0)\n\n    @torch.no_grad()\n    def get_embeds(self, prompt: List[str], batch_size: int = 16) -> torch.FloatTensor:\n        num_prompts = len(prompt)\n        embeds = []\n        for i in range(0, num_prompts, batch_size):\n            prompt_slice = prompt[i : i + batch_size]\n\n            input_ids = self.tokenizer(\n                prompt_slice,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            ).input_ids\n\n            input_ids = input_ids.to(self.text_encoder.device)\n            embeds.append(self.text_encoder(input_ids)[0])\n\n        return torch.cat(embeds, dim=0).mean(0)[None]\n\n    def prepare_image_latents(self, image, batch_size, dtype, device, generator=None):\n        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}\"\n            )\n\n        image = image.to(device=device, dtype=dtype)\n\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if isinstance(generator, list):\n            latents = [self.vae.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)]\n            latents = torch.cat(latents, dim=0)\n        else:\n            latents = self.vae.encode(image).latent_dist.sample(generator)\n\n        latents = self.vae.config.scaling_factor * latents\n\n        if batch_size != latents.shape[0]:\n            if batch_size % latents.shape[0] == 0:\n                # expand image_latents for batch_size\n                deprecation_message = (\n                    f\"You have passed {batch_size} text prompts (`prompt`), but only {latents.shape[0]} initial\"\n                    \" images (`image`). Initial images are now duplicating to match the number of text prompts. Note\"\n                    \" that this behavior is deprecated and will be removed in a version 1.0.0. Please make sure to update\"\n                    \" your script to pass as many initial images as text prompts to suppress this warning.\"\n                )\n                deprecate(\"len(prompt) != len(image)\", \"1.0.0\", deprecation_message, standard_warn=False)\n                additional_latents_per_image = batch_size // latents.shape[0]\n                latents = torch.cat([latents] * additional_latents_per_image, dim=0)\n            else:\n                raise ValueError(\n                    f\"Cannot duplicate `image` of batch size {latents.shape[0]} to {batch_size} text prompts.\"\n                )\n        else:\n            latents = torch.cat([latents], dim=0)\n\n        return latents\n\n    def get_epsilon(self, model_output: torch.Tensor, sample: torch.Tensor, timestep: int):\n        pred_type = self.inverse_scheduler.config.prediction_type\n        alpha_prod_t = self.inverse_scheduler.alphas_cumprod[timestep]\n\n        beta_prod_t = 1 - alpha_prod_t\n\n        if pred_type == \"epsilon\":\n            return model_output\n        elif pred_type == \"sample\":\n            return (sample - alpha_prod_t ** (0.5) * model_output) / beta_prod_t ** (0.5)\n        elif pred_type == \"v_prediction\":\n            return (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        else:\n            raise ValueError(\n                f\"prediction_type given as {pred_type} must be one of `epsilon`, `sample`, or `v_prediction`\"\n            )\n\n    def auto_corr_loss(self, hidden_states, generator=None):\n        reg_loss = 0.0\n        for i in range(hidden_states.shape[0]):\n            for j in range(hidden_states.shape[1]):\n                noise = hidden_states[i : i + 1, j : j + 1, :, :]\n                while True:\n                    roll_amount = torch.randint(noise.shape[2] // 2, (1,), generator=generator).item()\n                    reg_loss += (noise * torch.roll(noise, shifts=roll_amount, dims=2)).mean() ** 2\n                    reg_loss += (noise * torch.roll(noise, shifts=roll_amount, dims=3)).mean() ** 2\n\n                    if noise.shape[2] <= 8:\n                        break\n                    noise = F.avg_pool2d(noise, kernel_size=2)\n        return reg_loss\n\n    def kl_divergence(self, hidden_states):\n        mean = hidden_states.mean()\n        var = hidden_states.var()\n        return var + mean**2 - 1 - torch.log(var + 1e-7)\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Optional[Union[str, 
List[str]]] = None,\n        image: Optional[Union[torch.FloatTensor, PIL.Image.Image]] = None,\n        source_embeds: torch.Tensor = None,\n        target_embeds: torch.Tensor = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        cross_attention_guidance_amount: float = 0.1,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            source_embeds (`torch.Tensor`):\n                Source concept embeddings. Generation of the embeddings as per the [original\n                paper](https://arxiv.org/abs/2302.03027). Used in discovering the edit direction.\n            target_embeds (`torch.Tensor`):\n                Target concept embeddings. Generation of the embeddings as per the [original\n                paper](https://arxiv.org/abs/2302.03027). Used in discovering the edit direction.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            cross_attention_guidance_amount (`float`, defaults to 0.1):\n                Amount of guidance needed from the reference cross-attention maps.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. 
Define the spatial resolutions.\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt,\n            image,\n            source_embeds,\n            target_embeds,\n            callback_steps,\n            prompt_embeds,\n        )\n\n        # 3. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n        if cross_attention_kwargs is None:\n            cross_attention_kwargs = {}\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Generate the inverted noise from the input image or any other image\n        # generated from the input prompt.\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n        latents_init = latents.clone()\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. Rejig the UNet so that we can obtain the cross-attenion maps and\n        # use them for guiding the subsequent image generation.\n        self.unet = prepare_unet(self.unet)\n\n        # 7. 
Denoising loop where we obtain the cross-attention maps.\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs={\"timestep\": t},\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 8. Compute the edit directions.\n        edit_direction = self.construct_direction(source_embeds, target_embeds).to(prompt_embeds.device)\n\n        # 9. Edit the prompt embeddings as per the edit directions discovered.\n        prompt_embeds_edit = prompt_embeds.clone()\n        prompt_embeds_edit[1:2] += edit_direction\n\n        # 10. 
Second denoising loop to generate the edited image.\n        latents = latents_init\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # we want to learn the latent such that it steers the generation\n                # process towards the edited direction, so make the make initial\n                # noise learnable\n                x_in = latent_model_input.detach().clone()\n                x_in.requires_grad = True\n\n                # optimizer\n                opt = torch.optim.SGD([x_in], lr=cross_attention_guidance_amount)\n\n                with torch.enable_grad():\n                    # initialize loss\n                    loss = Pix2PixZeroL2Loss()\n\n                    # predict the noise residual\n                    noise_pred = self.unet(\n                        x_in,\n                        t,\n                        encoder_hidden_states=prompt_embeds_edit.detach(),\n                        cross_attention_kwargs={\"timestep\": t, \"loss\": loss},\n                    ).sample\n\n                    loss.loss.backward(retain_graph=False)\n                    opt.step()\n\n                # recompute the noise\n                noise_pred = self.unet(\n                    x_in.detach(),\n                    t,\n                    encoder_hidden_states=prompt_embeds_edit,\n                    cross_attention_kwargs={\"timestep\": None},\n                ).sample\n\n                latents = x_in.detach().chunk(2)[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, 
nsfw_content_detected=has_nsfw_concept)\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_INVERT_DOC_STRING)\n    def invert(\n        self,\n        prompt: Optional[str] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 1,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        cross_attention_guidance_amount: float = 0.1,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        lambda_auto_corr: float = 20.0,\n        lambda_kl: float = 20.0,\n        num_reg_steps: int = 5,\n        num_auto_corr_rolls: int = 5,\n    ):\n        r\"\"\"\n        Function used to generate inverted latents given a prompt and image.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            image (`PIL.Image.Image`, *optional*):\n                `Image`, or tensor representing an image batch which will be used for conditioning.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 1):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            cross_attention_guidance_amount (`float`, defaults to 0.1):\n                Amount of guidance needed from the reference cross-attention maps.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            lambda_auto_corr (`float`, *optional*, defaults to 20.0):\n                Lambda parameter to control auto correction\n            lambda_kl (`float`, *optional*, defaults to 20.0):\n                Lambda parameter to control Kullback–Leibler divergence output\n            num_reg_steps (`int`, *optional*, defaults to 5):\n                Number of regularization loss steps\n            num_auto_corr_rolls (`int`, *optional*, defaults to 5):\n                Number of auto correction roll steps\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.pipeline_stable_diffusion_pix2pix_zero.Pix2PixInversionPipelineOutput`] or\n            `tuple`:\n            [`~pipelines.stable_diffusion.pipeline_stable_diffusion_pix2pix_zero.Pix2PixInversionPipelineOutput`] if\n            `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the inverted\n            latents tensor and the second is the corresponding decoded image.\n        \"\"\"\n        # 1. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n        if cross_attention_kwargs is None:\n            cross_attention_kwargs = {}\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 2. Preprocess image\n        image = preprocess(image)\n\n        # 3. Prepare latent variables\n        latents = self.prepare_image_latents(image, batch_size, self.vae.dtype, device, generator)\n\n        # 4. Encode input prompt\n        num_images_per_prompt = 1\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            prompt_embeds=prompt_embeds,\n        )\n\n        # 5. Prepare timesteps\n        self.inverse_scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.inverse_scheduler.timesteps\n\n        # 6. Rejig the UNet so that we can obtain the cross-attention maps and\n        # use them for guiding the subsequent image generation.\n        self.unet = prepare_unet(self.unet)\n\n        # 7. 
Denoising loop where we obtain the cross-attention maps.\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.inverse_scheduler.order\n        with self.progress_bar(total=num_inference_steps - 1) as progress_bar:\n            for i, t in enumerate(timesteps[:-1]):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.inverse_scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs={\"timestep\": t},\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # regularization of the noise prediction\n                with torch.enable_grad():\n                    for _ in range(num_reg_steps):\n                        if lambda_auto_corr > 0:\n                            for _ in range(num_auto_corr_rolls):\n                                var = torch.autograd.Variable(noise_pred.detach().clone(), requires_grad=True)\n\n                                # Derive epsilon from model output before regularizing to IID standard normal\n                                var_epsilon = self.get_epsilon(var, latent_model_input.detach(), t)\n\n                                l_ac = self.auto_corr_loss(var_epsilon, generator=generator)\n                                l_ac.backward()\n\n                                grad = var.grad.detach() / num_auto_corr_rolls\n                                noise_pred = noise_pred - lambda_auto_corr * grad\n\n                        if lambda_kl > 0:\n                            var = torch.autograd.Variable(noise_pred.detach().clone(), requires_grad=True)\n\n                            # Derive epsilon from model output before regularizing to IID standard normal\n                            var_epsilon = self.get_epsilon(var, latent_model_input.detach(), t)\n\n                            l_kld = self.kl_divergence(var_epsilon)\n                            l_kld.backward()\n\n                            grad = var.grad.detach()\n                            noise_pred = noise_pred - lambda_kl * grad\n\n                        noise_pred = noise_pred.detach()\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.inverse_scheduler.step(noise_pred, t, latents).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or (\n                    (i + 1) > num_warmup_steps and (i + 1) % self.inverse_scheduler.order == 0\n                ):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        inverted_latents = latents.detach().clone()\n\n        # 8. 
Post-processing\n        image = self.decode_latents(latents.detach())\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        # 9. Convert to PIL.\n        if output_type == \"pil\":\n            image = self.image_processor.numpy_to_pil(image)\n\n        if not return_dict:\n            return (inverted_latents, image)\n\n        return Pix2PixInversionPipelineOutput(latents=inverted_latents, images=image)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_sag.py",
    "content": "# Copyright 2023 Susung Hong and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport torch\nimport torch.nn.functional as F\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, is_accelerate_version, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\nfrom .safety_checker import StableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import StableDiffusionSAGPipeline\n\n        >>> pipe = StableDiffusionSAGPipeline.from_pretrained(\n        ...     \"runwayml/stable-diffusion-v1-5\", torch_dtype=torch.float16\n        ... )\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"a photo of an astronaut riding a horse on mars\"\n        >>> image = pipe(prompt, sag_scale=0.75).images[0]\n        ```\n\"\"\"\n\n\n# processes and stores attention probabilities\nclass CrossAttnStoreProcessor:\n    def __init__(self):\n        self.attention_probs = None\n\n    def __call__(\n        self,\n        attn,\n        hidden_states,\n        encoder_hidden_states=None,\n        attention_mask=None,\n    ):\n        batch_size, sequence_length, _ = hidden_states.shape\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n        query = attn.to_q(hidden_states)\n\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        query = attn.head_to_batch_dim(query)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        self.attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(self.attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        return hidden_states\n\n\n# Modified to get self-attention guidance scale in this paper (https://arxiv.org/pdf/2210.00939.pdf) as an input\nclass StableDiffusionSAGPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-image generation 
using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. 
If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_sequential_cpu_offload\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        Note that offloading happens on a submodule basis. Memory savings are higher than with\n        `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n        if self.safety_checker is not None:\n            cpu_offload(self.safety_checker, execution_device=device, offload_buffers=True)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is None:\n            has_nsfw_concept = None\n        else:\n            if torch.is_tensor(image):\n                feature_extractor_input = self.image_processor.postprocess(image, output_type=\"pil\")\n            else:\n                feature_extractor_input = self.image_processor.numpy_to_pil(image)\n            safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n        return image, has_nsfw_concept\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        sag_scale: float = 0.75,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2 of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. A higher guidance scale encourages the model to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            sag_scale (`float`, *optional*, defaults to 0.75):\n                SAG scale as defined in [Improving Sample Quality of Diffusion Models Using Self-Attention Guidance]\n                (https://arxiv.org/abs/2210.00939). `sag_scale` is defined as `s_s` of equation (24) of SAG paper:\n                https://arxiv.org/pdf/2210.00939.pdf. Typically chosen between [0, 1.0] for better quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that, if specified, is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analogously to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n        # and `sag_scale` is `s` of equation (16)\n        # of the self-attention guidance paper: https://arxiv.org/pdf/2210.00939.pdf\n        # `sag_scale = 0` means no self-attention guidance\n        do_self_attention_guidance = sag_scale > 0.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. 
Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. Denoising loop\n        store_processor = CrossAttnStoreProcessor()\n        self.unet.mid_block.attentions[0].transformer_blocks[0].attn1.processor = store_processor\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n\n        map_size = None\n\n        def get_map_size(module, input, output):\n            nonlocal map_size\n            map_size = output[0].shape[-2:]\n\n        with self.unet.mid_block.attentions[0].register_forward_hook(get_map_size):\n            with self.progress_bar(total=num_inference_steps) as progress_bar:\n                for i, t in enumerate(timesteps):\n                    # expand the latents if we are doing classifier free guidance\n                    latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                    latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                    # predict the noise residual\n\n                    noise_pred = self.unet(\n                        latent_model_input,\n                        t,\n                        encoder_hidden_states=prompt_embeds,\n                        cross_attention_kwargs=cross_attention_kwargs,\n                    ).sample\n\n                    # perform guidance\n                    if do_classifier_free_guidance:\n                        noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                        noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                    # perform self-attention guidance with the stored self-attentnion map\n                    if do_self_attention_guidance:\n                        # classifier-free guidance produces two chunks of attention map\n                        # and we only use unconditional one according to equation (25)\n                        # in https://arxiv.org/pdf/2210.00939.pdf\n                        if do_classifier_free_guidance:\n                            # DDIM-like prediction of x0\n                            pred_x0 = self.pred_x0(latents, noise_pred_uncond, t)\n                            # get the stored attention maps\n                            uncond_attn, cond_attn = store_processor.attention_probs.chunk(2)\n                            # self-attention-based degrading of latents\n                            degraded_latents = self.sag_masking(\n                                pred_x0, uncond_attn, map_size, t, self.pred_epsilon(latents, noise_pred_uncond, t)\n                            )\n                            uncond_emb, _ = prompt_embeds.chunk(2)\n                            # forward and give guidance\n                            degraded_pred = self.unet(degraded_latents, t, encoder_hidden_states=uncond_emb).sample\n                      
      noise_pred += sag_scale * (noise_pred_uncond - degraded_pred)\n                        else:\n                            # DDIM-like prediction of x0\n                            pred_x0 = self.pred_x0(latents, noise_pred, t)\n                            # get the stored attention maps\n                            cond_attn = store_processor.attention_probs\n                            # self-attention-based degrading of latents\n                            degraded_latents = self.sag_masking(\n                                pred_x0, cond_attn, map_size, t, self.pred_epsilon(latents, noise_pred, t)\n                            )\n                            # forward and give guidance\n                            degraded_pred = self.unet(degraded_latents, t, encoder_hidden_states=prompt_embeds).sample\n                            noise_pred += sag_scale * (noise_pred - degraded_pred)\n\n                    # compute the previous noisy sample x_t -> x_t-1\n                    latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                    # call the callback, if provided\n                    if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                        progress_bar.update()\n                        if callback is not None and i % callback_steps == 0:\n                            callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n        else:\n            image = latents\n            has_nsfw_concept = None\n\n        if has_nsfw_concept is None:\n            do_denormalize = [True] * image.shape[0]\n        else:\n            do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept]\n\n        image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize)\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n\n    def sag_masking(self, original_latents, attn_map, map_size, t, eps):\n        # Same masking process as in SAG paper: https://arxiv.org/pdf/2210.00939.pdf\n        bh, hw1, hw2 = attn_map.shape\n        b, latent_channel, latent_h, latent_w = original_latents.shape\n        h = self.unet.config.attention_head_dim\n        if isinstance(h, list):\n            h = h[-1]\n\n        # Produce attention mask\n        attn_map = attn_map.reshape(b, h, hw1, hw2)\n        attn_mask = attn_map.mean(1, keepdim=False).sum(1, keepdim=False) > 1.0\n        attn_mask = (\n            attn_mask.reshape(b, map_size[0], map_size[1])\n            .unsqueeze(1)\n            .repeat(1, latent_channel, 1, 1)\n            .type(attn_map.dtype)\n        )\n        attn_mask = F.interpolate(attn_mask, (latent_h, latent_w))\n\n        # Blur according to the self-attention mask\n        degraded_latents = gaussian_blur_2d(original_latents, kernel_size=9, sigma=1.0)\n        degraded_latents = degraded_latents * attn_mask + original_latents * (1 - attn_mask)\n\n        # Noise it again to match the noise level\n        degraded_latents = self.scheduler.add_noise(degraded_latents, noise=eps, timesteps=t)\n\n        return degraded_latents\n\n    # Modified from 
diffusers.schedulers.scheduling_ddim.DDIMScheduler.step\n    # Note: there are some schedulers that clip or do not return x_0 (PNDMScheduler, DDIMScheduler, etc.)\n    def pred_x0(self, sample, model_output, timestep):\n        alpha_prod_t = self.scheduler.alphas_cumprod[timestep]\n\n        beta_prod_t = 1 - alpha_prod_t\n        if self.scheduler.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n        elif self.scheduler.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n        elif self.scheduler.config.prediction_type == \"v_prediction\":\n            pred_original_sample = (alpha_prod_t**0.5) * sample - (beta_prod_t**0.5) * model_output\n            # predict V\n            model_output = (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.scheduler.config.prediction_type} must be one of `epsilon`, `sample`,\"\n                \" or `v_prediction`\"\n            )\n\n        return pred_original_sample\n\n    def pred_epsilon(self, sample, model_output, timestep):\n        alpha_prod_t = self.scheduler.alphas_cumprod[timestep]\n\n        beta_prod_t = 1 - alpha_prod_t\n        if self.scheduler.config.prediction_type == \"epsilon\":\n            pred_eps = model_output\n        elif self.scheduler.config.prediction_type == \"sample\":\n            pred_eps = (sample - (alpha_prod_t**0.5) * model_output) / (beta_prod_t**0.5)\n        elif self.scheduler.config.prediction_type == \"v_prediction\":\n            pred_eps = (beta_prod_t**0.5) * sample + (alpha_prod_t**0.5) * model_output\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.scheduler.config.prediction_type} must be one of `epsilon`, `sample`,\"\n                \" or `v_prediction`\"\n            )\n\n        return pred_eps\n\n\n# Gaussian blur\ndef gaussian_blur_2d(img, kernel_size, sigma):\n    ksize_half = (kernel_size - 1) * 0.5\n\n    x = torch.linspace(-ksize_half, ksize_half, steps=kernel_size)\n\n    pdf = torch.exp(-0.5 * (x / sigma).pow(2))\n\n    x_kernel = pdf / pdf.sum()\n    x_kernel = x_kernel.to(device=img.device, dtype=img.dtype)\n\n    kernel2d = torch.mm(x_kernel[:, None], x_kernel[None, :])\n    kernel2d = kernel2d.expand(img.shape[-3], 1, kernel2d.shape[0], kernel2d.shape[1])\n\n    padding = [kernel_size // 2, kernel_size // 2, kernel_size // 2, kernel_size // 2]\n\n    img = F.pad(img, padding, mode=\"reflect\")\n    img = F.conv2d(img, kernel2d, groups=img.shape[-3])\n\n    return img\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_upscale.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...models.attention_processor import AttnProcessor2_0, LoRAXFormersAttnProcessor, XFormersAttnProcessor\nfrom ...schedulers import DDPMScheduler, KarrasDiffusionSchedulers\nfrom ...utils import deprecate, is_accelerate_available, is_accelerate_version, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef preprocess(image):\n    if isinstance(image, torch.Tensor):\n        return image\n    elif isinstance(image, PIL.Image.Image):\n        image = [image]\n\n    if isinstance(image[0], PIL.Image.Image):\n        w, h = image[0].size\n        w, h = (x - x % 64 for x in (w, h))  # resize to integer multiple of 64\n\n        image = [np.array(i.resize((w, h)))[None, :] for i in image]\n        image = np.concatenate(image, axis=0)\n        image = np.array(image).astype(np.float32) / 255.0\n        image = image.transpose(0, 3, 1, 2)\n        image = 2.0 * image - 1.0\n        image = torch.from_numpy(image)\n    elif isinstance(image[0], torch.Tensor):\n        image = torch.cat(image, dim=0)\n    return image\n\n\nclass StableDiffusionUpscalePipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-guided image super-resolution using Stable Diffusion 2.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        low_res_scheduler ([`SchedulerMixin`]):\n            A scheduler used to add initial noise to the low res conditioning image. 
It must be an instance of\n            [`DDPMScheduler`].\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n    _optional_components = [\"watermarker\", \"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        low_res_scheduler: DDPMScheduler,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: Optional[Any] = None,\n        feature_extractor: Optional[CLIPImageProcessor] = None,\n        watermarker: Optional[Any] = None,\n        max_noise_level: int = 350,\n    ):\n        super().__init__()\n\n        if hasattr(\n            vae, \"config\"\n        ):  # check if vae has a config attribute `scaling_factor` and if it is set to 0.08333, else set it to 0.08333 and deprecate\n            is_vae_scaling_factor_set_to_0_08333 = (\n                hasattr(vae.config, \"scaling_factor\") and vae.config.scaling_factor == 0.08333\n            )\n            if not is_vae_scaling_factor_set_to_0_08333:\n                deprecation_message = (\n                    \"The configuration file of the vae does not contain `scaling_factor` or it is set to\"\n                    f\" {vae.config.scaling_factor}, which seems highly unlikely. If your checkpoint is a fine-tuned\"\n                    \" version of `stabilityai/stable-diffusion-x4-upscaler` you should change 'scaling_factor' to\"\n                    \" 0.08333 Please make sure to update the config accordingly, as not doing so might lead to\"\n                    \" incorrect results in future versions. If you have downloaded this checkpoint from the Hugging\"\n                    \" Face Hub, it would be very nice if you could open a Pull Request for the `vae/config.json` file\"\n                )\n                deprecate(\"wrong scaling_factor\", \"1.0.0\", deprecation_message, standard_warn=False)\n                vae.register_to_config(scaling_factor=0.08333)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            low_res_scheduler=low_res_scheduler,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            watermarker=watermarker,\n            feature_extractor=feature_extractor,\n        )\n        self.register_to_config(max_noise_level=max_noise_level)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            if cpu_offloaded_model is not None:\n                _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker\n    def run_safety_checker(self, image, device, dtype):\n        if self.safety_checker is not None:\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, nsfw_detected, watermark_detected = self.safety_checker(\n                images=image,\n                clip_input=safety_checker_input.pixel_values.to(dtype=dtype),\n            )\n        else:\n            nsfw_detected = None\n            watermark_detected = None\n\n            if hasattr(self, \"unet_offload_hook\") and self.unet_offload_hook is not None:\n                self.unet_offload_hook.offload()\n\n        return image, nsfw_detected, watermark_detected\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        noise_level,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        if (\n            not isinstance(image, torch.Tensor)\n            and not isinstance(image, PIL.Image.Image)\n            and not isinstance(image, list)\n        ):\n            raise ValueError(\n                f\"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or `list` but is {type(image)}\"\n            )\n\n        # verify batch size of prompt and image are same if image is a list or tensor\n        if isinstance(image, list) or isinstance(image, torch.Tensor):\n            if isinstance(prompt, str):\n                batch_size = 1\n            else:\n                batch_size = len(prompt)\n            if isinstance(image, list):\n                image_batch_size = len(image)\n            else:\n                image_batch_size = image.shape[0]\n            if batch_size != image_batch_size:\n                raise ValueError(\n                    f\"`prompt` has batch size {batch_size} and `image` has batch size {image_batch_size}.\"\n                    \" Please make sure that passed `prompt` matches the batch size of `image`.\"\n                )\n\n        # check noise level\n        if noise_level > self.config.max_noise_level:\n            raise ValueError(f\"`noise_level` has to be <= {self.config.max_noise_level} but is {noise_level}\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height, width)\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            if latents.shape != shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {shape}\")\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        image: Union[torch.FloatTensor, PIL.Image.Image, List[PIL.Image.Image]] = None,\n        num_inference_steps: int = 75,\n        guidance_scale: float = 9.0,\n        noise_level: int = 20,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        
negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            image (`PIL.Image.Image` or List[`PIL.Image.Image`] or `torch.FloatTensor`):\n                `Image`, or tensor representing an image batch which will be upscaled.\n            num_inference_steps (`int`, *optional*, defaults to 75):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 9.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2 of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. A higher guidance scale encourages the model to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale`\n                is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n        ```py\n        >>> import requests\n        >>> from PIL import Image\n        >>> from io import BytesIO\n        >>> from diffusers import StableDiffusionUpscalePipeline\n        >>> import torch\n\n        >>> # load model and scheduler\n        >>> model_id = \"stabilityai/stable-diffusion-x4-upscaler\"\n        >>> pipeline = StableDiffusionUpscalePipeline.from_pretrained(\n        ...     model_id, revision=\"fp16\", torch_dtype=torch.float16\n        ... )\n        >>> pipeline = pipeline.to(\"cuda\")\n\n        >>> # let's download an  image\n        >>> url = \"https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd2-upscale/low_res_cat.png\"\n        >>> response = requests.get(url)\n        >>> low_res_img = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> low_res_img = low_res_img.resize((128, 128))\n        >>> prompt = \"a white cat\"\n\n        >>> upscaled_image = pipeline(prompt=prompt, image=low_res_img).images[0]\n        >>> upscaled_image.save(\"upsampled_cat.png\")\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n\n        # 1. Check inputs\n        self.check_inputs(\n            prompt,\n            image,\n            noise_level,\n            callback_steps,\n            negative_prompt,\n            prompt_embeds,\n            negative_prompt_embeds,\n        )\n\n        if image is None:\n            raise ValueError(\"`image` input cannot be undefined.\")\n\n        # 2. 
Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Preprocess image\n        image = preprocess(image)\n        image = image.to(dtype=prompt_embeds.dtype, device=device)\n\n        # 5. set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Add noise to image\n        noise_level = torch.tensor([noise_level], dtype=torch.long, device=device)\n        noise = randn_tensor(image.shape, generator=generator, device=device, dtype=prompt_embeds.dtype)\n        image = self.low_res_scheduler.add_noise(image, noise, noise_level)\n\n        batch_multiplier = 2 if do_classifier_free_guidance else 1\n        image = torch.cat([image] * batch_multiplier * num_images_per_prompt)\n        noise_level = torch.cat([noise_level] * image.shape[0])\n\n        # 6. Prepare latent variables\n        height, width = image.shape[2:]\n        num_channels_latents = self.vae.config.latent_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 7. Check that sizes of image and latents match\n        num_channels_image = image.shape[1]\n        if num_channels_latents + num_channels_image != self.unet.config.in_channels:\n            raise ValueError(\n                f\"Incorrect configuration settings! The config of `pipeline.unet`: {self.unet.config} expects\"\n                f\" {self.unet.config.in_channels} but received `num_channels_latents`: {num_channels_latents} +\"\n                f\" `num_channels_image`: {num_channels_image} \"\n                f\" = {num_channels_latents+num_channels_image}. Please verify the config of\"\n                \" `pipeline.unet` or your `image` input.\"\n            )\n\n        # 8. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 9. 
Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n\n                # concat latents, mask, masked_image_latents in the channel dimension\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n                latent_model_input = torch.cat([latent_model_input, image], dim=1)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    class_labels=noise_level,\n                    return_dict=False,\n                )[0]\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 10. Post-processing\n        # make sure the VAE is in float32 mode, as it overflows in float16\n        self.vae.to(dtype=torch.float32)\n\n        use_torch_2_0_or_xformers = self.vae.decoder.mid_block.attentions[0].processor in [\n            AttnProcessor2_0,\n            XFormersAttnProcessor,\n            LoRAXFormersAttnProcessor,\n        ]\n        # if xformers or torch_2_0 is used attention block does not need\n        # to be in float32 which can save lots of memory\n        if not use_torch_2_0_or_xformers:\n            self.vae.post_quant_conv.to(latents.dtype)\n            self.vae.decoder.conv_in.to(latents.dtype)\n            self.vae.decoder.mid_block.to(latents.dtype)\n        else:\n            latents = latents.float()\n\n        # 11. Convert to PIL\n        if output_type == \"pil\":\n            image = self.decode_latents(latents)\n\n            image, has_nsfw_concept, _ = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n            image = self.numpy_to_pil(image)\n\n            # 11. 
Apply watermark\n            if self.watermarker is not None:\n                image = self.watermarker.apply_watermark(image)\n        elif output_type == \"pt\":\n            latents = 1 / self.vae.config.scaling_factor * latents\n            image = self.vae.decode(latents).sample\n            has_nsfw_concept = None\n        else:\n            image = self.decode_latents(latents)\n            has_nsfw_concept = None\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_unclip.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\n\nimport torch\nfrom transformers import CLIPTextModel, CLIPTextModelWithProjection, CLIPTokenizer\nfrom transformers.models.clip.modeling_clip import CLIPTextModelOutput\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, PriorTransformer, UNet2DConditionModel\nfrom ...models.embeddings import get_timestep_embedding\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, is_accelerate_version, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\nfrom .stable_unclip_image_normalizer import StableUnCLIPImageNormalizer\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import StableUnCLIPPipeline\n\n        >>> pipe = StableUnCLIPPipeline.from_pretrained(\n        ...     \"fusing/stable-unclip-2-1-l\", torch_dtype=torch.float16\n        ... )  # TODO update model path\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> prompt = \"a photo of an astronaut riding a horse on mars\"\n        >>> images = pipe(prompt).images\n        >>> images[0].save(\"astronaut_horse.png\")\n        ```\n\"\"\"\n\n\nclass StableUnCLIPPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    \"\"\"\n    Pipeline for text-to-image generation using stable unCLIP.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        prior_tokenizer ([`CLIPTokenizer`]):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        prior_text_encoder ([`CLIPTextModelWithProjection`]):\n            Frozen text-encoder.\n        prior ([`PriorTransformer`]):\n            The canonincal unCLIP prior to approximate the image embedding from the text embedding.\n        prior_scheduler ([`KarrasDiffusionSchedulers`]):\n            Scheduler used in the prior denoising process.\n        image_normalizer ([`StableUnCLIPImageNormalizer`]):\n            Used to normalize the predicted image embeddings before the noise is applied and un-normalize the image\n            embeddings after the noise has been applied.\n        image_noising_scheduler ([`KarrasDiffusionSchedulers`]):\n            Noise schedule for adding noise to the predicted image embeddings. 
The amount of noise to add is determined\n            by `noise_level` in `StableUnCLIPPipeline.__call__`.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder.\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`KarrasDiffusionSchedulers`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents.\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n    \"\"\"\n\n    # prior components\n    prior_tokenizer: CLIPTokenizer\n    prior_text_encoder: CLIPTextModelWithProjection\n    prior: PriorTransformer\n    prior_scheduler: KarrasDiffusionSchedulers\n\n    # image noising components\n    image_normalizer: StableUnCLIPImageNormalizer\n    image_noising_scheduler: KarrasDiffusionSchedulers\n\n    # regular denoising components\n    tokenizer: CLIPTokenizer\n    text_encoder: CLIPTextModel\n    unet: UNet2DConditionModel\n    scheduler: KarrasDiffusionSchedulers\n\n    vae: AutoencoderKL\n\n    def __init__(\n        self,\n        # prior components\n        prior_tokenizer: CLIPTokenizer,\n        prior_text_encoder: CLIPTextModelWithProjection,\n        prior: PriorTransformer,\n        prior_scheduler: KarrasDiffusionSchedulers,\n        # image noising components\n        image_normalizer: StableUnCLIPImageNormalizer,\n        image_noising_scheduler: KarrasDiffusionSchedulers,\n        # regular denoising components\n        tokenizer: CLIPTokenizer,\n        text_encoder: CLIPTextModelWithProjection,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        # vae\n        vae: AutoencoderKL,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            prior_tokenizer=prior_tokenizer,\n            prior_text_encoder=prior_text_encoder,\n            prior=prior,\n            prior_scheduler=prior_scheduler,\n            image_normalizer=image_normalizer,\n            image_noising_scheduler=image_noising_scheduler,\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            vae=vae,\n        )\n\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. 
If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        # TODO: self.prior.post_process_latents and self.image_noiser.{scale,unscale} are not covered by the offload hooks, so they fails if added to the list\n        models = [\n            self.prior_text_encoder,\n            self.text_encoder,\n            self.unet,\n            self.vae,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.prior_text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.unclip.pipeline_unclip.UnCLIPPipeline._encode_prompt with _encode_prompt->_encode_prior_prompt, tokenizer->prior_tokenizer, text_encoder->prior_text_encoder\n    def _encode_prior_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        text_model_output: Optional[Union[CLIPTextModelOutput, Tuple]] = None,\n        text_attention_mask: Optional[torch.Tensor] = None,\n    ):\n        if text_model_output is None:\n            batch_size = len(prompt) if isinstance(prompt, list) else 1\n            # get prompt text embeddings\n            text_inputs = self.prior_tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.prior_tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            text_mask = text_inputs.attention_mask.bool().to(device)\n\n            untruncated_ids = self.prior_tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.prior_tokenizer.batch_decode(\n                    untruncated_ids[:, self.prior_tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.prior_tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n                text_input_ids = text_input_ids[:, : self.prior_tokenizer.model_max_length]\n\n            prior_text_encoder_output = self.prior_text_encoder(text_input_ids.to(device))\n\n            prompt_embeds = prior_text_encoder_output.text_embeds\n            prior_text_encoder_hidden_states = prior_text_encoder_output.last_hidden_state\n\n        else:\n            batch_size = text_model_output[0].shape[0]\n            prompt_embeds, prior_text_encoder_hidden_states = text_model_output[0], text_model_output[1]\n            text_mask = text_attention_mask\n\n        prompt_embeds = prompt_embeds.repeat_interleave(num_images_per_prompt, dim=0)\n        prior_text_encoder_hidden_states = prior_text_encoder_hidden_states.repeat_interleave(\n            num_images_per_prompt, dim=0\n        )\n        text_mask = text_mask.repeat_interleave(num_images_per_prompt, dim=0)\n\n        if do_classifier_free_guidance:\n            uncond_tokens = [\"\"] * batch_size\n\n            uncond_input = self.prior_tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=self.prior_tokenizer.model_max_length,\n                
truncation=True,\n                return_tensors=\"pt\",\n            )\n            uncond_text_mask = uncond_input.attention_mask.bool().to(device)\n            negative_prompt_embeds_prior_text_encoder_output = self.prior_text_encoder(\n                uncond_input.input_ids.to(device)\n            )\n\n            negative_prompt_embeds = negative_prompt_embeds_prior_text_encoder_output.text_embeds\n            uncond_prior_text_encoder_hidden_states = (\n                negative_prompt_embeds_prior_text_encoder_output.last_hidden_state\n            )\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len)\n\n            seq_len = uncond_prior_text_encoder_hidden_states.shape[1]\n            uncond_prior_text_encoder_hidden_states = uncond_prior_text_encoder_hidden_states.repeat(\n                1, num_images_per_prompt, 1\n            )\n            uncond_prior_text_encoder_hidden_states = uncond_prior_text_encoder_hidden_states.view(\n                batch_size * num_images_per_prompt, seq_len, -1\n            )\n            uncond_text_mask = uncond_text_mask.repeat_interleave(num_images_per_prompt, dim=0)\n\n            # done duplicates\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n            prior_text_encoder_hidden_states = torch.cat(\n                [uncond_prior_text_encoder_hidden_states, prior_text_encoder_hidden_states]\n            )\n\n            text_mask = torch.cat([uncond_text_mask, text_mask])\n\n        return prompt_embeds, prior_text_encoder_hidden_states, text_mask\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. 
If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                
raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs with prepare_extra_step_kwargs->prepare_prior_extra_step_kwargs, scheduler->prior_scheduler\n    def prepare_prior_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the prior_scheduler step, since not all prior_schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other prior_schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.prior_scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the prior_scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.prior_scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        noise_level,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. 
Please make sure to define only one of the two.\"\n            )\n\n        if prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n\n        if prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                \"Provide either `negative_prompt` or `negative_prompt_embeds`. Please make sure to define only one of the two.\"\n            )\n\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type as `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        if noise_level < 0 or noise_level >= self.image_noising_scheduler.config.num_train_timesteps:\n            raise ValueError(\n                f\"`noise_level` must be between 0 and {self.image_noising_scheduler.config.num_train_timesteps - 1}, inclusive.\"\n            )\n\n    # Copied from diffusers.pipelines.unclip.pipeline_unclip.UnCLIPPipeline.prepare_latents\n    def prepare_latents(self, shape, dtype, device, generator, latents, scheduler):\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            if latents.shape != shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {shape}\")\n            latents = latents.to(device)\n\n        latents = latents * scheduler.init_noise_sigma\n        return latents\n\n    def noise_image_embeddings(\n        self,\n        image_embeds: torch.Tensor,\n        noise_level: int,\n        noise: Optional[torch.FloatTensor] = None,\n        generator: Optional[torch.Generator] = None,\n    ):\n        \"\"\"\n        Add noise to the image embeddings. The amount of noise is controlled by a `noise_level` input. A higher\n        `noise_level` increases the variance in the final un-noised images.\n\n        The noise is applied in two ways:\n        1. A noise schedule is applied directly to the embeddings\n        2. 
A vector of sinusoidal time embeddings are appended to the output.\n\n        In both cases, the amount of noise is controlled by the same `noise_level`.\n\n        The embeddings are normalized before the noise is applied and un-normalized after the noise is applied.\n        \"\"\"\n        if noise is None:\n            noise = randn_tensor(\n                image_embeds.shape, generator=generator, device=image_embeds.device, dtype=image_embeds.dtype\n            )\n\n        noise_level = torch.tensor([noise_level] * image_embeds.shape[0], device=image_embeds.device)\n\n        self.image_normalizer.to(image_embeds.device)\n        image_embeds = self.image_normalizer.scale(image_embeds)\n\n        image_embeds = self.image_noising_scheduler.add_noise(image_embeds, timesteps=noise_level, noise=noise)\n\n        image_embeds = self.image_normalizer.unscale(image_embeds)\n\n        noise_level = get_timestep_embedding(\n            timesteps=noise_level, embedding_dim=image_embeds.shape[-1], flip_sin_to_cos=True, downscale_freq_shift=0\n        )\n\n        # `get_timestep_embeddings` does not contain any weights and will always return f32 tensors,\n        # but we might actually be running in fp16. so we need to cast here.\n        # there might be better ways to encapsulate this.\n        noise_level = noise_level.to(image_embeds.dtype)\n\n        image_embeds = torch.cat((image_embeds, noise_level), 1)\n\n        return image_embeds\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        # regular denoising process args\n        prompt: Optional[Union[str, List[str]]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 20,\n        guidance_scale: float = 10.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[torch.Generator] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        noise_level: int = 0,\n        # prior args\n        prior_num_inference_steps: int = 25,\n        prior_guidance_scale: float = 4.0,\n        prior_latents: Optional[torch.FloatTensor] = None,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`.\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 20):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 10.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            noise_level (`int`, *optional*, defaults to `0`):\n                The amount of noise to add to the image embeddings. A higher `noise_level` increases the variance in\n                the final un-noised images. See `StableUnCLIPPipeline.noise_image_embeddings` for details.\n            prior_num_inference_steps (`int`, *optional*, defaults to 25):\n                The number of denoising steps in the prior denoising process. More denoising steps usually lead to a\n                higher quality image at the expense of slower inference.\n            prior_guidance_scale (`float`, *optional*, defaults to 4.0):\n                Guidance scale for the prior denoising process as defined in [Classifier-Free Diffusion\n                Guidance](https://arxiv.org/abs/2207.12598). `prior_guidance_scale` is defined as `w` of equation 2. of\n                [Imagen Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting\n                `guidance_scale > 1`. Higher guidance scale encourages to generate images that are closely linked to\n                the text `prompt`, usually at the expense of lower image quality.\n            prior_latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                embedding generation in the prior denoising process. Can be used to tweak the same generation with\n                different prompts. If not provided, a latents tensor will ge generated by sampling using the supplied\n                random `generator`.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~ pipeline_utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt=prompt,\n            height=height,\n            width=width,\n            callback_steps=callback_steps,\n            noise_level=noise_level,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        batch_size = batch_size * num_images_per_prompt\n\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . 
`guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        prior_do_classifier_free_guidance = prior_guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prior_prompt_embeds, prior_text_encoder_hidden_states, prior_text_mask = self._encode_prior_prompt(\n            prompt=prompt,\n            device=device,\n            num_images_per_prompt=num_images_per_prompt,\n            do_classifier_free_guidance=prior_do_classifier_free_guidance,\n        )\n\n        # 4. Prepare prior timesteps\n        self.prior_scheduler.set_timesteps(prior_num_inference_steps, device=device)\n        prior_timesteps_tensor = self.prior_scheduler.timesteps\n\n        # 5. Prepare prior latent variables\n        embedding_dim = self.prior.config.embedding_dim\n        prior_latents = self.prepare_latents(\n            (batch_size, embedding_dim),\n            prior_prompt_embeds.dtype,\n            device,\n            generator,\n            prior_latents,\n            self.prior_scheduler,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        prior_extra_step_kwargs = self.prepare_prior_extra_step_kwargs(generator, eta)\n\n        # 7. Prior denoising loop\n        for i, t in enumerate(self.progress_bar(prior_timesteps_tensor)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([prior_latents] * 2) if prior_do_classifier_free_guidance else prior_latents\n            latent_model_input = self.prior_scheduler.scale_model_input(latent_model_input, t)\n\n            predicted_image_embedding = self.prior(\n                latent_model_input,\n                timestep=t,\n                proj_embedding=prior_prompt_embeds,\n                encoder_hidden_states=prior_text_encoder_hidden_states,\n                attention_mask=prior_text_mask,\n            ).predicted_image_embedding\n\n            if prior_do_classifier_free_guidance:\n                predicted_image_embedding_uncond, predicted_image_embedding_text = predicted_image_embedding.chunk(2)\n                predicted_image_embedding = predicted_image_embedding_uncond + prior_guidance_scale * (\n                    predicted_image_embedding_text - predicted_image_embedding_uncond\n                )\n\n            prior_latents = self.prior_scheduler.step(\n                predicted_image_embedding,\n                timestep=t,\n                sample=prior_latents,\n                **prior_extra_step_kwargs,\n                return_dict=False,\n            )[0]\n\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, prior_latents)\n\n        prior_latents = self.prior.post_process_latents(prior_latents)\n\n        image_embeds = prior_latents\n\n        # done prior\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 8. 
Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt=prompt,\n            device=device,\n            num_images_per_prompt=num_images_per_prompt,\n            do_classifier_free_guidance=do_classifier_free_guidance,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 9. Prepare image embeddings\n        image_embeds = self.noise_image_embeddings(\n            image_embeds=image_embeds,\n            noise_level=noise_level,\n            generator=generator,\n        )\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = torch.zeros_like(image_embeds)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            image_embeds = torch.cat([negative_prompt_embeds, image_embeds])\n\n        # 10. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 11. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        latents = self.prepare_latents(\n            shape=shape,\n            dtype=prompt_embeds.dtype,\n            device=device,\n            generator=generator,\n            latents=latents,\n            scheduler=self.scheduler,\n        )\n\n        # 12. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 13. 
Denoising loop\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            noise_pred = self.unet(\n                latent_model_input,\n                t,\n                encoder_hidden_states=prompt_embeds,\n                class_labels=image_embeds,\n                cross_attention_kwargs=cross_attention_kwargs,\n                return_dict=False,\n            )[0]\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/pipeline_stable_unclip_img2img.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport PIL\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModelWithProjection\n\nfrom diffusers.utils.import_utils import is_accelerate_available\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...models.embeddings import get_timestep_embedding\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_version, logging, randn_tensor, replace_example_docstring\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\nfrom .stable_unclip_image_normalizer import StableUnCLIPImageNormalizer\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import requests\n        >>> import torch\n        >>> from PIL import Image\n        >>> from io import BytesIO\n\n        >>> from diffusers import StableUnCLIPImg2ImgPipeline\n\n        >>> pipe = StableUnCLIPImg2ImgPipeline.from_pretrained(\n        ...     \"fusing/stable-unclip-2-1-l-img2img\", torch_dtype=torch.float16\n        ... )  # TODO update model path\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> url = \"https://raw.githubusercontent.com/CompVis/stable-diffusion/main/assets/stable-samples/img2img/sketch-mountains-input.jpg\"\n\n        >>> response = requests.get(url)\n        >>> init_image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> init_image = init_image.resize((768, 512))\n\n        >>> prompt = \"A fantasy landscape, trending on artstation\"\n\n        >>> images = pipe(prompt, init_image).images\n        >>> images[0].save(\"fantasy_landscape.png\")\n        ```\n\"\"\"\n\n\nclass StableUnCLIPImg2ImgPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    \"\"\"\n    Pipeline for text-guided image to image generation using stable unCLIP.\n\n    This model inherits from [`DiffusionPipeline`]. 
Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        feature_extractor ([`CLIPImageProcessor`]):\n            Feature extractor for image pre-processing before being encoded.\n        image_encoder ([`CLIPVisionModelWithProjection`]):\n            CLIP vision model for encoding images.\n        image_normalizer ([`StableUnCLIPImageNormalizer`]):\n            Used to normalize the predicted image embeddings before the noise is applied and un-normalize the image\n            embeddings after the noise has been applied.\n        image_noising_scheduler ([`KarrasDiffusionSchedulers`]):\n            Noise schedule for adding noise to the predicted image embeddings. The amount of noise to add is determined\n            by `noise_level` in `StableUnCLIPPipeline.__call__`.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder.\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`KarrasDiffusionSchedulers`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents.\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n    \"\"\"\n\n    # image encoding components\n    feature_extractor: CLIPImageProcessor\n    image_encoder: CLIPVisionModelWithProjection\n\n    # image noising components\n    image_normalizer: StableUnCLIPImageNormalizer\n    image_noising_scheduler: KarrasDiffusionSchedulers\n\n    # regular denoising components\n    tokenizer: CLIPTokenizer\n    text_encoder: CLIPTextModel\n    unet: UNet2DConditionModel\n    scheduler: KarrasDiffusionSchedulers\n\n    vae: AutoencoderKL\n\n    def __init__(\n        self,\n        # image encoding components\n        feature_extractor: CLIPImageProcessor,\n        image_encoder: CLIPVisionModelWithProjection,\n        # image noising components\n        image_normalizer: StableUnCLIPImageNormalizer,\n        image_noising_scheduler: KarrasDiffusionSchedulers,\n        # regular denoising components\n        tokenizer: CLIPTokenizer,\n        text_encoder: CLIPTextModel,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        # vae\n        vae: AutoencoderKL,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            feature_extractor=feature_extractor,\n            image_encoder=image_encoder,\n            image_normalizer=image_normalizer,\n            image_noising_scheduler=image_noising_scheduler,\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            unet=unet,\n            scheduler=scheduler,\n            vae=vae,\n        )\n\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the 
input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        # TODO: self.image_normalizer.{scale,unscale} are not covered by the offload hooks, so they fails if added to the list\n        models = [\n            self.image_encoder,\n            self.text_encoder,\n            self.unet,\n            self.vae,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.image_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: procecss multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def _encode_image(\n        self,\n        image,\n        device,\n        batch_size,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        noise_level,\n        generator,\n        image_embeds,\n    ):\n        dtype = next(self.image_encoder.parameters()).dtype\n\n        if isinstance(image, PIL.Image.Image):\n            # the image embedding should repeated so it matches the total batch size of the prompt\n            repeat_by = batch_size\n        else:\n            # assume the image input is already properly batched and just needs to be repeated so\n            # it matches the num_images_per_prompt.\n            #\n            # NOTE(will) this is probably missing a few number of side cases. I.e. batched/non-batched\n            # `image_embeds`. 
If those happen to be common use cases, let's think harder about\n            # what the expected dimensions of inputs should be and how we handle the encoding.\n            repeat_by = num_images_per_prompt\n\n        if image_embeds is None:\n            if not isinstance(image, torch.Tensor):\n                image = self.feature_extractor(images=image, return_tensors=\"pt\").pixel_values\n\n            image = image.to(device=device, dtype=dtype)\n            image_embeds = self.image_encoder(image).image_embeds\n\n        image_embeds = self.noise_image_embeddings(\n            image_embeds=image_embeds,\n            noise_level=noise_level,\n            generator=generator,\n        )\n\n        # duplicate image embeddings for each generation per prompt, using mps friendly method\n        image_embeds = image_embeds.unsqueeze(1)\n        bs_embed, seq_len, _ = image_embeds.shape\n        image_embeds = image_embeds.repeat(1, repeat_by, 1)\n        image_embeds = image_embeds.view(bs_embed * repeat_by, seq_len, -1)\n        image_embeds = image_embeds.squeeze(1)\n\n        if do_classifier_free_guidance:\n            negative_prompt_embeds = torch.zeros_like(image_embeds)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            image_embeds = torch.cat([negative_prompt_embeds, image_embeds])\n\n        return image_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(\n        self,\n        prompt,\n        image,\n        height,\n        width,\n        callback_steps,\n        noise_level,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n        image_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Please make sure to define only one of the two.\"\n            )\n\n        if prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n\n        if prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                \"Provide either `negative_prompt` or `negative_prompt_embeds`. 
Cannot leave both `negative_prompt` and `negative_prompt_embeds` undefined.\"\n            )\n\n        if prompt is not None and negative_prompt is not None:\n            if type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n        if noise_level < 0 or noise_level >= self.image_noising_scheduler.config.num_train_timesteps:\n            raise ValueError(\n                f\"`noise_level` must be between 0 and {self.image_noising_scheduler.config.num_train_timesteps - 1}, inclusive.\"\n            )\n\n        if image is not None and image_embeds is not None:\n            raise ValueError(\n                \"Provide either `image` or `image_embeds`. Please make sure to define only one of the two.\"\n            )\n\n        if image is None and image_embeds is None:\n            raise ValueError(\n                \"Provide either `image` or `image_embeds`. Cannot leave both `image` and `image_embeds` undefined.\"\n            )\n\n        if image is not None:\n            if (\n                not isinstance(image, torch.Tensor)\n                and not isinstance(image, PIL.Image.Image)\n                and not isinstance(image, list)\n            ):\n                raise ValueError(\n                    \"`image` has to be of type `torch.FloatTensor` or `PIL.Image.Image` or `List[PIL.Image.Image]` but is\"\n                    f\" {type(image)}\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_unclip.StableUnCLIPPipeline.noise_image_embeddings\n    def noise_image_embeddings(\n        self,\n        image_embeds: torch.Tensor,\n        noise_level: int,\n        noise: Optional[torch.FloatTensor] = None,\n        generator: Optional[torch.Generator] = None,\n    ):\n        \"\"\"\n        Add noise to the image embeddings. The amount of noise is controlled by a `noise_level` input. A higher\n        `noise_level` increases the variance in the final un-noised images.\n\n        The noise is applied in two ways\n        1. A noise schedule is applied directly to the embeddings\n        2. A vector of sinusoidal time embeddings are appended to the output.\n\n        In both cases, the amount of noise is controlled by the same `noise_level`.\n\n        The embeddings are normalized before the noise is applied and un-normalized after the noise is applied.\n        \"\"\"\n        if noise is None:\n            noise = randn_tensor(\n                image_embeds.shape, generator=generator, device=image_embeds.device, dtype=image_embeds.dtype\n            )\n\n        noise_level = torch.tensor([noise_level] * image_embeds.shape[0], device=image_embeds.device)\n\n        self.image_normalizer.to(image_embeds.device)\n        image_embeds = self.image_normalizer.scale(image_embeds)\n\n        image_embeds = self.image_noising_scheduler.add_noise(image_embeds, timesteps=noise_level, noise=noise)\n\n        image_embeds = self.image_normalizer.unscale(image_embeds)\n\n        noise_level = get_timestep_embedding(\n            timesteps=noise_level, embedding_dim=image_embeds.shape[-1], flip_sin_to_cos=True, downscale_freq_shift=0\n        )\n\n        # `get_timestep_embeddings` does not contain any weights and will always return f32 tensors,\n        # but we might actually be running in fp16. 
so we need to cast here.\n        # there might be better ways to encapsulate this.\n        noise_level = noise_level.to(image_embeds.dtype)\n\n        image_embeds = torch.cat((image_embeds, noise_level), 1)\n\n        return image_embeds\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        image: Union[torch.FloatTensor, PIL.Image.Image] = None,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 20,\n        guidance_scale: float = 10,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[torch.Generator] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        noise_level: int = 0,\n        image_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. If not defined, either `prompt_embeds` will be\n                used or prompt is initialized to `\"\"`.\n            image (`torch.FloatTensor` or `PIL.Image.Image`):\n                `Image`, or tensor representing an image batch. The image will be encoded to its CLIP embedding which\n                the unet will be conditioned on. Note that the image is _not_ encoded by the vae and then used as the\n                latents in the denoising process such as in the standard stable diffusion text guided image variation\n                process.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 20):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 10.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. 
Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n            noise_level (`int`, *optional*, defaults to `0`):\n                The amount of noise to add to the image embeddings. A higher `noise_level` increases the variance in\n                the final un-noised images. See `StableUnCLIPPipeline.noise_image_embeddings` for details.\n            image_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated CLIP embeddings to condition the unet on. Note that these are not latents to be used in\n                the denoising process. 
If you want to provide pre-generated latents, pass them to `__call__` as\n                `latents`.\n\n        Examples:\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~ pipeline_utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        if prompt is None and prompt_embeds is None:\n            prompt = len(image) * [\"\"] if isinstance(image, list) else \"\"\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt=prompt,\n            image=image,\n            height=height,\n            width=width,\n            callback_steps=callback_steps,\n            noise_level=noise_level,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n            image_embeds=image_embeds,\n        )\n\n        # 2. Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        batch_size = batch_size * num_images_per_prompt\n\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt=prompt,\n            device=device,\n            num_images_per_prompt=num_images_per_prompt,\n            do_classifier_free_guidance=do_classifier_free_guidance,\n            negative_prompt=negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Encoder input image\n        noise_level = torch.tensor([noise_level], device=device)\n        image_embeds = self._encode_image(\n            image=image,\n            device=device,\n            batch_size=batch_size,\n            num_images_per_prompt=num_images_per_prompt,\n            do_classifier_free_guidance=do_classifier_free_guidance,\n            noise_level=noise_level,\n            generator=generator,\n            image_embeds=image_embeds,\n        )\n\n        # 5. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 6. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size=batch_size,\n            num_channels_latents=num_channels_latents,\n            height=height,\n            width=width,\n            dtype=prompt_embeds.dtype,\n            device=device,\n            generator=generator,\n            latents=latents,\n        )\n\n        # 7. Prepare extra step kwargs. 
TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 8. Denoising loop\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            noise_pred = self.unet(\n                latent_model_input,\n                t,\n                encoder_hidden_states=prompt_embeds,\n                class_labels=image_embeds,\n                cross_attention_kwargs=cross_attention_kwargs,\n                return_dict=False,\n            )[0]\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs, return_dict=False)[0]\n\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        # 9. Post-processing\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
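`StableUnCLIPImg2ImgPipeline.noise_image_embeddings` above conditions the UNet on a noised CLIP image embedding: the embedding is normalized, noised at `noise_level` by the image noising scheduler, un-normalized, and then concatenated with a sinusoidal embedding of `noise_level`. The sketch below reproduces that flow with stand-in components so it runs on its own; the DDPM-style `add_noise`, the hand-rolled sinusoidal embedding, and the plain mean/std tensors are simplifications of the scheduler, `get_timestep_embedding`, and `StableUnCLIPImageNormalizer`, and the `_sketch` names are hypothetical.

```py
import math

import torch


def sinusoidal_embedding(timesteps: torch.Tensor, dim: int) -> torch.Tensor:
    # Stand-in for diffusers' get_timestep_embedding: sin/cos features of the timestep.
    half = dim // 2
    freqs = torch.exp(-torch.arange(half, dtype=torch.float32) * math.log(10000.0) / half)
    args = timesteps.float()[:, None] * freqs[None, :]
    return torch.cat([torch.cos(args), torch.sin(args)], dim=-1)


def noise_image_embeddings_sketch(image_embeds, noise_level, mean, std, alphas_cumprod, generator=None):
    noise = torch.randn(image_embeds.shape, generator=generator, dtype=image_embeds.dtype)
    timesteps = torch.full((image_embeds.shape[0],), noise_level, dtype=torch.long)

    image_embeds = (image_embeds - mean) / std                      # image_normalizer.scale
    alpha = alphas_cumprod[timesteps].unsqueeze(-1)                 # DDPM-style add_noise at `noise_level`
    image_embeds = alpha.sqrt() * image_embeds + (1 - alpha).sqrt() * noise
    image_embeds = image_embeds * std + mean                        # image_normalizer.unscale

    t_emb = sinusoidal_embedding(timesteps, image_embeds.shape[-1]).to(image_embeds.dtype)
    return torch.cat([image_embeds, t_emb], dim=1)                  # feature dimension doubles


embeds = torch.randn(2, 768)
alphas_cumprod = torch.cumprod(1.0 - torch.linspace(1e-4, 0.02, 1000), dim=0)
out = noise_image_embeddings_sketch(
    embeds, noise_level=100, mean=torch.zeros(1, 768), std=torch.ones(1, 768), alphas_cumprod=alphas_cumprod
)
print(out.shape)  # torch.Size([2, 1536]): the embedding width doubles after the concat
```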
  {
    "path": "diffusers/pipelines/stable_diffusion/safety_checker.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom transformers import CLIPConfig, CLIPVisionModel, PreTrainedModel\n\nfrom ...utils import logging\n\n\nlogger = logging.get_logger(__name__)\n\n\ndef cosine_distance(image_embeds, text_embeds):\n    normalized_image_embeds = nn.functional.normalize(image_embeds)\n    normalized_text_embeds = nn.functional.normalize(text_embeds)\n    return torch.mm(normalized_image_embeds, normalized_text_embeds.t())\n\n\nclass StableDiffusionSafetyChecker(PreTrainedModel):\n    config_class = CLIPConfig\n\n    _no_split_modules = [\"CLIPEncoderLayer\"]\n\n    def __init__(self, config: CLIPConfig):\n        super().__init__(config)\n\n        self.vision_model = CLIPVisionModel(config.vision_config)\n        self.visual_projection = nn.Linear(config.vision_config.hidden_size, config.projection_dim, bias=False)\n\n        self.concept_embeds = nn.Parameter(torch.ones(17, config.projection_dim), requires_grad=False)\n        self.special_care_embeds = nn.Parameter(torch.ones(3, config.projection_dim), requires_grad=False)\n\n        self.concept_embeds_weights = nn.Parameter(torch.ones(17), requires_grad=False)\n        self.special_care_embeds_weights = nn.Parameter(torch.ones(3), requires_grad=False)\n\n    @torch.no_grad()\n    def forward(self, clip_input, images):\n        pooled_output = self.vision_model(clip_input)[1]  # pooled_output\n        image_embeds = self.visual_projection(pooled_output)\n\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        special_cos_dist = cosine_distance(image_embeds, self.special_care_embeds).cpu().float().numpy()\n        cos_dist = cosine_distance(image_embeds, self.concept_embeds).cpu().float().numpy()\n\n        result = []\n        batch_size = image_embeds.shape[0]\n        for i in range(batch_size):\n            result_img = {\"special_scores\": {}, \"special_care\": [], \"concept_scores\": {}, \"bad_concepts\": []}\n\n            # increase this value to create a stronger `nfsw` filter\n            # at the cost of increasing the possibility of filtering benign images\n            adjustment = 0.0\n\n            for concept_idx in range(len(special_cos_dist[0])):\n                concept_cos = special_cos_dist[i][concept_idx]\n                concept_threshold = self.special_care_embeds_weights[concept_idx].item()\n                result_img[\"special_scores\"][concept_idx] = round(concept_cos - concept_threshold + adjustment, 3)\n                if result_img[\"special_scores\"][concept_idx] > 0:\n                    result_img[\"special_care\"].append({concept_idx, result_img[\"special_scores\"][concept_idx]})\n                    adjustment = 0.01\n\n            for concept_idx in range(len(cos_dist[0])):\n                concept_cos = cos_dist[i][concept_idx]\n                concept_threshold = 
self.concept_embeds_weights[concept_idx].item()\n                result_img[\"concept_scores\"][concept_idx] = round(concept_cos - concept_threshold + adjustment, 3)\n                if result_img[\"concept_scores\"][concept_idx] > 0:\n                    result_img[\"bad_concepts\"].append(concept_idx)\n\n            result.append(result_img)\n\n        has_nsfw_concepts = [len(res[\"bad_concepts\"]) > 0 for res in result]\n\n        for idx, has_nsfw_concept in enumerate(has_nsfw_concepts):\n            if has_nsfw_concept:\n                if torch.is_tensor(images) or torch.is_tensor(images[0]):\n                    images[idx] = torch.zeros_like(images[idx])  # black image\n                else:\n                    images[idx] = np.zeros(images[idx].shape)  # black image\n\n        if any(has_nsfw_concepts):\n            logger.warning(\n                \"Potential NSFW content was detected in one or more images. A black image will be returned instead.\"\n                \" Try again with a different prompt and/or seed.\"\n            )\n\n        return images, has_nsfw_concepts\n\n    @torch.no_grad()\n    def forward_onnx(self, clip_input: torch.FloatTensor, images: torch.FloatTensor):\n        pooled_output = self.vision_model(clip_input)[1]  # pooled_output\n        image_embeds = self.visual_projection(pooled_output)\n\n        special_cos_dist = cosine_distance(image_embeds, self.special_care_embeds)\n        cos_dist = cosine_distance(image_embeds, self.concept_embeds)\n\n        # increase this value to create a stronger `nsfw` filter\n        # at the cost of increasing the possibility of filtering benign images\n        adjustment = 0.0\n\n        special_scores = special_cos_dist - self.special_care_embeds_weights + adjustment\n        # special_scores = special_scores.round(decimals=3)\n        special_care = torch.any(special_scores > 0, dim=1)\n        special_adjustment = special_care * 0.01\n        special_adjustment = special_adjustment.unsqueeze(1).expand(-1, cos_dist.shape[1])\n\n        concept_scores = (cos_dist - self.concept_embeds_weights) + special_adjustment\n        # concept_scores = concept_scores.round(decimals=3)\n        has_nsfw_concepts = torch.any(concept_scores > 0, dim=1)\n\n        images[has_nsfw_concepts] = 0.0  # black image\n\n        return images, has_nsfw_concepts\n"
  },
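`StableDiffusionSafetyChecker` above flags images by comparing the cosine similarity of their projected CLIP embedding against fixed concept embeddings, using per-concept thresholds that tighten by 0.01 once any "special care" concept fires. Below is a minimal sketch of that thresholding, mirroring the vectorized `forward_onnx` path; the random data and the 0.9 thresholds are made up, whereas in the real module they come from the checkpoint.

```py
import torch
import torch.nn.functional as F


def cosine_distance(a: torch.Tensor, b: torch.Tensor) -> torch.Tensor:
    # Same as the helper above: cosine similarity between every pair of rows.
    return F.normalize(a) @ F.normalize(b).t()


def flag_nsfw(image_embeds, special_embeds, special_weights, concept_embeds, concept_weights):
    # Scores above 0 mean the similarity exceeds the per-concept threshold.
    special_scores = cosine_distance(image_embeds, special_embeds) - special_weights
    special_care = (special_scores > 0).any(dim=1, keepdim=True)      # (batch, 1)
    # Images that hit a "special care" concept get a slightly stricter threshold.
    adjustment = special_care.float() * 0.01

    concept_scores = cosine_distance(image_embeds, concept_embeds) - concept_weights + adjustment
    return (concept_scores > 0).any(dim=1)                            # per-image bool flags


# Random stand-in data: 2 images, 3 special-care and 17 regular concepts.
flags = flag_nsfw(
    torch.randn(2, 768),
    torch.randn(3, 768), torch.full((3,), 0.9),
    torch.randn(17, 768), torch.full((17,), 0.9),
)
print(flags)  # flagged images are then blacked out by the caller
```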
  {
    "path": "diffusers/pipelines/stable_diffusion/safety_checker_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Optional, Tuple\n\nimport jax\nimport jax.numpy as jnp\nfrom flax import linen as nn\nfrom flax.core.frozen_dict import FrozenDict\nfrom transformers import CLIPConfig, FlaxPreTrainedModel\nfrom transformers.models.clip.modeling_flax_clip import FlaxCLIPVisionModule\n\n\ndef jax_cosine_distance(emb_1, emb_2, eps=1e-12):\n    norm_emb_1 = jnp.divide(emb_1.T, jnp.clip(jnp.linalg.norm(emb_1, axis=1), a_min=eps)).T\n    norm_emb_2 = jnp.divide(emb_2.T, jnp.clip(jnp.linalg.norm(emb_2, axis=1), a_min=eps)).T\n    return jnp.matmul(norm_emb_1, norm_emb_2.T)\n\n\nclass FlaxStableDiffusionSafetyCheckerModule(nn.Module):\n    config: CLIPConfig\n    dtype: jnp.dtype = jnp.float32\n\n    def setup(self):\n        self.vision_model = FlaxCLIPVisionModule(self.config.vision_config)\n        self.visual_projection = nn.Dense(self.config.projection_dim, use_bias=False, dtype=self.dtype)\n\n        self.concept_embeds = self.param(\"concept_embeds\", jax.nn.initializers.ones, (17, self.config.projection_dim))\n        self.special_care_embeds = self.param(\n            \"special_care_embeds\", jax.nn.initializers.ones, (3, self.config.projection_dim)\n        )\n\n        self.concept_embeds_weights = self.param(\"concept_embeds_weights\", jax.nn.initializers.ones, (17,))\n        self.special_care_embeds_weights = self.param(\"special_care_embeds_weights\", jax.nn.initializers.ones, (3,))\n\n    def __call__(self, clip_input):\n        pooled_output = self.vision_model(clip_input)[1]\n        image_embeds = self.visual_projection(pooled_output)\n\n        special_cos_dist = jax_cosine_distance(image_embeds, self.special_care_embeds)\n        cos_dist = jax_cosine_distance(image_embeds, self.concept_embeds)\n\n        # increase this value to create a stronger `nfsw` filter\n        # at the cost of increasing the possibility of filtering benign image inputs\n        adjustment = 0.0\n\n        special_scores = special_cos_dist - self.special_care_embeds_weights[None, :] + adjustment\n        special_scores = jnp.round(special_scores, 3)\n        is_special_care = jnp.any(special_scores > 0, axis=1, keepdims=True)\n        # Use a lower threshold if an image has any special care concept\n        special_adjustment = is_special_care * 0.01\n\n        concept_scores = cos_dist - self.concept_embeds_weights[None, :] + special_adjustment\n        concept_scores = jnp.round(concept_scores, 3)\n        has_nsfw_concepts = jnp.any(concept_scores > 0, axis=1)\n\n        return has_nsfw_concepts\n\n\nclass FlaxStableDiffusionSafetyChecker(FlaxPreTrainedModel):\n    config_class = CLIPConfig\n    main_input_name = \"clip_input\"\n    module_class = FlaxStableDiffusionSafetyCheckerModule\n\n    def __init__(\n        self,\n        config: CLIPConfig,\n        input_shape: Optional[Tuple] = None,\n        seed: int = 0,\n        dtype: jnp.dtype = jnp.float32,\n        
_do_init: bool = True,\n        **kwargs,\n    ):\n        if input_shape is None:\n            input_shape = (1, 224, 224, 3)\n        module = self.module_class(config=config, dtype=dtype, **kwargs)\n        super().__init__(config, module, input_shape=input_shape, seed=seed, dtype=dtype, _do_init=_do_init)\n\n    def init_weights(self, rng: jax.random.KeyArray, input_shape: Tuple, params: FrozenDict = None) -> FrozenDict:\n        # init input tensor\n        clip_input = jax.random.normal(rng, input_shape)\n\n        params_rng, dropout_rng = jax.random.split(rng)\n        rngs = {\"params\": params_rng, \"dropout\": dropout_rng}\n\n        random_params = self.module.init(rngs, clip_input)[\"params\"]\n\n        return random_params\n\n    def __call__(\n        self,\n        clip_input,\n        params: dict = None,\n    ):\n        clip_input = jnp.transpose(clip_input, (0, 2, 3, 1))\n\n        return self.module.apply(\n            {\"params\": params or self.params},\n            jnp.array(clip_input, dtype=jnp.float32),\n            rngs={},\n        )\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion/stable_unclip_image_normalizer.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Optional, Union\n\nimport torch\nfrom torch import nn\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...models.modeling_utils import ModelMixin\n\n\nclass StableUnCLIPImageNormalizer(ModelMixin, ConfigMixin):\n    \"\"\"\n    This class is used to hold the mean and standard deviation of the CLIP embedder used in stable unCLIP.\n\n    It is used to normalize the image embeddings before the noise is applied and un-normalize the noised image\n    embeddings.\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        embedding_dim: int = 768,\n    ):\n        super().__init__()\n\n        self.mean = nn.Parameter(torch.zeros(1, embedding_dim))\n        self.std = nn.Parameter(torch.ones(1, embedding_dim))\n\n    def to(\n        self,\n        torch_device: Optional[Union[str, torch.device]] = None,\n        torch_dtype: Optional[torch.dtype] = None,\n    ):\n        self.mean = nn.Parameter(self.mean.to(torch_device).to(torch_dtype))\n        self.std = nn.Parameter(self.std.to(torch_device).to(torch_dtype))\n        return self\n\n    def scale(self, embeds):\n        embeds = (embeds - self.mean) * 1.0 / self.std\n        return embeds\n\n    def unscale(self, embeds):\n        embeds = (embeds * self.std) + self.mean\n        return embeds\n"
  },
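`StableUnCLIPImageNormalizer` above is just a learned mean/std pair whose `scale` and `unscale` are exact inverses. A small round-trip check, assuming the class is importable from the module path shown above; the statistics are made-up stand-ins for the checkpoint values.

```py
import torch

from diffusers.pipelines.stable_diffusion.stable_unclip_image_normalizer import StableUnCLIPImageNormalizer

# Hypothetical statistics; in the pipeline these come from the pretrained checkpoint.
normalizer = StableUnCLIPImageNormalizer(embedding_dim=4)
with torch.no_grad():
    normalizer.mean.copy_(torch.tensor([[0.5, -0.2, 0.0, 1.0]]))
    normalizer.std.copy_(torch.tensor([[1.5, 0.7, 1.0, 2.0]]))

embeds = torch.randn(2, 4)
scaled = normalizer.scale(embeds)        # (embeds - mean) / std
restored = normalizer.unscale(scaled)    # scaled * std + mean
print(torch.allclose(embeds, restored, atol=1e-6))  # True: scale/unscale are inverses
```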
  {
    "path": "diffusers/pipelines/stable_diffusion_safe/__init__.py",
    "content": "from dataclasses import dataclass\nfrom enum import Enum\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL\nfrom PIL import Image\n\nfrom ...utils import BaseOutput, is_torch_available, is_transformers_available\n\n\n@dataclass\nclass SafetyConfig(object):\n    WEAK = {\n        \"sld_warmup_steps\": 15,\n        \"sld_guidance_scale\": 20,\n        \"sld_threshold\": 0.0,\n        \"sld_momentum_scale\": 0.0,\n        \"sld_mom_beta\": 0.0,\n    }\n    MEDIUM = {\n        \"sld_warmup_steps\": 10,\n        \"sld_guidance_scale\": 1000,\n        \"sld_threshold\": 0.01,\n        \"sld_momentum_scale\": 0.3,\n        \"sld_mom_beta\": 0.4,\n    }\n    STRONG = {\n        \"sld_warmup_steps\": 7,\n        \"sld_guidance_scale\": 2000,\n        \"sld_threshold\": 0.025,\n        \"sld_momentum_scale\": 0.5,\n        \"sld_mom_beta\": 0.7,\n    }\n    MAX = {\n        \"sld_warmup_steps\": 0,\n        \"sld_guidance_scale\": 5000,\n        \"sld_threshold\": 1.0,\n        \"sld_momentum_scale\": 0.5,\n        \"sld_mom_beta\": 0.7,\n    }\n\n\n@dataclass\nclass StableDiffusionSafePipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for Safe Stable Diffusion pipelines.\n\n    Args:\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,\n            num_channels)`. PIL images or numpy array present the denoised images of the diffusion pipeline.\n        nsfw_content_detected (`List[bool]`)\n            List of flags denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, or `None` if safety checking could not be performed.\n        images (`List[PIL.Image.Image]` or `np.ndarray`)\n            List of denoised PIL images that were flagged by the safety checker any may contain \"not-safe-for-work\"\n            (nsfw) content, or `None` if no safety check was performed or no images were flagged.\n        applied_safety_concept (`str`)\n            The safety concept that was applied for safety guidance, or `None` if safety guidance was disabled\n    \"\"\"\n\n    images: Union[List[PIL.Image.Image], np.ndarray]\n    nsfw_content_detected: Optional[List[bool]]\n    unsafe_images: Optional[Union[List[PIL.Image.Image], np.ndarray]]\n    applied_safety_concept: Optional[str]\n\n\nif is_transformers_available() and is_torch_available():\n    from .pipeline_stable_diffusion_safe import StableDiffusionPipelineSafe\n    from .safety_checker import SafeStableDiffusionSafetyChecker\n"
  },
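The `SafetyConfig` presets above are plain dicts of Safe Latent Diffusion knobs (warmup steps, guidance scale, threshold, momentum), intended to be splatted into `StableDiffusionPipelineSafe.__call__` as keyword arguments. A usage sketch; the hub model id is an assumption for illustration and should be checked against the model card.

```py
import torch

from diffusers import StableDiffusionPipelineSafe
from diffusers.pipelines.stable_diffusion_safe import SafetyConfig

# Model id is assumed for illustration; any Safe Latent Diffusion checkpoint works.
pipe = StableDiffusionPipelineSafe.from_pretrained(
    "AIML-TUDA/stable-diffusion-safe", torch_dtype=torch.float16
).to("cuda")

prompt = "a photograph of an astronaut riding a horse"
# Each preset is just a dict of SLD parameters, so it can be passed straight through.
image = pipe(prompt=prompt, **SafetyConfig.MEDIUM).images[0]
image.save("astronaut_safe.png")
```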
  {
    "path": "diffusers/pipelines/stable_diffusion_safe/pipeline_stable_diffusion_safe.py",
    "content": "import inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport torch\nfrom packaging import version\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import FrozenDict\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import deprecate, is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import StableDiffusionSafePipelineOutput\nfrom .safety_checker import SafeStableDiffusionSafetyChecker\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass StableDiffusionPipelineSafe(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-image generation using Safe Latent Diffusion.\n\n    The implementation is based on the [`StableDiffusionPipeline`]\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    _optional_components = [\"safety_checker\", \"feature_extractor\"]\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: SafeStableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__()\n        safety_concept: Optional[str] = (\n            \"an image showing hate, harassment, violence, suffering, humiliation, harm, suicide, sexual, nudity,\"\n            \" bodily fluids, blood, obscene gestures, illegal activity, drug use, theft, vandalism, weapons, child\"\n            \" abuse, brutality, cruelty\"\n        )\n\n        if hasattr(scheduler.config, \"steps_offset\") and scheduler.config.steps_offset != 1:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`\"\n                f\" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure \"\n                \"to update the config accordingly as leaving `steps_offset` might led to incorrect results\"\n                \" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,\"\n                \" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`\"\n                \" file\"\n            )\n            deprecate(\"steps_offset!=1\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"steps_offset\"] = 1\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if hasattr(scheduler.config, \"clip_sample\") and scheduler.config.clip_sample is True:\n            deprecation_message = (\n                f\"The configuration file of this scheduler: {scheduler} has not set the configuration `clip_sample`.\"\n                \" `clip_sample` should be set to False in the configuration file. Please make sure to update the\"\n                \" config accordingly as not setting `clip_sample` in the config might lead to incorrect results in\"\n                \" future versions. 
If you have downloaded this checkpoint from the Hugging Face Hub, it would be very\"\n                \" nice if you could open a Pull request for the `scheduler/scheduler_config.json` file\"\n            )\n            deprecate(\"clip_sample not set\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(scheduler.config)\n            new_config[\"clip_sample\"] = False\n            scheduler._internal_dict = FrozenDict(new_config)\n\n        if safety_checker is None and requires_safety_checker:\n            logger.warning(\n                f\"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure\"\n                \" that you abide by the conditions of the Stable Diffusion license and do not expose unfiltered\"\n                \" results in services or applications open to the public. Both the diffusers team and Hugging Face\"\n                \" strongly recommend keeping the safety filter enabled in all public facing circumstances, disabling\"\n                \" it only for use-cases that involve analyzing network behavior or auditing its results. For more\"\n                \" information, please have a look at https://github.com/huggingface/diffusers/pull/254 .\"\n            )\n\n        if safety_checker is not None and feature_extractor is None:\n            raise ValueError(\n                f\"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety\"\n                \" checker. If you do not want to use the safety checker, you can pass `safety_checker=None` instead.\"\n            )\n\n        is_unet_version_less_0_9_0 = hasattr(unet.config, \"_diffusers_version\") and version.parse(\n            version.parse(unet.config._diffusers_version).base_version\n        ) < version.parse(\"0.9.0.dev0\")\n        is_unet_sample_size_less_64 = hasattr(unet.config, \"sample_size\") and unet.config.sample_size < 64\n        if is_unet_version_less_0_9_0 and is_unet_sample_size_less_64:\n            deprecation_message = (\n                \"The configuration file of the unet has set the default `sample_size` to smaller than\"\n                \" 64 which seems highly unlikely. If your checkpoint is a fine-tuned version of any of the\"\n                \" following: \\n- CompVis/stable-diffusion-v1-4 \\n- CompVis/stable-diffusion-v1-3 \\n-\"\n                \" CompVis/stable-diffusion-v1-2 \\n- CompVis/stable-diffusion-v1-1 \\n- runwayml/stable-diffusion-v1-5\"\n                \" \\n- runwayml/stable-diffusion-inpainting \\n you should change 'sample_size' to 64 in the\"\n                \" configuration file. Please make sure to update the config accordingly as leaving `sample_size=32`\"\n                \" in the config might lead to incorrect results in future versions. 
If you have downloaded this\"\n                \" checkpoint from the Hugging Face Hub, it would be very nice if you could open a Pull request for\"\n                \" the `unet/config.json` file\"\n            )\n            deprecate(\"sample_size<64\", \"1.0.0\", deprecation_message, standard_warn=False)\n            new_config = dict(unet.config)\n            new_config[\"sample_size\"] = 64\n            unet._internal_dict = FrozenDict(new_config)\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n            safety_checker=safety_checker,\n            feature_extractor=feature_extractor,\n        )\n        self._safety_text_concept = safety_concept\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.register_to_config(requires_safety_checker=requires_safety_checker)\n\n    @property\n    def safety_concept(self):\n        r\"\"\"\n        Getter method for the safety concept used with SLD\n\n        Returns:\n            `str`: The text describing the safety concept\n        \"\"\"\n        return self._safety_text_concept\n\n    @safety_concept.setter\n    def safety_concept(self, concept):\n        r\"\"\"\n        Setter method for the safety concept used with SLD\n\n        Args:\n            concept (`str`):\n                The text of the new safety concept\n        \"\"\"\n        self._safety_text_concept = concept\n\n    def enable_sequential_cpu_offload(self):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(\"cuda\")\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae, self.safety_checker]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt,\n        enable_safety_guidance,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n        \"\"\"\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n        untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"pt\").input_ids\n\n        if not torch.equal(text_input_ids, untruncated_ids):\n            removed_text = self.tokenizer.batch_decode(untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1])\n            logger.warning(\n                \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n            )\n\n        if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n            attention_mask = text_inputs.attention_mask.to(device)\n        else:\n            attention_mask = None\n\n        prompt_embeds = self.text_encoder(\n            text_input_ids.to(device),\n            attention_mask=attention_mask,\n        )\n        prompt_embeds = prompt_embeds[0]\n\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same 
type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # Encode the safety concept text\n            if enable_safety_guidance:\n                safety_concept_input = self.tokenizer(\n                    [self._safety_text_concept],\n                    padding=\"max_length\",\n                    max_length=max_length,\n                    truncation=True,\n                    return_tensors=\"pt\",\n                )\n                safety_embeddings = self.text_encoder(safety_concept_input.input_ids.to(self.device))[0]\n\n                # duplicate safety embeddings for each generation per prompt, using mps friendly method\n                seq_len = safety_embeddings.shape[1]\n                safety_embeddings = safety_embeddings.repeat(batch_size, num_images_per_prompt, 1)\n                safety_embeddings = safety_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n                # For classifier free guidance + sld, we need to do three forward passes.\n                # Here we concatenate the unconditional and text embeddings into a single batch\n                # to avoid doing three forward passes\n                prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds, safety_embeddings])\n\n            else:\n                # For classifier free guidance, we need to do two forward passes.\n                # Here we concatenate the unconditional and text embeddings into a single batch\n                # to avoid doing two forward passes\n                prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def run_safety_checker(self, image, device, dtype, enable_safety_guidance):\n        if self.safety_checker is 
not None:\n            images = image.copy()\n            safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors=\"pt\").to(device)\n            image, has_nsfw_concept = self.safety_checker(\n                images=image, clip_input=safety_checker_input.pixel_values.to(dtype)\n            )\n            flagged_images = np.zeros((2, *image.shape[1:]))\n            if any(has_nsfw_concept):\n                logger.warning(\n                    \"Potential NSFW content was detected in one or more images. A black image will be returned\"\n                    \" instead. \"\n                    f\"{'You may look at these images in the `unsafe_images` variable of the output at your own discretion.' if enable_safety_guidance else 'Try again with a different prompt and/or seed.'}\"\n                )\n                for idx, has_nsfw_concept in enumerate(has_nsfw_concept):\n                    if has_nsfw_concept:\n                        flagged_images[idx] = images[idx]\n                        image[idx] = np.zeros(image[idx].shape)  # black image\n        else:\n            has_nsfw_concept = None\n            flagged_images = None\n        return image, has_nsfw_concept, flagged_images\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            
callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    def perform_safety_guidance(\n        self,\n        enable_safety_guidance,\n        safety_momentum,\n        noise_guidance,\n        noise_pred_out,\n        i,\n        sld_guidance_scale,\n        sld_warmup_steps,\n        sld_threshold,\n        sld_momentum_scale,\n        sld_mom_beta,\n    ):\n        # Perform SLD guidance\n        if enable_safety_guidance:\n            if safety_momentum is None:\n                safety_momentum = torch.zeros_like(noise_guidance)\n            noise_pred_text, noise_pred_uncond = noise_pred_out[0], noise_pred_out[1]\n            noise_pred_safety_concept = noise_pred_out[2]\n\n            # Equation 6\n            scale = torch.clamp(torch.abs((noise_pred_text - noise_pred_safety_concept)) * sld_guidance_scale, max=1.0)\n\n            # Equation 6\n            safety_concept_scale = torch.where(\n                (noise_pred_text - noise_pred_safety_concept) >= sld_threshold, torch.zeros_like(scale), scale\n            )\n\n            # Equation 4\n            noise_guidance_safety = torch.mul((noise_pred_safety_concept - noise_pred_uncond), safety_concept_scale)\n\n            # Equation 7\n            noise_guidance_safety = noise_guidance_safety + sld_momentum_scale * safety_momentum\n\n            # Equation 8\n            safety_momentum = sld_mom_beta * safety_momentum + (1 - sld_mom_beta) * noise_guidance_safety\n\n            if i >= sld_warmup_steps:  # Warmup\n                # Equation 3\n                noise_guidance = noise_guidance - noise_guidance_safety\n        return noise_guidance, safety_momentum\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        sld_guidance_scale: Optional[float] = 1000,\n        sld_warmup_steps: Optional[int] = 10,\n        sld_threshold: Optional[float] = 0.01,\n        sld_momentum_scale: Optional[float] = 0.3,\n        sld_mom_beta: Optional[float] = 0.4,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the 
generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            sld_guidance_scale (`float`, *optional*, defaults to 1000):\n                Safe latent guidance as defined in [Safe Latent Diffusion](https://arxiv.org/abs/2211.05105).\n                `sld_guidance_scale` is defined as sS of Eq. 6. If set to be less than 1, safety guidance will be\n                disabled.\n            sld_warmup_steps (`int`, *optional*, defaults to 10):\n                Number of warmup steps for safety guidance. 
SLD will only be applied for diffusion steps greater than\n                `sld_warmup_steps`. `sld_warmup_steps` is defined as `delta` of [Safe Latent\n                Diffusion](https://arxiv.org/abs/2211.05105).\n            sld_threshold (`float`, *optional*, defaults to 0.01):\n                Threshold that separates the hyperplane between appropriate and inappropriate images. `sld_threshold`\n                is defined as `lamda` of Eq. 5 in [Safe Latent Diffusion](https://arxiv.org/abs/2211.05105).\n            sld_momentum_scale (`float`, *optional*, defaults to 0.3):\n                Scale of the SLD momentum to be added to the safety guidance at each diffusion step. If set to 0.0\n                momentum will be disabled. Momentum is already built up during warmup, i.e. for diffusion steps smaller\n                than `sld_warmup_steps`. `sld_momentum_scale` is defined as `sm` of Eq. 7 in [Safe Latent\n                Diffusion](https://arxiv.org/abs/2211.05105).\n            sld_mom_beta (`float`, *optional*, defaults to 0.4):\n                Defines how safety guidance momentum builds up. `sld_mom_beta` indicates how much of the previous\n                momentum will be kept. Momentum is already built up during warmup, i.e. for diffusion steps smaller\n                than `sld_warmup_steps`. `sld_mom_beta` is defined as `beta m` of Eq. 8 in [Safe Latent\n                Diffusion](https://arxiv.org/abs/2211.05105).\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, height, width, callback_steps)\n\n        # 2. Define call parameters\n        batch_size = 1 if isinstance(prompt, str) else len(prompt)\n        device = self._execution_device\n\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        enable_safety_guidance = sld_guidance_scale > 1.0 and do_classifier_free_guidance\n        if not enable_safety_guidance:\n            warnings.warn(\"Safety checker disabled!\")\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt, enable_safety_guidance\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. 
Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs.\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        safety_momentum = None\n\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = (\n                    torch.cat([latents] * (3 if enable_safety_guidance else 2))\n                    if do_classifier_free_guidance\n                    else latents\n                )\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=prompt_embeds).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_out = noise_pred.chunk((3 if enable_safety_guidance else 2))\n                    noise_pred_uncond, noise_pred_text = noise_pred_out[0], noise_pred_out[1]\n\n                    # default classifier free guidance\n                    noise_guidance = noise_pred_text - noise_pred_uncond\n\n                    # Perform SLD guidance\n                    if enable_safety_guidance:\n                        if safety_momentum is None:\n                            safety_momentum = torch.zeros_like(noise_guidance)\n                        noise_pred_safety_concept = noise_pred_out[2]\n\n                        # Equation 6\n                        scale = torch.clamp(\n                            torch.abs((noise_pred_text - noise_pred_safety_concept)) * sld_guidance_scale, max=1.0\n                        )\n\n                        # Equation 6\n                        safety_concept_scale = torch.where(\n                            (noise_pred_text - noise_pred_safety_concept) >= sld_threshold,\n                            torch.zeros_like(scale),\n                            scale,\n                        )\n\n                        # Equation 4\n                        noise_guidance_safety = torch.mul(\n                            (noise_pred_safety_concept - noise_pred_uncond), safety_concept_scale\n                        )\n\n                        # Equation 7\n                        noise_guidance_safety = noise_guidance_safety + sld_momentum_scale * safety_momentum\n\n                        # Equation 8\n                        safety_momentum = sld_mom_beta * safety_momentum + (1 - sld_mom_beta) * noise_guidance_safety\n\n                        if i >= sld_warmup_steps:  # Warmup\n                            # Equation 3\n                            noise_guidance = noise_guidance - noise_guidance_safety\n\n                    noise_pred = noise_pred_uncond + guidance_scale * noise_guidance\n\n                    # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the 
callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        # 8. Post-processing\n        image = self.decode_latents(latents)\n\n        # 9. Run safety checker\n        image, has_nsfw_concept, flagged_images = self.run_safety_checker(\n            image, device, prompt_embeds.dtype, enable_safety_guidance\n        )\n\n        # 10. Convert to PIL\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n            if flagged_images is not None:\n                flagged_images = self.numpy_to_pil(flagged_images)\n\n        if not return_dict:\n            return (\n                image,\n                has_nsfw_concept,\n                self._safety_text_concept if enable_safety_guidance else None,\n                flagged_images,\n            )\n\n        return StableDiffusionSafePipelineOutput(\n            images=image,\n            nsfw_content_detected=has_nsfw_concept,\n            applied_safety_concept=self._safety_text_concept if enable_safety_guidance else None,\n            unsafe_images=flagged_images,\n        )\n"
  },
  {
    "path": "diffusers/pipelines/stable_diffusion_safe/safety_checker.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\nimport torch.nn as nn\nfrom transformers import CLIPConfig, CLIPVisionModel, PreTrainedModel\n\nfrom ...utils import logging\n\n\nlogger = logging.get_logger(__name__)\n\n\ndef cosine_distance(image_embeds, text_embeds):\n    normalized_image_embeds = nn.functional.normalize(image_embeds)\n    normalized_text_embeds = nn.functional.normalize(text_embeds)\n    return torch.mm(normalized_image_embeds, normalized_text_embeds.t())\n\n\nclass SafeStableDiffusionSafetyChecker(PreTrainedModel):\n    config_class = CLIPConfig\n\n    _no_split_modules = [\"CLIPEncoderLayer\"]\n\n    def __init__(self, config: CLIPConfig):\n        super().__init__(config)\n\n        self.vision_model = CLIPVisionModel(config.vision_config)\n        self.visual_projection = nn.Linear(config.vision_config.hidden_size, config.projection_dim, bias=False)\n\n        self.concept_embeds = nn.Parameter(torch.ones(17, config.projection_dim), requires_grad=False)\n        self.special_care_embeds = nn.Parameter(torch.ones(3, config.projection_dim), requires_grad=False)\n\n        self.concept_embeds_weights = nn.Parameter(torch.ones(17), requires_grad=False)\n        self.special_care_embeds_weights = nn.Parameter(torch.ones(3), requires_grad=False)\n\n    @torch.no_grad()\n    def forward(self, clip_input, images):\n        pooled_output = self.vision_model(clip_input)[1]  # pooled_output\n        image_embeds = self.visual_projection(pooled_output)\n\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        special_cos_dist = cosine_distance(image_embeds, self.special_care_embeds).cpu().float().numpy()\n        cos_dist = cosine_distance(image_embeds, self.concept_embeds).cpu().float().numpy()\n\n        result = []\n        batch_size = image_embeds.shape[0]\n        for i in range(batch_size):\n            result_img = {\"special_scores\": {}, \"special_care\": [], \"concept_scores\": {}, \"bad_concepts\": []}\n\n            # increase this value to create a stronger `nfsw` filter\n            # at the cost of increasing the possibility of filtering benign images\n            adjustment = 0.0\n\n            for concept_idx in range(len(special_cos_dist[0])):\n                concept_cos = special_cos_dist[i][concept_idx]\n                concept_threshold = self.special_care_embeds_weights[concept_idx].item()\n                result_img[\"special_scores\"][concept_idx] = round(concept_cos - concept_threshold + adjustment, 3)\n                if result_img[\"special_scores\"][concept_idx] > 0:\n                    result_img[\"special_care\"].append({concept_idx, result_img[\"special_scores\"][concept_idx]})\n                    adjustment = 0.01\n\n            for concept_idx in range(len(cos_dist[0])):\n                concept_cos = cos_dist[i][concept_idx]\n                concept_threshold = 
self.concept_embeds_weights[concept_idx].item()\n                result_img[\"concept_scores\"][concept_idx] = round(concept_cos - concept_threshold + adjustment, 3)\n                if result_img[\"concept_scores\"][concept_idx] > 0:\n                    result_img[\"bad_concepts\"].append(concept_idx)\n\n            result.append(result_img)\n\n        has_nsfw_concepts = [len(res[\"bad_concepts\"]) > 0 for res in result]\n\n        return images, has_nsfw_concepts\n\n    @torch.no_grad()\n    def forward_onnx(self, clip_input: torch.FloatTensor, images: torch.FloatTensor):\n        pooled_output = self.vision_model(clip_input)[1]  # pooled_output\n        image_embeds = self.visual_projection(pooled_output)\n\n        special_cos_dist = cosine_distance(image_embeds, self.special_care_embeds)\n        cos_dist = cosine_distance(image_embeds, self.concept_embeds)\n\n        # increase this value to create a stronger `nsfw` filter\n        # at the cost of increasing the possibility of filtering benign images\n        adjustment = 0.0\n\n        special_scores = special_cos_dist - self.special_care_embeds_weights + adjustment\n        # special_scores = special_scores.round(decimals=3)\n        special_care = torch.any(special_scores > 0, dim=1)\n        special_adjustment = special_care * 0.01\n        special_adjustment = special_adjustment.unsqueeze(1).expand(-1, cos_dist.shape[1])\n\n        concept_scores = (cos_dist - self.concept_embeds_weights) + special_adjustment\n        # concept_scores = concept_scores.round(decimals=3)\n        has_nsfw_concepts = torch.any(concept_scores > 0, dim=1)\n\n        return images, has_nsfw_concepts\n"
  },
  {
    "path": "diffusers/pipelines/stochastic_karras_ve/__init__.py",
    "content": "from .pipeline_stochastic_karras_ve import KarrasVePipeline\n"
  },
  {
    "path": "diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\n\nfrom ...models import UNet2DModel\nfrom ...schedulers import KarrasVeScheduler\nfrom ...utils import randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nclass KarrasVePipeline(DiffusionPipeline):\n    r\"\"\"\n    Stochastic sampling from Karras et al. [1] tailored to the Variance-Expanding (VE) models [2]. Use Algorithm 2 and\n    the VE column of Table 1 from [1] for reference.\n\n    [1] Karras, Tero, et al. \"Elucidating the Design Space of Diffusion-Based Generative Models.\"\n    https://arxiv.org/abs/2206.00364 [2] Song, Yang, et al. \"Score-based generative modeling through stochastic\n    differential equations.\" https://arxiv.org/abs/2011.13456\n\n    Parameters:\n        unet ([`UNet2DModel`]): U-Net architecture to denoise the encoded image.\n        scheduler ([`KarrasVeScheduler`]):\n            Scheduler for the diffusion process to be used in combination with `unet` to denoise the encoded image.\n    \"\"\"\n\n    # add type hints for linting\n    unet: UNet2DModel\n    scheduler: KarrasVeScheduler\n\n    def __init__(self, unet: UNet2DModel, scheduler: KarrasVeScheduler):\n        super().__init__()\n        self.register_modules(unet=unet, scheduler=scheduler)\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        batch_size: int = 1,\n        num_inference_steps: int = 50,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        **kwargs,\n    ) -> Union[Tuple, ImagePipelineOutput]:\n        r\"\"\"\n        Args:\n            batch_size (`int`, *optional*, defaults to 1):\n                The number of images to generate.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.utils.ImagePipelineOutput`] if `return_dict` is\n            True, otherwise a `tuple. 
When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n\n        img_size = self.unet.config.sample_size\n        shape = (batch_size, 3, img_size, img_size)\n\n        model = self.unet\n\n        # sample x_0 ~ N(0, sigma_0^2 * I)\n        sample = randn_tensor(shape, generator=generator, device=self.device) * self.scheduler.init_noise_sigma\n\n        self.scheduler.set_timesteps(num_inference_steps)\n\n        for t in self.progress_bar(self.scheduler.timesteps):\n            # here sigma_t == t_i from the paper\n            sigma = self.scheduler.schedule[t]\n            sigma_prev = self.scheduler.schedule[t - 1] if t > 0 else 0\n\n            # 1. Select temporarily increased noise level sigma_hat\n            # 2. Add new noise to move from sample_i to sample_hat\n            sample_hat, sigma_hat = self.scheduler.add_noise_to_input(sample, sigma, generator=generator)\n\n            # 3. Predict the noise residual given the noise magnitude `sigma_hat`\n            # The model inputs and output are adjusted by following eq. (213) in [1].\n            model_output = (sigma_hat / 2) * model((sample_hat + 1) / 2, sigma_hat / 2).sample\n\n            # 4. Evaluate dx/dt at sigma_hat\n            # 5. Take Euler step from sigma to sigma_prev\n            step_output = self.scheduler.step(model_output, sigma_hat, sigma_prev, sample_hat)\n\n            if sigma_prev != 0:\n                # 6. Apply 2nd order correction\n                # The model inputs and output are adjusted by following eq. (213) in [1].\n                model_output = (sigma_prev / 2) * model((step_output.prev_sample + 1) / 2, sigma_prev / 2).sample\n                step_output = self.scheduler.step_correct(\n                    model_output,\n                    sigma_hat,\n                    sigma_prev,\n                    sample_hat,\n                    step_output.prev_sample,\n                    step_output[\"derivative\"],\n                )\n            sample = step_output.prev_sample\n\n        sample = (sample / 2 + 0.5).clamp(0, 1)\n        image = sample.cpu().permute(0, 2, 3, 1).numpy()\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/text_to_video_synthesis/__init__.py",
    "content": "from dataclasses import dataclass\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport torch\n\nfrom ...utils import BaseOutput, OptionalDependencyNotAvailable, is_torch_available, is_transformers_available\n\n\n@dataclass\nclass TextToVideoSDPipelineOutput(BaseOutput):\n    \"\"\"\n    Output class for text to video pipelines.\n\n    Args:\n        frames (`List[np.ndarray]` or `torch.FloatTensor`)\n            List of denoised frames (essentially images) as NumPy arrays of shape `(height, width, num_channels)` or as\n            a `torch` tensor. NumPy array present the denoised images of the diffusion pipeline. The length of the list\n            denotes the video length i.e., the number of frames.\n    \"\"\"\n\n    frames: Union[List[np.ndarray], torch.FloatTensor]\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import *  # noqa F403\nelse:\n    from .pipeline_text_to_video_synth import TextToVideoSDPipeline  # noqa: F401\n    from .pipeline_text_to_video_zero import TextToVideoZeroPipeline\n"
  },
  {
    "path": "diffusers/pipelines/text_to_video_synthesis/pipeline_text_to_video_synth.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import Any, Callable, Dict, List, Optional, Union\n\nimport numpy as np\nimport torch\nfrom transformers import CLIPTextModel, CLIPTokenizer\n\nfrom ...loaders import TextualInversionLoaderMixin\nfrom ...models import AutoencoderKL, UNet3DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import (\n    is_accelerate_available,\n    is_accelerate_version,\n    logging,\n    randn_tensor,\n    replace_example_docstring,\n)\nfrom ..pipeline_utils import DiffusionPipeline\nfrom . import TextToVideoSDPipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nEXAMPLE_DOC_STRING = \"\"\"\n    Examples:\n        ```py\n        >>> import torch\n        >>> from diffusers import TextToVideoSDPipeline\n        >>> from diffusers.utils import export_to_video\n\n        >>> pipe = TextToVideoSDPipeline.from_pretrained(\n        ...     \"damo-vilab/text-to-video-ms-1.7b\", torch_dtype=torch.float16, variant=\"fp16\"\n        ... )\n        >>> pipe.enable_model_cpu_offload()\n\n        >>> prompt = \"Spiderman is surfing\"\n        >>> video_frames = pipe(prompt).frames\n        >>> video_path = export_to_video(video_frames)\n        >>> video_path\n        ```\n\"\"\"\n\n\ndef tensor2vid(video: torch.Tensor, mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) -> List[np.ndarray]:\n    # This code is copied from https://github.com/modelscope/modelscope/blob/1509fdb973e5871f37148a4b5e5964cafd43e64d/modelscope/pipelines/multi_modal/text_to_video_synthesis_pipeline.py#L78\n    # reshape to ncfhw\n    mean = torch.tensor(mean, device=video.device).reshape(1, -1, 1, 1, 1)\n    std = torch.tensor(std, device=video.device).reshape(1, -1, 1, 1, 1)\n    # unnormalize back to [0,1]\n    video = video.mul_(std).add_(mean)\n    video.clamp_(0, 1)\n    # prepare the final outputs\n    i, c, f, h, w = video.shape\n    images = video.permute(2, 3, 0, 4, 1).reshape(\n        f, h, i * w, c\n    )  # 1st (frames, h, batch_size, w, c) 2nd (frames, h, batch_size * w, c)\n    images = images.unbind(dim=0)  # prepare a list of indvidual (consecutive frames)\n    images = [(image.cpu().numpy() * 255).astype(\"uint8\") for image in images]  # f h w c\n    return images\n\n\nclass TextToVideoSDPipeline(DiffusionPipeline, TextualInversionLoaderMixin):\n    r\"\"\"\n    Pipeline for text-to-video generation.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
Same as Stable Diffusion 2.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet3DConditionModel`]): Conditional U-Net architecture to denoise the encoded video latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet3DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            vae=vae,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            unet=unet,\n            scheduler=scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_slicing\n    def enable_vae_slicing(self):\n        r\"\"\"\n        Enable sliced VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor in slices to compute decoding in several\n        steps. This is useful to save some memory and allow larger batch sizes.\n        \"\"\"\n        self.vae.enable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_slicing\n    def disable_vae_slicing(self):\n        r\"\"\"\n        Disable sliced VAE decoding. If `enable_vae_slicing` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_slicing()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.enable_vae_tiling\n    def enable_vae_tiling(self):\n        r\"\"\"\n        Enable tiled VAE decoding.\n\n        When this option is enabled, the VAE will split the input tensor into tiles to compute decoding and encoding in\n        several steps. This is useful to save a large amount of memory and to allow the processing of larger images.\n        \"\"\"\n        self.vae.enable_tiling()\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.disable_vae_tiling\n    def disable_vae_tiling(self):\n        r\"\"\"\n        Disable tiled VAE decoding. If `enable_vae_tiling` was previously invoked, this method will go back to\n        computing decoding in one step.\n        \"\"\"\n        self.vae.disable_tiling()\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded\n        to GPU only when their specific submodule has its `forward` method called. Note that offloading happens on a\n        submodule basis. 
Memory savings are higher than with `enable_model_cpu_offload`, but performance is lower.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.14.0\"):\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"`enable_sequential_cpu_offload` requires `accelerate v0.14.0` or higher\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        for cpu_offloaded_model in [self.unet, self.text_encoder, self.vae]:\n            cpu_offload(cpu_offloaded_model, device)\n\n    def enable_model_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared\n        to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`\n        method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with\n        `enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.\n        \"\"\"\n        if is_accelerate_available() and is_accelerate_version(\">=\", \"0.17.0.dev0\"):\n            from accelerate import cpu_offload_with_hook\n        else:\n            raise ImportError(\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        if self.device.type != \"cpu\":\n            self.to(\"cpu\", silence_dtype_warnings=True)\n            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)\n\n        hook = None\n        for cpu_offloaded_model in [self.text_encoder, self.unet, self.vae]:\n            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)\n\n        # We'll offload the last model manually.\n        self.final_offload_hook = hook\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.unet, \"_hf_hook\"):\n            return self.device\n        for module in self.unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._encode_prompt\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        negative_prompt=None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n    ):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n             prompt (`str` or `List[str]`, *optional*):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. 
If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n        \"\"\"\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        if prompt_embeds is None:\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                prompt = self.maybe_convert_prompt(prompt, self.tokenizer)\n\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = text_inputs.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            prompt_embeds = self.text_encoder(\n                text_input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            prompt_embeds = prompt_embeds[0]\n\n        prompt_embeds = prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance and negative_prompt_embeds is None:\n            uncond_tokens: List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif prompt is not None and type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type as `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            # textual inversion: process multi-vector tokens if necessary\n            if isinstance(self, TextualInversionLoaderMixin):\n                uncond_tokens = self.maybe_convert_prompt(uncond_tokens, self.tokenizer)\n\n            max_length = prompt_embeds.shape[1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = negative_prompt_embeds[0]\n\n        if do_classifier_free_guidance:\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n\n            negative_prompt_embeds = negative_prompt_embeds.to(dtype=self.text_encoder.dtype, device=device)\n\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def decode_latents(self, latents):\n        latents = 1 / self.vae.config.scaling_factor * latents\n\n        batch_size, channels, num_frames, height, width = latents.shape\n        latents = latents.permute(0, 2, 1, 3, 4).reshape(batch_size * num_frames, channels, height, width)\n\n        image = self.vae.decode(latents).sample\n        video = (\n            image[None, :]\n            .reshape(\n                (\n                    batch_size,\n                    num_frames,\n                    -1,\n                )\n                + image.shape[2:]\n            )\n            .permute(0, 2, 1, 3, 4)\n        )\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        video = video.float()\n        return video\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if 
accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    def prepare_latents(\n        self, batch_size, num_channels_latents, num_frames, height, width, dtype, device, generator, latents=None\n    ):\n        shape = (\n            batch_size,\n            num_channels_latents,\n            num_frames,\n            height // self.vae_scale_factor,\n            width // self.vae_scale_factor,\n        )\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. 
Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    @replace_example_docstring(EXAMPLE_DOC_STRING)\n    def __call__(\n        self,\n        prompt: Union[str, List[str]] = None,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_frames: int = 16,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 9.0,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        prompt_embeds: Optional[torch.FloatTensor] = None,\n        negative_prompt_embeds: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"np\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the video generation. If not defined, one has to pass `prompt_embeds`\n                instead.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated video.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated video.\n            num_frames (`int`, *optional*, defaults to 16):\n                The number of video frames that are generated. Defaults to 16 frames which at 8 frames per second\n                amounts to 2 seconds of video.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to higher quality videos at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 9.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate videos that are closely linked to the text `prompt`,\n                usually at the expense of lower video quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the video generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. 
Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for video\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`. Latents should be of shape\n                `(batch_size, num_channel, num_frames, height, width)`.\n            prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not\n                provided, text embeddings will be generated from `prompt` input argument.\n            negative_prompt_embeds (`torch.FloatTensor`, *optional*):\n                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt\n                weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input\n                argument.\n            output_type (`str`, *optional*, defaults to `\"np\"`):\n                The output format of the generated video. Choose between `torch.FloatTensor` and `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.TextToVideoSDPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that, if specified, is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Examples:\n\n        Returns:\n            [`~pipelines.stable_diffusion.TextToVideoSDPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.TextToVideoSDPipelineOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is a list with the generated frames.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        num_images_per_prompt = 1\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(\n            prompt, height, width, callback_steps, negative_prompt, prompt_embeds, negative_prompt_embeds\n        )\n\n        # 2. 
Define call parameters\n        if prompt is not None and isinstance(prompt, str):\n            batch_size = 1\n        elif prompt is not None and isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            batch_size = prompt_embeds.shape[0]\n\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt,\n            device,\n            num_images_per_prompt,\n            do_classifier_free_guidance,\n            negative_prompt,\n            prompt_embeds=prompt_embeds,\n            negative_prompt_embeds=negative_prompt_embeds,\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            num_frames,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. Denoising loop\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n        with self.progress_bar(total=num_inference_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # reshape latents\n                bsz, channel, frames, width, height = latents.shape\n                latents = latents.permute(0, 2, 1, 3, 4).reshape(bsz * frames, channel, width, height)\n                noise_pred = noise_pred.permute(0, 2, 1, 3, 4).reshape(bsz * frames, channel, width, height)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # reshape latents back\n                latents = latents[None, :].reshape(bsz, frames, channel, width, height).permute(0, 2, 1, 3, 4)\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 
or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n\n        video_tensor = self.decode_latents(latents)\n\n        if output_type == \"pt\":\n            video = video_tensor\n        else:\n            video = tensor2vid(video_tensor)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (video,)\n\n        return TextToVideoSDPipelineOutput(frames=video)\n"
  },
  {
    "path": "diffusers/pipelines/text_to_video_synthesis/pipeline_text_to_video_zero.py",
    "content": "import copy\nfrom dataclasses import dataclass\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.nn.functional as F\nfrom torch.nn.functional import grid_sample\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer\n\nfrom diffusers.models import AutoencoderKL, UNet2DConditionModel\nfrom diffusers.pipelines.stable_diffusion import StableDiffusionPipeline, StableDiffusionSafetyChecker\nfrom diffusers.schedulers import KarrasDiffusionSchedulers\nfrom diffusers.utils import BaseOutput\n\n\ndef rearrange_0(tensor, f):\n    F, C, H, W = tensor.size()\n    tensor = torch.permute(torch.reshape(tensor, (F // f, f, C, H, W)), (0, 2, 1, 3, 4))\n    return tensor\n\n\ndef rearrange_1(tensor):\n    B, C, F, H, W = tensor.size()\n    return torch.reshape(torch.permute(tensor, (0, 2, 1, 3, 4)), (B * F, C, H, W))\n\n\ndef rearrange_3(tensor, f):\n    F, D, C = tensor.size()\n    return torch.reshape(tensor, (F // f, f, D, C))\n\n\ndef rearrange_4(tensor):\n    B, F, D, C = tensor.size()\n    return torch.reshape(tensor, (B * F, D, C))\n\n\nclass CrossFrameAttnProcessor:\n    \"\"\"\n    Cross frame attention processor. For each frame the self-attention is replaced with attention with first frame\n\n    Args:\n        batch_size: The number that represents actual batch size, other than the frames.\n            For example, using calling unet with a single prompt and num_images_per_prompt=1, batch_size should be\n            equal to 2, due to classifier-free guidance.\n    \"\"\"\n\n    def __init__(self, batch_size=2):\n        self.batch_size = batch_size\n\n    def __call__(self, attn, hidden_states, encoder_hidden_states=None, attention_mask=None):\n        batch_size, sequence_length, _ = hidden_states.shape\n        attention_mask = attn.prepare_attention_mask(attention_mask, sequence_length, batch_size)\n        query = attn.to_q(hidden_states)\n\n        is_cross_attention = encoder_hidden_states is not None\n        if encoder_hidden_states is None:\n            encoder_hidden_states = hidden_states\n        elif attn.norm_cross:\n            encoder_hidden_states = attn.norm_encoder_hidden_states(encoder_hidden_states)\n\n        key = attn.to_k(encoder_hidden_states)\n        value = attn.to_v(encoder_hidden_states)\n\n        # Sparse Attention\n        if not is_cross_attention:\n            video_length = key.size()[0] // self.batch_size\n            first_frame_index = [0] * video_length\n\n            # rearrange keys to have batch and frames in the 1st and 2nd dims respectively\n            key = rearrange_3(key, video_length)\n            key = key[:, first_frame_index]\n            # rearrange values to have batch and frames in the 1st and 2nd dims respectively\n            value = rearrange_3(value, video_length)\n            value = value[:, first_frame_index]\n\n            # rearrange back to original shape\n            key = rearrange_4(key)\n            value = rearrange_4(value)\n\n        query = attn.head_to_batch_dim(query)\n        key = attn.head_to_batch_dim(key)\n        value = attn.head_to_batch_dim(value)\n\n        attention_probs = attn.get_attention_scores(query, key, attention_mask)\n        hidden_states = torch.bmm(attention_probs, value)\n        hidden_states = attn.batch_to_head_dim(hidden_states)\n\n        # linear proj\n        hidden_states = attn.to_out[0](hidden_states)\n        # dropout\n        hidden_states = attn.to_out[1](hidden_states)\n\n        
return hidden_states\n\n\n@dataclass\nclass TextToVideoPipelineOutput(BaseOutput):\n    images: Union[List[PIL.Image.Image], np.ndarray]\n    nsfw_content_detected: Optional[List[bool]]\n\n\ndef coords_grid(batch, ht, wd, device):\n    # Adapted from https://github.com/princeton-vl/RAFT/blob/master/core/utils/utils.py\n    coords = torch.meshgrid(torch.arange(ht, device=device), torch.arange(wd, device=device))\n    coords = torch.stack(coords[::-1], dim=0).float()\n    return coords[None].repeat(batch, 1, 1, 1)\n\n\ndef warp_single_latent(latent, reference_flow):\n    \"\"\"\n    Warp latent of a single frame with given flow\n\n    Args:\n        latent: latent code of a single frame\n        reference_flow: flow which to warp the latent with\n\n    Returns:\n        warped: warped latent\n    \"\"\"\n    _, _, H, W = reference_flow.size()\n    _, _, h, w = latent.size()\n    coords0 = coords_grid(1, H, W, device=latent.device).to(latent.dtype)\n\n    coords_t0 = coords0 + reference_flow\n    coords_t0[:, 0] /= W\n    coords_t0[:, 1] /= H\n\n    coords_t0 = coords_t0 * 2.0 - 1.0\n    coords_t0 = F.interpolate(coords_t0, size=(h, w), mode=\"bilinear\")\n    coords_t0 = torch.permute(coords_t0, (0, 2, 3, 1))\n\n    warped = grid_sample(latent, coords_t0, mode=\"nearest\", padding_mode=\"reflection\")\n    return warped\n\n\ndef create_motion_field(motion_field_strength_x, motion_field_strength_y, frame_ids, device, dtype):\n    \"\"\"\n    Create translation motion field\n\n    Args:\n        motion_field_strength_x: motion strength along x-axis\n        motion_field_strength_y: motion strength along y-axis\n        frame_ids: indexes of the frames the latents of which are being processed.\n            This is needed when we perform chunk-by-chunk inference\n        device: device\n        dtype: dtype\n\n    Returns:\n\n    \"\"\"\n    seq_length = len(frame_ids)\n    reference_flow = torch.zeros((seq_length, 2, 512, 512), device=device, dtype=dtype)\n    for fr_idx in range(seq_length):\n        reference_flow[fr_idx, 0, :, :] = motion_field_strength_x * (frame_ids[fr_idx])\n        reference_flow[fr_idx, 1, :, :] = motion_field_strength_y * (frame_ids[fr_idx])\n    return reference_flow\n\n\ndef create_motion_field_and_warp_latents(motion_field_strength_x, motion_field_strength_y, frame_ids, latents):\n    \"\"\"\n    Creates translation motion and warps the latents accordingly\n\n    Args:\n        motion_field_strength_x: motion strength along x-axis\n        motion_field_strength_y: motion strength along y-axis\n        frame_ids: indexes of the frames the latents of which are being processed.\n            This is needed when we perform chunk-by-chunk inference\n        latents: latent codes of frames\n\n    Returns:\n        warped_latents: warped latents\n    \"\"\"\n    motion_field = create_motion_field(\n        motion_field_strength_x=motion_field_strength_x,\n        motion_field_strength_y=motion_field_strength_y,\n        frame_ids=frame_ids,\n        device=latents.device,\n        dtype=latents.dtype,\n    )\n    warped_latents = latents.clone().detach()\n    for i in range(len(warped_latents)):\n        warped_latents[i] = warp_single_latent(latents[i][None], motion_field[i][None])\n    return warped_latents\n\n\nclass TextToVideoZeroPipeline(StableDiffusionPipeline):\n    r\"\"\"\n    Pipeline for zero-shot text-to-video generation using Stable Diffusion.\n\n    This model inherits from [`StableDiffusionPipeline`]. 
Check the superclass documentation for the generic methods\n    the library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    def __init__(\n        self,\n        vae: AutoencoderKL,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        unet: UNet2DConditionModel,\n        scheduler: KarrasDiffusionSchedulers,\n        safety_checker: StableDiffusionSafetyChecker,\n        feature_extractor: CLIPImageProcessor,\n        requires_safety_checker: bool = True,\n    ):\n        super().__init__(\n            vae, text_encoder, tokenizer, unet, scheduler, safety_checker, feature_extractor, requires_safety_checker\n        )\n        self.unet.set_attn_processor(CrossFrameAttnProcessor(batch_size=2))\n\n    def forward_loop(self, x_t0, t0, t1, generator):\n        \"\"\"\n        Perform ddpm forward process from time t0 to t1. 
This is the same as adding noise with corresponding variance.\n\n        Args:\n            x_t0: latent code at time t0\n            t0: t0\n            t1: t1\n            generator: torch.Generator object\n\n        Returns:\n            x_t1: forward process applied to x_t0 from time t0 to t1.\n        \"\"\"\n        eps = torch.randn(x_t0.size(), generator=generator, dtype=x_t0.dtype, device=x_t0.device)\n        alpha_vec = torch.prod(self.scheduler.alphas[t0:t1])\n        x_t1 = torch.sqrt(alpha_vec) * x_t0 + torch.sqrt(1 - alpha_vec) * eps\n        return x_t1\n\n    def backward_loop(\n        self,\n        latents,\n        timesteps,\n        prompt_embeds,\n        guidance_scale,\n        callback,\n        callback_steps,\n        num_warmup_steps,\n        extra_step_kwargs,\n        cross_attention_kwargs=None,\n    ):\n        \"\"\"\n        Perform backward process given list of time steps\n\n        Args:\n            latents: Latents at time timesteps[0].\n            timesteps: time steps, along which to perform backward process.\n            prompt_embeds: Pre-generated text embeddings\n            guidance_scale:\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            extra_step_kwargs: extra_step_kwargs.\n            cross_attention_kwargs: cross_attention_kwargs.\n            num_warmup_steps: number of warmup steps.\n\n        Returns:\n            latents: latents of backward process output at time timesteps[-1]\n        \"\"\"\n        do_classifier_free_guidance = guidance_scale > 1.0\n        num_steps = (len(timesteps) - num_warmup_steps) // self.scheduler.order\n        with self.progress_bar(total=num_steps) as progress_bar:\n            for i, t in enumerate(timesteps):\n                # expand the latents if we are doing classifier free guidance\n                latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n                latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n                # predict the noise residual\n                noise_pred = self.unet(\n                    latent_model_input,\n                    t,\n                    encoder_hidden_states=prompt_embeds,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                ).sample\n\n                # perform guidance\n                if do_classifier_free_guidance:\n                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n                # compute the previous noisy sample x_t -> x_t-1\n                latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n                # call the callback, if provided\n                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):\n                    progress_bar.update()\n                    if callback is not None and i % callback_steps == 0:\n                        callback(i, t, latents)\n        return latents.clone().detach()\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        video_length: Optional[int] = 8,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_videos_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        motion_field_strength_x: float = 12,\n        motion_field_strength_y: float = 12,\n        output_type: Optional[str] = \"tensor\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: Optional[int] = 1,\n        t0: int = 44,\n        t1: int = 47,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts to guide the image generation. 
If not defined, one has to pass `prompt_embeds`\n                instead.\n            video_length (`int`, *optional*, defaults to 8): The number of generated video frames.\n            height (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. If not defined, one has to pass\n                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is\n                less than `1`).\n            num_videos_per_prompt (`int`, *optional*, defaults to 1):\n                The number of videos to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"tensor\"`):\n                The output format of the generated image. Choose between `\"latent\"` and `\"tensor\"`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. 
If not specified, the callback will be\n                called at every step.\n            motion_field_strength_x (`float`, *optional*, defaults to 12):\n                Strength of motion in generated video along x-axis. See the [paper](https://arxiv.org/abs/2303.13439),\n                Sect. 3.3.1.\n            motion_field_strength_y (`float`, *optional*, defaults to 12):\n                Strength of motion in generated video along y-axis. See the [paper](https://arxiv.org/abs/2303.13439),\n                Sect. 3.3.1.\n            t0 (`int`, *optional*, defaults to 44):\n                Timestep t0. Should be in the range [0, num_inference_steps - 1]. See the\n                [paper](https://arxiv.org/abs/2303.13439), Sect. 3.3.1.\n            t1 (`int`, *optional*, defaults to 47):\n                Timestep t1. Should be in the range [t0 + 1, num_inference_steps - 1]. See the\n                [paper](https://arxiv.org/abs/2303.13439), Sect. 3.3.1.\n\n        Returns:\n            [`~pipelines.text_to_video_synthesis.TextToVideoPipelineOutput`]:\n                The output contains an ndarray of the generated images when output_type != 'latent', otherwise the\n                latent codes of the generated images, and a list of `bool`s denoting whether the corresponding generated\n                image likely represents \"not-safe-for-work\" (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        assert video_length > 0\n        frame_ids = list(range(video_length))\n\n        assert num_videos_per_prompt == 1\n\n        if isinstance(prompt, str):\n            prompt = [prompt]\n        if isinstance(negative_prompt, str):\n            negative_prompt = [negative_prompt]\n\n        # Default height and width to unet\n        height = height or self.unet.config.sample_size * self.vae_scale_factor\n        width = width or self.unet.config.sample_size * self.vae_scale_factor\n\n        # Check inputs. Raise error if not correct\n        self.check_inputs(prompt, height, width, callback_steps)\n\n        # Define call parameters\n        batch_size = 1 if isinstance(prompt, str) else len(prompt)\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . 
`guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt, device, num_videos_per_prompt, do_classifier_free_guidance, negative_prompt\n        )\n\n        # Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # Prepare latent variables\n        num_channels_latents = self.unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_videos_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n        # Prepare extra step kwargs.\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order\n\n        # Perform the first backward process up to time T_1\n        x_1_t1 = self.backward_loop(\n            timesteps=timesteps[: -t1 - 1],\n            prompt_embeds=prompt_embeds,\n            latents=latents,\n            guidance_scale=guidance_scale,\n            callback=callback,\n            callback_steps=callback_steps,\n            extra_step_kwargs=extra_step_kwargs,\n            num_warmup_steps=num_warmup_steps,\n        )\n        scheduler_copy = copy.deepcopy(self.scheduler)\n\n        # Perform the second backward process up to time T_0\n        x_1_t0 = self.backward_loop(\n            timesteps=timesteps[-t1 - 1 : -t0 - 1],\n            prompt_embeds=prompt_embeds,\n            latents=x_1_t1,\n            guidance_scale=guidance_scale,\n            callback=callback,\n            callback_steps=callback_steps,\n            extra_step_kwargs=extra_step_kwargs,\n            num_warmup_steps=0,\n        )\n\n        # Propagate first frame latents at time T_0 to remaining frames\n        x_2k_t0 = x_1_t0.repeat(video_length - 1, 1, 1, 1)\n\n        # Add motion in latents at time T_0\n        x_2k_t0 = create_motion_field_and_warp_latents(\n            motion_field_strength_x=motion_field_strength_x,\n            motion_field_strength_y=motion_field_strength_y,\n            latents=x_2k_t0,\n            frame_ids=frame_ids[1:],\n        )\n\n        # Perform forward process up to time T_1\n        x_2k_t1 = self.forward_loop(\n            x_t0=x_2k_t0,\n            t0=timesteps[-t0 - 1].item(),\n            t1=timesteps[-t1 - 1].item(),\n            generator=generator,\n        )\n\n        # Perform backward process from time T_1 to 0\n        x_1k_t1 = torch.cat([x_1_t1, x_2k_t1])\n        b, l, d = prompt_embeds.size()\n        prompt_embeds = prompt_embeds[:, None].repeat(1, video_length, 1, 1).reshape(b * video_length, l, d)\n\n        self.scheduler = scheduler_copy\n        x_1k_0 = self.backward_loop(\n            timesteps=timesteps[-t1 - 1 :],\n            prompt_embeds=prompt_embeds,\n            latents=x_1k_t1,\n            guidance_scale=guidance_scale,\n            callback=callback,\n            callback_steps=callback_steps,\n            extra_step_kwargs=extra_step_kwargs,\n            num_warmup_steps=0,\n        )\n        latents = x_1k_0\n\n        # manually for max memory savings\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            
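# denoising is finished, so the UNet can be moved to CPU to free GPU memory before VAE decoding\n            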
self.unet.to(\"cpu\")\n        torch.cuda.empty_cache()\n\n        if output_type == \"latent\":\n            image = latents\n            has_nsfw_concept = None\n        else:\n            image = self.decode_latents(latents)\n            # Run safety checker\n            image, has_nsfw_concept = self.run_safety_checker(image, device, prompt_embeds.dtype)\n\n        # Offload last model to CPU\n        if hasattr(self, \"final_offload_hook\") and self.final_offload_hook is not None:\n            self.final_offload_hook.offload()\n\n        if not return_dict:\n            return (image, has_nsfw_concept)\n\n        return TextToVideoPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)\n"
  },
  {
    "path": "diffusers/pipelines/unclip/__init__.py",
    "content": "from ...utils import (\n    OptionalDependencyNotAvailable,\n    is_torch_available,\n    is_transformers_available,\n    is_transformers_version,\n)\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_transformers_version(\">=\", \"4.25.0\")):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline\nelse:\n    from .pipeline_unclip import UnCLIPPipeline\n    from .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline\n    from .text_proj import UnCLIPTextProjModel\n"
  },
  {
    "path": "diffusers/pipelines/unclip/pipeline_unclip.py",
    "content": "# Copyright 2023 Kakao Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import List, Optional, Tuple, Union\n\nimport torch\nfrom torch.nn import functional as F\nfrom transformers import CLIPTextModelWithProjection, CLIPTokenizer\nfrom transformers.models.clip.modeling_clip import CLIPTextModelOutput\n\nfrom ...models import PriorTransformer, UNet2DConditionModel, UNet2DModel\nfrom ...pipelines import DiffusionPipeline\nfrom ...pipelines.pipeline_utils import ImagePipelineOutput\nfrom ...schedulers import UnCLIPScheduler\nfrom ...utils import is_accelerate_available, logging, randn_tensor\nfrom .text_proj import UnCLIPTextProjModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass UnCLIPPipeline(DiffusionPipeline):\n    \"\"\"\n    Pipeline for text-to-image generation using unCLIP\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        text_encoder ([`CLIPTextModelWithProjection`]):\n            Frozen text-encoder.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        prior ([`PriorTransformer`]):\n            The canonincal unCLIP prior to approximate the image embedding from the text embedding.\n        text_proj ([`UnCLIPTextProjModel`]):\n            Utility class to prepare and combine the embeddings before they are passed to the decoder.\n        decoder ([`UNet2DConditionModel`]):\n            The decoder to invert the image embedding into an image.\n        super_res_first ([`UNet2DModel`]):\n            Super resolution unet. Used in all but the last step of the super resolution diffusion process.\n        super_res_last ([`UNet2DModel`]):\n            Super resolution unet. Used in the last step of the super resolution diffusion process.\n        prior_scheduler ([`UnCLIPScheduler`]):\n            Scheduler used in the prior denoising process. Just a modified DDPMScheduler.\n        decoder_scheduler ([`UnCLIPScheduler`]):\n            Scheduler used in the decoder denoising process. Just a modified DDPMScheduler.\n        super_res_scheduler ([`UnCLIPScheduler`]):\n            Scheduler used in the super resolution denoising process. 
Just a modified DDPMScheduler.\n\n    \"\"\"\n\n    prior: PriorTransformer\n    decoder: UNet2DConditionModel\n    text_proj: UnCLIPTextProjModel\n    text_encoder: CLIPTextModelWithProjection\n    tokenizer: CLIPTokenizer\n    super_res_first: UNet2DModel\n    super_res_last: UNet2DModel\n\n    prior_scheduler: UnCLIPScheduler\n    decoder_scheduler: UnCLIPScheduler\n    super_res_scheduler: UnCLIPScheduler\n\n    def __init__(\n        self,\n        prior: PriorTransformer,\n        decoder: UNet2DConditionModel,\n        text_encoder: CLIPTextModelWithProjection,\n        tokenizer: CLIPTokenizer,\n        text_proj: UnCLIPTextProjModel,\n        super_res_first: UNet2DModel,\n        super_res_last: UNet2DModel,\n        prior_scheduler: UnCLIPScheduler,\n        decoder_scheduler: UnCLIPScheduler,\n        super_res_scheduler: UnCLIPScheduler,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            prior=prior,\n            decoder=decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            text_proj=text_proj,\n            super_res_first=super_res_first,\n            super_res_last=super_res_last,\n            prior_scheduler=prior_scheduler,\n            decoder_scheduler=decoder_scheduler,\n            super_res_scheduler=super_res_scheduler,\n        )\n\n    def prepare_latents(self, shape, dtype, device, generator, latents, scheduler):\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            if latents.shape != shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {shape}\")\n            latents = latents.to(device)\n\n        latents = latents * scheduler.init_noise_sigma\n        return latents\n\n    def _encode_prompt(\n        self,\n        prompt,\n        device,\n        num_images_per_prompt,\n        do_classifier_free_guidance,\n        text_model_output: Optional[Union[CLIPTextModelOutput, Tuple]] = None,\n        text_attention_mask: Optional[torch.Tensor] = None,\n    ):\n        if text_model_output is None:\n            batch_size = len(prompt) if isinstance(prompt, list) else 1\n            # get prompt text embeddings\n            text_inputs = self.tokenizer(\n                prompt,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            text_input_ids = text_inputs.input_ids\n            text_mask = text_inputs.attention_mask.bool().to(device)\n\n            untruncated_ids = self.tokenizer(prompt, padding=\"longest\", return_tensors=\"pt\").input_ids\n\n            if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(\n                text_input_ids, untruncated_ids\n            ):\n                removed_text = self.tokenizer.batch_decode(\n                    untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1]\n                )\n                logger.warning(\n                    \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                    f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n                )\n                text_input_ids = text_input_ids[:, : self.tokenizer.model_max_length]\n\n            text_encoder_output = self.text_encoder(text_input_ids.to(device))\n\n            prompt_embeds = 
text_encoder_output.text_embeds\n            text_encoder_hidden_states = text_encoder_output.last_hidden_state\n\n        else:\n            batch_size = text_model_output[0].shape[0]\n            prompt_embeds, text_encoder_hidden_states = text_model_output[0], text_model_output[1]\n            text_mask = text_attention_mask\n\n        prompt_embeds = prompt_embeds.repeat_interleave(num_images_per_prompt, dim=0)\n        text_encoder_hidden_states = text_encoder_hidden_states.repeat_interleave(num_images_per_prompt, dim=0)\n        text_mask = text_mask.repeat_interleave(num_images_per_prompt, dim=0)\n\n        if do_classifier_free_guidance:\n            uncond_tokens = [\"\"] * batch_size\n\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=self.tokenizer.model_max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            uncond_text_mask = uncond_input.attention_mask.bool().to(device)\n            negative_prompt_embeds_text_encoder_output = self.text_encoder(uncond_input.input_ids.to(device))\n\n            negative_prompt_embeds = negative_prompt_embeds_text_encoder_output.text_embeds\n            uncond_text_encoder_hidden_states = negative_prompt_embeds_text_encoder_output.last_hidden_state\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len)\n\n            seq_len = uncond_text_encoder_hidden_states.shape[1]\n            uncond_text_encoder_hidden_states = uncond_text_encoder_hidden_states.repeat(1, num_images_per_prompt, 1)\n            uncond_text_encoder_hidden_states = uncond_text_encoder_hidden_states.view(\n                batch_size * num_images_per_prompt, seq_len, -1\n            )\n            uncond_text_mask = uncond_text_mask.repeat_interleave(num_images_per_prompt, dim=0)\n\n            # done duplicates\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n            text_encoder_hidden_states = torch.cat([uncond_text_encoder_hidden_states, text_encoder_hidden_states])\n\n            text_mask = torch.cat([uncond_text_mask, text_mask])\n\n        return prompt_embeds, text_encoder_hidden_states, text_mask\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        # TODO: self.prior.post_process_latents is not covered by the offload hooks, so it fails if added to the list\n        models = [\n            self.decoder,\n            self.text_proj,\n            self.text_encoder,\n            self.super_res_first,\n            self.super_res_last,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if self.device != torch.device(\"meta\") or not hasattr(self.decoder, \"_hf_hook\"):\n            return self.device\n        for module in self.decoder.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: int = 1,\n        prior_num_inference_steps: int = 25,\n        decoder_num_inference_steps: int = 25,\n        super_res_num_inference_steps: int = 7,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        prior_latents: Optional[torch.FloatTensor] = None,\n        decoder_latents: Optional[torch.FloatTensor] = None,\n        super_res_latents: Optional[torch.FloatTensor] = None,\n        text_model_output: Optional[Union[CLIPTextModelOutput, Tuple]] = None,\n        text_attention_mask: Optional[torch.Tensor] = None,\n        prior_guidance_scale: float = 4.0,\n        decoder_guidance_scale: float = 8.0,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation. This can only be left undefined if\n                `text_model_output` and `text_attention_mask` is passed.\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            prior_num_inference_steps (`int`, *optional*, defaults to 25):\n                The number of denoising steps for the prior. More denoising steps usually lead to a higher quality\n                image at the expense of slower inference.\n            decoder_num_inference_steps (`int`, *optional*, defaults to 25):\n                The number of denoising steps for the decoder. 
More denoising steps usually lead to a higher quality\n                image at the expense of slower inference.\n            super_res_num_inference_steps (`int`, *optional*, defaults to 7):\n                The number of denoising steps for super resolution. More denoising steps usually lead to a higher\n                quality image at the expense of slower inference.\n            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            prior_latents (`torch.FloatTensor` of shape (batch size, embeddings dimension), *optional*):\n                Pre-generated noisy latents to be used as inputs for the prior.\n            decoder_latents (`torch.FloatTensor` of shape (batch size, channels, height, width), *optional*):\n                Pre-generated noisy latents to be used as inputs for the decoder.\n            super_res_latents (`torch.FloatTensor` of shape (batch size, channels, super res height, super res width), *optional*):\n                Pre-generated noisy latents to be used as inputs for the super resolution unet.\n            prior_guidance_scale (`float`, *optional*, defaults to 4.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            decoder_guidance_scale (`float`, *optional*, defaults to 8.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            text_model_output (`CLIPTextModelOutput`, *optional*):\n                Pre-defined CLIPTextModel outputs that can be derived from the text encoder. Pre-defined text outputs\n                can be passed for tasks like text embedding interpolations. Make sure to also pass\n                `text_attention_mask` in this case. `prompt` can then be left as `None`.\n            text_attention_mask (`torch.Tensor`, *optional*):\n                Pre-defined CLIP text attention mask that can be derived from the tokenizer. Pre-defined text attention\n                masks are necessary when passing `text_model_output`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. 
Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n        \"\"\"\n        if prompt is not None:\n            if isinstance(prompt, str):\n                batch_size = 1\n            elif isinstance(prompt, list):\n                batch_size = len(prompt)\n            else:\n                raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n        else:\n            batch_size = text_model_output[0].shape[0]\n\n        device = self._execution_device\n\n        batch_size = batch_size * num_images_per_prompt\n\n        do_classifier_free_guidance = prior_guidance_scale > 1.0 or decoder_guidance_scale > 1.0\n\n        prompt_embeds, text_encoder_hidden_states, text_mask = self._encode_prompt(\n            prompt, device, num_images_per_prompt, do_classifier_free_guidance, text_model_output, text_attention_mask\n        )\n\n        # prior\n\n        self.prior_scheduler.set_timesteps(prior_num_inference_steps, device=device)\n        prior_timesteps_tensor = self.prior_scheduler.timesteps\n\n        embedding_dim = self.prior.config.embedding_dim\n\n        prior_latents = self.prepare_latents(\n            (batch_size, embedding_dim),\n            prompt_embeds.dtype,\n            device,\n            generator,\n            prior_latents,\n            self.prior_scheduler,\n        )\n\n        for i, t in enumerate(self.progress_bar(prior_timesteps_tensor)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([prior_latents] * 2) if do_classifier_free_guidance else prior_latents\n\n            predicted_image_embedding = self.prior(\n                latent_model_input,\n                timestep=t,\n                proj_embedding=prompt_embeds,\n                encoder_hidden_states=text_encoder_hidden_states,\n                attention_mask=text_mask,\n            ).predicted_image_embedding\n\n            if do_classifier_free_guidance:\n                predicted_image_embedding_uncond, predicted_image_embedding_text = predicted_image_embedding.chunk(2)\n                predicted_image_embedding = predicted_image_embedding_uncond + prior_guidance_scale * (\n                    predicted_image_embedding_text - predicted_image_embedding_uncond\n                )\n\n            if i + 1 == prior_timesteps_tensor.shape[0]:\n                prev_timestep = None\n            else:\n                prev_timestep = prior_timesteps_tensor[i + 1]\n\n            prior_latents = self.prior_scheduler.step(\n                predicted_image_embedding,\n                timestep=t,\n                sample=prior_latents,\n                generator=generator,\n                prev_timestep=prev_timestep,\n            ).prev_sample\n\n        prior_latents = self.prior.post_process_latents(prior_latents)\n\n        image_embeddings = prior_latents\n\n        # done prior\n\n        # decoder\n\n        text_encoder_hidden_states, additive_clip_time_embeddings = self.text_proj(\n            image_embeddings=image_embeddings,\n            prompt_embeds=prompt_embeds,\n            text_encoder_hidden_states=text_encoder_hidden_states,\n            do_classifier_free_guidance=do_classifier_free_guidance,\n        )\n\n        if device.type == \"mps\":\n            # HACK: 
MPS: There is a panic when padding bool tensors,\n            # so cast to int tensor for the pad and back to bool afterwards\n            text_mask = text_mask.type(torch.int)\n            decoder_text_mask = F.pad(text_mask, (self.text_proj.clip_extra_context_tokens, 0), value=1)\n            decoder_text_mask = decoder_text_mask.type(torch.bool)\n        else:\n            decoder_text_mask = F.pad(text_mask, (self.text_proj.clip_extra_context_tokens, 0), value=True)\n\n        self.decoder_scheduler.set_timesteps(decoder_num_inference_steps, device=device)\n        decoder_timesteps_tensor = self.decoder_scheduler.timesteps\n\n        num_channels_latents = self.decoder.config.in_channels\n        height = self.decoder.config.sample_size\n        width = self.decoder.config.sample_size\n\n        decoder_latents = self.prepare_latents(\n            (batch_size, num_channels_latents, height, width),\n            text_encoder_hidden_states.dtype,\n            device,\n            generator,\n            decoder_latents,\n            self.decoder_scheduler,\n        )\n\n        for i, t in enumerate(self.progress_bar(decoder_timesteps_tensor)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([decoder_latents] * 2) if do_classifier_free_guidance else decoder_latents\n\n            noise_pred = self.decoder(\n                sample=latent_model_input,\n                timestep=t,\n                encoder_hidden_states=text_encoder_hidden_states,\n                class_labels=additive_clip_time_embeddings,\n                attention_mask=decoder_text_mask,\n            ).sample\n\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred_uncond, _ = noise_pred_uncond.split(latent_model_input.shape[1], dim=1)\n                noise_pred_text, predicted_variance = noise_pred_text.split(latent_model_input.shape[1], dim=1)\n                noise_pred = noise_pred_uncond + decoder_guidance_scale * (noise_pred_text - noise_pred_uncond)\n                noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n            if i + 1 == decoder_timesteps_tensor.shape[0]:\n                prev_timestep = None\n            else:\n                prev_timestep = decoder_timesteps_tensor[i + 1]\n\n            # compute the previous noisy sample x_t -> x_t-1\n            decoder_latents = self.decoder_scheduler.step(\n                noise_pred, t, decoder_latents, prev_timestep=prev_timestep, generator=generator\n            ).prev_sample\n\n        decoder_latents = decoder_latents.clamp(-1, 1)\n\n        image_small = decoder_latents\n\n        # done decoder\n\n        # super res\n\n        self.super_res_scheduler.set_timesteps(super_res_num_inference_steps, device=device)\n        super_res_timesteps_tensor = self.super_res_scheduler.timesteps\n\n        channels = self.super_res_first.config.in_channels // 2\n        height = self.super_res_first.config.sample_size\n        width = self.super_res_first.config.sample_size\n\n        super_res_latents = self.prepare_latents(\n            (batch_size, channels, height, width),\n            image_small.dtype,\n            device,\n            generator,\n            super_res_latents,\n            self.super_res_scheduler,\n        )\n\n        if device.type == \"mps\":\n            # MPS does not support many interpolations\n            image_upscaled = F.interpolate(image_small, 
size=[height, width])\n        else:\n            interpolate_antialias = {}\n            if \"antialias\" in inspect.signature(F.interpolate).parameters:\n                interpolate_antialias[\"antialias\"] = True\n\n            image_upscaled = F.interpolate(\n                image_small, size=[height, width], mode=\"bicubic\", align_corners=False, **interpolate_antialias\n            )\n\n        for i, t in enumerate(self.progress_bar(super_res_timesteps_tensor)):\n            # no classifier free guidance\n\n            if i == super_res_timesteps_tensor.shape[0] - 1:\n                unet = self.super_res_last\n            else:\n                unet = self.super_res_first\n\n            latent_model_input = torch.cat([super_res_latents, image_upscaled], dim=1)\n\n            noise_pred = unet(\n                sample=latent_model_input,\n                timestep=t,\n            ).sample\n\n            if i + 1 == super_res_timesteps_tensor.shape[0]:\n                prev_timestep = None\n            else:\n                prev_timestep = super_res_timesteps_tensor[i + 1]\n\n            # compute the previous noisy sample x_t -> x_t-1\n            super_res_latents = self.super_res_scheduler.step(\n                noise_pred, t, super_res_latents, prev_timestep=prev_timestep, generator=generator\n            ).prev_sample\n\n        image = super_res_latents\n        # done super res\n\n        # post processing\n\n        image = image * 0.5 + 0.5\n        image = image.clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/unclip/pipeline_unclip_image_variation.py",
    "content": "# Copyright 2023 Kakao Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nfrom typing import List, Optional, Union\n\nimport PIL\nimport torch\nfrom torch.nn import functional as F\nfrom transformers import (\n    CLIPImageProcessor,\n    CLIPTextModelWithProjection,\n    CLIPTokenizer,\n    CLIPVisionModelWithProjection,\n)\n\nfrom ...models import UNet2DConditionModel, UNet2DModel\nfrom ...pipelines import DiffusionPipeline, ImagePipelineOutput\nfrom ...schedulers import UnCLIPScheduler\nfrom ...utils import is_accelerate_available, logging, randn_tensor\nfrom .text_proj import UnCLIPTextProjModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass UnCLIPImageVariationPipeline(DiffusionPipeline):\n    \"\"\"\n    Pipeline to generate variations from an input image using unCLIP\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        text_encoder ([`CLIPTextModelWithProjection`]):\n            Frozen text-encoder.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `image_encoder`.\n        image_encoder ([`CLIPVisionModelWithProjection`]):\n            Frozen CLIP image-encoder. unCLIP Image Variation uses the vision portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPVisionModelWithProjection),\n            specifically the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        text_proj ([`UnCLIPTextProjModel`]):\n            Utility class to prepare and combine the embeddings before they are passed to the decoder.\n        decoder ([`UNet2DConditionModel`]):\n            The decoder to invert the image embedding into an image.\n        super_res_first ([`UNet2DModel`]):\n            Super resolution unet. Used in all but the last step of the super resolution diffusion process.\n        super_res_last ([`UNet2DModel`]):\n            Super resolution unet. Used in the last step of the super resolution diffusion process.\n        decoder_scheduler ([`UnCLIPScheduler`]):\n            Scheduler used in the decoder denoising process. Just a modified DDPMScheduler.\n        super_res_scheduler ([`UnCLIPScheduler`]):\n            Scheduler used in the super resolution denoising process. 
Just a modified DDPMScheduler.\n\n    \"\"\"\n\n    decoder: UNet2DConditionModel\n    text_proj: UnCLIPTextProjModel\n    text_encoder: CLIPTextModelWithProjection\n    tokenizer: CLIPTokenizer\n    feature_extractor: CLIPImageProcessor\n    image_encoder: CLIPVisionModelWithProjection\n    super_res_first: UNet2DModel\n    super_res_last: UNet2DModel\n\n    decoder_scheduler: UnCLIPScheduler\n    super_res_scheduler: UnCLIPScheduler\n\n    def __init__(\n        self,\n        decoder: UNet2DConditionModel,\n        text_encoder: CLIPTextModelWithProjection,\n        tokenizer: CLIPTokenizer,\n        text_proj: UnCLIPTextProjModel,\n        feature_extractor: CLIPImageProcessor,\n        image_encoder: CLIPVisionModelWithProjection,\n        super_res_first: UNet2DModel,\n        super_res_last: UNet2DModel,\n        decoder_scheduler: UnCLIPScheduler,\n        super_res_scheduler: UnCLIPScheduler,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            decoder=decoder,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            text_proj=text_proj,\n            feature_extractor=feature_extractor,\n            image_encoder=image_encoder,\n            super_res_first=super_res_first,\n            super_res_last=super_res_last,\n            decoder_scheduler=decoder_scheduler,\n            super_res_scheduler=super_res_scheduler,\n        )\n\n    # Copied from diffusers.pipelines.unclip.pipeline_unclip.UnCLIPPipeline.prepare_latents\n    def prepare_latents(self, shape, dtype, device, generator, latents, scheduler):\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            if latents.shape != shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {shape}\")\n            latents = latents.to(device)\n\n        latents = latents * scheduler.init_noise_sigma\n        return latents\n\n    def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance):\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        # get prompt text embeddings\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n        text_mask = text_inputs.attention_mask.bool().to(device)\n        text_encoder_output = self.text_encoder(text_input_ids.to(device))\n\n        prompt_embeds = text_encoder_output.text_embeds\n        text_encoder_hidden_states = text_encoder_output.last_hidden_state\n\n        prompt_embeds = prompt_embeds.repeat_interleave(num_images_per_prompt, dim=0)\n        text_encoder_hidden_states = text_encoder_hidden_states.repeat_interleave(num_images_per_prompt, dim=0)\n        text_mask = text_mask.repeat_interleave(num_images_per_prompt, dim=0)\n\n        if do_classifier_free_guidance:\n            uncond_tokens = [\"\"] * batch_size\n\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n            uncond_text_mask = uncond_input.attention_mask.bool().to(device)\n            negative_prompt_embeds_text_encoder_output = 
self.text_encoder(uncond_input.input_ids.to(device))\n\n            negative_prompt_embeds = negative_prompt_embeds_text_encoder_output.text_embeds\n            uncond_text_encoder_hidden_states = negative_prompt_embeds_text_encoder_output.last_hidden_state\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len)\n\n            seq_len = uncond_text_encoder_hidden_states.shape[1]\n            uncond_text_encoder_hidden_states = uncond_text_encoder_hidden_states.repeat(1, num_images_per_prompt, 1)\n            uncond_text_encoder_hidden_states = uncond_text_encoder_hidden_states.view(\n                batch_size * num_images_per_prompt, seq_len, -1\n            )\n            uncond_text_mask = uncond_text_mask.repeat_interleave(num_images_per_prompt, dim=0)\n\n            # done duplicates\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n            text_encoder_hidden_states = torch.cat([uncond_text_encoder_hidden_states, text_encoder_hidden_states])\n\n            text_mask = torch.cat([uncond_text_mask, text_mask])\n\n        return prompt_embeds, text_encoder_hidden_states, text_mask\n\n    def _encode_image(self, image, device, num_images_per_prompt, image_embeddings: Optional[torch.Tensor] = None):\n        dtype = next(self.image_encoder.parameters()).dtype\n\n        if image_embeddings is None:\n            if not isinstance(image, torch.Tensor):\n                image = self.feature_extractor(images=image, return_tensors=\"pt\").pixel_values\n\n            image = image.to(device=device, dtype=dtype)\n            image_embeddings = self.image_encoder(image).image_embeds\n\n        image_embeddings = image_embeddings.repeat_interleave(num_images_per_prompt, dim=0)\n\n        return image_embeddings\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, the pipeline's\n        models have their state dicts saved to CPU and then are moved to a `torch.device('meta') and loaded to GPU only\n        when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        models = [\n            self.decoder,\n            self.text_proj,\n            self.text_encoder,\n            self.super_res_first,\n            self.super_res_last,\n        ]\n        for cpu_offloaded_model in models:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.unclip.pipeline_unclip.UnCLIPPipeline._execution_device\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if self.device != torch.device(\"meta\") or not hasattr(self.decoder, \"_hf_hook\"):\n            return self.device\n        for module in self.decoder.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        image: Optional[Union[PIL.Image.Image, List[PIL.Image.Image], torch.FloatTensor]] = None,\n        num_images_per_prompt: int = 1,\n        decoder_num_inference_steps: int = 25,\n        super_res_num_inference_steps: int = 7,\n        generator: Optional[torch.Generator] = None,\n        decoder_latents: Optional[torch.FloatTensor] = None,\n        super_res_latents: Optional[torch.FloatTensor] = None,\n        image_embeddings: Optional[torch.Tensor] = None,\n        decoder_guidance_scale: float = 8.0,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n    ):\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            image (`PIL.Image.Image` or `List[PIL.Image.Image]` or `torch.FloatTensor`):\n                The image or images to guide the image generation. If you provide a tensor, it needs to comply with the\n                configuration of\n                [this](https://huggingface.co/fusing/karlo-image-variations-diffusers/blob/main/feature_extractor/preprocessor_config.json)\n                `CLIPImageProcessor`. Can be left to `None` only when `image_embeddings` are passed.\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            decoder_num_inference_steps (`int`, *optional*, defaults to 25):\n                The number of denoising steps for the decoder. More denoising steps usually lead to a higher quality\n                image at the expense of slower inference.\n            super_res_num_inference_steps (`int`, *optional*, defaults to 7):\n                The number of denoising steps for super resolution. More denoising steps usually lead to a higher\n                quality image at the expense of slower inference.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            decoder_latents (`torch.FloatTensor` of shape (batch size, channels, height, width), *optional*):\n                Pre-generated noisy latents to be used as inputs for the decoder.\n            super_res_latents (`torch.FloatTensor` of shape (batch size, channels, super res height, super res width), *optional*):\n                Pre-generated noisy latents to be used as inputs for the decoder.\n            decoder_guidance_scale (`float`, *optional*, defaults to 4.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). 
Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            image_embeddings (`torch.Tensor`, *optional*):\n                Pre-defined image embeddings that can be derived from the image encoder. Pre-defined image embeddings\n                can be passed for tasks like image interpolations. `image` can the be left to `None`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n        \"\"\"\n        if image is not None:\n            if isinstance(image, PIL.Image.Image):\n                batch_size = 1\n            elif isinstance(image, list):\n                batch_size = len(image)\n            else:\n                batch_size = image.shape[0]\n        else:\n            batch_size = image_embeddings.shape[0]\n\n        prompt = [\"\"] * batch_size\n\n        device = self._execution_device\n\n        batch_size = batch_size * num_images_per_prompt\n\n        do_classifier_free_guidance = decoder_guidance_scale > 1.0\n\n        prompt_embeds, text_encoder_hidden_states, text_mask = self._encode_prompt(\n            prompt, device, num_images_per_prompt, do_classifier_free_guidance\n        )\n\n        image_embeddings = self._encode_image(image, device, num_images_per_prompt, image_embeddings)\n\n        # decoder\n        text_encoder_hidden_states, additive_clip_time_embeddings = self.text_proj(\n            image_embeddings=image_embeddings,\n            prompt_embeds=prompt_embeds,\n            text_encoder_hidden_states=text_encoder_hidden_states,\n            do_classifier_free_guidance=do_classifier_free_guidance,\n        )\n\n        if device.type == \"mps\":\n            # HACK: MPS: There is a panic when padding bool tensors,\n            # so cast to int tensor for the pad and back to bool afterwards\n            text_mask = text_mask.type(torch.int)\n            decoder_text_mask = F.pad(text_mask, (self.text_proj.clip_extra_context_tokens, 0), value=1)\n            decoder_text_mask = decoder_text_mask.type(torch.bool)\n        else:\n            decoder_text_mask = F.pad(text_mask, (self.text_proj.clip_extra_context_tokens, 0), value=True)\n\n        self.decoder_scheduler.set_timesteps(decoder_num_inference_steps, device=device)\n        decoder_timesteps_tensor = self.decoder_scheduler.timesteps\n\n        num_channels_latents = self.decoder.config.in_channels\n        height = self.decoder.config.sample_size\n        width = self.decoder.config.sample_size\n\n        if decoder_latents is None:\n            decoder_latents = self.prepare_latents(\n                (batch_size, num_channels_latents, height, width),\n                text_encoder_hidden_states.dtype,\n                device,\n                generator,\n                decoder_latents,\n                self.decoder_scheduler,\n            )\n\n        for i, t in enumerate(self.progress_bar(decoder_timesteps_tensor)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([decoder_latents] * 2) 
if do_classifier_free_guidance else decoder_latents\n\n            noise_pred = self.decoder(\n                sample=latent_model_input,\n                timestep=t,\n                encoder_hidden_states=text_encoder_hidden_states,\n                class_labels=additive_clip_time_embeddings,\n                attention_mask=decoder_text_mask,\n            ).sample\n\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred_uncond, _ = noise_pred_uncond.split(latent_model_input.shape[1], dim=1)\n                noise_pred_text, predicted_variance = noise_pred_text.split(latent_model_input.shape[1], dim=1)\n                noise_pred = noise_pred_uncond + decoder_guidance_scale * (noise_pred_text - noise_pred_uncond)\n                noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)\n\n            if i + 1 == decoder_timesteps_tensor.shape[0]:\n                prev_timestep = None\n            else:\n                prev_timestep = decoder_timesteps_tensor[i + 1]\n\n            # compute the previous noisy sample x_t -> x_t-1\n            decoder_latents = self.decoder_scheduler.step(\n                noise_pred, t, decoder_latents, prev_timestep=prev_timestep, generator=generator\n            ).prev_sample\n\n        decoder_latents = decoder_latents.clamp(-1, 1)\n\n        image_small = decoder_latents\n\n        # done decoder\n\n        # super res\n\n        self.super_res_scheduler.set_timesteps(super_res_num_inference_steps, device=device)\n        super_res_timesteps_tensor = self.super_res_scheduler.timesteps\n\n        channels = self.super_res_first.config.in_channels // 2\n        height = self.super_res_first.config.sample_size\n        width = self.super_res_first.config.sample_size\n\n        if super_res_latents is None:\n            super_res_latents = self.prepare_latents(\n                (batch_size, channels, height, width),\n                image_small.dtype,\n                device,\n                generator,\n                super_res_latents,\n                self.super_res_scheduler,\n            )\n\n        if device.type == \"mps\":\n            # MPS does not support many interpolations\n            image_upscaled = F.interpolate(image_small, size=[height, width])\n        else:\n            interpolate_antialias = {}\n            if \"antialias\" in inspect.signature(F.interpolate).parameters:\n                interpolate_antialias[\"antialias\"] = True\n\n            image_upscaled = F.interpolate(\n                image_small, size=[height, width], mode=\"bicubic\", align_corners=False, **interpolate_antialias\n            )\n\n        for i, t in enumerate(self.progress_bar(super_res_timesteps_tensor)):\n            # no classifier free guidance\n\n            if i == super_res_timesteps_tensor.shape[0] - 1:\n                unet = self.super_res_last\n            else:\n                unet = self.super_res_first\n\n            latent_model_input = torch.cat([super_res_latents, image_upscaled], dim=1)\n\n            noise_pred = unet(\n                sample=latent_model_input,\n                timestep=t,\n            ).sample\n\n            if i + 1 == super_res_timesteps_tensor.shape[0]:\n                prev_timestep = None\n            else:\n                prev_timestep = super_res_timesteps_tensor[i + 1]\n\n            # compute the previous noisy sample x_t -> x_t-1\n            super_res_latents = self.super_res_scheduler.step(\n                
noise_pred, t, super_res_latents, prev_timestep=prev_timestep, generator=generator\n            ).prev_sample\n\n        image = super_res_latents\n\n        # done super res\n\n        # post processing\n\n        image = image * 0.5 + 0.5\n        image = image.clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/unclip/text_proj.py",
    "content": "# Copyright 2023 Kakao Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport torch\nfrom torch import nn\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...models import ModelMixin\n\n\nclass UnCLIPTextProjModel(ModelMixin, ConfigMixin):\n    \"\"\"\n    Utility class for CLIP embeddings. Used to combine the image and text embeddings into a format usable by the\n    decoder.\n\n    For more details, see the original paper: https://arxiv.org/abs/2204.06125 section 2.1\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        *,\n        clip_extra_context_tokens: int = 4,\n        clip_embeddings_dim: int = 768,\n        time_embed_dim: int,\n        cross_attention_dim,\n    ):\n        super().__init__()\n\n        self.learned_classifier_free_guidance_embeddings = nn.Parameter(torch.zeros(clip_embeddings_dim))\n\n        # parameters for additional clip time embeddings\n        self.embedding_proj = nn.Linear(clip_embeddings_dim, time_embed_dim)\n        self.clip_image_embeddings_project_to_time_embeddings = nn.Linear(clip_embeddings_dim, time_embed_dim)\n\n        # parameters for encoder hidden states\n        self.clip_extra_context_tokens = clip_extra_context_tokens\n        self.clip_extra_context_tokens_proj = nn.Linear(\n            clip_embeddings_dim, self.clip_extra_context_tokens * cross_attention_dim\n        )\n        self.encoder_hidden_states_proj = nn.Linear(clip_embeddings_dim, cross_attention_dim)\n        self.text_encoder_hidden_states_norm = nn.LayerNorm(cross_attention_dim)\n\n    def forward(self, *, image_embeddings, prompt_embeds, text_encoder_hidden_states, do_classifier_free_guidance):\n        if do_classifier_free_guidance:\n            # Add the classifier free guidance embeddings to the image embeddings\n            image_embeddings_batch_size = image_embeddings.shape[0]\n            classifier_free_guidance_embeddings = self.learned_classifier_free_guidance_embeddings.unsqueeze(0)\n            classifier_free_guidance_embeddings = classifier_free_guidance_embeddings.expand(\n                image_embeddings_batch_size, -1\n            )\n            image_embeddings = torch.cat([classifier_free_guidance_embeddings, image_embeddings], dim=0)\n\n        # The image embeddings batch size and the text embeddings batch size are equal\n        assert image_embeddings.shape[0] == prompt_embeds.shape[0]\n\n        batch_size = prompt_embeds.shape[0]\n\n        # \"Specifically, we modify the architecture described in Nichol et al. (2021) by projecting and\n        # adding CLIP embeddings to the existing timestep embedding, ...\n        time_projected_prompt_embeds = self.embedding_proj(prompt_embeds)\n        time_projected_image_embeddings = self.clip_image_embeddings_project_to_time_embeddings(image_embeddings)\n        additive_clip_time_embeddings = time_projected_image_embeddings + time_projected_prompt_embeds\n\n        # ... 
and by projecting CLIP embeddings into four\n        # extra tokens of context that are concatenated to the sequence of outputs from the GLIDE text encoder\"\n        clip_extra_context_tokens = self.clip_extra_context_tokens_proj(image_embeddings)\n        clip_extra_context_tokens = clip_extra_context_tokens.reshape(batch_size, -1, self.clip_extra_context_tokens)\n        clip_extra_context_tokens = clip_extra_context_tokens.permute(0, 2, 1)\n\n        text_encoder_hidden_states = self.encoder_hidden_states_proj(text_encoder_hidden_states)\n        text_encoder_hidden_states = self.text_encoder_hidden_states_norm(text_encoder_hidden_states)\n        text_encoder_hidden_states = torch.cat([clip_extra_context_tokens, text_encoder_hidden_states], dim=1)\n\n        return text_encoder_hidden_states, additive_clip_time_embeddings\n"
  },
  {
    "path": "diffusers/pipelines/versatile_diffusion/__init__.py",
    "content": "from ...utils import (\n    OptionalDependencyNotAvailable,\n    is_torch_available,\n    is_transformers_available,\n    is_transformers_version,\n)\n\n\ntry:\n    if not (is_transformers_available() and is_torch_available() and is_transformers_version(\">=\", \"4.25.0\")):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ...utils.dummy_torch_and_transformers_objects import (\n        VersatileDiffusionDualGuidedPipeline,\n        VersatileDiffusionImageVariationPipeline,\n        VersatileDiffusionPipeline,\n        VersatileDiffusionTextToImagePipeline,\n    )\nelse:\n    from .modeling_text_unet import UNetFlatConditionModel\n    from .pipeline_versatile_diffusion import VersatileDiffusionPipeline\n    from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline\n    from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline\n    from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline\n"
  },
  {
    "path": "diffusers/pipelines/versatile_diffusion/modeling_text_unet.py",
    "content": "from typing import Any, Dict, List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...models import ModelMixin\nfrom ...models.attention import Attention\nfrom ...models.attention_processor import (\n    AttentionProcessor,\n    AttnAddedKVProcessor,\n    AttnAddedKVProcessor2_0,\n    AttnProcessor,\n)\nfrom ...models.dual_transformer_2d import DualTransformer2DModel\nfrom ...models.embeddings import GaussianFourierProjection, TextTimeEmbedding, TimestepEmbedding, Timesteps\nfrom ...models.transformer_2d import Transformer2DModel\nfrom ...models.unet_2d_condition import UNet2DConditionOutput\nfrom ...utils import is_torch_version, logging\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef get_down_block(\n    down_block_type,\n    num_layers,\n    in_channels,\n    out_channels,\n    temb_channels,\n    add_downsample,\n    resnet_eps,\n    resnet_act_fn,\n    attn_num_head_channels,\n    resnet_groups=None,\n    cross_attention_dim=None,\n    downsample_padding=None,\n    dual_cross_attention=False,\n    use_linear_projection=False,\n    only_cross_attention=False,\n    upcast_attention=False,\n    resnet_time_scale_shift=\"default\",\n    resnet_skip_time_act=False,\n    resnet_out_scale_factor=1.0,\n    cross_attention_norm=None,\n):\n    down_block_type = down_block_type[7:] if down_block_type.startswith(\"UNetRes\") else down_block_type\n    if down_block_type == \"DownBlockFlat\":\n        return DownBlockFlat(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif down_block_type == \"CrossAttnDownBlockFlat\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for CrossAttnDownBlockFlat\")\n        return CrossAttnDownBlockFlat(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            temb_channels=temb_channels,\n            add_downsample=add_downsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            downsample_padding=downsample_padding,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            dual_cross_attention=dual_cross_attention,\n            use_linear_projection=use_linear_projection,\n            only_cross_attention=only_cross_attention,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    raise ValueError(f\"{down_block_type} is not supported.\")\n\n\ndef get_up_block(\n    up_block_type,\n    num_layers,\n    in_channels,\n    out_channels,\n    prev_output_channel,\n    temb_channels,\n    add_upsample,\n    resnet_eps,\n    resnet_act_fn,\n    attn_num_head_channels,\n    resnet_groups=None,\n    cross_attention_dim=None,\n    dual_cross_attention=False,\n    use_linear_projection=False,\n    only_cross_attention=False,\n    upcast_attention=False,\n    
resnet_time_scale_shift=\"default\",\n    resnet_skip_time_act=False,\n    resnet_out_scale_factor=1.0,\n    cross_attention_norm=None,\n):\n    up_block_type = up_block_type[7:] if up_block_type.startswith(\"UNetRes\") else up_block_type\n    if up_block_type == \"UpBlockFlat\":\n        return UpBlockFlat(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    elif up_block_type == \"CrossAttnUpBlockFlat\":\n        if cross_attention_dim is None:\n            raise ValueError(\"cross_attention_dim must be specified for CrossAttnUpBlockFlat\")\n        return CrossAttnUpBlockFlat(\n            num_layers=num_layers,\n            in_channels=in_channels,\n            out_channels=out_channels,\n            prev_output_channel=prev_output_channel,\n            temb_channels=temb_channels,\n            add_upsample=add_upsample,\n            resnet_eps=resnet_eps,\n            resnet_act_fn=resnet_act_fn,\n            resnet_groups=resnet_groups,\n            cross_attention_dim=cross_attention_dim,\n            attn_num_head_channels=attn_num_head_channels,\n            dual_cross_attention=dual_cross_attention,\n            use_linear_projection=use_linear_projection,\n            only_cross_attention=only_cross_attention,\n            resnet_time_scale_shift=resnet_time_scale_shift,\n        )\n    raise ValueError(f\"{up_block_type} is not supported.\")\n\n\n# Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel with UNet2DConditionModel->UNetFlatConditionModel, nn.Conv2d->LinearMultiDim, Block2D->BlockFlat\nclass UNetFlatConditionModel(ModelMixin, ConfigMixin):\n    r\"\"\"\n    UNetFlatConditionModel is a conditional 2D UNet model that takes in a noisy sample, conditional state, and a\n    timestep and returns sample shaped output.\n\n    This model inherits from [`ModelMixin`]. Check the superclass documentation for the generic methods the library\n    implements for all the models (such as downloading or saving, etc.)\n\n    Parameters:\n        sample_size (`int` or `Tuple[int, int]`, *optional*, defaults to `None`):\n            Height and width of input/output sample.\n        in_channels (`int`, *optional*, defaults to 4): The number of channels in the input sample.\n        out_channels (`int`, *optional*, defaults to 4): The number of channels in the output.\n        center_input_sample (`bool`, *optional*, defaults to `False`): Whether to center the input sample.\n        flip_sin_to_cos (`bool`, *optional*, defaults to `False`):\n            Whether to flip the sin to cos in the time embedding.\n        freq_shift (`int`, *optional*, defaults to 0): The frequency shift to apply to the time embedding.\n        down_block_types (`Tuple[str]`, *optional*, defaults to `(\"CrossAttnDownBlockFlat\", \"CrossAttnDownBlockFlat\", \"CrossAttnDownBlockFlat\", \"DownBlockFlat\")`):\n            The tuple of downsample blocks to use.\n        mid_block_type (`str`, *optional*, defaults to `\"UNetMidBlockFlatCrossAttn\"`):\n            The mid block type. 
Choose from `UNetMidBlockFlatCrossAttn` or `UNetMidBlockFlatSimpleCrossAttn`, will skip\n            the mid block layer if `None`.\n        up_block_types (`Tuple[str]`, *optional*, defaults to `(\"UpBlockFlat\", \"CrossAttnUpBlockFlat\", \"CrossAttnUpBlockFlat\", \"CrossAttnUpBlockFlat\",)`):\n            The tuple of upsample blocks to use.\n        only_cross_attention (`bool` or `Tuple[bool]`, *optional*, defaults to `False`):\n            Whether to include self-attention in the basic transformer blocks, see\n            [`~models.attention.BasicTransformerBlock`].\n        block_out_channels (`Tuple[int]`, *optional*, defaults to `(320, 640, 1280, 1280)`):\n            The tuple of output channels for each block.\n        layers_per_block (`int`, *optional*, defaults to 2): The number of layers per block.\n        downsample_padding (`int`, *optional*, defaults to 1): The padding to use for the downsampling convolution.\n        mid_block_scale_factor (`float`, *optional*, defaults to 1.0): The scale factor to use for the mid block.\n        act_fn (`str`, *optional*, defaults to `\"silu\"`): The activation function to use.\n        norm_num_groups (`int`, *optional*, defaults to 32): The number of groups to use for the normalization.\n            If `None`, it will skip the normalization and activation layers in post-processing\n        norm_eps (`float`, *optional*, defaults to 1e-5): The epsilon to use for the normalization.\n        cross_attention_dim (`int` or `Tuple[int]`, *optional*, defaults to 1280):\n            The dimension of the cross attention features.\n        encoder_hid_dim (`int`, *optional*, defaults to None):\n            If given, `encoder_hidden_states` will be projected from this dimension to `cross_attention_dim`.\n        attention_head_dim (`int`, *optional*, defaults to 8): The dimension of the attention heads.\n        resnet_time_scale_shift (`str`, *optional*, defaults to `\"default\"`): Time scale shift config\n            for resnet blocks, see [`~models.resnet.ResnetBlockFlat`]. Choose from `default` or `scale_shift`.\n        class_embed_type (`str`, *optional*, defaults to None):\n            The type of class embedding to use which is ultimately summed with the time embeddings. Choose from `None`,\n            `\"timestep\"`, `\"identity\"`, `\"projection\"`, or `\"simple_projection\"`.\n        addition_embed_type (`str`, *optional*, defaults to None):\n            Configures an optional embedding which will be summed with the time embeddings. Choose from `None` or\n            \"text\". \"text\" will use the `TextTimeEmbedding` layer.\n        num_class_embeds (`int`, *optional*, defaults to None):\n            Input dimension of the learnable embedding matrix to be projected to `time_embed_dim`, when performing\n            class conditioning with `class_embed_type` equal to `None`.\n        time_embedding_type (`str`, *optional*, defaults to `positional`):\n            The type of position embedding to use for timesteps. Choose from `positional` or `fourier`.\n        time_embedding_dim (`int`, *optional*, defaults to `None`):\n            An optional override for the dimension of the projected time embedding.\n        time_embedding_act_fn (`str`, *optional*, defaults to `None`):\n            Optional activation function to use only once on the time embeddings before they are passed to the rest\n            of the unet. 
Choose from `silu`, `mish`, `gelu`, and `swish`.\n        timestep_post_act (`str, *optional*, default to `None`):\n            The second activation function to use in timestep embedding. Choose from `silu`, `mish` and `gelu`.\n        time_cond_proj_dim (`int`, *optional*, default to `None`):\n            The dimension of `cond_proj` layer in timestep embedding.\n        conv_in_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_in` layer.\n        conv_out_kernel (`int`, *optional*, default to `3`): The kernel size of `conv_out` layer.\n        projection_class_embeddings_input_dim (`int`, *optional*): The dimension of the `class_labels` input when\n            using the \"projection\" `class_embed_type`. Required when using the \"projection\" `class_embed_type`.\n        class_embeddings_concat (`bool`, *optional*, defaults to `False`): Whether to concatenate the time\n            embeddings with the class embeddings.\n        mid_block_only_cross_attention (`bool`, *optional*, defaults to `None`):\n            Whether to use cross attention with the mid block when using the `UNetMidBlockFlatSimpleCrossAttn`. If\n            `only_cross_attention` is given as a single boolean and `mid_block_only_cross_attention` is None, the\n            `only_cross_attention` value will be used as the value for `mid_block_only_cross_attention`. Else, it will\n            default to `False`.\n    \"\"\"\n\n    _supports_gradient_checkpointing = True\n\n    @register_to_config\n    def __init__(\n        self,\n        sample_size: Optional[int] = None,\n        in_channels: int = 4,\n        out_channels: int = 4,\n        center_input_sample: bool = False,\n        flip_sin_to_cos: bool = True,\n        freq_shift: int = 0,\n        down_block_types: Tuple[str] = (\n            \"CrossAttnDownBlockFlat\",\n            \"CrossAttnDownBlockFlat\",\n            \"CrossAttnDownBlockFlat\",\n            \"DownBlockFlat\",\n        ),\n        mid_block_type: Optional[str] = \"UNetMidBlockFlatCrossAttn\",\n        up_block_types: Tuple[str] = (\n            \"UpBlockFlat\",\n            \"CrossAttnUpBlockFlat\",\n            \"CrossAttnUpBlockFlat\",\n            \"CrossAttnUpBlockFlat\",\n        ),\n        only_cross_attention: Union[bool, Tuple[bool]] = False,\n        block_out_channels: Tuple[int] = (320, 640, 1280, 1280),\n        layers_per_block: Union[int, Tuple[int]] = 2,\n        downsample_padding: int = 1,\n        mid_block_scale_factor: float = 1,\n        act_fn: str = \"silu\",\n        norm_num_groups: Optional[int] = 32,\n        norm_eps: float = 1e-5,\n        cross_attention_dim: Union[int, Tuple[int]] = 1280,\n        encoder_hid_dim: Optional[int] = None,\n        attention_head_dim: Union[int, Tuple[int]] = 8,\n        dual_cross_attention: bool = False,\n        use_linear_projection: bool = False,\n        class_embed_type: Optional[str] = None,\n        addition_embed_type: Optional[str] = None,\n        num_class_embeds: Optional[int] = None,\n        upcast_attention: bool = False,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_skip_time_act: bool = False,\n        resnet_out_scale_factor: int = 1.0,\n        time_embedding_type: str = \"positional\",\n        time_embedding_dim: Optional[int] = None,\n        time_embedding_act_fn: Optional[str] = None,\n        timestep_post_act: Optional[str] = None,\n        time_cond_proj_dim: Optional[int] = None,\n        conv_in_kernel: int = 3,\n        conv_out_kernel: int = 3,\n        
projection_class_embeddings_input_dim: Optional[int] = None,\n        class_embeddings_concat: bool = False,\n        mid_block_only_cross_attention: Optional[bool] = None,\n        cross_attention_norm: Optional[str] = None,\n        addition_embed_type_num_heads=64,\n    ):\n        super().__init__()\n\n        self.sample_size = sample_size\n\n        # Check inputs\n        if len(down_block_types) != len(up_block_types):\n            raise ValueError(\n                \"Must provide the same number of `down_block_types` as `up_block_types`. `down_block_types`:\"\n                f\" {down_block_types}. `up_block_types`: {up_block_types}.\"\n            )\n\n        if len(block_out_channels) != len(down_block_types):\n            raise ValueError(\n                \"Must provide the same number of `block_out_channels` as `down_block_types`. `block_out_channels`:\"\n                f\" {block_out_channels}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(only_cross_attention, bool) and len(only_cross_attention) != len(down_block_types):\n            raise ValueError(\n                \"Must provide the same number of `only_cross_attention` as `down_block_types`.\"\n                f\" `only_cross_attention`: {only_cross_attention}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(attention_head_dim, int) and len(attention_head_dim) != len(down_block_types):\n            raise ValueError(\n                \"Must provide the same number of `attention_head_dim` as `down_block_types`. `attention_head_dim`:\"\n                f\" {attention_head_dim}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if isinstance(cross_attention_dim, list) and len(cross_attention_dim) != len(down_block_types):\n            raise ValueError(\n                \"Must provide the same number of `cross_attention_dim` as `down_block_types`. `cross_attention_dim`:\"\n                f\" {cross_attention_dim}. `down_block_types`: {down_block_types}.\"\n            )\n\n        if not isinstance(layers_per_block, int) and len(layers_per_block) != len(down_block_types):\n            raise ValueError(\n                \"Must provide the same number of `layers_per_block` as `down_block_types`. `layers_per_block`:\"\n                f\" {layers_per_block}. `down_block_types`: {down_block_types}.\"\n            )\n\n        # input\n        conv_in_padding = (conv_in_kernel - 1) // 2\n        self.conv_in = LinearMultiDim(\n            in_channels, block_out_channels[0], kernel_size=conv_in_kernel, padding=conv_in_padding\n        )\n\n        # time\n        if time_embedding_type == \"fourier\":\n            time_embed_dim = time_embedding_dim or block_out_channels[0] * 2\n            if time_embed_dim % 2 != 0:\n                raise ValueError(f\"`time_embed_dim` should be divisible by 2, but is {time_embed_dim}.\")\n            self.time_proj = GaussianFourierProjection(\n                time_embed_dim // 2, set_W_to_weight=False, log=False, flip_sin_to_cos=flip_sin_to_cos\n            )\n            timestep_input_dim = time_embed_dim\n        elif time_embedding_type == \"positional\":\n            time_embed_dim = time_embedding_dim or block_out_channels[0] * 4\n\n            self.time_proj = Timesteps(block_out_channels[0], flip_sin_to_cos, freq_shift)\n            timestep_input_dim = block_out_channels[0]\n        else:\n            raise ValueError(\n                f\"{time_embedding_type} does not exist. 
Please make sure to use one of `fourier` or `positional`.\"\n            )\n\n        self.time_embedding = TimestepEmbedding(\n            timestep_input_dim,\n            time_embed_dim,\n            act_fn=act_fn,\n            post_act_fn=timestep_post_act,\n            cond_proj_dim=time_cond_proj_dim,\n        )\n\n        if encoder_hid_dim is not None:\n            self.encoder_hid_proj = nn.Linear(encoder_hid_dim, cross_attention_dim)\n        else:\n            self.encoder_hid_proj = None\n\n        # class embedding\n        if class_embed_type is None and num_class_embeds is not None:\n            self.class_embedding = nn.Embedding(num_class_embeds, time_embed_dim)\n        elif class_embed_type == \"timestep\":\n            self.class_embedding = TimestepEmbedding(timestep_input_dim, time_embed_dim, act_fn=act_fn)\n        elif class_embed_type == \"identity\":\n            self.class_embedding = nn.Identity(time_embed_dim, time_embed_dim)\n        elif class_embed_type == \"projection\":\n            if projection_class_embeddings_input_dim is None:\n                raise ValueError(\n                    \"`class_embed_type`: 'projection' requires `projection_class_embeddings_input_dim` be set\"\n                )\n            # The projection `class_embed_type` is the same as the timestep `class_embed_type` except\n            # 1. the `class_labels` inputs are not first converted to sinusoidal embeddings\n            # 2. it projects from an arbitrary input dimension.\n            #\n            # Note that `TimestepEmbedding` is quite general, being mainly linear layers and activations.\n            # When used for embedding actual timesteps, the timesteps are first converted to sinusoidal embeddings.\n            # As a result, `TimestepEmbedding` can be passed arbitrary vectors.\n            self.class_embedding = TimestepEmbedding(projection_class_embeddings_input_dim, time_embed_dim)\n        elif class_embed_type == \"simple_projection\":\n            if projection_class_embeddings_input_dim is None:\n                raise ValueError(\n                    \"`class_embed_type`: 'simple_projection' requires `projection_class_embeddings_input_dim` be set\"\n                )\n            self.class_embedding = nn.Linear(projection_class_embeddings_input_dim, time_embed_dim)\n        else:\n            self.class_embedding = None\n\n        if addition_embed_type == \"text\":\n            if encoder_hid_dim is not None:\n                text_time_embedding_from_dim = encoder_hid_dim\n            else:\n                text_time_embedding_from_dim = cross_attention_dim\n\n            self.add_embedding = TextTimeEmbedding(\n                text_time_embedding_from_dim, time_embed_dim, num_heads=addition_embed_type_num_heads\n            )\n        elif addition_embed_type is not None:\n            raise ValueError(f\"addition_embed_type: {addition_embed_type} must be None or 'text'.\")\n\n        if time_embedding_act_fn is None:\n            self.time_embed_act = None\n        elif time_embedding_act_fn == \"swish\":\n            self.time_embed_act = lambda x: F.silu(x)\n        elif time_embedding_act_fn == \"mish\":\n            self.time_embed_act = nn.Mish()\n        elif time_embedding_act_fn == \"silu\":\n            self.time_embed_act = nn.SiLU()\n        elif time_embedding_act_fn == \"gelu\":\n            self.time_embed_act = nn.GELU()\n        else:\n            raise ValueError(f\"Unsupported activation function: {time_embedding_act_fn}\")\n\n        
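# The down/mid/up blocks below are instantiated from the configured block-type names; the\n        # module lists start empty and are populated in the loops that follow.\n        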
self.down_blocks = nn.ModuleList([])\n        self.up_blocks = nn.ModuleList([])\n\n        if isinstance(only_cross_attention, bool):\n            if mid_block_only_cross_attention is None:\n                mid_block_only_cross_attention = only_cross_attention\n\n            only_cross_attention = [only_cross_attention] * len(down_block_types)\n\n        if mid_block_only_cross_attention is None:\n            mid_block_only_cross_attention = False\n\n        if isinstance(attention_head_dim, int):\n            attention_head_dim = (attention_head_dim,) * len(down_block_types)\n\n        if isinstance(cross_attention_dim, int):\n            cross_attention_dim = (cross_attention_dim,) * len(down_block_types)\n\n        if isinstance(layers_per_block, int):\n            layers_per_block = [layers_per_block] * len(down_block_types)\n\n        if class_embeddings_concat:\n            # The time embeddings are concatenated with the class embeddings. The dimension of the\n            # time embeddings passed to the down, middle, and up blocks is twice the dimension of the\n            # regular time embeddings\n            blocks_time_embed_dim = time_embed_dim * 2\n        else:\n            blocks_time_embed_dim = time_embed_dim\n\n        # down\n        output_channel = block_out_channels[0]\n        for i, down_block_type in enumerate(down_block_types):\n            input_channel = output_channel\n            output_channel = block_out_channels[i]\n            is_final_block = i == len(block_out_channels) - 1\n\n            down_block = get_down_block(\n                down_block_type,\n                num_layers=layers_per_block[i],\n                in_channels=input_channel,\n                out_channels=output_channel,\n                temb_channels=blocks_time_embed_dim,\n                add_downsample=not is_final_block,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=cross_attention_dim[i],\n                attn_num_head_channels=attention_head_dim[i],\n                downsample_padding=downsample_padding,\n                dual_cross_attention=dual_cross_attention,\n                use_linear_projection=use_linear_projection,\n                only_cross_attention=only_cross_attention[i],\n                upcast_attention=upcast_attention,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                resnet_skip_time_act=resnet_skip_time_act,\n                resnet_out_scale_factor=resnet_out_scale_factor,\n                cross_attention_norm=cross_attention_norm,\n            )\n            self.down_blocks.append(down_block)\n\n        # mid\n        if mid_block_type == \"UNetMidBlockFlatCrossAttn\":\n            self.mid_block = UNetMidBlockFlatCrossAttn(\n                in_channels=block_out_channels[-1],\n                temb_channels=blocks_time_embed_dim,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                output_scale_factor=mid_block_scale_factor,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                cross_attention_dim=cross_attention_dim[-1],\n                attn_num_head_channels=attention_head_dim[-1],\n                resnet_groups=norm_num_groups,\n                dual_cross_attention=dual_cross_attention,\n                use_linear_projection=use_linear_projection,\n                upcast_attention=upcast_attention,\n            )\n        elif mid_block_type 
== \"UNetMidBlockFlatSimpleCrossAttn\":\n            self.mid_block = UNetMidBlockFlatSimpleCrossAttn(\n                in_channels=block_out_channels[-1],\n                temb_channels=blocks_time_embed_dim,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                output_scale_factor=mid_block_scale_factor,\n                cross_attention_dim=cross_attention_dim[-1],\n                attn_num_head_channels=attention_head_dim[-1],\n                resnet_groups=norm_num_groups,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                skip_time_act=resnet_skip_time_act,\n                only_cross_attention=mid_block_only_cross_attention,\n                cross_attention_norm=cross_attention_norm,\n            )\n        elif mid_block_type is None:\n            self.mid_block = None\n        else:\n            raise ValueError(f\"unknown mid_block_type : {mid_block_type}\")\n\n        # count how many layers upsample the images\n        self.num_upsamplers = 0\n\n        # up\n        reversed_block_out_channels = list(reversed(block_out_channels))\n        reversed_attention_head_dim = list(reversed(attention_head_dim))\n        reversed_layers_per_block = list(reversed(layers_per_block))\n        reversed_cross_attention_dim = list(reversed(cross_attention_dim))\n        only_cross_attention = list(reversed(only_cross_attention))\n\n        output_channel = reversed_block_out_channels[0]\n        for i, up_block_type in enumerate(up_block_types):\n            is_final_block = i == len(block_out_channels) - 1\n\n            prev_output_channel = output_channel\n            output_channel = reversed_block_out_channels[i]\n            input_channel = reversed_block_out_channels[min(i + 1, len(block_out_channels) - 1)]\n\n            # add upsample block for all BUT final layer\n            if not is_final_block:\n                add_upsample = True\n                self.num_upsamplers += 1\n            else:\n                add_upsample = False\n\n            up_block = get_up_block(\n                up_block_type,\n                num_layers=reversed_layers_per_block[i] + 1,\n                in_channels=input_channel,\n                out_channels=output_channel,\n                prev_output_channel=prev_output_channel,\n                temb_channels=blocks_time_embed_dim,\n                add_upsample=add_upsample,\n                resnet_eps=norm_eps,\n                resnet_act_fn=act_fn,\n                resnet_groups=norm_num_groups,\n                cross_attention_dim=reversed_cross_attention_dim[i],\n                attn_num_head_channels=reversed_attention_head_dim[i],\n                dual_cross_attention=dual_cross_attention,\n                use_linear_projection=use_linear_projection,\n                only_cross_attention=only_cross_attention[i],\n                upcast_attention=upcast_attention,\n                resnet_time_scale_shift=resnet_time_scale_shift,\n                resnet_skip_time_act=resnet_skip_time_act,\n                resnet_out_scale_factor=resnet_out_scale_factor,\n                cross_attention_norm=cross_attention_norm,\n            )\n            self.up_blocks.append(up_block)\n            prev_output_channel = output_channel\n\n        # out\n        if norm_num_groups is not None:\n            self.conv_norm_out = nn.GroupNorm(\n                num_channels=block_out_channels[0], num_groups=norm_num_groups, eps=norm_eps\n            )\n\n            if act_fn == \"swish\":\n          
      self.conv_act = lambda x: F.silu(x)\n            elif act_fn == \"mish\":\n                self.conv_act = nn.Mish()\n            elif act_fn == \"silu\":\n                self.conv_act = nn.SiLU()\n            elif act_fn == \"gelu\":\n                self.conv_act = nn.GELU()\n            else:\n                raise ValueError(f\"Unsupported activation function: {act_fn}\")\n\n        else:\n            self.conv_norm_out = None\n            self.conv_act = None\n\n        conv_out_padding = (conv_out_kernel - 1) // 2\n        self.conv_out = LinearMultiDim(\n            block_out_channels[0], out_channels, kernel_size=conv_out_kernel, padding=conv_out_padding\n        )\n\n    @property\n    def attn_processors(self) -> Dict[str, AttentionProcessor]:\n        r\"\"\"\n        Returns:\n            `dict` of attention processors: A dictionary containing all attention processors used in the model,\n            indexed by its weight name.\n        \"\"\"\n        # set recursively\n        processors = {}\n\n        def fn_recursive_add_processors(name: str, module: torch.nn.Module, processors: Dict[str, AttentionProcessor]):\n            if hasattr(module, \"set_processor\"):\n                processors[f\"{name}.processor\"] = module.processor\n\n            for sub_name, child in module.named_children():\n                fn_recursive_add_processors(f\"{name}.{sub_name}\", child, processors)\n\n            return processors\n\n        for name, module in self.named_children():\n            fn_recursive_add_processors(name, module, processors)\n\n        return processors\n\n    def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]):\n        r\"\"\"\n        Parameters:\n            processor (`dict` of `AttentionProcessor` or `AttentionProcessor`):\n                The instantiated processor class or a dictionary of processor classes that will be set as the processor\n                of **all** `Attention` layers.\n            In case `processor` is a dict, the key needs to define the path to the corresponding cross attention\n            processor. This is strongly recommended when setting trainable attention processors.\n\n        \"\"\"\n        count = len(self.attn_processors.keys())\n\n        if isinstance(processor, dict) and len(processor) != count:\n            raise ValueError(\n                f\"A dict of processors was passed, but the number of processors {len(processor)} does not match the\"\n                f\" number of attention layers: {count}. 
Please make sure to pass {count} processor classes.\"\n            )\n\n        def fn_recursive_attn_processor(name: str, module: torch.nn.Module, processor):\n            if hasattr(module, \"set_processor\"):\n                if not isinstance(processor, dict):\n                    module.set_processor(processor)\n                else:\n                    module.set_processor(processor.pop(f\"{name}.processor\"))\n\n            for sub_name, child in module.named_children():\n                fn_recursive_attn_processor(f\"{name}.{sub_name}\", child, processor)\n\n        for name, module in self.named_children():\n            fn_recursive_attn_processor(name, module, processor)\n\n    def set_default_attn_processor(self):\n        \"\"\"\n        Disables custom attention processors and sets the default attention implementation.\n        \"\"\"\n        self.set_attn_processor(AttnProcessor())\n\n    def set_attention_slice(self, slice_size):\n        r\"\"\"\n        Enable sliced attention computation.\n\n        When this option is enabled, the attention module will split the input tensor in slices, to compute attention\n        in several steps. This is useful to save some memory in exchange for a small speed decrease.\n\n        Args:\n            slice_size (`str` or `int` or `list(int)`, *optional*, defaults to `\"auto\"`):\n                When `\"auto\"`, halves the input to the attention heads, so attention will be computed in two steps. If\n                `\"max\"`, maximum amount of memory will be saved by running only one slice at a time. If a number is\n                provided, uses as many slices as `attention_head_dim // slice_size`. In this case, `attention_head_dim`\n                must be a multiple of `slice_size`.\n        \"\"\"\n        sliceable_head_dims = []\n\n        def fn_recursive_retrieve_sliceable_dims(module: torch.nn.Module):\n            if hasattr(module, \"set_attention_slice\"):\n                sliceable_head_dims.append(module.sliceable_head_dim)\n\n            for child in module.children():\n                fn_recursive_retrieve_sliceable_dims(child)\n\n        # retrieve number of attention layers\n        for module in self.children():\n            fn_recursive_retrieve_sliceable_dims(module)\n\n        num_sliceable_layers = len(sliceable_head_dims)\n\n        if slice_size == \"auto\":\n            # half the attention head size is usually a good trade-off between\n            # speed and memory\n            slice_size = [dim // 2 for dim in sliceable_head_dims]\n        elif slice_size == \"max\":\n            # make smallest slice possible\n            slice_size = num_sliceable_layers * [1]\n\n        slice_size = num_sliceable_layers * [slice_size] if not isinstance(slice_size, list) else slice_size\n\n        if len(slice_size) != len(sliceable_head_dims):\n            raise ValueError(\n                f\"You have provided {len(slice_size)}, but {self.config} has {len(sliceable_head_dims)} different\"\n                f\" attention layers. 
Make sure to match `len(slice_size)` to be {len(sliceable_head_dims)}.\"\n            )\n\n        for i in range(len(slice_size)):\n            size = slice_size[i]\n            dim = sliceable_head_dims[i]\n            if size is not None and size > dim:\n                raise ValueError(f\"size {size} has to be smaller or equal to {dim}.\")\n\n        # Recursively walk through all the children.\n        # Any children which exposes the set_attention_slice method\n        # gets the message\n        def fn_recursive_set_attention_slice(module: torch.nn.Module, slice_size: List[int]):\n            if hasattr(module, \"set_attention_slice\"):\n                module.set_attention_slice(slice_size.pop())\n\n            for child in module.children():\n                fn_recursive_set_attention_slice(child, slice_size)\n\n        reversed_slice_size = list(reversed(slice_size))\n        for module in self.children():\n            fn_recursive_set_attention_slice(module, reversed_slice_size)\n\n    def _set_gradient_checkpointing(self, module, value=False):\n        if isinstance(module, (CrossAttnDownBlockFlat, DownBlockFlat, CrossAttnUpBlockFlat, UpBlockFlat)):\n            module.gradient_checkpointing = value\n\n    def forward(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[torch.Tensor, float, int],\n        encoder_hidden_states: torch.Tensor,\n        class_labels: Optional[torch.Tensor] = None,\n        timestep_cond: Optional[torch.Tensor] = None,\n        attention_mask: Optional[torch.Tensor] = None,\n        cross_attention_kwargs: Optional[Dict[str, Any]] = None,\n        down_block_additional_residuals: Optional[Tuple[torch.Tensor]] = None,\n        mid_block_additional_residual: Optional[torch.Tensor] = None,\n        return_dict: bool = True,\n    ) -> Union[UNet2DConditionOutput, Tuple]:\n        r\"\"\"\n        Args:\n            sample (`torch.FloatTensor`): (batch, channel, height, width) noisy inputs tensor\n            timestep (`torch.FloatTensor` or `float` or `int`): (batch) timesteps\n            encoder_hidden_states (`torch.FloatTensor`): (batch, sequence_length, feature_dim) encoder hidden states\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`models.unet_2d_condition.UNet2DConditionOutput`] instead of a plain tuple.\n            cross_attention_kwargs (`dict`, *optional*):\n                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under\n                `self.processor` in\n                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).\n\n        Returns:\n            [`~models.unet_2d_condition.UNet2DConditionOutput`] or `tuple`:\n            [`~models.unet_2d_condition.UNet2DConditionOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        # By default samples have to be AT least a multiple of the overall upsampling factor.\n        # The overall upsampling factor is equal to 2 ** (# num of upsampling layers).\n        # However, the upsampling interpolation output size can be forced to fit any upsampling size\n        # on the fly if necessary.\n        default_overall_up_factor = 2**self.num_upsamplers\n\n        # upsample size should be forwarded when sample is not a multiple of `default_overall_up_factor`\n        forward_upsample_size = False\n        upsample_size = None\n\n        if any(s % default_overall_up_factor != 0 for s in sample.shape[-2:]):\n            logger.info(\"Forward upsample size to force interpolation output size.\")\n            forward_upsample_size = True\n\n        # prepare attention_mask\n        if attention_mask is not None:\n            attention_mask = (1 - attention_mask.to(sample.dtype)) * -10000.0\n            attention_mask = attention_mask.unsqueeze(1)\n\n        # 0. center input if necessary\n        if self.config.center_input_sample:\n            sample = 2 * sample - 1.0\n\n        # 1. time\n        timesteps = timestep\n        if not torch.is_tensor(timesteps):\n            # TODO: this requires sync between CPU and GPU. So try to pass timesteps as tensors if you can\n            # This would be a good case for the `match` statement (Python 3.10+)\n            is_mps = sample.device.type == \"mps\"\n            if isinstance(timestep, float):\n                dtype = torch.float32 if is_mps else torch.float64\n            else:\n                dtype = torch.int32 if is_mps else torch.int64\n            timesteps = torch.tensor([timesteps], dtype=dtype, device=sample.device)\n        elif len(timesteps.shape) == 0:\n            timesteps = timesteps[None].to(sample.device)\n\n        # broadcast to batch dimension in a way that's compatible with ONNX/Core ML\n        timesteps = timesteps.expand(sample.shape[0])\n\n        t_emb = self.time_proj(timesteps)\n\n        # `Timesteps` does not contain any weights and will always return f32 tensors\n        # but time_embedding might actually be running in fp16. 
so we need to cast here.\n        # there might be better ways to encapsulate this.\n        t_emb = t_emb.to(dtype=sample.dtype)\n\n        emb = self.time_embedding(t_emb, timestep_cond)\n\n        if self.class_embedding is not None:\n            if class_labels is None:\n                raise ValueError(\"class_labels should be provided when num_class_embeds > 0\")\n\n            if self.config.class_embed_type == \"timestep\":\n                class_labels = self.time_proj(class_labels)\n\n                # `Timesteps` does not contain any weights and will always return f32 tensors\n                # there might be better ways to encapsulate this.\n                class_labels = class_labels.to(dtype=sample.dtype)\n\n            class_emb = self.class_embedding(class_labels).to(dtype=sample.dtype)\n\n            if self.config.class_embeddings_concat:\n                emb = torch.cat([emb, class_emb], dim=-1)\n            else:\n                emb = emb + class_emb\n\n        if self.config.addition_embed_type == \"text\":\n            aug_emb = self.add_embedding(encoder_hidden_states)\n            emb = emb + aug_emb\n\n        if self.time_embed_act is not None:\n            emb = self.time_embed_act(emb)\n\n        if self.encoder_hid_proj is not None:\n            encoder_hidden_states = self.encoder_hid_proj(encoder_hidden_states)\n\n        # 2. pre-process\n        sample = self.conv_in(sample)\n\n        # 3. down\n        down_block_res_samples = (sample,)\n        for downsample_block in self.down_blocks:\n            if hasattr(downsample_block, \"has_cross_attention\") and downsample_block.has_cross_attention:\n                sample, res_samples = downsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    encoder_hidden_states=encoder_hidden_states,\n                    attention_mask=attention_mask,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                )\n            else:\n                sample, res_samples = downsample_block(hidden_states=sample, temb=emb)\n\n            down_block_res_samples += res_samples\n\n        if down_block_additional_residuals is not None:\n            new_down_block_res_samples = ()\n\n            for down_block_res_sample, down_block_additional_residual in zip(\n                down_block_res_samples, down_block_additional_residuals\n            ):\n                down_block_res_sample = down_block_res_sample + down_block_additional_residual\n                new_down_block_res_samples = new_down_block_res_samples + (down_block_res_sample,)\n\n            down_block_res_samples = new_down_block_res_samples\n\n        # 4. mid\n        if self.mid_block is not None:\n            sample = self.mid_block(\n                sample,\n                emb,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=attention_mask,\n                cross_attention_kwargs=cross_attention_kwargs,\n            )\n\n        if mid_block_additional_residual is not None:\n            sample = sample + mid_block_additional_residual\n\n        # 5. 
up\n        for i, upsample_block in enumerate(self.up_blocks):\n            is_final_block = i == len(self.up_blocks) - 1\n\n            res_samples = down_block_res_samples[-len(upsample_block.resnets) :]\n            down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)]\n\n            # if we have not reached the final block and need to forward the\n            # upsample size, we do it here\n            if not is_final_block and forward_upsample_size:\n                upsample_size = down_block_res_samples[-1].shape[2:]\n\n            if hasattr(upsample_block, \"has_cross_attention\") and upsample_block.has_cross_attention:\n                sample = upsample_block(\n                    hidden_states=sample,\n                    temb=emb,\n                    res_hidden_states_tuple=res_samples,\n                    encoder_hidden_states=encoder_hidden_states,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    upsample_size=upsample_size,\n                    attention_mask=attention_mask,\n                )\n            else:\n                sample = upsample_block(\n                    hidden_states=sample, temb=emb, res_hidden_states_tuple=res_samples, upsample_size=upsample_size\n                )\n\n        # 6. post-process\n        if self.conv_norm_out:\n            sample = self.conv_norm_out(sample)\n            sample = self.conv_act(sample)\n        sample = self.conv_out(sample)\n\n        if not return_dict:\n            return (sample,)\n\n        return UNet2DConditionOutput(sample=sample)\n\n\nclass LinearMultiDim(nn.Linear):\n    def __init__(self, in_features, out_features=None, second_dim=4, *args, **kwargs):\n        in_features = [in_features, second_dim, 1] if isinstance(in_features, int) else list(in_features)\n        if out_features is None:\n            out_features = in_features\n        out_features = [out_features, second_dim, 1] if isinstance(out_features, int) else list(out_features)\n        self.in_features_multidim = in_features\n        self.out_features_multidim = out_features\n        super().__init__(np.array(in_features).prod(), np.array(out_features).prod())\n\n    def forward(self, input_tensor, *args, **kwargs):\n        shape = input_tensor.shape\n        n_dim = len(self.in_features_multidim)\n        input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_features)\n        output_tensor = super().forward(input_tensor)\n        output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_features_multidim)\n        return output_tensor\n\n\nclass ResnetBlockFlat(nn.Module):\n    def __init__(\n        self,\n        *,\n        in_channels,\n        out_channels=None,\n        dropout=0.0,\n        temb_channels=512,\n        groups=32,\n        groups_out=None,\n        pre_norm=True,\n        eps=1e-6,\n        time_embedding_norm=\"default\",\n        use_in_shortcut=None,\n        second_dim=4,\n        **kwargs,\n    ):\n        super().__init__()\n        self.pre_norm = pre_norm\n        self.pre_norm = True\n\n        in_channels = [in_channels, second_dim, 1] if isinstance(in_channels, int) else list(in_channels)\n        self.in_channels_prod = np.array(in_channels).prod()\n        self.channels_multidim = in_channels\n\n        if out_channels is not None:\n            out_channels = [out_channels, second_dim, 1] if isinstance(out_channels, int) else list(out_channels)\n            out_channels_prod = np.array(out_channels).prod()\n            
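# Track both the flattened channel count (used by the 1x1 convolutions) and the\n            # multi-dimensional shape (used to reshape the output in `forward`).\n            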
self.out_channels_multidim = out_channels\n        else:\n            out_channels_prod = self.in_channels_prod\n            self.out_channels_multidim = self.channels_multidim\n        self.time_embedding_norm = time_embedding_norm\n\n        if groups_out is None:\n            groups_out = groups\n\n        self.norm1 = torch.nn.GroupNorm(num_groups=groups, num_channels=self.in_channels_prod, eps=eps, affine=True)\n        self.conv1 = torch.nn.Conv2d(self.in_channels_prod, out_channels_prod, kernel_size=1, padding=0)\n\n        if temb_channels is not None:\n            self.time_emb_proj = torch.nn.Linear(temb_channels, out_channels_prod)\n        else:\n            self.time_emb_proj = None\n\n        self.norm2 = torch.nn.GroupNorm(num_groups=groups_out, num_channels=out_channels_prod, eps=eps, affine=True)\n        self.dropout = torch.nn.Dropout(dropout)\n        self.conv2 = torch.nn.Conv2d(out_channels_prod, out_channels_prod, kernel_size=1, padding=0)\n\n        self.nonlinearity = nn.SiLU()\n\n        self.use_in_shortcut = (\n            self.in_channels_prod != out_channels_prod if use_in_shortcut is None else use_in_shortcut\n        )\n\n        self.conv_shortcut = None\n        if self.use_in_shortcut:\n            self.conv_shortcut = torch.nn.Conv2d(\n                self.in_channels_prod, out_channels_prod, kernel_size=1, stride=1, padding=0\n            )\n\n    def forward(self, input_tensor, temb):\n        shape = input_tensor.shape\n        n_dim = len(self.channels_multidim)\n        input_tensor = input_tensor.reshape(*shape[0:-n_dim], self.in_channels_prod, 1, 1)\n        input_tensor = input_tensor.view(-1, self.in_channels_prod, 1, 1)\n\n        hidden_states = input_tensor\n\n        hidden_states = self.norm1(hidden_states)\n        hidden_states = self.nonlinearity(hidden_states)\n        hidden_states = self.conv1(hidden_states)\n\n        if temb is not None:\n            temb = self.time_emb_proj(self.nonlinearity(temb))[:, :, None, None]\n            hidden_states = hidden_states + temb\n\n        hidden_states = self.norm2(hidden_states)\n        hidden_states = self.nonlinearity(hidden_states)\n\n        hidden_states = self.dropout(hidden_states)\n        hidden_states = self.conv2(hidden_states)\n\n        if self.conv_shortcut is not None:\n            input_tensor = self.conv_shortcut(input_tensor)\n\n        output_tensor = input_tensor + hidden_states\n\n        output_tensor = output_tensor.view(*shape[0:-n_dim], -1)\n        output_tensor = output_tensor.view(*shape[0:-n_dim], *self.out_channels_multidim)\n\n        return output_tensor\n\n\n# Copied from diffusers.models.unet_2d_blocks.DownBlock2D with DownBlock2D->DownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim\nclass DownBlockFlat(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_downsample=True,\n        downsample_padding=1,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlockFlat(\n                    
in_channels=in_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    LinearMultiDim(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, temb=None):\n        output_states = ()\n\n        for resnet in self.resnets:\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n            output_states = output_states + (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states = output_states + (hidden_states,)\n\n        return hidden_states, output_states\n\n\n# Copied from diffusers.models.unet_2d_blocks.CrossAttnDownBlock2D with CrossAttnDownBlock2D->CrossAttnDownBlockFlat, ResnetBlock2D->ResnetBlockFlat, Downsample2D->LinearMultiDim\nclass CrossAttnDownBlockFlat(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        downsample_padding=1,\n        add_downsample=True,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        only_cross_attention=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        for i in range(num_layers):\n            in_channels = in_channels if i == 0 else out_channels\n            resnets.append(\n                ResnetBlockFlat(\n                    in_channels=in_channels,\n                    
out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            if not dual_cross_attention:\n                attentions.append(\n                    Transformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                        use_linear_projection=use_linear_projection,\n                        only_cross_attention=only_cross_attention,\n                        upcast_attention=upcast_attention,\n                    )\n                )\n            else:\n                attentions.append(\n                    DualTransformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                    )\n                )\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_downsample:\n            self.downsamplers = nn.ModuleList(\n                [\n                    LinearMultiDim(\n                        out_channels, use_conv=True, out_channels=out_channels, padding=downsample_padding, name=\"op\"\n                    )\n                ]\n            )\n        else:\n            self.downsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        # TODO(Patrick, William) - attention mask is not used\n        output_states = ()\n\n        for resnet, attn in zip(self.resnets, self.attentions):\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                        use_reentrant=False,\n                    )[0]\n                else:\n                    hidden_states = 
torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                    )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n            output_states = output_states + (hidden_states,)\n\n        if self.downsamplers is not None:\n            for downsampler in self.downsamplers:\n                hidden_states = downsampler(hidden_states)\n\n            output_states = output_states + (hidden_states,)\n\n        return hidden_states, output_states\n\n\n# Copied from diffusers.models.unet_2d_blocks.UpBlock2D with UpBlock2D->UpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim\nclass UpBlockFlat(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        prev_output_channel: int,\n        out_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        output_scale_factor=1.0,\n        add_upsample=True,\n    ):\n        super().__init__()\n        resnets = []\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlockFlat(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(self, hidden_states, res_hidden_states_tuple, temb=None, upsample_size=None):\n        for resnet in self.resnets:\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module):\n                    def custom_forward(*inputs):\n                        return 
module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n            else:\n                hidden_states = resnet(hidden_states, temb)\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, upsample_size)\n\n        return hidden_states\n\n\n# Copied from diffusers.models.unet_2d_blocks.CrossAttnUpBlock2D with CrossAttnUpBlock2D->CrossAttnUpBlockFlat, ResnetBlock2D->ResnetBlockFlat, Upsample2D->LinearMultiDim\nclass CrossAttnUpBlockFlat(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        out_channels: int,\n        prev_output_channel: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        cross_attention_dim=1280,\n        output_scale_factor=1.0,\n        add_upsample=True,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        only_cross_attention=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n        resnets = []\n        attentions = []\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n\n        for i in range(num_layers):\n            res_skip_channels = in_channels if (i == num_layers - 1) else out_channels\n            resnet_in_channels = prev_output_channel if i == 0 else out_channels\n\n            resnets.append(\n                ResnetBlockFlat(\n                    in_channels=resnet_in_channels + res_skip_channels,\n                    out_channels=out_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n            if not dual_cross_attention:\n                attentions.append(\n                    Transformer2DModel(\n                        attn_num_head_channels,\n                        out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                        use_linear_projection=use_linear_projection,\n                        only_cross_attention=only_cross_attention,\n                        upcast_attention=upcast_attention,\n                    )\n                )\n            else:\n                attentions.append(\n                    DualTransformer2DModel(\n                        attn_num_head_channels,\n                   
     out_channels // attn_num_head_channels,\n                        in_channels=out_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                    )\n                )\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n        if add_upsample:\n            self.upsamplers = nn.ModuleList([LinearMultiDim(out_channels, use_conv=True, out_channels=out_channels)])\n        else:\n            self.upsamplers = None\n\n        self.gradient_checkpointing = False\n\n    def forward(\n        self,\n        hidden_states,\n        res_hidden_states_tuple,\n        temb=None,\n        encoder_hidden_states=None,\n        cross_attention_kwargs=None,\n        upsample_size=None,\n        attention_mask=None,\n    ):\n        # TODO(Patrick, William) - attention mask is not used\n        for resnet, attn in zip(self.resnets, self.attentions):\n            # pop res hidden states\n            res_hidden_states = res_hidden_states_tuple[-1]\n            res_hidden_states_tuple = res_hidden_states_tuple[:-1]\n            hidden_states = torch.cat([hidden_states, res_hidden_states], dim=1)\n\n            if self.training and self.gradient_checkpointing:\n\n                def create_custom_forward(module, return_dict=None):\n                    def custom_forward(*inputs):\n                        if return_dict is not None:\n                            return module(*inputs, return_dict=return_dict)\n                        else:\n                            return module(*inputs)\n\n                    return custom_forward\n\n                if is_torch_version(\">=\", \"1.11.0\"):\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb, use_reentrant=False\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                        use_reentrant=False,\n                    )[0]\n                else:\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(resnet), hidden_states, temb\n                    )\n                    hidden_states = torch.utils.checkpoint.checkpoint(\n                        create_custom_forward(attn, return_dict=False),\n                        hidden_states,\n                        encoder_hidden_states,\n                        cross_attention_kwargs,\n                    )[0]\n            else:\n                hidden_states = resnet(hidden_states, temb)\n                hidden_states = attn(\n                    hidden_states,\n                    encoder_hidden_states=encoder_hidden_states,\n                    cross_attention_kwargs=cross_attention_kwargs,\n                    return_dict=False,\n                )[0]\n\n        if self.upsamplers is not None:\n            for upsampler in self.upsamplers:\n                hidden_states = upsampler(hidden_states, upsample_size)\n\n        return hidden_states\n\n\n# Copied from diffusers.models.unet_2d_blocks.UNetMidBlock2DCrossAttn with UNetMidBlock2DCrossAttn->UNetMidBlockFlatCrossAttn, ResnetBlock2D->ResnetBlockFlat\nclass 
UNetMidBlockFlatCrossAttn(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        cross_attention_dim=1280,\n        dual_cross_attention=False,\n        use_linear_projection=False,\n        upcast_attention=False,\n    ):\n        super().__init__()\n\n        self.has_cross_attention = True\n        self.attn_num_head_channels = attn_num_head_channels\n        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)\n\n        # there is always at least one resnet\n        resnets = [\n            ResnetBlockFlat(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=resnet_groups,\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n            )\n        ]\n        attentions = []\n\n        for _ in range(num_layers):\n            if not dual_cross_attention:\n                attentions.append(\n                    Transformer2DModel(\n                        attn_num_head_channels,\n                        in_channels // attn_num_head_channels,\n                        in_channels=in_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                        use_linear_projection=use_linear_projection,\n                        upcast_attention=upcast_attention,\n                    )\n                )\n            else:\n                attentions.append(\n                    DualTransformer2DModel(\n                        attn_num_head_channels,\n                        in_channels // attn_num_head_channels,\n                        in_channels=in_channels,\n                        num_layers=1,\n                        cross_attention_dim=cross_attention_dim,\n                        norm_num_groups=resnet_groups,\n                    )\n                )\n            resnets.append(\n                ResnetBlockFlat(\n                    in_channels=in_channels,\n                    out_channels=in_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                )\n            )\n\n        self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            hidden_states = 
attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                cross_attention_kwargs=cross_attention_kwargs,\n                return_dict=False,\n            )[0]\n            hidden_states = resnet(hidden_states, temb)\n\n        return hidden_states\n\n\n# Copied from diffusers.models.unet_2d_blocks.UNetMidBlock2DSimpleCrossAttn with UNetMidBlock2DSimpleCrossAttn->UNetMidBlockFlatSimpleCrossAttn, ResnetBlock2D->ResnetBlockFlat\nclass UNetMidBlockFlatSimpleCrossAttn(nn.Module):\n    def __init__(\n        self,\n        in_channels: int,\n        temb_channels: int,\n        dropout: float = 0.0,\n        num_layers: int = 1,\n        resnet_eps: float = 1e-6,\n        resnet_time_scale_shift: str = \"default\",\n        resnet_act_fn: str = \"swish\",\n        resnet_groups: int = 32,\n        resnet_pre_norm: bool = True,\n        attn_num_head_channels=1,\n        output_scale_factor=1.0,\n        cross_attention_dim=1280,\n        skip_time_act=False,\n        only_cross_attention=False,\n        cross_attention_norm=None,\n    ):\n        super().__init__()\n\n        self.has_cross_attention = True\n\n        self.attn_num_head_channels = attn_num_head_channels\n        resnet_groups = resnet_groups if resnet_groups is not None else min(in_channels // 4, 32)\n\n        self.num_heads = in_channels // self.attn_num_head_channels\n\n        # there is always at least one resnet\n        resnets = [\n            ResnetBlockFlat(\n                in_channels=in_channels,\n                out_channels=in_channels,\n                temb_channels=temb_channels,\n                eps=resnet_eps,\n                groups=resnet_groups,\n                dropout=dropout,\n                time_embedding_norm=resnet_time_scale_shift,\n                non_linearity=resnet_act_fn,\n                output_scale_factor=output_scale_factor,\n                pre_norm=resnet_pre_norm,\n                skip_time_act=skip_time_act,\n            )\n        ]\n        attentions = []\n\n        for _ in range(num_layers):\n            processor = (\n                AttnAddedKVProcessor2_0() if hasattr(F, \"scaled_dot_product_attention\") else AttnAddedKVProcessor()\n            )\n\n            attentions.append(\n                Attention(\n                    query_dim=in_channels,\n                    cross_attention_dim=in_channels,\n                    heads=self.num_heads,\n                    dim_head=attn_num_head_channels,\n                    added_kv_proj_dim=cross_attention_dim,\n                    norm_num_groups=resnet_groups,\n                    bias=True,\n                    upcast_softmax=True,\n                    only_cross_attention=only_cross_attention,\n                    cross_attention_norm=cross_attention_norm,\n                    processor=processor,\n                )\n            )\n            resnets.append(\n                ResnetBlockFlat(\n                    in_channels=in_channels,\n                    out_channels=in_channels,\n                    temb_channels=temb_channels,\n                    eps=resnet_eps,\n                    groups=resnet_groups,\n                    dropout=dropout,\n                    time_embedding_norm=resnet_time_scale_shift,\n                    non_linearity=resnet_act_fn,\n                    output_scale_factor=output_scale_factor,\n                    pre_norm=resnet_pre_norm,\n                    skip_time_act=skip_time_act,\n                )\n            )\n\n        
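# Register the interleaved attention / resnet stacks as submodules so their\n        # parameters are tracked and moved with the model.\n        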
self.attentions = nn.ModuleList(attentions)\n        self.resnets = nn.ModuleList(resnets)\n\n    def forward(\n        self, hidden_states, temb=None, encoder_hidden_states=None, attention_mask=None, cross_attention_kwargs=None\n    ):\n        cross_attention_kwargs = cross_attention_kwargs if cross_attention_kwargs is not None else {}\n        hidden_states = self.resnets[0](hidden_states, temb)\n        for attn, resnet in zip(self.attentions, self.resnets[1:]):\n            # attn\n            hidden_states = attn(\n                hidden_states,\n                encoder_hidden_states=encoder_hidden_states,\n                attention_mask=attention_mask,\n                **cross_attention_kwargs,\n            )\n\n            # resnet\n            hidden_states = resnet(hidden_states, temb)\n\n        return hidden_states\n"
  },
  {
    "path": "diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion.py",
    "content": "import inspect\nfrom typing import Callable, List, Optional, Union\n\nimport PIL.Image\nimport torch\nfrom transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer, CLIPVisionModel\n\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import logging\nfrom ..pipeline_utils import DiffusionPipeline\nfrom .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline\nfrom .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline\nfrom .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass VersatileDiffusionPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-image generation using Stable Diffusion.\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vae ([`AutoencoderKL`]):\n            Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. Stable Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n        safety_checker ([`StableDiffusionMegaSafetyChecker`]):\n            Classification module that estimates whether generated images could be considered offensive or harmful.\n            Please, refer to the [model card](https://huggingface.co/runwayml/stable-diffusion-v1-5) for details.\n        feature_extractor ([`CLIPImageProcessor`]):\n            Model that extracts features from generated images to be used as inputs for the `safety_checker`.\n    \"\"\"\n\n    tokenizer: CLIPTokenizer\n    image_feature_extractor: CLIPImageProcessor\n    text_encoder: CLIPTextModel\n    image_encoder: CLIPVisionModel\n    image_unet: UNet2DConditionModel\n    text_unet: UNet2DConditionModel\n    vae: AutoencoderKL\n    scheduler: KarrasDiffusionSchedulers\n\n    def __init__(\n        self,\n        tokenizer: CLIPTokenizer,\n        image_feature_extractor: CLIPImageProcessor,\n        text_encoder: CLIPTextModel,\n        image_encoder: CLIPVisionModel,\n        image_unet: UNet2DConditionModel,\n        text_unet: UNet2DConditionModel,\n        vae: AutoencoderKL,\n        scheduler: KarrasDiffusionSchedulers,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            tokenizer=tokenizer,\n            image_feature_extractor=image_feature_extractor,\n            text_encoder=text_encoder,\n            image_encoder=image_encoder,\n            image_unet=image_unet,\n            text_unet=text_unet,\n            vae=vae,\n            scheduler=scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n\n    @torch.no_grad()\n    def image_variation(\n        self,\n        image: Union[torch.FloatTensor, PIL.Image.Image],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            image (`PIL.Image.Image`, `List[PIL.Image.Image]` or `torch.Tensor`):\n                The image prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. 
of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import VersatileDiffusionPipeline\n        >>> import torch\n        >>> import requests\n        >>> from io import BytesIO\n        >>> from PIL import Image\n\n        >>> # let's download an initial image\n        >>> url = \"https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg\"\n\n        >>> response = requests.get(url)\n        >>> image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n        >>> pipe = VersatileDiffusionPipeline.from_pretrained(\n        ...     \"shi-labs/versatile-diffusion\", torch_dtype=torch.float16\n        ... 
)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> generator = torch.Generator(device=\"cuda\").manual_seed(0)\n        >>> image = pipe.image_variation(image, generator=generator).images[0]\n        >>> image.save(\"./car_variation.png\")\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        expected_components = inspect.signature(VersatileDiffusionImageVariationPipeline.__init__).parameters.keys()\n        components = {name: component for name, component in self.components.items() if name in expected_components}\n        return VersatileDiffusionImageVariationPipeline(**components)(\n            image=image,\n            height=height,\n            width=width,\n            num_inference_steps=num_inference_steps,\n            guidance_scale=guidance_scale,\n            negative_prompt=negative_prompt,\n            num_images_per_prompt=num_images_per_prompt,\n            eta=eta,\n            generator=generator,\n            latents=latents,\n            output_type=output_type,\n            return_dict=return_dict,\n            callback=callback,\n            callback_steps=callback_steps,\n        )\n\n    @torch.no_grad()\n    def text_to_image(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). 
Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import VersatileDiffusionPipeline\n        >>> import torch\n\n        >>> pipe = VersatileDiffusionPipeline.from_pretrained(\n        ...     \"shi-labs/versatile-diffusion\", torch_dtype=torch.float16\n        ... 
)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> generator = torch.Generator(device=\"cuda\").manual_seed(0)\n        >>> image = pipe.text_to_image(\"an astronaut riding on a horse on mars\", generator=generator).images[0]\n        >>> image.save(\"./astronaut.png\")\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.\n            When returning a tuple, the first element is a list with the generated images, and the second element is a\n            list of `bool`s denoting whether the corresponding generated image likely represents \"not-safe-for-work\"\n            (nsfw) content, according to the `safety_checker`.\n        \"\"\"\n        expected_components = inspect.signature(VersatileDiffusionTextToImagePipeline.__init__).parameters.keys()\n        components = {name: component for name, component in self.components.items() if name in expected_components}\n        temp_pipeline = VersatileDiffusionTextToImagePipeline(**components)\n        output = temp_pipeline(\n            prompt=prompt,\n            height=height,\n            width=width,\n            num_inference_steps=num_inference_steps,\n            guidance_scale=guidance_scale,\n            negative_prompt=negative_prompt,\n            num_images_per_prompt=num_images_per_prompt,\n            eta=eta,\n            generator=generator,\n            latents=latents,\n            output_type=output_type,\n            return_dict=return_dict,\n            callback=callback,\n            callback_steps=callback_steps,\n        )\n        # swap the attention blocks back to the original state\n        temp_pipeline._swap_unet_attention_blocks()\n\n        return output\n\n    @torch.no_grad()\n    def dual_guided(\n        self,\n        prompt: Union[PIL.Image.Image, List[PIL.Image.Image]],\n        image: Union[str, List[str]],\n        text_to_image_strength: float = 0.5,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import VersatileDiffusionPipeline\n        >>> import torch\n        >>> import requests\n        >>> from io import BytesIO\n        >>> from PIL import Image\n\n        >>> # let's download an initial image\n        >>> url = \"https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg\"\n\n        >>> response = requests.get(url)\n        >>> image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> text = \"a red car in the sun\"\n\n        >>> pipe = VersatileDiffusionPipeline.from_pretrained(\n        ...     
\"shi-labs/versatile-diffusion\", torch_dtype=torch.float16\n        ... )\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> generator = torch.Generator(device=\"cuda\").manual_seed(0)\n        >>> text_to_image_strength = 0.75\n\n        >>> image = pipe.dual_guided(\n        ...     prompt=text, image=image, text_to_image_strength=text_to_image_strength, generator=generator\n        ... ).images[0]\n        >>> image.save(\"./car_variation.png\")\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.ImagePipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n\n        expected_components = inspect.signature(VersatileDiffusionDualGuidedPipeline.__init__).parameters.keys()\n        components = {name: component for name, component in self.components.items() if name in expected_components}\n        temp_pipeline = VersatileDiffusionDualGuidedPipeline(**components)\n        output = temp_pipeline(\n            prompt=prompt,\n            image=image,\n            text_to_image_strength=text_to_image_strength,\n            height=height,\n            width=width,\n            num_inference_steps=num_inference_steps,\n            guidance_scale=guidance_scale,\n            num_images_per_prompt=num_images_per_prompt,\n            eta=eta,\n            generator=generator,\n            latents=latents,\n            output_type=output_type,\n            return_dict=return_dict,\n            callback=callback,\n            callback_steps=callback_steps,\n        )\n        temp_pipeline._revert_dual_attention()\n\n        return output\n"
  },
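  {
    "path": "examples/versatile_diffusion_usage_sketch.py",
    "content": "# Hypothetical usage sketch, added for illustration only -- not part of the upstream diffusers sources.\n# It simply replays the examples embedded in the VersatileDiffusionPipeline docstrings above and assumes\n# the `shi-labs/versatile-diffusion` checkpoint and a CUDA device are available.\nfrom io import BytesIO\n\nimport requests\nimport torch\nfrom PIL import Image\n\nfrom diffusers import VersatileDiffusionPipeline\n\n# load the multi-task pipeline once; each helper below re-assembles the matching sub-pipeline\npipe = VersatileDiffusionPipeline.from_pretrained(\"shi-labs/versatile-diffusion\", torch_dtype=torch.float16)\npipe = pipe.to(\"cuda\")\ngenerator = torch.Generator(device=\"cuda\").manual_seed(0)\n\n# text-to-image\nimage = pipe.text_to_image(\"an astronaut riding on a horse on mars\", generator=generator).images[0]\nimage.save(\"./astronaut.png\")\n\n# image variation, starting from a downloaded image\nurl = \"https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg\"\ninit_image = Image.open(BytesIO(requests.get(url).content)).convert(\"RGB\")\nvariation = pipe.image_variation(init_image, generator=generator).images[0]\nvariation.save(\"./car_variation.png\")\n\n# dual-guided generation, mixing a text prompt with an image prompt\ndual = pipe.dual_guided(\n    prompt=\"a red car in the sun\",\n    image=init_image,\n    text_to_image_strength=0.75,\n    generator=generator,\n).images[0]\ndual.save(\"./car_dual_guided.png\")\n"
  },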
  {
    "path": "diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_dual_guided.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Tuple, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.utils.checkpoint\nfrom transformers import (\n    CLIPImageProcessor,\n    CLIPTextModelWithProjection,\n    CLIPTokenizer,\n    CLIPVisionModelWithProjection,\n)\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, DualTransformer2DModel, Transformer2DModel, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\nfrom .modeling_text_unet import UNetFlatConditionModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass VersatileDiffusionDualGuidedPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqvae ([`VQModel`]):\n            Vector-quantized (VQ) Model to encode and decode images to and from latent representations.\n        bert ([`LDMBertModel`]):\n            Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture.\n        tokenizer (`transformers.BertTokenizer`):\n            Tokenizer of class\n            [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n    tokenizer: CLIPTokenizer\n    image_feature_extractor: CLIPImageProcessor\n    text_encoder: CLIPTextModelWithProjection\n    image_encoder: CLIPVisionModelWithProjection\n    image_unet: UNet2DConditionModel\n    text_unet: UNetFlatConditionModel\n    vae: AutoencoderKL\n    scheduler: KarrasDiffusionSchedulers\n\n    _optional_components = [\"text_unet\"]\n\n    def __init__(\n        self,\n        tokenizer: CLIPTokenizer,\n        image_feature_extractor: CLIPImageProcessor,\n        text_encoder: CLIPTextModelWithProjection,\n        image_encoder: CLIPVisionModelWithProjection,\n        image_unet: UNet2DConditionModel,\n        text_unet: UNetFlatConditionModel,\n        vae: AutoencoderKL,\n        scheduler: KarrasDiffusionSchedulers,\n    ):\n        super().__init__()\n        self.register_modules(\n            tokenizer=tokenizer,\n            image_feature_extractor=image_feature_extractor,\n            text_encoder=text_encoder,\n            image_encoder=image_encoder,\n            image_unet=image_unet,\n            text_unet=text_unet,\n            vae=vae,\n            scheduler=scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n        if self.text_unet is not None and (\n            \"dual_cross_attention\" not in self.image_unet.config or not self.image_unet.config.dual_cross_attention\n        ):\n            # if loading from a universal checkpoint rather than a saved dual-guided pipeline\n            self._convert_to_dual_attention()\n\n    def remove_unused_weights(self):\n        self.register_modules(text_unet=None)\n\n    def _convert_to_dual_attention(self):\n        \"\"\"\n        Replace image_unet's `Transformer2DModel` blocks with `DualTransformer2DModel` that contains transformer blocks\n        from both `image_unet` and `text_unet`\n        \"\"\"\n        for name, module in self.image_unet.named_modules():\n            if isinstance(module, Transformer2DModel):\n                parent_name, index = name.rsplit(\".\", 1)\n                index = int(index)\n\n                image_transformer = self.image_unet.get_submodule(parent_name)[index]\n                text_transformer = self.text_unet.get_submodule(parent_name)[index]\n\n                config = image_transformer.config\n                dual_transformer = DualTransformer2DModel(\n                    num_attention_heads=config.num_attention_heads,\n                    attention_head_dim=config.attention_head_dim,\n                    in_channels=config.in_channels,\n                    num_layers=config.num_layers,\n                    dropout=config.dropout,\n                    norm_num_groups=config.norm_num_groups,\n                    cross_attention_dim=config.cross_attention_dim,\n                    attention_bias=config.attention_bias,\n                    sample_size=config.sample_size,\n                    num_vector_embeds=config.num_vector_embeds,\n                    activation_fn=config.activation_fn,\n                    num_embeds_ada_norm=config.num_embeds_ada_norm,\n                )\n                dual_transformer.transformers[0] = image_transformer\n                dual_transformer.transformers[1] = text_transformer\n\n                self.image_unet.get_submodule(parent_name)[index] = dual_transformer\n             
   self.image_unet.register_to_config(dual_cross_attention=True)\n\n    def _revert_dual_attention(self):\n        \"\"\"\n        Revert the image_unet `DualTransformer2DModel` blocks back to `Transformer2DModel` with image_unet weights Call\n        this function if you reuse `image_unet` in another pipeline, e.g. `VersatileDiffusionPipeline`\n        \"\"\"\n        for name, module in self.image_unet.named_modules():\n            if isinstance(module, DualTransformer2DModel):\n                parent_name, index = name.rsplit(\".\", 1)\n                index = int(index)\n                self.image_unet.get_submodule(parent_name)[index] = module.transformers[0]\n\n        self.image_unet.register_to_config(dual_cross_attention=False)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.image_unet, \"_hf_hook\"):\n            return self.device\n        for module in self.image_unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_text_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n        \"\"\"\n\n        def normalize_embeddings(encoder_output):\n            embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state)\n            embeds_pooled = encoder_output.text_embeds\n            embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True)\n            return embeds\n\n        batch_size = len(prompt)\n\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n        untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"pt\").input_ids\n\n        if not torch.equal(text_input_ids, untruncated_ids):\n            removed_text = self.tokenizer.batch_decode(untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1])\n            logger.warning(\n                \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n            )\n\n        if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n            attention_mask = text_inputs.attention_mask.to(device)\n        else:\n            attention_mask = None\n\n        prompt_embeds = self.text_encoder(\n            text_input_ids.to(device),\n            attention_mask=attention_mask,\n        )\n        prompt_embeds = normalize_embeddings(prompt_embeds)\n\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance:\n            uncond_tokens = [\"\"] * batch_size\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                
return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = normalize_embeddings(negative_prompt_embeds)\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    def _encode_image_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n        \"\"\"\n\n        def normalize_embeddings(encoder_output):\n            embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state)\n            embeds = self.image_encoder.visual_projection(embeds)\n            embeds_pooled = embeds[:, 0:1]\n            embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True)\n            return embeds\n\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        # get prompt text embeddings\n        image_input = self.image_feature_extractor(images=prompt, return_tensors=\"pt\")\n        pixel_values = image_input.pixel_values.to(device).to(self.image_encoder.dtype)\n        image_embeddings = self.image_encoder(pixel_values)\n        image_embeddings = normalize_embeddings(image_embeddings)\n\n        # duplicate image embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = image_embeddings.shape\n        image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1)\n        image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance:\n            uncond_images = [np.zeros((512, 512, 3)) + 0.5] * batch_size\n            uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors=\"pt\")\n            pixel_values = uncond_images.pixel_values.to(device).to(self.image_encoder.dtype)\n            negative_prompt_embeds = self.image_encoder(pixel_values)\n            negative_prompt_embeds = normalize_embeddings(negative_prompt_embeds)\n\n            # duplicate 
unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and conditional embeddings into a single batch\n            # to avoid doing two forward passes\n            image_embeddings = torch.cat([negative_prompt_embeds, image_embeddings])\n\n        return image_embeddings\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    def check_inputs(self, prompt, image, height, width, callback_steps):\n        if not isinstance(prompt, str) and not isinstance(prompt, PIL.Image.Image) and not isinstance(prompt, list):\n            raise ValueError(f\"`prompt` has to be of type `str` `PIL.Image` or `list` but is {type(prompt)}\")\n        if not isinstance(image, str) and not isinstance(image, PIL.Image.Image) and not isinstance(image, list):\n            raise ValueError(f\"`image` has to be of type `str` `PIL.Image` or `list` but is {type(image)}\")\n\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n    # Copied from 
diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    def set_transformer_params(self, mix_ratio: float = 0.5, condition_types: Tuple = (\"text\", \"image\")):\n        for name, module in self.image_unet.named_modules():\n            if isinstance(module, DualTransformer2DModel):\n                module.mix_ratio = mix_ratio\n\n                for i, type in enumerate(condition_types):\n                    if type == \"text\":\n                        module.condition_lengths[i] = self.text_encoder.config.max_position_embeddings\n                        module.transformer_index_for_condition[i] = 1  # use the second (text) transformer\n                    else:\n                        module.condition_lengths[i] = 257\n                        module.transformer_index_for_condition[i] = 0  # use the first (image) transformer\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[PIL.Image.Image, List[PIL.Image.Image]],\n        image: Union[str, List[str]],\n        text_to_image_strength: float = 0.5,\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        **kwargs,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages generating images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will be generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import VersatileDiffusionDualGuidedPipeline\n        >>> import torch\n        >>> import requests\n        >>> from io import BytesIO\n        >>> from PIL import Image\n\n        >>> # let's download an initial image\n        >>> url = \"https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg\"\n\n        >>> response = requests.get(url)\n        >>> image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n        >>> text = \"a red car in the sun\"\n\n        >>> pipe = VersatileDiffusionDualGuidedPipeline.from_pretrained(\n        ...     
\"shi-labs/versatile-diffusion\", torch_dtype=torch.float16\n        ... )\n        >>> pipe.remove_unused_weights()\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> generator = torch.Generator(device=\"cuda\").manual_seed(0)\n        >>> text_to_image_strength = 0.75\n\n        >>> image = pipe(\n        ...     prompt=text, image=image, text_to_image_strength=text_to_image_strength, generator=generator\n        ... ).images[0]\n        >>> image.save(\"./car_variation.png\")\n        ```\n\n        Returns:\n            [`~pipelines.stable_diffusion.ImagePipelineOutput`] or `tuple`:\n            [`~pipelines.stable_diffusion.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple. When\n            returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.image_unet.config.sample_size * self.vae_scale_factor\n        width = width or self.image_unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, image, height, width, callback_steps)\n\n        # 2. Define call parameters\n        prompt = [prompt] if not isinstance(prompt, list) else prompt\n        image = [image] if not isinstance(image, list) else image\n        batch_size = len(prompt)\n        device = self._execution_device\n        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompts\n        prompt_embeds = self._encode_text_prompt(prompt, device, num_images_per_prompt, do_classifier_free_guidance)\n        image_embeddings = self._encode_image_prompt(image, device, num_images_per_prompt, do_classifier_free_guidance)\n        dual_prompt_embeddings = torch.cat([prompt_embeds, image_embeddings], dim=1)\n        prompt_types = (\"text\", \"image\")\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.image_unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            dual_prompt_embeddings.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs.\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. Combine the attention blocks of the image and text UNets\n        self.set_transformer_params(text_to_image_strength, prompt_types)\n\n        # 8. 
Denoising loop\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            noise_pred = self.image_unet(latent_model_input, t, encoder_hidden_states=dual_prompt_embeddings).sample\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_image_variation.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport numpy as np\nimport PIL\nimport torch\nimport torch.utils.checkpoint\nfrom transformers import CLIPImageProcessor, CLIPVisionModelWithProjection\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass VersatileDiffusionImageVariationPipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqvae ([`VQModel`]):\n            Vector-quantized (VQ) Model to encode and decode images to and from latent representations.\n        bert ([`LDMBertModel`]):\n            Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture.\n        tokenizer (`transformers.BertTokenizer`):\n            Tokenizer of class\n            [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n    image_feature_extractor: CLIPImageProcessor\n    image_encoder: CLIPVisionModelWithProjection\n    image_unet: UNet2DConditionModel\n    vae: AutoencoderKL\n    scheduler: KarrasDiffusionSchedulers\n\n    def __init__(\n        self,\n        image_feature_extractor: CLIPImageProcessor,\n        image_encoder: CLIPVisionModelWithProjection,\n        image_unet: UNet2DConditionModel,\n        vae: AutoencoderKL,\n        scheduler: KarrasDiffusionSchedulers,\n    ):\n        super().__init__()\n        self.register_modules(\n            image_feature_extractor=image_feature_extractor,\n            image_encoder=image_encoder,\n            image_unet=image_unet,\n            vae=vae,\n            scheduler=scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. 
When called, the `image_unet`,\n        `image_encoder` and `vae` have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta')` and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.image_unet, self.image_encoder, self.vae]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.image_unet, \"_hf_hook\"):\n            return self.device\n        for module in self.image_unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt):\n        r\"\"\"\n        Encodes the image prompt into image encoder hidden states.\n\n        Args:\n            prompt (`PIL.Image.Image`, `List[PIL.Image.Image]` or `torch.Tensor`):\n                image prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. 
Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n        \"\"\"\n\n        def normalize_embeddings(encoder_output):\n            embeds = self.image_encoder.vision_model.post_layernorm(encoder_output.last_hidden_state)\n            embeds = self.image_encoder.visual_projection(embeds)\n            embeds_pooled = embeds[:, 0:1]\n            embeds = embeds / torch.norm(embeds_pooled, dim=-1, keepdim=True)\n            return embeds\n\n        if isinstance(prompt, torch.Tensor) and len(prompt.shape) == 4:\n            prompt = list(prompt)\n\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        # get prompt text embeddings\n        image_input = self.image_feature_extractor(images=prompt, return_tensors=\"pt\")\n        pixel_values = image_input.pixel_values.to(device).to(self.image_encoder.dtype)\n        image_embeddings = self.image_encoder(pixel_values)\n        image_embeddings = normalize_embeddings(image_embeddings)\n\n        # duplicate image embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = image_embeddings.shape\n        image_embeddings = image_embeddings.repeat(1, num_images_per_prompt, 1)\n        image_embeddings = image_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance:\n            uncond_images: List[str]\n            if negative_prompt is None:\n                uncond_images = [np.zeros((512, 512, 3)) + 0.5] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, PIL.Image.Image):\n                uncond_images = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. 
Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_images = negative_prompt\n\n            uncond_images = self.image_feature_extractor(images=uncond_images, return_tensors=\"pt\")\n            pixel_values = uncond_images.pixel_values.to(device).to(self.image_encoder.dtype)\n            negative_prompt_embeds = self.image_encoder(pixel_values)\n            negative_prompt_embeds = normalize_embeddings(negative_prompt_embeds)\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and conditional embeddings into a single batch\n            # to avoid doing two forward passes\n            image_embeddings = torch.cat([negative_prompt_embeds, image_embeddings])\n\n        return image_embeddings\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion_image_variation.StableDiffusionImageVariationPipeline.check_inputs\n    def check_inputs(self, image, height, width, callback_steps):\n        if (\n            not isinstance(image, torch.Tensor)\n            and not isinstance(image, PIL.Image.Image)\n            and not isinstance(image, list)\n        ):\n            raise ValueError(\n                \"`image` has to be of type `torch.FloatTensor` or `PIL.Image.Image` or 
`List[PIL.Image.Image]` but is\"\n                f\" {type(image)}\"\n            )\n\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        image: Union[PIL.Image.Image, List[PIL.Image.Image], torch.Tensor],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        **kwargs,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            image (`PIL.Image.Image`, `List[PIL.Image.Image]` or `torch.Tensor`):\n                The image prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. 
More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import VersatileDiffusionImageVariationPipeline\n        >>> import torch\n        >>> import requests\n        >>> from io import BytesIO\n        >>> from PIL import Image\n\n        >>> # let's download an initial image\n        >>> url = \"https://huggingface.co/datasets/diffusers/images/resolve/main/benz.jpg\"\n\n        >>> response = requests.get(url)\n        >>> image = Image.open(BytesIO(response.content)).convert(\"RGB\")\n\n        >>> pipe = VersatileDiffusionImageVariationPipeline.from_pretrained(\n        ...     \"shi-labs/versatile-diffusion\", torch_dtype=torch.float16\n        ... 
)\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> generator = torch.Generator(device=\"cuda\").manual_seed(0)\n        >>> image = pipe(image, generator=generator).images[0]\n        >>> image.save(\"./car_variation.png\")\n        ```\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`:\n            [`~pipelines.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple`. When returning a tuple,\n            the first element is a list with the generated images.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.image_unet.config.sample_size * self.vae_scale_factor\n        width = width or self.image_unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(image, height, width, callback_steps)\n\n        # 2. Define call parameters\n        batch_size = 1 if isinstance(image, PIL.Image.Image) else len(image)\n        device = self._execution_device\n        # here `guidance_scale` is defined analogously to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        image_embeddings = self._encode_prompt(\n            image, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.image_unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            image_embeddings.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs.\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. 
Denoising loop\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            noise_pred = self.image_unet(latent_model_input, t, encoder_hidden_states=image_embeddings).sample\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/versatile_diffusion/pipeline_versatile_diffusion_text_to_image.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport inspect\nimport warnings\nfrom typing import Callable, List, Optional, Union\n\nimport torch\nimport torch.utils.checkpoint\nfrom transformers import CLIPImageProcessor, CLIPTextModelWithProjection, CLIPTokenizer\n\nfrom ...image_processor import VaeImageProcessor\nfrom ...models import AutoencoderKL, Transformer2DModel, UNet2DConditionModel\nfrom ...schedulers import KarrasDiffusionSchedulers\nfrom ...utils import is_accelerate_available, logging, randn_tensor\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\nfrom .modeling_text_unet import UNetFlatConditionModel\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass VersatileDiffusionTextToImagePipeline(DiffusionPipeline):\n    r\"\"\"\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Parameters:\n        vqvae ([`VQModel`]):\n            Vector-quantized (VQ) Model to encode and decode images to and from latent representations.\n        bert ([`LDMBertModel`]):\n            Text-encoder model based on [BERT](https://huggingface.co/docs/transformers/model_doc/bert) architecture.\n        tokenizer (`transformers.BertTokenizer`):\n            Tokenizer of class\n            [BertTokenizer](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer).\n        unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.\n        scheduler ([`SchedulerMixin`]):\n            A scheduler to be used in combination with `unet` to denoise the encoded image latents. 
Can be one of\n            [`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].\n    \"\"\"\n    tokenizer: CLIPTokenizer\n    image_feature_extractor: CLIPImageProcessor\n    text_encoder: CLIPTextModelWithProjection\n    image_unet: UNet2DConditionModel\n    text_unet: UNetFlatConditionModel\n    vae: AutoencoderKL\n    scheduler: KarrasDiffusionSchedulers\n\n    _optional_components = [\"text_unet\"]\n\n    def __init__(\n        self,\n        tokenizer: CLIPTokenizer,\n        text_encoder: CLIPTextModelWithProjection,\n        image_unet: UNet2DConditionModel,\n        text_unet: UNetFlatConditionModel,\n        vae: AutoencoderKL,\n        scheduler: KarrasDiffusionSchedulers,\n    ):\n        super().__init__()\n        self.register_modules(\n            tokenizer=tokenizer,\n            text_encoder=text_encoder,\n            image_unet=image_unet,\n            text_unet=text_unet,\n            vae=vae,\n            scheduler=scheduler,\n        )\n        self.vae_scale_factor = 2 ** (len(self.vae.config.block_out_channels) - 1)\n        self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor)\n\n        if self.text_unet is not None:\n            self._swap_unet_attention_blocks()\n\n    def _swap_unet_attention_blocks(self):\n        \"\"\"\n        Swap the `Transformer2DModel` blocks between the image and text UNets\n        \"\"\"\n        for name, module in self.image_unet.named_modules():\n            if isinstance(module, Transformer2DModel):\n                parent_name, index = name.rsplit(\".\", 1)\n                index = int(index)\n                self.image_unet.get_submodule(parent_name)[index], self.text_unet.get_submodule(parent_name)[index] = (\n                    self.text_unet.get_submodule(parent_name)[index],\n                    self.image_unet.get_submodule(parent_name)[index],\n                )\n\n    def remove_unused_weights(self):\n        self.register_modules(text_unet=None)\n\n    def enable_sequential_cpu_offload(self, gpu_id=0):\n        r\"\"\"\n        Offloads all models to CPU using accelerate, significantly reducing memory usage. When called, unet,\n        text_encoder, vae and safety checker have their state dicts saved to CPU and then are moved to a\n        `torch.device('meta') and loaded to GPU only when their specific submodule has its `forward` method called.\n        \"\"\"\n        if is_accelerate_available():\n            from accelerate import cpu_offload\n        else:\n            raise ImportError(\"Please install accelerate via `pip install accelerate`\")\n\n        device = torch.device(f\"cuda:{gpu_id}\")\n\n        for cpu_offloaded_model in [self.image_unet, self.text_unet, self.text_encoder, self.vae]:\n            if cpu_offloaded_model is not None:\n                cpu_offload(cpu_offloaded_model, device)\n\n    @property\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device with unet->image_unet\n    def _execution_device(self):\n        r\"\"\"\n        Returns the device on which the pipeline's models will be executed. 
After calling\n        `pipeline.enable_sequential_cpu_offload()` the execution device can only be inferred from Accelerate's module\n        hooks.\n        \"\"\"\n        if not hasattr(self.image_unet, \"_hf_hook\"):\n            return self.device\n        for module in self.image_unet.modules():\n            if (\n                hasattr(module, \"_hf_hook\")\n                and hasattr(module._hf_hook, \"execution_device\")\n                and module._hf_hook.execution_device is not None\n            ):\n                return torch.device(module._hf_hook.execution_device)\n        return self.device\n\n    def _encode_prompt(self, prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt):\n        r\"\"\"\n        Encodes the prompt into text encoder hidden states.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                prompt to be encoded\n            device: (`torch.device`):\n                torch device\n            num_images_per_prompt (`int`):\n                number of images that should be generated per prompt\n            do_classifier_free_guidance (`bool`):\n                whether to use classifier free guidance or not\n            negative_prompt (`str` or `List[str]`):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n        \"\"\"\n\n        def normalize_embeddings(encoder_output):\n            embeds = self.text_encoder.text_projection(encoder_output.last_hidden_state)\n            embeds_pooled = encoder_output.text_embeds\n            embeds = embeds / torch.norm(embeds_pooled.unsqueeze(1), dim=-1, keepdim=True)\n            return embeds\n\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            truncation=True,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n        untruncated_ids = self.tokenizer(prompt, padding=\"max_length\", return_tensors=\"pt\").input_ids\n\n        if not torch.equal(text_input_ids, untruncated_ids):\n            removed_text = self.tokenizer.batch_decode(untruncated_ids[:, self.tokenizer.model_max_length - 1 : -1])\n            logger.warning(\n                \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n            )\n\n        if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n            attention_mask = text_inputs.attention_mask.to(device)\n        else:\n            attention_mask = None\n\n        prompt_embeds = self.text_encoder(\n            text_input_ids.to(device),\n            attention_mask=attention_mask,\n        )\n        prompt_embeds = normalize_embeddings(prompt_embeds)\n\n        # duplicate text embeddings for each generation per prompt, using mps friendly method\n        bs_embed, seq_len, _ = prompt_embeds.shape\n        prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)\n        prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)\n\n        # get unconditional embeddings for classifier free guidance\n        if do_classifier_free_guidance:\n            uncond_tokens: 
List[str]\n            if negative_prompt is None:\n                uncond_tokens = [\"\"] * batch_size\n            elif type(prompt) is not type(negative_prompt):\n                raise TypeError(\n                    f\"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !=\"\n                    f\" {type(prompt)}.\"\n                )\n            elif isinstance(negative_prompt, str):\n                uncond_tokens = [negative_prompt]\n            elif batch_size != len(negative_prompt):\n                raise ValueError(\n                    f\"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:\"\n                    f\" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches\"\n                    \" the batch size of `prompt`.\"\n                )\n            else:\n                uncond_tokens = negative_prompt\n\n            max_length = text_input_ids.shape[-1]\n            uncond_input = self.tokenizer(\n                uncond_tokens,\n                padding=\"max_length\",\n                max_length=max_length,\n                truncation=True,\n                return_tensors=\"pt\",\n            )\n\n            if hasattr(self.text_encoder.config, \"use_attention_mask\") and self.text_encoder.config.use_attention_mask:\n                attention_mask = uncond_input.attention_mask.to(device)\n            else:\n                attention_mask = None\n\n            negative_prompt_embeds = self.text_encoder(\n                uncond_input.input_ids.to(device),\n                attention_mask=attention_mask,\n            )\n            negative_prompt_embeds = normalize_embeddings(negative_prompt_embeds)\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.decode_latents\n    def decode_latents(self, latents):\n        warnings.warn(\n            \"The decode_latents method is deprecated and will be removed in a future version. 
Please\"\n            \" use VaeImageProcessor instead\",\n            FutureWarning,\n        )\n        latents = 1 / self.vae.config.scaling_factor * latents\n        image = self.vae.decode(latents, return_dict=False)[0]\n        image = (image / 2 + 0.5).clamp(0, 1)\n        # we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16\n        image = image.cpu().permute(0, 2, 3, 1).float().numpy()\n        return image\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_extra_step_kwargs\n    def prepare_extra_step_kwargs(self, generator, eta):\n        # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature\n        # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.\n        # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502\n        # and should be between [0, 1]\n\n        accepts_eta = \"eta\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        extra_step_kwargs = {}\n        if accepts_eta:\n            extra_step_kwargs[\"eta\"] = eta\n\n        # check if the scheduler accepts generator\n        accepts_generator = \"generator\" in set(inspect.signature(self.scheduler.step).parameters.keys())\n        if accepts_generator:\n            extra_step_kwargs[\"generator\"] = generator\n        return extra_step_kwargs\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.check_inputs\n    def check_inputs(\n        self,\n        prompt,\n        height,\n        width,\n        callback_steps,\n        negative_prompt=None,\n        prompt_embeds=None,\n        negative_prompt_embeds=None,\n    ):\n        if height % 8 != 0 or width % 8 != 0:\n            raise ValueError(f\"`height` and `width` have to be divisible by 8 but are {height} and {width}.\")\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        if prompt is not None and prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to\"\n                \" only forward one of the two.\"\n            )\n        elif prompt is None and prompt_embeds is None:\n            raise ValueError(\n                \"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined.\"\n            )\n        elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        if negative_prompt is not None and negative_prompt_embeds is not None:\n            raise ValueError(\n                f\"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:\"\n                f\" {negative_prompt_embeds}. 
Please make sure to only forward one of the two.\"\n            )\n\n        if prompt_embeds is not None and negative_prompt_embeds is not None:\n            if prompt_embeds.shape != negative_prompt_embeds.shape:\n                raise ValueError(\n                    \"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but\"\n                    f\" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`\"\n                    f\" {negative_prompt_embeds.shape}.\"\n                )\n\n    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline.prepare_latents\n    def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, device, generator, latents=None):\n        shape = (batch_size, num_channels_latents, height // self.vae_scale_factor, width // self.vae_scale_factor)\n        if isinstance(generator, list) and len(generator) != batch_size:\n            raise ValueError(\n                f\"You have passed a list of generators of length {len(generator)}, but requested an effective batch\"\n                f\" size of {batch_size}. Make sure the batch size matches the length of the generators.\"\n            )\n\n        if latents is None:\n            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)\n        else:\n            latents = latents.to(device)\n\n        # scale the initial noise by the standard deviation required by the scheduler\n        latents = latents * self.scheduler.init_noise_sigma\n        return latents\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        height: Optional[int] = None,\n        width: Optional[int] = None,\n        num_inference_steps: int = 50,\n        guidance_scale: float = 7.5,\n        negative_prompt: Optional[Union[str, List[str]]] = None,\n        num_images_per_prompt: Optional[int] = 1,\n        eta: float = 0.0,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n        **kwargs,\n    ):\n        r\"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            height (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The height in pixels of the generated image.\n            width (`int`, *optional*, defaults to self.image_unet.config.sample_size * self.vae_scale_factor):\n                The width in pixels of the generated image.\n            num_inference_steps (`int`, *optional*, defaults to 50):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 7.5):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). 
Guidance scale is enabled by setting `guidance_scale >\n                1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,\n                usually at the expense of lower image quality.\n            negative_prompt (`str` or `List[str]`, *optional*):\n                The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored\n                if `guidance_scale` is less than `1`).\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            eta (`float`, *optional*, defaults to 0.0):\n                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to\n                [`schedulers.DDIMScheduler`], will be ignored for others.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor`, *optional*):\n                Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image\n                generation. Can be used to tweak the same generation with different prompts. If not provided, a latents\n                tensor will ge generated by sampling using the supplied random `generator`.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generate image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a\n                plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Examples:\n\n        ```py\n        >>> from diffusers import VersatileDiffusionTextToImagePipeline\n        >>> import torch\n\n        >>> pipe = VersatileDiffusionTextToImagePipeline.from_pretrained(\n        ...     \"shi-labs/versatile-diffusion\", torch_dtype=torch.float16\n        ... 
)\n        >>> pipe.remove_unused_weights()\n        >>> pipe = pipe.to(\"cuda\")\n\n        >>> generator = torch.Generator(device=\"cuda\").manual_seed(0)\n        >>> image = pipe(\"an astronaut riding on a horse on mars\", generator=generator).images[0]\n        >>> image.save(\"./astronaut.png\")\n        ```\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`:\n            [`~pipelines.ImagePipelineOutput`] if `return_dict` is True, otherwise a `tuple`. When returning a tuple,\n            the first element is a list with the generated images.\n        \"\"\"\n        # 0. Default height and width to unet\n        height = height or self.image_unet.config.sample_size * self.vae_scale_factor\n        width = width or self.image_unet.config.sample_size * self.vae_scale_factor\n\n        # 1. Check inputs. Raise error if not correct\n        self.check_inputs(prompt, height, width, callback_steps)\n\n        # 2. Define call parameters\n        batch_size = 1 if isinstance(prompt, str) else len(prompt)\n        device = self._execution_device\n        # here `guidance_scale` is defined analogously to the guidance weight `w` of equation (2)\n        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`\n        # corresponds to doing no classifier free guidance.\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        # 3. Encode input prompt\n        prompt_embeds = self._encode_prompt(\n            prompt, device, num_images_per_prompt, do_classifier_free_guidance, negative_prompt\n        )\n\n        # 4. Prepare timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=device)\n        timesteps = self.scheduler.timesteps\n\n        # 5. Prepare latent variables\n        num_channels_latents = self.image_unet.config.in_channels\n        latents = self.prepare_latents(\n            batch_size * num_images_per_prompt,\n            num_channels_latents,\n            height,\n            width,\n            prompt_embeds.dtype,\n            device,\n            generator,\n            latents,\n        )\n\n        # 6. Prepare extra step kwargs.\n        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)\n\n        # 7. 
Denoising loop\n        for i, t in enumerate(self.progress_bar(timesteps)):\n            # expand the latents if we are doing classifier free guidance\n            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents\n            latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)\n\n            # predict the noise residual\n            noise_pred = self.image_unet(latent_model_input, t, encoder_hidden_states=prompt_embeds).sample\n\n            # perform guidance\n            if do_classifier_free_guidance:\n                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)\n                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, latents)\n\n        if not output_type == \"latent\":\n            image = self.vae.decode(latents / self.vae.config.scaling_factor, return_dict=False)[0]\n        else:\n            image = latents\n\n        image = self.image_processor.postprocess(image, output_type=output_type)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n"
  },
  {
    "path": "diffusers/pipelines/vq_diffusion/__init__.py",
    "content": "from ...utils import is_torch_available, is_transformers_available\n\n\nif is_transformers_available() and is_torch_available():\n    from .pipeline_vq_diffusion import LearnedClassifierFreeSamplingEmbeddings, VQDiffusionPipeline\n"
  },
  {
    "path": "diffusers/pipelines/vq_diffusion/pipeline_vq_diffusion.py",
    "content": "# Copyright 2023 Microsoft and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Callable, List, Optional, Tuple, Union\n\nimport torch\nfrom transformers import CLIPTextModel, CLIPTokenizer\n\nfrom ...configuration_utils import ConfigMixin, register_to_config\nfrom ...models import ModelMixin, Transformer2DModel, VQModel\nfrom ...schedulers import VQDiffusionScheduler\nfrom ...utils import logging\nfrom ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\nclass LearnedClassifierFreeSamplingEmbeddings(ModelMixin, ConfigMixin):\n    \"\"\"\n    Utility class for storing learned text embeddings for classifier free sampling\n    \"\"\"\n\n    @register_to_config\n    def __init__(self, learnable: bool, hidden_size: Optional[int] = None, length: Optional[int] = None):\n        super().__init__()\n\n        self.learnable = learnable\n\n        if self.learnable:\n            assert hidden_size is not None, \"learnable=True requires `hidden_size` to be set\"\n            assert length is not None, \"learnable=True requires `length` to be set\"\n\n            embeddings = torch.zeros(length, hidden_size)\n        else:\n            embeddings = None\n\n        self.embeddings = torch.nn.Parameter(embeddings)\n\n\nclass VQDiffusionPipeline(DiffusionPipeline):\n    r\"\"\"\n    Pipeline for text-to-image generation using VQ Diffusion\n\n    This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the\n    library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)\n\n    Args:\n        vqvae ([`VQModel`]):\n            Vector Quantized Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent\n            representations.\n        text_encoder ([`CLIPTextModel`]):\n            Frozen text-encoder. 
VQ Diffusion uses the text portion of\n            [CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically\n            the [clip-vit-base-patch32](https://huggingface.co/openai/clip-vit-base-patch32) variant.\n        tokenizer (`CLIPTokenizer`):\n            Tokenizer of class\n            [CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).\n        transformer ([`Transformer2DModel`]):\n            Conditional transformer to denoise the encoded image latents.\n        scheduler ([`VQDiffusionScheduler`]):\n            A scheduler to be used in combination with `transformer` to denoise the encoded image latents.\n    \"\"\"\n\n    vqvae: VQModel\n    text_encoder: CLIPTextModel\n    tokenizer: CLIPTokenizer\n    transformer: Transformer2DModel\n    learned_classifier_free_sampling_embeddings: LearnedClassifierFreeSamplingEmbeddings\n    scheduler: VQDiffusionScheduler\n\n    def __init__(\n        self,\n        vqvae: VQModel,\n        text_encoder: CLIPTextModel,\n        tokenizer: CLIPTokenizer,\n        transformer: Transformer2DModel,\n        scheduler: VQDiffusionScheduler,\n        learned_classifier_free_sampling_embeddings: LearnedClassifierFreeSamplingEmbeddings,\n    ):\n        super().__init__()\n\n        self.register_modules(\n            vqvae=vqvae,\n            transformer=transformer,\n            text_encoder=text_encoder,\n            tokenizer=tokenizer,\n            scheduler=scheduler,\n            learned_classifier_free_sampling_embeddings=learned_classifier_free_sampling_embeddings,\n        )\n\n    def _encode_prompt(self, prompt, num_images_per_prompt, do_classifier_free_guidance):\n        batch_size = len(prompt) if isinstance(prompt, list) else 1\n\n        # get prompt text embeddings\n        text_inputs = self.tokenizer(\n            prompt,\n            padding=\"max_length\",\n            max_length=self.tokenizer.model_max_length,\n            return_tensors=\"pt\",\n        )\n        text_input_ids = text_inputs.input_ids\n\n        if text_input_ids.shape[-1] > self.tokenizer.model_max_length:\n            removed_text = self.tokenizer.batch_decode(text_input_ids[:, self.tokenizer.model_max_length :])\n            logger.warning(\n                \"The following part of your input was truncated because CLIP can only handle sequences up to\"\n                f\" {self.tokenizer.model_max_length} tokens: {removed_text}\"\n            )\n            text_input_ids = text_input_ids[:, : self.tokenizer.model_max_length]\n        prompt_embeds = self.text_encoder(text_input_ids.to(self.device))[0]\n\n        # NOTE: This additional step of normalizing the text embeddings is from VQ-Diffusion.\n        # While CLIP does normalize the pooled output of the text transformer when combining\n        # the image and text embeddings, CLIP does not directly normalize the last hidden state.\n        #\n        # CLIP normalizing the pooled output.\n        # https://github.com/huggingface/transformers/blob/d92e22d1f28324f513f3080e5c47c071a3916721/src/transformers/models/clip/modeling_clip.py#L1052-L1053\n        prompt_embeds = prompt_embeds / prompt_embeds.norm(dim=-1, keepdim=True)\n\n        # duplicate text embeddings for each generation per prompt\n        prompt_embeds = prompt_embeds.repeat_interleave(num_images_per_prompt, dim=0)\n\n        if do_classifier_free_guidance:\n            if 
self.learned_classifier_free_sampling_embeddings.learnable:\n                negative_prompt_embeds = self.learned_classifier_free_sampling_embeddings.embeddings\n                negative_prompt_embeds = negative_prompt_embeds.unsqueeze(0).repeat(batch_size, 1, 1)\n            else:\n                uncond_tokens = [\"\"] * batch_size\n\n                max_length = text_input_ids.shape[-1]\n                uncond_input = self.tokenizer(\n                    uncond_tokens,\n                    padding=\"max_length\",\n                    max_length=max_length,\n                    truncation=True,\n                    return_tensors=\"pt\",\n                )\n                negative_prompt_embeds = self.text_encoder(uncond_input.input_ids.to(self.device))[0]\n                # See comment for normalizing text embeddings\n                negative_prompt_embeds = negative_prompt_embeds / negative_prompt_embeds.norm(dim=-1, keepdim=True)\n\n            # duplicate unconditional embeddings for each generation per prompt, using mps friendly method\n            seq_len = negative_prompt_embeds.shape[1]\n            negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)\n            negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)\n\n            # For classifier free guidance, we need to do two forward passes.\n            # Here we concatenate the unconditional and text embeddings into a single batch\n            # to avoid doing two forward passes\n            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])\n\n        return prompt_embeds\n\n    @torch.no_grad()\n    def __call__(\n        self,\n        prompt: Union[str, List[str]],\n        num_inference_steps: int = 100,\n        guidance_scale: float = 5.0,\n        truncation_rate: float = 1.0,\n        num_images_per_prompt: int = 1,\n        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,\n        latents: Optional[torch.FloatTensor] = None,\n        output_type: Optional[str] = \"pil\",\n        return_dict: bool = True,\n        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,\n        callback_steps: int = 1,\n    ) -> Union[ImagePipelineOutput, Tuple]:\n        \"\"\"\n        Function invoked when calling the pipeline for generation.\n\n        Args:\n            prompt (`str` or `List[str]`):\n                The prompt or prompts to guide the image generation.\n            num_inference_steps (`int`, *optional*, defaults to 100):\n                The number of denoising steps. More denoising steps usually lead to a higher quality image at the\n                expense of slower inference.\n            guidance_scale (`float`, *optional*, defaults to 5.0):\n                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).\n                `guidance_scale` is defined as `w` of equation 2. of [Imagen\n                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >\n                1`. 
Higher guidance scale encourages the model to generate images that are closely linked to the text\n                `prompt`, usually at the expense of lower image quality.\n            truncation_rate (`float`, *optional*, defaults to 1.0 (equivalent to no truncation)):\n                Used to \"truncate\" the predicted classes for x_0 such that the cumulative probability for a pixel is at\n                most `truncation_rate`. The lowest probabilities that would increase the cumulative probability above\n                `truncation_rate` are set to zero.\n            num_images_per_prompt (`int`, *optional*, defaults to 1):\n                The number of images to generate per prompt.\n            generator (`torch.Generator`, *optional*):\n                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)\n                to make generation deterministic.\n            latents (`torch.FloatTensor` of shape (batch), *optional*):\n                Pre-generated noisy latents to be used as inputs for image generation. Must be valid embedding indices.\n                Can be used to tweak the same generation with different prompts. If not provided, a latents tensor of\n                completely masked latent pixels will be generated.\n            output_type (`str`, *optional*, defaults to `\"pil\"`):\n                The output format of the generated image. Choose between\n                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.\n            return_dict (`bool`, *optional*, defaults to `True`):\n                Whether or not to return a [`~pipelines.ImagePipelineOutput`] instead of a plain tuple.\n            callback (`Callable`, *optional*):\n                A function that will be called every `callback_steps` steps during inference. The function will be\n                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.\n            callback_steps (`int`, *optional*, defaults to 1):\n                The frequency at which the `callback` function will be called. If not specified, the callback will be\n                called at every step.\n\n        Returns:\n            [`~pipelines.ImagePipelineOutput`] or `tuple`: [`~pipelines.ImagePipelineOutput`] if `return_dict`\n            is True, otherwise a `tuple`. 
When returning a tuple, the first element is a list with the generated images.\n        \"\"\"\n        if isinstance(prompt, str):\n            batch_size = 1\n        elif isinstance(prompt, list):\n            batch_size = len(prompt)\n        else:\n            raise ValueError(f\"`prompt` has to be of type `str` or `list` but is {type(prompt)}\")\n\n        batch_size = batch_size * num_images_per_prompt\n\n        do_classifier_free_guidance = guidance_scale > 1.0\n\n        prompt_embeds = self._encode_prompt(prompt, num_images_per_prompt, do_classifier_free_guidance)\n\n        if (callback_steps is None) or (\n            callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)\n        ):\n            raise ValueError(\n                f\"`callback_steps` has to be a positive integer but is {callback_steps} of type\"\n                f\" {type(callback_steps)}.\"\n            )\n\n        # get the initial completely masked latents unless the user supplied it\n\n        latents_shape = (batch_size, self.transformer.num_latent_pixels)\n        if latents is None:\n            mask_class = self.transformer.num_vector_embeds - 1\n            latents = torch.full(latents_shape, mask_class).to(self.device)\n        else:\n            if latents.shape != latents_shape:\n                raise ValueError(f\"Unexpected latents shape, got {latents.shape}, expected {latents_shape}\")\n            if (latents < 0).any() or (latents >= self.transformer.num_vector_embeds).any():\n                raise ValueError(\n                    \"Unexpected latents value(s). All latents be valid embedding indices i.e. in the range 0,\"\n                    f\" {self.transformer.num_vector_embeds - 1} (inclusive).\"\n                )\n            latents = latents.to(self.device)\n\n        # set timesteps\n        self.scheduler.set_timesteps(num_inference_steps, device=self.device)\n\n        timesteps_tensor = self.scheduler.timesteps.to(self.device)\n\n        sample = latents\n\n        for i, t in enumerate(self.progress_bar(timesteps_tensor)):\n            # expand the sample if we are doing classifier free guidance\n            latent_model_input = torch.cat([sample] * 2) if do_classifier_free_guidance else sample\n\n            # predict the un-noised image\n            # model_output == `log_p_x_0`\n            model_output = self.transformer(latent_model_input, encoder_hidden_states=prompt_embeds, timestep=t).sample\n\n            if do_classifier_free_guidance:\n                model_output_uncond, model_output_text = model_output.chunk(2)\n                model_output = model_output_uncond + guidance_scale * (model_output_text - model_output_uncond)\n                model_output -= torch.logsumexp(model_output, dim=1, keepdim=True)\n\n            model_output = self.truncate(model_output, truncation_rate)\n\n            # remove `log(0)`'s (`-inf`s)\n            model_output = model_output.clamp(-70)\n\n            # compute the previous noisy sample x_t -> x_t-1\n            sample = self.scheduler.step(model_output, timestep=t, sample=sample, generator=generator).prev_sample\n\n            # call the callback, if provided\n            if callback is not None and i % callback_steps == 0:\n                callback(i, t, sample)\n\n        embedding_channels = self.vqvae.config.vq_embed_dim\n        embeddings_shape = (batch_size, self.transformer.height, self.transformer.width, embedding_channels)\n        embeddings = 
self.vqvae.quantize.get_codebook_entry(sample, shape=embeddings_shape)\n        image = self.vqvae.decode(embeddings, force_not_quantize=True).sample\n\n        image = (image / 2 + 0.5).clamp(0, 1)\n        image = image.cpu().permute(0, 2, 3, 1).numpy()\n\n        if output_type == \"pil\":\n            image = self.numpy_to_pil(image)\n\n        if not return_dict:\n            return (image,)\n\n        return ImagePipelineOutput(images=image)\n\n    def truncate(self, log_p_x_0: torch.FloatTensor, truncation_rate: float) -> torch.FloatTensor:\n        \"\"\"\n        Truncates `log_p_x_0` such that for each column vector, the total cumulative probability is at most\n        `truncation_rate`. The lowest probabilities that would increase the cumulative probability above\n        `truncation_rate` are set to zero.\n        \"\"\"\n        sorted_log_p_x_0, indices = torch.sort(log_p_x_0, 1, descending=True)\n        sorted_p_x_0 = torch.exp(sorted_log_p_x_0)\n        keep_mask = sorted_p_x_0.cumsum(dim=1) < truncation_rate\n\n        # Ensure that at least the largest probability is not zeroed out\n        all_true = torch.full_like(keep_mask[:, 0:1, :], True)\n        keep_mask = torch.cat((all_true, keep_mask), dim=1)\n        keep_mask = keep_mask[:, :-1, :]\n\n        keep_mask = keep_mask.gather(1, indices.argsort(1))\n\n        rv = log_p_x_0.clone()\n\n        rv[~keep_mask] = -torch.inf  # -inf = log(0)\n\n        return rv\n"
  },
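The pipeline above can be exercised end to end with only a prompt. Below is a minimal usage sketch, assuming the class is exported as `VQDiffusionPipeline` and that the `microsoft/vq-diffusion-ithq` checkpoint (used here purely as an illustrative model id) is available; `guidance_scale > 1.0` switches on classifier-free guidance and `truncation_rate < 1.0` activates the log-probability truncation implemented in `truncate`.

```python
# Hypothetical usage sketch; the model id and export path are assumptions, not part of this file.
import torch
from diffusers import VQDiffusionPipeline

pipe = VQDiffusionPipeline.from_pretrained("microsoft/vq-diffusion-ithq")
pipe = pipe.to("cuda" if torch.cuda.is_available() else "cpu")

generator = torch.Generator(device=pipe.device).manual_seed(0)
image = pipe(
    "a teddy bear playing in the pool",
    num_inference_steps=100,
    guidance_scale=5.0,    # > 1.0 enables classifier-free guidance
    truncation_rate=0.86,  # keep only the most probable codebook classes per latent pixel
    generator=generator,
).images[0]
image.save("vq_diffusion_sample.png")
```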
  {
    "path": "diffusers/schedulers/README.md",
    "content": "# Schedulers\n\nFor more information on the schedulers, please refer to the [docs](https://huggingface.co/docs/diffusers/api/schedulers/overview)."
  },
  {
    "path": "diffusers/schedulers/__init__.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom ..utils import (\n    OptionalDependencyNotAvailable,\n    is_flax_available,\n    is_scipy_available,\n    is_torch_available,\n    is_torchsde_available,\n)\n\n\ntry:\n    if not is_torch_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_pt_objects import *  # noqa F403\nelse:\n    from .scheduling_ddim import DDIMScheduler\n    from .scheduling_ddim_inverse import DDIMInverseScheduler\n    from .scheduling_ddpm import DDPMScheduler\n    from .scheduling_deis_multistep import DEISMultistepScheduler\n    from .scheduling_dpmsolver_multistep import DPMSolverMultistepScheduler\n    from .scheduling_dpmsolver_multistep_inverse import DPMSolverMultistepInverseScheduler\n    from .scheduling_dpmsolver_singlestep import DPMSolverSinglestepScheduler\n    from .scheduling_euler_ancestral_discrete import EulerAncestralDiscreteScheduler\n    from .scheduling_euler_discrete import EulerDiscreteScheduler\n    from .scheduling_heun_discrete import HeunDiscreteScheduler\n    from .scheduling_ipndm import IPNDMScheduler\n    from .scheduling_k_dpm_2_ancestral_discrete import KDPM2AncestralDiscreteScheduler\n    from .scheduling_k_dpm_2_discrete import KDPM2DiscreteScheduler\n    from .scheduling_karras_ve import KarrasVeScheduler\n    from .scheduling_pndm import PNDMScheduler\n    from .scheduling_repaint import RePaintScheduler\n    from .scheduling_sde_ve import ScoreSdeVeScheduler\n    from .scheduling_sde_vp import ScoreSdeVpScheduler\n    from .scheduling_unclip import UnCLIPScheduler\n    from .scheduling_unipc_multistep import UniPCMultistepScheduler\n    from .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin\n    from .scheduling_vq_diffusion import VQDiffusionScheduler\n\ntry:\n    if not is_flax_available():\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_flax_objects import *  # noqa F403\nelse:\n    from .scheduling_ddim_flax import FlaxDDIMScheduler\n    from .scheduling_ddpm_flax import FlaxDDPMScheduler\n    from .scheduling_dpmsolver_multistep_flax import FlaxDPMSolverMultistepScheduler\n    from .scheduling_karras_ve_flax import FlaxKarrasVeScheduler\n    from .scheduling_lms_discrete_flax import FlaxLMSDiscreteScheduler\n    from .scheduling_pndm_flax import FlaxPNDMScheduler\n    from .scheduling_sde_ve_flax import FlaxScoreSdeVeScheduler\n    from .scheduling_utils_flax import (\n        FlaxKarrasDiffusionSchedulers,\n        FlaxSchedulerMixin,\n        FlaxSchedulerOutput,\n        broadcast_to_shape_from_left,\n    )\n\n\ntry:\n    if not (is_torch_available() and is_scipy_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_torch_and_scipy_objects import *  # noqa F403\nelse:\n    from .scheduling_lms_discrete import 
LMSDiscreteScheduler\n\ntry:\n    if not (is_torch_available() and is_torchsde_available()):\n        raise OptionalDependencyNotAvailable()\nexcept OptionalDependencyNotAvailable:\n    from ..utils.dummy_torch_and_torchsde_objects import *  # noqa F403\nelse:\n    from .scheduling_dpmsolver_sde import DPMSolverSDEScheduler\n"
  },
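Every scheduler exported above implements `SchedulerMixin`/`ConfigMixin`, so one can be swapped for another on an existing pipeline via `from_config`. A short sketch of that pattern, assuming a Stable Diffusion checkpoint such as `runwayml/stable-diffusion-v1-5` and the `StableDiffusionPipeline` class are available in this copy (both are assumptions here):

```python
# Sketch of swapping compatible schedulers on a pipeline; the checkpoint id is an assumption.
from diffusers import DDIMScheduler, DPMSolverMultistepScheduler, StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")

# Reuse the existing scheduler config so the beta schedule, timestep count, etc. carry over.
pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)

# A multistep solver usually needs far fewer inference steps for comparable quality.
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
```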
  {
    "path": "diffusers/schedulers/scheduling_ddim.py",
    "content": "# Copyright 2023 Stanford University Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This code is strongly influenced by https://github.com/pesser/pytorch_diffusion\n# and https://github.com/hojonathanho/diffusion\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin\n\n\n@dataclass\n# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->DDIM\nclass DDIMSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DDIMScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Denoising diffusion implicit models is a scheduler that extends the denoising procedure introduced in denoising\n    diffusion probabilistic models (DDPMs) with non-Markovian guidance.\n\n    [`~ConfigMixin`] takes care of storing all 
config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2010.02502\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample for numerical stability.\n        clip_sample_range (`float`, default `1.0`):\n            the maximum magnitude for sample clipping. Valid only when `clip_sample=True`.\n        set_alpha_to_one (`bool`, default `True`):\n            each diffusion step uses the value of alphas product at that step and at the previous one. For the final\n            step there is no previous alpha. When this option is `True` the previous alpha product is fixed to `1`,\n            otherwise it uses the value of alpha at step 0.\n        steps_offset (`int`, default `0`):\n            an offset added to the inference steps. You can use a combination of `offset=1` and\n            `set_alpha_to_one=False`, to make the last step use step 0 for the previous alpha product, as done in\n            stable diffusion.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n            stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487). Valid only when `thresholding=True`.\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. 
Valid only when `thresholding=True`.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        skip_type = 'uniform',\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        clip_sample: bool = True,\n        set_alpha_to_one: bool = True,\n        steps_offset: int = 0,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        clip_sample_range: float = 1.0,\n        sample_max_value: float = 1.0,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        self.skip_type = skip_type\n\n        # At every step in ddim, we are looking into the previous alphas_cumprod\n        # For the final step, there is no previous alphas_cumprod because we are already at 0\n        # `set_alpha_to_one` decides whether we set this parameter simply to one or\n        # whether we use the final alpha of the \"non-previous\" one.\n        self.final_alpha_cumprod = torch.tensor(1.0) if set_alpha_to_one else self.alphas_cumprod[0]\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # setable values\n        self.num_inference_steps = None\n        self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy().astype(np.int64))\n\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def _get_variance(self, timestep, prev_timestep):\n        alpha_prod_t = self.alphas_cumprod[timestep]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.final_alpha_cumprod\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        variance = (beta_prod_t_prev / beta_prod_t) * (1 - alpha_prod_t / alpha_prod_t_prev)\n\n        return variance\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler._threshold_sample\n    def _threshold_sample(self, sample: 
torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n\n        if num_inference_steps > self.config.num_train_timesteps:\n            raise ValueError(\n                f\"`num_inference_steps`: {num_inference_steps} cannot be larger than `self.config.num_train_timesteps`:\"\n                f\" {self.config.num_train_timesteps} as the unet model trained with this scheduler can only handle\"\n                f\" maximal {self.config.num_train_timesteps} timesteps.\"\n            )\n\n        self.num_inference_steps = num_inference_steps\n\n        if self.skip_type == \"uniform\":\n            # evenly spaced timesteps over [0, num_train_timesteps - 1]\n            step_ratio = (self.config.num_train_timesteps - 1) / (self.num_inference_steps - 1)\n            timesteps = (np.arange(0, num_inference_steps) * step_ratio).round()[::-1].copy().astype(np.int64)\n        elif self.skip_type == \"quad\":\n            # quadratic spacing: timesteps are denser near t = 0\n            step_ratio = (self.config.num_train_timesteps - 1) / (self.num_inference_steps - 1) ** 2\n            timesteps = (np.arange(0, num_inference_steps) ** 2 * step_ratio).round()[::-1].copy().astype(np.int64)\n        else:\n            raise NotImplementedError(f\"skip_type {self.skip_type} is not implemented\")\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n        self.timesteps += self.config.steps_offset\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        eta: float = 0.0,\n        use_clipped_model_output: bool = False,\n        generator=None,\n        variance_noise: Optional[torch.FloatTensor] = None,\n        return_dict: bool = True,\n    ) -> Union[DDIMSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            eta (`float`): weight of noise for added noise in diffusion step.\n            use_clipped_model_output (`bool`): if `True`, compute \"corrected\" `model_output` from the clipped\n                predicted original sample. Necessary because predicted original sample is clipped to [-1, 1] when\n                `self.config.clip_sample` is `True`. If no clipping has happened, \"corrected\" `model_output` would\n                coincide with the one provided as input and `use_clipped_model_output` will have no effect.\n            generator: random number generator.\n            variance_noise (`torch.FloatTensor`): instead of generating noise for the variance using `generator`, we\n                can directly provide the noise for the variance itself. This is useful for methods such as\n                CycleDiffusion. 
(https://arxiv.org/abs/2210.05559)\n            return_dict (`bool`): option for returning tuple rather than DDIMSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.DDIMSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.DDIMSchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        # See formulas (12) and (16) of DDIM paper https://arxiv.org/pdf/2010.02502.pdf\n        # Ideally, read DDIM paper in-detail understanding\n\n        # Notation (<variable name> -> <name in paper>\n        # - pred_noise_t -> e_theta(x_t, t)\n        # - pred_original_sample -> f_theta(x_t, t) or x_0\n        # - std_dev_t -> sigma_t\n        # - eta -> η\n        # - pred_sample_direction -> \"direction pointing to x_t\"\n        # - pred_prev_sample -> \"x_t-1\"\n\n        # 1. get previous step value (=t-1)\n        prev_timestep = timestep - self.config.num_train_timesteps // self.num_inference_steps\n\n        # 2. compute alphas, betas\n        alpha_prod_t = self.alphas_cumprod[timestep]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.final_alpha_cumprod\n\n        beta_prod_t = 1 - alpha_prod_t\n\n        # 3. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n            pred_epsilon = model_output\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n            pred_epsilon = (sample - alpha_prod_t ** (0.5) * pred_original_sample) / beta_prod_t ** (0.5)\n        elif self.config.prediction_type == \"v_prediction\":\n            pred_original_sample = (alpha_prod_t**0.5) * sample - (beta_prod_t**0.5) * model_output\n            pred_epsilon = (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                \" `v_prediction`\"\n            )\n\n        # 4. Clip or threshold \"predicted x_0\"\n        if self.config.thresholding:\n            pred_original_sample = self._threshold_sample(pred_original_sample)\n        elif self.config.clip_sample:\n            pred_original_sample = pred_original_sample.clamp(\n                -self.config.clip_sample_range, self.config.clip_sample_range\n            )\n\n        # 5. compute variance: \"sigma_t(η)\" -> see formula (16)\n        # σ_t = sqrt((1 − α_t−1)/(1 − α_t)) * sqrt(1 − α_t/α_t−1)\n        variance = self._get_variance(timestep, prev_timestep)\n        std_dev_t = eta * variance ** (0.5)\n\n        if use_clipped_model_output:\n            # the pred_epsilon is always re-derived from the clipped x_0 in Glide\n            pred_epsilon = (sample - alpha_prod_t ** (0.5) * pred_original_sample) / beta_prod_t ** (0.5)\n\n        # 6. 
compute \"direction pointing to x_t\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        pred_sample_direction = (1 - alpha_prod_t_prev - std_dev_t**2) ** (0.5) * pred_epsilon\n\n        # 7. compute x_t without \"random noise\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        prev_sample = alpha_prod_t_prev ** (0.5) * pred_original_sample + pred_sample_direction\n\n        if eta > 0:\n            if variance_noise is not None and generator is not None:\n                raise ValueError(\n                    \"Cannot pass both generator and variance_noise. Please make sure that either `generator` or\"\n                    \" `variance_noise` stays `None`.\"\n                )\n\n            if variance_noise is None:\n                variance_noise = randn_tensor(\n                    model_output.shape, generator=generator, device=model_output.device, dtype=model_output.dtype\n                )\n            variance = std_dev_t * variance_noise\n\n            prev_sample = prev_sample + variance\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return DDIMSchedulerOutput(prev_sample=prev_sample, pred_original_sample=pred_original_sample)\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.get_velocity\n    def get_velocity(\n        self, sample: torch.FloatTensor, noise: torch.FloatTensor, timesteps: torch.IntTensor\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as sample\n        alphas_cumprod = self.alphas_cumprod.to(device=sample.device, dtype=sample.dtype)\n        timesteps = timesteps.to(sample.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(sample.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(sample.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        velocity = sqrt_alpha_prod * noise - sqrt_one_minus_alpha_prod * sample\n        return velocity\n\n 
   def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
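The `skip_type` argument added to `DDIMScheduler` in this copy (it is not a standard upstream option) changes how `set_timesteps` spaces the inference timesteps: `"uniform"` spreads them evenly over `[0, num_train_timesteps - 1]`, while `"quad"` squares the step index so timesteps cluster near `t = 0`. A small sketch of the difference, assuming the package is importable as `diffusers`:

```python
# Sketch comparing the two timestep spacings exposed by the local `skip_type` option.
from diffusers.schedulers import DDIMScheduler

uniform = DDIMScheduler(num_train_timesteps=1000, skip_type="uniform")
uniform.set_timesteps(10)
print(uniform.timesteps)  # tensor([999, 888, 777, ..., 0]) - evenly spaced

quad = DDIMScheduler(num_train_timesteps=1000, skip_type="quad")
quad.set_timesteps(10)
print(quad.timesteps)     # quadratic spacing, much denser close to t = 0
```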
  {
    "path": "diffusers/schedulers/scheduling_ddim_flax.py",
    "content": "# Copyright 2023 Stanford University Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This code is strongly influenced by https://github.com/pesser/pytorch_diffusion\n# and https://github.com/hojonathanho/diffusion\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport flax\nimport jax.numpy as jnp\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils_flax import (\n    CommonSchedulerState,\n    FlaxKarrasDiffusionSchedulers,\n    FlaxSchedulerMixin,\n    FlaxSchedulerOutput,\n    add_noise_common,\n    get_velocity_common,\n)\n\n\n@flax.struct.dataclass\nclass DDIMSchedulerState:\n    common: CommonSchedulerState\n    final_alpha_cumprod: jnp.ndarray\n\n    # setable values\n    init_noise_sigma: jnp.ndarray\n    timesteps: jnp.ndarray\n    num_inference_steps: Optional[int] = None\n\n    @classmethod\n    def create(\n        cls,\n        common: CommonSchedulerState,\n        final_alpha_cumprod: jnp.ndarray,\n        init_noise_sigma: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ):\n        return cls(\n            common=common,\n            final_alpha_cumprod=final_alpha_cumprod,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n\n@dataclass\nclass FlaxDDIMSchedulerOutput(FlaxSchedulerOutput):\n    state: DDIMSchedulerState\n\n\nclass FlaxDDIMScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    Denoising diffusion implicit models is a scheduler that extends the denoising procedure introduced in denoising\n    diffusion probabilistic models (DDPMs) with non-Markovian guidance.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2010.02502\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`jnp.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample between -1 and 1 for numerical stability.\n        set_alpha_to_one (`bool`, default `True`):\n            each diffusion step uses the value of alphas product at that step and at the previous one. For the final\n            step there is no previous alpha. When this option is `True` the previous alpha product is fixed to `1`,\n            otherwise it uses the value of alpha at step 0.\n        steps_offset (`int`, default `0`):\n            an offset added to the inference steps. You can use a combination of `offset=1` and\n            `set_alpha_to_one=False`, to make the last step use step 0 for the previous alpha product, as done in\n            stable diffusion.\n        prediction_type (`str`, default `epsilon`):\n            indicates whether the model predicts the noise (epsilon), or the samples. One of `epsilon`, `sample`.\n            `v-prediction` is not supported for this scheduler.\n        dtype (`jnp.dtype`, *optional*, defaults to `jnp.float32`):\n            the `dtype` used for params and computation.\n    \"\"\"\n\n    _compatibles = [e.name for e in FlaxKarrasDiffusionSchedulers]\n\n    dtype: jnp.dtype\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[jnp.ndarray] = None,\n        set_alpha_to_one: bool = True,\n        steps_offset: int = 0,\n        prediction_type: str = \"epsilon\",\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        self.dtype = dtype\n\n    def create_state(self, common: Optional[CommonSchedulerState] = None) -> DDIMSchedulerState:\n        if common is None:\n            common = CommonSchedulerState.create(self)\n\n        # At every step in ddim, we are looking into the previous alphas_cumprod\n        # For the final step, there is no previous alphas_cumprod because we are already at 0\n        # `set_alpha_to_one` decides whether we set this parameter simply to one or\n        # whether we use the final alpha of the \"non-previous\" one.\n        final_alpha_cumprod = (\n            jnp.array(1.0, dtype=self.dtype) if self.config.set_alpha_to_one else common.alphas_cumprod[0]\n        )\n\n        # standard deviation of the initial noise distribution\n        init_noise_sigma = jnp.array(1.0, dtype=self.dtype)\n\n        timesteps = jnp.arange(0, self.config.num_train_timesteps).round()[::-1]\n\n        return DDIMSchedulerState.create(\n            common=common,\n            final_alpha_cumprod=final_alpha_cumprod,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n    def scale_model_input(\n        self, state: DDIMSchedulerState, sample: jnp.ndarray, timestep: Optional[int] = None\n    ) -> jnp.ndarray:\n        \"\"\"\n        Args:\n            state (`PNDMSchedulerState`): the `FlaxPNDMScheduler` state data class instance.\n            sample (`jnp.ndarray`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `jnp.ndarray`: scaled input sample\n  
      \"\"\"\n        return sample\n\n    def set_timesteps(\n        self, state: DDIMSchedulerState, num_inference_steps: int, shape: Tuple = ()\n    ) -> DDIMSchedulerState:\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            state (`DDIMSchedulerState`):\n                the `FlaxDDIMScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n        step_ratio = self.config.num_train_timesteps // num_inference_steps\n        # creates integer timesteps by multiplying by ratio\n        # rounding to avoid issues when num_inference_step is power of 3\n        timesteps = (jnp.arange(0, num_inference_steps) * step_ratio).round()[::-1] + self.config.steps_offset\n\n        return state.replace(\n            num_inference_steps=num_inference_steps,\n            timesteps=timesteps,\n        )\n\n    def _get_variance(self, state: DDIMSchedulerState, timestep, prev_timestep):\n        alpha_prod_t = state.common.alphas_cumprod[timestep]\n        alpha_prod_t_prev = jnp.where(\n            prev_timestep >= 0, state.common.alphas_cumprod[prev_timestep], state.final_alpha_cumprod\n        )\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        variance = (beta_prod_t_prev / beta_prod_t) * (1 - alpha_prod_t / alpha_prod_t_prev)\n\n        return variance\n\n    def step(\n        self,\n        state: DDIMSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n        eta: float = 0.0,\n        return_dict: bool = True,\n    ) -> Union[FlaxDDIMSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            state (`DDIMSchedulerState`): the `FlaxDDIMScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than FlaxDDIMSchedulerOutput class\n\n        Returns:\n            [`FlaxDDIMSchedulerOutput`] or `tuple`: [`FlaxDDIMSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if state.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        # See formulas (12) and (16) of DDIM paper https://arxiv.org/pdf/2010.02502.pdf\n        # Ideally, read DDIM paper in-detail understanding\n\n        # Notation (<variable name> -> <name in paper>\n        # - pred_noise_t -> e_theta(x_t, t)\n        # - pred_original_sample -> f_theta(x_t, t) or x_0\n        # - std_dev_t -> sigma_t\n        # - eta -> η\n        # - pred_sample_direction -> \"direction pointing to x_t\"\n        # - pred_prev_sample -> \"x_t-1\"\n\n        # 1. 
get previous step value (=t-1)\n        prev_timestep = timestep - self.config.num_train_timesteps // state.num_inference_steps\n\n        alphas_cumprod = state.common.alphas_cumprod\n        final_alpha_cumprod = state.final_alpha_cumprod\n\n        # 2. compute alphas, betas\n        alpha_prod_t = alphas_cumprod[timestep]\n        alpha_prod_t_prev = jnp.where(prev_timestep >= 0, alphas_cumprod[prev_timestep], final_alpha_cumprod)\n\n        beta_prod_t = 1 - alpha_prod_t\n\n        # 3. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n            pred_epsilon = model_output\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n            pred_epsilon = (sample - alpha_prod_t ** (0.5) * pred_original_sample) / beta_prod_t ** (0.5)\n        elif self.config.prediction_type == \"v_prediction\":\n            pred_original_sample = (alpha_prod_t**0.5) * sample - (beta_prod_t**0.5) * model_output\n            pred_epsilon = (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                \" `v_prediction`\"\n            )\n\n        # 4. compute variance: \"sigma_t(η)\" -> see formula (16)\n        # σ_t = sqrt((1 − α_t−1)/(1 − α_t)) * sqrt(1 − α_t/α_t−1)\n        variance = self._get_variance(state, timestep, prev_timestep)\n        std_dev_t = eta * variance ** (0.5)\n\n        # 5. compute \"direction pointing to x_t\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        pred_sample_direction = (1 - alpha_prod_t_prev - std_dev_t**2) ** (0.5) * pred_epsilon\n\n        # 6. compute x_t without \"random noise\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        prev_sample = alpha_prod_t_prev ** (0.5) * pred_original_sample + pred_sample_direction\n\n        if not return_dict:\n            return (prev_sample, state)\n\n        return FlaxDDIMSchedulerOutput(prev_sample=prev_sample, state=state)\n\n    def add_noise(\n        self,\n        state: DDIMSchedulerState,\n        original_samples: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        return add_noise_common(state.common, original_samples, noise, timesteps)\n\n    def get_velocity(\n        self,\n        state: DDIMSchedulerState,\n        sample: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        return get_velocity_common(state.common, sample, noise, timesteps)\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
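Unlike the PyTorch scheduler, the Flax variant above is stateless: all mutable values live in a `DDIMSchedulerState` that every call takes and returns. A minimal sketch of that state-threading pattern, using random data in place of a real denoising model (the "model output" below is a placeholder, not a trained network):

```python
# Sketch of the functional state-threading pattern; the model prediction is a placeholder.
import jax
from diffusers.schedulers import FlaxDDIMScheduler

scheduler = FlaxDDIMScheduler(num_train_timesteps=1000)
state = scheduler.create_state()
state = scheduler.set_timesteps(state, num_inference_steps=50, shape=(1, 3, 64, 64))

sample = jax.random.normal(jax.random.PRNGKey(0), (1, 3, 64, 64))
for t in state.timesteps:
    model_output = sample  # placeholder for the model's predicted noise (epsilon)
    sample, state = scheduler.step(state, model_output, t, sample, return_dict=False)
```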
  {
    "path": "diffusers/schedulers/scheduling_ddim_inverse.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This code is strongly influenced by https://github.com/pesser/pytorch_diffusion\n# and https://github.com/hojonathanho/diffusion\nimport math\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom diffusers.configuration_utils import ConfigMixin, register_to_config\nfrom diffusers.schedulers.scheduling_utils import SchedulerMixin\nfrom diffusers.utils import BaseOutput, deprecate\n\n\n@dataclass\n# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->DDIM\nclass DDIMSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DDIMInverseScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    DDIMInverseScheduler is the reverse scheduler of [`DDIMScheduler`].\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. 
They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2010.02502\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample for numerical stability.\n        clip_sample_range (`float`, default `1.0`):\n            the maximum magnitude for sample clipping. Valid only when `clip_sample=True`.\n        set_alpha_to_zero (`bool`, default `True`):\n            each diffusion step uses the value of alphas product at that step and at the previous one. For the final\n            step there is no previous alpha. When this option is `True` the previous alpha product is fixed to `0`,\n            otherwise it uses the value of alpha at step `num_train_timesteps - 1`.\n        steps_offset (`int`, default `0`):\n            an offset added to the inference steps. You can use a combination of `offset=1` and\n            `set_alpha_to_zero=False`, to make the last step use step `num_train_timesteps - 1` for the previous alpha\n            product.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n    \"\"\"\n\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        clip_sample: bool = True,\n        set_alpha_to_zero: bool = True,\n        steps_offset: int = 0,\n        prediction_type: str = \"epsilon\",\n        clip_sample_range: float = 1.0,\n        **kwargs,\n    ):\n        if kwargs.get(\"set_alpha_to_one\", None) is not None:\n            deprecation_message = (\n                \"The `set_alpha_to_one` argument is deprecated. 
Please use `set_alpha_to_zero` instead.\"\n            )\n            deprecate(\"set_alpha_to_one\", \"1.0.0\", deprecation_message, standard_warn=False)\n            set_alpha_to_zero = kwargs[\"set_alpha_to_one\"]\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        # At every step in inverted ddim, we are looking into the next alphas_cumprod\n        # For the final step, there is no next alphas_cumprod, and the index is out of bounds\n        # `set_alpha_to_zero` decides whether we set this parameter simply to zero\n        # in this case, self.step() just output the predicted noise\n        # or whether we use the final alpha of the \"non-previous\" one.\n        self.final_alpha_cumprod = torch.tensor(0.0) if set_alpha_to_zero else self.alphas_cumprod[-1]\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # setable values\n        self.num_inference_steps = None\n        self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps).copy().astype(np.int64))\n\n    # Copied from diffusers.schedulers.scheduling_ddim.DDIMScheduler.scale_model_input\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n\n        if num_inference_steps > self.config.num_train_timesteps:\n            raise ValueError(\n                f\"`num_inference_steps`: {num_inference_steps} cannot be larger than `self.config.train_timesteps`:\"\n                f\" {self.config.num_train_timesteps} as the unet model trained with this scheduler can only handle\"\n                f\" maximal {self.config.num_train_timesteps} timesteps.\"\n            )\n\n        self.num_inference_steps = num_inference_steps\n        step_ratio = self.config.num_train_timesteps // self.num_inference_steps\n        # creates integer timesteps by multiplying by ratio\n        # casting to int to avoid issues when num_inference_step is power of 3\n        timesteps = (np.arange(0, num_inference_steps) * step_ratio).round().copy().astype(np.int64)\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n        self.timesteps += self.config.steps_offset\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        eta: float = 0.0,\n        use_clipped_model_output: bool = False,\n        variance_noise: Optional[torch.FloatTensor] = None,\n        return_dict: bool = True,\n    ) -> Union[DDIMSchedulerOutput, Tuple]:\n        # 1. get previous step value (=t+1)\n        prev_timestep = timestep + self.config.num_train_timesteps // self.num_inference_steps\n\n        # 2. compute alphas, betas\n        # change original implementation to exactly match noise levels for analogous forward process\n        alpha_prod_t = self.alphas_cumprod[timestep]\n        alpha_prod_t_prev = (\n            self.alphas_cumprod[prev_timestep]\n            if prev_timestep < self.config.num_train_timesteps\n            else self.final_alpha_cumprod\n        )\n\n        beta_prod_t = 1 - alpha_prod_t\n\n        # 3. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n            pred_epsilon = model_output\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n            pred_epsilon = (sample - alpha_prod_t ** (0.5) * pred_original_sample) / beta_prod_t ** (0.5)\n        elif self.config.prediction_type == \"v_prediction\":\n            pred_original_sample = (alpha_prod_t**0.5) * sample - (beta_prod_t**0.5) * model_output\n            pred_epsilon = (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                \" `v_prediction`\"\n            )\n\n        # 4. Clip or threshold \"predicted x_0\"\n        if self.config.clip_sample:\n            pred_original_sample = pred_original_sample.clamp(\n                -self.config.clip_sample_range, self.config.clip_sample_range\n            )\n\n        # 5. 
compute \"direction pointing to x_t\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        pred_sample_direction = (1 - alpha_prod_t_prev) ** (0.5) * pred_epsilon\n\n        # 6. compute x_t without \"random noise\" of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        prev_sample = alpha_prod_t_prev ** (0.5) * pred_original_sample + pred_sample_direction\n\n        if not return_dict:\n            return (prev_sample, pred_original_sample)\n        return DDIMSchedulerOutput(prev_sample=prev_sample, pred_original_sample=pred_original_sample)\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
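The inverse scheduler above walks the chain forward (each `step` maps `t` to `t + step_ratio`), which is how DDIM inversion recovers a noise latent from a clean one. A standalone sketch with a placeholder epsilon prediction (in practice this would come from a UNet conditioned on the source prompt):

```python
# Sketch of driving the inverse scheduler on its own; the zero epsilon is a placeholder.
import torch
from diffusers.schedulers import DDIMInverseScheduler

scheduler = DDIMInverseScheduler(num_train_timesteps=1000)
scheduler.set_timesteps(50)  # timesteps run 0, 20, 40, ..., 980 (forward in noise level)

latent = torch.randn(1, 4, 64, 64)  # stand-in for an encoded (clean) image latent
for t in scheduler.timesteps:
    noise_pred = torch.zeros_like(latent)
    latent = scheduler.step(noise_pred, t, latent).prev_sample
```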
  {
    "path": "diffusers/schedulers/scheduling_ddpm.py",
    "content": "# Copyright 2023 UC Berkeley Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/ermongroup/ddim\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin\n\n\n@dataclass\nclass DDPMSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DDPMScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Denoising diffusion probabilistic models (DDPMs) explores the connections between denoising score matching and\n    Langevin dynamics sampling.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. 
They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2006.11239\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, `squaredcos_cap_v2` or `sigmoid`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        variance_type (`str`):\n            options to clip the variance used when adding noise to the denoised sample. Choose from `fixed_small`,\n            `fixed_small_log`, `fixed_large`, `fixed_large_log`, `learned` or `learned_range`.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample for numerical stability.\n        clip_sample_range (`float`, default `1.0`):\n            the maximum magnitude for sample clipping. Valid only when `clip_sample=True`.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n            stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487). Valid only when `thresholding=True`.\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. 
Valid only when `thresholding=True`.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        variance_type: str = \"fixed_small\",\n        clip_sample: bool = True,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        clip_sample_range: float = 1.0,\n        sample_max_value: float = 1.0,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        elif beta_schedule == \"sigmoid\":\n            # GeoDiff sigmoid schedule\n            betas = torch.linspace(-6, 6, num_train_timesteps)\n            self.betas = torch.sigmoid(betas) * (beta_end - beta_start) + beta_start\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        self.one = torch.tensor(1.0)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # setable values\n        self.custom_timesteps = False\n        self.num_inference_steps = None\n        self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy())\n\n        self.variance_type = variance_type\n\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(\n        self,\n        num_inference_steps: Optional[int] = None,\n        device: Union[str, torch.device] = None,\n        timesteps: Optional[List[int]] = None,\n    ):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`Optional[int]`):\n                the number of diffusion steps used when generating samples with a pre-trained model. 
If passed, then\n                `timesteps` must be `None`.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps are moved to.\n            custom_timesteps (`List[int]`, optional):\n                custom timesteps used to support arbitrary spacing between timesteps. If `None`, then the default\n                timestep spacing strategy of equal spacing between timesteps is used. If passed, `num_inference_steps`\n                must be `None`.\n\n        \"\"\"\n        if num_inference_steps is not None and timesteps is not None:\n            raise ValueError(\"Can only pass one of `num_inference_steps` or `custom_timesteps`.\")\n\n        if timesteps is not None:\n            for i in range(1, len(timesteps)):\n                if timesteps[i] >= timesteps[i - 1]:\n                    raise ValueError(\"`custom_timesteps` must be in descending order.\")\n\n            if timesteps[0] >= self.config.num_train_timesteps:\n                raise ValueError(\n                    f\"`timesteps` must start before `self.config.train_timesteps`:\"\n                    f\" {self.config.num_train_timesteps}.\"\n                )\n\n            timesteps = np.array(timesteps, dtype=np.int64)\n            self.custom_timesteps = True\n        else:\n            if num_inference_steps > self.config.num_train_timesteps:\n                raise ValueError(\n                    f\"`num_inference_steps`: {num_inference_steps} cannot be larger than `self.config.train_timesteps`:\"\n                    f\" {self.config.num_train_timesteps} as the unet model trained with this scheduler can only handle\"\n                    f\" maximal {self.config.num_train_timesteps} timesteps.\"\n                )\n\n            self.num_inference_steps = num_inference_steps\n\n            step_ratio = self.config.num_train_timesteps // self.num_inference_steps\n            timesteps = (np.arange(0, num_inference_steps) * step_ratio).round()[::-1].copy().astype(np.int64)\n            self.custom_timesteps = False\n\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n    def _get_variance(self, t, predicted_variance=None, variance_type=None):\n        prev_t = self.previous_timestep(t)\n\n        alpha_prod_t = self.alphas_cumprod[t]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_t] if prev_t >= 0 else self.one\n        current_beta_t = 1 - alpha_prod_t / alpha_prod_t_prev\n\n        # For t > 0, compute predicted variance βt (see formula (6) and (7) from https://arxiv.org/pdf/2006.11239.pdf)\n        # and sample from it to get previous sample\n        # x_{t-1} ~ N(pred_prev_sample, variance) == add variance to pred_sample\n        variance = (1 - alpha_prod_t_prev) / (1 - alpha_prod_t) * current_beta_t\n\n        # we always take the log of variance, so clamp it to ensure it's not 0\n        variance = torch.clamp(variance, min=1e-20)\n\n        if variance_type is None:\n            variance_type = self.config.variance_type\n\n        # hacks - were probably added for training stability\n        if variance_type == \"fixed_small\":\n            variance = variance\n        # for rl-diffuser https://arxiv.org/abs/2205.09991\n        elif variance_type == \"fixed_small_log\":\n            variance = torch.log(variance)\n            variance = torch.exp(0.5 * variance)\n        elif variance_type == \"fixed_large\":\n            variance = current_beta_t\n        elif variance_type == \"fixed_large_log\":\n            # Glide max_log\n         
   variance = torch.log(current_beta_t)\n        elif variance_type == \"learned\":\n            return predicted_variance\n        elif variance_type == \"learned_range\":\n            min_log = torch.log(variance)\n            max_log = torch.log(current_beta_t)\n            frac = (predicted_variance + 1) / 2\n            variance = frac * max_log + (1 - frac) * min_log\n\n        return variance\n\n    def _threshold_sample(self, sample: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        generator=None,\n        return_dict: bool = True,\n    ) -> Union[DDPMSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            generator: random number generator.\n            return_dict (`bool`): option for returning tuple rather than DDPMSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.DDPMSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.DDPMSchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        t = timestep\n\n        prev_t = self.previous_timestep(t)\n\n        if model_output.shape[1] == sample.shape[1] * 2 and self.variance_type in [\"learned\", \"learned_range\"]:\n            model_output, predicted_variance = torch.split(model_output, sample.shape[1], dim=1)\n        else:\n            predicted_variance = None\n\n        # 1. compute alphas, betas\n        alpha_prod_t = self.alphas_cumprod[t]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_t] if prev_t >= 0 else self.one\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n        current_alpha_t = alpha_prod_t / alpha_prod_t_prev\n        current_beta_t = 1 - current_alpha_t\n\n        # 2. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (15) from https://arxiv.org/pdf/2006.11239.pdf\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            pred_original_sample = (alpha_prod_t**0.5) * sample - (beta_prod_t**0.5) * model_output\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample` or\"\n                \" `v_prediction`  for the DDPMScheduler.\"\n            )\n\n        # 3. Clip or threshold \"predicted x_0\"\n        if self.config.thresholding:\n            pred_original_sample = self._threshold_sample(pred_original_sample)\n        elif self.config.clip_sample:\n            pred_original_sample = pred_original_sample.clamp(\n                -self.config.clip_sample_range, self.config.clip_sample_range\n            )\n\n        # 4. Compute coefficients for pred_original_sample x_0 and current sample x_t\n        # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_original_sample_coeff = (alpha_prod_t_prev ** (0.5) * current_beta_t) / beta_prod_t\n        current_sample_coeff = current_alpha_t ** (0.5) * beta_prod_t_prev / beta_prod_t\n\n        # 5. Compute predicted previous sample µ_t\n        # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_prev_sample = pred_original_sample_coeff * pred_original_sample + current_sample_coeff * sample\n\n        # 6. 
Add noise\n        variance = 0\n        if t > 0:\n            device = model_output.device\n            variance_noise = randn_tensor(\n                model_output.shape, generator=generator, device=device, dtype=model_output.dtype\n            )\n            if self.variance_type == \"fixed_small_log\":\n                variance = self._get_variance(t, predicted_variance=predicted_variance) * variance_noise\n            elif self.variance_type == \"learned_range\":\n                variance = self._get_variance(t, predicted_variance=predicted_variance)\n                variance = torch.exp(0.5 * variance) * variance_noise\n            else:\n                variance = (self._get_variance(t, predicted_variance=predicted_variance) ** 0.5) * variance_noise\n\n        pred_prev_sample = pred_prev_sample + variance\n\n        if not return_dict:\n            return (pred_prev_sample,)\n\n        return DDPMSchedulerOutput(prev_sample=pred_prev_sample, pred_original_sample=pred_original_sample)\n\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def get_velocity(\n        self, sample: torch.FloatTensor, noise: torch.FloatTensor, timesteps: torch.IntTensor\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as sample\n        alphas_cumprod = self.alphas_cumprod.to(device=sample.device, dtype=sample.dtype)\n        timesteps = timesteps.to(sample.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(sample.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(sample.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        velocity = sqrt_alpha_prod * noise - sqrt_one_minus_alpha_prod * sample\n        return velocity\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n\n    def previous_timestep(self, timestep):\n        if self.custom_timesteps:\n            index = (self.timesteps == timestep).nonzero(as_tuple=True)[0][0]\n            if index == self.timesteps.shape[0] - 1:\n                prev_t = torch.tensor(-1)\n            else:\n                prev_t = 
self.timesteps[index + 1]\n        else:\n            num_inference_steps = (\n                self.num_inference_steps if self.num_inference_steps else self.config.num_train_timesteps\n            )\n            prev_t = timestep - self.config.num_train_timesteps // num_inference_steps\n\n        return prev_t\n"
  },
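A minimal usage sketch for the `DDPMScheduler` defined above (not part of the repository): `set_timesteps` selects the inference timesteps, and `step` applies the formula (7) mean plus sampled variance once per timestep. It assumes the package exposes `DDPMScheduler` at the top level, as upstream diffusers does; the zero tensor stands in for a real epsilon-predicting model.

```python
# Sketch only: drive DDPMScheduler's set_timesteps/step loop with a stand-in model.
import torch
from diffusers import DDPMScheduler

scheduler = DDPMScheduler(num_train_timesteps=1000, beta_schedule="linear")
scheduler.set_timesteps(num_inference_steps=50)

generator = torch.Generator().manual_seed(0)
sample = torch.randn(1, 3, 64, 64, generator=generator)  # start from pure noise

for t in scheduler.timesteps:
    # hypothetical epsilon prediction; a real pipeline would call its UNet here
    model_output = torch.zeros_like(sample)
    out = scheduler.step(model_output, t, sample, generator=generator)
    sample = out.prev_sample  # x_{t-1}; out.pred_original_sample is the x_0 estimate
```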
  {
    "path": "diffusers/schedulers/scheduling_ddpm_flax.py",
    "content": "# Copyright 2023 UC Berkeley Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/ermongroup/ddim\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport flax\nimport jax\nimport jax.numpy as jnp\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils_flax import (\n    CommonSchedulerState,\n    FlaxKarrasDiffusionSchedulers,\n    FlaxSchedulerMixin,\n    FlaxSchedulerOutput,\n    add_noise_common,\n    get_velocity_common,\n)\n\n\n@flax.struct.dataclass\nclass DDPMSchedulerState:\n    common: CommonSchedulerState\n\n    # setable values\n    init_noise_sigma: jnp.ndarray\n    timesteps: jnp.ndarray\n    num_inference_steps: Optional[int] = None\n\n    @classmethod\n    def create(cls, common: CommonSchedulerState, init_noise_sigma: jnp.ndarray, timesteps: jnp.ndarray):\n        return cls(common=common, init_noise_sigma=init_noise_sigma, timesteps=timesteps)\n\n\n@dataclass\nclass FlaxDDPMSchedulerOutput(FlaxSchedulerOutput):\n    state: DDPMSchedulerState\n\n\nclass FlaxDDPMScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    Denoising diffusion probabilistic models (DDPMs) explores the connections between denoising score matching and\n    Langevin dynamics sampling.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2006.11239\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        variance_type (`str`):\n            options to clip the variance used when adding noise to the denoised sample. Choose from `fixed_small`,\n            `fixed_small_log`, `fixed_large`, `fixed_large_log`, `learned` or `learned_range`.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample between -1 and 1 for numerical stability.\n        prediction_type (`str`, default `epsilon`):\n            indicates whether the model predicts the noise (epsilon), or the samples. 
One of `epsilon`, `sample`.\n            `v-prediction` is not supported for this scheduler.\n        dtype (`jnp.dtype`, *optional*, defaults to `jnp.float32`):\n            the `dtype` used for params and computation.\n    \"\"\"\n\n    _compatibles = [e.name for e in FlaxKarrasDiffusionSchedulers]\n\n    dtype: jnp.dtype\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[jnp.ndarray] = None,\n        variance_type: str = \"fixed_small\",\n        clip_sample: bool = True,\n        prediction_type: str = \"epsilon\",\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        self.dtype = dtype\n\n    def create_state(self, common: Optional[CommonSchedulerState] = None) -> DDPMSchedulerState:\n        if common is None:\n            common = CommonSchedulerState.create(self)\n\n        # standard deviation of the initial noise distribution\n        init_noise_sigma = jnp.array(1.0, dtype=self.dtype)\n\n        timesteps = jnp.arange(0, self.config.num_train_timesteps).round()[::-1]\n\n        return DDPMSchedulerState.create(\n            common=common,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n    def scale_model_input(\n        self, state: DDPMSchedulerState, sample: jnp.ndarray, timestep: Optional[int] = None\n    ) -> jnp.ndarray:\n        \"\"\"\n        Args:\n            state (`DDPMSchedulerState`): the `FlaxDDPMScheduler` state data class instance.\n            sample (`jnp.ndarray`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `jnp.ndarray`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(\n        self, state: DDPMSchedulerState, num_inference_steps: int, shape: Tuple = ()\n    ) -> DDPMSchedulerState:\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. 

Supporting function to be run before inference.\n\n        Args:\n            state (`DDIMSchedulerState`):\n                the `FlaxDDPMScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n\n        step_ratio = self.config.num_train_timesteps // num_inference_steps\n        # creates integer timesteps by multiplying by ratio\n        # rounding to avoid issues when num_inference_step is power of 3\n        timesteps = (jnp.arange(0, num_inference_steps) * step_ratio).round()[::-1]\n\n        return state.replace(\n            num_inference_steps=num_inference_steps,\n            timesteps=timesteps,\n        )\n\n    def _get_variance(self, state: DDPMSchedulerState, t, predicted_variance=None, variance_type=None):\n        alpha_prod_t = state.common.alphas_cumprod[t]\n        alpha_prod_t_prev = jnp.where(t > 0, state.common.alphas_cumprod[t - 1], jnp.array(1.0, dtype=self.dtype))\n\n        # For t > 0, compute predicted variance βt (see formula (6) and (7) from https://arxiv.org/pdf/2006.11239.pdf)\n        # and sample from it to get previous sample\n        # x_{t-1} ~ N(pred_prev_sample, variance) == add variance to pred_sample\n        variance = (1 - alpha_prod_t_prev) / (1 - alpha_prod_t) * state.common.betas[t]\n\n        if variance_type is None:\n            variance_type = self.config.variance_type\n\n        # hacks - were probably added for training stability\n        if variance_type == \"fixed_small\":\n            variance = jnp.clip(variance, a_min=1e-20)\n        # for rl-diffuser https://arxiv.org/abs/2205.09991\n        elif variance_type == \"fixed_small_log\":\n            variance = jnp.log(jnp.clip(variance, a_min=1e-20))\n        elif variance_type == \"fixed_large\":\n            variance = state.common.betas[t]\n        elif variance_type == \"fixed_large_log\":\n            # Glide max_log\n            variance = jnp.log(state.common.betas[t])\n        elif variance_type == \"learned\":\n            return predicted_variance\n        elif variance_type == \"learned_range\":\n            min_log = variance\n            max_log = state.common.betas[t]\n            frac = (predicted_variance + 1) / 2\n            variance = frac * max_log + (1 - frac) * min_log\n\n        return variance\n\n    def step(\n        self,\n        state: DDPMSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n        key: Optional[jax.random.KeyArray] = None,\n        return_dict: bool = True,\n    ) -> Union[FlaxDDPMSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. 
Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            state (`DDPMSchedulerState`): the `FlaxDDPMScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            key (`jax.random.KeyArray`): a PRNG key.\n            return_dict (`bool`): option for returning tuple rather than FlaxDDPMSchedulerOutput class\n\n        Returns:\n            [`FlaxDDPMSchedulerOutput`] or `tuple`: [`FlaxDDPMSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        t = timestep\n\n        if key is None:\n            key = jax.random.PRNGKey(0)\n\n        if model_output.shape[1] == sample.shape[1] * 2 and self.config.variance_type in [\"learned\", \"learned_range\"]:\n            model_output, predicted_variance = jnp.split(model_output, sample.shape[1], axis=1)\n        else:\n            predicted_variance = None\n\n        # 1. compute alphas, betas\n        alpha_prod_t = state.common.alphas_cumprod[t]\n        alpha_prod_t_prev = jnp.where(t > 0, state.common.alphas_cumprod[t - 1], jnp.array(1.0, dtype=self.dtype))\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        # 2. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (15) from https://arxiv.org/pdf/2006.11239.pdf\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            pred_original_sample = (alpha_prod_t**0.5) * sample - (beta_prod_t**0.5) * model_output\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample` \"\n                \" for the FlaxDDPMScheduler.\"\n            )\n\n        # 3. Clip \"predicted x_0\"\n        if self.config.clip_sample:\n            pred_original_sample = jnp.clip(pred_original_sample, -1, 1)\n\n        # 4. Compute coefficients for pred_original_sample x_0 and current sample x_t\n        # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_original_sample_coeff = (alpha_prod_t_prev ** (0.5) * state.common.betas[t]) / beta_prod_t\n        current_sample_coeff = state.common.alphas[t] ** (0.5) * beta_prod_t_prev / beta_prod_t\n\n        # 5. Compute predicted previous sample µ_t\n        # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_prev_sample = pred_original_sample_coeff * pred_original_sample + current_sample_coeff * sample\n\n        # 6. 
Add noise\n        def random_variance():\n            split_key = jax.random.split(key, num=1)\n            noise = jax.random.normal(split_key, shape=model_output.shape, dtype=self.dtype)\n            return (self._get_variance(state, t, predicted_variance=predicted_variance) ** 0.5) * noise\n\n        variance = jnp.where(t > 0, random_variance(), jnp.zeros(model_output.shape, dtype=self.dtype))\n\n        pred_prev_sample = pred_prev_sample + variance\n\n        if not return_dict:\n            return (pred_prev_sample, state)\n\n        return FlaxDDPMSchedulerOutput(prev_sample=pred_prev_sample, state=state)\n\n    def add_noise(\n        self,\n        state: DDPMSchedulerState,\n        original_samples: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        return add_noise_common(state.common, original_samples, noise, timesteps)\n\n    def get_velocity(\n        self,\n        state: DDPMSchedulerState,\n        sample: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        return get_velocity_common(state.common, sample, noise, timesteps)\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
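The Flax variant above is stateless: configuration lives on the scheduler object, while the mutable values (timesteps, num_inference_steps) live in a `DDPMSchedulerState` that every call takes and returns. A minimal sketch, assuming `FlaxDDPMScheduler` is importable from the package top level as in upstream diffusers, with a zero tensor standing in for the model:

```python
# Sketch only: thread DDPMSchedulerState explicitly through set_timesteps/step.
import jax
import jax.numpy as jnp
from diffusers import FlaxDDPMScheduler

scheduler = FlaxDDPMScheduler(num_train_timesteps=1000)
state = scheduler.create_state()
state = scheduler.set_timesteps(state, num_inference_steps=50)

key = jax.random.PRNGKey(0)
key, init_key = jax.random.split(key)
sample = jax.random.normal(init_key, shape=(1, 3, 64, 64))

for t in state.timesteps:
    model_output = jnp.zeros_like(sample)  # stand-in for a real epsilon prediction
    key, step_key = jax.random.split(key)
    out = scheduler.step(state, model_output, t, sample, key=step_key)
    sample, state = out.prev_sample, out.state
```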
  {
    "path": "diffusers/schedulers/scheduling_deis_multistep.py",
    "content": "# Copyright 2023 FLAIR Lab and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: check https://arxiv.org/abs/2204.13902 and https://github.com/qsh-zh/deis for more info\n# The codebase is modified based on https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_dpmsolver_multistep.py\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DEISMultistepScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    DEIS (https://arxiv.org/abs/2204.13902) is a fast high order solver for diffusion ODEs. We slightly modify the\n    polynomial fitting formula in log-rho space instead of the original linear t space in DEIS paper. The modification\n    enjoys closed-form coefficients for exponential multistep update instead of replying on the numerical solver. More\n    variants of DEIS can be found in https://github.com/qsh-zh/deis.\n\n    Currently, we support the log-rho multistep DEIS. We recommend to use `solver_order=2 / 3` while `solver_order=1`\n    reduces to DDIM.\n\n    We also support the \"dynamic thresholding\" method in Imagen (https://arxiv.org/abs/2205.11487). For pixel-space\n    diffusion models, you can set `thresholding=True` to use the dynamic thresholding.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. 
They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        solver_order (`int`, default `2`):\n            the order of DEIS; can be `1` or `2` or `3`. We recommend to use `solver_order=2` for guided sampling, and\n            `solver_order=3` for unconditional sampling.\n        prediction_type (`str`, default `epsilon`):\n            indicates whether the model predicts the noise (epsilon), or the data / `x0`. One of `epsilon`, `sample`,\n            or `v-prediction`.\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n            stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487).\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. Valid only when `thresholding=True`\n        algorithm_type (`str`, default `deis`):\n            the algorithm type for the solver. current we support multistep deis, we will add other variants of DEIS in\n            the future\n        lower_order_final (`bool`, default `True`):\n            whether to use lower-order solvers in the final steps. Only valid for < 15 inference steps. 
We empirically\n            find this trick can stabilize the sampling of DEIS for steps < 15, especially for steps <= 10.\n\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[np.ndarray] = None,\n        solver_order: int = 2,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        sample_max_value: float = 1.0,\n        algorithm_type: str = \"deis\",\n        solver_type: str = \"logrho\",\n        lower_order_final: bool = True,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        # Currently we only support VP-type noise schedule\n        self.alpha_t = torch.sqrt(self.alphas_cumprod)\n        self.sigma_t = torch.sqrt(1 - self.alphas_cumprod)\n        self.lambda_t = torch.log(self.alpha_t) - torch.log(self.sigma_t)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # settings for DEIS\n        if algorithm_type not in [\"deis\"]:\n            if algorithm_type in [\"dpmsolver\", \"dpmsolver++\"]:\n                self.register_to_config(algorithm_type=\"deis\")\n            else:\n                raise NotImplementedError(f\"{algorithm_type} does is not implemented for {self.__class__}\")\n\n        if solver_type not in [\"logrho\"]:\n            if solver_type in [\"midpoint\", \"heun\", \"bh1\", \"bh2\"]:\n                self.register_to_config(solver_type=\"logrho\")\n            else:\n                raise NotImplementedError(f\"solver type {solver_type} does is not implemented for {self.__class__}\")\n\n        # setable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=np.float32)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.model_outputs = [None] * solver_order\n        self.lower_order_nums = 0\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        timesteps = (\n            np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps + 1)\n            .round()[::-1][:-1]\n            .copy()\n            .astype(np.int64)\n        )\n\n        # when num_inference_steps == num_train_timesteps, we can end up with\n        # duplicates in timesteps.\n        _, unique_indices = np.unique(timesteps, return_index=True)\n        timesteps = timesteps[np.sort(unique_indices)]\n\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n        self.num_inference_steps = len(timesteps)\n\n        self.model_outputs = [\n            None,\n        ] * self.config.solver_order\n        self.lower_order_nums = 0\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler._threshold_sample\n    def _threshold_sample(self, sample: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    def convert_model_output(\n        self, model_output: torch.FloatTensor, timestep: int, sample: torch.FloatTensor\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Convert the model output to the corresponding type that the algorithm DEIS needs.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created 
by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the converted model output.\n        \"\"\"\n        if self.config.prediction_type == \"epsilon\":\n            alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n            x0_pred = (sample - sigma_t * model_output) / alpha_t\n        elif self.config.prediction_type == \"sample\":\n            x0_pred = model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n            x0_pred = alpha_t * sample - sigma_t * model_output\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                \" `v_prediction` for the DEISMultistepScheduler.\"\n            )\n\n        if self.config.thresholding:\n            x0_pred = self._threshold_sample(x0_pred)\n\n        if self.config.algorithm_type == \"deis\":\n            alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n            return (sample - alpha_t * x0_pred) / sigma_t\n        else:\n            raise NotImplementedError(\"only support log-rho multistep deis now\")\n\n    def deis_first_order_update(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the first-order DEIS (equivalent to DDIM).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        lambda_t, lambda_s = self.lambda_t[prev_timestep], self.lambda_t[timestep]\n        alpha_t, alpha_s = self.alpha_t[prev_timestep], self.alpha_t[timestep]\n        sigma_t, _ = self.sigma_t[prev_timestep], self.sigma_t[timestep]\n        h = lambda_t - lambda_s\n        if self.config.algorithm_type == \"deis\":\n            x_t = (alpha_t / alpha_s) * sample - (sigma_t * (torch.exp(h) - 1.0)) * model_output\n        else:\n            raise NotImplementedError(\"only support log-rho multistep deis now\")\n        return x_t\n\n    def multistep_deis_second_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the second-order multistep DEIS.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1 = prev_timestep, 
timestep_list[-1], timestep_list[-2]\n        m0, m1 = model_output_list[-1], model_output_list[-2]\n        alpha_t, alpha_s0, alpha_s1 = self.alpha_t[t], self.alpha_t[s0], self.alpha_t[s1]\n        sigma_t, sigma_s0, sigma_s1 = self.sigma_t[t], self.sigma_t[s0], self.sigma_t[s1]\n\n        rho_t, rho_s0, rho_s1 = sigma_t / alpha_t, sigma_s0 / alpha_s0, sigma_s1 / alpha_s1\n\n        if self.config.algorithm_type == \"deis\":\n\n            def ind_fn(t, b, c):\n                # Integrate[(log(t) - log(c)) / (log(b) - log(c)), {t}]\n                return t * (-np.log(c) + np.log(t) - 1) / (np.log(b) - np.log(c))\n\n            coef1 = ind_fn(rho_t, rho_s0, rho_s1) - ind_fn(rho_s0, rho_s0, rho_s1)\n            coef2 = ind_fn(rho_t, rho_s1, rho_s0) - ind_fn(rho_s0, rho_s1, rho_s0)\n\n            x_t = alpha_t * (sample / alpha_s0 + coef1 * m0 + coef2 * m1)\n            return x_t\n        else:\n            raise NotImplementedError(\"only support log-rho multistep deis now\")\n\n    def multistep_deis_third_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the third-order multistep DEIS.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1, s2 = prev_timestep, timestep_list[-1], timestep_list[-2], timestep_list[-3]\n        m0, m1, m2 = model_output_list[-1], model_output_list[-2], model_output_list[-3]\n        alpha_t, alpha_s0, alpha_s1, alpha_s2 = self.alpha_t[t], self.alpha_t[s0], self.alpha_t[s1], self.alpha_t[s2]\n        sigma_t, sigma_s0, sigma_s1, simga_s2 = self.sigma_t[t], self.sigma_t[s0], self.sigma_t[s1], self.sigma_t[s2]\n        rho_t, rho_s0, rho_s1, rho_s2 = (\n            sigma_t / alpha_t,\n            sigma_s0 / alpha_s0,\n            sigma_s1 / alpha_s1,\n            simga_s2 / alpha_s2,\n        )\n\n        if self.config.algorithm_type == \"deis\":\n\n            def ind_fn(t, b, c, d):\n                # Integrate[(log(t) - log(c))(log(t) - log(d)) / (log(b) - log(c))(log(b) - log(d)), {t}]\n                numerator = t * (\n                    np.log(c) * (np.log(d) - np.log(t) + 1)\n                    - np.log(d) * np.log(t)\n                    + np.log(d)\n                    + np.log(t) ** 2\n                    - 2 * np.log(t)\n                    + 2\n                )\n                denominator = (np.log(b) - np.log(c)) * (np.log(b) - np.log(d))\n                return numerator / denominator\n\n            coef1 = ind_fn(rho_t, rho_s0, rho_s1, rho_s2) - ind_fn(rho_s0, rho_s0, rho_s1, rho_s2)\n            coef2 = ind_fn(rho_t, rho_s1, rho_s2, rho_s0) - ind_fn(rho_s0, rho_s1, rho_s2, rho_s0)\n            coef3 = ind_fn(rho_t, rho_s2, rho_s0, rho_s1) - ind_fn(rho_s0, rho_s2, rho_s0, rho_s1)\n\n            x_t = alpha_t * (sample / alpha_s0 + coef1 * m0 + coef2 * m1 + coef3 * m2)\n\n            return x_t\n        
else:\n            raise NotImplementedError(\"only support log-rho multistep deis now\")\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the multistep DEIS.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero()\n        if len(step_index) == 0:\n            step_index = len(self.timesteps) - 1\n        else:\n            step_index = step_index.item()\n        prev_timestep = 0 if step_index == len(self.timesteps) - 1 else self.timesteps[step_index + 1]\n        lower_order_final = (\n            (step_index == len(self.timesteps) - 1) and self.config.lower_order_final and len(self.timesteps) < 15\n        )\n        lower_order_second = (\n            (step_index == len(self.timesteps) - 2) and self.config.lower_order_final and len(self.timesteps) < 15\n        )\n\n        model_output = self.convert_model_output(model_output, timestep, sample)\n        for i in range(self.config.solver_order - 1):\n            self.model_outputs[i] = self.model_outputs[i + 1]\n        self.model_outputs[-1] = model_output\n\n        if self.config.solver_order == 1 or self.lower_order_nums < 1 or lower_order_final:\n            prev_sample = self.deis_first_order_update(model_output, timestep, prev_timestep, sample)\n        elif self.config.solver_order == 2 or self.lower_order_nums < 2 or lower_order_second:\n            timestep_list = [self.timesteps[step_index - 1], timestep]\n            prev_sample = self.multistep_deis_second_order_update(\n                self.model_outputs, timestep_list, prev_timestep, sample\n            )\n        else:\n            timestep_list = [self.timesteps[step_index - 2], self.timesteps[step_index - 1], timestep]\n            prev_sample = self.multistep_deis_third_order_update(\n                self.model_outputs, timestep_list, prev_timestep, sample\n            )\n\n        if self.lower_order_nums < self.config.solver_order:\n            self.lower_order_nums += 1\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        
Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
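`DEISMultistepScheduler` above exposes the same `set_timesteps`/`step` API but keeps a short history of converted model outputs internally, so higher-order updates need no extra bookkeeping from the caller; it falls back to lower-order updates until enough history has accumulated. A minimal sketch, again with a hypothetical zero tensor standing in for the denoising model:

```python
# Sketch only: the multistep DEIS solver is driven like the single-step schedulers.
import torch
from diffusers import DEISMultistepScheduler

scheduler = DEISMultistepScheduler(solver_order=2, prediction_type="epsilon")
scheduler.set_timesteps(num_inference_steps=20)

sample = torch.randn(1, 3, 64, 64)
for t in scheduler.timesteps:
    model_output = torch.zeros_like(sample)  # stand-in for a real epsilon prediction
    sample = scheduler.step(model_output, t, sample).prev_sample
```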
  {
    "path": "diffusers/schedulers/scheduling_dpmsolver_multistep.py",
    "content": "# Copyright 2023 TSAIL Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/LuChengTHU/dpm-solver\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DPMSolverMultistepScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    DPM-Solver (and the improved version DPM-Solver++) is a fast dedicated high-order solver for diffusion ODEs with\n    the convergence order guarantee. Empirically, sampling by DPM-Solver with only 20 steps can generate high-quality\n    samples, and it can generate quite good samples even in only 10 steps.\n\n    For more details, see the original paper: https://arxiv.org/abs/2206.00927 and https://arxiv.org/abs/2211.01095\n\n    Currently, we support the multistep DPM-Solver for both noise prediction models and data prediction models. We\n    recommend to use `solver_order=2` for guided sampling, and `solver_order=3` for unconditional sampling.\n\n    We also support the \"dynamic thresholding\" method in Imagen (https://arxiv.org/abs/2205.11487). For pixel-space\n    diffusion models, you can set both `algorithm_type=\"dpmsolver++\"` and `thresholding=True` to use the dynamic\n    thresholding. Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n    stable-diffusion).\n\n    We also support the SDE variant of DPM-Solver and DPM-Solver++, which is a fast SDE solver for the reverse\n    diffusion SDE. Currently we only support the first-order and second-order solvers. 
We recommend using the\n    second-order `sde-dpmsolver++`.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        solver_order (`int`, default `2`):\n            the order of DPM-Solver; can be `1` or `2` or `3`. We recommend to use `solver_order=2` for guided\n            sampling, and `solver_order=3` for unconditional sampling.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            For pixel-space diffusion models, you can set both `algorithm_type=dpmsolver++` and `thresholding=True` to\n            use the dynamic thresholding. Note that the thresholding method is unsuitable for latent-space diffusion\n            models (such as stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487).\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. Valid only when `thresholding=True` and\n            `algorithm_type=\"dpmsolver++`.\n        algorithm_type (`str`, default `dpmsolver++`):\n            the algorithm type for the solver. Either `dpmsolver` or `dpmsolver++` or `sde-dpmsolver` or\n            `sde-dpmsolver++`. The `dpmsolver` type implements the algorithms in https://arxiv.org/abs/2206.00927, and\n            the `dpmsolver++` type implements the algorithms in https://arxiv.org/abs/2211.01095. We recommend to use\n            `dpmsolver++` or `sde-dpmsolver++` with `solver_order=2` for guided sampling (e.g. stable-diffusion).\n        solver_type (`str`, default `midpoint`):\n            the solver type for the second-order solver. Either `midpoint` or `heun`. The solver type slightly affects\n            the sample quality, especially for small number of steps. We empirically find that `midpoint` solvers are\n            slightly better, so we recommend to use the `midpoint` type.\n        lower_order_final (`bool`, default `True`):\n            whether to use lower-order solvers in the final steps. Only valid for < 15 inference steps. 
We empirically\n            find this trick can stabilize the sampling of DPM-Solver for steps < 15, especially for steps <= 10.\n        use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n             This parameter controls whether to use Karras sigmas (Karras et al. (2022) scheme) for step sizes in the\n             noise schedule during the sampling process. If True, the sigmas will be determined according to a sequence\n             of noise levels {σi} as defined in Equation (5) of the paper https://arxiv.org/pdf/2206.00364.pdf.\n        lambda_min_clipped (`float`, default `-inf`):\n            the clipping threshold for the minimum value of lambda(t) for numerical stability. This is critical for\n            cosine (squaredcos_cap_v2) noise schedule.\n        variance_type (`str`, *optional*):\n            Set to \"learned\" or \"learned_range\" for diffusion models that predict variance. For example, OpenAI's\n            guided-diffusion (https://github.com/openai/guided-diffusion) predicts both mean and variance of the\n            Gaussian distribution in the model's output. DPM-Solver only needs the \"mean\" output because it is based on\n            diffusion ODEs.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        solver_order: int = 2,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        sample_max_value: float = 1.0,\n        algorithm_type: str = \"dpmsolver++\",\n        solver_type: str = \"midpoint\",\n        lower_order_final: bool = True,\n        use_karras_sigmas: Optional[bool] = False,\n        lambda_min_clipped: float = -float(\"inf\"),\n        variance_type: Optional[str] = None,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        # Currently we only support VP-type noise schedule\n        self.alpha_t = torch.sqrt(self.alphas_cumprod)\n        self.sigma_t = torch.sqrt(1 - self.alphas_cumprod)\n        self.lambda_t = torch.log(self.alpha_t) - torch.log(self.sigma_t)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # settings for DPM-Solver\n        if algorithm_type not in [\"dpmsolver\", \"dpmsolver++\", \"sde-dpmsolver\", \"sde-dpmsolver++\"]:\n            if algorithm_type == \"deis\":\n                self.register_to_config(algorithm_type=\"dpmsolver++\")\n            else:\n                raise NotImplementedError(f\"{algorithm_type} is not implemented for {self.__class__}\")\n\n        if solver_type not in [\"midpoint\", \"heun\"]:\n            if solver_type in [\"logrho\", \"bh1\", \"bh2\"]:\n                self.register_to_config(solver_type=\"midpoint\")\n            else:\n                raise NotImplementedError(f\"{solver_type} is not implemented for {self.__class__}\")\n\n        # settable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=np.float32)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.model_outputs = [None] * solver_order\n        self.lower_order_nums = 0\n        self.use_karras_sigmas = use_karras_sigmas\n\n    def set_timesteps(self, num_inference_steps: int = None, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved. If `None`, the timesteps are not moved.\n        \"\"\"\n        # Clipping the minimum of all lambda(t) for numerical stability.\n        # This is critical for cosine (squaredcos_cap_v2) noise schedule.\n        clipped_idx = torch.searchsorted(torch.flip(self.lambda_t, [0]), self.config.lambda_min_clipped)\n        timesteps = (\n            np.linspace(0, self.config.num_train_timesteps - 1 - clipped_idx, num_inference_steps + 1)\n            .round()[::-1][:-1]\n            .copy()\n            .astype(np.int64)\n        )\n\n        if self.use_karras_sigmas:\n            sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n            log_sigmas = np.log(sigmas)\n            sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=num_inference_steps)\n            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas]).round()\n            timesteps = np.flip(timesteps).copy().astype(np.int64)\n\n        # when num_inference_steps == num_train_timesteps, we can end up with\n        # duplicates in timesteps.\n        _, unique_indices = np.unique(timesteps, return_index=True)\n        timesteps = timesteps[np.sort(unique_indices)]\n\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n        self.num_inference_steps = len(timesteps)\n\n        self.model_outputs = [\n            None,\n        ] * self.config.solver_order\n        self.lower_order_nums = 0\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler._threshold_sample\n    def _threshold_sample(self, sample: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        
prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._sigma_to_t\n    def _sigma_to_t(self, sigma, log_sigmas):\n        # get log sigma\n        log_sigma = np.log(sigma)\n\n        # get distribution\n        dists = log_sigma - log_sigmas[:, np.newaxis]\n\n        # get sigmas range\n        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = log_sigmas[low_idx]\n        high = log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = np.clip(w, 0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.reshape(sigma.shape)\n        return t\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._convert_to_karras\n    def _convert_to_karras(self, in_sigmas: torch.FloatTensor, num_inference_steps) -> torch.FloatTensor:\n        \"\"\"Constructs the noise schedule of Karras et al. (2022).\"\"\"\n\n        sigma_min: float = in_sigmas[-1].item()\n        sigma_max: float = in_sigmas[0].item()\n\n        rho = 7.0  # 7.0 is the value used in the paper\n        ramp = np.linspace(0, 1, num_inference_steps)\n        min_inv_rho = sigma_min ** (1 / rho)\n        max_inv_rho = sigma_max ** (1 / rho)\n        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho\n        return sigmas\n\n    def convert_model_output(\n        self, model_output: torch.FloatTensor, timestep: int, sample: torch.FloatTensor\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Convert the model output to the corresponding type that the algorithm (DPM-Solver / DPM-Solver++) needs.\n\n        DPM-Solver is designed to discretize an integral of the noise prediction model, and DPM-Solver++ is designed to\n        discretize an integral of the data prediction model. 
So we need to first convert the model output to the\n        corresponding type to match the algorithm.\n\n        Note that the algorithm type and the model type is decoupled. That is to say, we can use either DPM-Solver or\n        DPM-Solver++ for both noise prediction model and data prediction model.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the converted model output.\n        \"\"\"\n\n        # DPM-Solver++ needs to solve an integral of the data prediction model.\n        if self.config.algorithm_type in [\"dpmsolver++\", \"sde-dpmsolver++\"]:\n            if self.config.prediction_type == \"epsilon\":\n                # DPM-Solver and DPM-Solver++ only need the \"mean\" output.\n                if self.config.variance_type in [\"learned\", \"learned_range\"]:\n                    model_output = model_output[:, :3]\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * model_output) / alpha_t\n            elif self.config.prediction_type == \"sample\":\n                x0_pred = model_output\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = alpha_t * sample - sigma_t * model_output\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the DPMSolverMultistepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                x0_pred = self._threshold_sample(x0_pred)\n\n            return x0_pred\n\n        # DPM-Solver needs to solve an integral of the noise prediction model.\n        elif self.config.algorithm_type in [\"dpmsolver\", \"sde-dpmsolver\"]:\n            if self.config.prediction_type == \"epsilon\":\n                # DPM-Solver and DPM-Solver++ only need the \"mean\" output.\n                if self.config.variance_type in [\"learned\", \"learned_range\"]:\n                    epsilon = model_output[:, :3]\n                else:\n                    epsilon = model_output\n            elif self.config.prediction_type == \"sample\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = (sample - alpha_t * model_output) / sigma_t\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = alpha_t * model_output + sigma_t * sample\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the DPMSolverMultistepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * epsilon) / alpha_t\n                x0_pred = self._threshold_sample(x0_pred)\n                epsilon = (sample - alpha_t * x0_pred) / 
sigma_t\n\n            return epsilon\n\n    def dpm_solver_first_order_update(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n        noise: Optional[torch.FloatTensor] = None,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the first-order DPM-Solver (equivalent to DDIM).\n\n        See https://arxiv.org/abs/2206.00927 for the detailed derivation.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        lambda_t, lambda_s = self.lambda_t[prev_timestep], self.lambda_t[timestep]\n        alpha_t, alpha_s = self.alpha_t[prev_timestep], self.alpha_t[timestep]\n        sigma_t, sigma_s = self.sigma_t[prev_timestep], self.sigma_t[timestep]\n        h = lambda_t - lambda_s\n        if self.config.algorithm_type == \"dpmsolver++\":\n            x_t = (sigma_t / sigma_s) * sample - (alpha_t * (torch.exp(-h) - 1.0)) * model_output\n        elif self.config.algorithm_type == \"dpmsolver\":\n            x_t = (alpha_t / alpha_s) * sample - (sigma_t * (torch.exp(h) - 1.0)) * model_output\n        elif self.config.algorithm_type == \"sde-dpmsolver++\":\n            assert noise is not None\n            x_t = (\n                (sigma_t / sigma_s * torch.exp(-h)) * sample\n                + (alpha_t * (1 - torch.exp(-2.0 * h))) * model_output\n                + sigma_t * torch.sqrt(1.0 - torch.exp(-2 * h)) * noise\n            )\n        elif self.config.algorithm_type == \"sde-dpmsolver\":\n            assert noise is not None\n            x_t = (\n                (alpha_t / alpha_s) * sample\n                - 2.0 * (sigma_t * (torch.exp(h) - 1.0)) * model_output\n                + sigma_t * torch.sqrt(torch.exp(2 * h) - 1.0) * noise\n            )\n        return x_t\n\n    def multistep_dpm_solver_second_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n        noise: Optional[torch.FloatTensor] = None,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the second-order multistep DPM-Solver.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1 = prev_timestep, timestep_list[-1], timestep_list[-2]\n        m0, m1 = model_output_list[-1], model_output_list[-2]\n        lambda_t, lambda_s0, lambda_s1 = self.lambda_t[t], self.lambda_t[s0], self.lambda_t[s1]\n        alpha_t, alpha_s0 = self.alpha_t[t], self.alpha_t[s0]\n        sigma_t, 
sigma_s0 = self.sigma_t[t], self.sigma_t[s0]\n        h, h_0 = lambda_t - lambda_s0, lambda_s0 - lambda_s1\n        r0 = h_0 / h\n        D0, D1 = m0, (1.0 / r0) * (m0 - m1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2211.01095 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s0) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    - 0.5 * (alpha_t * (torch.exp(-h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s0) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1\n                )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - 0.5 * (sigma_t * (torch.exp(h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                )\n        elif self.config.algorithm_type == \"sde-dpmsolver++\":\n            assert noise is not None\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s0 * torch.exp(-h)) * sample\n                    + (alpha_t * (1 - torch.exp(-2.0 * h))) * D0\n                    + 0.5 * (alpha_t * (1 - torch.exp(-2.0 * h))) * D1\n                    + sigma_t * torch.sqrt(1.0 - torch.exp(-2 * h)) * noise\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s0 * torch.exp(-h)) * sample\n                    + (alpha_t * (1 - torch.exp(-2.0 * h))) * D0\n                    + (alpha_t * ((1.0 - torch.exp(-2.0 * h)) / (-2.0 * h) + 1.0)) * D1\n                    + sigma_t * torch.sqrt(1.0 - torch.exp(-2 * h)) * noise\n                )\n        elif self.config.algorithm_type == \"sde-dpmsolver\":\n            assert noise is not None\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - 2.0 * (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D1\n                    + sigma_t * torch.sqrt(torch.exp(2 * h) - 1.0) * noise\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - 2.0 * (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - 2.0 * (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                    + sigma_t * torch.sqrt(torch.exp(2 * h) - 1.0) * noise\n                )\n        return x_t\n\n    def multistep_dpm_solver_third_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> 
torch.FloatTensor:\n        \"\"\"\n        One step for the third-order multistep DPM-Solver.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1, s2 = prev_timestep, timestep_list[-1], timestep_list[-2], timestep_list[-3]\n        m0, m1, m2 = model_output_list[-1], model_output_list[-2], model_output_list[-3]\n        lambda_t, lambda_s0, lambda_s1, lambda_s2 = (\n            self.lambda_t[t],\n            self.lambda_t[s0],\n            self.lambda_t[s1],\n            self.lambda_t[s2],\n        )\n        alpha_t, alpha_s0 = self.alpha_t[t], self.alpha_t[s0]\n        sigma_t, sigma_s0 = self.sigma_t[t], self.sigma_t[s0]\n        h, h_0, h_1 = lambda_t - lambda_s0, lambda_s0 - lambda_s1, lambda_s1 - lambda_s2\n        r0, r1 = h_0 / h, h_1 / h\n        D0 = m0\n        D1_0, D1_1 = (1.0 / r0) * (m0 - m1), (1.0 / r1) * (m1 - m2)\n        D1 = D1_0 + (r0 / (r0 + r1)) * (D1_0 - D1_1)\n        D2 = (1.0 / (r0 + r1)) * (D1_0 - D1_1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            x_t = (\n                (sigma_t / sigma_s0) * sample\n                - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1\n                - (alpha_t * ((torch.exp(-h) - 1.0 + h) / h**2 - 0.5)) * D2\n            )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            x_t = (\n                (alpha_t / alpha_s0) * sample\n                - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                - (sigma_t * ((torch.exp(h) - 1.0 - h) / h**2 - 0.5)) * D2\n            )\n        return x_t\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        generator=None,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the multistep DPM-Solver.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero()\n        if len(step_index) == 0:\n            step_index = len(self.timesteps) - 1\n        else:\n            step_index = step_index.item()\n        prev_timestep = 0 if step_index == len(self.timesteps) - 1 else self.timesteps[step_index + 1]\n        lower_order_final = (\n            (step_index == len(self.timesteps) - 1) and self.config.lower_order_final and len(self.timesteps) < 15\n        )\n        lower_order_second = (\n            (step_index == len(self.timesteps) - 2) and self.config.lower_order_final and len(self.timesteps) < 15\n        )\n\n        model_output = self.convert_model_output(model_output, timestep, sample)\n        for i in range(self.config.solver_order - 1):\n            self.model_outputs[i] = self.model_outputs[i + 1]\n        self.model_outputs[-1] = model_output\n\n        if self.config.algorithm_type in [\"sde-dpmsolver\", \"sde-dpmsolver++\"]:\n            noise = randn_tensor(\n                model_output.shape, generator=generator, device=model_output.device, dtype=model_output.dtype\n            )\n        else:\n            noise = None\n\n        if self.config.solver_order == 1 or self.lower_order_nums < 1 or lower_order_final:\n            prev_sample = self.dpm_solver_first_order_update(\n                model_output, timestep, prev_timestep, sample, noise=noise\n            )\n        elif self.config.solver_order == 2 or self.lower_order_nums < 2 or lower_order_second:\n            timestep_list = [self.timesteps[step_index - 1], timestep]\n            prev_sample = self.multistep_dpm_solver_second_order_update(\n                self.model_outputs, timestep_list, prev_timestep, sample, noise=noise\n            )\n        else:\n            timestep_list = [self.timesteps[step_index - 2], self.timesteps[step_index - 1], timestep]\n            prev_sample = self.multistep_dpm_solver_third_order_update(\n                self.model_outputs, timestep_list, prev_timestep, sample\n            )\n\n        if self.lower_order_nums < self.config.solver_order:\n            self.lower_order_nums += 1\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, 
dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_dpmsolver_multistep_flax.py",
    "content": "# Copyright 2023 TSAIL Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/LuChengTHU/dpm-solver\n\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport flax\nimport jax\nimport jax.numpy as jnp\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils_flax import (\n    CommonSchedulerState,\n    FlaxKarrasDiffusionSchedulers,\n    FlaxSchedulerMixin,\n    FlaxSchedulerOutput,\n    add_noise_common,\n)\n\n\n@flax.struct.dataclass\nclass DPMSolverMultistepSchedulerState:\n    common: CommonSchedulerState\n    alpha_t: jnp.ndarray\n    sigma_t: jnp.ndarray\n    lambda_t: jnp.ndarray\n\n    # setable values\n    init_noise_sigma: jnp.ndarray\n    timesteps: jnp.ndarray\n    num_inference_steps: Optional[int] = None\n\n    # running values\n    model_outputs: Optional[jnp.ndarray] = None\n    lower_order_nums: Optional[jnp.int32] = None\n    prev_timestep: Optional[jnp.int32] = None\n    cur_sample: Optional[jnp.ndarray] = None\n\n    @classmethod\n    def create(\n        cls,\n        common: CommonSchedulerState,\n        alpha_t: jnp.ndarray,\n        sigma_t: jnp.ndarray,\n        lambda_t: jnp.ndarray,\n        init_noise_sigma: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ):\n        return cls(\n            common=common,\n            alpha_t=alpha_t,\n            sigma_t=sigma_t,\n            lambda_t=lambda_t,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n\n@dataclass\nclass FlaxDPMSolverMultistepSchedulerOutput(FlaxSchedulerOutput):\n    state: DPMSolverMultistepSchedulerState\n\n\nclass FlaxDPMSolverMultistepScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    DPM-Solver (and the improved version DPM-Solver++) is a fast dedicated high-order solver for diffusion ODEs with\n    the convergence order guarantee. Empirically, sampling by DPM-Solver with only 20 steps can generate high-quality\n    samples, and it can generate quite good samples even in only 10 steps.\n\n    For more details, see the original paper: https://arxiv.org/abs/2206.00927 and https://arxiv.org/abs/2211.01095\n\n    Currently, we support the multistep DPM-Solver for both noise prediction models and data prediction models. We\n    recommend to use `solver_order=2` for guided sampling, and `solver_order=3` for unconditional sampling.\n\n    We also support the \"dynamic thresholding\" method in Imagen (https://arxiv.org/abs/2205.11487). For pixel-space\n    diffusion models, you can set both `algorithm_type=\"dpmsolver++\"` and `thresholding=True` to use the dynamic\n    thresholding. 
Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n    stable-diffusion).\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2206.00927 and https://arxiv.org/abs/2211.01095\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        solver_order (`int`, default `2`):\n            the order of DPM-Solver; can be `1` or `2` or `3`. We recommend to use `solver_order=2` for guided\n            sampling, and `solver_order=3` for unconditional sampling.\n        prediction_type (`str`, default `epsilon`):\n            indicates whether the model predicts the noise (epsilon), or the data / `x0`. One of `epsilon`, `sample`,\n            or `v-prediction`.\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            For pixel-space diffusion models, you can set both `algorithm_type=dpmsolver++` and `thresholding=True` to\n            use the dynamic thresholding. Note that the thresholding method is unsuitable for latent-space diffusion\n            models (such as stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487).\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. Valid only when `thresholding=True` and\n            `algorithm_type=\"dpmsolver++`.\n        algorithm_type (`str`, default `dpmsolver++`):\n            the algorithm type for the solver. Either `dpmsolver` or `dpmsolver++`. The `dpmsolver` type implements the\n            algorithms in https://arxiv.org/abs/2206.00927, and the `dpmsolver++` type implements the algorithms in\n            https://arxiv.org/abs/2211.01095. We recommend to use `dpmsolver++` with `solver_order=2` for guided\n            sampling (e.g. stable-diffusion).\n        solver_type (`str`, default `midpoint`):\n            the solver type for the second-order solver. Either `midpoint` or `heun`. The solver type slightly affects\n            the sample quality, especially for small number of steps. We empirically find that `midpoint` solvers are\n            slightly better, so we recommend to use the `midpoint` type.\n        lower_order_final (`bool`, default `True`):\n            whether to use lower-order solvers in the final steps. Only valid for < 15 inference steps. 
We empirically\n            find this trick can stabilize the sampling of DPM-Solver for steps < 15, especially for steps <= 10.\n        dtype (`jnp.dtype`, *optional*, defaults to `jnp.float32`):\n            the `dtype` used for params and computation.\n    \"\"\"\n\n    _compatibles = [e.name for e in FlaxKarrasDiffusionSchedulers]\n\n    dtype: jnp.dtype\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[jnp.ndarray] = None,\n        solver_order: int = 2,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        sample_max_value: float = 1.0,\n        algorithm_type: str = \"dpmsolver++\",\n        solver_type: str = \"midpoint\",\n        lower_order_final: bool = True,\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        self.dtype = dtype\n\n    def create_state(self, common: Optional[CommonSchedulerState] = None) -> DPMSolverMultistepSchedulerState:\n        if common is None:\n            common = CommonSchedulerState.create(self)\n\n        # Currently we only support VP-type noise schedule\n        alpha_t = jnp.sqrt(common.alphas_cumprod)\n        sigma_t = jnp.sqrt(1 - common.alphas_cumprod)\n        lambda_t = jnp.log(alpha_t) - jnp.log(sigma_t)\n\n        # settings for DPM-Solver\n        if self.config.algorithm_type not in [\"dpmsolver\", \"dpmsolver++\"]:\n            raise NotImplementedError(f\"{self.config.algorithm_type} is not implemented for {self.__class__}\")\n        if self.config.solver_type not in [\"midpoint\", \"heun\"]:\n            raise NotImplementedError(f\"{self.config.solver_type} is not implemented for {self.__class__}\")\n\n        # standard deviation of the initial noise distribution\n        init_noise_sigma = jnp.array(1.0, dtype=self.dtype)\n\n        timesteps = jnp.arange(0, self.config.num_train_timesteps).round()[::-1]\n\n        return DPMSolverMultistepSchedulerState.create(\n            common=common,\n            alpha_t=alpha_t,\n            sigma_t=sigma_t,\n            lambda_t=lambda_t,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n    def set_timesteps(\n        self, state: DPMSolverMultistepSchedulerState, num_inference_steps: int, shape: Tuple\n    ) -> DPMSolverMultistepSchedulerState:\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            state (`DPMSolverMultistepSchedulerState`):\n                the `FlaxDPMSolverMultistepScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            shape (`Tuple`):\n                the shape of the samples to be generated.\n        \"\"\"\n\n        timesteps = (\n            jnp.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps + 1)\n            .round()[::-1][:-1]\n            .astype(jnp.int32)\n        )\n\n        # initial running values\n\n        model_outputs = jnp.zeros((self.config.solver_order,) + shape, dtype=self.dtype)\n        lower_order_nums = jnp.int32(0)\n        prev_timestep = jnp.int32(-1)\n        cur_sample = jnp.zeros(shape, dtype=self.dtype)\n\n        return state.replace(\n            num_inference_steps=num_inference_steps,\n            timesteps=timesteps,\n            model_outputs=model_outputs,\n            lower_order_nums=lower_order_nums,\n            prev_timestep=prev_timestep,\n            cur_sample=cur_sample,\n        )\n\n    def convert_model_output(\n        self,\n        state: DPMSolverMultistepSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n    ) -> jnp.ndarray:\n        \"\"\"\n        Convert the model output to the corresponding type that the algorithm (DPM-Solver / DPM-Solver++) needs.\n\n        DPM-Solver is designed to discretize an integral of the noise prediction model, and DPM-Solver++ is designed to\n        discretize an integral of the data prediction model. So we need to first convert the model output to the\n        corresponding type to match the algorithm.\n\n        Note that the algorithm type and the model type is decoupled. 
That is to say, we can use either DPM-Solver or\n        DPM-Solver++ for both noise prediction model and data prediction model.\n\n        Args:\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `jnp.ndarray`: the converted model output.\n        \"\"\"\n        # DPM-Solver++ needs to solve an integral of the data prediction model.\n        if self.config.algorithm_type == \"dpmsolver++\":\n            if self.config.prediction_type == \"epsilon\":\n                alpha_t, sigma_t = state.alpha_t[timestep], state.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * model_output) / alpha_t\n            elif self.config.prediction_type == \"sample\":\n                x0_pred = model_output\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = state.alpha_t[timestep], state.sigma_t[timestep]\n                x0_pred = alpha_t * sample - sigma_t * model_output\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, \"\n                    \" or `v_prediction` for the FlaxDPMSolverMultistepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                # Dynamic thresholding in https://arxiv.org/abs/2205.11487\n                dynamic_max_val = jnp.percentile(\n                    jnp.abs(x0_pred), self.config.dynamic_thresholding_ratio, axis=tuple(range(1, x0_pred.ndim))\n                )\n                dynamic_max_val = jnp.maximum(\n                    dynamic_max_val, self.config.sample_max_value * jnp.ones_like(dynamic_max_val)\n                )\n                x0_pred = jnp.clip(x0_pred, -dynamic_max_val, dynamic_max_val) / dynamic_max_val\n            return x0_pred\n        # DPM-Solver needs to solve an integral of the noise prediction model.\n        elif self.config.algorithm_type == \"dpmsolver\":\n            if self.config.prediction_type == \"epsilon\":\n                return model_output\n            elif self.config.prediction_type == \"sample\":\n                alpha_t, sigma_t = state.alpha_t[timestep], state.sigma_t[timestep]\n                epsilon = (sample - alpha_t * model_output) / sigma_t\n                return epsilon\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = state.alpha_t[timestep], state.sigma_t[timestep]\n                epsilon = alpha_t * model_output + sigma_t * sample\n                return epsilon\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, \"\n                    \" or `v_prediction` for the FlaxDPMSolverMultistepScheduler.\"\n                )\n\n    def dpm_solver_first_order_update(\n        self,\n        state: DPMSolverMultistepSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        prev_timestep: int,\n        sample: jnp.ndarray,\n    ) -> jnp.ndarray:\n        \"\"\"\n        One step for the first-order DPM-Solver (equivalent to DDIM).\n\n        See https://arxiv.org/abs/2206.00927 for the detailed derivation.\n\n        Args:\n            model_output 
(`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `jnp.ndarray`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0 = prev_timestep, timestep\n        m0 = model_output\n        lambda_t, lambda_s = state.lambda_t[t], state.lambda_t[s0]\n        alpha_t, alpha_s = state.alpha_t[t], state.alpha_t[s0]\n        sigma_t, sigma_s = state.sigma_t[t], state.sigma_t[s0]\n        h = lambda_t - lambda_s\n        if self.config.algorithm_type == \"dpmsolver++\":\n            x_t = (sigma_t / sigma_s) * sample - (alpha_t * (jnp.exp(-h) - 1.0)) * m0\n        elif self.config.algorithm_type == \"dpmsolver\":\n            x_t = (alpha_t / alpha_s) * sample - (sigma_t * (jnp.exp(h) - 1.0)) * m0\n        return x_t\n\n    def multistep_dpm_solver_second_order_update(\n        self,\n        state: DPMSolverMultistepSchedulerState,\n        model_output_list: jnp.ndarray,\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: jnp.ndarray,\n    ) -> jnp.ndarray:\n        \"\"\"\n        One step for the second-order multistep DPM-Solver.\n\n        Args:\n            model_output_list (`List[jnp.ndarray]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `jnp.ndarray`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1 = prev_timestep, timestep_list[-1], timestep_list[-2]\n        m0, m1 = model_output_list[-1], model_output_list[-2]\n        lambda_t, lambda_s0, lambda_s1 = state.lambda_t[t], state.lambda_t[s0], state.lambda_t[s1]\n        alpha_t, alpha_s0 = state.alpha_t[t], state.alpha_t[s0]\n        sigma_t, sigma_s0 = state.sigma_t[t], state.sigma_t[s0]\n        h, h_0 = lambda_t - lambda_s0, lambda_s0 - lambda_s1\n        r0 = h_0 / h\n        D0, D1 = m0, (1.0 / r0) * (m0 - m1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2211.01095 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s0) * sample\n                    - (alpha_t * (jnp.exp(-h) - 1.0)) * D0\n                    - 0.5 * (alpha_t * (jnp.exp(-h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s0) * sample\n                    - (alpha_t * (jnp.exp(-h) - 1.0)) * D0\n                    + (alpha_t * ((jnp.exp(-h) - 1.0) / h + 1.0)) * D1\n                )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - (sigma_t * (jnp.exp(h) - 1.0)) * D0\n                    - 0.5 * (sigma_t * (jnp.exp(h) 
- 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - (sigma_t * (jnp.exp(h) - 1.0)) * D0\n                    - (sigma_t * ((jnp.exp(h) - 1.0) / h - 1.0)) * D1\n                )\n        return x_t\n\n    def multistep_dpm_solver_third_order_update(\n        self,\n        state: DPMSolverMultistepSchedulerState,\n        model_output_list: jnp.ndarray,\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: jnp.ndarray,\n    ) -> jnp.ndarray:\n        \"\"\"\n        One step for the third-order multistep DPM-Solver.\n\n        Args:\n            model_output_list (`List[jnp.ndarray]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `jnp.ndarray`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1, s2 = prev_timestep, timestep_list[-1], timestep_list[-2], timestep_list[-3]\n        m0, m1, m2 = model_output_list[-1], model_output_list[-2], model_output_list[-3]\n        lambda_t, lambda_s0, lambda_s1, lambda_s2 = (\n            state.lambda_t[t],\n            state.lambda_t[s0],\n            state.lambda_t[s1],\n            state.lambda_t[s2],\n        )\n        alpha_t, alpha_s0 = state.alpha_t[t], state.alpha_t[s0]\n        sigma_t, sigma_s0 = state.sigma_t[t], state.sigma_t[s0]\n        h, h_0, h_1 = lambda_t - lambda_s0, lambda_s0 - lambda_s1, lambda_s1 - lambda_s2\n        r0, r1 = h_0 / h, h_1 / h\n        D0 = m0\n        D1_0, D1_1 = (1.0 / r0) * (m0 - m1), (1.0 / r1) * (m1 - m2)\n        D1 = D1_0 + (r0 / (r0 + r1)) * (D1_0 - D1_1)\n        D2 = (1.0 / (r0 + r1)) * (D1_0 - D1_1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            x_t = (\n                (sigma_t / sigma_s0) * sample\n                - (alpha_t * (jnp.exp(-h) - 1.0)) * D0\n                + (alpha_t * ((jnp.exp(-h) - 1.0) / h + 1.0)) * D1\n                - (alpha_t * ((jnp.exp(-h) - 1.0 + h) / h**2 - 0.5)) * D2\n            )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            x_t = (\n                (alpha_t / alpha_s0) * sample\n                - (sigma_t * (jnp.exp(h) - 1.0)) * D0\n                - (sigma_t * ((jnp.exp(h) - 1.0) / h - 1.0)) * D1\n                - (sigma_t * ((jnp.exp(h) - 1.0 - h) / h**2 - 0.5)) * D2\n            )\n        return x_t\n\n    def step(\n        self,\n        state: DPMSolverMultistepSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n        return_dict: bool = True,\n    ) -> Union[FlaxDPMSolverMultistepSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by DPM-Solver. 
Core function to propagate the diffusion process\n        from the learned model outputs (most often the predicted noise).\n\n        Args:\n            state (`DPMSolverMultistepSchedulerState`):\n                the `FlaxDPMSolverMultistepScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than FlaxDPMSolverMultistepSchedulerOutput class\n\n        Returns:\n            [`FlaxDPMSolverMultistepSchedulerOutput`] or `tuple`: [`FlaxDPMSolverMultistepSchedulerOutput`] if\n            `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if state.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        (step_index,) = jnp.where(state.timesteps == timestep, size=1)\n        step_index = step_index[0]\n\n        prev_timestep = jax.lax.select(step_index == len(state.timesteps) - 1, 0, state.timesteps[step_index + 1])\n\n        model_output = self.convert_model_output(state, model_output, timestep, sample)\n\n        model_outputs_new = jnp.roll(state.model_outputs, -1, axis=0)\n        model_outputs_new = model_outputs_new.at[-1].set(model_output)\n        state = state.replace(\n            model_outputs=model_outputs_new,\n            prev_timestep=prev_timestep,\n            cur_sample=sample,\n        )\n\n        def step_1(state: DPMSolverMultistepSchedulerState) -> jnp.ndarray:\n            return self.dpm_solver_first_order_update(\n                state,\n                state.model_outputs[-1],\n                state.timesteps[step_index],\n                state.prev_timestep,\n                state.cur_sample,\n            )\n\n        def step_23(state: DPMSolverMultistepSchedulerState) -> jnp.ndarray:\n            def step_2(state: DPMSolverMultistepSchedulerState) -> jnp.ndarray:\n                timestep_list = jnp.array([state.timesteps[step_index - 1], state.timesteps[step_index]])\n                return self.multistep_dpm_solver_second_order_update(\n                    state,\n                    state.model_outputs,\n                    timestep_list,\n                    state.prev_timestep,\n                    state.cur_sample,\n                )\n\n            def step_3(state: DPMSolverMultistepSchedulerState) -> jnp.ndarray:\n                timestep_list = jnp.array(\n                    [\n                        state.timesteps[step_index - 2],\n                        state.timesteps[step_index - 1],\n                        state.timesteps[step_index],\n                    ]\n                )\n                return self.multistep_dpm_solver_third_order_update(\n                    state,\n                    state.model_outputs,\n                    timestep_list,\n                    state.prev_timestep,\n                    state.cur_sample,\n                )\n\n            step_2_output = step_2(state)\n            step_3_output = step_3(state)\n\n            if self.config.solver_order == 2:\n                return step_2_output\n            elif self.config.lower_order_final and 
len(state.timesteps) < 15:\n                return jax.lax.select(\n                    state.lower_order_nums < 2,\n                    step_2_output,\n                    jax.lax.select(\n                        step_index == len(state.timesteps) - 2,\n                        step_2_output,\n                        step_3_output,\n                    ),\n                )\n            else:\n                return jax.lax.select(\n                    state.lower_order_nums < 2,\n                    step_2_output,\n                    step_3_output,\n                )\n\n        step_1_output = step_1(state)\n        step_23_output = step_23(state)\n\n        if self.config.solver_order == 1:\n            prev_sample = step_1_output\n\n        elif self.config.lower_order_final and len(state.timesteps) < 15:\n            prev_sample = jax.lax.select(\n                state.lower_order_nums < 1,\n                step_1_output,\n                jax.lax.select(\n                    step_index == len(state.timesteps) - 1,\n                    step_1_output,\n                    step_23_output,\n                ),\n            )\n\n        else:\n            prev_sample = jax.lax.select(\n                state.lower_order_nums < 1,\n                step_1_output,\n                step_23_output,\n            )\n\n        state = state.replace(\n            lower_order_nums=jnp.minimum(state.lower_order_nums + 1, self.config.solver_order),\n        )\n\n        if not return_dict:\n            return (prev_sample, state)\n\n        return FlaxDPMSolverMultistepSchedulerOutput(prev_sample=prev_sample, state=state)\n\n    def scale_model_input(\n        self, state: DPMSolverMultistepSchedulerState, sample: jnp.ndarray, timestep: Optional[int] = None\n    ) -> jnp.ndarray:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            state (`DPMSolverMultistepSchedulerState`):\n                the `FlaxDPMSolverMultistepScheduler` state data class instance.\n            sample (`jnp.ndarray`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `jnp.ndarray`: scaled input sample\n        \"\"\"\n        return sample\n\n    def add_noise(\n        self,\n        state: DPMSolverMultistepSchedulerState,\n        original_samples: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        return add_noise_common(state.common, original_samples, noise, timesteps)\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
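  {
    "path": "examples/schedulers/flax_dpmsolver_multistep_usage_sketch.py",
    "content": "# Hypothetical usage sketch (not part of the upstream diffusers sources). It illustrates the\n# state-passing loop that FlaxDPMSolverMultistepScheduler (defined above) expects: every call\n# returns a new immutable state. `apply_unet` stands in for any model returning an epsilon\n# prediction shaped like `sample`; the `set_timesteps(state, num_inference_steps, shape)`\n# signature is assumed to match the other Flax schedulers in this repository.\nimport jax\n\nfrom diffusers import FlaxDPMSolverMultistepScheduler\n\n\ndef denoise(apply_unet, rng, shape=(1, 4, 64, 64), num_inference_steps=25):\n    scheduler = FlaxDPMSolverMultistepScheduler(algorithm_type=\"dpmsolver++\", solver_order=2)\n    state = scheduler.create_state()\n    state = scheduler.set_timesteps(state, num_inference_steps, shape)\n\n    # DPM-Solver starts from unit-variance Gaussian noise (init_noise_sigma == 1.0)\n    sample = jax.random.normal(rng, shape)\n\n    for t in state.timesteps:\n        model_output = apply_unet(sample, t)\n        # `step(..., return_dict=False)` returns (prev_sample, new_state), as defined above\n        sample, state = scheduler.step(state, model_output, t, sample, return_dict=False)\n    return sample\n"
  },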
  {
    "path": "diffusers/schedulers/scheduling_dpmsolver_multistep_inverse.py",
    "content": "# Copyright 2023 TSAIL Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/LuChengTHU/dpm-solver\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DPMSolverMultistepInverseScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    DPMSolverMultistepInverseScheduler is the reverse scheduler of [`DPMSolverMultistepScheduler`].\n\n    We also support the \"dynamic thresholding\" method in Imagen (https://arxiv.org/abs/2205.11487). For pixel-space\n    diffusion models, you can set both `algorithm_type=\"dpmsolver++\"` and `thresholding=True` to use the dynamic\n    thresholding. Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n    stable-diffusion).\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. 
They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        solver_order (`int`, default `2`):\n            the order of DPM-Solver; can be `1` or `2` or `3`. We recommend to use `solver_order=2` for guided\n            sampling, and `solver_order=3` for unconditional sampling.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            For pixel-space diffusion models, you can set both `algorithm_type=dpmsolver++` and `thresholding=True` to\n            use the dynamic thresholding. Note that the thresholding method is unsuitable for latent-space diffusion\n            models (such as stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487).\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. Valid only when `thresholding=True` and\n            `algorithm_type=\"dpmsolver++`.\n        algorithm_type (`str`, default `dpmsolver++`):\n            the algorithm type for the solver. Either `dpmsolver` or `dpmsolver++` or `sde-dpmsolver` or\n            `sde-dpmsolver++`. The `dpmsolver` type implements the algorithms in https://arxiv.org/abs/2206.00927, and\n            the `dpmsolver++` type implements the algorithms in https://arxiv.org/abs/2211.01095. We recommend to use\n            `dpmsolver++` or `sde-dpmsolver++` with `solver_order=2` for guided sampling (e.g. stable-diffusion).\n        solver_type (`str`, default `midpoint`):\n            the solver type for the second-order solver. Either `midpoint` or `heun`. The solver type slightly affects\n            the sample quality, especially for small number of steps. We empirically find that `midpoint` solvers are\n            slightly better, so we recommend to use the `midpoint` type.\n        lower_order_final (`bool`, default `True`):\n            whether to use lower-order solvers in the final steps. Only valid for < 15 inference steps. 
We empirically\n            find this trick can stabilize the sampling of DPM-Solver for steps < 15, especially for steps <= 10.\n        use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n             This parameter controls whether to use Karras sigmas (Karras et al. (2022) scheme) for step sizes in the\n             noise schedule during the sampling process. If True, the sigmas will be determined according to a sequence\n             of noise levels {σi} as defined in Equation (5) of the paper https://arxiv.org/pdf/2206.00364.pdf.\n        lambda_min_clipped (`float`, default `-inf`):\n            the clipping threshold for the minimum value of lambda(t) for numerical stability. This is critical for\n            cosine (squaredcos_cap_v2) noise schedule.\n        variance_type (`str`, *optional*):\n            Set to \"learned\" or \"learned_range\" for diffusion models that predict variance. For example, OpenAI's\n            guided-diffusion (https://github.com/openai/guided-diffusion) predicts both mean and variance of the\n            Gaussian distribution in the model's output. DPM-Solver only needs the \"mean\" output because it is based on\n            diffusion ODEs. whether the model's output contains the predicted Gaussian variance. For example, OpenAI's\n            guided-diffusion (https://github.com/openai/guided-diffusion) predicts both mean and variance of the\n            Gaussian distribution in the model's output. DPM-Solver only needs the \"mean\" output because it is based on\n            diffusion ODEs.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        solver_order: int = 2,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        sample_max_value: float = 1.0,\n        algorithm_type: str = \"dpmsolver++\",\n        solver_type: str = \"midpoint\",\n        lower_order_final: bool = True,\n        use_karras_sigmas: Optional[bool] = False,\n        lambda_min_clipped: float = -float(\"inf\"),\n        variance_type: Optional[str] = None,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        # Currently we only support VP-type noise schedule\n        self.alpha_t = torch.sqrt(self.alphas_cumprod)\n        self.sigma_t = torch.sqrt(1 - self.alphas_cumprod)\n     
   self.lambda_t = torch.log(self.alpha_t) - torch.log(self.sigma_t)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # settings for DPM-Solver\n        if algorithm_type not in [\"dpmsolver\", \"dpmsolver++\", \"sde-dpmsolver\", \"sde-dpmsolver++\"]:\n            if algorithm_type == \"deis\":\n                self.register_to_config(algorithm_type=\"dpmsolver++\")\n            else:\n                raise NotImplementedError(f\"{algorithm_type} does is not implemented for {self.__class__}\")\n\n        if solver_type not in [\"midpoint\", \"heun\"]:\n            if solver_type in [\"logrho\", \"bh1\", \"bh2\"]:\n                self.register_to_config(solver_type=\"midpoint\")\n            else:\n                raise NotImplementedError(f\"{solver_type} does is not implemented for {self.__class__}\")\n\n        # setable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=np.float32).copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.model_outputs = [None] * solver_order\n        self.lower_order_nums = 0\n        self.use_karras_sigmas = use_karras_sigmas\n\n    def set_timesteps(self, num_inference_steps: int = None, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        # Clipping the minimum of all lambda(t) for numerical stability.\n        # This is critical for cosine (squaredcos_cap_v2) noise schedule.\n        clipped_idx = torch.searchsorted(torch.flip(self.lambda_t, [0]), self.lambda_min_clipped)\n        self.noisiest_timestep = self.config.num_train_timesteps - 1 - clipped_idx\n        timesteps = (\n            np.linspace(0, self.noisiest_timestep, num_inference_steps + 1).round()[:-1].copy().astype(np.int64)\n        )\n\n        if self.use_karras_sigmas:\n            sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n            log_sigmas = np.log(sigmas)\n            sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=num_inference_steps)\n            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas]).round()\n            timesteps = timesteps.copy().astype(np.int64)\n\n        # when num_inference_steps == num_train_timesteps, we can end up with\n        # duplicates in timesteps.\n        _, unique_indices = np.unique(timesteps, return_index=True)\n        timesteps = timesteps[np.sort(unique_indices)]\n\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n        self.num_inference_steps = len(timesteps)\n\n        self.model_outputs = [\n            None,\n        ] * self.config.solver_order\n        self.lower_order_nums = 0\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler._threshold_sample\n    def _threshold_sample(self, sample: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        prediction of 
x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._sigma_to_t\n    def _sigma_to_t(self, sigma, log_sigmas):\n        # get log sigma\n        log_sigma = np.log(sigma)\n\n        # get distribution\n        dists = log_sigma - log_sigmas[:, np.newaxis]\n\n        # get sigmas range\n        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = log_sigmas[low_idx]\n        high = log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = np.clip(w, 0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.reshape(sigma.shape)\n        return t\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._convert_to_karras\n    def _convert_to_karras(self, in_sigmas: torch.FloatTensor, num_inference_steps) -> torch.FloatTensor:\n        \"\"\"Constructs the noise schedule of Karras et al. 
(2022).\"\"\"\n\n        sigma_min: float = in_sigmas[-1].item()\n        sigma_max: float = in_sigmas[0].item()\n\n        rho = 7.0  # 7.0 is the value used in the paper\n        ramp = np.linspace(0, 1, num_inference_steps)\n        min_inv_rho = sigma_min ** (1 / rho)\n        max_inv_rho = sigma_max ** (1 / rho)\n        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho\n        return sigmas\n\n    # Copied from diffusers.schedulers.scheduling_dpmsolver_multistep.DPMSolverMultistepScheduler.convert_model_output\n    def convert_model_output(\n        self, model_output: torch.FloatTensor, timestep: int, sample: torch.FloatTensor\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Convert the model output to the corresponding type that the algorithm (DPM-Solver / DPM-Solver++) needs.\n\n        DPM-Solver is designed to discretize an integral of the noise prediction model, and DPM-Solver++ is designed to\n        discretize an integral of the data prediction model. So we need to first convert the model output to the\n        corresponding type to match the algorithm.\n\n        Note that the algorithm type and the model type is decoupled. That is to say, we can use either DPM-Solver or\n        DPM-Solver++ for both noise prediction model and data prediction model.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the converted model output.\n        \"\"\"\n\n        # DPM-Solver++ needs to solve an integral of the data prediction model.\n        if self.config.algorithm_type in [\"dpmsolver++\", \"sde-dpmsolver++\"]:\n            if self.config.prediction_type == \"epsilon\":\n                # DPM-Solver and DPM-Solver++ only need the \"mean\" output.\n                if self.config.variance_type in [\"learned\", \"learned_range\"]:\n                    model_output = model_output[:, :3]\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * model_output) / alpha_t\n            elif self.config.prediction_type == \"sample\":\n                x0_pred = model_output\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = alpha_t * sample - sigma_t * model_output\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the DPMSolverMultistepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                x0_pred = self._threshold_sample(x0_pred)\n\n            return x0_pred\n\n        # DPM-Solver needs to solve an integral of the noise prediction model.\n        elif self.config.algorithm_type in [\"dpmsolver\", \"sde-dpmsolver\"]:\n            if self.config.prediction_type == \"epsilon\":\n                # DPM-Solver and DPM-Solver++ only need the \"mean\" output.\n                if self.config.variance_type in [\"learned\", \"learned_range\"]:\n                    epsilon = model_output[:, :3]\n                else:\n                    epsilon = model_output\n           
 elif self.config.prediction_type == \"sample\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = (sample - alpha_t * model_output) / sigma_t\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = alpha_t * model_output + sigma_t * sample\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the DPMSolverMultistepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * epsilon) / alpha_t\n                x0_pred = self._threshold_sample(x0_pred)\n                epsilon = (sample - alpha_t * x0_pred) / sigma_t\n\n            return epsilon\n\n    # Copied from diffusers.schedulers.scheduling_dpmsolver_multistep.DPMSolverMultistepScheduler.dpm_solver_first_order_update\n    def dpm_solver_first_order_update(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n        noise: Optional[torch.FloatTensor] = None,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the first-order DPM-Solver (equivalent to DDIM).\n\n        See https://arxiv.org/abs/2206.00927 for the detailed derivation.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        lambda_t, lambda_s = self.lambda_t[prev_timestep], self.lambda_t[timestep]\n        alpha_t, alpha_s = self.alpha_t[prev_timestep], self.alpha_t[timestep]\n        sigma_t, sigma_s = self.sigma_t[prev_timestep], self.sigma_t[timestep]\n        h = lambda_t - lambda_s\n        if self.config.algorithm_type == \"dpmsolver++\":\n            x_t = (sigma_t / sigma_s) * sample - (alpha_t * (torch.exp(-h) - 1.0)) * model_output\n        elif self.config.algorithm_type == \"dpmsolver\":\n            x_t = (alpha_t / alpha_s) * sample - (sigma_t * (torch.exp(h) - 1.0)) * model_output\n        elif self.config.algorithm_type == \"sde-dpmsolver++\":\n            assert noise is not None\n            x_t = (\n                (sigma_t / sigma_s * torch.exp(-h)) * sample\n                + (alpha_t * (1 - torch.exp(-2.0 * h))) * model_output\n                + sigma_t * torch.sqrt(1.0 - torch.exp(-2 * h)) * noise\n            )\n        elif self.config.algorithm_type == \"sde-dpmsolver\":\n            assert noise is not None\n            x_t = (\n                (alpha_t / alpha_s) * sample\n                - 2.0 * (sigma_t * (torch.exp(h) - 1.0)) * model_output\n                + sigma_t * torch.sqrt(torch.exp(2 * h) - 1.0) * noise\n            )\n        return x_t\n\n    # Copied from diffusers.schedulers.scheduling_dpmsolver_multistep.DPMSolverMultistepScheduler.multistep_dpm_solver_second_order_update\n    def 
multistep_dpm_solver_second_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n        noise: Optional[torch.FloatTensor] = None,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the second-order multistep DPM-Solver.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1 = prev_timestep, timestep_list[-1], timestep_list[-2]\n        m0, m1 = model_output_list[-1], model_output_list[-2]\n        lambda_t, lambda_s0, lambda_s1 = self.lambda_t[t], self.lambda_t[s0], self.lambda_t[s1]\n        alpha_t, alpha_s0 = self.alpha_t[t], self.alpha_t[s0]\n        sigma_t, sigma_s0 = self.sigma_t[t], self.sigma_t[s0]\n        h, h_0 = lambda_t - lambda_s0, lambda_s0 - lambda_s1\n        r0 = h_0 / h\n        D0, D1 = m0, (1.0 / r0) * (m0 - m1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2211.01095 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s0) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    - 0.5 * (alpha_t * (torch.exp(-h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s0) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1\n                )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - 0.5 * (sigma_t * (torch.exp(h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                )\n        elif self.config.algorithm_type == \"sde-dpmsolver++\":\n            assert noise is not None\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s0 * torch.exp(-h)) * sample\n                    + (alpha_t * (1 - torch.exp(-2.0 * h))) * D0\n                    + 0.5 * (alpha_t * (1 - torch.exp(-2.0 * h))) * D1\n                    + sigma_t * torch.sqrt(1.0 - torch.exp(-2 * h)) * noise\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s0 * torch.exp(-h)) * sample\n                    + (alpha_t * (1 - torch.exp(-2.0 * h))) * D0\n        
            + (alpha_t * ((1.0 - torch.exp(-2.0 * h)) / (-2.0 * h) + 1.0)) * D1\n                    + sigma_t * torch.sqrt(1.0 - torch.exp(-2 * h)) * noise\n                )\n        elif self.config.algorithm_type == \"sde-dpmsolver\":\n            assert noise is not None\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - 2.0 * (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D1\n                    + sigma_t * torch.sqrt(torch.exp(2 * h) - 1.0) * noise\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s0) * sample\n                    - 2.0 * (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - 2.0 * (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                    + sigma_t * torch.sqrt(torch.exp(2 * h) - 1.0) * noise\n                )\n        return x_t\n\n    # Copied from diffusers.schedulers.scheduling_dpmsolver_multistep.DPMSolverMultistepScheduler.multistep_dpm_solver_third_order_update\n    def multistep_dpm_solver_third_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the third-order multistep DPM-Solver.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1, s2 = prev_timestep, timestep_list[-1], timestep_list[-2], timestep_list[-3]\n        m0, m1, m2 = model_output_list[-1], model_output_list[-2], model_output_list[-3]\n        lambda_t, lambda_s0, lambda_s1, lambda_s2 = (\n            self.lambda_t[t],\n            self.lambda_t[s0],\n            self.lambda_t[s1],\n            self.lambda_t[s2],\n        )\n        alpha_t, alpha_s0 = self.alpha_t[t], self.alpha_t[s0]\n        sigma_t, sigma_s0 = self.sigma_t[t], self.sigma_t[s0]\n        h, h_0, h_1 = lambda_t - lambda_s0, lambda_s0 - lambda_s1, lambda_s1 - lambda_s2\n        r0, r1 = h_0 / h, h_1 / h\n        D0 = m0\n        D1_0, D1_1 = (1.0 / r0) * (m0 - m1), (1.0 / r1) * (m1 - m2)\n        D1 = D1_0 + (r0 / (r0 + r1)) * (D1_0 - D1_1)\n        D2 = (1.0 / (r0 + r1)) * (D1_0 - D1_1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            x_t = (\n                (sigma_t / sigma_s0) * sample\n                - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1\n                - (alpha_t * ((torch.exp(-h) - 1.0 + h) / h**2 - 0.5)) * D2\n            )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            x_t = (\n                (alpha_t / alpha_s0) * sample\n                - (sigma_t * 
(torch.exp(h) - 1.0)) * D0\n                - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                - (sigma_t * ((torch.exp(h) - 1.0 - h) / h**2 - 0.5)) * D2\n            )\n        return x_t\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        generator=None,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the multistep DPM-Solver.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero()\n        if len(step_index) == 0:\n            step_index = len(self.timesteps) - 1\n        else:\n            step_index = step_index.item()\n        prev_timestep = (\n            self.noisiest_timestep if step_index == len(self.timesteps) - 1 else self.timesteps[step_index + 1]\n        )\n        lower_order_final = (\n            (step_index == len(self.timesteps) - 1) and self.config.lower_order_final and len(self.timesteps) < 15\n        )\n        lower_order_second = (\n            (step_index == len(self.timesteps) - 2) and self.config.lower_order_final and len(self.timesteps) < 15\n        )\n\n        model_output = self.convert_model_output(model_output, timestep, sample)\n        for i in range(self.config.solver_order - 1):\n            self.model_outputs[i] = self.model_outputs[i + 1]\n        self.model_outputs[-1] = model_output\n\n        if self.config.algorithm_type in [\"sde-dpmsolver\", \"sde-dpmsolver++\"]:\n            noise = randn_tensor(\n                model_output.shape, generator=generator, device=model_output.device, dtype=model_output.dtype\n            )\n        else:\n            noise = None\n\n        if self.config.solver_order == 1 or self.lower_order_nums < 1 or lower_order_final:\n            prev_sample = self.dpm_solver_first_order_update(\n                model_output, timestep, prev_timestep, sample, noise=noise\n            )\n        elif self.config.solver_order == 2 or self.lower_order_nums < 2 or lower_order_second:\n            timestep_list = [self.timesteps[step_index - 1], timestep]\n            prev_sample = self.multistep_dpm_solver_second_order_update(\n                self.model_outputs, timestep_list, prev_timestep, sample, noise=noise\n            )\n        else:\n            timestep_list = [self.timesteps[step_index - 2], self.timesteps[step_index - 1], timestep]\n            prev_sample = self.multistep_dpm_solver_third_order_update(\n                
self.model_outputs, timestep_list, prev_timestep, sample\n            )\n\n        if self.lower_order_nums < self.config.solver_order:\n            self.lower_order_nums += 1\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    # Copied from diffusers.schedulers.scheduling_dpmsolver_multistep.DPMSolverMultistepScheduler.scale_model_input\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
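  {
    "path": "examples/schedulers/dpmsolver_multistep_inverse_usage_sketch.py",
    "content": "# Hypothetical usage sketch (not part of the upstream diffusers sources). It shows the inversion\n# loop that DPMSolverMultistepInverseScheduler (defined above) is designed for, walking a clean\n# latent back towards noise. `unet` is a stand-in for any callable returning an epsilon prediction\n# with the same shape as `latents`; only methods defined above (`set_timesteps`, `step`) are used.\nimport torch\n\nfrom diffusers import DPMSolverMultistepInverseScheduler\n\n\n@torch.no_grad()\ndef invert(unet, latents: torch.Tensor, num_inference_steps: int = 50) -> torch.Tensor:\n    scheduler = DPMSolverMultistepInverseScheduler(algorithm_type=\"dpmsolver++\", solver_order=2)\n    scheduler.set_timesteps(num_inference_steps, device=latents.device)\n\n    # the inverse scheduler's timesteps run from the cleanest step towards the noisiest one\n    for t in scheduler.timesteps:\n        noise_pred = unet(latents, t)\n        latents = scheduler.step(noise_pred, t, latents).prev_sample\n    return latents\n"
  },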
  {
    "path": "diffusers/schedulers/scheduling_dpmsolver_sde.py",
    "content": "# Copyright 2023 Katherine Crowson, The HuggingFace Team and hlky. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torchsde\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\nclass BatchedBrownianTree:\n    \"\"\"A wrapper around torchsde.BrownianTree that enables batches of entropy.\"\"\"\n\n    def __init__(self, x, t0, t1, seed=None, **kwargs):\n        t0, t1, self.sign = self.sort(t0, t1)\n        w0 = kwargs.get(\"w0\", torch.zeros_like(x))\n        if seed is None:\n            seed = torch.randint(0, 2**63 - 1, []).item()\n        self.batched = True\n        try:\n            assert len(seed) == x.shape[0]\n            w0 = w0[0]\n        except TypeError:\n            seed = [seed]\n            self.batched = False\n        self.trees = [torchsde.BrownianTree(t0, w0, t1, entropy=s, **kwargs) for s in seed]\n\n    @staticmethod\n    def sort(a, b):\n        return (a, b, 1) if a < b else (b, a, -1)\n\n    def __call__(self, t0, t1):\n        t0, t1, sign = self.sort(t0, t1)\n        w = torch.stack([tree(t0, t1) for tree in self.trees]) * (self.sign * sign)\n        return w if self.batched else w[0]\n\n\nclass BrownianTreeNoiseSampler:\n    \"\"\"A noise sampler backed by a torchsde.BrownianTree.\n\n    Args:\n        x (Tensor): The tensor whose shape, device and dtype to use to generate\n            random samples.\n        sigma_min (float): The low end of the valid interval.\n        sigma_max (float): The high end of the valid interval.\n        seed (int or List[int]): The random seed. 
If a list of seeds is\n            supplied instead of a single integer, then the noise sampler will use one BrownianTree per batch item, each\n            with its own seed.\n        transform (callable): A function that maps sigma to the sampler's\n            internal timestep.\n    \"\"\"\n\n    def __init__(self, x, sigma_min, sigma_max, seed=None, transform=lambda x: x):\n        self.transform = transform\n        t0, t1 = self.transform(torch.as_tensor(sigma_min)), self.transform(torch.as_tensor(sigma_max))\n        self.tree = BatchedBrownianTree(x, t0, t1, seed)\n\n    def __call__(self, sigma, sigma_next):\n        t0, t1 = self.transform(torch.as_tensor(sigma)), self.transform(torch.as_tensor(sigma_next))\n        return self.tree(t0, t1) / (t1 - t0).abs().sqrt()\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DPMSolverSDEScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Implements Stochastic Sampler (Algorithm 2) from Karras et al. (2022). Based on the original k-diffusion\n    implementation by Katherine Crowson:\n    https://github.com/crowsonkb/k-diffusion/blob/41b4cb6df0506694a7776af31349acf082bf6091/k_diffusion/sampling.py#L543\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model. beta_start (`float`): the\n        starting `beta` value of inference. beta_end (`float`): the final `beta` value. beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n             This parameter controls whether to use Karras sigmas (Karras et al. (2022) scheme) for step sizes in the\n             noise schedule during the sampling process. If True, the sigmas will be determined according to a sequence\n             of noise levels {σi} as defined in Equation (5) of the paper https://arxiv.org/pdf/2206.00364.pdf.\n        noise_sampler_seed (`int`, *optional*, defaults to `None`):\n            The random seed to use for the noise sampler. If `None`, a random seed will be generated.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 2\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.00085,  # sensible defaults\n        beta_end: float = 0.012,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        prediction_type: str = \"epsilon\",\n        use_karras_sigmas: Optional[bool] = False,\n        noise_sampler_seed: Optional[int] = None,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        #  set all values\n        self.set_timesteps(num_train_timesteps, None, num_train_timesteps)\n        self.use_karras_sigmas = use_karras_sigmas\n        self.noise_sampler = None\n        self.noise_sampler_seed = noise_sampler_seed\n\n    # Copied from diffusers.schedulers.scheduling_heun_discrete.HeunDiscreteScheduler.index_for_timestep\n    def index_for_timestep(self, timestep, schedule_timesteps=None):\n        if schedule_timesteps is None:\n            schedule_timesteps = self.timesteps\n\n        indices = (schedule_timesteps == timestep).nonzero()\n\n        if self.state_in_first_order:\n            pos = -1\n        else:\n            pos = 0\n        return indices[pos].item()\n\n    def scale_model_input(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Args:\n     
   Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n            sample (`torch.FloatTensor`): input sample timestep (`int`, optional): current timestep\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        sigma = self.sigmas[step_index]\n        sigma_input = sigma if self.state_in_first_order else self.mid_point_sigma\n        sample = sample / ((sigma_input**2 + 1) ** 0.5)\n        return sample\n\n    def set_timesteps(\n        self,\n        num_inference_steps: int,\n        device: Union[str, torch.device] = None,\n        num_train_timesteps: Optional[int] = None,\n    ):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        num_train_timesteps = num_train_timesteps or self.config.num_train_timesteps\n\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        log_sigmas = np.log(sigmas)\n        sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n\n        if self.use_karras_sigmas:\n            sigmas = self._convert_to_karras(in_sigmas=sigmas)\n            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])\n\n        second_order_timesteps = self._second_order_timesteps(sigmas, log_sigmas)\n\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n        sigmas = torch.from_numpy(sigmas).to(device=device)\n        self.sigmas = torch.cat([sigmas[:1], sigmas[1:-1].repeat_interleave(2), sigmas[-1:]])\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        timesteps = torch.from_numpy(timesteps)\n        second_order_timesteps = torch.from_numpy(second_order_timesteps)\n        timesteps = torch.cat([timesteps[:1], timesteps[1:].repeat_interleave(2)])\n        timesteps[1::2] = second_order_timesteps\n\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            self.timesteps = timesteps.to(device, dtype=torch.float32)\n        else:\n            self.timesteps = timesteps.to(device=device)\n\n        # empty first order variables\n        self.sample = None\n        self.mid_point_sigma = None\n\n    def _second_order_timesteps(self, sigmas, log_sigmas):\n        def sigma_fn(_t):\n            return np.exp(-_t)\n\n        def t_fn(_sigma):\n            return -np.log(_sigma)\n\n        midpoint_ratio = 0.5\n        t = t_fn(sigmas)\n        delta_time = np.diff(t)\n        t_proposed = t[:-1] + delta_time * midpoint_ratio\n        sig_proposed = sigma_fn(t_proposed)\n        timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sig_proposed])\n        return timesteps\n\n    # copied from diffusers.schedulers.scheduling_euler_discrete._sigma_to_t\n    def _sigma_to_t(self, sigma, log_sigmas):\n       
 # get log sigma\n        log_sigma = np.log(sigma)\n\n        # get distribution\n        dists = log_sigma - log_sigmas[:, np.newaxis]\n\n        # get sigmas range\n        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = log_sigmas[low_idx]\n        high = log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = np.clip(w, 0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.reshape(sigma.shape)\n        return t\n\n    # copied from diffusers.schedulers.scheduling_euler_discrete._convert_to_karras\n    def _convert_to_karras(self, in_sigmas: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"Constructs the noise schedule of Karras et al. (2022).\"\"\"\n\n        sigma_min: float = in_sigmas[-1].item()\n        sigma_max: float = in_sigmas[0].item()\n\n        rho = 7.0  # 7.0 is the value used in the paper\n        ramp = np.linspace(0, 1, self.num_inference_steps)\n        min_inv_rho = sigma_min ** (1 / rho)\n        max_inv_rho = sigma_max ** (1 / rho)\n        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho\n        return sigmas\n\n    @property\n    def state_in_first_order(self):\n        return self.sample is None\n\n    def step(\n        self,\n        model_output: Union[torch.FloatTensor, np.ndarray],\n        timestep: Union[float, torch.FloatTensor],\n        sample: Union[torch.FloatTensor, np.ndarray],\n        return_dict: bool = True,\n        s_noise: float = 1.0,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Args:\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n        model_output (Union[torch.FloatTensor, np.ndarray]): Direct output from learned diffusion model.\n        timestep (Union[float, torch.FloatTensor]): Current discrete timestep in the diffusion chain.\n        sample (Union[torch.FloatTensor, np.ndarray]): Current instance of sample being created by diffusion process.\n        return_dict (bool, optional): Option for returning tuple rather than SchedulerOutput class. Defaults to True.\n        s_noise (float, optional): Scaling factor for the noise added to the sample. Defaults to 1.0.\n        Returns:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        # Create a noise sampler if it hasn't been created yet\n        if self.noise_sampler is None:\n            min_sigma, max_sigma = self.sigmas[self.sigmas > 0].min(), self.sigmas.max()\n            self.noise_sampler = BrownianTreeNoiseSampler(sample, min_sigma, max_sigma, self.noise_sampler_seed)\n\n        # Define functions to compute sigma and t from each other\n        def sigma_fn(_t: torch.FloatTensor) -> torch.FloatTensor:\n            return _t.neg().exp()\n\n        def t_fn(_sigma: torch.FloatTensor) -> torch.FloatTensor:\n            return _sigma.log().neg()\n\n        if self.state_in_first_order:\n            sigma = self.sigmas[step_index]\n            sigma_next = self.sigmas[step_index + 1]\n        else:\n            # 2nd order\n            sigma = self.sigmas[step_index - 1]\n            sigma_next = self.sigmas[step_index]\n\n        # Set the midpoint and step size for the current step\n        midpoint_ratio = 0.5\n        t, t_next = t_fn(sigma), t_fn(sigma_next)\n        delta_time = t_next - t\n        t_proposed = t + delta_time * midpoint_ratio\n\n        # 1. compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            sigma_input = sigma if self.state_in_first_order else sigma_fn(t_proposed)\n            pred_original_sample = sample - sigma_input * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            sigma_input = sigma if self.state_in_first_order else sigma_fn(t_proposed)\n            pred_original_sample = model_output * (-sigma_input / (sigma_input**2 + 1) ** 0.5) + (\n                sample / (sigma_input**2 + 1)\n            )\n        elif self.config.prediction_type == \"sample\":\n            raise NotImplementedError(\"prediction_type not implemented yet: sample\")\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        if sigma_next == 0:\n            derivative = (sample - pred_original_sample) / sigma\n            dt = sigma_next - sigma\n            prev_sample = sample + derivative * dt\n        else:\n            if self.state_in_first_order:\n                t_next = t_proposed\n            else:\n                sample = self.sample\n\n            sigma_from = sigma_fn(t)\n            sigma_to = sigma_fn(t_next)\n            sigma_up = min(sigma_to, (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5)\n            sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5\n            ancestral_t = t_fn(sigma_down)\n            prev_sample = (sigma_fn(ancestral_t) / sigma_fn(t)) * sample - (\n                t - ancestral_t\n            ).expm1() * pred_original_sample\n            prev_sample = prev_sample + self.noise_sampler(sigma_fn(t), sigma_fn(t_next)) * s_noise * sigma_up\n\n            if self.state_in_first_order:\n                # store for 2nd order step\n                self.sample = sample\n                self.mid_point_sigma = sigma_fn(t_next)\n            else:\n                # free for \"first order mode\"\n                self.sample = None\n                self.mid_point_sigma = None\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return 
SchedulerOutput(prev_sample=prev_sample)\n\n    # Copied from diffusers.schedulers.scheduling_heun_discrete.HeunDiscreteScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [self.index_for_timestep(t, schedule_timesteps) for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
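# --- Illustrative sketch (not part of the library) ---------------------------------
# A standalone NumPy re-statement of the two helpers that appear in the scheduler above:
# the Karras et al. (2022) sigma ramp built with rho = 7.0, and the piecewise-linear
# interpolation in log-sigma space that maps a continuous sigma back to a fractional
# training timestep. The sigma range, step count, and training-sigma table below are
# made-up example values, not taken from any checkpoint.
import numpy as np

def convert_to_karras(sigma_min: float, sigma_max: float, num_steps: int, rho: float = 7.0) -> np.ndarray:
    # same formula as _convert_to_karras: interpolate in sigma**(1/rho) space, then invert
    ramp = np.linspace(0, 1, num_steps)
    min_inv_rho = sigma_min ** (1 / rho)
    max_inv_rho = sigma_max ** (1 / rho)
    return (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho

def sigma_to_t(sigma: np.ndarray, log_sigmas: np.ndarray) -> np.ndarray:
    # same logic as _sigma_to_t: find the bracketing training sigmas and interpolate between them
    log_sigma = np.log(sigma)
    dists = log_sigma - log_sigmas[:, np.newaxis]
    low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)
    high_idx = low_idx + 1
    low, high = log_sigmas[low_idx], log_sigmas[high_idx]
    w = np.clip((low - log_sigma) / (low - high), 0, 1)
    return (1 - w) * low_idx + w * high_idx

if __name__ == "__main__":
    sigmas = convert_to_karras(sigma_min=0.03, sigma_max=14.6, num_steps=10)
    log_sigmas = np.log(np.linspace(0.03, 14.6, 1000))  # stand-in for a training sigma table
    print(sigmas)                                        # decreasing noise levels, sigma_max -> sigma_min
    print(sigma_to_t(sigmas, log_sigmas))                # fractional timesteps for those sigmas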
  {
    "path": "diffusers/schedulers/scheduling_dpmsolver_singlestep.py",
    "content": "# Copyright 2023 TSAIL Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/LuChengTHU/dpm-solver\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass DPMSolverSinglestepScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    DPM-Solver (and the improved version DPM-Solver++) is a fast dedicated high-order solver for diffusion ODEs with\n    the convergence order guarantee. Empirically, sampling by DPM-Solver with only 20 steps can generate high-quality\n    samples, and it can generate quite good samples even in only 10 steps.\n\n    For more details, see the original paper: https://arxiv.org/abs/2206.00927 and https://arxiv.org/abs/2211.01095\n\n    Currently, we support the singlestep DPM-Solver for both noise prediction models and data prediction models. We\n    recommend to use `solver_order=2` for guided sampling, and `solver_order=3` for unconditional sampling.\n\n    We also support the \"dynamic thresholding\" method in Imagen (https://arxiv.org/abs/2205.11487). For pixel-space\n    diffusion models, you can set both `algorithm_type=\"dpmsolver++\"` and `thresholding=True` to use the dynamic\n    thresholding. Note that the thresholding method is unsuitable for latent-space diffusion models (such as\n    stable-diffusion).\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. 
They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        solver_order (`int`, default `2`):\n            the order of DPM-Solver; can be `1` or `2` or `3`. We recommend to use `solver_order=2` for guided\n            sampling, and `solver_order=3` for unconditional sampling.\n        prediction_type (`str`, default `epsilon`):\n            indicates whether the model predicts the noise (epsilon), or the data / `x0`. One of `epsilon`, `sample`,\n            or `v-prediction`.\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            For pixel-space diffusion models, you can set both `algorithm_type=dpmsolver++` and `thresholding=True` to\n            use the dynamic thresholding. Note that the thresholding method is unsuitable for latent-space diffusion\n            models (such as stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487).\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. Valid only when `thresholding=True` and\n            `algorithm_type=\"dpmsolver++`.\n        algorithm_type (`str`, default `dpmsolver++`):\n            the algorithm type for the solver. Either `dpmsolver` or `dpmsolver++`. The `dpmsolver` type implements the\n            algorithms in https://arxiv.org/abs/2206.00927, and the `dpmsolver++` type implements the algorithms in\n            https://arxiv.org/abs/2211.01095. We recommend to use `dpmsolver++` with `solver_order=2` for guided\n            sampling (e.g. stable-diffusion).\n        solver_type (`str`, default `midpoint`):\n            the solver type for the second-order solver. Either `midpoint` or `heun`. The solver type slightly affects\n            the sample quality, especially for small number of steps. We empirically find that `midpoint` solvers are\n            slightly better, so we recommend to use the `midpoint` type.\n        lower_order_final (`bool`, default `True`):\n            whether to use lower-order solvers in the final steps. For singlestep schedulers, we recommend to enable\n            this to use up all the function evaluations.\n        lambda_min_clipped (`float`, default `-inf`):\n            the clipping threshold for the minimum value of lambda(t) for numerical stability. This is critical for\n            cosine (squaredcos_cap_v2) noise schedule.\n        variance_type (`str`, *optional*):\n            Set to \"learned\" or \"learned_range\" for diffusion models that predict variance. 
For example, OpenAI's\n            guided-diffusion (https://github.com/openai/guided-diffusion) predicts both mean and variance of the\n            Gaussian distribution in the model's output. DPM-Solver only needs the \"mean\" output because it is based on\n            diffusion ODEs. whether the model's output contains the predicted Gaussian variance. For example, OpenAI's\n            guided-diffusion (https://github.com/openai/guided-diffusion) predicts both mean and variance of the\n            Gaussian distribution in the model's output. DPM-Solver only needs the \"mean\" output because it is based on\n            diffusion ODEs.\n\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[np.ndarray] = None,\n        solver_order: int = 2,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        sample_max_value: float = 1.0,\n        algorithm_type: str = \"dpmsolver++\",\n        solver_type: str = \"midpoint\",\n        lower_order_final: bool = True,\n        lambda_min_clipped: float = -float(\"inf\"),\n        variance_type: Optional[str] = None,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        # Currently we only support VP-type noise schedule\n        self.alpha_t = torch.sqrt(self.alphas_cumprod)\n        self.sigma_t = torch.sqrt(1 - self.alphas_cumprod)\n        self.lambda_t = torch.log(self.alpha_t) - torch.log(self.sigma_t)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # settings for DPM-Solver\n        if algorithm_type not in [\"dpmsolver\", \"dpmsolver++\"]:\n            if algorithm_type == \"deis\":\n                self.register_to_config(algorithm_type=\"dpmsolver++\")\n            else:\n                raise NotImplementedError(f\"{algorithm_type} does is not implemented for {self.__class__}\")\n        if solver_type not in [\"midpoint\", \"heun\"]:\n            if solver_type in [\"logrho\", \"bh1\", \"bh2\"]:\n                self.register_to_config(solver_type=\"midpoint\")\n            else:\n                raise NotImplementedError(f\"{solver_type} does is not implemented for {self.__class__}\")\n\n        # setable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, 
dtype=np.float32)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.model_outputs = [None] * solver_order\n        self.sample = None\n        self.order_list = self.get_order_list(num_train_timesteps)\n\n    def get_order_list(self, num_inference_steps: int) -> List[int]:\n        \"\"\"\n        Computes the solver order at each time step.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n        steps = num_inference_steps\n        order = self.config.solver_order\n        if self.config.lower_order_final:\n            if order == 3:\n                if steps % 3 == 0:\n                    orders = [1, 2, 3] * (steps // 3 - 1) + [1, 2] + [1]\n                elif steps % 3 == 1:\n                    orders = [1, 2, 3] * (steps // 3) + [1]\n                else:\n                    orders = [1, 2, 3] * (steps // 3) + [1, 2]\n            elif order == 2:\n                if steps % 2 == 0:\n                    orders = [1, 2] * (steps // 2)\n                else:\n                    orders = [1, 2] * (steps // 2) + [1]\n            elif order == 1:\n                orders = [1] * steps\n        else:\n            if order == 3:\n                orders = [1, 2, 3] * (steps // 3)\n            elif order == 2:\n                orders = [1, 2] * (steps // 2)\n            elif order == 1:\n                orders = [1] * steps\n        return orders\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n        # Clipping the minimum of all lambda(t) for numerical stability.\n        # This is critical for cosine (squaredcos_cap_v2) noise schedule.\n        clipped_idx = torch.searchsorted(torch.flip(self.lambda_t, [0]), self.config.lambda_min_clipped)\n        timesteps = (\n            np.linspace(0, self.config.num_train_timesteps - 1 - clipped_idx, num_inference_steps + 1)\n            .round()[::-1][:-1]\n            .copy()\n            .astype(np.int64)\n        )\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n        self.model_outputs = [None] * self.config.solver_order\n        self.sample = None\n        self.orders = self.get_order_list(num_inference_steps)\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler._threshold_sample\n    def _threshold_sample(self, sample: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. 
We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    def convert_model_output(\n        self, model_output: torch.FloatTensor, timestep: int, sample: torch.FloatTensor\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Convert the model output to the corresponding type that the algorithm (DPM-Solver / DPM-Solver++) needs.\n\n        DPM-Solver is designed to discretize an integral of the noise prediction model, and DPM-Solver++ is designed to\n        discretize an integral of the data prediction model. So we need to first convert the model output to the\n        corresponding type to match the algorithm.\n\n        Note that the algorithm type and the model type is decoupled. 
That is to say, we can use either DPM-Solver or\n        DPM-Solver++ for both noise prediction model and data prediction model.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the converted model output.\n        \"\"\"\n        # DPM-Solver++ needs to solve an integral of the data prediction model.\n        if self.config.algorithm_type == \"dpmsolver++\":\n            if self.config.prediction_type == \"epsilon\":\n                # DPM-Solver and DPM-Solver++ only need the \"mean\" output.\n                if self.config.variance_type in [\"learned_range\"]:\n                    model_output = model_output[:, :3]\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * model_output) / alpha_t\n            elif self.config.prediction_type == \"sample\":\n                x0_pred = model_output\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = alpha_t * sample - sigma_t * model_output\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the DPMSolverSinglestepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                x0_pred = self._threshold_sample(x0_pred)\n\n            return x0_pred\n        # DPM-Solver needs to solve an integral of the noise prediction model.\n        elif self.config.algorithm_type == \"dpmsolver\":\n            if self.config.prediction_type == \"epsilon\":\n                # DPM-Solver and DPM-Solver++ only need the \"mean\" output.\n                if self.config.variance_type in [\"learned_range\"]:\n                    model_output = model_output[:, :3]\n                return model_output\n            elif self.config.prediction_type == \"sample\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = (sample - alpha_t * model_output) / sigma_t\n                return epsilon\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = alpha_t * model_output + sigma_t * sample\n                return epsilon\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the DPMSolverSinglestepScheduler.\"\n                )\n\n    def dpm_solver_first_order_update(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the first-order DPM-Solver (equivalent to DDIM).\n\n        See https://arxiv.org/abs/2206.00927 for the detailed derivation.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep 
(`int`): current discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        lambda_t, lambda_s = self.lambda_t[prev_timestep], self.lambda_t[timestep]\n        alpha_t, alpha_s = self.alpha_t[prev_timestep], self.alpha_t[timestep]\n        sigma_t, sigma_s = self.sigma_t[prev_timestep], self.sigma_t[timestep]\n        h = lambda_t - lambda_s\n        if self.config.algorithm_type == \"dpmsolver++\":\n            x_t = (sigma_t / sigma_s) * sample - (alpha_t * (torch.exp(-h) - 1.0)) * model_output\n        elif self.config.algorithm_type == \"dpmsolver\":\n            x_t = (alpha_t / alpha_s) * sample - (sigma_t * (torch.exp(h) - 1.0)) * model_output\n        return x_t\n\n    def singlestep_dpm_solver_second_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the second-order singlestep DPM-Solver.\n\n        It computes the solution at time `prev_timestep` from the time `timestep_list[-2]`.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1 = prev_timestep, timestep_list[-1], timestep_list[-2]\n        m0, m1 = model_output_list[-1], model_output_list[-2]\n        lambda_t, lambda_s0, lambda_s1 = self.lambda_t[t], self.lambda_t[s0], self.lambda_t[s1]\n        alpha_t, alpha_s1 = self.alpha_t[t], self.alpha_t[s1]\n        sigma_t, sigma_s1 = self.sigma_t[t], self.sigma_t[s1]\n        h, h_0 = lambda_t - lambda_s1, lambda_s0 - lambda_s1\n        r0 = h_0 / h\n        D0, D1 = m1, (1.0 / r0) * (m0 - m1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2211.01095 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s1) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    - 0.5 * (alpha_t * (torch.exp(-h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s1) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1\n                )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s1) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - 0.5 
* (sigma_t * (torch.exp(h) - 1.0)) * D1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s1) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                )\n        return x_t\n\n    def singlestep_dpm_solver_third_order_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the third-order singlestep DPM-Solver.\n\n        It computes the solution at time `prev_timestep` from the time `timestep_list[-3]`.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        t, s0, s1, s2 = prev_timestep, timestep_list[-1], timestep_list[-2], timestep_list[-3]\n        m0, m1, m2 = model_output_list[-1], model_output_list[-2], model_output_list[-3]\n        lambda_t, lambda_s0, lambda_s1, lambda_s2 = (\n            self.lambda_t[t],\n            self.lambda_t[s0],\n            self.lambda_t[s1],\n            self.lambda_t[s2],\n        )\n        alpha_t, alpha_s2 = self.alpha_t[t], self.alpha_t[s2]\n        sigma_t, sigma_s2 = self.sigma_t[t], self.sigma_t[s2]\n        h, h_0, h_1 = lambda_t - lambda_s2, lambda_s0 - lambda_s2, lambda_s1 - lambda_s2\n        r0, r1 = h_0 / h, h_1 / h\n        D0 = m2\n        D1_0, D1_1 = (1.0 / r1) * (m1 - m2), (1.0 / r0) * (m0 - m2)\n        D1 = (r0 * D1_0 - r1 * D1_1) / (r0 - r1)\n        D2 = 2.0 * (D1_1 - D1_0) / (r0 - r1)\n        if self.config.algorithm_type == \"dpmsolver++\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (sigma_t / sigma_s2) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1_1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (sigma_t / sigma_s2) * sample\n                    - (alpha_t * (torch.exp(-h) - 1.0)) * D0\n                    + (alpha_t * ((torch.exp(-h) - 1.0) / h + 1.0)) * D1\n                    - (alpha_t * ((torch.exp(-h) - 1.0 + h) / h**2 - 0.5)) * D2\n                )\n        elif self.config.algorithm_type == \"dpmsolver\":\n            # See https://arxiv.org/abs/2206.00927 for detailed derivations\n            if self.config.solver_type == \"midpoint\":\n                x_t = (\n                    (alpha_t / alpha_s2) * sample\n                    - (sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1_1\n                )\n            elif self.config.solver_type == \"heun\":\n                x_t = (\n                    (alpha_t / alpha_s2) * sample\n                    - 
(sigma_t * (torch.exp(h) - 1.0)) * D0\n                    - (sigma_t * ((torch.exp(h) - 1.0) / h - 1.0)) * D1\n                    - (sigma_t * ((torch.exp(h) - 1.0 - h) / h**2 - 0.5)) * D2\n                )\n        return x_t\n\n    def singlestep_dpm_solver_update(\n        self,\n        model_output_list: List[torch.FloatTensor],\n        timestep_list: List[int],\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n        order: int,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the singlestep DPM-Solver.\n\n        Args:\n            model_output_list (`List[torch.FloatTensor]`):\n                direct outputs from learned diffusion model at current and latter timesteps.\n            timestep (`int`): current and latter discrete timestep in the diffusion chain.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            order (`int`):\n                the solver order at this step.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        if order == 1:\n            return self.dpm_solver_first_order_update(model_output_list[-1], timestep_list[-1], prev_timestep, sample)\n        elif order == 2:\n            return self.singlestep_dpm_solver_second_order_update(\n                model_output_list, timestep_list, prev_timestep, sample\n            )\n        elif order == 3:\n            return self.singlestep_dpm_solver_third_order_update(\n                model_output_list, timestep_list, prev_timestep, sample\n            )\n        else:\n            raise ValueError(f\"Order must be 1, 2, 3, got {order}\")\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the singlestep DPM-Solver.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero()\n        if len(step_index) == 0:\n            step_index = len(self.timesteps) - 1\n        else:\n            step_index = step_index.item()\n        prev_timestep = 0 if step_index == len(self.timesteps) - 1 else self.timesteps[step_index + 1]\n\n        model_output = self.convert_model_output(model_output, timestep, sample)\n        for i in range(self.config.solver_order - 1):\n            self.model_outputs[i] = self.model_outputs[i + 1]\n        self.model_outputs[-1] = model_output\n\n        order = self.order_list[step_index]\n        # For single-step solvers, we use the initial value at each time with order = 1.\n        if order == 1:\n            self.sample = sample\n\n        timestep_list = [self.timesteps[step_index - i] for i in range(order - 1, 0, -1)] + [timestep]\n        prev_sample = self.singlestep_dpm_solver_update(\n            self.model_outputs, timestep_list, prev_timestep, self.sample, order\n        )\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
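# --- Illustrative sketch (not part of the library) ---------------------------------
# A standalone re-statement of get_order_list above, showing how the singlestep solver
# splits `num_inference_steps` model evaluations into order-1/2/3 sub-steps, and how
# `lower_order_final=True` adjusts the tail of the schedule so every evaluation is used
# (visible in the order-3 cases below).
from typing import List

def order_list(steps: int, order: int, lower_order_final: bool = True) -> List[int]:
    if lower_order_final:
        if order == 3:
            if steps % 3 == 0:
                return [1, 2, 3] * (steps // 3 - 1) + [1, 2] + [1]
            if steps % 3 == 1:
                return [1, 2, 3] * (steps // 3) + [1]
            return [1, 2, 3] * (steps // 3) + [1, 2]
        if order == 2:
            return [1, 2] * (steps // 2) + ([1] if steps % 2 else [])
        return [1] * steps
    if order == 3:
        return [1, 2, 3] * (steps // 3)
    if order == 2:
        return [1, 2] * (steps // 2)
    return [1] * steps

print(order_list(10, 2))  # [1, 2, 1, 2, 1, 2, 1, 2, 1, 2]
print(order_list(10, 3))  # [1, 2, 3, 1, 2, 3, 1, 2, 3, 1]
print(order_list(9, 3))   # [1, 2, 3, 1, 2, 3, 1, 2, 1]  (final group lowered to orders 2 and 1)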
  {
    "path": "diffusers/schedulers/scheduling_euler_ancestral_discrete.py",
    "content": "# Copyright 2023 Katherine Crowson and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, logging, randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\n# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->EulerAncestralDiscrete\nclass EulerAncestralDiscreteSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass EulerAncestralDiscreteScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Ancestral sampling with Euler method steps. 
Based on the original k-diffusion implementation by Katherine Crowson:\n    https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L72\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        prediction_type: str = \"epsilon\",\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        sigmas = np.concatenate([sigmas[::-1], [0.0]]).astype(np.float32)\n        self.sigmas = torch.from_numpy(sigmas)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        # setable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=float)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.is_scale_input_called = False\n\n    def scale_model_input(\n        self, sample: torch.FloatTensor, 
timestep: Union[float, torch.FloatTensor]\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Scales the denoising model input by `(sigma**2 + 1) ** 0.5` to match the Euler algorithm.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`float` or `torch.FloatTensor`): the current timestep in the diffusion chain\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero().item()\n        sigma = self.sigmas[step_index]\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n        self.is_scale_input_called = True\n        return sample\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        timesteps = np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n        self.sigmas = torch.from_numpy(sigmas).to(device=device)\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            self.timesteps = torch.from_numpy(timesteps).to(device, dtype=torch.float32)\n        else:\n            self.timesteps = torch.from_numpy(timesteps).to(device=device)\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n        sample: torch.FloatTensor,\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[EulerAncestralDiscreteSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`float`): current timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            generator (`torch.Generator`, optional): Random number generator.\n            return_dict (`bool`): option for returning tuple rather than EulerAncestralDiscreteSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.EulerAncestralDiscreteSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.EulerAncestralDiscreteSchedulerOutput`] if `return_dict` is True, otherwise\n            a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        if (\n            isinstance(timestep, int)\n            or isinstance(timestep, torch.IntTensor)\n            or isinstance(timestep, torch.LongTensor)\n        ):\n            raise ValueError(\n                (\n                    \"Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to\"\n                    \" `EulerDiscreteScheduler.step()` is not supported. Make sure to pass\"\n                    \" one of the `scheduler.timesteps` as a timestep.\"\n                ),\n            )\n\n        if not self.is_scale_input_called:\n            logger.warning(\n                \"The `scale_model_input` function should be called before `step` to ensure correct denoising. \"\n                \"See `StableDiffusionPipeline` for a usage example.\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n\n        step_index = (self.timesteps == timestep).nonzero().item()\n        sigma = self.sigmas[step_index]\n\n        # 1. compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = sample - sigma * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            # * c_out + input * c_skip\n            pred_original_sample = model_output * (-sigma / (sigma**2 + 1) ** 0.5) + (sample / (sigma**2 + 1))\n        elif self.config.prediction_type == \"sample\":\n            raise NotImplementedError(\"prediction_type not implemented yet: sample\")\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        sigma_from = self.sigmas[step_index]\n        sigma_to = self.sigmas[step_index + 1]\n        sigma_up = (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5\n        sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5\n\n        # 2. 
Convert to an ODE derivative\n        derivative = (sample - pred_original_sample) / sigma\n\n        dt = sigma_down - sigma\n\n        prev_sample = sample + derivative * dt\n\n        device = model_output.device\n        noise = randn_tensor(model_output.shape, dtype=model_output.dtype, device=device, generator=generator)\n\n        prev_sample = prev_sample + noise * sigma_up\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return EulerAncestralDiscreteSchedulerOutput(\n            prev_sample=prev_sample, pred_original_sample=pred_original_sample\n        )\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [(schedule_timesteps == t).nonzero().item() for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
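# --- Illustrative sketch (not part of the library) ---------------------------------
# A minimal numerical sketch of the ancestral update performed in
# EulerAncestralDiscreteScheduler.step() above for an epsilon-prediction model:
# the move to sigma_to is split into a deterministic Euler step toward sigma_down plus
# freshly sampled noise at scale sigma_up. Tensor shapes and sigma values are made up.
import torch

def euler_ancestral_step(sample, model_output, sigma_from, sigma_to, generator=None):
    # epsilon prediction: x0 = x - sigma * eps
    pred_original_sample = sample - sigma_from * model_output
    # split sigma_to into a deterministic part (sigma_down) and a re-noising part (sigma_up)
    sigma_up = (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5
    sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5
    # Euler step of the ODE derivative toward sigma_down
    derivative = (sample - pred_original_sample) / sigma_from
    prev_sample = sample + derivative * (sigma_down - sigma_from)
    # re-inject noise so the resulting noise level matches sigma_to (sigma_down**2 + sigma_up**2 = sigma_to**2)
    noise = torch.randn(sample.shape, generator=generator, dtype=sample.dtype)
    return prev_sample + noise * sigma_up

x = torch.randn(1, 4, 8, 8) * 14.6   # toy "latent" at a high noise level
eps = torch.randn_like(x)            # stand-in for a model's noise prediction
x_prev = euler_ancestral_step(x, eps, sigma_from=14.6, sigma_to=10.8)
print(x_prev.shape)                  # torch.Size([1, 4, 8, 8])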
  {
    "path": "diffusers/schedulers/scheduling_euler_discrete.py",
    "content": "# Copyright 2023 Katherine Crowson and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, logging, randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\n@dataclass\n# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->EulerDiscrete\nclass EulerDiscreteSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass EulerDiscreteScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Euler scheduler (Algorithm 2) from Karras et al. (2022) https://arxiv.org/abs/2206.00364. . 
Based on the original\n    k-diffusion implementation by Katherine Crowson:\n    https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L51\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        prediction_type (`str`, default `\"epsilon\"`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        interpolation_type (`str`, default `\"linear\"`, optional):\n            interpolation type to compute intermediate sigmas for the scheduler denoising steps. Should be one of\n            [`\"linear\"`, `\"log_linear\"`].\n        use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n             This parameter controls whether to use Karras sigmas (Karras et al. (2022) scheme) for step sizes in the\n             noise schedule during the sampling process. 
If True, the sigmas will be determined according to a sequence\n             of noise levels {σi} as defined in Equation (5) of the paper https://arxiv.org/pdf/2206.00364.pdf.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        prediction_type: str = \"epsilon\",\n        interpolation_type: str = \"linear\",\n        use_karras_sigmas: Optional[bool] = False,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        sigmas = np.concatenate([sigmas[::-1], [0.0]]).astype(np.float32)\n        self.sigmas = torch.from_numpy(sigmas)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        # setable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=float)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.is_scale_input_called = False\n        self.use_karras_sigmas = use_karras_sigmas\n\n    def scale_model_input(\n        self, sample: torch.FloatTensor, timestep: Union[float, torch.FloatTensor]\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Scales the denoising model input by `(sigma**2 + 1) ** 0.5` to match the Euler algorithm.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`float` or `torch.FloatTensor`): the current timestep in the diffusion chain\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero().item()\n        sigma = self.sigmas[step_index]\n\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n\n        self.is_scale_input_called = True\n        return sample\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        timesteps = np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        log_sigmas = np.log(sigmas)\n\n        if self.config.interpolation_type == \"linear\":\n            sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n        elif self.config.interpolation_type == \"log_linear\":\n            sigmas = torch.linspace(np.log(sigmas[-1]), np.log(sigmas[0]), num_inference_steps + 1).exp()\n        else:\n            raise ValueError(\n                f\"{self.config.interpolation_type} is not implemented. Please specify interpolation_type to either\"\n                \" 'linear' or 'log_linear'\"\n            )\n\n        if self.use_karras_sigmas:\n            sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=self.num_inference_steps)\n            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])\n\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n        self.sigmas = torch.from_numpy(sigmas).to(device=device)\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            self.timesteps = torch.from_numpy(timesteps).to(device, dtype=torch.float32)\n        else:\n            self.timesteps = torch.from_numpy(timesteps).to(device=device)\n\n    def _sigma_to_t(self, sigma, log_sigmas):\n        # get log sigma\n        log_sigma = np.log(sigma)\n\n        # get distribution\n        dists = log_sigma - log_sigmas[:, np.newaxis]\n\n        # get sigmas range\n        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = log_sigmas[low_idx]\n        high = log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = np.clip(w, 0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.reshape(sigma.shape)\n        return t\n\n    # Copied from https://github.com/crowsonkb/k-diffusion/blob/686dbad0f39640ea25c8a8c6a6e56bb40eacefa2/k_diffusion/sampling.py#L17\n    def _convert_to_karras(self, in_sigmas: torch.FloatTensor, num_inference_steps) -> torch.FloatTensor:\n        \"\"\"Constructs the noise schedule of Karras et al. 
(2022).\"\"\"\n\n        sigma_min: float = in_sigmas[-1].item()\n        sigma_max: float = in_sigmas[0].item()\n\n        rho = 7.0  # 7.0 is the value used in the paper\n        ramp = np.linspace(0, 1, num_inference_steps)\n        min_inv_rho = sigma_min ** (1 / rho)\n        max_inv_rho = sigma_max ** (1 / rho)\n        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho\n        return sigmas\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n        sample: torch.FloatTensor,\n        s_churn: float = 0.0,\n        s_tmin: float = 0.0,\n        s_tmax: float = float(\"inf\"),\n        s_noise: float = 1.0,\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[EulerDiscreteSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`float`): current timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            s_churn (`float`)\n            s_tmin  (`float`)\n            s_tmax  (`float`)\n            s_noise (`float`)\n            generator (`torch.Generator`, optional): Random number generator.\n            return_dict (`bool`): option for returning tuple rather than EulerDiscreteSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.EulerDiscreteSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.EulerDiscreteSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        if (\n            isinstance(timestep, int)\n            or isinstance(timestep, torch.IntTensor)\n            or isinstance(timestep, torch.LongTensor)\n        ):\n            raise ValueError(\n                (\n                    \"Passing integer indices (e.g. from `enumerate(timesteps)`) as timesteps to\"\n                    \" `EulerDiscreteScheduler.step()` is not supported. Make sure to pass\"\n                    \" one of the `scheduler.timesteps` as a timestep.\"\n                ),\n            )\n\n        if not self.is_scale_input_called:\n            logger.warning(\n                \"The `scale_model_input` function should be called before `step` to ensure correct denoising. \"\n                \"See `StableDiffusionPipeline` for a usage example.\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n\n        step_index = (self.timesteps == timestep).nonzero().item()\n        sigma = self.sigmas[step_index]\n\n        gamma = min(s_churn / (len(self.sigmas) - 1), 2**0.5 - 1) if s_tmin <= sigma <= s_tmax else 0.0\n\n        noise = randn_tensor(\n            model_output.shape, dtype=model_output.dtype, device=model_output.device, generator=generator\n        )\n\n        eps = noise * s_noise\n        sigma_hat = sigma * (gamma + 1)\n\n        if gamma > 0:\n            sample = sample + eps * (sigma_hat**2 - sigma**2) ** 0.5\n\n        # 1. 
compute predicted original sample (x_0) from sigma-scaled predicted noise\n        # NOTE: \"original_sample\" should not be an expected prediction_type but is left in for\n        # backwards compatibility\n        if self.config.prediction_type == \"original_sample\" or self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n        elif self.config.prediction_type == \"epsilon\":\n            pred_original_sample = sample - sigma_hat * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            # * c_out + input * c_skip\n            pred_original_sample = model_output * (-sigma / (sigma**2 + 1) ** 0.5) + (sample / (sigma**2 + 1))\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        # 2. Convert to an ODE derivative\n        derivative = (sample - pred_original_sample) / sigma_hat\n\n        dt = self.sigmas[step_index + 1] - sigma_hat\n\n        prev_sample = sample + derivative * dt\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return EulerDiscreteSchedulerOutput(prev_sample=prev_sample, pred_original_sample=pred_original_sample)\n\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [(schedule_timesteps == t).nonzero().item() for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_heun_discrete.py",
    "content": "# Copyright 2023 Katherine Crowson, The HuggingFace Team and hlky. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass HeunDiscreteScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Implements Algorithm 2 (Heun steps) from Karras et al. (2022) for discrete beta schedules. Based on the original\n    k-diffusion implementation by Katherine Crowson:\n    https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L90\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf).\n        use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n             This parameter controls whether to use Karras sigmas (Karras et al. (2022) scheme) for step sizes in the\n             noise schedule during the sampling process. If True, the sigmas will be determined according to a sequence\n             of noise levels {σi} as defined in Equation (5) of the paper https://arxiv.org/pdf/2206.00364.pdf.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 2\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.00085,  # sensible defaults\n        beta_end: float = 0.012,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        prediction_type: str = \"epsilon\",\n        use_karras_sigmas: Optional[bool] = False,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        #  set all values\n        self.set_timesteps(num_train_timesteps, None, num_train_timesteps)\n        self.use_karras_sigmas = use_karras_sigmas\n\n    def index_for_timestep(self, timestep, schedule_timesteps=None):\n        if schedule_timesteps is None:\n            schedule_timesteps = self.timesteps\n\n        indices = (schedule_timesteps == timestep).nonzero()\n\n        if self.state_in_first_order:\n            pos = -1\n        else:\n            pos = 0\n        return indices[pos].item()\n\n    def scale_model_input(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        sigma = 
self.sigmas[step_index]\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n        return sample\n\n    def set_timesteps(\n        self,\n        num_inference_steps: int,\n        device: Union[str, torch.device] = None,\n        num_train_timesteps: Optional[int] = None,\n    ):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        num_train_timesteps = num_train_timesteps or self.config.num_train_timesteps\n\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        log_sigmas = np.log(sigmas)\n        sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n\n        if self.use_karras_sigmas:\n            sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=self.num_inference_steps)\n            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])\n\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n        sigmas = torch.from_numpy(sigmas).to(device=device)\n        self.sigmas = torch.cat([sigmas[:1], sigmas[1:-1].repeat_interleave(2), sigmas[-1:]])\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        timesteps = torch.from_numpy(timesteps)\n        timesteps = torch.cat([timesteps[:1], timesteps[1:].repeat_interleave(2)])\n\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            self.timesteps = timesteps.to(device, dtype=torch.float32)\n        else:\n            self.timesteps = timesteps.to(device=device)\n\n        # empty dt and derivative\n        self.prev_derivative = None\n        self.dt = None\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._sigma_to_t\n    def _sigma_to_t(self, sigma, log_sigmas):\n        # get log sigma\n        log_sigma = np.log(sigma)\n\n        # get distribution\n        dists = log_sigma - log_sigmas[:, np.newaxis]\n\n        # get sigmas range\n        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = log_sigmas[low_idx]\n        high = log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = np.clip(w, 0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.reshape(sigma.shape)\n        return t\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler._convert_to_karras\n    def _convert_to_karras(self, in_sigmas: torch.FloatTensor, num_inference_steps) -> torch.FloatTensor:\n        \"\"\"Constructs the noise schedule of Karras et al. 
(2022).\"\"\"\n\n        sigma_min: float = in_sigmas[-1].item()\n        sigma_max: float = in_sigmas[0].item()\n\n        rho = 7.0  # 7.0 is the value used in the paper\n        ramp = np.linspace(0, 1, num_inference_steps)\n        min_inv_rho = sigma_min ** (1 / rho)\n        max_inv_rho = sigma_max ** (1 / rho)\n        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho\n        return sigmas\n\n    @property\n    def state_in_first_order(self):\n        return self.dt is None\n\n    def step(\n        self,\n        model_output: Union[torch.FloatTensor, np.ndarray],\n        timestep: Union[float, torch.FloatTensor],\n        sample: Union[torch.FloatTensor, np.ndarray],\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Args:\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n            model_output (`torch.FloatTensor` or `np.ndarray`): direct output from learned diffusion model. timestep\n            (`int`): current discrete timestep in the diffusion chain. sample (`torch.FloatTensor` or `np.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n        Returns:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        if self.state_in_first_order:\n            sigma = self.sigmas[step_index]\n            sigma_next = self.sigmas[step_index + 1]\n        else:\n            # 2nd order / Heun's method\n            sigma = self.sigmas[step_index - 1]\n            sigma_next = self.sigmas[step_index]\n\n        # currently only gamma=0 is supported. This usually works best anyways.\n        # We can support gamma in the future but then need to scale the timestep before\n        # passing it to the model which requires a change in API\n        gamma = 0\n        sigma_hat = sigma * (gamma + 1)  # Note: sigma_hat == sigma for now\n\n        # 1. compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            sigma_input = sigma_hat if self.state_in_first_order else sigma_next\n            pred_original_sample = sample - sigma_input * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            sigma_input = sigma_hat if self.state_in_first_order else sigma_next\n            pred_original_sample = model_output * (-sigma_input / (sigma_input**2 + 1) ** 0.5) + (\n                sample / (sigma_input**2 + 1)\n            )\n        elif self.config.prediction_type == \"sample\":\n            raise NotImplementedError(\"prediction_type not implemented yet: sample\")\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        if self.state_in_first_order:\n            # 2. Convert to an ODE derivative for 1st order\n            derivative = (sample - pred_original_sample) / sigma_hat\n            # 3. 
delta timestep\n            dt = sigma_next - sigma_hat\n\n            # store for 2nd order step\n            self.prev_derivative = derivative\n            self.dt = dt\n            self.sample = sample\n        else:\n            # 2. 2nd order / Heun's method\n            derivative = (sample - pred_original_sample) / sigma_next\n            derivative = (self.prev_derivative + derivative) / 2\n\n            # 3. take prev timestep & sample\n            dt = self.dt\n            sample = self.sample\n\n            # free dt and derivative\n            # Note, this puts the scheduler in \"first order mode\"\n            self.prev_derivative = None\n            self.dt = None\n            self.sample = None\n\n        prev_sample = sample + derivative * dt\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [self.index_for_timestep(t, schedule_timesteps) for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_ipndm.py",
    "content": "# Copyright 2023 Zhejiang University Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import SchedulerMixin, SchedulerOutput\n\n\nclass IPNDMScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Improved Pseudo numerical methods for diffusion models (iPNDM) ported from @crowsonkb's amazing k-diffusion\n    [library](https://github.com/crowsonkb/v-diffusion-pytorch/blob/987f8985e38208345c1959b0ea767a625831cc9b/diffusion/sampling.py#L296)\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2202.09778\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n    \"\"\"\n\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self, num_train_timesteps: int = 1000, trained_betas: Optional[Union[np.ndarray, List[float]]] = None\n    ):\n        # set `betas`, `alphas`, `timesteps`\n        self.set_timesteps(num_train_timesteps)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # For now we only support F-PNDM, i.e. the runge-kutta method\n        # For more information on the algorithm please take a look at the paper: https://arxiv.org/pdf/2202.09778.pdf\n        # mainly at formula (9), (12), (13) and the Algorithm 2.\n        self.pndm_order = 4\n\n        # running values\n        self.ets = []\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n        steps = torch.linspace(1, 0, num_inference_steps + 1)[:-1]\n        steps = torch.cat([steps, torch.tensor([0.0])])\n\n        if self.config.trained_betas is not None:\n            self.betas = torch.tensor(self.config.trained_betas, dtype=torch.float32)\n        else:\n            self.betas = torch.sin(steps * math.pi / 2) ** 2\n\n        self.alphas = (1.0 - self.betas**2) ** 0.5\n\n        timesteps = (torch.atan2(self.betas, self.alphas) / math.pi * 2)[:-1]\n        self.timesteps = timesteps.to(device)\n\n        self.ets = []\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the linear multi-step method. This has one forward pass with multiple\n        times to approximate the solution.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        timestep_index = (self.timesteps == timestep).nonzero().item()\n        prev_timestep_index = timestep_index + 1\n\n        ets = sample * self.betas[timestep_index] + model_output * self.alphas[timestep_index]\n        self.ets.append(ets)\n\n        if len(self.ets) == 1:\n            ets = self.ets[-1]\n        elif len(self.ets) == 2:\n            ets = (3 * self.ets[-1] - self.ets[-2]) / 2\n        elif len(self.ets) == 3:\n            ets = (23 * self.ets[-1] - 16 * self.ets[-2] + 5 * self.ets[-3]) / 12\n        else:\n            ets = (1 / 24) * (55 * self.ets[-1] - 59 * self.ets[-2] + 37 * self.ets[-3] - 9 * self.ets[-4])\n\n        prev_sample = self._get_prev_sample(sample, timestep_index, prev_timestep_index, ets)\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def _get_prev_sample(self, sample, timestep_index, prev_timestep_index, ets):\n        alpha = self.alphas[timestep_index]\n        sigma = self.betas[timestep_index]\n\n        next_alpha = self.alphas[prev_timestep_index]\n        next_sigma = self.betas[prev_timestep_index]\n\n        pred = (sample - sigma * ets) / max(alpha, 1e-8)\n        prev_sample = next_alpha * pred + ets * next_sigma\n\n        return prev_sample\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_k_dpm_2_ancestral_discrete.py",
    "content": "# Copyright 2023 Katherine Crowson, The HuggingFace Team and hlky. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import randn_tensor\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass KDPM2AncestralDiscreteScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Scheduler created by @crowsonkb in [k_diffusion](https://github.com/crowsonkb/k-diffusion), see:\n    https://github.com/crowsonkb/k-diffusion/blob/5b3af030dd83e0297272d861c19477735d0317ec/k_diffusion/sampling.py#L188\n\n    Scheduler inspired by DPM-Solver-2 and Algorithm 2 from Karras et al. (2022).\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n            options to clip the variance used when adding noise to the denoised sample. Choose from `fixed_small`,\n            `fixed_small_log`, `fixed_large`, `fixed_large_log`, `learned` or `learned_range`.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 2\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.00085,  # sensible defaults\n        beta_end: float = 0.012,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        prediction_type: str = \"epsilon\",\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        #  set all values\n        self.set_timesteps(num_train_timesteps, None, num_train_timesteps)\n\n    # Copied from diffusers.schedulers.scheduling_heun_discrete.HeunDiscreteScheduler.index_for_timestep\n    def index_for_timestep(self, timestep, schedule_timesteps=None):\n        if schedule_timesteps is None:\n            schedule_timesteps = self.timesteps\n\n        indices = (schedule_timesteps == timestep).nonzero()\n\n        if self.state_in_first_order:\n            pos = -1\n        else:\n            pos = 0\n        return indices[pos].item()\n\n    def scale_model_input(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        if self.state_in_first_order:\n            sigma = self.sigmas[step_index]\n        else:\n            sigma = self.sigmas_interpol[step_index - 1]\n\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n        return sample\n\n    def 
set_timesteps(\n        self,\n        num_inference_steps: int,\n        device: Union[str, torch.device] = None,\n        num_train_timesteps: Optional[int] = None,\n    ):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        num_train_timesteps = num_train_timesteps or self.config.num_train_timesteps\n\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        self.log_sigmas = torch.from_numpy(np.log(sigmas)).to(device)\n\n        sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n        sigmas = torch.from_numpy(sigmas).to(device=device)\n\n        # compute up and down sigmas\n        sigmas_next = sigmas.roll(-1)\n        sigmas_next[-1] = 0.0\n        sigmas_up = (sigmas_next**2 * (sigmas**2 - sigmas_next**2) / sigmas**2) ** 0.5\n        sigmas_down = (sigmas_next**2 - sigmas_up**2) ** 0.5\n        sigmas_down[-1] = 0.0\n\n        # compute interpolated sigmas\n        sigmas_interpol = sigmas.log().lerp(sigmas_down.log(), 0.5).exp()\n        sigmas_interpol[-2:] = 0.0\n\n        # set sigmas\n        self.sigmas = torch.cat([sigmas[:1], sigmas[1:].repeat_interleave(2), sigmas[-1:]])\n        self.sigmas_interpol = torch.cat(\n            [sigmas_interpol[:1], sigmas_interpol[1:].repeat_interleave(2), sigmas_interpol[-1:]]\n        )\n        self.sigmas_up = torch.cat([sigmas_up[:1], sigmas_up[1:].repeat_interleave(2), sigmas_up[-1:]])\n        self.sigmas_down = torch.cat([sigmas_down[:1], sigmas_down[1:].repeat_interleave(2), sigmas_down[-1:]])\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            timesteps = torch.from_numpy(timesteps).to(device, dtype=torch.float32)\n        else:\n            timesteps = torch.from_numpy(timesteps).to(device)\n\n        timesteps_interpol = self.sigma_to_t(sigmas_interpol).to(device, dtype=timesteps.dtype)\n        interleaved_timesteps = torch.stack((timesteps_interpol[:-2, None], timesteps[1:, None]), dim=-1).flatten()\n\n        self.timesteps = torch.cat([timesteps[:1], interleaved_timesteps])\n\n        self.sample = None\n\n    def sigma_to_t(self, sigma):\n        # get log sigma\n        log_sigma = sigma.log()\n\n        # get distribution\n        dists = log_sigma - self.log_sigmas[:, None]\n\n        # get sigmas range\n        low_idx = dists.ge(0).cumsum(dim=0).argmax(dim=0).clamp(max=self.log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = self.log_sigmas[low_idx]\n        high = self.log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = w.clamp(0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.view(sigma.shape)\n        
return t\n\n    @property\n    def state_in_first_order(self):\n        return self.sample is None\n\n    def step(\n        self,\n        model_output: Union[torch.FloatTensor, np.ndarray],\n        timestep: Union[float, torch.FloatTensor],\n        sample: Union[torch.FloatTensor, np.ndarray],\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Args:\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n            model_output (`torch.FloatTensor` or `np.ndarray`): direct output from learned diffusion model. timestep\n            (`int`): current discrete timestep in the diffusion chain. sample (`torch.FloatTensor` or `np.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n        Returns:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        if self.state_in_first_order:\n            sigma = self.sigmas[step_index]\n            sigma_interpol = self.sigmas_interpol[step_index]\n            sigma_up = self.sigmas_up[step_index]\n            sigma_down = self.sigmas_down[step_index - 1]\n        else:\n            # 2nd order / KPDM2's method\n            sigma = self.sigmas[step_index - 1]\n            sigma_interpol = self.sigmas_interpol[step_index - 1]\n            sigma_up = self.sigmas_up[step_index - 1]\n            sigma_down = self.sigmas_down[step_index - 1]\n\n        # currently only gamma=0 is supported. This usually works best anyways.\n        # We can support gamma in the future but then need to scale the timestep before\n        # passing it to the model which requires a change in API\n        gamma = 0\n        sigma_hat = sigma * (gamma + 1)  # Note: sigma_hat == sigma for now\n\n        device = model_output.device\n        noise = randn_tensor(model_output.shape, dtype=model_output.dtype, device=device, generator=generator)\n\n        # 1. compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            sigma_input = sigma_hat if self.state_in_first_order else sigma_interpol\n            pred_original_sample = sample - sigma_input * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            sigma_input = sigma_hat if self.state_in_first_order else sigma_interpol\n            pred_original_sample = model_output * (-sigma_input / (sigma_input**2 + 1) ** 0.5) + (\n                sample / (sigma_input**2 + 1)\n            )\n        elif self.config.prediction_type == \"sample\":\n            raise NotImplementedError(\"prediction_type not implemented yet: sample\")\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        if self.state_in_first_order:\n            # 2. 
Convert to an ODE derivative for 1st order\n            derivative = (sample - pred_original_sample) / sigma_hat\n            # 3. delta timestep\n            dt = sigma_interpol - sigma_hat\n\n            # store for 2nd order step\n            self.sample = sample\n            self.dt = dt\n            prev_sample = sample + derivative * dt\n        else:\n            # DPM-Solver-2\n            # 2. Convert to an ODE derivative for 2nd order\n            derivative = (sample - pred_original_sample) / sigma_interpol\n            # 3. delta timestep\n            dt = sigma_down - sigma_hat\n\n            sample = self.sample\n            self.sample = None\n\n            prev_sample = sample + derivative * dt\n            prev_sample = prev_sample + noise * sigma_up\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    # Copied from diffusers.schedulers.scheduling_heun_discrete.HeunDiscreteScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [self.index_for_timestep(t, schedule_timesteps) for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_k_dpm_2_discrete.py",
    "content": "# Copyright 2023 Katherine Crowson, The HuggingFace Team and hlky. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999) -> torch.Tensor:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass KDPM2DiscreteScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Scheduler created by @crowsonkb in [k_diffusion](https://github.com/crowsonkb/k-diffusion), see:\n    https://github.com/crowsonkb/k-diffusion/blob/5b3af030dd83e0297272d861c19477735d0317ec/k_diffusion/sampling.py#L188\n\n    Scheduler inspired by DPM-Solver-2 and Algorithm 2 from Karras et al. (2022).\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n            options to clip the variance used when adding noise to the denoised sample. Choose from `fixed_small`,\n            `fixed_small_log`, `fixed_large`, `fixed_large_log`, `learned` or `learned_range`.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 2\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.00085,  # sensible defaults\n        beta_end: float = 0.012,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        prediction_type: str = \"epsilon\",\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        #  set all values\n        self.set_timesteps(num_train_timesteps, None, num_train_timesteps)\n\n    # Copied from diffusers.schedulers.scheduling_heun_discrete.HeunDiscreteScheduler.index_for_timestep\n    def index_for_timestep(self, timestep, schedule_timesteps=None):\n        if schedule_timesteps is None:\n            schedule_timesteps = self.timesteps\n\n        indices = (schedule_timesteps == timestep).nonzero()\n\n        if self.state_in_first_order:\n            pos = -1\n        else:\n            pos = 0\n        return indices[pos].item()\n\n    def scale_model_input(\n        self,\n        sample: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        if self.state_in_first_order:\n            sigma = self.sigmas[step_index]\n        else:\n            sigma = self.sigmas_interpol[step_index]\n\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n        return sample\n\n    def 
set_timesteps(\n        self,\n        num_inference_steps: int,\n        device: Union[str, torch.device] = None,\n        num_train_timesteps: Optional[int] = None,\n    ):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        num_train_timesteps = num_train_timesteps or self.config.num_train_timesteps\n\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        self.log_sigmas = torch.from_numpy(np.log(sigmas)).to(device)\n\n        sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n        sigmas = torch.from_numpy(sigmas).to(device=device)\n\n        # interpolate sigmas\n        sigmas_interpol = sigmas.log().lerp(sigmas.roll(1).log(), 0.5).exp()\n\n        self.sigmas = torch.cat([sigmas[:1], sigmas[1:].repeat_interleave(2), sigmas[-1:]])\n        self.sigmas_interpol = torch.cat(\n            [sigmas_interpol[:1], sigmas_interpol[1:].repeat_interleave(2), sigmas_interpol[-1:]]\n        )\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            timesteps = torch.from_numpy(timesteps).to(device, dtype=torch.float32)\n        else:\n            timesteps = torch.from_numpy(timesteps).to(device)\n\n        # interpolate timesteps\n        timesteps_interpol = self.sigma_to_t(sigmas_interpol).to(device, dtype=timesteps.dtype)\n        interleaved_timesteps = torch.stack((timesteps_interpol[1:-1, None], timesteps[1:, None]), dim=-1).flatten()\n\n        self.timesteps = torch.cat([timesteps[:1], interleaved_timesteps])\n\n        self.sample = None\n\n    def sigma_to_t(self, sigma):\n        # get log sigma\n        log_sigma = sigma.log()\n\n        # get distribution\n        dists = log_sigma - self.log_sigmas[:, None]\n\n        # get sigmas range\n        low_idx = dists.ge(0).cumsum(dim=0).argmax(dim=0).clamp(max=self.log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = self.log_sigmas[low_idx]\n        high = self.log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = w.clamp(0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.view(sigma.shape)\n        return t\n\n    @property\n    def state_in_first_order(self):\n        return self.sample is None\n\n    def step(\n        self,\n        model_output: Union[torch.FloatTensor, np.ndarray],\n        timestep: Union[float, torch.FloatTensor],\n        sample: Union[torch.FloatTensor, np.ndarray],\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Args:\n        Predict the sample at the previous timestep by reversing the SDE. 
Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n            model_output (`torch.FloatTensor` or `np.ndarray`): direct output from learned diffusion model. timestep\n            (`int`): current discrete timestep in the diffusion chain. sample (`torch.FloatTensor` or `np.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n        Returns:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        step_index = self.index_for_timestep(timestep)\n\n        if self.state_in_first_order:\n            sigma = self.sigmas[step_index]\n            sigma_interpol = self.sigmas_interpol[step_index + 1]\n            sigma_next = self.sigmas[step_index + 1]\n        else:\n            # 2nd order / KDPM2's method\n            sigma = self.sigmas[step_index - 1]\n            sigma_interpol = self.sigmas_interpol[step_index]\n            sigma_next = self.sigmas[step_index]\n\n        # currently only gamma=0 is supported. This usually works best anyways.\n        # We can support gamma in the future but then need to scale the timestep before\n        # passing it to the model which requires a change in API\n        gamma = 0\n        sigma_hat = sigma * (gamma + 1)  # Note: sigma_hat == sigma for now\n\n        # 1. compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            sigma_input = sigma_hat if self.state_in_first_order else sigma_interpol\n            pred_original_sample = sample - sigma_input * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            sigma_input = sigma_hat if self.state_in_first_order else sigma_interpol\n            pred_original_sample = model_output * (-sigma_input / (sigma_input**2 + 1) ** 0.5) + (\n                sample / (sigma_input**2 + 1)\n            )\n        elif self.config.prediction_type == \"sample\":\n            raise NotImplementedError(\"prediction_type not implemented yet: sample\")\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        if self.state_in_first_order:\n            # 2. Convert to an ODE derivative for 1st order\n            derivative = (sample - pred_original_sample) / sigma_hat\n            # 3. delta timestep\n            dt = sigma_interpol - sigma_hat\n\n            # store for 2nd order step\n            self.sample = sample\n        else:\n            # DPM-Solver-2\n            # 2. Convert to an ODE derivative for 2nd order\n            derivative = (sample - pred_original_sample) / sigma_interpol\n\n            # 3. 
delta timestep\n            dt = sigma_next - sigma_hat\n\n            sample = self.sample\n            self.sample = None\n\n        prev_sample = sample + derivative * dt\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    # Copied from diffusers.schedulers.scheduling_heun_discrete.HeunDiscreteScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [self.index_for_timestep(t, schedule_timesteps) for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_karras_ve.py",
    "content": "# Copyright 2023 NVIDIA and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, randn_tensor\nfrom .scheduling_utils import SchedulerMixin\n\n\n@dataclass\nclass KarrasVeOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        derivative (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Derivative of predicted original image sample (x_0).\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    derivative: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\nclass KarrasVeScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Stochastic sampling from Karras et al. [1] tailored to the Variance-Expanding (VE) models [2]. Use Algorithm 2 and\n    the VE column of Table 1 from [1] for reference.\n\n    [1] Karras, Tero, et al. \"Elucidating the Design Space of Diffusion-Based Generative Models.\"\n    https://arxiv.org/abs/2206.00364 [2] Song, Yang, et al. \"Score-based generative modeling through stochastic\n    differential equations.\" https://arxiv.org/abs/2011.13456\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details on the parameters, see the original paper's Appendix E.: \"Elucidating the Design Space of\n    Diffusion-Based Generative Models.\" https://arxiv.org/abs/2206.00364. 
The grid search values used to find the\n    optimal {s_noise, s_churn, s_min, s_max} for a specific model are described in Table 5 of the paper.\n\n    Args:\n        sigma_min (`float`): minimum noise magnitude\n        sigma_max (`float`): maximum noise magnitude\n        s_noise (`float`): the amount of additional noise to counteract loss of detail during sampling.\n            A reasonable range is [1.000, 1.011].\n        s_churn (`float`): the parameter controlling the overall amount of stochasticity.\n            A reasonable range is [0, 100].\n        s_min (`float`): the start value of the sigma range where we add noise (enable stochasticity).\n            A reasonable range is [0, 10].\n        s_max (`float`): the end value of the sigma range where we add noise.\n            A reasonable range is [0.2, 80].\n\n    \"\"\"\n\n    order = 2\n\n    @register_to_config\n    def __init__(\n        self,\n        sigma_min: float = 0.02,\n        sigma_max: float = 100,\n        s_noise: float = 1.007,\n        s_churn: float = 80,\n        s_min: float = 0.05,\n        s_max: float = 50,\n    ):\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = sigma_max\n\n        # setable values\n        self.num_inference_steps: int = None\n        self.timesteps: np.IntTensor = None\n        self.schedule: torch.FloatTensor = None  # sigma(t_i)\n\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the continuous timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n        timesteps = np.arange(0, self.num_inference_steps)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n        schedule = [\n            (\n                self.config.sigma_max**2\n                * (self.config.sigma_min**2 / self.config.sigma_max**2) ** (i / (num_inference_steps - 1))\n            )\n            for i in self.timesteps\n        ]\n        self.schedule = torch.tensor(schedule, dtype=torch.float32, device=device)\n\n    def add_noise_to_input(\n        self, sample: torch.FloatTensor, sigma: float, generator: Optional[torch.Generator] = None\n    ) -> Tuple[torch.FloatTensor, float]:\n        \"\"\"\n        Explicit Langevin-like \"churn\" step of adding noise to the sample according to a factor gamma_i ≥ 0 to reach a\n        higher noise level sigma_hat = sigma_i + gamma_i*sigma_i.\n\n        TODO Args:\n        \"\"\"\n        if self.config.s_min <= sigma <= self.config.s_max:\n            gamma = min(self.config.s_churn / self.num_inference_steps, 2**0.5 - 1)\n        else:\n            gamma = 0\n\n        # sample eps ~ N(0, S_noise^2 * I)\n        eps = self.config.s_noise * randn_tensor(sample.shape, generator=generator).to(sample.device)\n        sigma_hat = sigma + gamma * sigma\n        sample_hat = sample + ((sigma_hat**2 - sigma**2) ** 0.5 * eps)\n\n        return sample_hat, sigma_hat\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        sigma_hat: float,\n        sigma_prev: float,\n        sample_hat: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[KarrasVeOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            sigma_hat (`float`): TODO\n            sigma_prev (`float`): TODO\n            sample_hat (`torch.FloatTensor`): TODO\n            return_dict (`bool`): option for returning tuple rather than KarrasVeOutput class\n\n            KarrasVeOutput: updated sample in the diffusion chain and derivative (TODO double check).\n        Returns:\n            [`~schedulers.scheduling_karras_ve.KarrasVeOutput`] or `tuple`:\n            [`~schedulers.scheduling_karras_ve.KarrasVeOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        pred_original_sample = sample_hat + sigma_hat * model_output\n        derivative = (sample_hat - pred_original_sample) / sigma_hat\n        sample_prev = sample_hat + (sigma_prev - sigma_hat) * derivative\n\n        if not return_dict:\n            return (sample_prev, derivative)\n\n        return KarrasVeOutput(\n            prev_sample=sample_prev, derivative=derivative, pred_original_sample=pred_original_sample\n        )\n\n    def step_correct(\n        self,\n        model_output: torch.FloatTensor,\n        sigma_hat: float,\n        sigma_prev: float,\n        sample_hat: torch.FloatTensor,\n        sample_prev: torch.FloatTensor,\n        derivative: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[KarrasVeOutput, Tuple]:\n        \"\"\"\n        Correct the predicted sample based on the output model_output of the network. TODO complete description\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            sigma_hat (`float`): TODO\n            sigma_prev (`float`): TODO\n            sample_hat (`torch.FloatTensor`): TODO\n            sample_prev (`torch.FloatTensor`): TODO\n            derivative (`torch.FloatTensor`): TODO\n            return_dict (`bool`): option for returning tuple rather than KarrasVeOutput class\n\n        Returns:\n            prev_sample (TODO): updated sample in the diffusion chain. derivative (TODO): TODO\n\n        \"\"\"\n        pred_original_sample = sample_prev + sigma_prev * model_output\n        derivative_corr = (sample_prev - pred_original_sample) / sigma_prev\n        sample_prev = sample_hat + (sigma_prev - sigma_hat) * (0.5 * derivative + 0.5 * derivative_corr)\n\n        if not return_dict:\n            return (sample_prev, derivative)\n\n        return KarrasVeOutput(\n            prev_sample=sample_prev, derivative=derivative, pred_original_sample=pred_original_sample\n        )\n\n    def add_noise(self, original_samples, noise, timesteps):\n        raise NotImplementedError()\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_karras_ve_flax.py",
    "content": "# Copyright 2023 NVIDIA and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport flax\nimport jax.numpy as jnp\nfrom jax import random\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .scheduling_utils_flax import FlaxSchedulerMixin\n\n\n@flax.struct.dataclass\nclass KarrasVeSchedulerState:\n    # setable values\n    num_inference_steps: Optional[int] = None\n    timesteps: Optional[jnp.ndarray] = None\n    schedule: Optional[jnp.ndarray] = None  # sigma(t_i)\n\n    @classmethod\n    def create(cls):\n        return cls()\n\n\n@dataclass\nclass FlaxKarrasVeOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        derivative (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)` for images):\n            Derivative of predicted original image sample (x_0).\n        state (`KarrasVeSchedulerState`): the `FlaxKarrasVeScheduler` state data class.\n    \"\"\"\n\n    prev_sample: jnp.ndarray\n    derivative: jnp.ndarray\n    state: KarrasVeSchedulerState\n\n\nclass FlaxKarrasVeScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    Stochastic sampling from Karras et al. [1] tailored to the Variance-Expanding (VE) models [2]. Use Algorithm 2 and\n    the VE column of Table 1 from [1] for reference.\n\n    [1] Karras, Tero, et al. \"Elucidating the Design Space of Diffusion-Based Generative Models.\"\n    https://arxiv.org/abs/2206.00364 [2] Song, Yang, et al. \"Score-based generative modeling through stochastic\n    differential equations.\" https://arxiv.org/abs/2011.13456\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details on the parameters, see the original paper's Appendix E.: \"Elucidating the Design Space of\n    Diffusion-Based Generative Models.\" https://arxiv.org/abs/2206.00364. 
The grid search values used to find the\n    optimal {s_noise, s_churn, s_min, s_max} for a specific model are described in Table 5 of the paper.\n\n    Args:\n        sigma_min (`float`): minimum noise magnitude\n        sigma_max (`float`): maximum noise magnitude\n        s_noise (`float`): the amount of additional noise to counteract loss of detail during sampling.\n            A reasonable range is [1.000, 1.011].\n        s_churn (`float`): the parameter controlling the overall amount of stochasticity.\n            A reasonable range is [0, 100].\n        s_min (`float`): the start value of the sigma range where we add noise (enable stochasticity).\n            A reasonable range is [0, 10].\n        s_max (`float`): the end value of the sigma range where we add noise.\n            A reasonable range is [0.2, 80].\n    \"\"\"\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        sigma_min: float = 0.02,\n        sigma_max: float = 100,\n        s_noise: float = 1.007,\n        s_churn: float = 80,\n        s_min: float = 0.05,\n        s_max: float = 50,\n    ):\n        pass\n\n    def create_state(self):\n        return KarrasVeSchedulerState.create()\n\n    def set_timesteps(\n        self, state: KarrasVeSchedulerState, num_inference_steps: int, shape: Tuple = ()\n    ) -> KarrasVeSchedulerState:\n        \"\"\"\n        Sets the continuous timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            state (`KarrasVeSchedulerState`):\n                the `FlaxKarrasVeScheduler` state data class.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n\n        \"\"\"\n        timesteps = jnp.arange(0, num_inference_steps)[::-1].copy()\n        schedule = [\n            (\n                self.config.sigma_max**2\n                * (self.config.sigma_min**2 / self.config.sigma_max**2) ** (i / (num_inference_steps - 1))\n            )\n            for i in timesteps\n        ]\n\n        return state.replace(\n            num_inference_steps=num_inference_steps,\n            schedule=jnp.array(schedule, dtype=jnp.float32),\n            timesteps=timesteps,\n        )\n\n    def add_noise_to_input(\n        self,\n        state: KarrasVeSchedulerState,\n        sample: jnp.ndarray,\n        sigma: float,\n        key: random.KeyArray,\n    ) -> Tuple[jnp.ndarray, float]:\n        \"\"\"\n        Explicit Langevin-like \"churn\" step of adding noise to the sample according to a factor gamma_i ≥ 0 to reach a\n        higher noise level sigma_hat = sigma_i + gamma_i*sigma_i.\n\n        TODO Args:\n        \"\"\"\n        if self.config.s_min <= sigma <= self.config.s_max:\n            gamma = min(self.config.s_churn / state.num_inference_steps, 2**0.5 - 1)\n        else:\n            gamma = 0\n\n        # sample eps ~ N(0, S_noise^2 * I)\n        key = random.split(key, num=1)\n        eps = self.config.s_noise * random.normal(key=key, shape=sample.shape)\n        sigma_hat = sigma + gamma * sigma\n        sample_hat = sample + ((sigma_hat**2 - sigma**2) ** 0.5 * eps)\n\n        return sample_hat, sigma_hat\n\n    def step(\n        self,\n        state: KarrasVeSchedulerState,\n        model_output: jnp.ndarray,\n        sigma_hat: float,\n        sigma_prev: float,\n        sample_hat: jnp.ndarray,\n        return_dict: bool = True,\n    ) -> 
Union[FlaxKarrasVeOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            state (`KarrasVeSchedulerState`): the `FlaxKarrasVeScheduler` state data class.\n            model_output (`torch.FloatTensor` or `np.ndarray`): direct output from learned diffusion model.\n            sigma_hat (`float`): TODO\n            sigma_prev (`float`): TODO\n            sample_hat (`torch.FloatTensor` or `np.ndarray`): TODO\n            return_dict (`bool`): option for returning tuple rather than FlaxKarrasVeOutput class\n\n        Returns:\n            [`~schedulers.scheduling_karras_ve_flax.FlaxKarrasVeOutput`] or `tuple`: Updated sample in the diffusion\n            chain and derivative. [`~schedulers.scheduling_karras_ve_flax.FlaxKarrasVeOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n\n        pred_original_sample = sample_hat + sigma_hat * model_output\n        derivative = (sample_hat - pred_original_sample) / sigma_hat\n        sample_prev = sample_hat + (sigma_prev - sigma_hat) * derivative\n\n        if not return_dict:\n            return (sample_prev, derivative, state)\n\n        return FlaxKarrasVeOutput(prev_sample=sample_prev, derivative=derivative, state=state)\n\n    def step_correct(\n        self,\n        state: KarrasVeSchedulerState,\n        model_output: jnp.ndarray,\n        sigma_hat: float,\n        sigma_prev: float,\n        sample_hat: jnp.ndarray,\n        sample_prev: jnp.ndarray,\n        derivative: jnp.ndarray,\n        return_dict: bool = True,\n    ) -> Union[FlaxKarrasVeOutput, Tuple]:\n        \"\"\"\n        Correct the predicted sample based on the output model_output of the network. TODO complete description\n\n        Args:\n            state (`KarrasVeSchedulerState`): the `FlaxKarrasVeScheduler` state data class.\n            model_output (`torch.FloatTensor` or `np.ndarray`): direct output from learned diffusion model.\n            sigma_hat (`float`): TODO\n            sigma_prev (`float`): TODO\n            sample_hat (`torch.FloatTensor` or `np.ndarray`): TODO\n            sample_prev (`torch.FloatTensor` or `np.ndarray`): TODO\n            derivative (`torch.FloatTensor` or `np.ndarray`): TODO\n            return_dict (`bool`): option for returning tuple rather than FlaxKarrasVeOutput class\n\n        Returns:\n            prev_sample (TODO): updated sample in the diffusion chain. derivative (TODO): TODO\n\n        \"\"\"\n        pred_original_sample = sample_prev + sigma_prev * model_output\n        derivative_corr = (sample_prev - pred_original_sample) / sigma_prev\n        sample_prev = sample_hat + (sigma_prev - sigma_hat) * (0.5 * derivative + 0.5 * derivative_corr)\n\n        if not return_dict:\n            return (sample_prev, derivative, state)\n\n        return FlaxKarrasVeOutput(prev_sample=sample_prev, derivative=derivative, state=state)\n\n    def add_noise(self, state: KarrasVeSchedulerState, original_samples, noise, timesteps):\n        raise NotImplementedError()\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_lms_discrete.py",
    "content": "# Copyright 2023 Katherine Crowson and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\nimport warnings\nfrom dataclasses import dataclass\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\nfrom scipy import integrate\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin\n\n\n@dataclass\n# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->LMSDiscrete\nclass LMSDiscreteSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Linear Multistep Scheduler for discrete beta schedules. 
Based on the original k-diffusion implementation by\n    Katherine Crowson:\n    https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L181\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        use_karras_sigmas (`bool`, *optional*, defaults to `False`):\n            This parameter controls whether to use Karras sigmas (Karras et al. (2022) scheme) for step sizes in the\n            noise schedule during the sampling process. If True, the sigmas will be determined according to a sequence\n            of noise levels {σi} as defined in Equation (5) of the paper https://arxiv.org/pdf/2206.00364.pdf.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        use_karras_sigmas: Optional[bool] = False,\n        prediction_type: str = \"epsilon\",\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        sigmas = np.concatenate([sigmas[::-1], [0.0]]).astype(np.float32)\n        self.sigmas = 
torch.from_numpy(sigmas)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = self.sigmas.max()\n\n        # setable values\n        self.num_inference_steps = None\n        self.use_karras_sigmas = use_karras_sigmas\n        self.set_timesteps(num_train_timesteps, None)\n        self.derivatives = []\n        self.is_scale_input_called = False\n\n    def scale_model_input(\n        self, sample: torch.FloatTensor, timestep: Union[float, torch.FloatTensor]\n    ) -> torch.FloatTensor:\n        \"\"\"\n        Scales the denoising model input by `(sigma**2 + 1) ** 0.5` to match the K-LMS algorithm.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`float` or `torch.FloatTensor`): the current timestep in the diffusion chain\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero().item()\n        sigma = self.sigmas[step_index]\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n        self.is_scale_input_called = True\n        return sample\n\n    def get_lms_coefficient(self, order, t, current_order):\n        \"\"\"\n        Compute a linear multistep coefficient.\n\n        Args:\n            order (TODO):\n            t (TODO):\n            current_order (TODO):\n        \"\"\"\n\n        def lms_derivative(tau):\n            prod = 1.0\n            for k in range(order):\n                if current_order == k:\n                    continue\n                prod *= (tau - self.sigmas[t - k]) / (self.sigmas[t - current_order] - self.sigmas[t - k])\n            return prod\n\n        integrated_coeff = integrate.quad(lms_derivative, self.sigmas[t], self.sigmas[t + 1], epsrel=1e-4)[0]\n\n        return integrated_coeff\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. 
If `None`, the timesteps are not moved.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n\n        timesteps = np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()\n\n        sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)\n        log_sigmas = np.log(sigmas)\n        sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)\n\n        if self.use_karras_sigmas:\n            sigmas = self._convert_to_karras(in_sigmas=sigmas)\n            timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])\n\n        sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)\n\n        self.sigmas = torch.from_numpy(sigmas).to(device=device)\n        if str(device).startswith(\"mps\"):\n            # mps does not support float64\n            self.timesteps = torch.from_numpy(timesteps).to(device, dtype=torch.float32)\n        else:\n            self.timesteps = torch.from_numpy(timesteps).to(device=device)\n\n        self.derivatives = []\n\n    # copied from diffusers.schedulers.scheduling_euler_discrete._sigma_to_t\n    def _sigma_to_t(self, sigma, log_sigmas):\n        # get log sigma\n        log_sigma = np.log(sigma)\n\n        # get distribution\n        dists = log_sigma - log_sigmas[:, np.newaxis]\n\n        # get sigmas range\n        low_idx = np.cumsum((dists >= 0), axis=0).argmax(axis=0).clip(max=log_sigmas.shape[0] - 2)\n        high_idx = low_idx + 1\n\n        low = log_sigmas[low_idx]\n        high = log_sigmas[high_idx]\n\n        # interpolate sigmas\n        w = (low - log_sigma) / (low - high)\n        w = np.clip(w, 0, 1)\n\n        # transform interpolation to time range\n        t = (1 - w) * low_idx + w * high_idx\n        t = t.reshape(sigma.shape)\n        return t\n\n    # copied from diffusers.schedulers.scheduling_euler_discrete._convert_to_karras\n    def _convert_to_karras(self, in_sigmas: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"Constructs the noise schedule of Karras et al. (2022).\"\"\"\n\n        sigma_min: float = in_sigmas[-1].item()\n        sigma_max: float = in_sigmas[0].item()\n\n        rho = 7.0  # 7.0 is the value used in the paper\n        ramp = np.linspace(0, 1, self.num_inference_steps)\n        min_inv_rho = sigma_min ** (1 / rho)\n        max_inv_rho = sigma_max ** (1 / rho)\n        sigmas = (max_inv_rho + ramp * (min_inv_rho - max_inv_rho)) ** rho\n        return sigmas\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: Union[float, torch.FloatTensor],\n        sample: torch.FloatTensor,\n        order: int = 4,\n        return_dict: bool = True,\n    ) -> Union[LMSDiscreteSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. 
Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`float`): current timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            order: coefficient for multi-step inference.\n            return_dict (`bool`): option for returning tuple rather than LMSDiscreteSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.LMSDiscreteSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.LMSDiscreteSchedulerOutput`] if `return_dict` is True, otherwise a `tuple`.\n            When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if not self.is_scale_input_called:\n            warnings.warn(\n                \"The `scale_model_input` function should be called before `step` to ensure correct denoising. \"\n                \"See `StableDiffusionPipeline` for a usage example.\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero().item()\n        sigma = self.sigmas[step_index]\n\n        # 1. compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = sample - sigma * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            # * c_out + input * c_skip\n            pred_original_sample = model_output * (-sigma / (sigma**2 + 1) ** 0.5) + (sample / (sigma**2 + 1))\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        # 2. Convert to an ODE derivative\n        derivative = (sample - pred_original_sample) / sigma\n        self.derivatives.append(derivative)\n        if len(self.derivatives) > order:\n            self.derivatives.pop(0)\n\n        # 3. Compute linear multistep coefficients\n        order = min(step_index + 1, order)\n        lms_coeffs = [self.get_lms_coefficient(order, step_index, curr_order) for curr_order in range(order)]\n\n        # 4. 
Compute previous sample based on the derivatives path\n        prev_sample = sample + sum(\n            coeff * derivative for coeff, derivative in zip(lms_coeffs, reversed(self.derivatives))\n        )\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return LMSDiscreteSchedulerOutput(prev_sample=prev_sample, pred_original_sample=pred_original_sample)\n\n    # Copied from diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        sigmas = self.sigmas.to(device=original_samples.device, dtype=original_samples.dtype)\n        if original_samples.device.type == \"mps\" and torch.is_floating_point(timesteps):\n            # mps does not support float64\n            schedule_timesteps = self.timesteps.to(original_samples.device, dtype=torch.float32)\n            timesteps = timesteps.to(original_samples.device, dtype=torch.float32)\n        else:\n            schedule_timesteps = self.timesteps.to(original_samples.device)\n            timesteps = timesteps.to(original_samples.device)\n\n        step_indices = [(schedule_timesteps == t).nonzero().item() for t in timesteps]\n\n        sigma = sigmas[step_indices].flatten()\n        while len(sigma.shape) < len(original_samples.shape):\n            sigma = sigma.unsqueeze(-1)\n\n        noisy_samples = original_samples + noise * sigma\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_lms_discrete_flax.py",
    "content": "# Copyright 2023 Katherine Crowson and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport flax\nimport jax.numpy as jnp\nfrom scipy import integrate\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils_flax import (\n    CommonSchedulerState,\n    FlaxKarrasDiffusionSchedulers,\n    FlaxSchedulerMixin,\n    FlaxSchedulerOutput,\n    broadcast_to_shape_from_left,\n)\n\n\n@flax.struct.dataclass\nclass LMSDiscreteSchedulerState:\n    common: CommonSchedulerState\n\n    # setable values\n    init_noise_sigma: jnp.ndarray\n    timesteps: jnp.ndarray\n    sigmas: jnp.ndarray\n    num_inference_steps: Optional[int] = None\n\n    # running values\n    derivatives: Optional[jnp.ndarray] = None\n\n    @classmethod\n    def create(\n        cls, common: CommonSchedulerState, init_noise_sigma: jnp.ndarray, timesteps: jnp.ndarray, sigmas: jnp.ndarray\n    ):\n        return cls(common=common, init_noise_sigma=init_noise_sigma, timesteps=timesteps, sigmas=sigmas)\n\n\n@dataclass\nclass FlaxLMSSchedulerOutput(FlaxSchedulerOutput):\n    state: LMSDiscreteSchedulerState\n\n\nclass FlaxLMSDiscreteScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    Linear Multistep Scheduler for discrete beta schedules. Based on the original k-diffusion implementation by\n    Katherine Crowson:\n    https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L181\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear` or `scaled_linear`.\n        trained_betas (`jnp.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        dtype (`jnp.dtype`, *optional*, defaults to `jnp.float32`):\n            the `dtype` used for params and computation.\n    \"\"\"\n\n    _compatibles = [e.name for e in FlaxKarrasDiffusionSchedulers]\n\n    dtype: jnp.dtype\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[jnp.ndarray] = None,\n        prediction_type: str = \"epsilon\",\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        self.dtype = dtype\n\n    def create_state(self, common: Optional[CommonSchedulerState] = None) -> LMSDiscreteSchedulerState:\n        if common is None:\n            common = CommonSchedulerState.create(self)\n\n        timesteps = jnp.arange(0, self.config.num_train_timesteps).round()[::-1]\n        sigmas = ((1 - common.alphas_cumprod) / common.alphas_cumprod) ** 0.5\n\n        # standard deviation of the initial noise distribution\n        init_noise_sigma = sigmas.max()\n\n        return LMSDiscreteSchedulerState.create(\n            common=common,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n            sigmas=sigmas,\n        )\n\n    def scale_model_input(self, state: LMSDiscreteSchedulerState, sample: jnp.ndarray, timestep: int) -> jnp.ndarray:\n        \"\"\"\n        Scales the denoising model input by `(sigma**2 + 1) ** 0.5` to match the K-LMS algorithm.\n\n        Args:\n            state (`LMSDiscreteSchedulerState`):\n                the `FlaxLMSDiscreteScheduler` state data class instance.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            timestep (`int`):\n                current discrete timestep in the diffusion chain.\n\n        Returns:\n            `jnp.ndarray`: scaled input sample\n        \"\"\"\n        (step_index,) = jnp.where(state.timesteps == timestep, size=1)\n        step_index = step_index[0]\n\n        sigma = state.sigmas[step_index]\n        sample = sample / ((sigma**2 + 1) ** 0.5)\n        return sample\n\n    def get_lms_coefficient(self, state: LMSDiscreteSchedulerState, order, t, current_order):\n        \"\"\"\n        Compute a linear multistep coefficient.\n\n        Args:\n            order (TODO):\n            t (TODO):\n            current_order (TODO):\n        \"\"\"\n\n        def lms_derivative(tau):\n            prod = 1.0\n            for k in range(order):\n                if current_order == k:\n                    continue\n                prod *= (tau - state.sigmas[t - k]) / (state.sigmas[t - current_order] - state.sigmas[t - k])\n            return prod\n\n        integrated_coeff = integrate.quad(lms_derivative, state.sigmas[t], state.sigmas[t + 1], epsrel=1e-4)[0]\n\n        return integrated_coeff\n\n    
def set_timesteps(\n        self, state: LMSDiscreteSchedulerState, num_inference_steps: int, shape: Tuple = ()\n    ) -> LMSDiscreteSchedulerState:\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            state (`LMSDiscreteSchedulerState`):\n                the `FlaxLMSDiscreteScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n\n        timesteps = jnp.linspace(self.config.num_train_timesteps - 1, 0, num_inference_steps, dtype=self.dtype)\n\n        low_idx = jnp.floor(timesteps).astype(jnp.int32)\n        high_idx = jnp.ceil(timesteps).astype(jnp.int32)\n\n        frac = jnp.mod(timesteps, 1.0)\n\n        sigmas = ((1 - state.common.alphas_cumprod) / state.common.alphas_cumprod) ** 0.5\n        sigmas = (1 - frac) * sigmas[low_idx] + frac * sigmas[high_idx]\n        sigmas = jnp.concatenate([sigmas, jnp.array([0.0], dtype=self.dtype)])\n\n        timesteps = timesteps.astype(jnp.int32)\n\n        # initial running values\n        derivatives = jnp.zeros((0,) + shape, dtype=self.dtype)\n\n        return state.replace(\n            timesteps=timesteps,\n            sigmas=sigmas,\n            num_inference_steps=num_inference_steps,\n            derivatives=derivatives,\n        )\n\n    def step(\n        self,\n        state: LMSDiscreteSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n        order: int = 4,\n        return_dict: bool = True,\n    ) -> Union[FlaxLMSSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            state (`LMSDiscreteSchedulerState`): the `FlaxLMSDiscreteScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            order: coefficient for multi-step inference.\n            return_dict (`bool`): option for returning tuple rather than FlaxLMSSchedulerOutput class\n\n        Returns:\n            [`FlaxLMSSchedulerOutput`] or `tuple`: [`FlaxLMSSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if state.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        sigma = state.sigmas[timestep]\n\n        # 1. 
compute predicted original sample (x_0) from sigma-scaled predicted noise\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = sample - sigma * model_output\n        elif self.config.prediction_type == \"v_prediction\":\n            # * c_out + input * c_skip\n            pred_original_sample = model_output * (-sigma / (sigma**2 + 1) ** 0.5) + (sample / (sigma**2 + 1))\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`\"\n            )\n\n        # 2. Convert to an ODE derivative\n        derivative = (sample - pred_original_sample) / sigma\n        state = state.replace(derivatives=jnp.append(state.derivatives, derivative))\n        if len(state.derivatives) > order:\n            state = state.replace(derivatives=jnp.delete(state.derivatives, 0))\n\n        # 3. Compute linear multistep coefficients\n        order = min(timestep + 1, order)\n        lms_coeffs = [self.get_lms_coefficient(state, order, timestep, curr_order) for curr_order in range(order)]\n\n        # 4. Compute previous sample based on the derivatives path\n        prev_sample = sample + sum(\n            coeff * derivative for coeff, derivative in zip(lms_coeffs, reversed(state.derivatives))\n        )\n\n        if not return_dict:\n            return (prev_sample, state)\n\n        return FlaxLMSSchedulerOutput(prev_sample=prev_sample, state=state)\n\n    def add_noise(\n        self,\n        state: LMSDiscreteSchedulerState,\n        original_samples: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        sigma = state.sigmas[timesteps].flatten()\n        sigma = broadcast_to_shape_from_left(sigma, noise.shape)\n\n        noisy_samples = original_samples + noise * sigma\n\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
  {
    "path": "diffusers/schedulers/scheduling_pndm.py",
    "content": "# Copyright 2023 Zhejiang University Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/ermongroup/ddim\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass PNDMScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    Pseudo numerical methods for diffusion models (PNDM) proposes using more advanced ODE integration techniques,\n    namely Runge-Kutta method and a linear multi-step method.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2202.09778\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        skip_prk_steps (`bool`):\n            allows the scheduler to skip the Runge-Kutta steps that are defined in the original paper as being required\n            before PLMS steps; defaults to `False`.\n        set_alpha_to_one (`bool`, default `False`):\n            each diffusion step uses the value of alphas product at that step and at the previous one. For the final\n            step there is no previous alpha. When this option is `True` the previous alpha product is fixed to `1`,\n            otherwise it uses the value of alpha at step 0.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion process)\n            or `v_prediction` (see section 2.4 https://imagen.research.google/video/paper.pdf)\n        steps_offset (`int`, default `0`):\n            an offset added to the inference steps. You can use a combination of `offset=1` and\n            `set_alpha_to_one=False`, to make the last step use step 0 for the previous alpha product, as done in\n            stable diffusion.\n\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        skip_prk_steps: bool = False,\n        set_alpha_to_one: bool = False,\n        prediction_type: str = \"epsilon\",\n        steps_offset: int = 0,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n\n        self.final_alpha_cumprod = torch.tensor(1.0) if set_alpha_to_one else self.alphas_cumprod[0]\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # For now we only support F-PNDM, i.e. 
the runge-kutta method\n        # For more information on the algorithm please take a look at the paper: https://arxiv.org/pdf/2202.09778.pdf\n        # mainly at formula (9), (12), (13) and the Algorithm 2.\n        self.pndm_order = 4\n\n        # running values\n        self.cur_model_output = 0\n        self.counter = 0\n        self.cur_sample = None\n        self.ets = []\n\n        # setable values\n        self.num_inference_steps = None\n        self._timesteps = np.arange(0, num_train_timesteps)[::-1].copy()\n        self.prk_timesteps = None\n        self.plms_timesteps = None\n        self.timesteps = None\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n\n        self.num_inference_steps = num_inference_steps\n        step_ratio = self.config.num_train_timesteps // self.num_inference_steps\n        # creates integer timesteps by multiplying by ratio\n        # casting to int to avoid issues when num_inference_step is power of 3\n        self._timesteps = (np.arange(0, num_inference_steps) * step_ratio).round()\n        self._timesteps += self.config.steps_offset\n\n        if self.config.skip_prk_steps:\n            # for some models like stable diffusion the prk steps can/should be skipped to\n            # produce better results. When using PNDM with `self.config.skip_prk_steps` the implementation\n            # is based on crowsonkb's PLMS sampler implementation: https://github.com/CompVis/latent-diffusion/pull/51\n            self.prk_timesteps = np.array([])\n            self.plms_timesteps = np.concatenate([self._timesteps[:-1], self._timesteps[-2:-1], self._timesteps[-1:]])[\n                ::-1\n            ].copy()\n        else:\n            prk_timesteps = np.array(self._timesteps[-self.pndm_order :]).repeat(2) + np.tile(\n                np.array([0, self.config.num_train_timesteps // num_inference_steps // 2]), self.pndm_order\n            )\n            self.prk_timesteps = (prk_timesteps[:-1].repeat(2)[1:-1])[::-1].copy()\n            self.plms_timesteps = self._timesteps[:-3][\n                ::-1\n            ].copy()  # we copy to avoid having negative strides which are not supported by torch.from_numpy\n\n        timesteps = np.concatenate([self.prk_timesteps, self.plms_timesteps]).astype(np.int64)\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n        self.ets = []\n        self.counter = 0\n        self.cur_model_output = 0\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. 
Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        This function calls `step_prk()` or `step_plms()` depending on the internal variable `counter`.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.SchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.counter < len(self.prk_timesteps) and not self.config.skip_prk_steps:\n            return self.step_prk(model_output=model_output, timestep=timestep, sample=sample, return_dict=return_dict)\n        else:\n            return self.step_plms(model_output=model_output, timestep=timestep, sample=sample, return_dict=return_dict)\n\n    def step_prk(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the Runge-Kutta method. RK takes 4 forward passes to approximate the\n        solution to the differential equation.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        diff_to_prev = 0 if self.counter % 2 else self.config.num_train_timesteps // self.num_inference_steps // 2\n        prev_timestep = timestep - diff_to_prev\n        timestep = self.prk_timesteps[self.counter // 4 * 4]\n\n        if self.counter % 4 == 0:\n            self.cur_model_output += 1 / 6 * model_output\n            self.ets.append(model_output)\n            self.cur_sample = sample\n        elif (self.counter - 1) % 4 == 0:\n            self.cur_model_output += 1 / 3 * model_output\n        elif (self.counter - 2) % 4 == 0:\n            self.cur_model_output += 1 / 3 * model_output\n        elif (self.counter - 3) % 4 == 0:\n            model_output = self.cur_model_output + 1 / 6 * model_output\n            self.cur_model_output = 0\n\n        # cur_sample should not be `None`\n        cur_sample = self.cur_sample if self.cur_sample is not None else sample\n\n        prev_sample = self._get_prev_sample(cur_sample, timestep, prev_timestep, model_output)\n        self.counter += 1\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def step_plms(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the linear multi-step method. This has one forward pass with multiple\n        times to approximate the solution.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if not self.config.skip_prk_steps and len(self.ets) < 3:\n            raise ValueError(\n                f\"{self.__class__} can only be run AFTER scheduler has been run \"\n                \"in 'prk' mode for at least 12 iterations \"\n                \"See: https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/pipeline_pndm.py \"\n                \"for more information.\"\n            )\n\n        prev_timestep = timestep - self.config.num_train_timesteps // self.num_inference_steps\n\n        if self.counter != 1:\n            self.ets = self.ets[-3:]\n            self.ets.append(model_output)\n        else:\n            prev_timestep = timestep\n            timestep = timestep + self.config.num_train_timesteps // self.num_inference_steps\n\n        if len(self.ets) == 1 and self.counter == 0:\n            model_output = model_output\n            self.cur_sample = sample\n        elif len(self.ets) == 1 and self.counter == 1:\n            model_output = (model_output + self.ets[-1]) / 2\n            sample = self.cur_sample\n            self.cur_sample = None\n        elif len(self.ets) == 2:\n            model_output = (3 * self.ets[-1] - self.ets[-2]) / 2\n        elif len(self.ets) == 3:\n            model_output = (23 * self.ets[-1] - 16 * self.ets[-2] + 5 * self.ets[-3]) / 12\n        else:\n            model_output = (1 / 24) * (55 * self.ets[-1] - 59 * self.ets[-2] + 37 * self.ets[-3] - 9 * self.ets[-4])\n\n        prev_sample = self._get_prev_sample(sample, timestep, prev_timestep, model_output)\n        self.counter += 1\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def _get_prev_sample(self, sample, timestep, prev_timestep, model_output):\n        # See formula (9) of PNDM paper https://arxiv.org/pdf/2202.09778.pdf\n        # this function computes x_(t−δ) using the formula of (9)\n        # Note that x_t needs to be added to both sides of the equation\n\n        # Notation (<variable name> -> <name in paper>\n        # alpha_prod_t -> α_t\n        # alpha_prod_t_prev -> α_(t−δ)\n        # beta_prod_t -> (1 - α_t)\n        # beta_prod_t_prev -> (1 - α_(t−δ))\n        # sample -> x_t\n        # model_output -> e_θ(x_t, t)\n        # prev_sample -> x_(t−δ)\n        alpha_prod_t = self.alphas_cumprod[timestep]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.final_alpha_cumprod\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        if self.config.prediction_type == \"v_prediction\":\n            model_output = (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        elif self.config.prediction_type != \"epsilon\":\n            raise 
ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon` or `v_prediction`\"\n            )\n\n        # corresponds to (α_(t−δ) - α_t) divided by\n        # denominator of x_t in formula (9) and plus 1\n        # Note: (α_(t−δ) - α_t) / (sqrt(α_t) * (sqrt(α_(t−δ)) + sqr(α_t))) =\n        # sqrt(α_(t−δ)) / sqrt(α_t))\n        sample_coeff = (alpha_prod_t_prev / alpha_prod_t) ** (0.5)\n\n        # corresponds to denominator of e_θ(x_t, t) in formula (9)\n        model_output_denom_coeff = alpha_prod_t * beta_prod_t_prev ** (0.5) + (\n            alpha_prod_t * beta_prod_t * alpha_prod_t_prev\n        ) ** (0.5)\n\n        # full formula (9)\n        prev_sample = (\n            sample_coeff * sample - (alpha_prod_t_prev - alpha_prod_t) * model_output / model_output_denom_coeff\n        )\n\n        return prev_sample\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
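The `PNDMScheduler` defined in the file above follows the usual diffusers scheduler contract: call `set_timesteps` once, then repeatedly call `step` and feed `prev_sample` back in as the next sample. A minimal sketch of that loop, assuming the installed `diffusers` package and a random tensor standing in for a real denoiser's output (not part of the repository itself):

```python
import torch

from diffusers import PNDMScheduler  # assumes the `diffusers` package is installed

# skip_prk_steps=True uses the PLMS-only path, as Stable Diffusion does
scheduler = PNDMScheduler(num_train_timesteps=1000, beta_schedule="linear", skip_prk_steps=True)
scheduler.set_timesteps(num_inference_steps=50)

sample = torch.randn(1, 3, 32, 32)  # initial noise; the shape is arbitrary here
for t in scheduler.timesteps:
    # stand-in for a denoiser call such as `unet(sample, t).sample`
    model_output = torch.randn_like(sample)
    sample = scheduler.step(model_output, t, sample).prev_sample
```

With `skip_prk_steps=False` the same loop first runs the Runge-Kutta (PRK) timesteps and only then the linear multi-step (PLMS) ones, which is what the `counter`-based dispatch in `step` implements.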
  {
    "path": "diffusers/schedulers/scheduling_pndm_flax.py",
    "content": "# Copyright 2023 Zhejiang University Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/ermongroup/ddim\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport flax\nimport jax\nimport jax.numpy as jnp\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils_flax import (\n    CommonSchedulerState,\n    FlaxKarrasDiffusionSchedulers,\n    FlaxSchedulerMixin,\n    FlaxSchedulerOutput,\n    add_noise_common,\n)\n\n\n@flax.struct.dataclass\nclass PNDMSchedulerState:\n    common: CommonSchedulerState\n    final_alpha_cumprod: jnp.ndarray\n\n    # setable values\n    init_noise_sigma: jnp.ndarray\n    timesteps: jnp.ndarray\n    num_inference_steps: Optional[int] = None\n    prk_timesteps: Optional[jnp.ndarray] = None\n    plms_timesteps: Optional[jnp.ndarray] = None\n\n    # running values\n    cur_model_output: Optional[jnp.ndarray] = None\n    counter: Optional[jnp.int32] = None\n    cur_sample: Optional[jnp.ndarray] = None\n    ets: Optional[jnp.ndarray] = None\n\n    @classmethod\n    def create(\n        cls,\n        common: CommonSchedulerState,\n        final_alpha_cumprod: jnp.ndarray,\n        init_noise_sigma: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ):\n        return cls(\n            common=common,\n            final_alpha_cumprod=final_alpha_cumprod,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n\n@dataclass\nclass FlaxPNDMSchedulerOutput(FlaxSchedulerOutput):\n    state: PNDMSchedulerState\n\n\nclass FlaxPNDMScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    Pseudo numerical methods for diffusion models (PNDM) proposes using more advanced ODE integration techniques,\n    namely Runge-Kutta method and a linear multi-step method.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2202.09778\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. 
Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`jnp.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        skip_prk_steps (`bool`):\n            allows the scheduler to skip the Runge-Kutta steps that are defined in the original paper as being required\n            before plms steps; defaults to `False`.\n        set_alpha_to_one (`bool`, default `False`):\n            each diffusion step uses the value of alphas product at that step and at the previous one. For the final\n            step there is no previous alpha. When this option is `True` the previous alpha product is fixed to `1`,\n            otherwise it uses the value of alpha at step 0.\n        steps_offset (`int`, default `0`):\n            an offset added to the inference steps. You can use a combination of `offset=1` and\n            `set_alpha_to_one=False`, to make the last step use step 0 for the previous alpha product, as done in\n            stable diffusion.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        dtype (`jnp.dtype`, *optional*, defaults to `jnp.float32`):\n            the `dtype` used for params and computation.\n    \"\"\"\n\n    _compatibles = [e.name for e in FlaxKarrasDiffusionSchedulers]\n\n    dtype: jnp.dtype\n    pndm_order: int\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[jnp.ndarray] = None,\n        skip_prk_steps: bool = False,\n        set_alpha_to_one: bool = False,\n        steps_offset: int = 0,\n        prediction_type: str = \"epsilon\",\n        dtype: jnp.dtype = jnp.float32,\n    ):\n        self.dtype = dtype\n\n        # For now we only support F-PNDM, i.e. 
the runge-kutta method\n        # For more information on the algorithm please take a look at the paper: https://arxiv.org/pdf/2202.09778.pdf\n        # mainly at formula (9), (12), (13) and the Algorithm 2.\n        self.pndm_order = 4\n\n    def create_state(self, common: Optional[CommonSchedulerState] = None) -> PNDMSchedulerState:\n        if common is None:\n            common = CommonSchedulerState.create(self)\n\n        # At every step in ddim, we are looking into the previous alphas_cumprod\n        # For the final step, there is no previous alphas_cumprod because we are already at 0\n        # `set_alpha_to_one` decides whether we set this parameter simply to one or\n        # whether we use the final alpha of the \"non-previous\" one.\n        final_alpha_cumprod = (\n            jnp.array(1.0, dtype=self.dtype) if self.config.set_alpha_to_one else common.alphas_cumprod[0]\n        )\n\n        # standard deviation of the initial noise distribution\n        init_noise_sigma = jnp.array(1.0, dtype=self.dtype)\n\n        timesteps = jnp.arange(0, self.config.num_train_timesteps).round()[::-1]\n\n        return PNDMSchedulerState.create(\n            common=common,\n            final_alpha_cumprod=final_alpha_cumprod,\n            init_noise_sigma=init_noise_sigma,\n            timesteps=timesteps,\n        )\n\n    def set_timesteps(self, state: PNDMSchedulerState, num_inference_steps: int, shape: Tuple) -> PNDMSchedulerState:\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            state (`PNDMSchedulerState`):\n                the `FlaxPNDMScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            shape (`Tuple`):\n                the shape of the samples to be generated.\n        \"\"\"\n\n        step_ratio = self.config.num_train_timesteps // num_inference_steps\n        # creates integer timesteps by multiplying by ratio\n        # rounding to avoid issues when num_inference_step is power of 3\n        _timesteps = (jnp.arange(0, num_inference_steps) * step_ratio).round() + self.config.steps_offset\n\n        if self.config.skip_prk_steps:\n            # for some models like stable diffusion the prk steps can/should be skipped to\n            # produce better results. 
When using PNDM with `self.config.skip_prk_steps` the implementation\n            # is based on crowsonkb's PLMS sampler implementation: https://github.com/CompVis/latent-diffusion/pull/51\n\n            prk_timesteps = jnp.array([], dtype=jnp.int32)\n            plms_timesteps = jnp.concatenate([_timesteps[:-1], _timesteps[-2:-1], _timesteps[-1:]])[::-1]\n\n        else:\n            prk_timesteps = _timesteps[-self.pndm_order :].repeat(2) + jnp.tile(\n                jnp.array([0, self.config.num_train_timesteps // num_inference_steps // 2], dtype=jnp.int32),\n                self.pndm_order,\n            )\n\n            prk_timesteps = (prk_timesteps[:-1].repeat(2)[1:-1])[::-1]\n            plms_timesteps = _timesteps[:-3][::-1]\n\n        timesteps = jnp.concatenate([prk_timesteps, plms_timesteps])\n\n        # initial running values\n\n        cur_model_output = jnp.zeros(shape, dtype=self.dtype)\n        counter = jnp.int32(0)\n        cur_sample = jnp.zeros(shape, dtype=self.dtype)\n        ets = jnp.zeros((4,) + shape, dtype=self.dtype)\n\n        return state.replace(\n            timesteps=timesteps,\n            num_inference_steps=num_inference_steps,\n            prk_timesteps=prk_timesteps,\n            plms_timesteps=plms_timesteps,\n            cur_model_output=cur_model_output,\n            counter=counter,\n            cur_sample=cur_sample,\n            ets=ets,\n        )\n\n    def scale_model_input(\n        self, state: PNDMSchedulerState, sample: jnp.ndarray, timestep: Optional[int] = None\n    ) -> jnp.ndarray:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            state (`PNDMSchedulerState`): the `FlaxPNDMScheduler` state data class instance.\n            sample (`jnp.ndarray`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `jnp.ndarray`: scaled input sample\n        \"\"\"\n        return sample\n\n    def step(\n        self,\n        state: PNDMSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n        return_dict: bool = True,\n    ) -> Union[FlaxPNDMSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        This function calls `step_prk()` or `step_plms()` depending on the internal variable `counter`.\n\n        Args:\n            state (`PNDMSchedulerState`): the `FlaxPNDMScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than FlaxPNDMSchedulerOutput class\n\n        Returns:\n            [`FlaxPNDMSchedulerOutput`] or `tuple`: [`FlaxPNDMSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        if state.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if self.config.skip_prk_steps:\n            prev_sample, state = self.step_plms(state, model_output, timestep, sample)\n        else:\n            prk_prev_sample, prk_state = self.step_prk(state, model_output, timestep, sample)\n            plms_prev_sample, plms_state = self.step_plms(state, model_output, timestep, sample)\n\n            cond = state.counter < len(state.prk_timesteps)\n\n            prev_sample = jax.lax.select(cond, prk_prev_sample, plms_prev_sample)\n\n            state = state.replace(\n                cur_model_output=jax.lax.select(cond, prk_state.cur_model_output, plms_state.cur_model_output),\n                ets=jax.lax.select(cond, prk_state.ets, plms_state.ets),\n                cur_sample=jax.lax.select(cond, prk_state.cur_sample, plms_state.cur_sample),\n                counter=jax.lax.select(cond, prk_state.counter, plms_state.counter),\n            )\n\n        if not return_dict:\n            return (prev_sample, state)\n\n        return FlaxPNDMSchedulerOutput(prev_sample=prev_sample, state=state)\n\n    def step_prk(\n        self,\n        state: PNDMSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n    ) -> Union[FlaxPNDMSchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the Runge-Kutta method. RK takes 4 forward passes to approximate the\n        solution to the differential equation.\n\n        Args:\n            state (`PNDMSchedulerState`): the `FlaxPNDMScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than FlaxPNDMSchedulerOutput class\n\n        Returns:\n            [`FlaxPNDMSchedulerOutput`] or `tuple`: [`FlaxPNDMSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        if state.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        diff_to_prev = jnp.where(\n            state.counter % 2, 0, self.config.num_train_timesteps // state.num_inference_steps // 2\n        )\n        prev_timestep = timestep - diff_to_prev\n        timestep = state.prk_timesteps[state.counter // 4 * 4]\n\n        model_output = jax.lax.select(\n            (state.counter % 4) != 3,\n            model_output,  # remainder 0, 1, 2\n            state.cur_model_output + 1 / 6 * model_output,  # remainder 3\n        )\n\n        state = state.replace(\n            cur_model_output=jax.lax.select_n(\n                state.counter % 4,\n                state.cur_model_output + 1 / 6 * model_output,  # remainder 0\n                state.cur_model_output + 1 / 3 * model_output,  # remainder 1\n                state.cur_model_output + 1 / 3 * model_output,  # remainder 2\n                jnp.zeros_like(state.cur_model_output),  # remainder 3\n            ),\n            ets=jax.lax.select(\n                (state.counter % 4) == 0,\n                state.ets.at[0:3].set(state.ets[1:4]).at[3].set(model_output),  # remainder 0\n                state.ets,  # remainder 1, 2, 3\n            ),\n            cur_sample=jax.lax.select(\n                (state.counter % 4) == 0,\n                sample,  # remainder 0\n                state.cur_sample,  # remainder 1, 2, 3\n            ),\n        )\n\n        cur_sample = state.cur_sample\n        prev_sample = self._get_prev_sample(state, cur_sample, timestep, prev_timestep, model_output)\n        state = state.replace(counter=state.counter + 1)\n\n        return (prev_sample, state)\n\n    def step_plms(\n        self,\n        state: PNDMSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n    ) -> Union[FlaxPNDMSchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the linear multi-step method. This has one forward pass with multiple\n        times to approximate the solution.\n\n        Args:\n            state (`PNDMSchedulerState`): the `FlaxPNDMScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than FlaxPNDMSchedulerOutput class\n\n        Returns:\n            [`FlaxPNDMSchedulerOutput`] or `tuple`: [`FlaxPNDMSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        if state.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        # NOTE: There is no way to check in the jitted runtime if the prk mode was ran before\n\n        prev_timestep = timestep - self.config.num_train_timesteps // state.num_inference_steps\n        prev_timestep = jnp.where(prev_timestep > 0, prev_timestep, 0)\n\n        # Reference:\n        # if state.counter != 1:\n        #     state.ets.append(model_output)\n        # else:\n        #     prev_timestep = timestep\n        #     timestep = timestep + self.config.num_train_timesteps // state.num_inference_steps\n\n        prev_timestep = jnp.where(state.counter == 1, timestep, prev_timestep)\n        timestep = jnp.where(\n            state.counter == 1, timestep + self.config.num_train_timesteps // state.num_inference_steps, timestep\n        )\n\n        # Reference:\n        # if len(state.ets) == 1 and state.counter == 0:\n        #     model_output = model_output\n        #     state.cur_sample = sample\n        # elif len(state.ets) == 1 and state.counter == 1:\n        #     model_output = (model_output + state.ets[-1]) / 2\n        #     sample = state.cur_sample\n        #     state.cur_sample = None\n        # elif len(state.ets) == 2:\n        #     model_output = (3 * state.ets[-1] - state.ets[-2]) / 2\n        # elif len(state.ets) == 3:\n        #     model_output = (23 * state.ets[-1] - 16 * state.ets[-2] + 5 * state.ets[-3]) / 12\n        # else:\n        #     model_output = (1 / 24) * (55 * state.ets[-1] - 59 * state.ets[-2] + 37 * state.ets[-3] - 9 * state.ets[-4])\n\n        state = state.replace(\n            ets=jax.lax.select(\n                state.counter != 1,\n                state.ets.at[0:3].set(state.ets[1:4]).at[3].set(model_output),  # counter != 1\n                state.ets,  # counter 1\n            ),\n            cur_sample=jax.lax.select(\n                state.counter != 1,\n                sample,  # counter != 1\n                state.cur_sample,  # counter 1\n            ),\n        )\n\n        state = state.replace(\n            cur_model_output=jax.lax.select_n(\n                jnp.clip(state.counter, 0, 4),\n                model_output,  # counter 0\n                (model_output + state.ets[-1]) / 2,  # counter 1\n                (3 * state.ets[-1] - state.ets[-2]) / 2,  # counter 2\n                (23 * state.ets[-1] - 16 * state.ets[-2] + 5 * state.ets[-3]) / 12,  # counter 3\n                (1 / 24)\n                * (55 * state.ets[-1] - 59 * state.ets[-2] + 37 * state.ets[-3] - 9 * state.ets[-4]),  # counter >= 4\n            ),\n        )\n\n        sample = state.cur_sample\n        model_output = state.cur_model_output\n        prev_sample = self._get_prev_sample(state, sample, timestep, prev_timestep, model_output)\n        state = state.replace(counter=state.counter + 1)\n\n        return (prev_sample, state)\n\n    def _get_prev_sample(self, state: PNDMSchedulerState, sample, timestep, prev_timestep, model_output):\n        # See formula (9) of PNDM paper https://arxiv.org/pdf/2202.09778.pdf\n        # this function computes x_(t−δ) using the formula of (9)\n        # Note that x_t needs to be added to both sides of the equation\n\n        # Notation (<variable name> -> <name in paper>\n        # alpha_prod_t -> α_t\n        # 
alpha_prod_t_prev -> α_(t−δ)\n        # beta_prod_t -> (1 - α_t)\n        # beta_prod_t_prev -> (1 - α_(t−δ))\n        # sample -> x_t\n        # model_output -> e_θ(x_t, t)\n        # prev_sample -> x_(t−δ)\n        alpha_prod_t = state.common.alphas_cumprod[timestep]\n        alpha_prod_t_prev = jnp.where(\n            prev_timestep >= 0, state.common.alphas_cumprod[prev_timestep], state.final_alpha_cumprod\n        )\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        if self.config.prediction_type == \"v_prediction\":\n            model_output = (alpha_prod_t**0.5) * model_output + (beta_prod_t**0.5) * sample\n        elif self.config.prediction_type != \"epsilon\":\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon` or `v_prediction`\"\n            )\n\n        # corresponds to (α_(t−δ) - α_t) divided by\n        # denominator of x_t in formula (9) and plus 1\n        # Note: (α_(t−δ) - α_t) / (sqrt(α_t) * (sqrt(α_(t−δ)) + sqr(α_t))) =\n        # sqrt(α_(t−δ)) / sqrt(α_t))\n        sample_coeff = (alpha_prod_t_prev / alpha_prod_t) ** (0.5)\n\n        # corresponds to denominator of e_θ(x_t, t) in formula (9)\n        model_output_denom_coeff = alpha_prod_t * beta_prod_t_prev ** (0.5) + (\n            alpha_prod_t * beta_prod_t * alpha_prod_t_prev\n        ) ** (0.5)\n\n        # full formula (9)\n        prev_sample = (\n            sample_coeff * sample - (alpha_prod_t_prev - alpha_prod_t) * model_output / model_output_denom_coeff\n        )\n\n        return prev_sample\n\n    def add_noise(\n        self,\n        state: PNDMSchedulerState,\n        original_samples: jnp.ndarray,\n        noise: jnp.ndarray,\n        timesteps: jnp.ndarray,\n    ) -> jnp.ndarray:\n        return add_noise_common(state.common, original_samples, noise, timesteps)\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
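Because the Flax variant is stateless, every call threads a `PNDMSchedulerState` through explicitly: `create_state`, then `set_timesteps`, then `step`, each returning a new state. A rough sketch of that pattern, assuming the installed `diffusers` package with its flax extras and a dummy array in place of the model's predicted noise:

```python
import jax
import jax.numpy as jnp

from diffusers import FlaxPNDMScheduler  # assumes `diffusers` installed with flax support

scheduler = FlaxPNDMScheduler(skip_prk_steps=True)
state = scheduler.create_state()

shape = (1, 3, 32, 32)
state = scheduler.set_timesteps(state, num_inference_steps=50, shape=shape)

sample = jax.random.normal(jax.random.PRNGKey(0), shape, dtype=jnp.float32)
for t in state.timesteps:
    model_output = jnp.zeros(shape, dtype=jnp.float32)  # stand-in for the denoiser output
    output = scheduler.step(state, model_output, t, sample)
    sample, state = output.prev_sample, output.state
```

Keeping all running values (`ets`, `counter`, `cur_sample`, `cur_model_output`) in the state dataclass is what makes the step function compatible with `jax.jit`, since no Python-side mutation happens between calls.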
  {
    "path": "diffusers/schedulers/scheduling_repaint.py",
    "content": "# Copyright 2023 ETH Zurich Computer Vision Lab and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, randn_tensor\nfrom .scheduling_utils import SchedulerMixin\n\n\n@dataclass\nclass RePaintSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from\n             the current timestep. `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: torch.FloatTensor\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass RePaintScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    RePaint is a schedule for DDPM inpainting inside a given mask.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. 
They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/pdf/2201.09865.pdf\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, `squaredcos_cap_v2` or `sigmoid`.\n        eta (`float`):\n            The weight of noise for added noise in a diffusion step. Its value is between 0.0 and 1.0 -0.0 is DDIM and\n            1.0 is DDPM scheduler respectively.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        variance_type (`str`):\n            options to clip the variance used when adding noise to the denoised sample. Choose from `fixed_small`,\n            `fixed_small_log`, `fixed_large`, `fixed_large_log`, `learned` or `learned_range`.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample between -1 and 1 for numerical stability.\n\n    \"\"\"\n\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        eta: float = 0.0,\n        trained_betas: Optional[np.ndarray] = None,\n        clip_sample: bool = True,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.from_numpy(trained_betas)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        elif beta_schedule == \"sigmoid\":\n            # GeoDiff sigmoid schedule\n            betas = torch.linspace(-6, 6, num_train_timesteps)\n            self.betas = torch.sigmoid(betas) * (beta_end - beta_start) + beta_start\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        self.one = torch.tensor(1.0)\n\n        self.final_alpha_cumprod = torch.tensor(1.0)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # setable values\n        self.num_inference_steps = None\n        self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy())\n\n        self.eta = eta\n\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        
Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(\n        self,\n        num_inference_steps: int,\n        jump_length: int = 10,\n        jump_n_sample: int = 10,\n        device: Union[str, torch.device] = None,\n    ):\n        num_inference_steps = min(self.config.num_train_timesteps, num_inference_steps)\n        self.num_inference_steps = num_inference_steps\n\n        timesteps = []\n\n        jumps = {}\n        for j in range(0, num_inference_steps - jump_length, jump_length):\n            jumps[j] = jump_n_sample - 1\n\n        t = num_inference_steps\n        while t >= 1:\n            t = t - 1\n            timesteps.append(t)\n\n            if jumps.get(t, 0) > 0:\n                jumps[t] = jumps[t] - 1\n                for _ in range(jump_length):\n                    t = t + 1\n                    timesteps.append(t)\n\n        timesteps = np.array(timesteps) * (self.config.num_train_timesteps // self.num_inference_steps)\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n    def _get_variance(self, t):\n        prev_timestep = t - self.config.num_train_timesteps // self.num_inference_steps\n\n        alpha_prod_t = self.alphas_cumprod[t]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.final_alpha_cumprod\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        # For t > 0, compute predicted variance βt (see formula (6) and (7) from\n        # https://arxiv.org/pdf/2006.11239.pdf) and sample from it to get\n        # previous sample x_{t-1} ~ N(pred_prev_sample, variance) == add\n        # variance to pred_sample\n        # Is equivalent to formula (16) in https://arxiv.org/pdf/2010.02502.pdf\n        # without eta.\n        # variance = (1 - alpha_prod_t_prev) / (1 - alpha_prod_t) * self.betas[t]\n        variance = (beta_prod_t_prev / beta_prod_t) * (1 - alpha_prod_t / alpha_prod_t_prev)\n\n        return variance\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        original_image: torch.FloatTensor,\n        mask: torch.FloatTensor,\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[RePaintSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. 
Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned\n                diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            original_image (`torch.FloatTensor`):\n                the original image to inpaint on.\n            mask (`torch.FloatTensor`):\n                the mask where 0.0 values define which part of the original image to inpaint (change).\n            generator (`torch.Generator`, *optional*): random number generator.\n            return_dict (`bool`): option for returning tuple rather than\n                DDPMSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.RePaintSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.RePaintSchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        t = timestep\n        prev_timestep = timestep - self.config.num_train_timesteps // self.num_inference_steps\n\n        # 1. compute alphas, betas\n        alpha_prod_t = self.alphas_cumprod[t]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.final_alpha_cumprod\n        beta_prod_t = 1 - alpha_prod_t\n\n        # 2. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (15) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_original_sample = (sample - beta_prod_t**0.5 * model_output) / alpha_prod_t**0.5\n\n        # 3. Clip \"predicted x_0\"\n        if self.config.clip_sample:\n            pred_original_sample = torch.clamp(pred_original_sample, -1, 1)\n\n        # We choose to follow RePaint Algorithm 1 to get x_{t-1}, however we\n        # substitute formula (7) in the algorithm coming from DDPM paper\n        # (formula (4) Algorithm 2 - Sampling) with formula (12) from DDIM paper.\n        # DDIM schedule gives the same results as DDPM with eta = 1.0\n        # Noise is being reused in 7. and 8., but no impact on quality has\n        # been observed.\n\n        # 5. Add noise\n        device = model_output.device\n        noise = randn_tensor(model_output.shape, generator=generator, device=device, dtype=model_output.dtype)\n        std_dev_t = self.eta * self._get_variance(timestep) ** 0.5\n\n        variance = 0\n        if t > 0 and self.eta > 0:\n            variance = std_dev_t * noise\n\n        # 6. compute \"direction pointing to x_t\" of formula (12)\n        # from https://arxiv.org/pdf/2010.02502.pdf\n        pred_sample_direction = (1 - alpha_prod_t_prev - std_dev_t**2) ** 0.5 * model_output\n\n        # 7. compute x_{t-1} of formula (12) from https://arxiv.org/pdf/2010.02502.pdf\n        prev_unknown_part = alpha_prod_t_prev**0.5 * pred_original_sample + pred_sample_direction + variance\n\n        # 8. Algorithm 1 Line 5 https://arxiv.org/pdf/2201.09865.pdf\n        prev_known_part = (alpha_prod_t_prev**0.5) * original_image + ((1 - alpha_prod_t_prev) ** 0.5) * noise\n\n        # 9. 
Algorithm 1 Line 8 https://arxiv.org/pdf/2201.09865.pdf\n        pred_prev_sample = mask * prev_known_part + (1.0 - mask) * prev_unknown_part\n\n        if not return_dict:\n            return (\n                pred_prev_sample,\n                pred_original_sample,\n            )\n\n        return RePaintSchedulerOutput(prev_sample=pred_prev_sample, pred_original_sample=pred_original_sample)\n\n    def undo_step(self, sample, timestep, generator=None):\n        n = self.config.num_train_timesteps // self.num_inference_steps\n\n        for i in range(n):\n            beta = self.betas[timestep + i]\n            if sample.device.type == \"mps\":\n                # randn does not work reproducibly on mps\n                noise = randn_tensor(sample.shape, dtype=sample.dtype, generator=generator)\n                noise = noise.to(sample.device)\n            else:\n                noise = randn_tensor(sample.shape, generator=generator, device=sample.device, dtype=sample.dtype)\n\n            # 10. Algorithm 1 Line 10 https://arxiv.org/pdf/2201.09865.pdf\n            sample = (1 - beta) ** 0.5 * sample + beta**0.5 * noise\n\n        return sample\n\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        raise NotImplementedError(\"Use `DDPMScheduler.add_noise()` to train for sampling with RePaint.\")\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
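RePaint alternates ordinary denoising steps with `undo_step` re-noising jumps, so the sampling loop compares each timestep against the previous one to decide which branch to take. A hedged sketch of that loop, with random tensors standing in for the UNet output and the image/mask pair (none of these tensors or the `unet` reference come from the repository):

```python
import torch

from diffusers import RePaintScheduler  # assumes the `diffusers` package is installed

scheduler = RePaintScheduler(num_train_timesteps=1000, eta=0.0)
scheduler.set_timesteps(num_inference_steps=250, jump_length=10, jump_n_sample=10)

generator = torch.Generator().manual_seed(0)
original_image = torch.randn(1, 3, 64, 64)        # image being inpainted (stand-in)
mask = (torch.rand(1, 3, 64, 64) > 0.5).float()   # 1.0 = keep pixel, 0.0 = inpaint
sample = torch.randn(1, 3, 64, 64, generator=generator)

t_last = scheduler.timesteps[0] + 1
for t in scheduler.timesteps:
    if t < t_last:
        # ordinary reverse step; stand-in for `unet(sample, t).sample`
        model_output = torch.randn_like(sample)
        sample = scheduler.step(model_output, t, sample, original_image, mask, generator).prev_sample
    else:
        # "jump back": re-noise the sample (Algorithm 1, line 10 of the RePaint paper)
        sample = scheduler.undo_step(sample, t_last, generator)
    t_last = t
```

The `jump_length`/`jump_n_sample` values here are the ones commonly used with RePaint; the known region is re-noised from `original_image` at every step, while only the masked-out region evolves through the diffusion process.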
  {
    "path": "diffusers/schedulers/scheduling_sde_ve.py",
    "content": "# Copyright 2023 Google Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/yang-song/score_sde_pytorch\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, randn_tensor\nfrom .scheduling_utils import SchedulerMixin, SchedulerOutput\n\n\n@dataclass\nclass SdeVeOutput(BaseOutput):\n    \"\"\"\n    Output class for the ScoreSdeVeScheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        prev_sample_mean (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Mean averaged `prev_sample`. Same as `prev_sample`, only mean-averaged over previous timesteps.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    prev_sample_mean: torch.FloatTensor\n\n\nclass ScoreSdeVeScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    The variance exploding stochastic differential equation (SDE) scheduler.\n\n    For more information, see the original paper: https://arxiv.org/abs/2011.13456\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        snr (`float`):\n            coefficient weighting the step from the model_output sample (from the network) to the random noise.\n        sigma_min (`float`):\n                initial noise scale for sigma sequence in sampling procedure. 
The minimum sigma should mirror the\n                distribution of the data.\n        sigma_max (`float`): maximum value used for the range of continuous timesteps passed into the model.\n        sampling_eps (`float`): the end value of sampling, where timesteps decrease progressively from 1 to\n        epsilon.\n        correct_steps (`int`): number of correction steps performed on a produced sample.\n    \"\"\"\n\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 2000,\n        snr: float = 0.15,\n        sigma_min: float = 0.01,\n        sigma_max: float = 1348.0,\n        sampling_eps: float = 1e-5,\n        correct_steps: int = 1,\n    ):\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = sigma_max\n\n        # setable values\n        self.timesteps = None\n\n        self.set_sigmas(num_train_timesteps, sigma_min, sigma_max, sampling_eps)\n\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(\n        self, num_inference_steps: int, sampling_eps: float = None, device: Union[str, torch.device] = None\n    ):\n        \"\"\"\n        Sets the continuous timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            sampling_eps (`float`, optional):\n                final timestep value (overrides value given at Scheduler instantiation).\n\n        \"\"\"\n        sampling_eps = sampling_eps if sampling_eps is not None else self.config.sampling_eps\n\n        self.timesteps = torch.linspace(1, sampling_eps, num_inference_steps, device=device)\n\n    def set_sigmas(\n        self, num_inference_steps: int, sigma_min: float = None, sigma_max: float = None, sampling_eps: float = None\n    ):\n        \"\"\"\n        Sets the noise scales used for the diffusion chain. 
Supporting function to be run before inference.\n\n        The sigmas control the weight of the `drift` and `diffusion` components of sample update.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            sigma_min (`float`, optional):\n                initial noise scale value (overrides value given at Scheduler instantiation).\n            sigma_max (`float`, optional):\n                final noise scale value (overrides value given at Scheduler instantiation).\n            sampling_eps (`float`, optional):\n                final timestep value (overrides value given at Scheduler instantiation).\n\n        \"\"\"\n        sigma_min = sigma_min if sigma_min is not None else self.config.sigma_min\n        sigma_max = sigma_max if sigma_max is not None else self.config.sigma_max\n        sampling_eps = sampling_eps if sampling_eps is not None else self.config.sampling_eps\n        if self.timesteps is None:\n            self.set_timesteps(num_inference_steps, sampling_eps)\n\n        self.sigmas = sigma_min * (sigma_max / sigma_min) ** (self.timesteps / sampling_eps)\n        self.discrete_sigmas = torch.exp(torch.linspace(math.log(sigma_min), math.log(sigma_max), num_inference_steps))\n        self.sigmas = torch.tensor([sigma_min * (sigma_max / sigma_min) ** t for t in self.timesteps])\n\n    def get_adjacent_sigma(self, timesteps, t):\n        return torch.where(\n            timesteps == 0,\n            torch.zeros_like(t.to(timesteps.device)),\n            self.discrete_sigmas[timesteps - 1].to(timesteps.device),\n        )\n\n    def step_pred(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[SdeVeOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            generator: random number generator.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_sde_ve.SdeVeOutput`] or `tuple`: [`~schedulers.scheduling_sde_ve.SdeVeOutput`] if\n            `return_dict` is True, otherwise a `tuple`. 
When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.timesteps is None:\n            raise ValueError(\n                \"`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        timestep = timestep * torch.ones(\n            sample.shape[0], device=sample.device\n        )  # torch.repeat_interleave(timestep, sample.shape[0])\n        timesteps = (timestep * (len(self.timesteps) - 1)).long()\n\n        # mps requires indices to be in the same device, so we use cpu as is the default with cuda\n        timesteps = timesteps.to(self.discrete_sigmas.device)\n\n        sigma = self.discrete_sigmas[timesteps].to(sample.device)\n        adjacent_sigma = self.get_adjacent_sigma(timesteps, timestep).to(sample.device)\n        drift = torch.zeros_like(sample)\n        diffusion = (sigma**2 - adjacent_sigma**2) ** 0.5\n\n        # equation 6 in the paper: the model_output modeled by the network is grad_x log pt(x)\n        # also equation 47 shows the analog from SDE models to ancestral sampling methods\n        diffusion = diffusion.flatten()\n        while len(diffusion.shape) < len(sample.shape):\n            diffusion = diffusion.unsqueeze(-1)\n        drift = drift - diffusion**2 * model_output\n\n        #  equation 6: sample noise for the diffusion term of\n        noise = randn_tensor(\n            sample.shape, layout=sample.layout, generator=generator, device=sample.device, dtype=sample.dtype\n        )\n        prev_sample_mean = sample - drift  # subtract because `dt` is a small negative timestep\n        # TODO is the variable diffusion the correct scaling term for the noise?\n        prev_sample = prev_sample_mean + diffusion * noise  # add impact of diffusion field g\n\n        if not return_dict:\n            return (prev_sample, prev_sample_mean)\n\n        return SdeVeOutput(prev_sample=prev_sample, prev_sample_mean=prev_sample_mean)\n\n    def step_correct(\n        self,\n        model_output: torch.FloatTensor,\n        sample: torch.FloatTensor,\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Correct the predicted sample based on the output model_output of the network. This is often run repeatedly\n        after making the prediction for the previous timestep.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            generator: random number generator.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_sde_ve.SdeVeOutput`] or `tuple`: [`~schedulers.scheduling_sde_ve.SdeVeOutput`] if\n            `return_dict` is True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if self.timesteps is None:\n            raise ValueError(\n                \"`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        # For small batch sizes, the paper \"suggest replacing norm(z) with sqrt(d), where d is the dim. 
of z\"\n        # sample noise for correction\n        noise = randn_tensor(sample.shape, layout=sample.layout, generator=generator).to(sample.device)\n\n        # compute step size from the model_output, the noise, and the snr\n        grad_norm = torch.norm(model_output.reshape(model_output.shape[0], -1), dim=-1).mean()\n        noise_norm = torch.norm(noise.reshape(noise.shape[0], -1), dim=-1).mean()\n        step_size = (self.config.snr * noise_norm / grad_norm) ** 2 * 2\n        step_size = step_size * torch.ones(sample.shape[0]).to(sample.device)\n        # self.repeat_scalar(step_size, sample.shape[0])\n\n        # compute corrected sample: model_output term and noise term\n        step_size = step_size.flatten()\n        while len(step_size.shape) < len(sample.shape):\n            step_size = step_size.unsqueeze(-1)\n        prev_sample_mean = sample + step_size * model_output\n        prev_sample = prev_sample_mean + ((step_size * 2) ** 0.5) * noise\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.FloatTensor,\n    ) -> torch.FloatTensor:\n        # Make sure sigmas and timesteps have the same device and dtype as original_samples\n        timesteps = timesteps.to(original_samples.device)\n        sigmas = self.discrete_sigmas.to(original_samples.device)[timesteps]\n        noise = torch.randn_like(original_samples) * sigmas[:, None, None, None]\n        noisy_samples = noise + original_samples\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
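# A minimal predictor-corrector sampling sketch for the variance-exploding SDE
# scheduler defined in scheduling_sde_ve.py above. `score_model` is a toy
# stand-in for a learned score network (a full pipeline would call a UNet),
# the shapes and step count are illustrative, and the import path assumes this
# repository's package layout is importable as `diffusers`.
import torch

from diffusers.schedulers.scheduling_sde_ve import ScoreSdeVeScheduler

scheduler = ScoreSdeVeScheduler()
scheduler.set_timesteps(num_inference_steps=50)
scheduler.set_sigmas(num_inference_steps=50)


def score_model(sample, sigma):
    # stand-in score estimate with the same shape as `sample`
    return -sample / (1.0 + sigma[:, None, None, None] ** 2)


sample = torch.randn(1, 3, 32, 32) * scheduler.init_noise_sigma
for i, t in enumerate(scheduler.timesteps):
    sigma_t = scheduler.sigmas[i] * torch.ones(sample.shape[0])

    # corrector (Langevin) updates, repeated `correct_steps` times per timestep
    for _ in range(scheduler.config.correct_steps):
        model_output = score_model(sample, sigma_t)
        sample = scheduler.step_correct(model_output, sample).prev_sample

    # predictor update: reverse-SDE step via step_pred
    model_output = score_model(sample, sigma_t)
    out = scheduler.step_pred(model_output, t, sample)
    sample, sample_mean = out.prev_sample, out.prev_sample_mean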
  {
    "path": "diffusers/schedulers/scheduling_sde_ve_flax.py",
    "content": "# Copyright 2023 Google Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/yang-song/score_sde_pytorch\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport flax\nimport jax.numpy as jnp\nfrom jax import random\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils_flax import FlaxSchedulerMixin, FlaxSchedulerOutput, broadcast_to_shape_from_left\n\n\n@flax.struct.dataclass\nclass ScoreSdeVeSchedulerState:\n    # setable values\n    timesteps: Optional[jnp.ndarray] = None\n    discrete_sigmas: Optional[jnp.ndarray] = None\n    sigmas: Optional[jnp.ndarray] = None\n\n    @classmethod\n    def create(cls):\n        return cls()\n\n\n@dataclass\nclass FlaxSdeVeOutput(FlaxSchedulerOutput):\n    \"\"\"\n    Output class for the ScoreSdeVeScheduler's step function output.\n\n    Args:\n        state (`ScoreSdeVeSchedulerState`):\n        prev_sample (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        prev_sample_mean (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)` for images):\n            Mean averaged `prev_sample`. Same as `prev_sample`, only mean-averaged over previous timesteps.\n    \"\"\"\n\n    state: ScoreSdeVeSchedulerState\n    prev_sample: jnp.ndarray\n    prev_sample_mean: Optional[jnp.ndarray] = None\n\n\nclass FlaxScoreSdeVeScheduler(FlaxSchedulerMixin, ConfigMixin):\n    \"\"\"\n    The variance exploding stochastic differential equation (SDE) scheduler.\n\n    For more information, see the original paper: https://arxiv.org/abs/2011.13456\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        snr (`float`):\n            coefficient weighting the step from the model_output sample (from the network) to the random noise.\n        sigma_min (`float`):\n                initial noise scale for sigma sequence in sampling procedure. 
The minimum sigma should mirror the\n                distribution of the data.\n        sigma_max (`float`): maximum value used for the range of continuous timesteps passed into the model.\n        sampling_eps (`float`): the end value of sampling, where timesteps decrease progressively from 1 to\n        epsilon.\n        correct_steps (`int`): number of correction steps performed on a produced sample.\n    \"\"\"\n\n    @property\n    def has_state(self):\n        return True\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 2000,\n        snr: float = 0.15,\n        sigma_min: float = 0.01,\n        sigma_max: float = 1348.0,\n        sampling_eps: float = 1e-5,\n        correct_steps: int = 1,\n    ):\n        pass\n\n    def create_state(self):\n        state = ScoreSdeVeSchedulerState.create()\n        return self.set_sigmas(\n            state,\n            self.config.num_train_timesteps,\n            self.config.sigma_min,\n            self.config.sigma_max,\n            self.config.sampling_eps,\n        )\n\n    def set_timesteps(\n        self, state: ScoreSdeVeSchedulerState, num_inference_steps: int, shape: Tuple = (), sampling_eps: float = None\n    ) -> ScoreSdeVeSchedulerState:\n        \"\"\"\n        Sets the continuous timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Args:\n            state (`ScoreSdeVeSchedulerState`): the `FlaxScoreSdeVeScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            sampling_eps (`float`, optional):\n                final timestep value (overrides value given at Scheduler instantiation).\n\n        \"\"\"\n        sampling_eps = sampling_eps if sampling_eps is not None else self.config.sampling_eps\n\n        timesteps = jnp.linspace(1, sampling_eps, num_inference_steps)\n        return state.replace(timesteps=timesteps)\n\n    def set_sigmas(\n        self,\n        state: ScoreSdeVeSchedulerState,\n        num_inference_steps: int,\n        sigma_min: float = None,\n        sigma_max: float = None,\n        sampling_eps: float = None,\n    ) -> ScoreSdeVeSchedulerState:\n        \"\"\"\n        Sets the noise scales used for the diffusion chain. 
Supporting function to be run before inference.\n\n        The sigmas control the weight of the `drift` and `diffusion` components of sample update.\n\n        Args:\n            state (`ScoreSdeVeSchedulerState`): the `FlaxScoreSdeVeScheduler` state data class instance.\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            sigma_min (`float`, optional):\n                initial noise scale value (overrides value given at Scheduler instantiation).\n            sigma_max (`float`, optional):\n                final noise scale value (overrides value given at Scheduler instantiation).\n            sampling_eps (`float`, optional):\n                final timestep value (overrides value given at Scheduler instantiation).\n        \"\"\"\n        sigma_min = sigma_min if sigma_min is not None else self.config.sigma_min\n        sigma_max = sigma_max if sigma_max is not None else self.config.sigma_max\n        sampling_eps = sampling_eps if sampling_eps is not None else self.config.sampling_eps\n        if state.timesteps is None:\n            state = self.set_timesteps(state, num_inference_steps, sampling_eps)\n\n        discrete_sigmas = jnp.exp(jnp.linspace(jnp.log(sigma_min), jnp.log(sigma_max), num_inference_steps))\n        sigmas = jnp.array([sigma_min * (sigma_max / sigma_min) ** t for t in state.timesteps])\n\n        return state.replace(discrete_sigmas=discrete_sigmas, sigmas=sigmas)\n\n    def get_adjacent_sigma(self, state, timesteps, t):\n        return jnp.where(timesteps == 0, jnp.zeros_like(t), state.discrete_sigmas[timesteps - 1])\n\n    def step_pred(\n        self,\n        state: ScoreSdeVeSchedulerState,\n        model_output: jnp.ndarray,\n        timestep: int,\n        sample: jnp.ndarray,\n        key: random.KeyArray,\n        return_dict: bool = True,\n    ) -> Union[FlaxSdeVeOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            state (`ScoreSdeVeSchedulerState`): the `FlaxScoreSdeVeScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            generator: random number generator.\n            return_dict (`bool`): option for returning tuple rather than FlaxSdeVeOutput class\n\n        Returns:\n            [`FlaxSdeVeOutput`] or `tuple`: [`FlaxSdeVeOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if state.timesteps is None:\n            raise ValueError(\n                \"`state.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        timestep = timestep * jnp.ones(\n            sample.shape[0],\n        )\n        timesteps = (timestep * (len(state.timesteps) - 1)).long()\n\n        sigma = state.discrete_sigmas[timesteps]\n        adjacent_sigma = self.get_adjacent_sigma(state, timesteps, timestep)\n        drift = jnp.zeros_like(sample)\n        diffusion = (sigma**2 - adjacent_sigma**2) ** 0.5\n\n        # equation 6 in the paper: the model_output modeled by the network is grad_x log pt(x)\n        # also equation 47 shows the analog from SDE models to ancestral sampling methods\n        diffusion = diffusion.flatten()\n        diffusion = broadcast_to_shape_from_left(diffusion, sample.shape)\n        drift = drift - diffusion**2 * model_output\n\n        #  equation 6: sample noise for the diffusion term of\n        key = random.split(key, num=1)\n        noise = random.normal(key=key, shape=sample.shape)\n        prev_sample_mean = sample - drift  # subtract because `dt` is a small negative timestep\n        # TODO is the variable diffusion the correct scaling term for the noise?\n        prev_sample = prev_sample_mean + diffusion * noise  # add impact of diffusion field g\n\n        if not return_dict:\n            return (prev_sample, prev_sample_mean, state)\n\n        return FlaxSdeVeOutput(prev_sample=prev_sample, prev_sample_mean=prev_sample_mean, state=state)\n\n    def step_correct(\n        self,\n        state: ScoreSdeVeSchedulerState,\n        model_output: jnp.ndarray,\n        sample: jnp.ndarray,\n        key: random.KeyArray,\n        return_dict: bool = True,\n    ) -> Union[FlaxSdeVeOutput, Tuple]:\n        \"\"\"\n        Correct the predicted sample based on the output model_output of the network. This is often run repeatedly\n        after making the prediction for the previous timestep.\n\n        Args:\n            state (`ScoreSdeVeSchedulerState`): the `FlaxScoreSdeVeScheduler` state data class instance.\n            model_output (`jnp.ndarray`): direct output from learned diffusion model.\n            sample (`jnp.ndarray`):\n                current instance of sample being created by diffusion process.\n            generator: random number generator.\n            return_dict (`bool`): option for returning tuple rather than FlaxSdeVeOutput class\n\n        Returns:\n            [`FlaxSdeVeOutput`] or `tuple`: [`FlaxSdeVeOutput`] if `return_dict` is True, otherwise a `tuple`. When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        if state.timesteps is None:\n            raise ValueError(\n                \"`state.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        # For small batch sizes, the paper \"suggest replacing norm(z) with sqrt(d), where d is the dim. 
of z\"\n        # sample noise for correction\n        key = random.split(key, num=1)\n        noise = random.normal(key=key, shape=sample.shape)\n\n        # compute step size from the model_output, the noise, and the snr\n        grad_norm = jnp.linalg.norm(model_output)\n        noise_norm = jnp.linalg.norm(noise)\n        step_size = (self.config.snr * noise_norm / grad_norm) ** 2 * 2\n        step_size = step_size * jnp.ones(sample.shape[0])\n\n        # compute corrected sample: model_output term and noise term\n        step_size = step_size.flatten()\n        step_size = broadcast_to_shape_from_left(step_size, sample.shape)\n        prev_sample_mean = sample + step_size * model_output\n        prev_sample = prev_sample_mean + ((step_size * 2) ** 0.5) * noise\n\n        if not return_dict:\n            return (prev_sample, state)\n\n        return FlaxSdeVeOutput(prev_sample=prev_sample, state=state)\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
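# A minimal sketch of the stateless usage pattern for FlaxScoreSdeVeScheduler
# above: configuration lives on the scheduler object, while all setable values
# (timesteps, sigmas) live in an immutable ScoreSdeVeSchedulerState that every
# call returns a new copy of. The step count is illustrative and the import
# path assumes this repository's package layout.
from diffusers.schedulers.scheduling_sde_ve_flax import FlaxScoreSdeVeScheduler

scheduler = FlaxScoreSdeVeScheduler()
state = scheduler.create_state()  # populates timesteps/sigmas from the config
state = scheduler.set_timesteps(state, num_inference_steps=50)
state = scheduler.set_sigmas(state, num_inference_steps=50)

# `state` is a flax.struct.dataclass (a pytree), so it can be threaded through
# jit/pmap boundaries; the scheduler object itself is never mutated.
print(state.timesteps.shape, state.discrete_sigmas.shape, state.sigmas.shape)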
  {
    "path": "diffusers/schedulers/scheduling_sde_vp.py",
    "content": "# Copyright 2023 Google Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: This file is strongly influenced by https://github.com/yang-song/score_sde_pytorch\n\nimport math\nfrom typing import Union\n\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import randn_tensor\nfrom .scheduling_utils import SchedulerMixin\n\n\nclass ScoreSdeVpScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    The variance preserving stochastic differential equation (SDE) scheduler.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more information, see the original paper: https://arxiv.org/abs/2011.13456\n\n    UNDER CONSTRUCTION\n\n    \"\"\"\n\n    order = 1\n\n    @register_to_config\n    def __init__(self, num_train_timesteps=2000, beta_min=0.1, beta_max=20, sampling_eps=1e-3):\n        self.sigmas = None\n        self.discrete_sigmas = None\n        self.timesteps = None\n\n    def set_timesteps(self, num_inference_steps, device: Union[str, torch.device] = None):\n        self.timesteps = torch.linspace(1, self.config.sampling_eps, num_inference_steps, device=device)\n\n    def step_pred(self, score, x, t, generator=None):\n        if self.timesteps is None:\n            raise ValueError(\n                \"`self.timesteps` is not set, you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        # TODO(Patrick) better comments + non-PyTorch\n        # postprocess model score\n        log_mean_coeff = (\n            -0.25 * t**2 * (self.config.beta_max - self.config.beta_min) - 0.5 * t * self.config.beta_min\n        )\n        std = torch.sqrt(1.0 - torch.exp(2.0 * log_mean_coeff))\n        std = std.flatten()\n        while len(std.shape) < len(score.shape):\n            std = std.unsqueeze(-1)\n        score = -score / std\n\n        # compute\n        dt = -1.0 / len(self.timesteps)\n\n        beta_t = self.config.beta_min + t * (self.config.beta_max - self.config.beta_min)\n        beta_t = beta_t.flatten()\n        while len(beta_t.shape) < len(x.shape):\n            beta_t = beta_t.unsqueeze(-1)\n        drift = -0.5 * beta_t * x\n\n        diffusion = torch.sqrt(beta_t)\n        drift = drift - diffusion**2 * score\n        x_mean = x + drift * dt\n\n        # add noise\n        noise = randn_tensor(x.shape, layout=x.layout, generator=generator, device=x.device, dtype=x.dtype)\n        x = x_mean + diffusion * math.sqrt(-dt) * noise\n\n        return x, x_mean\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
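# A small sketch exercising the variance-preserving SDE scheduler above
# (scheduling_sde_vp.py, marked UNDER CONSTRUCTION). The `score` tensor is a
# zero stand-in for a learned score estimate, the shapes are illustrative, and
# the import path assumes this repository's package layout. Note that
# step_pred returns a plain (sample, sample_mean) tuple rather than an output
# dataclass.
import torch

from diffusers.schedulers.scheduling_sde_vp import ScoreSdeVpScheduler

scheduler = ScoreSdeVpScheduler()
scheduler.set_timesteps(num_inference_steps=100)

x = torch.randn(2, 3, 8, 8)
for t in scheduler.timesteps:
    t_batch = t * torch.ones(x.shape[0])  # continuous time, one value per sample
    score = torch.zeros_like(x)           # stand-in score-network output
    x, x_mean = scheduler.step_pred(score, x, t_batch)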
  {
    "path": "diffusers/schedulers/scheduling_unclip.py",
    "content": "# Copyright 2023 Kakao Brain and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport math\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput, randn_tensor\nfrom .scheduling_utils import SchedulerMixin\n\n\n@dataclass\n# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->UnCLIP\nclass UnCLIPSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n        pred_original_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            The predicted denoised sample (x_{0}) based on the model output from the current timestep.\n            `pred_original_sample` can be used to preview progress or for guidance.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n    pred_original_sample: Optional[torch.FloatTensor] = None\n\n\n# Copied from diffusers.schedulers.scheduling_ddpm.betas_for_alpha_bar\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass UnCLIPScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    NOTE: do not use this scheduler. The DDPM scheduler has been updated to support the changes made here. 
This\n    scheduler will be removed and replaced with DDPM.\n\n    This is a modified DDPM Scheduler specifically for the karlo unCLIP model.\n\n    This scheduler has some minor variations in how it calculates the learned range variance and dynamically\n    re-calculates betas based off the timesteps it is skipping.\n\n    The scheduler also uses a slightly different step ratio when computing timesteps to use for inference.\n\n    See [`~DDPMScheduler`] for more information on DDPM scheduling\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        variance_type (`str`):\n            options to clip the variance used when adding noise to the denoised sample. Choose from `fixed_small_log`\n            or `learned_range`.\n        clip_sample (`bool`, default `True`):\n            option to clip predicted sample between `-clip_sample_range` and `clip_sample_range` for numerical\n            stability.\n        clip_sample_range (`float`, default `1.0`):\n            The range to clip the sample between. See `clip_sample`.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion process)\n            or `sample` (directly predicting the noisy sample`)\n    \"\"\"\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        variance_type: str = \"fixed_small_log\",\n        clip_sample: bool = True,\n        clip_sample_range: Optional[float] = 1.0,\n        prediction_type: str = \"epsilon\",\n        beta_schedule: str = \"squaredcos_cap_v2\",\n    ):\n        if beta_schedule != \"squaredcos_cap_v2\":\n            raise ValueError(\"UnCLIPScheduler only supports `beta_schedule`: 'squaredcos_cap_v2'\")\n\n        self.betas = betas_for_alpha_bar(num_train_timesteps)\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        self.one = torch.tensor(1.0)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        # setable values\n        self.num_inference_steps = None\n        self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy())\n\n        self.variance_type = variance_type\n\n    def scale_model_input(self, sample: torch.FloatTensor, timestep: Optional[int] = None) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n            timestep (`int`, optional): current timestep\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. Supporting function to be run before inference.\n\n        Note that this scheduler uses a slightly different step ratio than the other diffusers schedulers. 
The\n        different step ratio is to mimic the original karlo implementation and does not affect the quality or accuracy\n        of the results.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n        step_ratio = (self.config.num_train_timesteps - 1) / (self.num_inference_steps - 1)\n        timesteps = (np.arange(0, num_inference_steps) * step_ratio).round()[::-1].copy().astype(np.int64)\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n    def _get_variance(self, t, prev_timestep=None, predicted_variance=None, variance_type=None):\n        if prev_timestep is None:\n            prev_timestep = t - 1\n\n        alpha_prod_t = self.alphas_cumprod[t]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.one\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        if prev_timestep == t - 1:\n            beta = self.betas[t]\n        else:\n            beta = 1 - alpha_prod_t / alpha_prod_t_prev\n\n        # For t > 0, compute predicted variance βt (see formula (6) and (7) from https://arxiv.org/pdf/2006.11239.pdf)\n        # and sample from it to get previous sample\n        # x_{t-1} ~ N(pred_prev_sample, variance) == add variance to pred_sample\n        variance = beta_prod_t_prev / beta_prod_t * beta\n\n        if variance_type is None:\n            variance_type = self.config.variance_type\n\n        # hacks - were probably added for training stability\n        if variance_type == \"fixed_small_log\":\n            variance = torch.log(torch.clamp(variance, min=1e-20))\n            variance = torch.exp(0.5 * variance)\n        elif variance_type == \"learned_range\":\n            # NOTE difference with DDPM scheduler\n            min_log = variance.log()\n            max_log = beta.log()\n\n            frac = (predicted_variance + 1) / 2\n            variance = frac * max_log + (1 - frac) * min_log\n\n        return variance\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        prev_timestep: Optional[int] = None,\n        generator=None,\n        return_dict: bool = True,\n    ) -> Union[UnCLIPSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion\n        process from the learned model outputs (most often the predicted noise).\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            prev_timestep (`int`, *optional*): The previous timestep to predict the previous sample at.\n                Used to dynamically compute beta. If not given, `t-1` is used and the pre-computed beta is used.\n            generator: random number generator.\n            return_dict (`bool`): option for returning tuple rather than UnCLIPSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_utils.UnCLIPSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_utils.UnCLIPSchedulerOutput`] if `return_dict` is True, otherwise a `tuple`. 
When\n            returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n        t = timestep\n\n        if model_output.shape[1] == sample.shape[1] * 2 and self.variance_type == \"learned_range\":\n            model_output, predicted_variance = torch.split(model_output, sample.shape[1], dim=1)\n        else:\n            predicted_variance = None\n\n        # 1. compute alphas, betas\n        if prev_timestep is None:\n            prev_timestep = t - 1\n\n        alpha_prod_t = self.alphas_cumprod[t]\n        alpha_prod_t_prev = self.alphas_cumprod[prev_timestep] if prev_timestep >= 0 else self.one\n        beta_prod_t = 1 - alpha_prod_t\n        beta_prod_t_prev = 1 - alpha_prod_t_prev\n\n        if prev_timestep == t - 1:\n            beta = self.betas[t]\n            alpha = self.alphas[t]\n        else:\n            beta = 1 - alpha_prod_t / alpha_prod_t_prev\n            alpha = 1 - beta\n\n        # 2. compute predicted original sample from predicted noise also called\n        # \"predicted x_0\" of formula (15) from https://arxiv.org/pdf/2006.11239.pdf\n        if self.config.prediction_type == \"epsilon\":\n            pred_original_sample = (sample - beta_prod_t ** (0.5) * model_output) / alpha_prod_t ** (0.5)\n        elif self.config.prediction_type == \"sample\":\n            pred_original_sample = model_output\n        else:\n            raise ValueError(\n                f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon` or `sample`\"\n                \" for the UnCLIPScheduler.\"\n            )\n\n        # 3. Clip \"predicted x_0\"\n        if self.config.clip_sample:\n            pred_original_sample = torch.clamp(\n                pred_original_sample, -self.config.clip_sample_range, self.config.clip_sample_range\n            )\n\n        # 4. Compute coefficients for pred_original_sample x_0 and current sample x_t\n        # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_original_sample_coeff = (alpha_prod_t_prev ** (0.5) * beta) / beta_prod_t\n        current_sample_coeff = alpha ** (0.5) * beta_prod_t_prev / beta_prod_t\n\n        # 5. Compute predicted previous sample µ_t\n        # See formula (7) from https://arxiv.org/pdf/2006.11239.pdf\n        pred_prev_sample = pred_original_sample_coeff * pred_original_sample + current_sample_coeff * sample\n\n        # 6. 
Add noise\n        variance = 0\n        if t > 0:\n            variance_noise = randn_tensor(\n                model_output.shape, dtype=model_output.dtype, generator=generator, device=model_output.device\n            )\n\n            variance = self._get_variance(\n                t,\n                predicted_variance=predicted_variance,\n                prev_timestep=prev_timestep,\n            )\n\n            if self.variance_type == \"fixed_small_log\":\n                variance = variance\n            elif self.variance_type == \"learned_range\":\n                variance = (0.5 * variance).exp()\n            else:\n                raise ValueError(\n                    f\"variance_type given as {self.variance_type} must be one of `fixed_small_log` or `learned_range`\"\n                    \" for the UnCLIPScheduler.\"\n                )\n\n            variance = variance * variance_noise\n\n        pred_prev_sample = pred_prev_sample + variance\n\n        if not return_dict:\n            return (pred_prev_sample,)\n\n        return UnCLIPSchedulerOutput(prev_sample=pred_prev_sample, pred_original_sample=pred_original_sample)\n"
  },
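# A standalone illustration of the cosine ("squaredcos_cap_v2") schedule
# produced by `betas_for_alpha_bar` above. Each beta is chosen so that
# 1 - beta_i = alpha_bar((i + 1) / N) / alpha_bar(i / N), so the cumulative
# product of (1 - beta) telescopes to alpha_bar((i + 1) / N) / alpha_bar(0),
# up to the max_beta = 0.999 cap that only bites near the end of the schedule.
import math

import torch


def alpha_bar(t):
    return math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2


num_steps = 1000
betas = torch.tensor(
    [
        min(1 - alpha_bar((i + 1) / num_steps) / alpha_bar(i / num_steps), 0.999)
        for i in range(num_steps)
    ]
)

alphas_cumprod = torch.cumprod(1.0 - betas, dim=0)
# approximately equal, since alpha_bar(0) is very close to 1
print(alphas_cumprod[499].item(), alpha_bar(500 / num_steps))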
  {
    "path": "diffusers/schedulers/scheduling_unipc_multistep.py",
    "content": "# Copyright 2023 TSAIL Team and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# DISCLAIMER: check https://arxiv.org/abs/2302.04867 and https://github.com/wl-zhao/UniPC for more info\n# The codebase is modified based on https://github.com/huggingface/diffusers/blob/main/src/diffusers/schedulers/scheduling_dpmsolver_multistep.py\n\nimport math\nfrom typing import List, Optional, Tuple, Union\n\nimport numpy as np\nimport torch\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom .scheduling_utils import KarrasDiffusionSchedulers, SchedulerMixin, SchedulerOutput\n\n\ndef betas_for_alpha_bar(num_diffusion_timesteps, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`np.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return torch.tensor(betas, dtype=torch.float32)\n\n\nclass UniPCMultistepScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    UniPC is a training-free framework designed for the fast sampling of diffusion models, which consists of a\n    corrector (UniC) and a predictor (UniP) that share a unified analytical form and support arbitrary orders. UniPC is\n    by desinged model-agnostic, supporting pixel-space/latent-space DPMs on unconditional/conditional sampling. It can\n    also be applied to both noise prediction model and data prediction model. The corrector UniC can be also applied\n    after any off-the-shelf solvers to increase the order of accuracy.\n\n    For more details, see the original paper: https://arxiv.org/abs/2302.04867\n\n    Currently, we support the multistep UniPC for both noise prediction models and data prediction models. We recommend\n    to use `solver_order=2` for guided sampling, and `solver_order=3` for unconditional sampling.\n\n    We also support the \"dynamic thresholding\" method in Imagen (https://arxiv.org/abs/2205.11487). For pixel-space\n    diffusion models, you can set both `predict_x0=True` and `thresholding=True` to use the dynamic thresholding. 
Note\n    that the thresholding method is unsuitable for latent-space diffusion models (such as stable-diffusion).\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    Args:\n        num_train_timesteps (`int`): number of diffusion steps used to train the model.\n        beta_start (`float`): the starting `beta` value of inference.\n        beta_end (`float`): the final `beta` value.\n        beta_schedule (`str`):\n            the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from\n            `linear`, `scaled_linear`, or `squaredcos_cap_v2`.\n        trained_betas (`np.ndarray`, optional):\n            option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.\n        solver_order (`int`, default `2`):\n            the order of UniPC, also the p in UniPC-p; can be any positive integer. Note that the effective order of\n            accuracy is `solver_order + 1` due to the UniC. We recommend to use `solver_order=2` for guided sampling,\n            and `solver_order=3` for unconditional sampling.\n        prediction_type (`str`, default `epsilon`, optional):\n            prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion\n            process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4\n            https://imagen.research.google/video/paper.pdf)\n        thresholding (`bool`, default `False`):\n            whether to use the \"dynamic thresholding\" method (introduced by Imagen, https://arxiv.org/abs/2205.11487).\n            For pixel-space diffusion models, you can set both `predict_x0=True` and `thresholding=True` to use the\n            dynamic thresholding. Note that the thresholding method is unsuitable for latent-space diffusion models\n            (such as stable-diffusion).\n        dynamic_thresholding_ratio (`float`, default `0.995`):\n            the ratio for the dynamic thresholding method. Default is `0.995`, the same as Imagen\n            (https://arxiv.org/abs/2205.11487).\n        sample_max_value (`float`, default `1.0`):\n            the threshold value for dynamic thresholding. Valid only when `thresholding=True` and `predict_x0=True`.\n        predict_x0 (`bool`, default `True`):\n            whether to use the updating algrithm on the predicted x0. See https://arxiv.org/abs/2211.01095 for details\n        solver_type (`str`, default `bh2`):\n            the solver type of UniPC. We recommend use `bh1` for unconditional sampling when steps < 10, and use `bh2`\n            otherwise.\n        lower_order_final (`bool`, default `True`):\n            whether to use lower-order solvers in the final steps. Only valid for < 15 inference steps. We empirically\n            find this trick can stabilize the sampling of DPM-Solver for steps < 15, especially for steps <= 10.\n        disable_corrector (`list`, default `[]`):\n            decide which step to disable the corrector. For large guidance scale, the misalignment between the\n            `epsilon_theta(x_t, c)`and `epsilon_theta(x_t^c, c)` might influence the convergence. 
This can be mitigated\n            by disable the corrector at the first few steps (e.g., disable_corrector=[0])\n        solver_p (`SchedulerMixin`, default `None`):\n            can be any other scheduler. If specified, the algorithm will become solver_p + UniC.\n    \"\"\"\n\n    _compatibles = [e.name for e in KarrasDiffusionSchedulers]\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_train_timesteps: int = 1000,\n        beta_start: float = 0.0001,\n        beta_end: float = 0.02,\n        beta_schedule: str = \"linear\",\n        trained_betas: Optional[Union[np.ndarray, List[float]]] = None,\n        solver_order: int = 2,\n        prediction_type: str = \"epsilon\",\n        thresholding: bool = False,\n        dynamic_thresholding_ratio: float = 0.995,\n        sample_max_value: float = 1.0,\n        predict_x0: bool = True,\n        solver_type: str = \"bh2\",\n        lower_order_final: bool = True,\n        disable_corrector: List[int] = [],\n        solver_p: SchedulerMixin = None,\n    ):\n        if trained_betas is not None:\n            self.betas = torch.tensor(trained_betas, dtype=torch.float32)\n        elif beta_schedule == \"linear\":\n            self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)\n        elif beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            self.betas = (\n                torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2\n            )\n        elif beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            self.betas = betas_for_alpha_bar(num_train_timesteps)\n        else:\n            raise NotImplementedError(f\"{beta_schedule} does is not implemented for {self.__class__}\")\n\n        self.alphas = 1.0 - self.betas\n        self.alphas_cumprod = torch.cumprod(self.alphas, dim=0)\n        # Currently we only support VP-type noise schedule\n        self.alpha_t = torch.sqrt(self.alphas_cumprod)\n        self.sigma_t = torch.sqrt(1 - self.alphas_cumprod)\n        self.lambda_t = torch.log(self.alpha_t) - torch.log(self.sigma_t)\n\n        # standard deviation of the initial noise distribution\n        self.init_noise_sigma = 1.0\n\n        if solver_type not in [\"bh1\", \"bh2\"]:\n            if solver_type in [\"midpoint\", \"heun\", \"logrho\"]:\n                self.register_to_config(solver_type=\"bh1\")\n            else:\n                raise NotImplementedError(f\"{solver_type} does is not implemented for {self.__class__}\")\n\n        self.predict_x0 = predict_x0\n        # setable values\n        self.num_inference_steps = None\n        timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=np.float32)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps)\n        self.model_outputs = [None] * solver_order\n        self.timestep_list = [None] * solver_order\n        self.lower_order_nums = 0\n        self.disable_corrector = disable_corrector\n        self.solver_p = solver_p\n        self.last_sample = None\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n            device (`str` or `torch.device`, optional):\n                the device to which the timesteps should be moved to. If `None`, the timesteps are not moved.\n        \"\"\"\n        timesteps = (\n            np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps + 1)\n            .round()[::-1][:-1]\n            .copy()\n            .astype(np.int64)\n        )\n\n        # when num_inference_steps == num_train_timesteps, we can end up with\n        # duplicates in timesteps.\n        _, unique_indices = np.unique(timesteps, return_index=True)\n        timesteps = timesteps[np.sort(unique_indices)]\n\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n        self.num_inference_steps = len(timesteps)\n\n        self.model_outputs = [\n            None,\n        ] * self.config.solver_order\n        self.lower_order_nums = 0\n        self.last_sample = None\n        if self.solver_p:\n            self.solver_p.set_timesteps(self.num_inference_steps, device=device)\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler._threshold_sample\n    def _threshold_sample(self, sample: torch.FloatTensor) -> torch.FloatTensor:\n        \"\"\"\n        \"Dynamic thresholding: At each sampling step we set s to a certain percentile absolute pixel value in xt0 (the\n        prediction of x_0 at timestep t), and if s > 1, then we threshold xt0 to the range [-s, s] and then divide by\n        s. Dynamic thresholding pushes saturated pixels (those near -1 and 1) inwards, thereby actively preventing\n        pixels from saturation at each step. 
We find that dynamic thresholding results in significantly better\n        photorealism as well as better image-text alignment, especially when using very large guidance weights.\"\n\n        https://arxiv.org/abs/2205.11487\n        \"\"\"\n        dtype = sample.dtype\n        batch_size, channels, height, width = sample.shape\n\n        if dtype not in (torch.float32, torch.float64):\n            sample = sample.float()  # upcast for quantile calculation, and clamp not implemented for cpu half\n\n        # Flatten sample for doing quantile calculation along each image\n        sample = sample.reshape(batch_size, channels * height * width)\n\n        abs_sample = sample.abs()  # \"a certain percentile absolute pixel value\"\n\n        s = torch.quantile(abs_sample, self.config.dynamic_thresholding_ratio, dim=1)\n        s = torch.clamp(\n            s, min=1, max=self.config.sample_max_value\n        )  # When clamped to min=1, equivalent to standard clipping to [-1, 1]\n\n        s = s.unsqueeze(1)  # (batch_size, 1) because clamp will broadcast along dim=0\n        sample = torch.clamp(sample, -s, s) / s  # \"we threshold xt0 to the range [-s, s] and then divide by s\"\n\n        sample = sample.reshape(batch_size, channels, height, width)\n        sample = sample.to(dtype)\n\n        return sample\n\n    def convert_model_output(\n        self, model_output: torch.FloatTensor, timestep: int, sample: torch.FloatTensor\n    ) -> torch.FloatTensor:\n        r\"\"\"\n        Convert the model output to the corresponding type that the algorithm PC needs.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n\n        Returns:\n            `torch.FloatTensor`: the converted model output.\n        \"\"\"\n        if self.predict_x0:\n            if self.config.prediction_type == \"epsilon\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = (sample - sigma_t * model_output) / alpha_t\n            elif self.config.prediction_type == \"sample\":\n                x0_pred = model_output\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                x0_pred = alpha_t * sample - sigma_t * model_output\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the UniPCMultistepScheduler.\"\n                )\n\n            if self.config.thresholding:\n                x0_pred = self._threshold_sample(x0_pred)\n\n            return x0_pred\n        else:\n            if self.config.prediction_type == \"epsilon\":\n                return model_output\n            elif self.config.prediction_type == \"sample\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = (sample - alpha_t * model_output) / sigma_t\n                return epsilon\n            elif self.config.prediction_type == \"v_prediction\":\n                alpha_t, sigma_t = self.alpha_t[timestep], self.sigma_t[timestep]\n                epsilon = alpha_t * model_output + sigma_t * sample\n                
return epsilon\n            else:\n                raise ValueError(\n                    f\"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, `sample`, or\"\n                    \" `v_prediction` for the UniPCMultistepScheduler.\"\n                )\n\n    def multistep_uni_p_bh_update(\n        self,\n        model_output: torch.FloatTensor,\n        prev_timestep: int,\n        sample: torch.FloatTensor,\n        order: int,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the UniP (B(h) version). Alternatively, `self.solver_p` is used if is specified.\n\n        Args:\n            model_output (`torch.FloatTensor`):\n                direct outputs from learned diffusion model at the current timestep.\n            prev_timestep (`int`): previous discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            order (`int`): the order of UniP at this step, also the p in UniPC-p.\n\n        Returns:\n            `torch.FloatTensor`: the sample tensor at the previous timestep.\n        \"\"\"\n        timestep_list = self.timestep_list\n        model_output_list = self.model_outputs\n\n        s0, t = self.timestep_list[-1], prev_timestep\n        m0 = model_output_list[-1]\n        x = sample\n\n        if self.solver_p:\n            x_t = self.solver_p.step(model_output, s0, x).prev_sample\n            return x_t\n\n        lambda_t, lambda_s0 = self.lambda_t[t], self.lambda_t[s0]\n        alpha_t, alpha_s0 = self.alpha_t[t], self.alpha_t[s0]\n        sigma_t, sigma_s0 = self.sigma_t[t], self.sigma_t[s0]\n\n        h = lambda_t - lambda_s0\n        device = sample.device\n\n        rks = []\n        D1s = []\n        for i in range(1, order):\n            si = timestep_list[-(i + 1)]\n            mi = model_output_list[-(i + 1)]\n            lambda_si = self.lambda_t[si]\n            rk = (lambda_si - lambda_s0) / h\n            rks.append(rk)\n            D1s.append((mi - m0) / rk)\n\n        rks.append(1.0)\n        rks = torch.tensor(rks, device=device)\n\n        R = []\n        b = []\n\n        hh = -h if self.predict_x0 else h\n        h_phi_1 = torch.expm1(hh)  # h\\phi_1(h) = e^h - 1\n        h_phi_k = h_phi_1 / hh - 1\n\n        factorial_i = 1\n\n        if self.config.solver_type == \"bh1\":\n            B_h = hh\n        elif self.config.solver_type == \"bh2\":\n            B_h = torch.expm1(hh)\n        else:\n            raise NotImplementedError()\n\n        for i in range(1, order + 1):\n            R.append(torch.pow(rks, i - 1))\n            b.append(h_phi_k * factorial_i / B_h)\n            factorial_i *= i + 1\n            h_phi_k = h_phi_k / hh - 1 / factorial_i\n\n        R = torch.stack(R)\n        b = torch.tensor(b, device=device)\n\n        if len(D1s) > 0:\n            D1s = torch.stack(D1s, dim=1)  # (B, K)\n            # for order 2, we use a simplified version\n            if order == 2:\n                rhos_p = torch.tensor([0.5], dtype=x.dtype, device=device)\n            else:\n                rhos_p = torch.linalg.solve(R[:-1, :-1], b[:-1])\n        else:\n            D1s = None\n\n        if self.predict_x0:\n            x_t_ = sigma_t / sigma_s0 * x - alpha_t * h_phi_1 * m0\n            if D1s is not None:\n                pred_res = torch.einsum(\"k,bkchw->bchw\", rhos_p, D1s)\n            else:\n                pred_res = 0\n            x_t = x_t_ - alpha_t * B_h * pred_res\n        
else:\n            x_t_ = alpha_t / alpha_s0 * x - sigma_t * h_phi_1 * m0\n            if D1s is not None:\n                pred_res = torch.einsum(\"k,bkchw->bchw\", rhos_p, D1s)\n            else:\n                pred_res = 0\n            x_t = x_t_ - sigma_t * B_h * pred_res\n\n        x_t = x_t.to(x.dtype)\n        return x_t\n\n    def multistep_uni_c_bh_update(\n        self,\n        this_model_output: torch.FloatTensor,\n        this_timestep: int,\n        last_sample: torch.FloatTensor,\n        this_sample: torch.FloatTensor,\n        order: int,\n    ) -> torch.FloatTensor:\n        \"\"\"\n        One step for the UniC (B(h) version).\n\n        Args:\n            this_model_output (`torch.FloatTensor`): the model outputs at `x_t`\n            this_timestep (`int`): the current timestep `t`\n            last_sample (`torch.FloatTensor`): the generated sample before the last predictor: `x_{t-1}`\n            this_sample (`torch.FloatTensor`): the generated sample after the last predictor: `x_{t}`\n            order (`int`): the `p` of UniC-p at this step. Note that the effective order of accuracy\n                should be order + 1\n\n        Returns:\n            `torch.FloatTensor`: the corrected sample tensor at the current timestep.\n        \"\"\"\n        timestep_list = self.timestep_list\n        model_output_list = self.model_outputs\n\n        s0, t = timestep_list[-1], this_timestep\n        m0 = model_output_list[-1]\n        x = last_sample\n        x_t = this_sample\n        model_t = this_model_output\n\n        lambda_t, lambda_s0 = self.lambda_t[t], self.lambda_t[s0]\n        alpha_t, alpha_s0 = self.alpha_t[t], self.alpha_t[s0]\n        sigma_t, sigma_s0 = self.sigma_t[t], self.sigma_t[s0]\n\n        h = lambda_t - lambda_s0\n        device = this_sample.device\n\n        rks = []\n        D1s = []\n        for i in range(1, order):\n            si = timestep_list[-(i + 1)]\n            mi = model_output_list[-(i + 1)]\n            lambda_si = self.lambda_t[si]\n            rk = (lambda_si - lambda_s0) / h\n            rks.append(rk)\n            D1s.append((mi - m0) / rk)\n\n        rks.append(1.0)\n        rks = torch.tensor(rks, device=device)\n\n        R = []\n        b = []\n\n        hh = -h if self.predict_x0 else h\n        h_phi_1 = torch.expm1(hh)  # h\\phi_1(h) = e^h - 1\n        h_phi_k = h_phi_1 / hh - 1\n\n        factorial_i = 1\n\n        if self.config.solver_type == \"bh1\":\n            B_h = hh\n        elif self.config.solver_type == \"bh2\":\n            B_h = torch.expm1(hh)\n        else:\n            raise NotImplementedError()\n\n        for i in range(1, order + 1):\n            R.append(torch.pow(rks, i - 1))\n            b.append(h_phi_k * factorial_i / B_h)\n            factorial_i *= i + 1\n            h_phi_k = h_phi_k / hh - 1 / factorial_i\n\n        R = torch.stack(R)\n        b = torch.tensor(b, device=device)\n\n        if len(D1s) > 0:\n            D1s = torch.stack(D1s, dim=1)\n        else:\n            D1s = None\n\n        # for order 1, we use a simplified version\n        if order == 1:\n            rhos_c = torch.tensor([0.5], dtype=x.dtype, device=device)\n        else:\n            rhos_c = torch.linalg.solve(R, b)\n\n        if self.predict_x0:\n            x_t_ = sigma_t / sigma_s0 * x - alpha_t * h_phi_1 * m0\n            if D1s is not None:\n                corr_res = torch.einsum(\"k,bkchw->bchw\", rhos_c[:-1], D1s)\n            else:\n                corr_res = 0\n            D1_t = model_t - m0\n         
   x_t = x_t_ - alpha_t * B_h * (corr_res + rhos_c[-1] * D1_t)\n        else:\n            x_t_ = alpha_t / alpha_s0 * x - sigma_t * h_phi_1 * m0\n            if D1s is not None:\n                corr_res = torch.einsum(\"k,bkchw->bchw\", rhos_c[:-1], D1s)\n            else:\n                corr_res = 0\n            D1_t = model_t - m0\n            x_t = x_t_ - sigma_t * B_h * (corr_res + rhos_c[-1] * D1_t)\n        x_t = x_t.to(x.dtype)\n        return x_t\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: int,\n        sample: torch.FloatTensor,\n        return_dict: bool = True,\n    ) -> Union[SchedulerOutput, Tuple]:\n        \"\"\"\n        Step function propagating the sample with the multistep UniPC.\n\n        Args:\n            model_output (`torch.FloatTensor`): direct output from learned diffusion model.\n            timestep (`int`): current discrete timestep in the diffusion chain.\n            sample (`torch.FloatTensor`):\n                current instance of sample being created by diffusion process.\n            return_dict (`bool`): option for returning tuple rather than SchedulerOutput class\n\n        Returns:\n            [`~scheduling_utils.SchedulerOutput`] or `tuple`: [`~scheduling_utils.SchedulerOutput`] if `return_dict` is\n            True, otherwise a `tuple`. When returning a tuple, the first element is the sample tensor.\n\n        \"\"\"\n\n        if self.num_inference_steps is None:\n            raise ValueError(\n                \"Number of inference steps is 'None', you need to run 'set_timesteps' after creating the scheduler\"\n            )\n\n        if isinstance(timestep, torch.Tensor):\n            timestep = timestep.to(self.timesteps.device)\n        step_index = (self.timesteps == timestep).nonzero()\n        if len(step_index) == 0:\n            step_index = len(self.timesteps) - 1\n        else:\n            step_index = step_index.item()\n\n        use_corrector = (\n            step_index > 0 and step_index - 1 not in self.disable_corrector and self.last_sample is not None\n        )\n\n        model_output_convert = self.convert_model_output(model_output, timestep, sample)\n        if use_corrector:\n            sample = self.multistep_uni_c_bh_update(\n                this_model_output=model_output_convert,\n                this_timestep=timestep,\n                last_sample=self.last_sample,\n                this_sample=sample,\n                order=self.this_order,\n            )\n\n        # now prepare to run the predictor\n        prev_timestep = 0 if step_index == len(self.timesteps) - 1 else self.timesteps[step_index + 1]\n\n        for i in range(self.config.solver_order - 1):\n            self.model_outputs[i] = self.model_outputs[i + 1]\n            self.timestep_list[i] = self.timestep_list[i + 1]\n\n        self.model_outputs[-1] = model_output_convert\n        self.timestep_list[-1] = timestep\n\n        if self.config.lower_order_final:\n            this_order = min(self.config.solver_order, len(self.timesteps) - step_index)\n        else:\n            this_order = self.config.solver_order\n\n        self.this_order = min(this_order, self.lower_order_nums + 1)  # warmup for multistep\n        assert self.this_order > 0\n\n        self.last_sample = sample\n        prev_sample = self.multistep_uni_p_bh_update(\n            model_output=model_output,  # pass the original non-converted model output, in case solver-p is used\n            prev_timestep=prev_timestep,\n            
sample=sample,\n            order=self.this_order,\n        )\n\n        if self.lower_order_nums < self.config.solver_order:\n            self.lower_order_nums += 1\n\n        if not return_dict:\n            return (prev_sample,)\n\n        return SchedulerOutput(prev_sample=prev_sample)\n\n    def scale_model_input(self, sample: torch.FloatTensor, *args, **kwargs) -> torch.FloatTensor:\n        \"\"\"\n        Ensures interchangeability with schedulers that need to scale the denoising model input depending on the\n        current timestep.\n\n        Args:\n            sample (`torch.FloatTensor`): input sample\n\n        Returns:\n            `torch.FloatTensor`: scaled input sample\n        \"\"\"\n        return sample\n\n    # Copied from diffusers.schedulers.scheduling_ddpm.DDPMScheduler.add_noise\n    def add_noise(\n        self,\n        original_samples: torch.FloatTensor,\n        noise: torch.FloatTensor,\n        timesteps: torch.IntTensor,\n    ) -> torch.FloatTensor:\n        # Make sure alphas_cumprod and timestep have same device and dtype as original_samples\n        alphas_cumprod = self.alphas_cumprod.to(device=original_samples.device, dtype=original_samples.dtype)\n        timesteps = timesteps.to(original_samples.device)\n\n        sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n        sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n        while len(sqrt_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_alpha_prod = sqrt_alpha_prod.unsqueeze(-1)\n\n        sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n        sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n        while len(sqrt_one_minus_alpha_prod.shape) < len(original_samples.shape):\n            sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.unsqueeze(-1)\n\n        noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n        return noisy_samples\n\n    def __len__(self):\n        return self.config.num_train_timesteps\n"
  },
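  {
    "path": "examples/sketches/unipc_sampling_sketch.py",
    "content": "# Illustrative usage sketch (not a library file): a minimal denoising loop that\n# drives UniPCMultistepScheduler.step(), which applies the UniC corrector to the\n# previous prediction (when available) and then the UniP predictor for the next\n# sample. The checkpoint id, subfolder names and step count are placeholder\n# assumptions, not values mandated by the scheduler.\nimport torch\n\nfrom diffusers import UNet2DModel, UniPCMultistepScheduler\n\n# Reuse the noise schedule stored with a pretrained pipeline; any compatible\n# scheduler class can be instantiated from the same config.\nscheduler = UniPCMultistepScheduler.from_pretrained(\"google/ddpm-celebahq-256\", subfolder=\"scheduler\")\nscheduler.set_timesteps(num_inference_steps=25)\n\nmodel = UNet2DModel.from_pretrained(\"google/ddpm-celebahq-256\", subfolder=\"unet\")\nsample = torch.randn(1, model.config.in_channels, model.config.sample_size, model.config.sample_size)\n\nfor t in scheduler.timesteps:\n    with torch.no_grad():\n        noise_pred = model(sample, t).sample\n    # step() converts the raw model output, runs the corrector, then predicts x_{t-1}.\n    sample = scheduler.step(noise_pred, t, sample).prev_sample\n"
  },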
  {
    "path": "diffusers/schedulers/scheduling_utils.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport importlib\nimport os\nfrom dataclasses import dataclass\nfrom enum import Enum\nfrom typing import Any, Dict, Optional, Union\n\nimport torch\n\nfrom ..utils import BaseOutput\n\n\nSCHEDULER_CONFIG_NAME = \"scheduler_config.json\"\n\n\n# NOTE: We make this type an enum because it simplifies usage in docs and prevents\n# circular imports when used for `_compatibles` within the schedulers module.\n# When it's used as a type in pipelines, it really is a Union because the actual\n# scheduler instance is passed in.\nclass KarrasDiffusionSchedulers(Enum):\n    DDIMScheduler = 1\n    DDPMScheduler = 2\n    PNDMScheduler = 3\n    LMSDiscreteScheduler = 4\n    EulerDiscreteScheduler = 5\n    HeunDiscreteScheduler = 6\n    EulerAncestralDiscreteScheduler = 7\n    DPMSolverMultistepScheduler = 8\n    DPMSolverSinglestepScheduler = 9\n    KDPM2DiscreteScheduler = 10\n    KDPM2AncestralDiscreteScheduler = 11\n    DEISMultistepScheduler = 12\n    UniPCMultistepScheduler = 13\n    DPMSolverSDEScheduler = 14\n\n\n@dataclass\nclass SchedulerOutput(BaseOutput):\n    \"\"\"\n    Base class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.FloatTensor` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n    \"\"\"\n\n    prev_sample: torch.FloatTensor\n\n\nclass SchedulerMixin:\n    \"\"\"\n    Mixin containing common functions for the schedulers.\n\n    Class attributes:\n        - **_compatibles** (`List[str]`) -- A list of classes that are compatible with the parent class, so that\n          `from_config` can be used from a class different than the one used to save the config (should be overridden\n          by parent class).\n    \"\"\"\n\n    config_name = SCHEDULER_CONFIG_NAME\n    _compatibles = []\n    has_compatibles = True\n\n    @classmethod\n    def from_pretrained(\n        cls,\n        pretrained_model_name_or_path: Dict[str, Any] = None,\n        subfolder: Optional[str] = None,\n        return_unused_kwargs=False,\n        **kwargs,\n    ):\n        r\"\"\"\n        Instantiate a Scheduler class from a pre-defined JSON configuration file inside a directory or Hub repo.\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n\n                    - A string, the *model id* of a model repo on huggingface.co. 
Valid model ids should have an\n                      organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing the scheduler configurations saved using\n                      [`~SchedulerMixin.save_pretrained`], e.g., `./my_model_directory/`.\n            subfolder (`str`, *optional*):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n            return_unused_kwargs (`bool`, *optional*, defaults to `False`):\n                Whether kwargs that are not consumed by the Python class should be returned or not.\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `transformers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n\n        <Tip>\n\n        Activate the special [\"offline-mode\"](https://huggingface.co/transformers/installation.html#offline-mode) to\n        use this method in a firewalled environment.\n\n        </Tip>\n\n        \"\"\"\n        config, kwargs, commit_hash = cls.load_config(\n            pretrained_model_name_or_path=pretrained_model_name_or_path,\n            subfolder=subfolder,\n            return_unused_kwargs=True,\n            return_commit_hash=True,\n            **kwargs,\n        )\n        return cls.from_config(config, return_unused_kwargs=return_unused_kwargs, **kwargs)\n\n    def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):\n        \"\"\"\n        Save a scheduler configuration object to the directory `save_directory`, so that it can be re-loaded using the\n        [`~SchedulerMixin.from_pretrained`] class method.\n\n        Args:\n            save_directory (`str` or `os.PathLike`):\n                Directory where the configuration JSON file will be saved (will be created if it does not exist).\n        \"\"\"\n        self.save_config(save_directory=save_directory, push_to_hub=push_to_hub, **kwargs)\n\n    @property\n    def compatibles(self):\n        \"\"\"\n        Returns all schedulers that are compatible with this scheduler\n\n        Returns:\n            `List[SchedulerMixin]`: List of compatible schedulers\n        \"\"\"\n        return self._get_compatibles()\n\n    @classmethod\n    def _get_compatibles(cls):\n        compatible_classes_str = list(set([cls.__name__] + cls._compatibles))\n        diffusers_library = importlib.import_module(__name__.split(\".\")[0])\n        compatible_classes = [\n            getattr(diffusers_library, c) for c in compatible_classes_str if hasattr(diffusers_library, c)\n        ]\n        return compatible_classes\n"
  },
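  {
    "path": "examples/sketches/scheduler_config_roundtrip_sketch.py",
    "content": "# Illustrative usage sketch (not a library file): the SchedulerMixin configuration\n# round trip documented in scheduling_utils.py. The hub repo id and the local\n# output directory below are placeholder assumptions.\nfrom diffusers import DDPMScheduler, UniPCMultistepScheduler\n\n# Load a scheduler config from a pipeline repo (schedulers live in a \"scheduler\" subfolder).\nscheduler = DDPMScheduler.from_pretrained(\"google/ddpm-celebahq-256\", subfolder=\"scheduler\")\n\n# Every class in `scheduler.compatibles` can be built from the same config,\n# which is how pipelines swap solvers without retraining.\nprint([cls.__name__ for cls in scheduler.compatibles])\nunipc = UniPCMultistepScheduler.from_config(scheduler.config)\n\n# Save the configuration JSON locally and load it back with `from_pretrained`.\nscheduler.save_pretrained(\"./my_scheduler\")\nreloaded = DDPMScheduler.from_pretrained(\"./my_scheduler\")\n"
  },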
  {
    "path": "diffusers/schedulers/scheduling_utils_flax.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport importlib\nimport math\nimport os\nfrom dataclasses import dataclass\nfrom enum import Enum\nfrom typing import Any, Dict, Optional, Tuple, Union\n\nimport flax\nimport jax.numpy as jnp\n\nfrom ..utils import BaseOutput\n\n\nSCHEDULER_CONFIG_NAME = \"scheduler_config.json\"\n\n\n# NOTE: We make this type an enum because it simplifies usage in docs and prevents\n# circular imports when used for `_compatibles` within the schedulers module.\n# When it's used as a type in pipelines, it really is a Union because the actual\n# scheduler instance is passed in.\nclass FlaxKarrasDiffusionSchedulers(Enum):\n    FlaxDDIMScheduler = 1\n    FlaxDDPMScheduler = 2\n    FlaxPNDMScheduler = 3\n    FlaxLMSDiscreteScheduler = 4\n    FlaxDPMSolverMultistepScheduler = 5\n\n\n@dataclass\nclass FlaxSchedulerOutput(BaseOutput):\n    \"\"\"\n    Base class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`jnp.ndarray` of shape `(batch_size, num_channels, height, width)` for images):\n            Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n    \"\"\"\n\n    prev_sample: jnp.ndarray\n\n\nclass FlaxSchedulerMixin:\n    \"\"\"\n    Mixin containing common functions for the schedulers.\n\n    Class attributes:\n        - **_compatibles** (`List[str]`) -- A list of classes that are compatible with the parent class, so that\n          `from_config` can be used from a class different than the one used to save the config (should be overridden\n          by parent class).\n    \"\"\"\n\n    config_name = SCHEDULER_CONFIG_NAME\n    ignore_for_config = [\"dtype\"]\n    _compatibles = []\n    has_compatibles = True\n\n    @classmethod\n    def from_pretrained(\n        cls,\n        pretrained_model_name_or_path: Dict[str, Any] = None,\n        subfolder: Optional[str] = None,\n        return_unused_kwargs=False,\n        **kwargs,\n    ):\n        r\"\"\"\n        Instantiate a Scheduler class from a pre-defined JSON-file.\n\n        Parameters:\n            pretrained_model_name_or_path (`str` or `os.PathLike`, *optional*):\n                Can be either:\n\n                    - A string, the *model id* of a model repo on huggingface.co. 
Valid model ids should have an\n                      organization name, like `google/ddpm-celebahq-256`.\n                    - A path to a *directory* containing model weights saved using [`~SchedulerMixin.save_pretrained`],\n                      e.g., `./my_model_directory/`.\n            subfolder (`str`, *optional*):\n                In case the relevant files are located inside a subfolder of the model repo (either remote in\n                huggingface.co or downloaded locally), you can specify the folder name here.\n            return_unused_kwargs (`bool`, *optional*, defaults to `False`):\n                Whether kwargs that are not consumed by the Python class should be returned or not.\n\n            cache_dir (`Union[str, os.PathLike]`, *optional*):\n                Path to a directory in which a downloaded pretrained model configuration should be cached if the\n                standard cache should not be used.\n            force_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to force the (re-)download of the model weights and configuration files, overriding the\n                cached versions if they exist.\n            resume_download (`bool`, *optional*, defaults to `False`):\n                Whether or not to delete incompletely received files. Will attempt to resume the download if such a\n                file exists.\n            proxies (`Dict[str, str]`, *optional*):\n                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n                'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.\n            output_loading_info(`bool`, *optional*, defaults to `False`):\n                Whether or not to also return a dictionary containing missing keys, unexpected keys and error messages.\n            local_files_only(`bool`, *optional*, defaults to `False`):\n                Whether or not to only look at local files (i.e., do not try to download the model).\n            use_auth_token (`str` or *bool*, *optional*):\n                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n                when running `transformers-cli login` (stored in `~/.huggingface`).\n            revision (`str`, *optional*, defaults to `\"main\"`):\n                The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n                identifier allowed by git.\n\n        <Tip>\n\n         It is required to be logged in (`huggingface-cli login`) when you want to use private or [gated\n         models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n        </Tip>\n\n        <Tip>\n\n        Activate the special [\"offline-mode\"](https://huggingface.co/transformers/installation.html#offline-mode) to\n        use this method in a firewalled environment.\n\n        </Tip>\n\n        \"\"\"\n        config, kwargs = cls.load_config(\n            pretrained_model_name_or_path=pretrained_model_name_or_path,\n            subfolder=subfolder,\n            return_unused_kwargs=True,\n            **kwargs,\n        )\n        scheduler, unused_kwargs = cls.from_config(config, return_unused_kwargs=True, **kwargs)\n\n        if hasattr(scheduler, \"create_state\") and getattr(scheduler, \"has_state\", False):\n            state = scheduler.create_state()\n\n        if return_unused_kwargs:\n            return scheduler, state, unused_kwargs\n\n        return scheduler, state\n\n    def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):\n        \"\"\"\n        Save a scheduler configuration object to the directory `save_directory`, so that it can be re-loaded using the\n        [`~FlaxSchedulerMixin.from_pretrained`] class method.\n\n        Args:\n            save_directory (`str` or `os.PathLike`):\n                Directory where the configuration JSON file will be saved (will be created if it does not exist).\n        \"\"\"\n        self.save_config(save_directory=save_directory, push_to_hub=push_to_hub, **kwargs)\n\n    @property\n    def compatibles(self):\n        \"\"\"\n        Returns all schedulers that are compatible with this scheduler\n\n        Returns:\n            `List[SchedulerMixin]`: List of compatible schedulers\n        \"\"\"\n        return self._get_compatibles()\n\n    @classmethod\n    def _get_compatibles(cls):\n        compatible_classes_str = list(set([cls.__name__] + cls._compatibles))\n        diffusers_library = importlib.import_module(__name__.split(\".\")[0])\n        compatible_classes = [\n            getattr(diffusers_library, c) for c in compatible_classes_str if hasattr(diffusers_library, c)\n        ]\n        return compatible_classes\n\n\ndef broadcast_to_shape_from_left(x: jnp.ndarray, shape: Tuple[int]) -> jnp.ndarray:\n    assert len(shape) >= x.ndim\n    return jnp.broadcast_to(x.reshape(x.shape + (1,) * (len(shape) - x.ndim)), shape)\n\n\ndef betas_for_alpha_bar(num_diffusion_timesteps: int, max_beta=0.999, dtype=jnp.float32) -> jnp.ndarray:\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function, which defines the cumulative product of\n    (1-beta) over time from t = [0,1].\n\n    Contains a function alpha_bar that takes an argument t and transforms it to the cumulative product of (1-beta) up\n    to that part of the diffusion process.\n\n\n    Args:\n        num_diffusion_timesteps (`int`): the number of betas to produce.\n        max_beta (`float`): the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n\n    Returns:\n        betas (`jnp.ndarray`): the betas used by the scheduler to step the model outputs\n    \"\"\"\n\n    def 
alpha_bar(time_step):\n        return math.cos((time_step + 0.008) / 1.008 * math.pi / 2) ** 2\n\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return jnp.array(betas, dtype=dtype)\n\n\n@flax.struct.dataclass\nclass CommonSchedulerState:\n    alphas: jnp.ndarray\n    betas: jnp.ndarray\n    alphas_cumprod: jnp.ndarray\n\n    @classmethod\n    def create(cls, scheduler):\n        config = scheduler.config\n\n        if config.trained_betas is not None:\n            betas = jnp.asarray(config.trained_betas, dtype=scheduler.dtype)\n        elif config.beta_schedule == \"linear\":\n            betas = jnp.linspace(config.beta_start, config.beta_end, config.num_train_timesteps, dtype=scheduler.dtype)\n        elif config.beta_schedule == \"scaled_linear\":\n            # this schedule is very specific to the latent diffusion model.\n            betas = (\n                jnp.linspace(\n                    config.beta_start**0.5, config.beta_end**0.5, config.num_train_timesteps, dtype=scheduler.dtype\n                )\n                ** 2\n            )\n        elif config.beta_schedule == \"squaredcos_cap_v2\":\n            # Glide cosine schedule\n            betas = betas_for_alpha_bar(config.num_train_timesteps, dtype=scheduler.dtype)\n        else:\n            raise NotImplementedError(\n                f\"beta_schedule {config.beta_schedule} is not implemented for scheduler {scheduler.__class__.__name__}\"\n            )\n\n        alphas = 1.0 - betas\n\n        alphas_cumprod = jnp.cumprod(alphas, axis=0)\n\n        return cls(\n            alphas=alphas,\n            betas=betas,\n            alphas_cumprod=alphas_cumprod,\n        )\n\n\ndef get_sqrt_alpha_prod(\n    state: CommonSchedulerState, original_samples: jnp.ndarray, noise: jnp.ndarray, timesteps: jnp.ndarray\n):\n    alphas_cumprod = state.alphas_cumprod\n\n    sqrt_alpha_prod = alphas_cumprod[timesteps] ** 0.5\n    sqrt_alpha_prod = sqrt_alpha_prod.flatten()\n    sqrt_alpha_prod = broadcast_to_shape_from_left(sqrt_alpha_prod, original_samples.shape)\n\n    sqrt_one_minus_alpha_prod = (1 - alphas_cumprod[timesteps]) ** 0.5\n    sqrt_one_minus_alpha_prod = sqrt_one_minus_alpha_prod.flatten()\n    sqrt_one_minus_alpha_prod = broadcast_to_shape_from_left(sqrt_one_minus_alpha_prod, original_samples.shape)\n\n    return sqrt_alpha_prod, sqrt_one_minus_alpha_prod\n\n\ndef add_noise_common(\n    state: CommonSchedulerState, original_samples: jnp.ndarray, noise: jnp.ndarray, timesteps: jnp.ndarray\n):\n    sqrt_alpha_prod, sqrt_one_minus_alpha_prod = get_sqrt_alpha_prod(state, original_samples, noise, timesteps)\n    noisy_samples = sqrt_alpha_prod * original_samples + sqrt_one_minus_alpha_prod * noise\n    return noisy_samples\n\n\ndef get_velocity_common(state: CommonSchedulerState, sample: jnp.ndarray, noise: jnp.ndarray, timesteps: jnp.ndarray):\n    sqrt_alpha_prod, sqrt_one_minus_alpha_prod = get_sqrt_alpha_prod(state, sample, noise, timesteps)\n    velocity = sqrt_alpha_prod * noise - sqrt_one_minus_alpha_prod * sample\n    return velocity\n"
  },
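  {
    "path": "examples/sketches/flax_noise_schedule_sketch.py",
    "content": "# Illustrative usage sketch (not a library file): build the cosine\n# (\"squaredcos_cap_v2\") beta schedule with betas_for_alpha_bar and run\n# add_noise_common from scheduling_utils_flax.py on dummy data. The array shapes\n# and timesteps below are placeholder assumptions.\nimport jax.numpy as jnp\n\nfrom diffusers.schedulers.scheduling_utils_flax import (\n    CommonSchedulerState,\n    add_noise_common,\n    betas_for_alpha_bar,\n)\n\nbetas = betas_for_alpha_bar(num_diffusion_timesteps=1000, max_beta=0.999)\nalphas = 1.0 - betas\nstate = CommonSchedulerState(alphas=alphas, betas=betas, alphas_cumprod=jnp.cumprod(alphas, axis=0))\n\n# q(x_t | x_0) = sqrt(alpha_bar_t) * x_0 + sqrt(1 - alpha_bar_t) * noise, with the\n# per-sample scalars broadcast from the left to the sample shape.\noriginal = jnp.ones((2, 3, 8, 8))\nnoise = jnp.zeros_like(original)\ntimesteps = jnp.array([0, 999])\nnoisy = add_noise_common(state, original, noise, timesteps)\nprint(noisy.shape, float(noisy[0].mean()), float(noisy[1].mean()))\n"
  },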
  {
    "path": "diffusers/schedulers/scheduling_vq_diffusion.py",
    "content": "# Copyright 2023 Microsoft and The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom dataclasses import dataclass\nfrom typing import Optional, Tuple, Union\n\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\n\nfrom ..configuration_utils import ConfigMixin, register_to_config\nfrom ..utils import BaseOutput\nfrom .scheduling_utils import SchedulerMixin\n\n\n@dataclass\nclass VQDiffusionSchedulerOutput(BaseOutput):\n    \"\"\"\n    Output class for the scheduler's step function output.\n\n    Args:\n        prev_sample (`torch.LongTensor` of shape `(batch size, num latent pixels)`):\n            Computed sample x_{t-1} of previous timestep. `prev_sample` should be used as next model input in the\n            denoising loop.\n    \"\"\"\n\n    prev_sample: torch.LongTensor\n\n\ndef index_to_log_onehot(x: torch.LongTensor, num_classes: int) -> torch.FloatTensor:\n    \"\"\"\n    Convert batch of vector of class indices into batch of log onehot vectors\n\n    Args:\n        x (`torch.LongTensor` of shape `(batch size, vector length)`):\n            Batch of class indices\n\n        num_classes (`int`):\n            number of classes to be used for the onehot vectors\n\n    Returns:\n        `torch.FloatTensor` of shape `(batch size, num classes, vector length)`:\n            Log onehot vectors\n    \"\"\"\n    x_onehot = F.one_hot(x, num_classes)\n    x_onehot = x_onehot.permute(0, 2, 1)\n    log_x = torch.log(x_onehot.float().clamp(min=1e-30))\n    return log_x\n\n\ndef gumbel_noised(logits: torch.FloatTensor, generator: Optional[torch.Generator]) -> torch.FloatTensor:\n    \"\"\"\n    Apply gumbel noise to `logits`\n    \"\"\"\n    uniform = torch.rand(logits.shape, device=logits.device, generator=generator)\n    gumbel_noise = -torch.log(-torch.log(uniform + 1e-30) + 1e-30)\n    noised = gumbel_noise + logits\n    return noised\n\n\ndef alpha_schedules(num_diffusion_timesteps: int, alpha_cum_start=0.99999, alpha_cum_end=0.000009):\n    \"\"\"\n    Cumulative and non-cumulative alpha schedules.\n\n    See section 4.1.\n    \"\"\"\n    att = (\n        np.arange(0, num_diffusion_timesteps) / (num_diffusion_timesteps - 1) * (alpha_cum_end - alpha_cum_start)\n        + alpha_cum_start\n    )\n    att = np.concatenate(([1], att))\n    at = att[1:] / att[:-1]\n    att = np.concatenate((att[1:], [1]))\n    return at, att\n\n\ndef gamma_schedules(num_diffusion_timesteps: int, gamma_cum_start=0.000009, gamma_cum_end=0.99999):\n    \"\"\"\n    Cumulative and non-cumulative gamma schedules.\n\n    See section 4.1.\n    \"\"\"\n    ctt = (\n        np.arange(0, num_diffusion_timesteps) / (num_diffusion_timesteps - 1) * (gamma_cum_end - gamma_cum_start)\n        + gamma_cum_start\n    )\n    ctt = np.concatenate(([0], ctt))\n    one_minus_ctt = 1 - ctt\n    one_minus_ct = one_minus_ctt[1:] / one_minus_ctt[:-1]\n    ct = 1 - one_minus_ct\n    ctt = np.concatenate((ctt[1:], [0]))\n    return ct, ctt\n\n\nclass 
VQDiffusionScheduler(SchedulerMixin, ConfigMixin):\n    \"\"\"\n    The VQ-diffusion transformer outputs predicted probabilities of the initial unnoised image.\n\n    The VQ-diffusion scheduler converts the transformer's output into a sample for the unnoised image at the previous\n    diffusion timestep.\n\n    [`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`\n    function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.\n    [`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and\n    [`~SchedulerMixin.from_pretrained`] functions.\n\n    For more details, see the original paper: https://arxiv.org/abs/2111.14822\n\n    Args:\n        num_vec_classes (`int`):\n            The number of classes of the vector embeddings of the latent pixels. Includes the class for the masked\n            latent pixel.\n\n        num_train_timesteps (`int`):\n            Number of diffusion steps used to train the model.\n\n        alpha_cum_start (`float`):\n            The starting cumulative alpha value.\n\n        alpha_cum_end (`float`):\n            The ending cumulative alpha value.\n\n        gamma_cum_start (`float`):\n            The starting cumulative gamma value.\n\n        gamma_cum_end (`float`):\n            The ending cumulative gamma value.\n    \"\"\"\n\n    order = 1\n\n    @register_to_config\n    def __init__(\n        self,\n        num_vec_classes: int,\n        num_train_timesteps: int = 100,\n        alpha_cum_start: float = 0.99999,\n        alpha_cum_end: float = 0.000009,\n        gamma_cum_start: float = 0.000009,\n        gamma_cum_end: float = 0.99999,\n    ):\n        self.num_embed = num_vec_classes\n\n        # By convention, the index for the mask class is the last class index\n        self.mask_class = self.num_embed - 1\n\n        at, att = alpha_schedules(num_train_timesteps, alpha_cum_start=alpha_cum_start, alpha_cum_end=alpha_cum_end)\n        ct, ctt = gamma_schedules(num_train_timesteps, gamma_cum_start=gamma_cum_start, gamma_cum_end=gamma_cum_end)\n\n        num_non_mask_classes = self.num_embed - 1\n        bt = (1 - at - ct) / num_non_mask_classes\n        btt = (1 - att - ctt) / num_non_mask_classes\n\n        at = torch.tensor(at.astype(\"float64\"))\n        bt = torch.tensor(bt.astype(\"float64\"))\n        ct = torch.tensor(ct.astype(\"float64\"))\n        log_at = torch.log(at)\n        log_bt = torch.log(bt)\n        log_ct = torch.log(ct)\n\n        att = torch.tensor(att.astype(\"float64\"))\n        btt = torch.tensor(btt.astype(\"float64\"))\n        ctt = torch.tensor(ctt.astype(\"float64\"))\n        log_cumprod_at = torch.log(att)\n        log_cumprod_bt = torch.log(btt)\n        log_cumprod_ct = torch.log(ctt)\n\n        self.log_at = log_at.float()\n        self.log_bt = log_bt.float()\n        self.log_ct = log_ct.float()\n        self.log_cumprod_at = log_cumprod_at.float()\n        self.log_cumprod_bt = log_cumprod_bt.float()\n        self.log_cumprod_ct = log_cumprod_ct.float()\n\n        # setable values\n        self.num_inference_steps = None\n        self.timesteps = torch.from_numpy(np.arange(0, num_train_timesteps)[::-1].copy())\n\n    def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.device] = None):\n        \"\"\"\n        Sets the discrete timesteps used for the diffusion chain. 
Supporting function to be run before inference.\n\n        Args:\n            num_inference_steps (`int`):\n                the number of diffusion steps used when generating samples with a pre-trained model.\n\n            device (`str` or `torch.device`):\n                device to place the timesteps and the diffusion process parameters (alpha, beta, gamma) on.\n        \"\"\"\n        self.num_inference_steps = num_inference_steps\n        timesteps = np.arange(0, self.num_inference_steps)[::-1].copy()\n        self.timesteps = torch.from_numpy(timesteps).to(device)\n\n        self.log_at = self.log_at.to(device)\n        self.log_bt = self.log_bt.to(device)\n        self.log_ct = self.log_ct.to(device)\n        self.log_cumprod_at = self.log_cumprod_at.to(device)\n        self.log_cumprod_bt = self.log_cumprod_bt.to(device)\n        self.log_cumprod_ct = self.log_cumprod_ct.to(device)\n\n    def step(\n        self,\n        model_output: torch.FloatTensor,\n        timestep: torch.long,\n        sample: torch.LongTensor,\n        generator: Optional[torch.Generator] = None,\n        return_dict: bool = True,\n    ) -> Union[VQDiffusionSchedulerOutput, Tuple]:\n        \"\"\"\n        Predict the sample at the previous timestep via the reverse transition distribution i.e. Equation (11). See the\n        docstring for `self.q_posterior` for more in depth docs on how Equation (11) is computed.\n\n        Args:\n            model_output (`torch.FloatTensor` of shape `(batch size, num classes - 1, num latent pixels)`):\n                The log probabilities for the predicted classes of the initial latent pixels, i.e. `log p(x_0)`. Does\n                not include a prediction for the masked class as the initial unnoised image cannot be masked.\n\n            timestep (`torch.long`):\n                The timestep that determines which transition matrices are used.\n\n            sample (`torch.LongTensor` of shape `(batch size, num latent pixels)`):\n                The classes of each latent pixel at time `t`.\n\n            generator (`torch.Generator` or None):\n                RNG for the noise applied to p(x_{t-1} | x_t) before it is sampled from.\n\n            return_dict (`bool`):\n                option for returning tuple rather than VQDiffusionSchedulerOutput class\n\n        Returns:\n            [`~schedulers.scheduling_vq_diffusion.VQDiffusionSchedulerOutput`] or `tuple`:\n            [`~schedulers.scheduling_vq_diffusion.VQDiffusionSchedulerOutput`] if `return_dict` is True, otherwise a\n            `tuple`. When returning a tuple, the first element is the sample tensor.\n        \"\"\"\n        if timestep == 0:\n            log_p_x_t_min_1 = model_output\n        else:\n            log_p_x_t_min_1 = self.q_posterior(model_output, sample, timestep)\n\n        log_p_x_t_min_1 = gumbel_noised(log_p_x_t_min_1, generator)\n\n        x_t_min_1 = log_p_x_t_min_1.argmax(dim=1)\n\n        if not return_dict:\n            return (x_t_min_1,)\n\n        return VQDiffusionSchedulerOutput(prev_sample=x_t_min_1)\n\n    def q_posterior(self, log_p_x_0, x_t, t):\n        \"\"\"\n        Calculates the log probabilities for the predicted classes of the image at timestep `t-1`. I.e. Equation (11).\n\n        Instead of directly computing equation (11), we use Equation (5) to restate Equation (11) in terms of only\n        forward probabilities.\n\n        Equation (11) stated in terms of forward probabilities via Equation (5):\n\n        Where:\n        - the sum is over x_0 = {C_0 ... 
C_{k-1}} (classes for x_0)\n\n        p(x_{t-1} | x_t) = sum( q(x_t | x_{t-1}) * q(x_{t-1} | x_0) * p(x_0) / q(x_t | x_0) )\n\n        Args:\n            log_p_x_0: (`torch.FloatTensor` of shape `(batch size, num classes - 1, num latent pixels)`):\n                The log probabilities for the predicted classes of the initial latent pixels. Does not include a\n                prediction for the masked class as the initial unnoised image cannot be masked.\n\n            x_t: (`torch.LongTensor` of shape `(batch size, num latent pixels)`):\n                The classes of each latent pixel at time `t`\n\n            t (torch.Long):\n                The timestep that determines which transition matrix is used.\n\n        Returns:\n            `torch.FloatTensor` of shape `(batch size, num classes, num latent pixels)`:\n                The log probabilities for the predicted classes of the image at timestep `t-1`. I.e. Equation (11).\n        \"\"\"\n        log_onehot_x_t = index_to_log_onehot(x_t, self.num_embed)\n\n        log_q_x_t_given_x_0 = self.log_Q_t_transitioning_to_known_class(\n            t=t, x_t=x_t, log_onehot_x_t=log_onehot_x_t, cumulative=True\n        )\n\n        log_q_t_given_x_t_min_1 = self.log_Q_t_transitioning_to_known_class(\n            t=t, x_t=x_t, log_onehot_x_t=log_onehot_x_t, cumulative=False\n        )\n\n        # p_0(x_0=C_0 | x_t) / q(x_t | x_0=C_0)          ...      p_n(x_0=C_0 | x_t) / q(x_t | x_0=C_0)\n        #               .                    .                                   .\n        #               .                            .                           .\n        #               .                                      .                 .\n        # p_0(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1})  ...      p_n(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1})\n        q = log_p_x_0 - log_q_x_t_given_x_0\n\n        # sum_0 = p_0(x_0=C_0 | x_t) / q(x_t | x_0=C_0) + ... + p_0(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}), ... ,\n        # sum_n = p_n(x_0=C_0 | x_t) / q(x_t | x_0=C_0) + ... + p_n(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1})\n        q_log_sum_exp = torch.logsumexp(q, dim=1, keepdim=True)\n\n        # p_0(x_0=C_0 | x_t) / q(x_t | x_0=C_0) / sum_0          ...      p_n(x_0=C_0 | x_t) / q(x_t | x_0=C_0) / sum_n\n        #                        .                             .                                   .\n        #                        .                                     .                           .\n        #                        .                                               .                 .\n        # p_0(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}) / sum_0  ...      p_n(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}) / sum_n\n        q = q - q_log_sum_exp\n\n        # (p_0(x_0=C_0 | x_t) / q(x_t | x_0=C_0) / sum_0) * a_cumulative_{t-1} + b_cumulative_{t-1}          ...      (p_n(x_0=C_0 | x_t) / q(x_t | x_0=C_0) / sum_n) * a_cumulative_{t-1} + b_cumulative_{t-1}\n        #                                         .                                                .                                              .\n        #                                         .                                                        .                                      .\n        #                                         .                                                                  .                            .\n        # (p_0(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}) / sum_0) * a_cumulative_{t-1} + b_cumulative_{t-1}  ...      
(p_n(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}) / sum_n) * a_cumulative_{t-1} + b_cumulative_{t-1}\n        # c_cumulative_{t-1}                                                                                 ...      c_cumulative_{t-1}\n        q = self.apply_cumulative_transitions(q, t - 1)\n\n        # ((p_0(x_0=C_0 | x_t) / q(x_t | x_0=C_0) / sum_0) * a_cumulative_{t-1} + b_cumulative_{t-1}) * q(x_t | x_{t-1}=C_0) * sum_0              ...      ((p_n(x_0=C_0 | x_t) / q(x_t | x_0=C_0) / sum_n) * a_cumulative_{t-1} + b_cumulative_{t-1}) * q(x_t | x_{t-1}=C_0) * sum_n\n        #                                                            .                                                                 .                                              .\n        #                                                            .                                                                         .                                      .\n        #                                                            .                                                                                   .                            .\n        # ((p_0(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}) / sum_0) * a_cumulative_{t-1} + b_cumulative_{t-1}) * q(x_t | x_{t-1}=C_{k-1}) * sum_0  ...      ((p_n(x_0=C_{k-1} | x_t) / q(x_t | x_0=C_{k-1}) / sum_n) * a_cumulative_{t-1} + b_cumulative_{t-1}) * q(x_t | x_{t-1}=C_{k-1}) * sum_n\n        # c_cumulative_{t-1} * q(x_t | x_{t-1}=C_k) * sum_0                                                                                       ...      c_cumulative_{t-1} * q(x_t | x_{t-1}=C_k) * sum_0\n        log_p_x_t_min_1 = q + log_q_t_given_x_t_min_1 + q_log_sum_exp\n\n        # For each column, there are two possible cases.\n        #\n        # Where:\n        # - sum(p_n(x_0))) is summing over all classes for x_0\n        # - C_i is the class transitioning from (not to be confused with c_t and c_cumulative_t being used for gamma's)\n        # - C_j is the class transitioning to\n        #\n        # 1. x_t is masked i.e. x_t = c_k\n        #\n        # Simplifying the expression, the column vector is:\n        #                                                      .\n        #                                                      .\n        #                                                      .\n        # (c_t / c_cumulative_t) * (a_cumulative_{t-1} * p_n(x_0 = C_i | x_t) + b_cumulative_{t-1} * sum(p_n(x_0)))\n        #                                                      .\n        #                                                      .\n        #                                                      .\n        # (c_cumulative_{t-1} / c_cumulative_t) * sum(p_n(x_0))\n        #\n        # From equation (11) stated in terms of forward probabilities, the last row is trivially verified.\n        #\n        # For the other rows, we can state the equation as ...\n        #\n        # (c_t / c_cumulative_t) * [b_cumulative_{t-1} * p(x_0=c_0) + ... + (a_cumulative_{t-1} + b_cumulative_{t-1}) * p(x_0=C_i) + ... + b_cumulative_{k-1} * p(x_0=c_{k-1})]\n        #\n        # This verifies the other rows.\n        #\n        # 2. 
x_t is not masked\n        #\n        # Simplifying the expression, there are two cases for the rows of the column vector, where C_j = C_i and where C_j != C_i:\n        #                                                      .\n        #                                                      .\n        #                                                      .\n        # C_j != C_i:        b_t * ((b_cumulative_{t-1} / b_cumulative_t) * p_n(x_0 = c_0) + ... + ((a_cumulative_{t-1} + b_cumulative_{t-1}) / b_cumulative_t) * p_n(x_0 = C_i) + ... + (b_cumulative_{t-1} / (a_cumulative_t + b_cumulative_t)) * p_n(c_0=C_j) + ... + (b_cumulative_{t-1} / b_cumulative_t) * p_n(x_0 = c_{k-1}))\n        #                                                      .\n        #                                                      .\n        #                                                      .\n        # C_j = C_i: (a_t + b_t) * ((b_cumulative_{t-1} / b_cumulative_t) * p_n(x_0 = c_0) + ... + ((a_cumulative_{t-1} + b_cumulative_{t-1}) / (a_cumulative_t + b_cumulative_t)) * p_n(x_0 = C_i = C_j) + ... + (b_cumulative_{t-1} / b_cumulative_t) * p_n(x_0 = c_{k-1}))\n        #                                                      .\n        #                                                      .\n        #                                                      .\n        # 0\n        #\n        # The last row is trivially verified. The other rows can be verified by directly expanding equation (11) stated in terms of forward probabilities.\n        return log_p_x_t_min_1\n\n    def log_Q_t_transitioning_to_known_class(\n        self, *, t: torch.int, x_t: torch.LongTensor, log_onehot_x_t: torch.FloatTensor, cumulative: bool\n    ):\n        \"\"\"\n        Returns the log probabilities of the rows from the (cumulative or non-cumulative) transition matrix for each\n        latent pixel in `x_t`.\n\n        See equation (7) for the complete non-cumulative transition matrix. The complete cumulative transition matrix\n        is the same structure except the parameters (alpha, beta, gamma) are the cumulative analogs.\n\n        Args:\n            t (torch.Long):\n                The timestep that determines which transition matrix is used.\n\n            x_t (`torch.LongTensor` of shape `(batch size, num latent pixels)`):\n                The classes of each latent pixel at time `t`.\n\n            log_onehot_x_t (`torch.FloatTensor` of shape `(batch size, num classes, num latent pixels)`):\n                The log one-hot vectors of `x_t`\n\n            cumulative (`bool`):\n                If cumulative is `False`, we use the single step transition matrix `t-1`->`t`. 
If cumulative is `True`,\n                we use the cumulative transition matrix `0`->`t`.\n\n        Returns:\n            `torch.FloatTensor` of shape `(batch size, num classes - 1, num latent pixels)`:\n                Each _column_ of the returned matrix is a _row_ of log probabilities of the complete probability\n                transition matrix.\n\n                When non cumulative, returns `self.num_classes - 1` rows because the initial latent pixel cannot be\n                masked.\n\n                Where:\n                - `q_n` is the probability distribution for the forward process of the `n`th latent pixel.\n                - C_0 is a class of a latent pixel embedding\n                - C_k is the class of the masked latent pixel\n\n                non-cumulative result (omitting logarithms):\n                ```\n                q_0(x_t | x_{t-1} = C_0) ... q_n(x_t | x_{t-1} = C_0)\n                          .      .                     .\n                          .               .            .\n                          .                      .     .\n                q_0(x_t | x_{t-1} = C_k) ... q_n(x_t | x_{t-1} = C_k)\n                ```\n\n                cumulative result (omitting logarithms):\n                ```\n                q_0_cumulative(x_t | x_0 = C_0)    ...  q_n_cumulative(x_t | x_0 = C_0)\n                          .               .                          .\n                          .                        .                 .\n                          .                               .          .\n                q_0_cumulative(x_t | x_0 = C_{k-1}) ... q_n_cumulative(x_t | x_0 = C_{k-1})\n                ```\n        \"\"\"\n        if cumulative:\n            a = self.log_cumprod_at[t]\n            b = self.log_cumprod_bt[t]\n            c = self.log_cumprod_ct[t]\n        else:\n            a = self.log_at[t]\n            b = self.log_bt[t]\n            c = self.log_ct[t]\n\n        if not cumulative:\n            # The values in the onehot vector can also be used as the logprobs for transitioning\n            # from masked latent pixels. If we are not calculating the cumulative transitions,\n            # we need to save these vectors to be re-appended to the final matrix so the values\n            # aren't overwritten.\n            #\n            # `P(x_t!=mask|x_{t-1=mask}) = 0` and 0 will be the value of the last row of the onehot vector\n            # if x_t is not masked\n            #\n            # `P(x_t=mask|x_{t-1=mask}) = 1` and 1 will be the value of the last row of the onehot vector\n            # if x_t is masked\n            log_onehot_x_t_transitioning_from_masked = log_onehot_x_t[:, -1, :].unsqueeze(1)\n\n        # `index_to_log_onehot` will add onehot vectors for masked pixels,\n        # so the default one hot matrix has one too many rows. See the doc string\n        # for an explanation of the dimensionality of the returned matrix.\n        log_onehot_x_t = log_onehot_x_t[:, :-1, :]\n\n        # this is a cheeky trick to produce the transition probabilities using log one-hot vectors.\n        #\n        # Don't worry about what values this sets in the columns that mark transitions\n        # to masked latent pixels. 
They are overwritten later with the `mask_class_mask`.\n        #\n        # Looking at the below logspace formula in non-logspace, each value will evaluate to either\n        # `1 * a + b = a + b` where `log_Q_t` has the one hot value in the column\n        # or\n        # `0 * a + b = b` where `log_Q_t` has the 0 values in the column.\n        #\n        # See equation 7 for more details.\n        log_Q_t = (log_onehot_x_t + a).logaddexp(b)\n\n        # The whole column of each masked pixel is `c`\n        mask_class_mask = x_t == self.mask_class\n        mask_class_mask = mask_class_mask.unsqueeze(1).expand(-1, self.num_embed - 1, -1)\n        log_Q_t[mask_class_mask] = c\n\n        if not cumulative:\n            log_Q_t = torch.cat((log_Q_t, log_onehot_x_t_transitioning_from_masked), dim=1)\n\n        return log_Q_t\n\n    def apply_cumulative_transitions(self, q, t):\n        bsz = q.shape[0]\n        a = self.log_cumprod_at[t]\n        b = self.log_cumprod_bt[t]\n        c = self.log_cumprod_ct[t]\n\n        num_latent_pixels = q.shape[2]\n        c = c.expand(bsz, 1, num_latent_pixels)\n\n        q = (q + a).logaddexp(b)\n        q = torch.cat((q, c), dim=1)\n\n        return q\n"
  },
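  {
    "path": "examples/sketches/vq_diffusion_scheduler_sketch.py",
    "content": "# Illustrative usage sketch (not a library file): drive VQDiffusionScheduler.step()\n# with a dummy \"transformer\" output so the tensor shapes documented in\n# scheduling_vq_diffusion.py are easy to see. The number of embedding classes and\n# latent pixels below are arbitrary placeholder assumptions.\nimport torch\n\nfrom diffusers import VQDiffusionScheduler\n\nnum_vec_classes = 10  # includes the masked class (index num_vec_classes - 1)\nnum_latent_pixels = 16\n\nscheduler = VQDiffusionScheduler(num_vec_classes=num_vec_classes, num_train_timesteps=100)\nscheduler.set_timesteps(100)\n\n# The reverse process starts from fully masked latent pixels.\nsample = torch.full((1, num_latent_pixels), scheduler.mask_class, dtype=torch.long)\n\nfor t in scheduler.timesteps:\n    # A real transformer predicts log p(x_0) over the non-masked classes; random\n    # log-probabilities of the right shape stand in for it here.\n    log_p_x_0 = torch.log_softmax(torch.randn(1, num_vec_classes - 1, num_latent_pixels), dim=1)\n    sample = scheduler.step(log_p_x_0, t, sample).prev_sample  # (1, num_latent_pixels) class indices\n"
  },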
  {
    "path": "diffusers/training_utils.py",
    "content": "import contextlib\nimport copy\nimport os\nimport random\nfrom typing import Any, Dict, Iterable, Optional, Union\n\nimport numpy as np\nimport torch\n\nfrom .utils import deprecate, is_transformers_available\n\n\nif is_transformers_available():\n    import transformers\n\n\ndef enable_full_determinism(seed: int):\n    \"\"\"\n    Helper function for reproducible behavior during distributed training. See\n    - https://pytorch.org/docs/stable/notes/randomness.html for pytorch\n    \"\"\"\n    # set seed first\n    set_seed(seed)\n\n    #  Enable PyTorch deterministic mode. This potentially requires either the environment\n    #  variable 'CUDA_LAUNCH_BLOCKING' or 'CUBLAS_WORKSPACE_CONFIG' to be set,\n    # depending on the CUDA version, so we set them both here\n    os.environ[\"CUDA_LAUNCH_BLOCKING\"] = \"1\"\n    os.environ[\"CUBLAS_WORKSPACE_CONFIG\"] = \":16:8\"\n    torch.use_deterministic_algorithms(True)\n\n    # Enable CUDNN deterministic mode\n    torch.backends.cudnn.deterministic = True\n    torch.backends.cudnn.benchmark = False\n\n\ndef set_seed(seed: int):\n    \"\"\"\n    Args:\n    Helper function for reproducible behavior to set the seed in `random`, `numpy`, `torch`.\n        seed (`int`): The seed to set.\n    \"\"\"\n    random.seed(seed)\n    np.random.seed(seed)\n    torch.manual_seed(seed)\n    torch.cuda.manual_seed_all(seed)\n    # ^^ safe to call this function even if cuda is not available\n\n\n# Adapted from torch-ema https://github.com/fadel/pytorch_ema/blob/master/torch_ema/ema.py#L14\nclass EMAModel:\n    \"\"\"\n    Exponential Moving Average of models weights\n    \"\"\"\n\n    def __init__(\n        self,\n        parameters: Iterable[torch.nn.Parameter],\n        decay: float = 0.9999,\n        min_decay: float = 0.0,\n        update_after_step: int = 0,\n        use_ema_warmup: bool = False,\n        inv_gamma: Union[float, int] = 1.0,\n        power: Union[float, int] = 2 / 3,\n        model_cls: Optional[Any] = None,\n        model_config: Dict[str, Any] = None,\n        **kwargs,\n    ):\n        \"\"\"\n        Args:\n            parameters (Iterable[torch.nn.Parameter]): The parameters to track.\n            decay (float): The decay factor for the exponential moving average.\n            min_decay (float): The minimum decay factor for the exponential moving average.\n            update_after_step (int): The number of steps to wait before starting to update the EMA weights.\n            use_ema_warmup (bool): Whether to use EMA warmup.\n            inv_gamma (float):\n                Inverse multiplicative factor of EMA warmup. Default: 1. Only used if `use_ema_warmup` is True.\n            power (float): Exponential factor of EMA warmup. Default: 2/3. Only used if `use_ema_warmup` is True.\n            device (Optional[Union[str, torch.device]]): The device to store the EMA weights on. If None, the EMA\n                        weights will be stored on CPU.\n\n        @crowsonkb's notes on EMA Warmup:\n            If gamma=1 and power=1, implements a simple average. 
gamma=1, power=2/3 are good values for models you plan\n            to train for a million or more steps (reaches decay factor 0.999 at 31.6K steps, 0.9999 at 1M steps),\n            gamma=1, power=3/4 for models you plan to train for less (reaches decay factor 0.999 at 10K steps, 0.9999\n            at 215.4k steps).\n        \"\"\"\n\n        if isinstance(parameters, torch.nn.Module):\n            deprecation_message = (\n                \"Passing a `torch.nn.Module` to `ExponentialMovingAverage` is deprecated. \"\n                \"Please pass the parameters of the module instead.\"\n            )\n            deprecate(\n                \"passing a `torch.nn.Module` to `ExponentialMovingAverage`\",\n                \"1.0.0\",\n                deprecation_message,\n                standard_warn=False,\n            )\n            parameters = parameters.parameters()\n\n            # set use_ema_warmup to True if a torch.nn.Module is passed for backwards compatibility\n            use_ema_warmup = True\n\n        if kwargs.get(\"max_value\", None) is not None:\n            deprecation_message = \"The `max_value` argument is deprecated. Please use `decay` instead.\"\n            deprecate(\"max_value\", \"1.0.0\", deprecation_message, standard_warn=False)\n            decay = kwargs[\"max_value\"]\n\n        if kwargs.get(\"min_value\", None) is not None:\n            deprecation_message = \"The `min_value` argument is deprecated. Please use `min_decay` instead.\"\n            deprecate(\"min_value\", \"1.0.0\", deprecation_message, standard_warn=False)\n            min_decay = kwargs[\"min_value\"]\n\n        parameters = list(parameters)\n        self.shadow_params = [p.clone().detach() for p in parameters]\n\n        if kwargs.get(\"device\", None) is not None:\n            deprecation_message = \"The `device` argument is deprecated. 
Please use `to` instead.\"\n            deprecate(\"device\", \"1.0.0\", deprecation_message, standard_warn=False)\n            self.to(device=kwargs[\"device\"])\n\n        self.temp_stored_params = None\n\n        self.decay = decay\n        self.min_decay = min_decay\n        self.update_after_step = update_after_step\n        self.use_ema_warmup = use_ema_warmup\n        self.inv_gamma = inv_gamma\n        self.power = power\n        self.optimization_step = 0\n        self.cur_decay_value = None  # set in `step()`\n\n        self.model_cls = model_cls\n        self.model_config = model_config\n\n    @classmethod\n    def from_pretrained(cls, path, model_cls) -> \"EMAModel\":\n        _, ema_kwargs = model_cls.load_config(path, return_unused_kwargs=True)\n        model = model_cls.from_pretrained(path)\n\n        ema_model = cls(model.parameters(), model_cls=model_cls, model_config=model.config)\n\n        ema_model.load_state_dict(ema_kwargs)\n        return ema_model\n\n    def save_pretrained(self, path):\n        if self.model_cls is None:\n            raise ValueError(\"`save_pretrained` can only be used if `model_cls` was defined at __init__.\")\n\n        if self.model_config is None:\n            raise ValueError(\"`save_pretrained` can only be used if `model_config` was defined at __init__.\")\n\n        model = self.model_cls.from_config(self.model_config)\n        state_dict = self.state_dict()\n        state_dict.pop(\"shadow_params\", None)\n\n        model.register_to_config(**state_dict)\n        self.copy_to(model.parameters())\n        model.save_pretrained(path)\n\n    def get_decay(self, optimization_step: int) -> float:\n        \"\"\"\n        Compute the decay factor for the exponential moving average.\n        \"\"\"\n        step = max(0, optimization_step - self.update_after_step - 1)\n\n        if step <= 0:\n            return 0.0\n\n        if self.use_ema_warmup:\n            cur_decay_value = 1 - (1 + step / self.inv_gamma) ** -self.power\n        else:\n            cur_decay_value = (1 + step) / (10 + step)\n\n        cur_decay_value = min(cur_decay_value, self.decay)\n        # make sure decay is not smaller than min_decay\n        cur_decay_value = max(cur_decay_value, self.min_decay)\n        return cur_decay_value\n\n    @torch.no_grad()\n    def step(self, parameters: Iterable[torch.nn.Parameter]):\n        if isinstance(parameters, torch.nn.Module):\n            deprecation_message = (\n                \"Passing a `torch.nn.Module` to `ExponentialMovingAverage.step` is deprecated. 
\"\n                \"Please pass the parameters of the module instead.\"\n            )\n            deprecate(\n                \"passing a `torch.nn.Module` to `ExponentialMovingAverage.step`\",\n                \"1.0.0\",\n                deprecation_message,\n                standard_warn=False,\n            )\n            parameters = parameters.parameters()\n\n        parameters = list(parameters)\n\n        self.optimization_step += 1\n\n        # Compute the decay factor for the exponential moving average.\n        decay = self.decay #self.get_decay(self.optimization_step)\n        self.cur_decay_value = decay\n        one_minus_decay = 1 - decay\n\n        context_manager = contextlib.nullcontext\n        if is_transformers_available() and transformers.deepspeed.is_deepspeed_zero3_enabled():\n            import deepspeed\n\n        for s_param, param in zip(self.shadow_params, parameters):\n            if is_transformers_available() and transformers.deepspeed.is_deepspeed_zero3_enabled():\n                context_manager = deepspeed.zero.GatheredParameters(param, modifier_rank=None)\n\n            with context_manager():\n                if param.requires_grad:\n                    #s_param.sub_(one_minus_decay * (s_param - param))\n                    s_param.data = one_minus_decay * param.data + decay * s_param.data\n                else:\n                    s_param.copy_(param)\n\n    def copy_to(self, parameters: Iterable[torch.nn.Parameter]) -> None:\n        \"\"\"\n        Copy current averaged parameters into given collection of parameters.\n\n        Args:\n            parameters: Iterable of `torch.nn.Parameter`; the parameters to be\n                updated with the stored moving averages. If `None`, the parameters with which this\n                `ExponentialMovingAverage` was initialized will be used.\n        \"\"\"\n        parameters = list(parameters)\n        for s_param, param in zip(self.shadow_params, parameters):\n            param.data.copy_(s_param.to(param.device).data)\n\n    def to(self, device=None, dtype=None) -> None:\n        r\"\"\"Move internal buffers of the ExponentialMovingAverage to `device`.\n\n        Args:\n            device: like `device` argument to `torch.Tensor.to`\n        \"\"\"\n        # .to() on the tensors handles None correctly\n        self.shadow_params = [\n            p.to(device=device, dtype=dtype) if p.is_floating_point() else p.to(device=device)\n            for p in self.shadow_params\n        ]\n\n    def state_dict(self) -> dict:\n        r\"\"\"\n        Returns the state of the ExponentialMovingAverage as a dict. 
This method is used by accelerate during\n        checkpointing to save the ema state dict.\n        \"\"\"\n        # Following PyTorch conventions, references to tensors are returned:\n        # \"returns a reference to the state and not its copy!\" -\n        # https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict\n        return {\n            \"decay\": self.decay,\n            \"min_decay\": self.min_decay,\n            \"optimization_step\": self.optimization_step,\n            \"update_after_step\": self.update_after_step,\n            \"use_ema_warmup\": self.use_ema_warmup,\n            \"inv_gamma\": self.inv_gamma,\n            \"power\": self.power,\n            \"shadow_params\": self.shadow_params,\n        }\n\n    def store(self, parameters: Iterable[torch.nn.Parameter]) -> None:\n        r\"\"\"\n        Save the current parameters for restoring later.\n\n        Args:\n            parameters: Iterable of `torch.nn.Parameter`; the parameters to be\n                temporarily stored.\n        \"\"\"\n        self.temp_stored_params = [param.detach().cpu().clone() for param in parameters]\n\n    def restore(self, parameters: Iterable[torch.nn.Parameter]) -> None:\n        r\"\"\"\n        Restore the parameters stored with the `store` method. Useful to validate the model with EMA parameters\n        without affecting the original optimization process. Store the parameters before the `copy_to()` method.\n        After validation (or model saving), use this to restore the former parameters.\n\n        Args:\n            parameters: Iterable of `torch.nn.Parameter`; the parameters to be\n                updated with the stored parameters. If `None`, the parameters with which this\n                `ExponentialMovingAverage` was initialized will be used.\n        \"\"\"\n        if self.temp_stored_params is None:\n            raise RuntimeError(\"This ExponentialMovingAverage has no `store()`ed weights \" \"to `restore()`\")\n        for c_param, param in zip(self.temp_stored_params, parameters):\n            param.data.copy_(c_param.data)\n\n        # Better memory-wise.\n        self.temp_stored_params = None\n\n    def load_state_dict(self, state_dict: dict) -> None:\n        r\"\"\"\n        Loads the ExponentialMovingAverage state. This method is used by accelerate during checkpointing to load the\n        ema state dict.\n\n        Args:\n            state_dict (dict): EMA state. 
Should be an object returned\n                from a call to :meth:`state_dict`.\n        \"\"\"\n        # deepcopy, to be consistent with module API\n        state_dict = copy.deepcopy(state_dict)\n\n        self.decay = state_dict.get(\"decay\", self.decay)\n        if self.decay < 0.0 or self.decay > 1.0:\n            raise ValueError(\"Decay must be between 0 and 1\")\n\n        self.min_decay = state_dict.get(\"min_decay\", self.min_decay)\n        if not isinstance(self.min_decay, float):\n            raise ValueError(\"Invalid min_decay\")\n\n        self.optimization_step = state_dict.get(\"optimization_step\", self.optimization_step)\n        if not isinstance(self.optimization_step, int):\n            raise ValueError(\"Invalid optimization_step\")\n\n        self.update_after_step = state_dict.get(\"update_after_step\", self.update_after_step)\n        if not isinstance(self.update_after_step, int):\n            raise ValueError(\"Invalid update_after_step\")\n\n        self.use_ema_warmup = state_dict.get(\"use_ema_warmup\", self.use_ema_warmup)\n        if not isinstance(self.use_ema_warmup, bool):\n            raise ValueError(\"Invalid use_ema_warmup\")\n\n        self.inv_gamma = state_dict.get(\"inv_gamma\", self.inv_gamma)\n        if not isinstance(self.inv_gamma, (float, int)):\n            raise ValueError(\"Invalid inv_gamma\")\n\n        self.power = state_dict.get(\"power\", self.power)\n        if not isinstance(self.power, (float, int)):\n            raise ValueError(\"Invalid power\")\n\n        shadow_params = state_dict.get(\"shadow_params\", None)\n        if shadow_params is not None:\n            self.shadow_params = shadow_params\n            if not isinstance(self.shadow_params, list):\n                raise ValueError(\"shadow_params must be a list\")\n            if not all(isinstance(p, torch.Tensor) for p in self.shadow_params):\n                raise ValueError(\"shadow_params must all be Tensors\")\n"
  },
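  {
    "path": "sketches/ema_usage_sketch.py",
    "content": "# Hypothetical usage sketch, NOT part of the original repo: illustrates how the\n# EMAModel defined above is typically driven during training (call step() after\n# every optimizer update; use store()/copy_to()/restore() around evaluation).\n# Assumptions: EMAModel is importable from diffusers.training_utils as in\n# upstream diffusers; the toy model, optimizer and loop are illustrative only.\nimport torch\n\nfrom diffusers.training_utils import EMAModel\n\nmodel = torch.nn.Linear(4, 4)\noptimizer = torch.optim.SGD(model.parameters(), lr=1e-2)\nema = EMAModel(model.parameters(), decay=0.9999)\n\nfor _ in range(10):\n    loss = model(torch.randn(2, 4)).sum()\n    loss.backward()\n    optimizer.step()\n    optimizer.zero_grad()\n    # Update the shadow parameters from the freshly optimized weights.\n    ema.step(model.parameters())\n\n# Evaluate with the averaged weights, then restore the training weights.\nema.store(model.parameters())\nema.copy_to(model.parameters())\n# ... run validation with `model` here ...\nema.restore(model.parameters())\n"
  },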
  {
    "path": "diffusers/utils/__init__.py",
    "content": "# Copyright 2023 The HuggingFace Inc. team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport os\n\nfrom packaging import version\n\nfrom .. import __version__\nfrom .accelerate_utils import apply_forward_hook\nfrom .constants import (\n    CONFIG_NAME,\n    DEPRECATED_REVISION_ARGS,\n    DIFFUSERS_CACHE,\n    DIFFUSERS_DYNAMIC_MODULE_NAME,\n    FLAX_WEIGHTS_NAME,\n    HF_MODULES_CACHE,\n    HUGGINGFACE_CO_RESOLVE_ENDPOINT,\n    ONNX_EXTERNAL_WEIGHTS_NAME,\n    ONNX_WEIGHTS_NAME,\n    SAFETENSORS_WEIGHTS_NAME,\n    TEXT_ENCODER_TARGET_MODULES,\n    WEIGHTS_NAME,\n)\nfrom .deprecation_utils import deprecate\nfrom .doc_utils import replace_example_docstring\nfrom .dynamic_modules_utils import get_class_from_dynamic_module\nfrom .hub_utils import (\n    HF_HUB_OFFLINE,\n    _add_variant,\n    _get_model_file,\n    extract_commit_hash,\n    http_user_agent,\n)\nfrom .import_utils import (\n    BACKENDS_MAPPING,\n    ENV_VARS_TRUE_AND_AUTO_VALUES,\n    ENV_VARS_TRUE_VALUES,\n    USE_JAX,\n    USE_TF,\n    USE_TORCH,\n    DummyObject,\n    OptionalDependencyNotAvailable,\n    is_accelerate_available,\n    is_accelerate_version,\n    is_bs4_available,\n    is_flax_available,\n    is_ftfy_available,\n    is_inflect_available,\n    is_k_diffusion_available,\n    is_k_diffusion_version,\n    is_librosa_available,\n    is_note_seq_available,\n    is_omegaconf_available,\n    is_onnx_available,\n    is_safetensors_available,\n    is_scipy_available,\n    is_tensorboard_available,\n    is_tf_available,\n    is_torch_available,\n    is_torch_version,\n    is_torchsde_available,\n    is_transformers_available,\n    is_transformers_version,\n    is_unidecode_available,\n    is_wandb_available,\n    is_xformers_available,\n    requires_backends,\n)\nfrom .logging import get_logger\nfrom .outputs import BaseOutput\nfrom .pil_utils import PIL_INTERPOLATION, numpy_to_pil, pt_to_pil\nfrom .torch_utils import is_compiled_module, randn_tensor\n\n\nif is_torch_available():\n    from .testing_utils import (\n        floats_tensor,\n        load_hf_numpy,\n        load_image,\n        load_numpy,\n        load_pt,\n        nightly,\n        parse_flag_from_env,\n        print_tensor_test,\n        require_torch_2,\n        require_torch_gpu,\n        skip_mps,\n        slow,\n        torch_all_close,\n        torch_device,\n    )\n    from .torch_utils import maybe_allow_in_graph\n\nfrom .testing_utils import export_to_video\n\n\nlogger = get_logger(__name__)\n\n\ndef check_min_version(min_version):\n    if version.parse(__version__) < version.parse(min_version):\n        if \"dev\" in min_version:\n            error_message = (\n                \"This example requires a source install from HuggingFace diffusers (see \"\n                \"`https://huggingface.co/docs/diffusers/installation#install-from-source`),\"\n            )\n        else:\n            error_message = f\"This example requires a minimum version of {min_version},\"\n        error_message += f\" 
but the version found is {__version__}.\\n\"\n        raise ImportError(error_message)\n"
  },
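  {
    "path": "sketches/check_min_version_sketch.py",
    "content": "# Hypothetical usage sketch, NOT part of the original repo: example scripts call\n# check_min_version (defined in diffusers/utils/__init__.py above) at import time\n# to fail fast when the installed diffusers is too old. The version string below\n# is illustrative.\nfrom diffusers.utils import check_min_version\n\n# Raises ImportError if the installed diffusers version is older than this.\ncheck_min_version(\"0.15.0.dev0\")\n"
  },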
  {
    "path": "diffusers/utils/accelerate_utils.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nAccelerate utilities: Utilities related to accelerate\n\"\"\"\n\nfrom packaging import version\n\nfrom .import_utils import is_accelerate_available\n\n\nif is_accelerate_available():\n    import accelerate\n\n\ndef apply_forward_hook(method):\n    \"\"\"\n    Decorator that applies a registered CpuOffload hook to an arbitrary function rather than `forward`. This is useful\n    for cases where a PyTorch module provides functions other than `forward` that should trigger a move to the\n    appropriate acceleration device. This is the case for `encode` and `decode` in [`AutoencoderKL`].\n\n    This decorator looks inside the internal `_hf_hook` property to find a registered offload hook.\n\n    :param method: The method to decorate. This method should be a method of a PyTorch module.\n    \"\"\"\n    if not is_accelerate_available():\n        return method\n    accelerate_version = version.parse(accelerate.__version__).base_version\n    if version.parse(accelerate_version) < version.parse(\"0.17.0\"):\n        return method\n\n    def wrapper(self, *args, **kwargs):\n        if hasattr(self, \"_hf_hook\") and hasattr(self._hf_hook, \"pre_forward\"):\n            self._hf_hook.pre_forward(self)\n        return method(self, *args, **kwargs)\n\n    return wrapper\n"
  },
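  {
    "path": "sketches/apply_forward_hook_sketch.py",
    "content": "# Hypothetical sketch, NOT part of the original repo: shows the intended use of\n# apply_forward_hook on a module method other than forward, so that an accelerate\n# CpuOffload hook attached as `_hf_hook` is triggered before the method runs.\n# `TinyCodec` is an illustrative stand-in, not a real diffusers class.\nimport torch\n\nfrom diffusers.utils.accelerate_utils import apply_forward_hook\n\n\nclass TinyCodec(torch.nn.Module):\n    def __init__(self):\n        super().__init__()\n        self.proj = torch.nn.Linear(8, 8)\n\n    @apply_forward_hook\n    def encode(self, x):\n        # With accelerate >= 0.17.0 and a registered `_hf_hook`, pre_forward runs\n        # before this body; otherwise the method is returned undecorated.\n        return self.proj(x)\n\n\nlatents = TinyCodec().encode(torch.randn(1, 8))\n"
  },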
  {
    "path": "diffusers/utils/constants.py",
    "content": "# Copyright 2023 The HuggingFace Inc. team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport os\n\nfrom huggingface_hub.constants import HUGGINGFACE_HUB_CACHE, hf_cache_home\n\n\ndefault_cache_path = HUGGINGFACE_HUB_CACHE\n\n\nCONFIG_NAME = \"config.json\"\nWEIGHTS_NAME = \"diffusion_pytorch_model.bin\"\nFLAX_WEIGHTS_NAME = \"diffusion_flax_model.msgpack\"\nONNX_WEIGHTS_NAME = \"model.onnx\"\nSAFETENSORS_WEIGHTS_NAME = \"diffusion_pytorch_model.safetensors\"\nONNX_EXTERNAL_WEIGHTS_NAME = \"weights.pb\"\nHUGGINGFACE_CO_RESOLVE_ENDPOINT = \"https://huggingface.co\"\nDIFFUSERS_CACHE = default_cache_path\nDIFFUSERS_DYNAMIC_MODULE_NAME = \"diffusers_modules\"\nHF_MODULES_CACHE = os.getenv(\"HF_MODULES_CACHE\", os.path.join(hf_cache_home, \"modules\"))\nDEPRECATED_REVISION_ARGS = [\"fp16\", \"non-ema\"]\nTEXT_ENCODER_TARGET_MODULES = [\"q_proj\", \"v_proj\", \"k_proj\", \"out_proj\"]\n"
  },
  {
    "path": "diffusers/utils/deprecation_utils.py",
    "content": "import inspect\nimport warnings\nfrom typing import Any, Dict, Optional, Union\n\nfrom packaging import version\n\n\ndef deprecate(*args, take_from: Optional[Union[Dict, Any]] = None, standard_warn=True, stacklevel=2):\n    from .. import __version__\n\n    deprecated_kwargs = take_from\n    values = ()\n    if not isinstance(args[0], tuple):\n        args = (args,)\n\n    for attribute, version_name, message in args:\n        if version.parse(version.parse(__version__).base_version) >= version.parse(version_name):\n            raise ValueError(\n                f\"The deprecation tuple {(attribute, version_name, message)} should be removed since diffusers'\"\n                f\" version {__version__} is >= {version_name}\"\n            )\n\n        warning = None\n        if isinstance(deprecated_kwargs, dict) and attribute in deprecated_kwargs:\n            values += (deprecated_kwargs.pop(attribute),)\n            warning = f\"The `{attribute}` argument is deprecated and will be removed in version {version_name}.\"\n        elif hasattr(deprecated_kwargs, attribute):\n            values += (getattr(deprecated_kwargs, attribute),)\n            warning = f\"The `{attribute}` attribute is deprecated and will be removed in version {version_name}.\"\n        elif deprecated_kwargs is None:\n            warning = f\"`{attribute}` is deprecated and will be removed in version {version_name}.\"\n\n        if warning is not None:\n            warning = warning + \" \" if standard_warn else \"\"\n            warnings.warn(warning + message, FutureWarning, stacklevel=stacklevel)\n\n    if isinstance(deprecated_kwargs, dict) and len(deprecated_kwargs) > 0:\n        call_frame = inspect.getouterframes(inspect.currentframe())[1]\n        filename = call_frame.filename\n        line_number = call_frame.lineno\n        function = call_frame.function\n        key, value = next(iter(deprecated_kwargs.items()))\n        raise TypeError(f\"{function} in {filename} line {line_number-1} got an unexpected keyword argument `{key}`\")\n\n    if len(values) == 0:\n        return\n    elif len(values) == 1:\n        return values[0]\n    return values\n"
  },
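  {
    "path": "sketches/deprecate_usage_sketch.py",
    "content": "# Hypothetical sketch, NOT part of the original repo: shows how diffusers code\n# calls `deprecate` to warn about a removed keyword argument and to recover its\n# value from **kwargs via `take_from`. The function `resize` and its arguments\n# are illustrative.\nfrom diffusers.utils import deprecate\n\n\ndef resize(image, size=None, **kwargs):\n    # Emits a FutureWarning if `scale` was passed and pops it out of kwargs;\n    # returns None when the deprecated argument was not supplied.\n    scale = deprecate(\"scale\", \"1.0.0\", \"Use `size` instead.\", take_from=kwargs)\n    if scale is not None:\n        size = scale\n    return image, size\n"
  },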
  {
    "path": "diffusers/utils/doc_utils.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nDoc utilities: Utilities related to documentation\n\"\"\"\nimport re\n\n\ndef replace_example_docstring(example_docstring):\n    def docstring_decorator(fn):\n        func_doc = fn.__doc__\n        lines = func_doc.split(\"\\n\")\n        i = 0\n        while i < len(lines) and re.search(r\"^\\s*Examples?:\\s*$\", lines[i]) is None:\n            i += 1\n        if i < len(lines):\n            lines[i] = example_docstring\n            func_doc = \"\\n\".join(lines)\n        else:\n            raise ValueError(\n                f\"The function {fn} should have an empty 'Examples:' in its docstring as placeholder, \"\n                f\"current docstring is:\\n{func_doc}\"\n            )\n        fn.__doc__ = func_doc\n        return fn\n\n    return docstring_decorator\n"
  },
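  {
    "path": "sketches/replace_example_docstring_sketch.py",
    "content": "# Hypothetical sketch, NOT part of the original repo: shows how\n# replace_example_docstring (from diffusers/utils/doc_utils.py above) fills an\n# empty `Examples:` placeholder in a function's docstring. Names and docstring\n# text are illustrative.\nfrom diffusers.utils import replace_example_docstring\n\nEXAMPLE_DOC_STRING = \"\"\"\n        Examples:\n            ```py\n            >>> generate(\"a prompt\")\n            ```\n\"\"\"\n\n\n@replace_example_docstring(EXAMPLE_DOC_STRING)\ndef generate(prompt):\n    \"\"\"\n    Generate something from a text prompt.\n\n    Examples:\n    \"\"\"\n    return prompt\n\n\n# The placeholder `Examples:` line is now replaced by EXAMPLE_DOC_STRING.\nprint(generate.__doc__)\n"
  },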
  {
    "path": "diffusers/utils/dummy_flax_and_transformers_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass FlaxStableDiffusionControlNetPipeline(metaclass=DummyObject):\n    _backends = [\"flax\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n\nclass FlaxStableDiffusionImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"flax\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n\nclass FlaxStableDiffusionInpaintPipeline(metaclass=DummyObject):\n    _backends = [\"flax\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n\nclass FlaxStableDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"flax\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\", \"transformers\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_flax_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass FlaxControlNetModel(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxModelMixin(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxUNet2DConditionModel(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxAutoencoderKL(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxDDIMScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxDDPMScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxDPMSolverMultistepScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxKarrasVeScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        
requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxLMSDiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxPNDMScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxSchedulerMixin(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n\nclass FlaxScoreSdeVeScheduler(metaclass=DummyObject):\n    _backends = [\"flax\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"flax\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"flax\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_note_seq_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass MidiProcessor(metaclass=DummyObject):\n    _backends = [\"note_seq\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"note_seq\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"note_seq\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"note_seq\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_onnx_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass OnnxRuntimeModel(metaclass=DummyObject):\n    _backends = [\"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"onnx\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_pt_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass AutoencoderKL(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass ControlNetModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass ModelMixin(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass PriorTransformer(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass T5FilmDecoder(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass Transformer2DModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass UNet1DModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass UNet2DConditionModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass UNet2DModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n 
   @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass UNet3DConditionModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass VQModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\ndef get_constant_schedule(*args, **kwargs):\n    requires_backends(get_constant_schedule, [\"torch\"])\n\n\ndef get_constant_schedule_with_warmup(*args, **kwargs):\n    requires_backends(get_constant_schedule_with_warmup, [\"torch\"])\n\n\ndef get_cosine_schedule_with_warmup(*args, **kwargs):\n    requires_backends(get_cosine_schedule_with_warmup, [\"torch\"])\n\n\ndef get_cosine_with_hard_restarts_schedule_with_warmup(*args, **kwargs):\n    requires_backends(get_cosine_with_hard_restarts_schedule_with_warmup, [\"torch\"])\n\n\ndef get_linear_schedule_with_warmup(*args, **kwargs):\n    requires_backends(get_linear_schedule_with_warmup, [\"torch\"])\n\n\ndef get_polynomial_decay_schedule_with_warmup(*args, **kwargs):\n    requires_backends(get_polynomial_decay_schedule_with_warmup, [\"torch\"])\n\n\ndef get_scheduler(*args, **kwargs):\n    requires_backends(get_scheduler, [\"torch\"])\n\n\nclass AudioPipelineOutput(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DanceDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DDIMPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DDPMPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    
@classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DiTPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass ImagePipelineOutput(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass KarrasVePipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass LDMPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass LDMSuperResolutionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass PNDMPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass RePaintPipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass ScoreSdeVePipeline(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DDIMInverseScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    
@classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DDIMScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DDPMScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DEISMultistepScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DPMSolverMultistepInverseScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DPMSolverMultistepScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass DPMSolverSinglestepScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass EulerAncestralDiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass EulerDiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass HeunDiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n   
     requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass IPNDMScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass KarrasVeScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass KDPM2AncestralDiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass KDPM2DiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass PNDMScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass RePaintScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass SchedulerMixin(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass ScoreSdeVeScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass UnCLIPScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n    
    requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass UniPCMultistepScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass VQDiffusionScheduler(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n\nclass EMAModel(metaclass=DummyObject):\n    _backends = [\"torch\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_torch_and_librosa_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass AudioDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"librosa\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"librosa\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"librosa\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"librosa\"])\n\n\nclass Mel(metaclass=DummyObject):\n    _backends = [\"torch\", \"librosa\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"librosa\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"librosa\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"librosa\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_torch_and_scipy_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass LMSDiscreteScheduler(metaclass=DummyObject):\n    _backends = [\"torch\", \"scipy\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"scipy\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"scipy\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"scipy\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_torch_and_torchsde_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass DPMSolverSDEScheduler(metaclass=DummyObject):\n    _backends = [\"torch\", \"torchsde\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"torchsde\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"torchsde\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"torchsde\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_torch_and_transformers_and_k_diffusion_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass StableDiffusionKDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"k_diffusion\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"k_diffusion\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"k_diffusion\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"k_diffusion\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_torch_and_transformers_and_onnx_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass OnnxStableDiffusionImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n\nclass OnnxStableDiffusionInpaintPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n\nclass OnnxStableDiffusionInpaintPipelineLegacy(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n\nclass OnnxStableDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n\nclass OnnxStableDiffusionUpscalePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n\nclass StableDiffusionOnnxPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\", \"onnx\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\", \"onnx\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_torch_and_transformers_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass AltDiffusionImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass AltDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass AudioLDMPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass CycleDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass IFImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass IFImg2ImgSuperResolutionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass IFInpaintingPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass IFInpaintingSuperResolutionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", 
\"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass IFPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass IFSuperResolutionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass LDMTextToImagePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass PaintByExamplePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass SemanticStableDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionAttendAndExcitePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionControlNetImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, 
[\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionControlNetInpaintPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionControlNetPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionDepth2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionDiffEditPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionImageVariationPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionInpaintPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass 
StableDiffusionInpaintPipelineLegacy(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionInstructPix2PixPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionLatentUpscalePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionModelEditingPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionPanoramaPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionPipelineSafe(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionPix2PixZeroPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, 
[\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionSAGPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableDiffusionUpscalePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableUnCLIPImg2ImgPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass StableUnCLIPPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass TextToVideoSDPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass TextToVideoZeroPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass UnCLIPImageVariationPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        
requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass UnCLIPPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass VersatileDiffusionDualGuidedPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass VersatileDiffusionImageVariationPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass VersatileDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass VersatileDiffusionTextToImagePipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n\nclass VQDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"torch\", \"transformers\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"torch\", \"transformers\"])\n"
  },
  {
    "path": "diffusers/utils/dummy_transformers_and_torch_and_note_seq_objects.py",
    "content": "# This file is autogenerated by the command `make fix-copies`, do not edit.\nfrom ..utils import DummyObject, requires_backends\n\n\nclass SpectrogramDiffusionPipeline(metaclass=DummyObject):\n    _backends = [\"transformers\", \"torch\", \"note_seq\"]\n\n    def __init__(self, *args, **kwargs):\n        requires_backends(self, [\"transformers\", \"torch\", \"note_seq\"])\n\n    @classmethod\n    def from_config(cls, *args, **kwargs):\n        requires_backends(cls, [\"transformers\", \"torch\", \"note_seq\"])\n\n    @classmethod\n    def from_pretrained(cls, *args, **kwargs):\n        requires_backends(cls, [\"transformers\", \"torch\", \"note_seq\"])\n"
  },
  {
    "path": "diffusers/utils/dynamic_modules_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Utilities to dynamically load objects from the Hub.\"\"\"\n\nimport importlib\nimport inspect\nimport json\nimport os\nimport re\nimport shutil\nimport sys\nfrom distutils.version import StrictVersion\nfrom pathlib import Path\nfrom typing import Dict, Optional, Union\nfrom urllib import request\n\nfrom huggingface_hub import HfFolder, cached_download, hf_hub_download, model_info\n\nfrom .. import __version__\nfrom . import DIFFUSERS_DYNAMIC_MODULE_NAME, HF_MODULES_CACHE, logging\n\n\nCOMMUNITY_PIPELINES_URL = (\n    \"https://raw.githubusercontent.com/huggingface/diffusers/{revision}/examples/community/{pipeline}.py\"\n)\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\n\ndef get_diffusers_versions():\n    url = \"https://pypi.org/pypi/diffusers/json\"\n    releases = json.loads(request.urlopen(url).read())[\"releases\"].keys()\n    return sorted(releases, key=StrictVersion)\n\n\ndef init_hf_modules():\n    \"\"\"\n    Creates the cache directory for modules with an init, and adds it to the Python path.\n    \"\"\"\n    # This function has already been executed if HF_MODULES_CACHE already is in the Python path.\n    if HF_MODULES_CACHE in sys.path:\n        return\n\n    sys.path.append(HF_MODULES_CACHE)\n    os.makedirs(HF_MODULES_CACHE, exist_ok=True)\n    init_path = Path(HF_MODULES_CACHE) / \"__init__.py\"\n    if not init_path.exists():\n        init_path.touch()\n\n\ndef create_dynamic_module(name: Union[str, os.PathLike]):\n    \"\"\"\n    Creates a dynamic module in the cache directory for modules.\n    \"\"\"\n    init_hf_modules()\n    dynamic_module_path = Path(HF_MODULES_CACHE) / name\n    # If the parent module does not exist yet, recursively create it.\n    if not dynamic_module_path.parent.exists():\n        create_dynamic_module(dynamic_module_path.parent)\n    os.makedirs(dynamic_module_path, exist_ok=True)\n    init_path = dynamic_module_path / \"__init__.py\"\n    if not init_path.exists():\n        init_path.touch()\n\n\ndef get_relative_imports(module_file):\n    \"\"\"\n    Get the list of modules that are relatively imported in a module file.\n\n    Args:\n        module_file (`str` or `os.PathLike`): The module file to inspect.\n    \"\"\"\n    with open(module_file, \"r\", encoding=\"utf-8\") as f:\n        content = f.read()\n\n    # Imports of the form `import .xxx`\n    relative_imports = re.findall(\"^\\s*import\\s+\\.(\\S+)\\s*$\", content, flags=re.MULTILINE)\n    # Imports of the form `from .xxx import yyy`\n    relative_imports += re.findall(\"^\\s*from\\s+\\.(\\S+)\\s+import\", content, flags=re.MULTILINE)\n    # Unique-ify\n    return list(set(relative_imports))\n\n\ndef get_relative_import_files(module_file):\n    \"\"\"\n    Get the list of all files that are needed for a given module. 
Note that this function recurses through the relative\n    imports (if a imports b and b imports c, it will return module files for b and c).\n\n    Args:\n        module_file (`str` or `os.PathLike`): The module file to inspect.\n    \"\"\"\n    no_change = False\n    files_to_check = [module_file]\n    all_relative_imports = []\n\n    # Let's recurse through all relative imports\n    while not no_change:\n        new_imports = []\n        for f in files_to_check:\n            new_imports.extend(get_relative_imports(f))\n\n        module_path = Path(module_file).parent\n        new_import_files = [str(module_path / m) for m in new_imports]\n        new_import_files = [f for f in new_import_files if f not in all_relative_imports]\n        files_to_check = [f\"{f}.py\" for f in new_import_files]\n\n        no_change = len(new_import_files) == 0\n        all_relative_imports.extend(files_to_check)\n\n    return all_relative_imports\n\n\ndef check_imports(filename):\n    \"\"\"\n    Check if the current Python environment contains all the libraries that are imported in a file.\n    \"\"\"\n    with open(filename, \"r\", encoding=\"utf-8\") as f:\n        content = f.read()\n\n    # Imports of the form `import xxx`\n    imports = re.findall(\"^\\s*import\\s+(\\S+)\\s*$\", content, flags=re.MULTILINE)\n    # Imports of the form `from xxx import yyy`\n    imports += re.findall(\"^\\s*from\\s+(\\S+)\\s+import\", content, flags=re.MULTILINE)\n    # Only keep the top-level module\n    imports = [imp.split(\".\")[0] for imp in imports if not imp.startswith(\".\")]\n\n    # Unique-ify and test we got them all\n    imports = list(set(imports))\n    missing_packages = []\n    for imp in imports:\n        try:\n            importlib.import_module(imp)\n        except ImportError:\n            missing_packages.append(imp)\n\n    if len(missing_packages) > 0:\n        raise ImportError(\n            \"This modeling file requires the following packages that were not found in your environment: \"\n            f\"{', '.join(missing_packages)}. Run `pip install {' '.join(missing_packages)}`\"\n        )\n\n    return get_relative_imports(filename)\n\n\ndef get_class_in_module(class_name, module_path):\n    \"\"\"\n    Import a module on the cache directory for modules and extract a class from it.\n    \"\"\"\n    module_path = module_path.replace(os.path.sep, \".\")\n    module = importlib.import_module(module_path)\n\n    if class_name is None:\n        return find_pipeline_class(module)\n    return getattr(module, class_name)\n\n\ndef find_pipeline_class(loaded_module):\n    \"\"\"\n    Retrieve pipeline class that inherits from `DiffusionPipeline`. Note that there has to be exactly one class\n    inheriting from `DiffusionPipeline`.\n    \"\"\"\n    from ..pipelines import DiffusionPipeline\n\n    cls_members = dict(inspect.getmembers(loaded_module, inspect.isclass))\n\n    pipeline_class = None\n    for cls_name, cls in cls_members.items():\n        if (\n            cls_name != DiffusionPipeline.__name__\n            and issubclass(cls, DiffusionPipeline)\n            and cls.__module__.split(\".\")[0] != \"diffusers\"\n        ):\n            if pipeline_class is not None:\n                raise ValueError(\n                    f\"Multiple classes that inherit from {DiffusionPipeline.__name__} have been found:\"\n                    f\" {pipeline_class.__name__}, and {cls_name}. 
Please make sure to define only one in\"\n                    f\" {loaded_module}.\"\n                )\n            pipeline_class = cls\n\n    return pipeline_class\n\n\ndef get_cached_module_file(\n    pretrained_model_name_or_path: Union[str, os.PathLike],\n    module_file: str,\n    cache_dir: Optional[Union[str, os.PathLike]] = None,\n    force_download: bool = False,\n    resume_download: bool = False,\n    proxies: Optional[Dict[str, str]] = None,\n    use_auth_token: Optional[Union[bool, str]] = None,\n    revision: Optional[str] = None,\n    local_files_only: bool = False,\n):\n    \"\"\"\n    Downloads a module from a local folder or a distant repo and returns its path inside the cached\n    dynamic module.\n\n    Args:\n        pretrained_model_name_or_path (`str` or `os.PathLike`):\n            This can be either:\n\n            - a string, the *model id* of a pretrained model configuration hosted inside a model repo on\n              huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or namespaced\n              under a user or organization name, like `dbmdz/bert-base-german-cased`.\n            - a path to a *directory* containing a configuration file saved using the\n              [`~PreTrainedTokenizer.save_pretrained`] method, e.g., `./my_model_directory/`.\n\n        module_file (`str`):\n            The name of the module file containing the class to look for.\n        cache_dir (`str` or `os.PathLike`, *optional*):\n            Path to a directory in which a downloaded pretrained model configuration should be cached if the standard\n            cache should not be used.\n        force_download (`bool`, *optional*, defaults to `False`):\n            Whether or not to force (re-)downloading the configuration files and overriding the cached versions if they\n            exist.\n        resume_download (`bool`, *optional*, defaults to `False`):\n            Whether or not to delete incompletely received files. Attempts to resume the download if such a file exists.\n        proxies (`Dict[str, str]`, *optional*):\n            A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n            'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.\n        use_auth_token (`str` or *bool*, *optional*):\n            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n            when running `transformers-cli login` (stored in `~/.huggingface`).\n        revision (`str`, *optional*, defaults to `\"main\"`):\n            The specific model version to use. 
It can be a branch name, a tag name, or a commit id, since we use a\n            git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n            identifier allowed by git.\n        local_files_only (`bool`, *optional*, defaults to `False`):\n            If `True`, will only try to load the module from local files.\n\n    <Tip>\n\n    You may pass a token in `use_auth_token` if you are not logged in (`huggingface-cli login`) and want to use private\n    or [gated models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n    </Tip>\n\n    Returns:\n        `str`: The path to the module inside the cache.\n    \"\"\"\n    # Download and cache module_file from the repo `pretrained_model_name_or_path` or grab it if it's a local file.\n    pretrained_model_name_or_path = str(pretrained_model_name_or_path)\n\n    module_file_or_url = os.path.join(pretrained_model_name_or_path, module_file)\n\n    if os.path.isfile(module_file_or_url):\n        resolved_module_file = module_file_or_url\n        submodule = \"local\"\n    elif pretrained_model_name_or_path.count(\"/\") == 0:\n        available_versions = get_diffusers_versions()\n        # cut \".dev0\"\n        latest_version = \"v\" + \".\".join(__version__.split(\".\")[:3])\n\n        # retrieve github version that matches\n        if revision is None:\n            revision = latest_version if latest_version[1:] in available_versions else \"main\"\n            logger.info(f\"Defaulting to latest_version: {revision}.\")\n        elif revision in available_versions:\n            revision = f\"v{revision}\"\n        elif revision == \"main\":\n            revision = revision\n        else:\n            raise ValueError(\n                f\"`custom_revision`: {revision} does not exist. 
Please make sure to choose one of\"\n                f\" {', '.join(available_versions + ['main'])}.\"\n            )\n\n        # community pipeline on GitHub\n        github_url = COMMUNITY_PIPELINES_URL.format(revision=revision, pipeline=pretrained_model_name_or_path)\n        try:\n            resolved_module_file = cached_download(\n                github_url,\n                cache_dir=cache_dir,\n                force_download=force_download,\n                proxies=proxies,\n                resume_download=resume_download,\n                local_files_only=local_files_only,\n                use_auth_token=False,\n            )\n            submodule = \"git\"\n            module_file = pretrained_model_name_or_path + \".py\"\n        except EnvironmentError:\n            logger.error(f\"Could not locate the {module_file} inside {pretrained_model_name_or_path}.\")\n            raise\n    else:\n        try:\n            # Load from URL or cache if already cached\n            resolved_module_file = hf_hub_download(\n                pretrained_model_name_or_path,\n                module_file,\n                cache_dir=cache_dir,\n                force_download=force_download,\n                proxies=proxies,\n                resume_download=resume_download,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n            )\n            submodule = os.path.join(\"local\", \"--\".join(pretrained_model_name_or_path.split(\"/\")))\n        except EnvironmentError:\n            logger.error(f\"Could not locate the {module_file} inside {pretrained_model_name_or_path}.\")\n            raise\n\n    # Check we have all the requirements in our environment\n    modules_needed = check_imports(resolved_module_file)\n\n    # Now we move the module inside our cached dynamic modules.\n    full_submodule = DIFFUSERS_DYNAMIC_MODULE_NAME + os.path.sep + submodule\n    create_dynamic_module(full_submodule)\n    submodule_path = Path(HF_MODULES_CACHE) / full_submodule\n    if submodule == \"local\" or submodule == \"git\":\n        # We always copy local files (we could hash the file to see if there was a change, and give them the name of\n        # that hash, to only copy when there is a modification but it seems overkill for now).\n        # The only reason we do the copy is to avoid putting too many folders in sys.path.\n        shutil.copy(resolved_module_file, submodule_path / module_file)\n        for module_needed in modules_needed:\n            module_needed = f\"{module_needed}.py\"\n            shutil.copy(os.path.join(pretrained_model_name_or_path, module_needed), submodule_path / module_needed)\n    else:\n        # Get the commit hash\n        # TODO: we will get this info in the etag soon, so retrieve it from there and not here.\n        if isinstance(use_auth_token, str):\n            token = use_auth_token\n        elif use_auth_token is True:\n            token = HfFolder.get_token()\n        else:\n            token = None\n\n        commit_hash = model_info(pretrained_model_name_or_path, revision=revision, token=token).sha\n\n        # The module file will end up being placed in a subfolder with the git hash of the repo. 
This way we get the\n        # benefit of versioning.\n        submodule_path = submodule_path / commit_hash\n        full_submodule = full_submodule + os.path.sep + commit_hash\n        create_dynamic_module(full_submodule)\n\n        if not (submodule_path / module_file).exists():\n            shutil.copy(resolved_module_file, submodule_path / module_file)\n        # Make sure we also have every file with relative\n        for module_needed in modules_needed:\n            if not (submodule_path / module_needed).exists():\n                get_cached_module_file(\n                    pretrained_model_name_or_path,\n                    f\"{module_needed}.py\",\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    resume_download=resume_download,\n                    proxies=proxies,\n                    use_auth_token=use_auth_token,\n                    revision=revision,\n                    local_files_only=local_files_only,\n                )\n    return os.path.join(full_submodule, module_file)\n\n\ndef get_class_from_dynamic_module(\n    pretrained_model_name_or_path: Union[str, os.PathLike],\n    module_file: str,\n    class_name: Optional[str] = None,\n    cache_dir: Optional[Union[str, os.PathLike]] = None,\n    force_download: bool = False,\n    resume_download: bool = False,\n    proxies: Optional[Dict[str, str]] = None,\n    use_auth_token: Optional[Union[bool, str]] = None,\n    revision: Optional[str] = None,\n    local_files_only: bool = False,\n    **kwargs,\n):\n    \"\"\"\n    Extracts a class from a module file, present in the local folder or repository of a model.\n\n    <Tip warning={true}>\n\n    Calling this function will execute the code in the module file found locally or downloaded from the Hub. It should\n    therefore only be called on trusted repos.\n\n    </Tip>\n\n    Args:\n        pretrained_model_name_or_path (`str` or `os.PathLike`):\n            This can be either:\n\n            - a string, the *model id* of a pretrained model configuration hosted inside a model repo on\n              huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or namespaced\n              under a user or organization name, like `dbmdz/bert-base-german-cased`.\n            - a path to a *directory* containing a configuration file saved using the\n              [`~PreTrainedTokenizer.save_pretrained`] method, e.g., `./my_model_directory/`.\n\n        module_file (`str`):\n            The name of the module file containing the class to look for.\n        class_name (`str`):\n            The name of the class to import in the module.\n        cache_dir (`str` or `os.PathLike`, *optional*):\n            Path to a directory in which a downloaded pretrained model configuration should be cached if the standard\n            cache should not be used.\n        force_download (`bool`, *optional*, defaults to `False`):\n            Whether or not to force to (re-)download the configuration files and override the cached versions if they\n            exist.\n        resume_download (`bool`, *optional*, defaults to `False`):\n            Whether or not to delete incompletely received file. 
Attempts to resume the download if such a file exists.\n        proxies (`Dict[str, str]`, *optional*):\n            A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',\n            'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.\n        use_auth_token (`str` or `bool`, *optional*):\n            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated\n            when running `transformers-cli login` (stored in `~/.huggingface`).\n        revision (`str`, *optional*, defaults to `\"main\"`):\n            The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a\n            git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any\n            identifier allowed by git.\n        local_files_only (`bool`, *optional*, defaults to `False`):\n            If `True`, will only try to load the module from local files.\n\n    <Tip>\n\n    You may pass a token in `use_auth_token` if you are not logged in (`huggingface-cli login`) and want to use private\n    or [gated models](https://huggingface.co/docs/hub/models-gated#gated-models).\n\n    </Tip>\n\n    Returns:\n        `type`: The class, dynamically imported from the module.\n\n    Examples:\n\n    ```python\n    # Download module `modeling.py` from huggingface.co, cache it, then extract the class `MyBertModel` from this\n    # module.\n    cls = get_class_from_dynamic_module(\"sgugger/my-bert-model\", \"modeling.py\", \"MyBertModel\")\n    ```\"\"\"\n    # And lastly we get the class inside our newly created module\n    final_module = get_cached_module_file(\n        pretrained_model_name_or_path,\n        module_file,\n        cache_dir=cache_dir,\n        force_download=force_download,\n        resume_download=resume_download,\n        proxies=proxies,\n        use_auth_token=use_auth_token,\n        revision=revision,\n        local_files_only=local_files_only,\n    )\n    return get_class_in_module(class_name, final_module.replace(\".py\", \"\"))\n"
  },
  {
    "path": "diffusers/utils/hub_utils.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nimport os\nimport re\nimport sys\nimport traceback\nimport warnings\nfrom pathlib import Path\nfrom typing import Dict, Optional, Union\nfrom uuid import uuid4\n\nfrom huggingface_hub import HfFolder, ModelCard, ModelCardData, hf_hub_download, whoami\nfrom huggingface_hub.file_download import REGEX_COMMIT_HASH\nfrom huggingface_hub.utils import (\n    EntryNotFoundError,\n    RepositoryNotFoundError,\n    RevisionNotFoundError,\n    is_jinja_available,\n)\nfrom packaging import version\nfrom requests import HTTPError\n\nfrom .. import __version__\nfrom .constants import (\n    DEPRECATED_REVISION_ARGS,\n    DIFFUSERS_CACHE,\n    HUGGINGFACE_CO_RESOLVE_ENDPOINT,\n    SAFETENSORS_WEIGHTS_NAME,\n    WEIGHTS_NAME,\n)\nfrom .import_utils import (\n    ENV_VARS_TRUE_VALUES,\n    _flax_version,\n    _jax_version,\n    _onnxruntime_version,\n    _torch_version,\n    is_flax_available,\n    is_onnx_available,\n    is_torch_available,\n)\nfrom .logging import get_logger\n\n\nlogger = get_logger(__name__)\n\n\nMODEL_CARD_TEMPLATE_PATH = Path(__file__).parent / \"model_card_template.md\"\nSESSION_ID = uuid4().hex\nHF_HUB_OFFLINE = os.getenv(\"HF_HUB_OFFLINE\", \"\").upper() in ENV_VARS_TRUE_VALUES\nDISABLE_TELEMETRY = os.getenv(\"DISABLE_TELEMETRY\", \"\").upper() in ENV_VARS_TRUE_VALUES\nHUGGINGFACE_CO_TELEMETRY = HUGGINGFACE_CO_RESOLVE_ENDPOINT + \"/api/telemetry/\"\n\n\ndef http_user_agent(user_agent: Union[Dict, str, None] = None) -> str:\n    \"\"\"\n    Formats a user-agent string with basic info about a request.\n    \"\"\"\n    ua = f\"diffusers/{__version__}; python/{sys.version.split()[0]}; session_id/{SESSION_ID}\"\n    if DISABLE_TELEMETRY or HF_HUB_OFFLINE:\n        return ua + \"; telemetry/off\"\n    if is_torch_available():\n        ua += f\"; torch/{_torch_version}\"\n    if is_flax_available():\n        ua += f\"; jax/{_jax_version}\"\n        ua += f\"; flax/{_flax_version}\"\n    if is_onnx_available():\n        ua += f\"; onnxruntime/{_onnxruntime_version}\"\n    # CI will set this value to True\n    if os.environ.get(\"DIFFUSERS_IS_CI\", \"\").upper() in ENV_VARS_TRUE_VALUES:\n        ua += \"; is_ci/true\"\n    if isinstance(user_agent, dict):\n        ua += \"; \" + \"; \".join(f\"{k}/{v}\" for k, v in user_agent.items())\n    elif isinstance(user_agent, str):\n        ua += \"; \" + user_agent\n    return ua\n\n\ndef get_full_repo_name(model_id: str, organization: Optional[str] = None, token: Optional[str] = None):\n    if token is None:\n        token = HfFolder.get_token()\n    if organization is None:\n        username = whoami(token)[\"name\"]\n        return f\"{username}/{model_id}\"\n    else:\n        return f\"{organization}/{model_id}\"\n\n\ndef create_model_card(args, model_name):\n    if not is_jinja_available():\n        raise ValueError(\n            \"Modelcard rendering is based on Jinja templates.\"\n            \" Please make 
sure to have `jinja` installed before using `create_model_card`.\"\n            \" To install it, please run `pip install Jinja2`.\"\n        )\n\n    if hasattr(args, \"local_rank\") and args.local_rank not in [-1, 0]:\n        return\n\n    hub_token = args.hub_token if hasattr(args, \"hub_token\") else None\n    repo_name = get_full_repo_name(model_name, token=hub_token)\n\n    model_card = ModelCard.from_template(\n        card_data=ModelCardData(  # Card metadata object that will be converted to YAML block\n            language=\"en\",\n            license=\"apache-2.0\",\n            library_name=\"diffusers\",\n            tags=[],\n            datasets=args.dataset_name,\n            metrics=[],\n        ),\n        template_path=MODEL_CARD_TEMPLATE_PATH,\n        model_name=model_name,\n        repo_name=repo_name,\n        dataset_name=args.dataset_name if hasattr(args, \"dataset_name\") else None,\n        learning_rate=args.learning_rate,\n        train_batch_size=args.train_batch_size,\n        eval_batch_size=args.eval_batch_size,\n        gradient_accumulation_steps=(\n            args.gradient_accumulation_steps if hasattr(args, \"gradient_accumulation_steps\") else None\n        ),\n        adam_beta1=args.adam_beta1 if hasattr(args, \"adam_beta1\") else None,\n        adam_beta2=args.adam_beta2 if hasattr(args, \"adam_beta2\") else None,\n        adam_weight_decay=args.adam_weight_decay if hasattr(args, \"adam_weight_decay\") else None,\n        adam_epsilon=args.adam_epsilon if hasattr(args, \"adam_epsilon\") else None,\n        lr_scheduler=args.lr_scheduler if hasattr(args, \"lr_scheduler\") else None,\n        lr_warmup_steps=args.lr_warmup_steps if hasattr(args, \"lr_warmup_steps\") else None,\n        ema_inv_gamma=args.ema_inv_gamma if hasattr(args, \"ema_inv_gamma\") else None,\n        ema_power=args.ema_power if hasattr(args, \"ema_power\") else None,\n        ema_max_decay=args.ema_max_decay if hasattr(args, \"ema_max_decay\") else None,\n        mixed_precision=args.mixed_precision,\n    )\n\n    card_path = os.path.join(args.output_dir, \"README.md\")\n    model_card.save(card_path)\n\n\ndef extract_commit_hash(resolved_file: Optional[str], commit_hash: Optional[str] = None):\n    \"\"\"\n    Extracts the commit hash from a resolved filename toward a cache file.\n    \"\"\"\n    if resolved_file is None or commit_hash is not None:\n        return commit_hash\n    resolved_file = str(Path(resolved_file).as_posix())\n    search = re.search(r\"snapshots/([^/]+)/\", resolved_file)\n    if search is None:\n        return None\n    commit_hash = search.groups()[0]\n    return commit_hash if REGEX_COMMIT_HASH.match(commit_hash) else None\n\n\n# Old default cache path, potentially to be migrated.\n# This logic was more or less taken from `transformers`, with the following differences:\n# - Diffusers doesn't use custom environment variables to specify the cache path.\n# - There is no need to migrate the cache format, just move the files to the new location.\nhf_cache_home = os.path.expanduser(\n    os.getenv(\"HF_HOME\", os.path.join(os.getenv(\"XDG_CACHE_HOME\", \"~/.cache\"), \"huggingface\"))\n)\nold_diffusers_cache = os.path.join(hf_cache_home, \"diffusers\")\n\n\ndef move_cache(old_cache_dir: Optional[str] = None, new_cache_dir: Optional[str] = None) -> None:\n    if new_cache_dir is None:\n        new_cache_dir = DIFFUSERS_CACHE\n    if old_cache_dir is None:\n        old_cache_dir = old_diffusers_cache\n\n    old_cache_dir = Path(old_cache_dir).expanduser()\n   
 new_cache_dir = Path(new_cache_dir).expanduser()\n    for old_blob_path in old_cache_dir.glob(\"**/blobs/*\"):\n        if old_blob_path.is_file() and not old_blob_path.is_symlink():\n            new_blob_path = new_cache_dir / old_blob_path.relative_to(old_cache_dir)\n            new_blob_path.parent.mkdir(parents=True, exist_ok=True)\n            os.replace(old_blob_path, new_blob_path)\n            try:\n                os.symlink(new_blob_path, old_blob_path)\n            except OSError:\n                logger.warning(\n                    \"Could not create symlink between old cache and new cache. If you use an older version of diffusers again, files will be re-downloaded.\"\n                )\n    # At this point, old_cache_dir contains symlinks to the new cache (it can still be used).\n\n\ncache_version_file = os.path.join(DIFFUSERS_CACHE, \"version_diffusers_cache.txt\")\nif not os.path.isfile(cache_version_file):\n    cache_version = 0\nelse:\n    with open(cache_version_file) as f:\n        try:\n            cache_version = int(f.read())\n        except ValueError:\n            cache_version = 0\n\nif cache_version < 1:\n    old_cache_is_not_empty = os.path.isdir(old_diffusers_cache) and len(os.listdir(old_diffusers_cache)) > 0\n    if old_cache_is_not_empty:\n        logger.warning(\n            \"The cache for model files in Diffusers v0.14.0 has moved to a new location. Moving your \"\n            \"existing cached models. This is a one-time operation, you can interrupt it or run it \"\n            \"later by calling `diffusers.utils.hub_utils.move_cache()`.\"\n        )\n        try:\n            move_cache()\n        except Exception as e:\n            trace = \"\\n\".join(traceback.format_tb(e.__traceback__))\n            logger.error(\n                f\"There was a problem when trying to move your cache:\\n\\n{trace}\\n{e.__class__.__name__}: {e}\\n\\nPlease \"\n                \"file an issue at https://github.com/huggingface/diffusers/issues/new/choose, copy paste this whole \"\n                \"message and we will do our best to help.\"\n            )\n\nif cache_version < 1:\n    try:\n        os.makedirs(DIFFUSERS_CACHE, exist_ok=True)\n        with open(cache_version_file, \"w\") as f:\n            f.write(\"1\")\n    except Exception:\n        logger.warning(\n            f\"There was a problem when trying to write in your cache folder ({DIFFUSERS_CACHE}). 
Please, ensure \"\n            \"the directory exists and can be written to.\"\n        )\n\n\ndef _add_variant(weights_name: str, variant: Optional[str] = None) -> str:\n    if variant is not None:\n        splits = weights_name.split(\".\")\n        splits = splits[:-1] + [variant] + splits[-1:]\n        weights_name = \".\".join(splits)\n\n    return weights_name\n\n\ndef _get_model_file(\n    pretrained_model_name_or_path,\n    *,\n    weights_name,\n    subfolder,\n    cache_dir,\n    force_download,\n    proxies,\n    resume_download,\n    local_files_only,\n    use_auth_token,\n    user_agent,\n    revision,\n    commit_hash=None,\n):\n    pretrained_model_name_or_path = str(pretrained_model_name_or_path)\n    if os.path.isfile(pretrained_model_name_or_path):\n        return pretrained_model_name_or_path\n    elif os.path.isdir(pretrained_model_name_or_path):\n        if os.path.isfile(os.path.join(pretrained_model_name_or_path, weights_name)):\n            # Load from a PyTorch checkpoint\n            model_file = os.path.join(pretrained_model_name_or_path, weights_name)\n            return model_file\n        elif subfolder is not None and os.path.isfile(\n            os.path.join(pretrained_model_name_or_path, subfolder, weights_name)\n        ):\n            model_file = os.path.join(pretrained_model_name_or_path, subfolder, weights_name)\n            return model_file\n        else:\n            raise EnvironmentError(\n                f\"Error no file named {weights_name} found in directory {pretrained_model_name_or_path}.\"\n            )\n    else:\n        # 1. First check if deprecated way of loading from branches is used\n        if (\n            revision in DEPRECATED_REVISION_ARGS\n            and (weights_name == WEIGHTS_NAME or weights_name == SAFETENSORS_WEIGHTS_NAME)\n            and version.parse(version.parse(__version__).base_version) >= version.parse(\"0.18.0\")\n        ):\n            try:\n                model_file = hf_hub_download(\n                    pretrained_model_name_or_path,\n                    filename=_add_variant(weights_name, revision),\n                    cache_dir=cache_dir,\n                    force_download=force_download,\n                    proxies=proxies,\n                    resume_download=resume_download,\n                    local_files_only=local_files_only,\n                    use_auth_token=use_auth_token,\n                    user_agent=user_agent,\n                    subfolder=subfolder,\n                    revision=revision or commit_hash,\n                )\n                warnings.warn(\n                    f\"Loading the variant {revision} from {pretrained_model_name_or_path} via `revision='{revision}'` is deprecated. Loading instead from `revision='main'` with `variant={revision}`. Loading model variants via `revision='{revision}'` will be removed in diffusers v1. Please use `variant='{revision}'` instead.\",\n                    FutureWarning,\n                )\n                return model_file\n            except:  # noqa: E722\n                warnings.warn(\n                    f\"You are loading the variant {revision} from {pretrained_model_name_or_path} via `revision='{revision}'`. This behavior is deprecated and will be removed in diffusers v1. One should use `variant='{revision}'` instead. However, it appears that {pretrained_model_name_or_path} currently does not have a {_add_variant(weights_name, revision)} file in the 'main' branch of {pretrained_model_name_or_path}. 
\\n The Diffusers team and community would be very grateful if you could open an issue: https://github.com/huggingface/diffusers/issues/new with the title '{pretrained_model_name_or_path} is missing {_add_variant(weights_name, revision)}' so that the correct variant file can be added.\",\n                    FutureWarning,\n                )\n        try:\n            # 2. Load model file as usual\n            model_file = hf_hub_download(\n                pretrained_model_name_or_path,\n                filename=weights_name,\n                cache_dir=cache_dir,\n                force_download=force_download,\n                proxies=proxies,\n                resume_download=resume_download,\n                local_files_only=local_files_only,\n                use_auth_token=use_auth_token,\n                user_agent=user_agent,\n                subfolder=subfolder,\n                revision=revision or commit_hash,\n            )\n            return model_file\n\n        except RepositoryNotFoundError:\n            raise EnvironmentError(\n                f\"{pretrained_model_name_or_path} is not a local folder and is not a valid model identifier \"\n                \"listed on 'https://huggingface.co/models'\\nIf this is a private repository, make sure to pass a \"\n                \"token having permission to this repo with `use_auth_token` or log in with `huggingface-cli \"\n                \"login`.\"\n            )\n        except RevisionNotFoundError:\n            raise EnvironmentError(\n                f\"{revision} is not a valid git identifier (branch name, tag name or commit id) that exists for \"\n                \"this model name. Check the model page at \"\n                f\"'https://huggingface.co/{pretrained_model_name_or_path}' for available revisions.\"\n            )\n        except EntryNotFoundError:\n            raise EnvironmentError(\n                f\"{pretrained_model_name_or_path} does not appear to have a file named {weights_name}.\"\n            )\n        except HTTPError as err:\n            raise EnvironmentError(\n                f\"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\\n{err}\"\n            )\n        except ValueError:\n            raise EnvironmentError(\n                f\"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this model, couldn't find it\"\n                f\" in the cached files and it looks like {pretrained_model_name_or_path} is not the path to a\"\n                f\" directory containing a file named {weights_name} or\"\n                \" \\nCheckout your internet connection or see how to run the library in\"\n                \" offline mode at 'https://huggingface.co/docs/diffusers/installation#offline-mode'.\"\n            )\n        except EnvironmentError:\n            raise EnvironmentError(\n                f\"Can't load the model for '{pretrained_model_name_or_path}'. If you were trying to load it from \"\n                \"'https://huggingface.co/models', make sure you don't have a local directory with the same name. \"\n                f\"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory \"\n                f\"containing a file named {weights_name}\"\n            )\n"
  },
  {
    "path": "diffusers/utils/import_utils.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nImport utilities: Utilities related to imports and our lazy inits.\n\"\"\"\nimport importlib.util\nimport operator as op\nimport os\nimport sys\nfrom collections import OrderedDict\nfrom typing import Union\n\nfrom huggingface_hub.utils import is_jinja_available  # noqa: F401\nfrom packaging import version\nfrom packaging.version import Version, parse\n\nfrom . import logging\n\n\n# The package importlib_metadata is in a different place, depending on the python version.\nif sys.version_info < (3, 8):\n    import importlib_metadata\nelse:\n    import importlib.metadata as importlib_metadata\n\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\nENV_VARS_TRUE_VALUES = {\"1\", \"ON\", \"YES\", \"TRUE\"}\nENV_VARS_TRUE_AND_AUTO_VALUES = ENV_VARS_TRUE_VALUES.union({\"AUTO\"})\n\nUSE_TF = os.environ.get(\"USE_TF\", \"AUTO\").upper()\nUSE_TORCH = os.environ.get(\"USE_TORCH\", \"AUTO\").upper()\nUSE_JAX = os.environ.get(\"USE_FLAX\", \"AUTO\").upper()\nUSE_SAFETENSORS = os.environ.get(\"USE_SAFETENSORS\", \"AUTO\").upper()\n\nSTR_OPERATION_TO_FUNC = {\">\": op.gt, \">=\": op.ge, \"==\": op.eq, \"!=\": op.ne, \"<=\": op.le, \"<\": op.lt}\n\n_torch_version = \"N/A\"\nif USE_TORCH in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TF not in ENV_VARS_TRUE_VALUES:\n    _torch_available = importlib.util.find_spec(\"torch\") is not None\n    if _torch_available:\n        try:\n            _torch_version = importlib_metadata.version(\"torch\")\n            logger.info(f\"PyTorch version {_torch_version} available.\")\n        except importlib_metadata.PackageNotFoundError:\n            _torch_available = False\nelse:\n    logger.info(\"Disabling PyTorch because USE_TORCH is set\")\n    _torch_available = False\n\n\n_tf_version = \"N/A\"\nif USE_TF in ENV_VARS_TRUE_AND_AUTO_VALUES and USE_TORCH not in ENV_VARS_TRUE_VALUES:\n    _tf_available = importlib.util.find_spec(\"tensorflow\") is not None\n    if _tf_available:\n        candidates = (\n            \"tensorflow\",\n            \"tensorflow-cpu\",\n            \"tensorflow-gpu\",\n            \"tf-nightly\",\n            \"tf-nightly-cpu\",\n            \"tf-nightly-gpu\",\n            \"intel-tensorflow\",\n            \"intel-tensorflow-avx512\",\n            \"tensorflow-rocm\",\n            \"tensorflow-macos\",\n            \"tensorflow-aarch64\",\n        )\n        _tf_version = None\n        # For the metadata, we have to look for both tensorflow and tensorflow-cpu\n        for pkg in candidates:\n            try:\n                _tf_version = importlib_metadata.version(pkg)\n                break\n            except importlib_metadata.PackageNotFoundError:\n                pass\n        _tf_available = _tf_version is not None\n    if _tf_available:\n        if version.parse(_tf_version) < version.parse(\"2\"):\n            logger.info(f\"TensorFlow found but with version {_tf_version}. 
Diffusers requires version 2 minimum.\")\n            _tf_available = False\n        else:\n            logger.info(f\"TensorFlow version {_tf_version} available.\")\nelse:\n    logger.info(\"Disabling Tensorflow because USE_TORCH is set\")\n    _tf_available = False\n\n_jax_version = \"N/A\"\n_flax_version = \"N/A\"\nif USE_JAX in ENV_VARS_TRUE_AND_AUTO_VALUES:\n    _flax_available = importlib.util.find_spec(\"jax\") is not None and importlib.util.find_spec(\"flax\") is not None\n    if _flax_available:\n        try:\n            _jax_version = importlib_metadata.version(\"jax\")\n            _flax_version = importlib_metadata.version(\"flax\")\n            logger.info(f\"JAX version {_jax_version}, Flax version {_flax_version} available.\")\n        except importlib_metadata.PackageNotFoundError:\n            _flax_available = False\nelse:\n    _flax_available = False\n\nif USE_SAFETENSORS in ENV_VARS_TRUE_AND_AUTO_VALUES:\n    _safetensors_available = importlib.util.find_spec(\"safetensors\") is not None\n    if _safetensors_available:\n        try:\n            _safetensors_version = importlib_metadata.version(\"safetensors\")\n            logger.info(f\"Safetensors version {_safetensors_version} available.\")\n        except importlib_metadata.PackageNotFoundError:\n            _safetensors_available = False\nelse:\n    logger.info(\"Disabling Safetensors because USE_TF is set\")\n    _safetensors_available = False\n\n_transformers_available = importlib.util.find_spec(\"transformers\") is not None\ntry:\n    _transformers_version = importlib_metadata.version(\"transformers\")\n    logger.debug(f\"Successfully imported transformers version {_transformers_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _transformers_available = False\n\n\n_inflect_available = importlib.util.find_spec(\"inflect\") is not None\ntry:\n    _inflect_version = importlib_metadata.version(\"inflect\")\n    logger.debug(f\"Successfully imported inflect version {_inflect_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _inflect_available = False\n\n\n_unidecode_available = importlib.util.find_spec(\"unidecode\") is not None\ntry:\n    _unidecode_version = importlib_metadata.version(\"unidecode\")\n    logger.debug(f\"Successfully imported unidecode version {_unidecode_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _unidecode_available = False\n\n\n_onnxruntime_version = \"N/A\"\n_onnx_available = importlib.util.find_spec(\"onnxruntime\") is not None\nif _onnx_available:\n    candidates = (\n        \"onnxruntime\",\n        \"onnxruntime-gpu\",\n        \"ort_nightly_gpu\",\n        \"onnxruntime-directml\",\n        \"onnxruntime-openvino\",\n        \"ort_nightly_directml\",\n        \"onnxruntime-rocm\",\n        \"onnxruntime-training\",\n    )\n    _onnxruntime_version = None\n    # For the metadata, we have to look for both onnxruntime and onnxruntime-gpu\n    for pkg in candidates:\n        try:\n            _onnxruntime_version = importlib_metadata.version(pkg)\n            break\n        except importlib_metadata.PackageNotFoundError:\n            pass\n    _onnx_available = _onnxruntime_version is not None\n    if _onnx_available:\n        logger.debug(f\"Successfully imported onnxruntime version {_onnxruntime_version}\")\n\n# (sayakpaul): importlib.util.find_spec(\"opencv-python\") returns None even when it's installed.\n# _opencv_available = importlib.util.find_spec(\"opencv-python\") is not None\ntry:\n    candidates = (\n        
\"opencv-python\",\n        \"opencv-contrib-python\",\n        \"opencv-python-headless\",\n        \"opencv-contrib-python-headless\",\n    )\n    _opencv_version = None\n    for pkg in candidates:\n        try:\n            _opencv_version = importlib_metadata.version(pkg)\n            break\n        except importlib_metadata.PackageNotFoundError:\n            pass\n    _opencv_available = _opencv_version is not None\n    if _opencv_available:\n        logger.debug(f\"Successfully imported cv2 version {_opencv_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _opencv_available = False\n\n_scipy_available = importlib.util.find_spec(\"scipy\") is not None\ntry:\n    _scipy_version = importlib_metadata.version(\"scipy\")\n    logger.debug(f\"Successfully imported scipy version {_scipy_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _scipy_available = False\n\n_librosa_available = importlib.util.find_spec(\"librosa\") is not None\ntry:\n    _librosa_version = importlib_metadata.version(\"librosa\")\n    logger.debug(f\"Successfully imported librosa version {_librosa_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _librosa_available = False\n\n_accelerate_available = importlib.util.find_spec(\"accelerate\") is not None\ntry:\n    _accelerate_version = importlib_metadata.version(\"accelerate\")\n    logger.debug(f\"Successfully imported accelerate version {_accelerate_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _accelerate_available = False\n\n_xformers_available = importlib.util.find_spec(\"xformers\") is not None\ntry:\n    _xformers_version = importlib_metadata.version(\"xformers\")\n    if _torch_available:\n        import torch\n\n        if version.Version(torch.__version__) < version.Version(\"1.12\"):\n            raise ValueError(\"PyTorch should be >= 1.12\")\n    logger.debug(f\"Successfully imported xformers version {_xformers_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _xformers_available = False\n\n_k_diffusion_available = importlib.util.find_spec(\"k_diffusion\") is not None\ntry:\n    _k_diffusion_version = importlib_metadata.version(\"k_diffusion\")\n    logger.debug(f\"Successfully imported k-diffusion version {_k_diffusion_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _k_diffusion_available = False\n\n_note_seq_available = importlib.util.find_spec(\"note_seq\") is not None\ntry:\n    _note_seq_version = importlib_metadata.version(\"note_seq\")\n    logger.debug(f\"Successfully imported note-seq version {_note_seq_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _note_seq_available = False\n\n_wandb_available = importlib.util.find_spec(\"wandb\") is not None\ntry:\n    _wandb_version = importlib_metadata.version(\"wandb\")\n    logger.debug(f\"Successfully imported wandb version {_wandb_version }\")\nexcept importlib_metadata.PackageNotFoundError:\n    _wandb_available = False\n\n_omegaconf_available = importlib.util.find_spec(\"omegaconf\") is not None\ntry:\n    _omegaconf_version = importlib_metadata.version(\"omegaconf\")\n    logger.debug(f\"Successfully imported omegaconf version {_omegaconf_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _omegaconf_available = False\n\n_tensorboard_available = importlib.util.find_spec(\"tensorboard\")\ntry:\n    _tensorboard_version = importlib_metadata.version(\"tensorboard\")\n    logger.debug(f\"Successfully imported tensorboard version {_tensorboard_version}\")\nexcept 
importlib_metadata.PackageNotFoundError:\n    _tensorboard_available = False\n\n\n_compel_available = importlib.util.find_spec(\"compel\")\ntry:\n    _compel_version = importlib_metadata.version(\"compel\")\n    logger.debug(f\"Successfully imported compel version {_compel_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _compel_available = False\n\n\n_ftfy_available = importlib.util.find_spec(\"ftfy\") is not None\ntry:\n    _ftfy_version = importlib_metadata.version(\"ftfy\")\n    logger.debug(f\"Successfully imported ftfy version {_ftfy_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _ftfy_available = False\n\n\n_bs4_available = importlib.util.find_spec(\"bs4\") is not None\ntry:\n    # importlib metadata under different name\n    _bs4_version = importlib_metadata.version(\"beautifulsoup4\")\n    logger.debug(f\"Successfully imported ftfy version {_bs4_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _bs4_available = False\n\n_torchsde_available = importlib.util.find_spec(\"torchsde\") is not None\ntry:\n    _torchsde_version = importlib_metadata.version(\"torchsde\")\n    logger.debug(f\"Successfully imported torchsde version {_torchsde_version}\")\nexcept importlib_metadata.PackageNotFoundError:\n    _torchsde_available = False\n\n\ndef is_torch_available():\n    return _torch_available\n\n\ndef is_safetensors_available():\n    return _safetensors_available\n\n\ndef is_tf_available():\n    return _tf_available\n\n\ndef is_flax_available():\n    return _flax_available\n\n\ndef is_transformers_available():\n    return _transformers_available\n\n\ndef is_inflect_available():\n    return _inflect_available\n\n\ndef is_unidecode_available():\n    return _unidecode_available\n\n\ndef is_onnx_available():\n    return _onnx_available\n\n\ndef is_opencv_available():\n    return _opencv_available\n\n\ndef is_scipy_available():\n    return _scipy_available\n\n\ndef is_librosa_available():\n    return _librosa_available\n\n\ndef is_xformers_available():\n    return _xformers_available\n\n\ndef is_accelerate_available():\n    return _accelerate_available\n\n\ndef is_k_diffusion_available():\n    return _k_diffusion_available\n\n\ndef is_note_seq_available():\n    return _note_seq_available\n\n\ndef is_wandb_available():\n    return _wandb_available\n\n\ndef is_omegaconf_available():\n    return _omegaconf_available\n\n\ndef is_tensorboard_available():\n    return _tensorboard_available\n\n\ndef is_compel_available():\n    return _compel_available\n\n\ndef is_ftfy_available():\n    return _ftfy_available\n\n\ndef is_bs4_available():\n    return _bs4_available\n\n\ndef is_torchsde_available():\n    return _torchsde_available\n\n\n# docstyle-ignore\nFLAX_IMPORT_ERROR = \"\"\"\n{0} requires the FLAX library but it was not found in your environment. Checkout the instructions on the\ninstallation page: https://github.com/google/flax and follow the ones that match your environment.\n\"\"\"\n\n# docstyle-ignore\nINFLECT_IMPORT_ERROR = \"\"\"\n{0} requires the inflect library but it was not found in your environment. You can install it with pip: `pip install\ninflect`\n\"\"\"\n\n# docstyle-ignore\nPYTORCH_IMPORT_ERROR = \"\"\"\n{0} requires the PyTorch library but it was not found in your environment. 
Checkout the instructions on the\ninstallation page: https://pytorch.org/get-started/locally/ and follow the ones that match your environment.\n\"\"\"\n\n# docstyle-ignore\nONNX_IMPORT_ERROR = \"\"\"\n{0} requires the onnxruntime library but it was not found in your environment. You can install it with pip: `pip\ninstall onnxruntime`\n\"\"\"\n\n# docstyle-ignore\nOPENCV_IMPORT_ERROR = \"\"\"\n{0} requires the OpenCV library but it was not found in your environment. You can install it with pip: `pip\ninstall opencv-python`\n\"\"\"\n\n# docstyle-ignore\nSCIPY_IMPORT_ERROR = \"\"\"\n{0} requires the scipy library but it was not found in your environment. You can install it with pip: `pip install\nscipy`\n\"\"\"\n\n# docstyle-ignore\nLIBROSA_IMPORT_ERROR = \"\"\"\n{0} requires the librosa library but it was not found in your environment.  Checkout the instructions on the\ninstallation page: https://librosa.org/doc/latest/install.html and follow the ones that match your environment.\n\"\"\"\n\n# docstyle-ignore\nTRANSFORMERS_IMPORT_ERROR = \"\"\"\n{0} requires the transformers library but it was not found in your environment. You can install it with pip: `pip\ninstall transformers`\n\"\"\"\n\n# docstyle-ignore\nUNIDECODE_IMPORT_ERROR = \"\"\"\n{0} requires the unidecode library but it was not found in your environment. You can install it with pip: `pip install\nUnidecode`\n\"\"\"\n\n# docstyle-ignore\nK_DIFFUSION_IMPORT_ERROR = \"\"\"\n{0} requires the k-diffusion library but it was not found in your environment. You can install it with pip: `pip\ninstall k-diffusion`\n\"\"\"\n\n# docstyle-ignore\nNOTE_SEQ_IMPORT_ERROR = \"\"\"\n{0} requires the note-seq library but it was not found in your environment. You can install it with pip: `pip\ninstall note-seq`\n\"\"\"\n\n# docstyle-ignore\nWANDB_IMPORT_ERROR = \"\"\"\n{0} requires the wandb library but it was not found in your environment. You can install it with pip: `pip\ninstall wandb`\n\"\"\"\n\n# docstyle-ignore\nOMEGACONF_IMPORT_ERROR = \"\"\"\n{0} requires the omegaconf library but it was not found in your environment. You can install it with pip: `pip\ninstall omegaconf`\n\"\"\"\n\n# docstyle-ignore\nTENSORBOARD_IMPORT_ERROR = \"\"\"\n{0} requires the tensorboard library but it was not found in your environment. You can install it with pip: `pip\ninstall tensorboard`\n\"\"\"\n\n\n# docstyle-ignore\nCOMPEL_IMPORT_ERROR = \"\"\"\n{0} requires the compel library but it was not found in your environment. You can install it with pip: `pip install compel`\n\"\"\"\n\n# docstyle-ignore\nBS4_IMPORT_ERROR = \"\"\"\n{0} requires the Beautiful Soup library but it was not found in your environment. You can install it with pip:\n`pip install beautifulsoup4`. Please note that you may need to restart your runtime after installation.\n\"\"\"\n\n# docstyle-ignore\nFTFY_IMPORT_ERROR = \"\"\"\n{0} requires the ftfy library but it was not found in your environment. Checkout the instructions on the\ninstallation section: https://github.com/rspeer/python-ftfy/tree/master#installing and follow the ones\nthat match your environment. Please note that you may need to restart your runtime after installation.\n\"\"\"\n\n# docstyle-ignore\nTORCHSDE_IMPORT_ERROR = \"\"\"\n{0} requires the torchsde library but it was not found in your environment. 
You can install it with pip: `pip install torchsde`\n\"\"\"\n\n\nBACKENDS_MAPPING = OrderedDict(\n    [\n        (\"bs4\", (is_bs4_available, BS4_IMPORT_ERROR)),\n        (\"flax\", (is_flax_available, FLAX_IMPORT_ERROR)),\n        (\"inflect\", (is_inflect_available, INFLECT_IMPORT_ERROR)),\n        (\"onnx\", (is_onnx_available, ONNX_IMPORT_ERROR)),\n        (\"opencv\", (is_opencv_available, OPENCV_IMPORT_ERROR)),\n        (\"scipy\", (is_scipy_available, SCIPY_IMPORT_ERROR)),\n        (\"torch\", (is_torch_available, PYTORCH_IMPORT_ERROR)),\n        (\"transformers\", (is_transformers_available, TRANSFORMERS_IMPORT_ERROR)),\n        (\"unidecode\", (is_unidecode_available, UNIDECODE_IMPORT_ERROR)),\n        (\"librosa\", (is_librosa_available, LIBROSA_IMPORT_ERROR)),\n        (\"k_diffusion\", (is_k_diffusion_available, K_DIFFUSION_IMPORT_ERROR)),\n        (\"note_seq\", (is_note_seq_available, NOTE_SEQ_IMPORT_ERROR)),\n        (\"wandb\", (is_wandb_available, WANDB_IMPORT_ERROR)),\n        (\"omegaconf\", (is_omegaconf_available, OMEGACONF_IMPORT_ERROR)),\n        (\"tensorboard\", (is_tensorboard_available, TENSORBOARD_IMPORT_ERROR)),\n        (\"compel\", (is_compel_available, COMPEL_IMPORT_ERROR)),\n        (\"ftfy\", (is_ftfy_available, FTFY_IMPORT_ERROR)),\n        (\"torchsde\", (is_torchsde_available, TORCHSDE_IMPORT_ERROR)),\n    ]\n)\n\n\ndef requires_backends(obj, backends):\n    if not isinstance(backends, (list, tuple)):\n        backends = [backends]\n\n    name = obj.__name__ if hasattr(obj, \"__name__\") else obj.__class__.__name__\n    checks = (BACKENDS_MAPPING[backend] for backend in backends)\n    failed = [msg.format(name) for available, msg in checks if not available()]\n    if failed:\n        raise ImportError(\"\".join(failed))\n\n    if name in [\n        \"VersatileDiffusionTextToImagePipeline\",\n        \"VersatileDiffusionPipeline\",\n        \"VersatileDiffusionDualGuidedPipeline\",\n        \"StableDiffusionImageVariationPipeline\",\n        \"UnCLIPPipeline\",\n    ] and is_transformers_version(\"<\", \"4.25.0\"):\n        raise ImportError(\n            f\"You need to install `transformers>=4.25` in order to use {name}: \\n```\\n pip install\"\n            \" --upgrade transformers \\n```\"\n        )\n\n    if name in [\"StableDiffusionDepth2ImgPipeline\", \"StableDiffusionPix2PixZeroPipeline\"] and is_transformers_version(\n        \"<\", \"4.26.0\"\n    ):\n        raise ImportError(\n            f\"You need to install `transformers>=4.26` in order to use {name}: \\n```\\n pip install\"\n            \" --upgrade transformers \\n```\"\n        )\n\n\nclass DummyObject(type):\n    \"\"\"\n    Metaclass for the dummy objects. 
Any class inheriting from it will return the ImportError generated by\n    `requires_backend` each time a user tries to access any method of that class.\n    \"\"\"\n\n    def __getattr__(cls, key):\n        if key.startswith(\"_\"):\n            return super().__getattr__(cls, key)\n        requires_backends(cls, cls._backends)\n\n\n# This function was copied from: https://github.com/huggingface/accelerate/blob/874c4967d94badd24f893064cc3bef45f57cadf7/src/accelerate/utils/versions.py#L319\ndef compare_versions(library_or_version: Union[str, Version], operation: str, requirement_version: str):\n    \"\"\"\n    Args:\n    Compares a library version to some requirement using a given operation.\n        library_or_version (`str` or `packaging.version.Version`):\n            A library name or a version to check.\n        operation (`str`):\n            A string representation of an operator, such as `\">\"` or `\"<=\"`.\n        requirement_version (`str`):\n            The version to compare the library version against\n    \"\"\"\n    if operation not in STR_OPERATION_TO_FUNC.keys():\n        raise ValueError(f\"`operation` must be one of {list(STR_OPERATION_TO_FUNC.keys())}, received {operation}\")\n    operation = STR_OPERATION_TO_FUNC[operation]\n    if isinstance(library_or_version, str):\n        library_or_version = parse(importlib_metadata.version(library_or_version))\n    return operation(library_or_version, parse(requirement_version))\n\n\n# This function was copied from: https://github.com/huggingface/accelerate/blob/874c4967d94badd24f893064cc3bef45f57cadf7/src/accelerate/utils/versions.py#L338\ndef is_torch_version(operation: str, version: str):\n    \"\"\"\n    Args:\n    Compares the current PyTorch version to a given reference with an operation.\n        operation (`str`):\n            A string representation of an operator, such as `\">\"` or `\"<=\"`\n        version (`str`):\n            A string version of PyTorch\n    \"\"\"\n    return compare_versions(parse(_torch_version), operation, version)\n\n\ndef is_transformers_version(operation: str, version: str):\n    \"\"\"\n    Args:\n    Compares the current Transformers version to a given reference with an operation.\n        operation (`str`):\n            A string representation of an operator, such as `\">\"` or `\"<=\"`\n        version (`str`):\n            A version string\n    \"\"\"\n    if not _transformers_available:\n        return False\n    return compare_versions(parse(_transformers_version), operation, version)\n\n\ndef is_accelerate_version(operation: str, version: str):\n    \"\"\"\n    Args:\n    Compares the current Accelerate version to a given reference with an operation.\n        operation (`str`):\n            A string representation of an operator, such as `\">\"` or `\"<=\"`\n        version (`str`):\n            A version string\n    \"\"\"\n    if not _accelerate_available:\n        return False\n    return compare_versions(parse(_accelerate_version), operation, version)\n\n\ndef is_k_diffusion_version(operation: str, version: str):\n    \"\"\"\n    Args:\n    Compares the current k-diffusion version to a given reference with an operation.\n        operation (`str`):\n            A string representation of an operator, such as `\">\"` or `\"<=\"`\n        version (`str`):\n            A version string\n    \"\"\"\n    if not _k_diffusion_available:\n        return False\n    return compare_versions(parse(_k_diffusion_version), operation, version)\n\n\nclass 
OptionalDependencyNotAvailable(BaseException):\n    \"\"\"An error indicating that an optional dependency of Diffusers was not found in the environment.\"\"\"\n"
  },
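A minimal usage sketch for the import utilities above, assuming the module path implied by the file layout (`diffusers.utils.import_utils`); `MyPipeline` is a hypothetical class, not part of the library:

```python
# Sketch only: how the availability checks and requires_backends are typically consumed.
from diffusers.utils.import_utils import (
    is_scipy_available,
    is_torch_available,
    is_torch_version,
    requires_backends,
)


class MyPipeline:  # hypothetical example class
    def __call__(self):
        # Raises an ImportError assembled from the *_IMPORT_ERROR templates if a backend is missing.
        requires_backends(self, ["torch", "scipy"])


if is_torch_available() and is_torch_version(">=", "2.0.0"):
    print("PyTorch 2.x detected")

if not is_scipy_available():
    print("scipy is not installed; functionality that needs it will raise on use")
```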
  {
    "path": "diffusers/utils/logging.py",
    "content": "# coding=utf-8\n# Copyright 2023 Optuna, Hugging Face\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" Logging utilities.\"\"\"\n\nimport logging\nimport os\nimport sys\nimport threading\nfrom logging import (\n    CRITICAL,  # NOQA\n    DEBUG,  # NOQA\n    ERROR,  # NOQA\n    FATAL,  # NOQA\n    INFO,  # NOQA\n    NOTSET,  # NOQA\n    WARN,  # NOQA\n    WARNING,  # NOQA\n)\nfrom typing import Optional\n\nfrom tqdm import auto as tqdm_lib\n\n\n_lock = threading.Lock()\n_default_handler: Optional[logging.Handler] = None\n\nlog_levels = {\n    \"debug\": logging.DEBUG,\n    \"info\": logging.INFO,\n    \"warning\": logging.WARNING,\n    \"error\": logging.ERROR,\n    \"critical\": logging.CRITICAL,\n}\n\n_default_log_level = logging.WARNING\n\n_tqdm_active = True\n\n\ndef _get_default_logging_level():\n    \"\"\"\n    If DIFFUSERS_VERBOSITY env var is set to one of the valid choices return that as the new default level. If it is\n    not - fall back to `_default_log_level`\n    \"\"\"\n    env_level_str = os.getenv(\"DIFFUSERS_VERBOSITY\", None)\n    if env_level_str:\n        if env_level_str in log_levels:\n            return log_levels[env_level_str]\n        else:\n            logging.getLogger().warning(\n                f\"Unknown option DIFFUSERS_VERBOSITY={env_level_str}, \"\n                f\"has to be one of: { ', '.join(log_levels.keys()) }\"\n            )\n    return _default_log_level\n\n\ndef _get_library_name() -> str:\n    return __name__.split(\".\")[0]\n\n\ndef _get_library_root_logger() -> logging.Logger:\n    return logging.getLogger(_get_library_name())\n\n\ndef _configure_library_root_logger() -> None:\n    global _default_handler\n\n    with _lock:\n        if _default_handler:\n            # This library has already configured the library root logger.\n            return\n        _default_handler = logging.StreamHandler()  # Set sys.stderr as stream.\n        _default_handler.flush = sys.stderr.flush\n\n        # Apply our default configuration to the library root logger.\n        library_root_logger = _get_library_root_logger()\n        library_root_logger.addHandler(_default_handler)\n        library_root_logger.setLevel(_get_default_logging_level())\n        library_root_logger.propagate = False\n\n\ndef _reset_library_root_logger() -> None:\n    global _default_handler\n\n    with _lock:\n        if not _default_handler:\n            return\n\n        library_root_logger = _get_library_root_logger()\n        library_root_logger.removeHandler(_default_handler)\n        library_root_logger.setLevel(logging.NOTSET)\n        _default_handler = None\n\n\ndef get_log_levels_dict():\n    return log_levels\n\n\ndef get_logger(name: Optional[str] = None) -> logging.Logger:\n    \"\"\"\n    Return a logger with the specified name.\n\n    This function is not supposed to be directly accessed unless you are writing a custom diffusers module.\n    \"\"\"\n\n    if name is None:\n        name = _get_library_name()\n\n    
_configure_library_root_logger()\n    return logging.getLogger(name)\n\n\ndef get_verbosity() -> int:\n    \"\"\"\n    Return the current level for the 🤗 Diffusers' root logger as an int.\n\n    Returns:\n        `int`: The logging level.\n\n    <Tip>\n\n    🤗 Diffusers has following logging levels:\n\n    - 50: `diffusers.logging.CRITICAL` or `diffusers.logging.FATAL`\n    - 40: `diffusers.logging.ERROR`\n    - 30: `diffusers.logging.WARNING` or `diffusers.logging.WARN`\n    - 20: `diffusers.logging.INFO`\n    - 10: `diffusers.logging.DEBUG`\n\n    </Tip>\"\"\"\n\n    _configure_library_root_logger()\n    return _get_library_root_logger().getEffectiveLevel()\n\n\ndef set_verbosity(verbosity: int) -> None:\n    \"\"\"\n    Set the verbosity level for the 🤗 Diffusers' root logger.\n\n    Args:\n        verbosity (`int`):\n            Logging level, e.g., one of:\n\n            - `diffusers.logging.CRITICAL` or `diffusers.logging.FATAL`\n            - `diffusers.logging.ERROR`\n            - `diffusers.logging.WARNING` or `diffusers.logging.WARN`\n            - `diffusers.logging.INFO`\n            - `diffusers.logging.DEBUG`\n    \"\"\"\n\n    _configure_library_root_logger()\n    _get_library_root_logger().setLevel(verbosity)\n\n\ndef set_verbosity_info():\n    \"\"\"Set the verbosity to the `INFO` level.\"\"\"\n    return set_verbosity(INFO)\n\n\ndef set_verbosity_warning():\n    \"\"\"Set the verbosity to the `WARNING` level.\"\"\"\n    return set_verbosity(WARNING)\n\n\ndef set_verbosity_debug():\n    \"\"\"Set the verbosity to the `DEBUG` level.\"\"\"\n    return set_verbosity(DEBUG)\n\n\ndef set_verbosity_error():\n    \"\"\"Set the verbosity to the `ERROR` level.\"\"\"\n    return set_verbosity(ERROR)\n\n\ndef disable_default_handler() -> None:\n    \"\"\"Disable the default handler of the HuggingFace Diffusers' root logger.\"\"\"\n\n    _configure_library_root_logger()\n\n    assert _default_handler is not None\n    _get_library_root_logger().removeHandler(_default_handler)\n\n\ndef enable_default_handler() -> None:\n    \"\"\"Enable the default handler of the HuggingFace Diffusers' root logger.\"\"\"\n\n    _configure_library_root_logger()\n\n    assert _default_handler is not None\n    _get_library_root_logger().addHandler(_default_handler)\n\n\ndef add_handler(handler: logging.Handler) -> None:\n    \"\"\"adds a handler to the HuggingFace Diffusers' root logger.\"\"\"\n\n    _configure_library_root_logger()\n\n    assert handler is not None\n    _get_library_root_logger().addHandler(handler)\n\n\ndef remove_handler(handler: logging.Handler) -> None:\n    \"\"\"removes given handler from the HuggingFace Diffusers' root logger.\"\"\"\n\n    _configure_library_root_logger()\n\n    assert handler is not None and handler not in _get_library_root_logger().handlers\n    _get_library_root_logger().removeHandler(handler)\n\n\ndef disable_propagation() -> None:\n    \"\"\"\n    Disable propagation of the library log outputs. Note that log propagation is disabled by default.\n    \"\"\"\n\n    _configure_library_root_logger()\n    _get_library_root_logger().propagate = False\n\n\ndef enable_propagation() -> None:\n    \"\"\"\n    Enable propagation of the library log outputs. 
Please disable the HuggingFace Diffusers' default handler to prevent\n    double logging if the root logger has been configured.\n    \"\"\"\n\n    _configure_library_root_logger()\n    _get_library_root_logger().propagate = True\n\n\ndef enable_explicit_format() -> None:\n    \"\"\"\n    Enable explicit formatting for every HuggingFace Diffusers' logger. The explicit formatter is as follows:\n    ```\n        [LEVELNAME|FILENAME|LINE NUMBER] TIME >> MESSAGE\n    ```\n    All handlers currently bound to the root logger are affected by this method.\n    \"\"\"\n    handlers = _get_library_root_logger().handlers\n\n    for handler in handlers:\n        formatter = logging.Formatter(\"[%(levelname)s|%(filename)s:%(lineno)s] %(asctime)s >> %(message)s\")\n        handler.setFormatter(formatter)\n\n\ndef reset_format() -> None:\n    \"\"\"\n    Resets the formatting for HuggingFace Diffusers' loggers.\n\n    All handlers currently bound to the root logger are affected by this method.\n    \"\"\"\n    handlers = _get_library_root_logger().handlers\n\n    for handler in handlers:\n        handler.setFormatter(None)\n\n\ndef warning_advice(self, *args, **kwargs):\n    \"\"\"\n    This method is identical to `logger.warning()`, but if env var DIFFUSERS_NO_ADVISORY_WARNINGS=1 is set, this\n    warning will not be printed\n    \"\"\"\n    no_advisory_warnings = os.getenv(\"DIFFUSERS_NO_ADVISORY_WARNINGS\", False)\n    if no_advisory_warnings:\n        return\n    self.warning(*args, **kwargs)\n\n\nlogging.Logger.warning_advice = warning_advice\n\n\nclass EmptyTqdm:\n    \"\"\"Dummy tqdm which doesn't do anything.\"\"\"\n\n    def __init__(self, *args, **kwargs):  # pylint: disable=unused-argument\n        self._iterator = args[0] if args else None\n\n    def __iter__(self):\n        return iter(self._iterator)\n\n    def __getattr__(self, _):\n        \"\"\"Return empty function.\"\"\"\n\n        def empty_fn(*args, **kwargs):  # pylint: disable=unused-argument\n            return\n\n        return empty_fn\n\n    def __enter__(self):\n        return self\n\n    def __exit__(self, type_, value, traceback):\n        return\n\n\nclass _tqdm_cls:\n    def __call__(self, *args, **kwargs):\n        if _tqdm_active:\n            return tqdm_lib.tqdm(*args, **kwargs)\n        else:\n            return EmptyTqdm(*args, **kwargs)\n\n    def set_lock(self, *args, **kwargs):\n        self._lock = None\n        if _tqdm_active:\n            return tqdm_lib.tqdm.set_lock(*args, **kwargs)\n\n    def get_lock(self):\n        if _tqdm_active:\n            return tqdm_lib.tqdm.get_lock()\n\n\ntqdm = _tqdm_cls()\n\n\ndef is_progress_bar_enabled() -> bool:\n    \"\"\"Return a boolean indicating whether tqdm progress bars are enabled.\"\"\"\n    global _tqdm_active\n    return bool(_tqdm_active)\n\n\ndef enable_progress_bar():\n    \"\"\"Enable tqdm progress bar.\"\"\"\n    global _tqdm_active\n    _tqdm_active = True\n\n\ndef disable_progress_bar():\n    \"\"\"Disable tqdm progress bar.\"\"\"\n    global _tqdm_active\n    _tqdm_active = False\n"
  },
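A minimal sketch of the intended logging workflow, assuming the `diffusers.utils.logging` module path from the file layout above:

```python
# Sketch only: adjust library-wide verbosity, formatting, and progress bars.
from diffusers.utils import logging

logging.set_verbosity_info()        # show INFO and above on the library root logger
logging.enable_explicit_format()    # [LEVELNAME|FILENAME:LINENO] TIME >> MESSAGE

logger = logging.get_logger(__name__)
logger.info("pipeline loaded")
# Silenced entirely when DIFFUSERS_NO_ADVISORY_WARNINGS=1 is set:
logger.warning_advice("this checkpoint format is deprecated")

logging.disable_progress_bar()      # swaps tqdm for the EmptyTqdm no-op wrapper
```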
  {
    "path": "diffusers/utils/model_card_template.md",
    "content": "---\n{{ card_data }}\n---\n\n<!-- This model card has been generated automatically according to the information the training script had access to. You\nshould probably proofread and complete it, then remove this comment. -->\n\n# {{ model_name | default(\"Diffusion Model\") }}\n\n## Model description\n\nThis diffusion model is trained with the [🤗 Diffusers](https://github.com/huggingface/diffusers) library \non the `{{ dataset_name }}` dataset.\n\n## Intended uses & limitations\n\n#### How to use\n\n```python\n# TODO: add an example code snippet for running this diffusion pipeline\n```\n\n#### Limitations and bias\n\n[TODO: provide examples of latent issues and potential remediations]\n\n## Training data\n\n[TODO: describe the data used to train the model]\n\n### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: {{ learning_rate }}\n- train_batch_size: {{ train_batch_size }}\n- eval_batch_size: {{ eval_batch_size }}\n- gradient_accumulation_steps: {{ gradient_accumulation_steps }}\n- optimizer: AdamW with betas=({{ adam_beta1 }}, {{ adam_beta2 }}), weight_decay={{ adam_weight_decay }} and epsilon={{ adam_epsilon }}\n- lr_scheduler: {{ lr_scheduler }}\n- lr_warmup_steps: {{ lr_warmup_steps }}\n- ema_inv_gamma: {{ ema_inv_gamma }}\n- ema_inv_gamma: {{ ema_power }}\n- ema_inv_gamma: {{ ema_max_decay }}\n- mixed_precision: {{ mixed_precision }}\n\n### Training results\n\n📈 [TensorBoard logs](https://huggingface.co/{{ repo_name }}/tensorboard?#scalars)\n\n\n"
  },
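A hypothetical sketch of rendering the card template above with Jinja2; the keyword arguments mirror the `{{ ... }}` placeholders, the file path and all values shown are assumptions for illustration only:

```python
# Sketch only: fill in the Jinja placeholders of the model card template.
from jinja2 import Template

with open("diffusers/utils/model_card_template.md") as f:  # assumed relative path
    template = Template(f.read())

card_text = template.render(
    card_data="license: apache-2.0",
    model_name="ddpm-example-64",              # hypothetical values from here on
    dataset_name="example/dataset",
    learning_rate=1e-4,
    train_batch_size=16,
    eval_batch_size=16,
    gradient_accumulation_steps=1,
    adam_beta1=0.95,
    adam_beta2=0.999,
    adam_weight_decay=1e-6,
    adam_epsilon=1e-8,
    lr_scheduler="cosine",
    lr_warmup_steps=500,
    ema_inv_gamma=1.0,
    ema_power=0.75,
    ema_max_decay=0.9999,
    mixed_precision="fp16",
    repo_name="user/ddpm-example-64",
)
print(card_text)
```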
  {
    "path": "diffusers/utils/outputs.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nGeneric utilities\n\"\"\"\n\nfrom collections import OrderedDict\nfrom dataclasses import fields\nfrom typing import Any, Tuple\n\nimport numpy as np\n\nfrom .import_utils import is_torch_available\n\n\ndef is_tensor(x):\n    \"\"\"\n    Tests if `x` is a `torch.Tensor` or `np.ndarray`.\n    \"\"\"\n    if is_torch_available():\n        import torch\n\n        if isinstance(x, torch.Tensor):\n            return True\n\n    return isinstance(x, np.ndarray)\n\n\nclass BaseOutput(OrderedDict):\n    \"\"\"\n    Base class for all model outputs as dataclass. Has a `__getitem__` that allows indexing by integer or slice (like a\n    tuple) or strings (like a dictionary) that will ignore the `None` attributes. Otherwise behaves like a regular\n    python dictionary.\n\n    <Tip warning={true}>\n\n    You can't unpack a `BaseOutput` directly. Use the [`~utils.BaseOutput.to_tuple`] method to convert it to a tuple\n    before.\n\n    </Tip>\n    \"\"\"\n\n    def __post_init__(self):\n        class_fields = fields(self)\n\n        # Safety and consistency checks\n        if not len(class_fields):\n            raise ValueError(f\"{self.__class__.__name__} has no fields.\")\n\n        first_field = getattr(self, class_fields[0].name)\n        other_fields_are_none = all(getattr(self, field.name) is None for field in class_fields[1:])\n\n        if other_fields_are_none and isinstance(first_field, dict):\n            for key, value in first_field.items():\n                self[key] = value\n        else:\n            for field in class_fields:\n                v = getattr(self, field.name)\n                if v is not None:\n                    self[field.name] = v\n\n    def __delitem__(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``__delitem__`` on a {self.__class__.__name__} instance.\")\n\n    def setdefault(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``setdefault`` on a {self.__class__.__name__} instance.\")\n\n    def pop(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``pop`` on a {self.__class__.__name__} instance.\")\n\n    def update(self, *args, **kwargs):\n        raise Exception(f\"You cannot use ``update`` on a {self.__class__.__name__} instance.\")\n\n    def __getitem__(self, k):\n        if isinstance(k, str):\n            inner_dict = dict(self.items())\n            return inner_dict[k]\n        else:\n            return self.to_tuple()[k]\n\n    def __setattr__(self, name, value):\n        if name in self.keys() and value is not None:\n            # Don't call self.__setitem__ to avoid recursion errors\n            super().__setitem__(name, value)\n        super().__setattr__(name, value)\n\n    def __setitem__(self, key, value):\n        # Will raise a KeyException if needed\n        super().__setitem__(key, value)\n        # Don't call self.__setattr__ to avoid recursion errors\n       
 super().__setattr__(key, value)\n\n    def to_tuple(self) -> Tuple[Any]:\n        \"\"\"\n        Convert self to a tuple containing all the attributes/keys that are not `None`.\n        \"\"\"\n        return tuple(self[k] for k in self.keys())\n"
  },
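A minimal sketch of how `BaseOutput` is meant to be subclassed and indexed; `ExampleOutput` is a hypothetical class, not part of the library:

```python
# Sketch only: BaseOutput subclasses behave like dataclasses, dicts, and tuples at once.
from dataclasses import dataclass
from typing import List, Optional

import numpy as np

from diffusers.utils.outputs import BaseOutput


@dataclass
class ExampleOutput(BaseOutput):
    images: np.ndarray
    nsfw_content_detected: Optional[List[bool]] = None


out = ExampleOutput(images=np.zeros((1, 8, 8, 3)))
assert out["images"] is out.images   # dict-style access by key
assert out[0] is out.images          # integer indexing skips fields left as None
(images,) = out.to_tuple()           # convert explicitly before unpacking
```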
  {
    "path": "diffusers/utils/pil_utils.py",
    "content": "import PIL.Image\nimport PIL.ImageOps\nfrom packaging import version\nfrom PIL import Image\n\n\nif version.parse(version.parse(PIL.__version__).base_version) >= version.parse(\"9.1.0\"):\n    PIL_INTERPOLATION = {\n        \"linear\": PIL.Image.Resampling.BILINEAR,\n        \"bilinear\": PIL.Image.Resampling.BILINEAR,\n        \"bicubic\": PIL.Image.Resampling.BICUBIC,\n        \"lanczos\": PIL.Image.Resampling.LANCZOS,\n        \"nearest\": PIL.Image.Resampling.NEAREST,\n    }\nelse:\n    PIL_INTERPOLATION = {\n        \"linear\": PIL.Image.LINEAR,\n        \"bilinear\": PIL.Image.BILINEAR,\n        \"bicubic\": PIL.Image.BICUBIC,\n        \"lanczos\": PIL.Image.LANCZOS,\n        \"nearest\": PIL.Image.NEAREST,\n    }\n\n\ndef pt_to_pil(images):\n    images = (images / 2 + 0.5).clamp(0, 1)\n    images = images.cpu().permute(0, 2, 3, 1).float().numpy()\n    images = numpy_to_pil(images)\n    return images\n\n\ndef numpy_to_pil(images):\n    \"\"\"\n    Convert a numpy image or a batch of images to a PIL image.\n    \"\"\"\n    if images.ndim == 3:\n        images = images[None, ...]\n    images = (images * 255).round().astype(\"uint8\")\n    if images.shape[-1] == 1:\n        # special case for grayscale (single channel) images\n        pil_images = [Image.fromarray(image.squeeze(), mode=\"L\") for image in images]\n    else:\n        pil_images = [Image.fromarray(image) for image in images]\n\n    return pil_images\n"
  },
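A minimal usage sketch for the conversion helpers above, assuming PyTorch is installed and the module path implied by the file layout:

```python
# Sketch only: convert decoder-style tensors and numpy arrays to PIL images.
import numpy as np
import torch

from diffusers.utils.pil_utils import numpy_to_pil, pt_to_pil

# pt_to_pil expects NCHW tensors roughly in [-1, 1] and rescales them to [0, 1].
decoded = torch.rand(2, 3, 64, 64) * 2 - 1
pil_images = pt_to_pil(decoded)
pil_images[0].save("sample_0.png")

# numpy_to_pil expects (N)HWC float arrays in [0, 1]; single-channel arrays become mode "L".
gray = numpy_to_pil(np.random.rand(32, 32, 1))[0]
```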
  {
    "path": "diffusers/utils/testing_utils.py",
    "content": "import inspect\nimport logging\nimport os\nimport random\nimport re\nimport tempfile\nimport unittest\nimport urllib.parse\nfrom distutils.util import strtobool\nfrom io import BytesIO, StringIO\nfrom pathlib import Path\nfrom typing import List, Optional, Union\n\nimport numpy as np\nimport PIL.Image\nimport PIL.ImageOps\nimport requests\nfrom packaging import version\n\nfrom .import_utils import (\n    BACKENDS_MAPPING,\n    is_compel_available,\n    is_flax_available,\n    is_note_seq_available,\n    is_onnx_available,\n    is_opencv_available,\n    is_torch_available,\n    is_torch_version,\n    is_torchsde_available,\n)\nfrom .logging import get_logger\n\n\nglobal_rng = random.Random()\n\nlogger = get_logger(__name__)\n\nif is_torch_available():\n    import torch\n\n    if \"DIFFUSERS_TEST_DEVICE\" in os.environ:\n        torch_device = os.environ[\"DIFFUSERS_TEST_DEVICE\"]\n\n        available_backends = [\"cuda\", \"cpu\", \"mps\"]\n        if torch_device not in available_backends:\n            raise ValueError(\n                f\"unknown torch backend for diffusers tests: {torch_device}. Available backends are:\"\n                f\" {available_backends}\"\n            )\n        logger.info(f\"torch_device overrode to {torch_device}\")\n    else:\n        torch_device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n        is_torch_higher_equal_than_1_12 = version.parse(\n            version.parse(torch.__version__).base_version\n        ) >= version.parse(\"1.12\")\n\n        if is_torch_higher_equal_than_1_12:\n            # Some builds of torch 1.12 don't have the mps backend registered. See #892 for more details\n            mps_backend_registered = hasattr(torch.backends, \"mps\")\n            torch_device = \"mps\" if (mps_backend_registered and torch.backends.mps.is_available()) else torch_device\n\n\ndef torch_all_close(a, b, *args, **kwargs):\n    if not is_torch_available():\n        raise ValueError(\"PyTorch needs to be installed to use this function.\")\n    if not torch.allclose(a, b, *args, **kwargs):\n        assert False, f\"Max diff is absolute {(a - b).abs().max()}. Diff tensor is {(a - b).abs()}.\"\n    return True\n\n\ndef print_tensor_test(tensor, filename=\"test_corrections.txt\", expected_tensor_name=\"expected_slice\"):\n    test_name = os.environ.get(\"PYTEST_CURRENT_TEST\")\n    if not torch.is_tensor(tensor):\n        tensor = torch.from_numpy(tensor)\n\n    tensor_str = str(tensor.detach().cpu().flatten().to(torch.float32)).replace(\"\\n\", \"\")\n    # format is usually:\n    # expected_slice = np.array([-0.5713, -0.3018, -0.9814, 0.04663, -0.879, 0.76, -1.734, 0.1044, 1.161])\n    output_str = tensor_str.replace(\"tensor\", f\"{expected_tensor_name} = np.array\")\n    test_file, test_class, test_fn = test_name.split(\"::\")\n    test_fn = test_fn.split()[0]\n    with open(filename, \"a\") as f:\n        print(\";\".join([test_file, test_class, test_fn, output_str]), file=f)\n\n\ndef get_tests_dir(append_path=None):\n    \"\"\"\n    Args:\n        append_path: optional path to append to the tests dir path\n    Return:\n        The full path to the `tests` dir, so that the tests can be invoked from anywhere. 
Optionally `append_path` is\n        joined after the `tests` dir the former is provided.\n    \"\"\"\n    # this function caller's __file__\n    caller__file__ = inspect.stack()[1][1]\n    tests_dir = os.path.abspath(os.path.dirname(caller__file__))\n\n    while not tests_dir.endswith(\"tests\"):\n        tests_dir = os.path.dirname(tests_dir)\n\n    if append_path:\n        return os.path.join(tests_dir, append_path)\n    else:\n        return tests_dir\n\n\ndef parse_flag_from_env(key, default=False):\n    try:\n        value = os.environ[key]\n    except KeyError:\n        # KEY isn't set, default to `default`.\n        _value = default\n    else:\n        # KEY is set, convert it to True or False.\n        try:\n            _value = strtobool(value)\n        except ValueError:\n            # More values are supported, but let's keep the message simple.\n            raise ValueError(f\"If set, {key} must be yes or no.\")\n    return _value\n\n\n_run_slow_tests = parse_flag_from_env(\"RUN_SLOW\", default=False)\n_run_nightly_tests = parse_flag_from_env(\"RUN_NIGHTLY\", default=False)\n\n\ndef floats_tensor(shape, scale=1.0, rng=None, name=None):\n    \"\"\"Creates a random float32 tensor\"\"\"\n    if rng is None:\n        rng = global_rng\n\n    total_dims = 1\n    for dim in shape:\n        total_dims *= dim\n\n    values = []\n    for _ in range(total_dims):\n        values.append(rng.random() * scale)\n\n    return torch.tensor(data=values, dtype=torch.float).view(shape).contiguous()\n\n\ndef slow(test_case):\n    \"\"\"\n    Decorator marking a test as slow.\n\n    Slow tests are skipped by default. Set the RUN_SLOW environment variable to a truthy value to run them.\n\n    \"\"\"\n    return unittest.skipUnless(_run_slow_tests, \"test is slow\")(test_case)\n\n\ndef nightly(test_case):\n    \"\"\"\n    Decorator marking a test that runs nightly in the diffusers CI.\n\n    Slow tests are skipped by default. Set the RUN_NIGHTLY environment variable to a truthy value to run them.\n\n    \"\"\"\n    return unittest.skipUnless(_run_nightly_tests, \"test is nightly\")(test_case)\n\n\ndef require_torch(test_case):\n    \"\"\"\n    Decorator marking a test that requires PyTorch. These tests are skipped when PyTorch isn't installed.\n    \"\"\"\n    return unittest.skipUnless(is_torch_available(), \"test requires PyTorch\")(test_case)\n\n\ndef require_torch_2(test_case):\n    \"\"\"\n    Decorator marking a test that requires PyTorch 2. These tests are skipped when it isn't installed.\n    \"\"\"\n    return unittest.skipUnless(is_torch_available() and is_torch_version(\">=\", \"2.0.0\"), \"test requires PyTorch 2\")(\n        test_case\n    )\n\n\ndef require_torch_gpu(test_case):\n    \"\"\"Decorator marking a test that requires CUDA and PyTorch.\"\"\"\n    return unittest.skipUnless(is_torch_available() and torch_device == \"cuda\", \"test requires PyTorch+CUDA\")(\n        test_case\n    )\n\n\ndef skip_mps(test_case):\n    \"\"\"Decorator marking a test to skip if torch_device is 'mps'\"\"\"\n    return unittest.skipUnless(torch_device != \"mps\", \"test requires non 'mps' device\")(test_case)\n\n\ndef require_flax(test_case):\n    \"\"\"\n    Decorator marking a test that requires JAX & Flax. 
These tests are skipped when one / both are not installed\n    \"\"\"\n    return unittest.skipUnless(is_flax_available(), \"test requires JAX & Flax\")(test_case)\n\n\ndef require_compel(test_case):\n    \"\"\"\n    Decorator marking a test that requires compel: https://github.com/damian0815/compel. These tests are skipped when\n    the library is not installed.\n    \"\"\"\n    return unittest.skipUnless(is_compel_available(), \"test requires compel\")(test_case)\n\n\ndef require_onnxruntime(test_case):\n    \"\"\"\n    Decorator marking a test that requires onnxruntime. These tests are skipped when onnxruntime isn't installed.\n    \"\"\"\n    return unittest.skipUnless(is_onnx_available(), \"test requires onnxruntime\")(test_case)\n\n\ndef require_note_seq(test_case):\n    \"\"\"\n    Decorator marking a test that requires note_seq. These tests are skipped when note_seq isn't installed.\n    \"\"\"\n    return unittest.skipUnless(is_note_seq_available(), \"test requires note_seq\")(test_case)\n\n\ndef require_torchsde(test_case):\n    \"\"\"\n    Decorator marking a test that requires torchsde. These tests are skipped when torchsde isn't installed.\n    \"\"\"\n    return unittest.skipUnless(is_torchsde_available(), \"test requires torchsde\")(test_case)\n\n\ndef load_numpy(arry: Union[str, np.ndarray], local_path: Optional[str] = None) -> np.ndarray:\n    if isinstance(arry, str):\n        # local_path = \"/home/patrick_huggingface_co/\"\n        if local_path is not None:\n            # local_path can be passed to correct images of tests\n            return os.path.join(local_path, \"/\".join([arry.split(\"/\")[-5], arry.split(\"/\")[-2], arry.split(\"/\")[-1]]))\n        elif arry.startswith(\"http://\") or arry.startswith(\"https://\"):\n            response = requests.get(arry)\n            response.raise_for_status()\n            arry = np.load(BytesIO(response.content))\n        elif os.path.isfile(arry):\n            arry = np.load(arry)\n        else:\n            raise ValueError(\n                f\"Incorrect path or url, URLs must start with `http://` or `https://`, and {arry} is not a valid path\"\n            )\n    elif isinstance(arry, np.ndarray):\n        pass\n    else:\n        raise ValueError(\n            \"Incorrect format used for numpy ndarray. Should be an url linking to an image, a local path, or a\"\n            \" ndarray.\"\n        )\n\n    return arry\n\n\ndef load_pt(url: str):\n    response = requests.get(url)\n    response.raise_for_status()\n    arry = torch.load(BytesIO(response.content))\n    return arry\n\n\ndef load_image(image: Union[str, PIL.Image.Image]) -> PIL.Image.Image:\n    \"\"\"\n    Args:\n    Loads `image` to a PIL Image.\n        image (`str` or `PIL.Image.Image`):\n            The image to convert to the PIL Image format.\n    Returns:\n        `PIL.Image.Image`: A PIL Image.\n    \"\"\"\n    if isinstance(image, str):\n        if image.startswith(\"http://\") or image.startswith(\"https://\"):\n            image = PIL.Image.open(requests.get(image, stream=True).raw)\n        elif os.path.isfile(image):\n            image = PIL.Image.open(image)\n        else:\n            raise ValueError(\n                f\"Incorrect path or url, URLs must start with `http://` or `https://`, and {image} is not a valid path\"\n            )\n    elif isinstance(image, PIL.Image.Image):\n        image = image\n    else:\n        raise ValueError(\n            \"Incorrect format used for image. 
Should be an url linking to an image, a local path, or a PIL image.\"\n        )\n    image = PIL.ImageOps.exif_transpose(image)\n    image = image.convert(\"RGB\")\n    return image\n\n\ndef preprocess_image(image: PIL.Image, batch_size: int):\n    w, h = image.size\n    w, h = (x - x % 8 for x in (w, h))  # resize to integer multiple of 8\n    image = image.resize((w, h), resample=PIL.Image.LANCZOS)\n    image = np.array(image).astype(np.float32) / 255.0\n    image = np.vstack([image[None].transpose(0, 3, 1, 2)] * batch_size)\n    image = torch.from_numpy(image)\n    return 2.0 * image - 1.0\n\n\ndef export_to_video(video_frames: List[np.ndarray], output_video_path: str = None) -> str:\n    if is_opencv_available():\n        import cv2\n    else:\n        raise ImportError(BACKENDS_MAPPING[\"opencv\"][1].format(\"export_to_video\"))\n    if output_video_path is None:\n        output_video_path = tempfile.NamedTemporaryFile(suffix=\".mp4\").name\n\n    fourcc = cv2.VideoWriter_fourcc(*\"mp4v\")\n    h, w, c = video_frames[0].shape\n    video_writer = cv2.VideoWriter(output_video_path, fourcc, fps=8, frameSize=(w, h))\n    for i in range(len(video_frames)):\n        img = cv2.cvtColor(video_frames[i], cv2.COLOR_RGB2BGR)\n        video_writer.write(img)\n    return output_video_path\n\n\ndef load_hf_numpy(path) -> np.ndarray:\n    if not path.startswith(\"http://\") or path.startswith(\"https://\"):\n        path = os.path.join(\n            \"https://huggingface.co/datasets/fusing/diffusers-testing/resolve/main\", urllib.parse.quote(path)\n        )\n\n    return load_numpy(path)\n\n\n# --- pytest conf functions --- #\n\n# to avoid multiple invocation from tests/conftest.py and examples/conftest.py - make sure it's called only once\npytest_opt_registered = {}\n\n\ndef pytest_addoption_shared(parser):\n    \"\"\"\n    This function is to be called from `conftest.py` via `pytest_addoption` wrapper that has to be defined there.\n\n    It allows loading both `conftest.py` files at once without causing a failure due to adding the same `pytest`\n    option.\n\n    \"\"\"\n    option = \"--make-reports\"\n    if option not in pytest_opt_registered:\n        parser.addoption(\n            option,\n            action=\"store\",\n            default=False,\n            help=\"generate report files. The value of this option is used as a prefix to report names\",\n        )\n        pytest_opt_registered[option] = 1\n\n\ndef pytest_terminal_summary_main(tr, id):\n    \"\"\"\n    Generate multiple reports at the end of test suite run - each report goes into a dedicated file in the current\n    directory. 
The report files are prefixed with the test suite name.\n\n    This function emulates --duration and -rA pytest arguments.\n\n    This function is to be called from `conftest.py` via `pytest_terminal_summary` wrapper that has to be defined\n    there.\n\n    Args:\n    - tr: `terminalreporter` passed from `conftest.py`\n    - id: unique id like `tests` or `examples` that will be incorporated into the final reports filenames - this is\n      needed as some jobs have multiple runs of pytest, so we can't have them overwrite each other.\n\n    NB: this functions taps into a private _pytest API and while unlikely, it could break should\n    pytest do internal changes - also it calls default internal methods of terminalreporter which\n    can be hijacked by various `pytest-` plugins and interfere.\n\n    \"\"\"\n    from _pytest.config import create_terminal_writer\n\n    if not len(id):\n        id = \"tests\"\n\n    config = tr.config\n    orig_writer = config.get_terminal_writer()\n    orig_tbstyle = config.option.tbstyle\n    orig_reportchars = tr.reportchars\n\n    dir = \"reports\"\n    Path(dir).mkdir(parents=True, exist_ok=True)\n    report_files = {\n        k: f\"{dir}/{id}_{k}.txt\"\n        for k in [\n            \"durations\",\n            \"errors\",\n            \"failures_long\",\n            \"failures_short\",\n            \"failures_line\",\n            \"passes\",\n            \"stats\",\n            \"summary_short\",\n            \"warnings\",\n        ]\n    }\n\n    # custom durations report\n    # note: there is no need to call pytest --durations=XX to get this separate report\n    # adapted from https://github.com/pytest-dev/pytest/blob/897f151e/src/_pytest/runner.py#L66\n    dlist = []\n    for replist in tr.stats.values():\n        for rep in replist:\n            if hasattr(rep, \"duration\"):\n                dlist.append(rep)\n    if dlist:\n        dlist.sort(key=lambda x: x.duration, reverse=True)\n        with open(report_files[\"durations\"], \"w\") as f:\n            durations_min = 0.05  # sec\n            f.write(\"slowest durations\\n\")\n            for i, rep in enumerate(dlist):\n                if rep.duration < durations_min:\n                    f.write(f\"{len(dlist)-i} durations < {durations_min} secs were omitted\")\n                    break\n                f.write(f\"{rep.duration:02.2f}s {rep.when:<8} {rep.nodeid}\\n\")\n\n    def summary_failures_short(tr):\n        # expecting that the reports were --tb=long (default) so we chop them off here to the last frame\n        reports = tr.getreports(\"failed\")\n        if not reports:\n            return\n        tr.write_sep(\"=\", \"FAILURES SHORT STACK\")\n        for rep in reports:\n            msg = tr._getfailureheadline(rep)\n            tr.write_sep(\"_\", msg, red=True, bold=True)\n            # chop off the optional leading extra frames, leaving only the last one\n            longrepr = re.sub(r\".*_ _ _ (_ ){10,}_ _ \", \"\", rep.longreprtext, 0, re.M | re.S)\n            tr._tw.line(longrepr)\n            # note: not printing out any rep.sections to keep the report short\n\n    # use ready-made report funcs, we are just hijacking the filehandle to log to a dedicated file each\n    # adapted from https://github.com/pytest-dev/pytest/blob/897f151e/src/_pytest/terminal.py#L814\n    # note: some pytest plugins may interfere by hijacking the default `terminalreporter` (e.g.\n    # pytest-instafail does that)\n\n    # report failures with line/short/long styles\n    config.option.tbstyle = 
\"auto\"  # full tb\n    with open(report_files[\"failures_long\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.summary_failures()\n\n    # config.option.tbstyle = \"short\" # short tb\n    with open(report_files[\"failures_short\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        summary_failures_short(tr)\n\n    config.option.tbstyle = \"line\"  # one line per error\n    with open(report_files[\"failures_line\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.summary_failures()\n\n    with open(report_files[\"errors\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.summary_errors()\n\n    with open(report_files[\"warnings\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.summary_warnings()  # normal warnings\n        tr.summary_warnings()  # final warnings\n\n    tr.reportchars = \"wPpsxXEf\"  # emulate -rA (used in summary_passes() and short_test_summary())\n    with open(report_files[\"passes\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.summary_passes()\n\n    with open(report_files[\"summary_short\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.short_test_summary()\n\n    with open(report_files[\"stats\"], \"w\") as f:\n        tr._tw = create_terminal_writer(config, f)\n        tr.summary_stats()\n\n    # restore:\n    tr._tw = orig_writer\n    tr.reportchars = orig_reportchars\n    config.option.tbstyle = orig_tbstyle\n\n\nclass CaptureLogger:\n    \"\"\"\n    Args:\n    Context manager to capture `logging` streams\n        logger: 'logging` logger object\n    Returns:\n        The captured output is available via `self.out`\n    Example:\n    ```python\n    >>> from diffusers import logging\n    >>> from diffusers.testing_utils import CaptureLogger\n\n    >>> msg = \"Testing 1, 2, 3\"\n    >>> logging.set_verbosity_info()\n    >>> logger = logging.get_logger(\"diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.py\")\n    >>> with CaptureLogger(logger) as cl:\n    ...     logger.info(msg)\n    >>> assert cl.out, msg + \"\\n\"\n    ```\n    \"\"\"\n\n    def __init__(self, logger):\n        self.logger = logger\n        self.io = StringIO()\n        self.sh = logging.StreamHandler(self.io)\n        self.out = \"\"\n\n    def __enter__(self):\n        self.logger.addHandler(self.sh)\n        return self\n\n    def __exit__(self, *exc):\n        self.logger.removeHandler(self.sh)\n        self.out = self.io.getvalue()\n\n    def __repr__(self):\n        return f\"captured: {self.out}\\n\"\n"
  },
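A hypothetical test sketch showing how the decorators and tensor helpers above are typically combined, assuming PyTorch is installed (otherwise `torch_device` is not defined); `ExampleTests` is not a real test case from the repository:

```python
# Sketch only: combine require_torch, slow, floats_tensor, and torch_device in a unittest.
import unittest

from diffusers.utils.testing_utils import floats_tensor, require_torch, slow, torch_device


@require_torch
class ExampleTests(unittest.TestCase):
    def test_random_sample_shape(self):
        sample = floats_tensor((2, 3, 8, 8)).to(torch_device)
        self.assertEqual(tuple(sample.shape), (2, 3, 8, 8))

    @slow  # skipped unless RUN_SLOW is set to a truthy value
    def test_expensive_path(self):
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()
```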
  {
    "path": "diffusers/utils/torch_utils.py",
    "content": "# Copyright 2023 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nPyTorch utilities: Utilities related to PyTorch\n\"\"\"\nfrom typing import List, Optional, Tuple, Union\n\nfrom . import logging\nfrom .import_utils import is_torch_available, is_torch_version\n\n\nif is_torch_available():\n    import torch\n\nlogger = logging.get_logger(__name__)  # pylint: disable=invalid-name\n\ntry:\n    from torch._dynamo import allow_in_graph as maybe_allow_in_graph\nexcept (ImportError, ModuleNotFoundError):\n\n    def maybe_allow_in_graph(cls):\n        return cls\n\n\ndef randn_tensor(\n    shape: Union[Tuple, List],\n    generator: Optional[Union[List[\"torch.Generator\"], \"torch.Generator\"]] = None,\n    device: Optional[\"torch.device\"] = None,\n    dtype: Optional[\"torch.dtype\"] = None,\n    layout: Optional[\"torch.layout\"] = None,\n):\n    \"\"\"This is a helper function that allows to create random tensors on the desired `device` with the desired `dtype`. When\n    passing a list of generators one can seed each batched size individually. If CPU generators are passed the tensor\n    will always be created on CPU.\n    \"\"\"\n    # device on which tensor is created defaults to device\n    rand_device = device\n    batch_size = shape[0]\n\n    layout = layout or torch.strided\n    device = device or torch.device(\"cpu\")\n\n    if generator is not None:\n        gen_device_type = generator.device.type if not isinstance(generator, list) else generator[0].device.type\n        if gen_device_type != device.type and gen_device_type == \"cpu\":\n            rand_device = \"cpu\"\n            if device != \"mps\":\n                logger.info(\n                    f\"The passed generator was created on 'cpu' even though a tensor on {device} was expected.\"\n                    f\" Tensors will be created on 'cpu' and then moved to {device}. 
Note that one can probably\"\n                    f\" slightly speed up this function by passing a generator that was created on the {device} device.\"\n                )\n        elif gen_device_type != device.type and gen_device_type == \"cuda\":\n            raise ValueError(f\"Cannot generate a {device} tensor from a generator of type {gen_device_type}.\")\n\n    if isinstance(generator, list):\n        shape = (1,) + shape[1:]\n        latents = [\n            torch.randn(shape, generator=generator[i], device=rand_device, dtype=dtype, layout=layout)\n            for i in range(batch_size)\n        ]\n        latents = torch.cat(latents, dim=0).to(device)\n    else:\n        latents = torch.randn(shape, generator=generator, device=rand_device, dtype=dtype, layout=layout).to(device)\n\n    return latents\n\n\ndef is_compiled_module(module):\n    \"\"\"Check whether the module was compiled with torch.compile()\"\"\"\n    if is_torch_version(\"<\", \"2.0.0\") or not hasattr(torch, \"_dynamo\"):\n        return False\n    return isinstance(module, torch._dynamo.eval_frame.OptimizedModule)\n"
  },
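A minimal sketch of `randn_tensor` with seeded CPU generators, assuming PyTorch is installed and the module path implied by the file layout:

```python
# Sketch only: per-sample generators make each batch element reproducible on its own.
import torch

from diffusers.utils.torch_utils import randn_tensor

shape = (2, 4, 64, 64)

# One generator per batch element; tensors are drawn on CPU and moved to the target device.
generators = [torch.Generator("cpu").manual_seed(i) for i in range(shape[0])]
latents = randn_tensor(shape, generator=generators, device=torch.device("cpu"), dtype=torch.float32)

# A single generator draws the whole batch in one call instead.
latents_single = randn_tensor(shape, generator=torch.Generator("cpu").manual_seed(0))
```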
  {
    "path": "fid_score.py",
    "content": "\"\"\"Calculates the Frechet Inception Distance (FID) to evalulate GANs\n\nThe FID metric calculates the distance between two distributions of images.\nTypically, we have summary statistics (mean & covariance matrix) of one\nof these distributions, while the 2nd distribution is given by a GAN.\n\nWhen run as a stand-alone program, it compares the distribution of\nimages that are stored as PNG/JPEG at a specified location with a\ndistribution given by summary statistics (in pickle format).\n\nThe FID is calculated by assuming that X_1 and X_2 are the activations of\nthe pool_3 layer of the inception net for generated samples and real world\nsamples respectively.\n\nSee --help to see further details.\n\nCode apapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead\nof Tensorflow\n\nCopyright 2018 Institute of Bioinformatics, JKU Linz\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n   http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\nimport os\nimport pathlib\nfrom argparse import ArgumentDefaultsHelpFormatter, ArgumentParser\n\nimport numpy as np\nimport torch\nimport torchvision.transforms as TF\nfrom PIL import Image\nfrom scipy import linalg\nfrom torch.nn.functional import adaptive_avg_pool2d\n\ntry:\n    from tqdm import tqdm\nexcept ImportError:\n    # If tqdm is not available, provide a mock version of it\n    def tqdm(x):\n        return x\n\nfrom inception import InceptionV3\n\nparser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)\nparser.add_argument('--batch-size', type=int, default=50,\n                    help='Batch size to use')\nparser.add_argument('--dataset_name', type=str, default=None)\nparser.add_argument('--num-workers', type=int,\n                    help=('Number of processes to use for data loading. '\n                          'Defaults to `min(8, num_cpus)`'))\nparser.add_argument('--device', type=str, default=None,\n                    help='Device to use. Like cuda, cuda:0 or cpu')\nparser.add_argument('--dims', type=int, default=2048,\n                    choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),\n                    help=('Dimensionality of Inception features to use. '\n                          'By default, uses pool3 features'))\nparser.add_argument('--num_samples', type=int, default=None,\n                    help=('Number of samples for FID estimation'))\nparser.add_argument('--res', type=int, default=None,\n                    help=('Resolutions of samples for FID estimation'))\nparser.add_argument('--save-stats', action='store_true',\n                    help=('Generate an npz archive from a directory of samples. 
'\n                          'The first path is used as input and the second as output.'))\n\nparser.add_argument('path', type=str, nargs=2,\n                    help=('Paths to the generated images or '\n                          'to .npz statistic files'))\n\n\nIMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',\n                    'tif', 'tiff', 'webp'}\n\n\nclass ImagePathDataset(torch.utils.data.Dataset):\n    def __init__(self, files, transforms=None):\n        self.files = files\n        self.transforms = transforms\n\n    def __len__(self):\n        return len(self.files)\n\n    def __getitem__(self, i):\n        path = self.files[i]\n        img = Image.open(path).convert('RGB')\n        if self.transforms is not None:\n            img = self.transforms(img)\n        return img\n\n\ndef get_activations(files, model, batch_size=50, dims=2048, device='cpu',\n                    num_workers=1, res=None, dataset_name=None):\n    \"\"\"Calculates the activations of the pool_3 layer for all images.\n\n    Params:\n    -- files       : List of image files paths\n    -- model       : Instance of inception model\n    -- batch_size  : Batch size of images for the model to process at once.\n                     Make sure that the number of samples is a multiple of\n                     the batch size, otherwise some samples are ignored. This\n                     behavior is retained to match the original FID score\n                     implementation.\n    -- dims        : Dimensionality of features returned by Inception\n    -- device      : Device to run calculations\n    -- num_workers : Number of parallel dataloader workers\n\n    Returns:\n    -- A numpy array of dimension (num images, dims) that contains the\n       activations of the given tensor when feeding inception with the\n       query tensor.\n    \"\"\"\n    model.eval()\n\n    if batch_size > len(files):\n        print(('Warning: batch size is bigger than the data size. 
'\n               'Setting batch size to data size'))\n        batch_size = len(files)\n\n    if res is None:\n        trans = TF.ToTensor()\n    else:\n        if dataset_name == 'celeba':\n            from datasets import Crop\n            cx = 89\n            cy = 121\n            x1 = cy - 64\n            x2 = cy + 64\n            y1 = cx - 64\n            y2 = cx + 64\n            trans = TF.Compose([\n                        Crop(x1, x2, y1, y2),\n                        TF.Resize(res),\n                        TF.ToTensor(),\n            ])\n        else:\n            trans = TF.Compose([\n                TF.Resize(res),\n                TF.CenterCrop(res),\n                TF.ToTensor()\n            ])\n    \n    dataset = ImagePathDataset(files, transforms=trans)\n    dataloader = torch.utils.data.DataLoader(dataset,\n                                             batch_size=batch_size,\n                                             shuffle=False,\n                                             drop_last=False,\n                                             num_workers=num_workers)\n\n    pred_arr = np.empty((len(files), dims))\n\n    start_idx = 0\n\n    for batch in tqdm(dataloader):\n        batch = batch.to(device)\n\n        with torch.no_grad():\n            pred = model(batch)[0]\n\n        # If model output is not scalar, apply global spatial average pooling.\n        # This happens if you choose a dimensionality not equal 2048.\n        if pred.size(2) != 1 or pred.size(3) != 1:\n            pred = adaptive_avg_pool2d(pred, output_size=(1, 1))\n\n        pred = pred.squeeze(3).squeeze(2).cpu().numpy()\n\n        pred_arr[start_idx:start_idx + pred.shape[0]] = pred\n\n        start_idx = start_idx + pred.shape[0]\n\n    return pred_arr\n\n\ndef calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):\n    \"\"\"Numpy implementation of the Frechet Distance.\n    The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)\n    and X_2 ~ N(mu_2, C_2) is\n            d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).\n\n    Stable version by Dougal J. 
Sutherland.\n\n    Params:\n    -- mu1   : Numpy array containing the activations of a layer of the\n               inception net (like returned by the function 'get_predictions')\n               for generated samples.\n    -- mu2   : The sample mean over activations, precalculated on an\n               representative data set.\n    -- sigma1: The covariance matrix over activations for generated samples.\n    -- sigma2: The covariance matrix over activations, precalculated on an\n               representative data set.\n\n    Returns:\n    --   : The Frechet Distance.\n    \"\"\"\n\n    mu1 = np.atleast_1d(mu1)\n    mu2 = np.atleast_1d(mu2)\n\n    sigma1 = np.atleast_2d(sigma1)\n    sigma2 = np.atleast_2d(sigma2)\n\n    assert mu1.shape == mu2.shape, \\\n        'Training and test mean vectors have different lengths'\n    assert sigma1.shape == sigma2.shape, \\\n        'Training and test covariances have different dimensions'\n\n    diff = mu1 - mu2\n\n    # Product might be almost singular\n    covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)\n    if not np.isfinite(covmean).all():\n        msg = ('fid calculation produces singular product; '\n               'adding %s to diagonal of cov estimates') % eps\n        print(msg)\n        offset = np.eye(sigma1.shape[0]) * eps\n        covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))\n\n    # Numerical error might give slight imaginary component\n    if np.iscomplexobj(covmean):\n        if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):\n            m = np.max(np.abs(covmean.imag))\n            raise ValueError('Imaginary component {}'.format(m))\n        covmean = covmean.real\n\n    tr_covmean = np.trace(covmean)\n\n    return (diff.dot(diff) + np.trace(sigma1)\n            + np.trace(sigma2) - 2 * tr_covmean)\n\n\ndef calculate_activation_statistics(files, model, batch_size=50, dims=2048,\n                                    device='cpu', num_workers=1, res=None, dataset_name=None):\n    \"\"\"Calculation of the statistics used by the FID.\n    Params:\n    -- files       : List of image files paths\n    -- model       : Instance of inception model\n    -- batch_size  : The images numpy array is split into batches with\n                     batch size batch_size. 
A reasonable batch size\n                     depends on the hardware.\n    -- dims        : Dimensionality of features returned by Inception\n    -- device      : Device to run calculations\n    -- num_workers : Number of parallel dataloader workers\n\n    Returns:\n    -- mu    : The mean over samples of the activations of the pool_3 layer of\n               the inception model.\n    -- sigma : The covariance matrix of the activations of the pool_3 layer of\n               the inception model.\n    \"\"\"\n    act = get_activations(files, model, batch_size, dims, device, num_workers, res=res, dataset_name=dataset_name)\n    mu = np.mean(act, axis=0)\n    sigma = np.cov(act, rowvar=False)\n    return mu, sigma\n\n\ndef compute_statistics_of_path(path, model, batch_size, dims, device,\n                               num_workers=1, num_samples=None, res=None, dataset_name=None):\n    if path.endswith('.npz'):\n        with np.load(path) as f:\n            m, s = f['mu'][:], f['sigma'][:]\n    else:\n        path = pathlib.Path(path)\n\n        files = sorted([file for ext in IMAGE_EXTENSIONS\n                       for file in path.glob('**/*.{}'.format(ext))])\n        if num_samples is not None:\n            #import random\n            #files = random.sample(files, num_samples)\n            files = files[:num_samples]\n        print(\"Found %d files.\" % len(files))\n        m, s = calculate_activation_statistics(files, model, batch_size,\n                                               dims, device, num_workers, res=res, dataset_name=dataset_name)\n\n    return m, s\n\n\ndef calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1, num_samples=None, res=None, dataset_name=None):\n    \"\"\"Calculates the FID of two paths\"\"\"\n    for p in paths:\n        if not os.path.exists(p):\n            raise RuntimeError('Invalid path: %s' % p)\n\n    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]\n\n    model = InceptionV3([block_idx]).to(device)\n\n    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n    m2, s2 = compute_statistics_of_path(paths[1], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n    fid_value = calculate_frechet_distance(m1, s1, m2, s2)\n\n    return fid_value\n\n\ndef save_fid_stats(paths, batch_size, device, dims, num_workers=1, num_samples=None, res=None, dataset_name=None):\n    \"\"\"Calculates the FID of two paths\"\"\"\n    if not os.path.exists(paths[0]):\n        raise RuntimeError('Invalid path: %s' % paths[0])\n\n    if os.path.exists(paths[1]):\n        raise RuntimeError('Existing output file: %s' % paths[1])\n\n    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]\n\n    model = InceptionV3([block_idx]).to(device)\n\n    print(f\"Saving statistics for {paths[0]}\")\n\n    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n\n    np.savez_compressed(paths[1], mu=m1, sigma=s1)\n\n\ndef main():\n    args = parser.parse_args()\n\n    if args.device is None:\n        device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')\n    else:\n        device = torch.device(args.device)\n\n    if args.num_workers is None:\n        try:\n            num_cpus = 
len(os.sched_getaffinity(0))\n        except AttributeError:\n            # os.sched_getaffinity is not available under Windows, use\n            # os.cpu_count instead (which may not return the *available* number\n            # of CPUs).\n            num_cpus = os.cpu_count()\n\n        num_workers = min(num_cpus, 8) if num_cpus is not None else 0\n    else:\n        num_workers = args.num_workers\n\n    if args.save_stats:\n        save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers, num_samples=args.num_samples, res=args.res, dataset_name=args.dataset_name)\n        return\n\n    fid_value = calculate_fid_given_paths(args.path,\n                                          args.batch_size,\n                                          device,\n                                          args.dims,\n                                          num_workers,\n                                          num_samples=args.num_samples,\n                                          res = args.res, dataset_name=args.dataset_name)\n    print('FID: ', fid_value)\n\n\nif __name__ == '__main__':\n    main()"
  },
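`calculate_frechet_distance` in `fid_score.py` implements d² = ||μ₁ − μ₂||² + Tr(C₁ + C₂ − 2√(C₁C₂)). A quick sanity check against the closed form for diagonal covariances (a sketch; it assumes `fid_score.py` is importable, and the two Gaussians are illustrative):

```python
import numpy as np

from fid_score import calculate_frechet_distance

# Two 3-d Gaussians with diagonal covariances, so sqrt(C1 @ C2) is just the
# element-wise square root of the (diagonal) product.
mu1, mu2 = np.zeros(3), np.ones(3)
sigma1, sigma2 = np.eye(3), 4.0 * np.eye(3)

# d^2 = ||mu1 - mu2||^2 + Tr(C1 + C2 - 2*sqrt(C1*C2)) = 3 + (3 + 12 - 12) = 6
expected = np.sum((mu1 - mu2) ** 2) + np.trace(sigma1 + sigma2 - 2 * np.sqrt(sigma1 @ sigma2))
print(expected)                                              # 6.0
print(calculate_frechet_distance(mu1, sigma1, mu2, sigma2))  # ~6.0
```

From the command line, the same module compares two image folders (or precomputed `.npz` statistics), e.g. `python fid_score.py path/to/real path/to/generated` with illustrative paths.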
  {
    "path": "inception.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision\n\ntry:\n    from torchvision.models.utils import load_state_dict_from_url\nexcept ImportError:\n    from torch.utils.model_zoo import load_url as load_state_dict_from_url\n\n# Inception weights ported to Pytorch from\n# http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz\nFID_WEIGHTS_URL = 'https://github.com/mseitzer/pytorch-fid/releases/download/fid_weights/pt_inception-2015-12-05-6726825d.pth'  # noqa: E501\n\n\nclass InceptionV3(nn.Module):\n    \"\"\"Pretrained InceptionV3 network returning feature maps\"\"\"\n\n    # Index of default block of inception to return,\n    # corresponds to output of final average pooling\n    DEFAULT_BLOCK_INDEX = 3\n\n    # Maps feature dimensionality to their output blocks indices\n    BLOCK_INDEX_BY_DIM = {\n        64: 0,   # First max pooling features\n        192: 1,  # Second max pooling featurs\n        768: 2,  # Pre-aux classifier features\n        2048: 3  # Final average pooling features\n    }\n\n    def __init__(self,\n                 output_blocks=(DEFAULT_BLOCK_INDEX,),\n                 resize_input=True,\n                 normalize_input=True,\n                 requires_grad=False,\n                 use_fid_inception=True):\n        \"\"\"Build pretrained InceptionV3\n\n        Parameters\n        ----------\n        output_blocks : list of int\n            Indices of blocks to return features of. Possible values are:\n                - 0: corresponds to output of first max pooling\n                - 1: corresponds to output of second max pooling\n                - 2: corresponds to output which is fed to aux classifier\n                - 3: corresponds to output of final average pooling\n        resize_input : bool\n            If true, bilinearly resizes input to width and height 299 before\n            feeding input to model. As the network without fully connected\n            layers is fully convolutional, it should be able to handle inputs\n            of arbitrary size, so resizing might not be strictly needed\n        normalize_input : bool\n            If true, scales the input from range (0, 1) to the range the\n            pretrained Inception network expects, namely (-1, 1)\n        requires_grad : bool\n            If true, parameters of the model require gradients. Possibly useful\n            for finetuning the network\n        use_fid_inception : bool\n            If true, uses the pretrained Inception model used in Tensorflow's\n            FID implementation. If false, uses the pretrained Inception model\n            available in torchvision. The FID Inception model has different\n            weights and a slightly different structure from torchvision's\n            Inception model. 
If you want to compute FID scores, you are\n            strongly advised to set this parameter to true to get comparable\n            results.\n        \"\"\"\n        super(InceptionV3, self).__init__()\n\n        self.resize_input = resize_input\n        self.normalize_input = normalize_input\n        self.output_blocks = sorted(output_blocks)\n        self.last_needed_block = max(output_blocks)\n\n        assert self.last_needed_block <= 3, \\\n            'Last possible output block index is 3'\n\n        self.blocks = nn.ModuleList()\n\n        if use_fid_inception:\n            inception = fid_inception_v3()\n        else:\n            inception = _inception_v3(weights='DEFAULT')\n\n        # Block 0: input to maxpool1\n        block0 = [\n            inception.Conv2d_1a_3x3,\n            inception.Conv2d_2a_3x3,\n            inception.Conv2d_2b_3x3,\n            nn.MaxPool2d(kernel_size=3, stride=2)\n        ]\n        self.blocks.append(nn.Sequential(*block0))\n\n        # Block 1: maxpool1 to maxpool2\n        if self.last_needed_block >= 1:\n            block1 = [\n                inception.Conv2d_3b_1x1,\n                inception.Conv2d_4a_3x3,\n                nn.MaxPool2d(kernel_size=3, stride=2)\n            ]\n            self.blocks.append(nn.Sequential(*block1))\n\n        # Block 2: maxpool2 to aux classifier\n        if self.last_needed_block >= 2:\n            block2 = [\n                inception.Mixed_5b,\n                inception.Mixed_5c,\n                inception.Mixed_5d,\n                inception.Mixed_6a,\n                inception.Mixed_6b,\n                inception.Mixed_6c,\n                inception.Mixed_6d,\n                inception.Mixed_6e,\n            ]\n            self.blocks.append(nn.Sequential(*block2))\n\n        # Block 3: aux classifier to final avgpool\n        if self.last_needed_block >= 3:\n            block3 = [\n                inception.Mixed_7a,\n                inception.Mixed_7b,\n                inception.Mixed_7c,\n                nn.AdaptiveAvgPool2d(output_size=(1, 1))\n            ]\n            self.blocks.append(nn.Sequential(*block3))\n\n        for param in self.parameters():\n            param.requires_grad = requires_grad\n\n    def forward(self, inp):\n        \"\"\"Get Inception feature maps\n\n        Parameters\n        ----------\n        inp : torch.autograd.Variable\n            Input tensor of shape Bx3xHxW. 
Values are expected to be in\n            range (0, 1)\n\n        Returns\n        -------\n        List of torch.autograd.Variable, corresponding to the selected output\n        block, sorted ascending by index\n        \"\"\"\n        outp = []\n        x = inp\n\n        if self.resize_input:\n            x = F.interpolate(x,\n                              size=(299, 299),\n                              mode='bilinear',\n                              align_corners=False)\n\n        if self.normalize_input:\n            x = 2 * x - 1  # Scale from range (0, 1) to range (-1, 1)\n\n        for idx, block in enumerate(self.blocks):\n            x = block(x)\n            if idx in self.output_blocks:\n                outp.append(x)\n\n            if idx == self.last_needed_block:\n                break\n\n        return outp\n\n\ndef _inception_v3(*args, **kwargs):\n    \"\"\"Wraps `torchvision.models.inception_v3`\"\"\"\n    try:\n        version = tuple(map(int, torchvision.__version__.split('.')[:2]))\n    except ValueError:\n        # Just a caution against weird version strings\n        version = (0,)\n\n    # Skips default weight inititialization if supported by torchvision\n    # version. See https://github.com/mseitzer/pytorch-fid/issues/28.\n    if version >= (0, 6):\n        kwargs['init_weights'] = False\n\n    # Backwards compatibility: `weights` argument was handled by `pretrained`\n    # argument prior to version 0.13.\n    if version < (0, 13) and 'weights' in kwargs:\n        if kwargs['weights'] == 'DEFAULT':\n            kwargs['pretrained'] = True\n        elif kwargs['weights'] is None:\n            kwargs['pretrained'] = False\n        else:\n            raise ValueError(\n                'weights=={} not supported in torchvision {}'.format(\n                    kwargs['weights'], torchvision.__version__\n                )\n            )\n        del kwargs['weights']\n\n    return torchvision.models.inception_v3(*args, **kwargs)\n\n\ndef fid_inception_v3():\n    \"\"\"Build pretrained Inception model for FID computation\n\n    The Inception model for FID computation uses a different set of weights\n    and has a slightly different structure than torchvision's Inception.\n\n    This method first constructs torchvision's Inception and then patches the\n    necessary parts that are different in the FID Inception model.\n    \"\"\"\n    inception = _inception_v3(num_classes=1008,\n                              aux_logits=False,\n                              weights=None)\n    inception.Mixed_5b = FIDInceptionA(192, pool_features=32)\n    inception.Mixed_5c = FIDInceptionA(256, pool_features=64)\n    inception.Mixed_5d = FIDInceptionA(288, pool_features=64)\n    inception.Mixed_6b = FIDInceptionC(768, channels_7x7=128)\n    inception.Mixed_6c = FIDInceptionC(768, channels_7x7=160)\n    inception.Mixed_6d = FIDInceptionC(768, channels_7x7=160)\n    inception.Mixed_6e = FIDInceptionC(768, channels_7x7=192)\n    inception.Mixed_7b = FIDInceptionE_1(1280)\n    inception.Mixed_7c = FIDInceptionE_2(2048)\n\n    state_dict = load_state_dict_from_url(FID_WEIGHTS_URL, progress=True)\n    inception.load_state_dict(state_dict)\n    return inception\n\n\nclass FIDInceptionA(torchvision.models.inception.InceptionA):\n    \"\"\"InceptionA block patched for FID computation\"\"\"\n    def __init__(self, in_channels, pool_features):\n        super(FIDInceptionA, self).__init__(in_channels, pool_features)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        
branch5x5 = self.branch5x5_1(x)\n        branch5x5 = self.branch5x5_2(branch5x5)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionC(torchvision.models.inception.InceptionC):\n    \"\"\"InceptionC block patched for FID computation\"\"\"\n    def __init__(self, in_channels, channels_7x7):\n        super(FIDInceptionC, self).__init__(in_channels, channels_7x7)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch7x7 = self.branch7x7_1(x)\n        branch7x7 = self.branch7x7_2(branch7x7)\n        branch7x7 = self.branch7x7_3(branch7x7)\n\n        branch7x7dbl = self.branch7x7dbl_1(x)\n        branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionE_1(torchvision.models.inception.InceptionE):\n    \"\"\"First InceptionE block patched for FID computation\"\"\"\n    def __init__(self, in_channels):\n        super(FIDInceptionE_1, self).__init__(in_channels)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionE_2(torchvision.models.inception.InceptionE):\n    \"\"\"Second InceptionE block patched for FID computation\"\"\"\n    def __init__(self, in_channels):\n        super(FIDInceptionE_2, self).__init__(in_channels)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        
branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        # Patch: The FID Inception model uses max pooling instead of average\n        # pooling. This is likely an error in this specific Inception\n        # implementation, as other Inception models use average pooling here\n        # (which matches the description in the paper).\n        branch_pool = F.max_pool2d(x, kernel_size=3, stride=1, padding=1)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)"
  },
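A minimal usage sketch of the `InceptionV3` wrapper above for extracting the 2048-d pool3 features that FID uses (assumes `inception.py` is importable; constructing the model downloads the ported FID weights on first use, and the input batch here is random data purely for illustration):

```python
import torch

from inception import InceptionV3

# Block index for the 2048-d final-average-pooling features (the FID default).
block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]
model = InceptionV3([block_idx]).eval()

# Inputs are expected in (0, 1); the wrapper resizes to 299x299 and rescales to (-1, 1) itself.
images = torch.rand(4, 3, 256, 256)
with torch.no_grad():
    features = model(images)[0]   # one output tensor per requested block

print(features.shape)             # torch.Size([4, 2048, 1, 1])
```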
  {
    "path": "ldm_exp/LICENSE",
    "content": "MIT License\n\nCopyright (c) 2022 Machine Vision and Learning Group, LMU Munich\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "ldm_exp/README.md",
    "content": "# Latent Diffusion Models\n[arXiv](https://arxiv.org/abs/2112.10752) | [BibTeX](#bibtex)\n\n<p align=\"center\">\n<img src=assets/results.gif />\n</p>\n\n\n\n[**High-Resolution Image Synthesis with Latent Diffusion Models**](https://arxiv.org/abs/2112.10752)<br/>\n[Robin Rombach](https://github.com/rromb)\\*,\n[Andreas Blattmann](https://github.com/ablattmann)\\*,\n[Dominik Lorenz](https://github.com/qp-qp)\\,\n[Patrick Esser](https://github.com/pesser),\n[Björn Ommer](https://hci.iwr.uni-heidelberg.de/Staff/bommer)<br/>\n\\* equal contribution\n\n<p align=\"center\">\n<img src=assets/modelfigure.png />\n</p>\n\n## News\n\n### July 2022\n- Inference code and model weights to run our [retrieval-augmented diffusion models](https://arxiv.org/abs/2204.11824) are now available. See [this section](#retrieval-augmented-diffusion-models).\n### April 2022\n- Thanks to [Katherine Crowson](https://github.com/crowsonkb), classifier-free guidance received a ~2x speedup and the [PLMS sampler](https://arxiv.org/abs/2202.09778) is available. See also [this PR](https://github.com/CompVis/latent-diffusion/pull/51).\n\n- Our 1.45B [latent diffusion LAION model](#text-to-image) was integrated into [Huggingface Spaces 🤗](https://huggingface.co/spaces) using [Gradio](https://github.com/gradio-app/gradio). Try out the Web Demo: [![Hugging Face Spaces](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/multimodalart/latentdiffusion)\n\n- More pre-trained LDMs are available: \n  - A 1.45B [model](#text-to-image) trained on the [LAION-400M](https://arxiv.org/abs/2111.02114) database.\n  - A class-conditional model on ImageNet, achieving a FID of 3.6 when using [classifier-free guidance](https://openreview.net/pdf?id=qw8AKxfYbI) Available via a [colab notebook](https://colab.research.google.com/github/CompVis/latent-diffusion/blob/main/scripts/latent_imagenet_diffusion.ipynb) [![][colab]][colab-cin].\n  \n## Requirements\nA suitable [conda](https://conda.io/) environment named `ldm` can be created\nand activated with:\n\n```\nconda env create -f environment.yaml\nconda activate ldm\n```\n\n# Pretrained Models\nA general list of all available checkpoints is available in via our [model zoo](#model-zoo).\nIf you use any of these models in your work, we are always happy to receive a [citation](#bibtex).\n\n## Retrieval Augmented Diffusion Models\n![rdm-figure](assets/rdm-preview.jpg)\nWe include inference code to run our retrieval-augmented diffusion models (RDMs) as described in [https://arxiv.org/abs/2204.11824](https://arxiv.org/abs/2204.11824).\n\n\nTo get started, install the additionally required python packages into your `ldm` environment\n```shell script\npip install transformers==4.19.2 scann kornia==0.6.4 torchmetrics==0.6.0\npip install git+https://github.com/arogozhnikov/einops.git\n```\nand download the trained weights (preliminary ceckpoints):\n\n```bash\nmkdir -p models/rdm/rdm768x768/\nwget -O models/rdm/rdm768x768/model.ckpt https://ommer-lab.com/files/rdm/model.ckpt\n```\nAs these models are conditioned on a set of CLIP image embeddings, our RDMs support different inference modes, \nwhich are described in the following.\n#### RDM with text-prompt only (no explicit retrieval needed)\nSince CLIP offers a shared image/text feature space, and RDMs learn to cover a neighborhood of a given\nexample during training, we can directly take a CLIP text embedding of a given prompt and condition on it.\nRun this mode via\n```\npython 
scripts/knn2img.py  --prompt \"a happy bear reading a newspaper, oil on canvas\"\n```\n\n#### RDM with text-to-image retrieval\n\nTo be able to run a RDM conditioned on a text-prompt and additionally images retrieved from this prompt, you will also need to download the corresponding retrieval database. \nWe provide two distinct databases extracted from the [Openimages-](https://storage.googleapis.com/openimages/web/index.html) and [ArtBench-](https://github.com/liaopeiyuan/artbench) datasets. \nInterchanging the databases results in different capabilities of the model as visualized below, although the learned weights are the same in both cases. \n\nDownload the retrieval-databases which contain the retrieval-datasets ([Openimages](https://storage.googleapis.com/openimages/web/index.html) (~11GB) and [ArtBench](https://github.com/liaopeiyuan/artbench) (~82MB)) compressed into CLIP image embeddings:\n```bash\nmkdir -p data/rdm/retrieval_databases\nwget -O data/rdm/retrieval_databases/artbench.zip https://ommer-lab.com/files/rdm/artbench_databases.zip\nwget -O data/rdm/retrieval_databases/openimages.zip https://ommer-lab.com/files/rdm/openimages_database.zip\nunzip data/rdm/retrieval_databases/artbench.zip -d data/rdm/retrieval_databases/\nunzip data/rdm/retrieval_databases/openimages.zip -d data/rdm/retrieval_databases/\n```\nWe also provide trained [ScaNN](https://github.com/google-research/google-research/tree/master/scann) search indices for ArtBench. Download and extract via\n```bash\nmkdir -p data/rdm/searchers\nwget -O data/rdm/searchers/artbench.zip https://ommer-lab.com/files/rdm/artbench_searchers.zip\nunzip data/rdm/searchers/artbench.zip -d data/rdm/searchers\n```\n\nSince the index for OpenImages is large (~21 GB), we provide a script to create and save it for usage during sampling. Note however,\nthat sampling with the OpenImages database will not be possible without this index. Run the script via\n```bash\npython scripts/train_searcher.py\n```\n\nRetrieval based text-guided sampling with visual nearest neighbors can be started via \n```\npython scripts/knn2img.py  --prompt \"a happy pineapple\" --use_neighbors --knn <number_of_neighbors> \n```\nNote that the maximum supported number of neighbors is 20. \nThe database can be changed via the cmd parameter ``--database`` which can be `[openimages, artbench-art_nouveau, artbench-baroque, artbench-expressionism, artbench-impressionism, artbench-post_impressionism, artbench-realism, artbench-renaissance, artbench-romanticism, artbench-surrealism, artbench-ukiyo_e]`.\nFor using `--database openimages`, the above script (`scripts/train_searcher.py`) must be executed before.\nDue to their relatively small size, the artbench datasetbases are best suited for creating more abstract concepts and do not work well for detailed text control. 
\n\n\n#### Coming Soon\n- better models\n- more resolutions\n- image-to-image retrieval\n\n## Text-to-Image\n![text2img-figure](assets/txt2img-preview.png) \n\n\nDownload the pre-trained weights (5.7GB)\n```\nmkdir -p models/ldm/text2img-large/\nwget -O models/ldm/text2img-large/model.ckpt https://ommer-lab.com/files/latent-diffusion/nitro/txt2img-f8-large/model.ckpt\n```\nand sample with\n```\npython scripts/txt2img.py --prompt \"a virus monster is playing guitar, oil on canvas\" --ddim_eta 0.0 --n_samples 4 --n_iter 4 --scale 5.0  --ddim_steps 50\n```\nThis will save each sample individually as well as a grid of size `n_iter` x `n_samples` at the specified output location (default: `outputs/txt2img-samples`).\nQuality, sampling speed and diversity are best controlled via the `scale`, `ddim_steps` and `ddim_eta` arguments.\nAs a rule of thumb, higher values of `scale` produce better samples at the cost of a reduced output diversity.   \nFurthermore, increasing `ddim_steps` generally also gives higher quality samples, but returns are diminishing for values > 250.\nFast sampling (i.e. low values of `ddim_steps`) while retaining good quality can be achieved by using `--ddim_eta 0.0`.  \nFaster sampling (i.e. even lower values of `ddim_steps`) while retaining good quality can be achieved by using `--ddim_eta 0.0` and `--plms` (see [Pseudo Numerical Methods for Diffusion Models on Manifolds](https://arxiv.org/abs/2202.09778)).\n\n#### Beyond 256²\n\nFor certain inputs, simply running the model in a convolutional fashion on larger features than it was trained on\ncan sometimes result in interesting results. To try it out, tune the `H` and `W` arguments (which will be integer-divided\nby 8 in order to calculate the corresponding latent size), e.g. run\n\n```\npython scripts/txt2img.py --prompt \"a sunset behind a mountain range, vector image\" --ddim_eta 1.0 --n_samples 1 --n_iter 1 --H 384 --W 1024 --scale 5.0  \n```\nto create a sample of size 384x1024. Note, however, that controllability is reduced compared to the 256x256 setting. \n\nThe example below was generated using the above command. \n![text2img-figure-conv](assets/txt2img-convsample.png)\n\n\n\n## Inpainting\n![inpainting](assets/inpainting.png)\n\nDownload the pre-trained weights\n```\nwget -O models/ldm/inpainting_big/last.ckpt https://heibox.uni-heidelberg.de/f/4d9ac7ea40c64582b7c9/?dl=1\n```\n\nand sample with\n```\npython scripts/inpaint.py --indir data/inpainting_examples/ --outdir outputs/inpainting_results\n```\n`indir` should contain images `*.png` and masks `<image_fname>_mask.png` like\nthe examples provided in `data/inpainting_examples`.\n\n## Class-Conditional ImageNet\n\nAvailable via a [notebook](scripts/latent_imagenet_diffusion.ipynb) [![][colab]][colab-cin].\n![class-conditional](assets/birdhouse.png)\n\n[colab]: <https://colab.research.google.com/assets/colab-badge.svg>\n[colab-cin]: <https://colab.research.google.com/github/CompVis/latent-diffusion/blob/main/scripts/latent_imagenet_diffusion.ipynb>\n\n\n## Unconditional Models\n\nWe also provide a script for sampling from unconditional LDMs (e.g. LSUN, FFHQ, ...). 
Start it via\n\n```shell script\nCUDA_VISIBLE_DEVICES=<GPU_ID> python scripts/sample_diffusion.py -r models/ldm/<model_spec>/model.ckpt -l <logdir> -n <\\#samples> --batch_size <batch_size> -c <\\#ddim steps> -e <\\#eta> \n```\n\n# Train your own LDMs\n\n## Data preparation\n\n### Faces \nFor downloading the CelebA-HQ and FFHQ datasets, proceed as described in the [taming-transformers](https://github.com/CompVis/taming-transformers#celeba-hq) \nrepository.\n\n### LSUN \n\nThe LSUN datasets can be conveniently downloaded via the script available [here](https://github.com/fyu/lsun).\nWe performed a custom split into training and validation images, and provide the corresponding filenames\nat [https://ommer-lab.com/files/lsun.zip](https://ommer-lab.com/files/lsun.zip). \nAfter downloading, extract them to `./data/lsun`. The beds/cats/churches subsets should\nalso be placed/symlinked at `./data/lsun/bedrooms`/`./data/lsun/cats`/`./data/lsun/churches`, respectively.\n\n### ImageNet\nThe code will try to download (through [Academic\nTorrents](http://academictorrents.com/)) and prepare ImageNet the first time it\nis used. However, since ImageNet is quite large, this requires a lot of disk\nspace and time. If you already have ImageNet on your disk, you can speed things\nup by putting the data into\n`${XDG_CACHE}/autoencoders/data/ILSVRC2012_{split}/data/` (which defaults to\n`~/.cache/autoencoders/data/ILSVRC2012_{split}/data/`), where `{split}` is one\nof `train`/`validation`. It should have the following structure:\n\n```\n${XDG_CACHE}/autoencoders/data/ILSVRC2012_{split}/data/\n├── n01440764\n│   ├── n01440764_10026.JPEG\n│   ├── n01440764_10027.JPEG\n│   ├── ...\n├── n01443537\n│   ├── n01443537_10007.JPEG\n│   ├── n01443537_10014.JPEG\n│   ├── ...\n├── ...\n```\n\nIf you haven't extracted the data, you can also place\n`ILSVRC2012_img_train.tar`/`ILSVRC2012_img_val.tar` (or symlinks to them) into\n`${XDG_CACHE}/autoencoders/data/ILSVRC2012_train/` /\n`${XDG_CACHE}/autoencoders/data/ILSVRC2012_validation/`, which will then be\nextracted into above structure without downloading it again.  Note that this\nwill only happen if neither a folder\n`${XDG_CACHE}/autoencoders/data/ILSVRC2012_{split}/data/` nor a file\n`${XDG_CACHE}/autoencoders/data/ILSVRC2012_{split}/.ready` exist. Remove them\nif you want to force running the dataset preparation again.\n\n\n## Model Training\n\nLogs and checkpoints for trained models are saved to `logs/<START_DATE_AND_TIME>_<config_spec>`.\n\n### Training autoencoder models\n\nConfigs for training a KL-regularized autoencoder on ImageNet are provided at `configs/autoencoder`.\nTraining can be started by running\n```\nCUDA_VISIBLE_DEVICES=<GPU_ID> python main.py --base configs/autoencoder/<config_spec>.yaml -t --gpus 0,    \n```\nwhere `config_spec` is one of {`autoencoder_kl_8x8x64`(f=32, d=64), `autoencoder_kl_16x16x16`(f=16, d=16), \n`autoencoder_kl_32x32x4`(f=8, d=4), `autoencoder_kl_64x64x3`(f=4, d=3)}.\n\nFor training VQ-regularized models, see the [taming-transformers](https://github.com/CompVis/taming-transformers) \nrepository.\n\n### Training LDMs \n\nIn ``configs/latent-diffusion/`` we provide configs for training LDMs on the LSUN-, CelebA-HQ, FFHQ and ImageNet datasets. \nTraining can be started by running\n\n```shell script\nCUDA_VISIBLE_DEVICES=<GPU_ID> python main.py --base configs/latent-diffusion/<config_spec>.yaml -t --gpus 0,\n``` \n\nwhere ``<config_spec>`` is one of {`celebahq-ldm-vq-4`(f=4, VQ-reg. 
autoencoder, spatial size 64x64x3),`ffhq-ldm-vq-4`(f=4, VQ-reg. autoencoder, spatial size 64x64x3),\n`lsun_bedrooms-ldm-vq-4`(f=4, VQ-reg. autoencoder, spatial size 64x64x3),\n`lsun_churches-ldm-vq-4`(f=8, KL-reg. autoencoder, spatial size 32x32x4),`cin-ldm-vq-8`(f=8, VQ-reg. autoencoder, spatial size 32x32x4)}.\n\n# Model Zoo \n\n## Pretrained Autoencoding Models\n![rec2](assets/reconstruction2.png)\n\nAll models were trained until convergence (no further substantial improvement in rFID).\n\n| Model                   | rFID vs val | train steps           |PSNR           | PSIM          | Link                                                                                                                                                  | Comments              \n|-------------------------|------------|----------------|----------------|---------------|-------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------|\n| f=4, VQ (Z=8192, d=3)   | 0.58       | 533066 | 27.43  +/- 4.26 | 0.53 +/- 0.21 |     https://ommer-lab.com/files/latent-diffusion/vq-f4.zip                   |  |\n| f=4, VQ (Z=8192, d=3)   | 1.06       | 658131 | 25.21 +/-  4.17 | 0.72 +/- 0.26 | https://heibox.uni-heidelberg.de/f/9c6681f64bb94338a069/?dl=1  | no attention          |\n| f=8, VQ (Z=16384, d=4)  | 1.14       | 971043 | 23.07 +/- 3.99 | 1.17 +/- 0.36 |       https://ommer-lab.com/files/latent-diffusion/vq-f8.zip                     |                       |\n| f=8, VQ (Z=256, d=4)    | 1.49       | 1608649 | 22.35 +/- 3.81 | 1.26 +/- 0.37 |   https://ommer-lab.com/files/latent-diffusion/vq-f8-n256.zip |  \n| f=16, VQ (Z=16384, d=8) | 5.15       | 1101166 | 20.83 +/- 3.61 | 1.73 +/- 0.43 |             https://heibox.uni-heidelberg.de/f/0e42b04e2e904890a9b6/?dl=1                        |                       |\n|                         |            |  |                |               |                                                                                                                                                    |                       |\n| f=4, KL                 | 0.27       | 176991 | 27.53 +/- 4.54 | 0.55 +/- 0.24 |     https://ommer-lab.com/files/latent-diffusion/kl-f4.zip                                   |                       |\n| f=8, KL                 | 0.90       | 246803 | 24.19 +/- 4.19 | 1.02 +/- 0.35 |             https://ommer-lab.com/files/latent-diffusion/kl-f8.zip                            |                       |\n| f=16, KL     (d=16)     | 0.87       | 442998 | 24.08 +/- 4.22 | 1.07 +/- 0.36 |      https://ommer-lab.com/files/latent-diffusion/kl-f16.zip                                  |                       |\n | f=32, KL     (d=64)     | 2.04       | 406763 | 22.27 +/- 3.93 | 1.41 +/- 0.40 |             https://ommer-lab.com/files/latent-diffusion/kl-f32.zip                            |                       |\n\n### Get the models\n\nRunning the following script downloads und extracts all available pretrained autoencoding models.   
\n```shell script\nbash scripts/download_first_stages.sh\n```\n\nThe first stage models can then be found in `models/first_stage_models/<model_spec>`\n\n\n\n## Pretrained LDMs\n| Datset                          |   Task    | Model        | FID           | IS              | Prec | Recall | Link                                                                                                                                                                                   | Comments                                        \n|---------------------------------|------|--------------|---------------|-----------------|------|------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------|\n| CelebA-HQ                       | Unconditional Image Synthesis    |  LDM-VQ-4 (200 DDIM steps, eta=0)| 5.11 (5.11)          | 3.29            | 0.72    | 0.49 |    https://ommer-lab.com/files/latent-diffusion/celeba.zip     |                                                 |  \n| FFHQ                            | Unconditional Image Synthesis    |  LDM-VQ-4 (200 DDIM steps, eta=1)| 4.98 (4.98)  | 4.50 (4.50)   | 0.73 | 0.50 |              https://ommer-lab.com/files/latent-diffusion/ffhq.zip                                              |                                                 |\n| LSUN-Churches                   | Unconditional Image Synthesis   |  LDM-KL-8 (400 DDIM steps, eta=0)| 4.02 (4.02) | 2.72 | 0.64 | 0.52 |         https://ommer-lab.com/files/latent-diffusion/lsun_churches.zip        |                                                 |  \n| LSUN-Bedrooms                   | Unconditional Image Synthesis   |  LDM-VQ-4 (200 DDIM steps, eta=1)| 2.95 (3.0)          | 2.22 (2.23)| 0.66 | 0.48 | https://ommer-lab.com/files/latent-diffusion/lsun_bedrooms.zip |                                                 |  \n| ImageNet                        | Class-conditional Image Synthesis | LDM-VQ-8 (200 DDIM steps, eta=1) | 7.77(7.76)* /15.82** | 201.56(209.52)* /78.82** | 0.84* / 0.65** | 0.35* / 0.63** |   https://ommer-lab.com/files/latent-diffusion/cin.zip                                                                   | *: w/ guiding, classifier_scale 10  **: w/o guiding, scores in bracket calculated with script provided by [ADM](https://github.com/openai/guided-diffusion) |   \n| Conceptual Captions             |  Text-conditional Image Synthesis | LDM-VQ-f4 (100 DDIM steps, eta=0) | 16.79         | 13.89           | N/A | N/A |              https://ommer-lab.com/files/latent-diffusion/text2img.zip                                | finetuned from LAION                            |   \n| OpenImages                      | Super-resolution   | LDM-VQ-4     | N/A            | N/A               | N/A    | N/A    |                                    https://ommer-lab.com/files/latent-diffusion/sr_bsr.zip                                    | BSR image degradation                           |\n| OpenImages                      | Layout-to-Image Synthesis    | LDM-VQ-4 (200 DDIM steps, eta=0) | 32.02         | 15.92           | N/A    | N/A    |                  https://ommer-lab.com/files/latent-diffusion/layout2img_model.zip                                           |                                                 | \n| Landscapes      |  Semantic Image Synthesis   | LDM-VQ-4  | N/A             | N/A               | N/A    | N/A    | 
          https://ommer-lab.com/files/latent-diffusion/semantic_synthesis256.zip                                    |                                                 |\n| Landscapes       |  Semantic Image Synthesis   | LDM-VQ-4  | N/A             | N/A               | N/A    | N/A    |           https://ommer-lab.com/files/latent-diffusion/semantic_synthesis.zip                                    |             finetuned on resolution 512x512                                     |\n\n\n### Get the models\n\nThe LDMs listed above can jointly be downloaded and extracted via\n\n```shell script\nbash scripts/download_models.sh\n```\n\nThe models can then be found in `models/ldm/<model_spec>`.\n\n\n\n## Coming Soon...\n\n* More inference scripts for conditional LDMs.\n* In the meantime, you can play with our colab notebook https://colab.research.google.com/drive/1xqzUi2iXQXDqXBHQGP9Mqt2YrYW6cx-J?usp=sharing\n\n## Comments \n\n- Our codebase for the diffusion models builds heavily on [OpenAI's ADM codebase](https://github.com/openai/guided-diffusion)\nand [https://github.com/lucidrains/denoising-diffusion-pytorch](https://github.com/lucidrains/denoising-diffusion-pytorch). \nThanks for open-sourcing!\n\n- The implementation of the transformer encoder is from [x-transformers](https://github.com/lucidrains/x-transformers) by [lucidrains](https://github.com/lucidrains?tab=repositories). \n\n\n## BibTeX\n\n```\n@misc{rombach2021highresolution,\n      title={High-Resolution Image Synthesis with Latent Diffusion Models}, \n      author={Robin Rombach and Andreas Blattmann and Dominik Lorenz and Patrick Esser and Björn Ommer},\n      year={2021},\n      eprint={2112.10752},\n      archivePrefix={arXiv},\n      primaryClass={cs.CV}\n}\n\n@misc{https://doi.org/10.48550/arxiv.2204.11824,\n  doi = {10.48550/ARXIV.2204.11824},\n  url = {https://arxiv.org/abs/2204.11824},\n  author = {Blattmann, Andreas and Rombach, Robin and Oktay, Kaan and Ommer, Björn},\n  keywords = {Computer Vision and Pattern Recognition (cs.CV), FOS: Computer and information sciences, FOS: Computer and information sciences},\n  title = {Retrieval-Augmented Diffusion Models},\n  publisher = {arXiv},\n  year = {2022},  \n  copyright = {arXiv.org perpetual, non-exclusive license}\n}\n\n\n```\n\n\n"
  },
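One detail from the "Beyond 256²" section of the README above is worth spelling out: `H` and `W` are integer-divided by 8 to obtain the latent resolution, so the 384x1024 example renders on a 48x128 latent grid. A tiny arithmetic check (a sketch, nothing model-specific):

```python
# Latent spatial size for the "Beyond 256^2" example in the README above.
H, W, f = 384, 1024, 8   # f = 8 per the README's note that H and W are integer-divided by 8
print(H // f, W // f)    # 48 128
```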
  {
    "path": "ldm_exp/configs/autoencoder/autoencoder_kl_16x16x16.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-6\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: \"val/rec_loss\"\n    embed_dim: 16\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 0.000001\n        disc_weight: 0.5\n\n    ddconfig:\n      double_z: True\n      z_channels: 16\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult: [ 1,1,2,2,4]  # num_down = len(ch_mult)-1\n      num_res_blocks: 2\n      attn_resolutions: [16]\n      dropout: 0.0\n\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 12\n    wrap: True\n    train:\n      target: ldm.data.imagenet.ImageNetSRTrain\n      params:\n        size: 256\n        degradation: pil_nearest\n    validation:\n      target: ldm.data.imagenet.ImageNetSRValidation\n      params:\n        size: 256\n        degradation: pil_nearest\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 1000\n        max_images: 8\n        increase_log_steps: True\n\n  trainer:\n    benchmark: True\n    accumulate_grad_batches: 2\n"
  },
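The `ddconfig` above encodes the latent geometry implied by the file name: the comment notes `num_down = len(ch_mult) - 1`, so the downsampling factor is 2 to that power. A short sketch of the arithmetic with the values from this config:

```python
# Values copied from autoencoder_kl_16x16x16.yaml above.
ch_mult = [1, 1, 2, 2, 4]
resolution = 256
z_channels = 16

num_down = len(ch_mult) - 1        # 4, per the comment in the config
f = 2 ** num_down                  # downsampling factor: 16
latent_hw = resolution // f        # 256 // 16 = 16
print(f, (latent_hw, latent_hw, z_channels))   # 16 (16, 16, 16) -> the "16x16x16" in the filename
```

The same relation gives the 8x8x64 (f=32), 32x32x4 (f=8) and 64x64x3 (f=4) shapes of the sibling autoencoder configs.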
  {
    "path": "ldm_exp/configs/autoencoder/autoencoder_kl_32x32x4.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-6\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: \"val/rec_loss\"\n    embed_dim: 4\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 0.000001\n        disc_weight: 0.5\n\n    ddconfig:\n      double_z: True\n      z_channels: 4\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult: [ 1,2,4,4 ]  # num_down = len(ch_mult)-1\n      num_res_blocks: 2\n      attn_resolutions: [ ]\n      dropout: 0.0\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 12\n    wrap: True\n    train:\n      target: ldm.data.imagenet.ImageNetSRTrain\n      params:\n        size: 256\n        degradation: pil_nearest\n    validation:\n      target: ldm.data.imagenet.ImageNetSRValidation\n      params:\n        size: 256\n        degradation: pil_nearest\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 1000\n        max_images: 8\n        increase_log_steps: True\n\n  trainer:\n    benchmark: True\n    accumulate_grad_batches: 2\n"
  },
  {
    "path": "ldm_exp/configs/autoencoder/autoencoder_kl_64x64x3.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-6\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: \"val/rec_loss\"\n    embed_dim: 3\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 0.000001\n        disc_weight: 0.5\n\n    ddconfig:\n      double_z: True\n      z_channels: 3\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult: [ 1,2,4 ]  # num_down = len(ch_mult)-1\n      num_res_blocks: 2\n      attn_resolutions: [ ]\n      dropout: 0.0\n\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 12\n    wrap: True\n    train:\n      target: ldm.data.imagenet.ImageNetSRTrain\n      params:\n        size: 256\n        degradation: pil_nearest\n    validation:\n      target: ldm.data.imagenet.ImageNetSRValidation\n      params:\n        size: 256\n        degradation: pil_nearest\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 1000\n        max_images: 8\n        increase_log_steps: True\n\n  trainer:\n    benchmark: True\n    accumulate_grad_batches: 2\n"
  },
  {
    "path": "ldm_exp/configs/autoencoder/autoencoder_kl_8x8x64.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-6\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: \"val/rec_loss\"\n    embed_dim: 64\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 0.000001\n        disc_weight: 0.5\n\n    ddconfig:\n      double_z: True\n      z_channels: 64\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult: [ 1,1,2,2,4,4]  # num_down = len(ch_mult)-1\n      num_res_blocks: 2\n      attn_resolutions: [16,8]\n      dropout: 0.0\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 12\n    wrap: True\n    train:\n      target: ldm.data.imagenet.ImageNetSRTrain\n      params:\n        size: 256\n        degradation: pil_nearest\n    validation:\n      target: ldm.data.imagenet.ImageNetSRValidation\n      params:\n        size: 256\n        degradation: pil_nearest\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 1000\n        max_images: 8\n        increase_log_steps: True\n\n  trainer:\n    benchmark: True\n    accumulate_grad_batches: 2\n"
  },
  {
    "path": "ldm_exp/configs/latent-diffusion/celebahq-ldm-vq-4.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    image_size: 64\n    channels: 3\n    monitor: val/loss_simple_ema\n\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 224\n        attention_resolutions:\n        # note: this isn\\t actually the resolution but\n        # the downsampling factor, i.e. this corresnponds to\n        # attention on spatial resolution 8,16,32, as the\n        # spatial reolution of the latents is 64 for f4\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ckpt_path: models/first_stage_models/vq-f4/model.ckpt\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config: __is_unconditional__\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 48\n    num_workers: 5\n    wrap: false\n    train:\n      target: taming.data.faceshq.CelebAHQTrain\n      params:\n        size: 256\n    validation:\n      target: taming.data.faceshq.CelebAHQValidation\n      params:\n        size: 256\n\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 5000\n        max_images: 8\n        increase_log_steps: False\n\n  trainer:\n    benchmark: True"
  },
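As the comment in the UNet config above explains, `attention_resolutions` lists downsampling factors relative to the latent, not absolute sizes. A one-line check for the f=4 models, whose latents are 64x64:

```python
# attention_resolutions are downsampling factors applied to the 64x64 latent (f=4 configs above).
latent_size = 64
attention_resolutions = [8, 4, 2]
print([latent_size // r for r in attention_resolutions])   # [8, 16, 32] -> attention at these spatial sizes
```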
  {
    "path": "ldm_exp/configs/latent-diffusion/cin-ldm-vq-f8.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: class_label\n    image_size: 32\n    channels: 4\n    cond_stage_trainable: true\n    conditioning_key: crossattn\n    monitor: val/loss_simple_ema\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 32\n        in_channels: 4\n        out_channels: 4\n        model_channels: 256\n        attention_resolutions:\n        #note: this isn\\t actually the resolution but\n        # the downsampling factor, i.e. this corresnponds to\n        # attention on spatial resolution 8,16,32, as the\n        # spatial reolution of the latents is 32 for f8\n        - 4\n        - 2\n        - 1\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 4\n        num_head_channels: 32\n        use_spatial_transformer: true\n        transformer_depth: 1\n        context_dim: 512\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 4\n        n_embed: 16384\n        ckpt_path: models/first_stage_models/vq-f8/model.ckpt\n        ddconfig:\n          double_z: false\n          z_channels: 4\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions:\n          - 32\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.ClassEmbedder\n      params:\n        embed_dim: 512\n        key: class_label\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 12\n    num_workers: 4\n    wrap: false\n    train:\n      target: ldm.data.imagenet.ImageNetTrain\n      params:\n        config:\n          size: 256\n    validation:\n      target: ldm.data.imagenet.ImageNetValidation\n      params:\n        config:\n          size: 256\n\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 1000\n        max_images: 8\n        increase_log_steps: False\n\n  trainer:\n    benchmark: True"
  },
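These YAML files are consumed by instantiating each `target` class with its `params`. A minimal sketch of that pattern using OmegaConf together with the repository's `ldm.util.instantiate_from_config` helper; the config path is the one from the entry above, and the sketch assumes the first-stage checkpoint referenced inside the config is available:

```python
# Sketch: build the LatentDiffusion model described by one of these configs.
# Assumes the ldm package from this repo is importable and that the
# first-stage checkpoint referenced inside the config exists.
from omegaconf import OmegaConf
from ldm.util import instantiate_from_config

config = OmegaConf.load("ldm_exp/configs/latent-diffusion/cin-ldm-vq-f8.yaml")

# config.model.target -> "ldm.models.diffusion.ddpm.LatentDiffusion"
model = instantiate_from_config(config.model)
model.learning_rate = config.model.base_learning_rate  # main.py normally sets this (optionally scaled)
```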
  {
    "path": "ldm_exp/configs/latent-diffusion/cin256-v2.yaml",
    "content": "model:\n  base_learning_rate: 2e-6\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: class_label\n    image_size: 64\n    channels: 3\n    cond_stage_trainable: true\n    conditioning_key: crossattn\n    monitor: val/loss\n    use_ema: False\n    \n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 192\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 5\n        num_heads: 1\n        use_spatial_transformer: true\n        transformer_depth: 1\n        context_dim: 512\n    \n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    \n    cond_stage_config:\n      target: ldm.modules.encoders.modules.ClassEmbedder\n      params:\n        n_classes: 1001\n        embed_dim: 512\n        key: class_label\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 16\n    num_workers: 4\n    wrap: false\n    train:\n      target: ldm.data.imagenet.ImageNetTrain\n      params:\n        config:\n          size: 256\n    validation:\n      target: ldm.data.imagenet.ImageNetValidation\n      params:\n        config:\n          size: 256\n      \nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 500\n        max_images: 8\n        increase_log_steps: False"
  },
  {
    "path": "ldm_exp/configs/latent-diffusion/ffhq-ldm-vq-4.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    image_size: 64\n    channels: 3\n    monitor: val/loss_simple_ema\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 224\n        attention_resolutions:\n        # note: this isn\\t actually the resolution but\n        # the downsampling factor, i.e. this corresnponds to\n        # attention on spatial resolution 8,16,32, as the\n        # spatial reolution of the latents is 64 for f4\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ckpt_path: configs/first_stage_models/vq-f4/model.yaml\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config: __is_unconditional__\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 42\n    num_workers: 5\n    wrap: false\n    train:\n      target: taming.data.faceshq.FFHQTrain\n      params:\n        size: 256\n    validation:\n      target: taming.data.faceshq.FFHQValidation\n      params:\n        size: 256\n\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 5000\n        max_images: 8\n        increase_log_steps: False\n\n  trainer:\n    benchmark: True"
  },
  {
    "path": "ldm_exp/configs/latent-diffusion/lsun_bedrooms-ldm-vq-4.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-05\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    image_size: 64\n    channels: 3\n    monitor: val/loss_simple_ema\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 224\n        attention_resolutions:\n        # note: this isn\\t actually the resolution but\n        # the downsampling factor, i.e. this corresnponds to\n        # attention on spatial resolution 8,16,32, as the\n        # spatial reolution of the latents is 64 for f4\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        ckpt_path: configs/first_stage_models/vq-f4/model.yaml\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config: __is_unconditional__\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 48\n    num_workers: 5\n    wrap: false\n    train:\n      target: ldm.data.lsun.LSUNBedroomsTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.lsun.LSUNBedroomsValidation\n      params:\n        size: 256\n\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 5000\n        max_images: 8\n        increase_log_steps: False\n\n  trainer:\n    benchmark: True"
  },
  {
    "path": "ldm_exp/configs/latent-diffusion/lsun_churches-ldm-kl-8.yaml",
    "content": "model:\n  base_learning_rate: 5.0e-5   # set to target_lr by starting main.py with '--scale_lr False'\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0155\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    loss_type: l1\n    first_stage_key: \"image\"\n    cond_stage_key: \"image\"\n    image_size: 32\n    channels: 4\n    cond_stage_trainable: False\n    concat_mode: False\n    scale_by_std: True\n    monitor: 'val/loss_simple_ema'\n\n    scheduler_config: # 10000 warmup steps\n      target: ldm.lr_scheduler.LambdaLinearScheduler\n      params:\n        warm_up_steps: [10000]\n        cycle_lengths: [10000000000000]\n        f_start: [1.e-6]\n        f_max: [1.]\n        f_min: [ 1.]\n\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 32\n        in_channels: 4\n        out_channels: 4\n        model_channels: 192\n        attention_resolutions: [ 1, 2, 4, 8 ]   # 32, 16, 8, 4\n        num_res_blocks: 2\n        channel_mult: [ 1,2,2,4,4 ]  # 32, 16, 8, 4, 2\n        num_heads: 8\n        use_scale_shift_norm: True\n        resblock_updown: True\n\n    first_stage_config:\n      target: ldm.models.autoencoder.AutoencoderKL\n      params:\n        embed_dim: 4\n        monitor: \"val/rec_loss\"\n        ckpt_path: \"models/first_stage_models/kl-f8/model.ckpt\"\n        ddconfig:\n          double_z: True\n          z_channels: 4\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult: [ 1,2,4,4 ]  # num_down = len(ch_mult)-1\n          num_res_blocks: 2\n          attn_resolutions: [ ]\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n\n    cond_stage_config: \"__is_unconditional__\"\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 96\n    num_workers: 5\n    wrap: False\n    train:\n      target: ldm.data.lsun.LSUNChurchesTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.lsun.LSUNChurchesValidation\n      params:\n        size: 256\n\nlightning:\n  callbacks:\n    image_logger:\n      target: main.ImageLogger\n      params:\n        batch_frequency: 5000\n        max_images: 8\n        increase_log_steps: False\n\n\n  trainer:\n    benchmark: True"
  },
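The comment next to `base_learning_rate` in the LSUN-Churches config ("set to target_lr by starting main.py with '--scale_lr False'") refers to the usual convention of scaling the base rate by the effective batch size. The sketch below spells that convention out under the assumption that main.py uses the common accumulate_grad_batches x n_gpus x batch_size x base_lr formula; the GPU count is an illustrative value:

```python
# Sketch of the learning-rate scaling convention referenced in the comment above.
# The authoritative formula lives in main.py; this reproduces the commonly used
# variant for illustration only.
base_learning_rate = 5.0e-5      # model.base_learning_rate from the config
batch_size = 96                  # data.params.batch_size
n_gpus = 8                       # assumption for illustration
accumulate_grad_batches = 1

scale_lr = True
if scale_lr:
    learning_rate = accumulate_grad_batches * n_gpus * batch_size * base_learning_rate
else:
    # '--scale_lr False' keeps base_learning_rate as the target learning rate
    learning_rate = base_learning_rate

print(learning_rate)
```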
  {
    "path": "ldm_exp/configs/latent-diffusion/txt2img-1p4B-eval.yaml",
    "content": "model:\n  base_learning_rate: 5.0e-05\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.00085\n    linear_end: 0.012\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: caption\n    image_size: 32\n    channels: 4\n    cond_stage_trainable: true\n    conditioning_key: crossattn\n    monitor: val/loss_simple_ema\n    scale_factor: 0.18215\n    use_ema: False\n\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 32\n        in_channels: 4\n        out_channels: 4\n        model_channels: 320\n        attention_resolutions:\n        - 4\n        - 2\n        - 1\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 4\n        - 4\n        num_heads: 8\n        use_spatial_transformer: true\n        transformer_depth: 1\n        context_dim: 1280\n        use_checkpoint: true\n        legacy: False\n\n    first_stage_config:\n      target: ldm.models.autoencoder.AutoencoderKL\n      params:\n        embed_dim: 4\n        monitor: val/rec_loss\n        ddconfig:\n          double_z: true\n          z_channels: 4\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.BERTEmbedder\n      params:\n        n_embed: 1280\n        n_layer: 32\n"
  },
  {
    "path": "ldm_exp/configs/retrieval-augmented-diffusion/768x768.yaml",
    "content": "model:\n  base_learning_rate: 0.0001\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.015\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: jpg\n    cond_stage_key: nix\n    image_size: 48\n    channels: 16\n    cond_stage_trainable: false\n    conditioning_key: crossattn\n    monitor: val/loss_simple_ema\n    scale_by_std: false\n    scale_factor: 0.22765929\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 48\n        in_channels: 16\n        out_channels: 16\n        model_channels: 448\n        attention_resolutions:\n        - 4\n        - 2\n        - 1\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        use_scale_shift_norm: false\n        resblock_updown: false\n        num_head_channels: 32\n        use_spatial_transformer: true\n        transformer_depth: 1\n        context_dim: 768\n        use_checkpoint: true\n    first_stage_config:\n      target: ldm.models.autoencoder.AutoencoderKL\n      params:\n        monitor: val/rec_loss\n        embed_dim: 16\n        ddconfig:\n          double_z: true\n          z_channels: 16\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 1\n          - 2\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions:\n          - 16\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: torch.nn.Identity"
  },
  {
    "path": "ldm_exp/environment.yaml",
    "content": "name: ldm\nchannels:\n  - pytorch\n  - defaults\ndependencies:\n  - python=3.8.5\n  - pip=20.3\n  - cudatoolkit=11.0\n  - pytorch=1.7.0\n  - torchvision=0.8.1\n  - numpy=1.19.2\n  - pip:\n    - albumentations==0.4.3\n    - opencv-python==4.1.2.30\n    - pudb==2019.2\n    - imageio==2.9.0\n    - imageio-ffmpeg==0.4.2\n    - pytorch-lightning==1.4.2\n    - omegaconf==2.1.1\n    - test-tube>=0.7.5\n    - streamlit>=0.73.1\n    - einops==0.3.0\n    - torch-fidelity==0.3.0\n    - transformers==4.3.1\n    - -e git+https://github.com/CompVis/taming-transformers.git@master#egg=taming-transformers\n    - -e git+https://github.com/openai/CLIP.git@main#egg=clip\n    - -e ."
  },
  {
    "path": "ldm_exp/fid_score.py",
    "content": "\"\"\"Calculates the Frechet Inception Distance (FID) to evalulate GANs\n\nThe FID metric calculates the distance between two distributions of images.\nTypically, we have summary statistics (mean & covariance matrix) of one\nof these distributions, while the 2nd distribution is given by a GAN.\n\nWhen run as a stand-alone program, it compares the distribution of\nimages that are stored as PNG/JPEG at a specified location with a\ndistribution given by summary statistics (in pickle format).\n\nThe FID is calculated by assuming that X_1 and X_2 are the activations of\nthe pool_3 layer of the inception net for generated samples and real world\nsamples respectively.\n\nSee --help to see further details.\n\nCode apapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead\nof Tensorflow\n\nCopyright 2018 Institute of Bioinformatics, JKU Linz\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n   http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\"\"\"\nimport os\nimport pathlib\nfrom argparse import ArgumentDefaultsHelpFormatter, ArgumentParser\nfrom PIL import ImageFile\nImageFile.LOAD_TRUNCATED_IMAGES = True\n\nimport numpy as np\nimport torch\nimport torchvision.transforms as TF\nfrom PIL import Image\nfrom scipy import linalg\nfrom torch.nn.functional import adaptive_avg_pool2d\n\ntry:\n    from tqdm import tqdm\nexcept ImportError:\n    # If tqdm is not available, provide a mock version of it\n    def tqdm(x):\n        return x\n\nfrom inception import InceptionV3\n\nparser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)\nparser.add_argument('--batch-size', type=int, default=50,\n                    help='Batch size to use')\nparser.add_argument('--dataset_name', type=str, default=None)\nparser.add_argument('--num-workers', type=int,\n                    help=('Number of processes to use for data loading. '\n                          'Defaults to `min(8, num_cpus)`'))\nparser.add_argument('--device', type=str, default=None,\n                    help='Device to use. Like cuda, cuda:0 or cpu')\nparser.add_argument('--dims', type=int, default=2048,\n                    choices=list(InceptionV3.BLOCK_INDEX_BY_DIM),\n                    help=('Dimensionality of Inception features to use. '\n                          'By default, uses pool3 features'))\nparser.add_argument('--num_samples', type=int, default=None,\n                    help=('Number of samples for FID estimation'))\nparser.add_argument('--res', type=int, default=None,\n                    help=('Resolutions of samples for FID estimation'))\nparser.add_argument('--save-stats', action='store_true',\n                    help=('Generate an npz archive from a directory of samples. 
'\n                          'The first path is used as input and the second as output.'))\n\nparser.add_argument('path', type=str, nargs=2,\n                    help=('Paths to the generated images or '\n                          'to .npz statistic files'))\n\n\nIMAGE_EXTENSIONS = {'bmp', 'jpg', 'jpeg', 'pgm', 'png', 'ppm',\n                    'tif', 'tiff', 'webp', 'JPEG'}\n\n\nclass ImagePathDataset(torch.utils.data.Dataset):\n    def __init__(self, files, transforms=None):\n        self.files = files\n        self.transforms = transforms\n\n    def __len__(self):\n        return len(self.files)\n\n    def __getitem__(self, i):\n        path = self.files[i]\n        img = Image.open(path).convert('RGB')\n        if self.transforms is not None:\n            img = self.transforms(img)\n        return img\n\n\ndef get_activations(files, model, batch_size=50, dims=2048, device='cpu',\n                    num_workers=1, res=None, dataset_name=None):\n    \"\"\"Calculates the activations of the pool_3 layer for all images.\n\n    Params:\n    -- files       : List of image files paths\n    -- model       : Instance of inception model\n    -- batch_size  : Batch size of images for the model to process at once.\n                     Make sure that the number of samples is a multiple of\n                     the batch size, otherwise some samples are ignored. This\n                     behavior is retained to match the original FID score\n                     implementation.\n    -- dims        : Dimensionality of features returned by Inception\n    -- device      : Device to run calculations\n    -- num_workers : Number of parallel dataloader workers\n\n    Returns:\n    -- A numpy array of dimension (num images, dims) that contains the\n       activations of the given tensor when feeding inception with the\n       query tensor.\n    \"\"\"\n    model.eval()\n\n    if batch_size > len(files):\n        print(('Warning: batch size is bigger than the data size. 
'\n               'Setting batch size to data size'))\n        batch_size = len(files)\n\n    if res is None:\n        trans = TF.ToTensor()\n    else:\n        if dataset_name == 'celeba':\n            from datasets import Crop\n            cx = 89\n            cy = 121\n            x1 = cy - 64\n            x2 = cy + 64\n            y1 = cx - 64\n            y2 = cx + 64\n            trans = TF.Compose([\n                        Crop(x1, x2, y1, y2),\n                        TF.Resize(res),\n                        TF.ToTensor(),\n            ])\n        else:\n            trans = TF.Compose([\n                TF.Resize(res),\n                TF.CenterCrop(res),\n                TF.ToTensor()\n            ])\n    \n    dataset = ImagePathDataset(files, transforms=trans)\n    dataloader = torch.utils.data.DataLoader(dataset,\n                                             batch_size=batch_size,\n                                             shuffle=False,\n                                             drop_last=False,\n                                             num_workers=num_workers)\n\n    pred_arr = np.empty((len(files), dims))\n\n    start_idx = 0\n\n    for batch in tqdm(dataloader):\n        batch = batch.to(device)\n\n        with torch.no_grad():\n            pred = model(batch)[0]\n\n        # If model output is not scalar, apply global spatial average pooling.\n        # This happens if you choose a dimensionality not equal 2048.\n        if pred.size(2) != 1 or pred.size(3) != 1:\n            pred = adaptive_avg_pool2d(pred, output_size=(1, 1))\n\n        pred = pred.squeeze(3).squeeze(2).cpu().numpy()\n\n        pred_arr[start_idx:start_idx + pred.shape[0]] = pred\n\n        start_idx = start_idx + pred.shape[0]\n\n    return pred_arr\n\n\ndef calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):\n    \"\"\"Numpy implementation of the Frechet Distance.\n    The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)\n    and X_2 ~ N(mu_2, C_2) is\n            d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).\n\n    Stable version by Dougal J. 
Sutherland.\n\n    Params:\n    -- mu1   : Numpy array containing the activations of a layer of the\n               inception net (like returned by the function 'get_predictions')\n               for generated samples.\n    -- mu2   : The sample mean over activations, precalculated on an\n               representative data set.\n    -- sigma1: The covariance matrix over activations for generated samples.\n    -- sigma2: The covariance matrix over activations, precalculated on an\n               representative data set.\n\n    Returns:\n    --   : The Frechet Distance.\n    \"\"\"\n\n    mu1 = np.atleast_1d(mu1)\n    mu2 = np.atleast_1d(mu2)\n\n    sigma1 = np.atleast_2d(sigma1)\n    sigma2 = np.atleast_2d(sigma2)\n\n    assert mu1.shape == mu2.shape, \\\n        'Training and test mean vectors have different lengths'\n    assert sigma1.shape == sigma2.shape, \\\n        'Training and test covariances have different dimensions'\n\n    diff = mu1 - mu2\n\n    # Product might be almost singular\n    covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)\n    if not np.isfinite(covmean).all():\n        msg = ('fid calculation produces singular product; '\n               'adding %s to diagonal of cov estimates') % eps\n        print(msg)\n        offset = np.eye(sigma1.shape[0]) * eps\n        covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))\n\n    # Numerical error might give slight imaginary component\n    if np.iscomplexobj(covmean):\n        if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):\n            m = np.max(np.abs(covmean.imag))\n            raise ValueError('Imaginary component {}'.format(m))\n        covmean = covmean.real\n\n    tr_covmean = np.trace(covmean)\n\n    return (diff.dot(diff) + np.trace(sigma1)\n            + np.trace(sigma2) - 2 * tr_covmean)\n\n\ndef calculate_activation_statistics(files, model, batch_size=50, dims=2048,\n                                    device='cpu', num_workers=1, res=None, dataset_name=None):\n    \"\"\"Calculation of the statistics used by the FID.\n    Params:\n    -- files       : List of image files paths\n    -- model       : Instance of inception model\n    -- batch_size  : The images numpy array is split into batches with\n                     batch size batch_size. 
A reasonable batch size\n                     depends on the hardware.\n    -- dims        : Dimensionality of features returned by Inception\n    -- device      : Device to run calculations\n    -- num_workers : Number of parallel dataloader workers\n\n    Returns:\n    -- mu    : The mean over samples of the activations of the pool_3 layer of\n               the inception model.\n    -- sigma : The covariance matrix of the activations of the pool_3 layer of\n               the inception model.\n    \"\"\"\n    act = get_activations(files, model, batch_size, dims, device, num_workers, res=res, dataset_name=dataset_name)\n    mu = np.mean(act, axis=0)\n    sigma = np.cov(act, rowvar=False)\n    return mu, sigma\n\n\ndef compute_statistics_of_path(path, model, batch_size, dims, device,\n                               num_workers=1, num_samples=None, res=None, dataset_name=None):\n    if path.endswith('.npz'):\n        with np.load(path) as f:\n            m, s = f['mu'][:], f['sigma'][:]\n    else:\n        path = pathlib.Path(path)\n\n        files = sorted([file for ext in IMAGE_EXTENSIONS\n                       for file in path.glob('**/*.{}'.format(ext))])\n        if num_samples is not None:\n            subfolders = []\n            for f in os.listdir(path):\n                if os.path.isdir(os.path.join(path, f)):\n                    subfolders.append(f)\n \n            imgs_per_subfolder = num_samples // len(subfolders)\n            files = []\n            for subfolder in subfolders:\n                subfolder_path = os.path.join(path, subfolder)\n                subfolder_files = sorted([file for ext in IMAGE_EXTENSIONS\n                       for file in pathlib.Path(subfolder_path).glob('**/*.{}'.format(ext))])\n                num_images = min(len(subfolder_files), imgs_per_subfolder)\n                files += subfolder_files[:num_images]\n    \n        print(\"Found %d files.\" % len(files))\n        m, s = calculate_activation_statistics(files, model, batch_size,\n                                               dims, device, num_workers, res=res, dataset_name=dataset_name)\n\n    return m, s\n\n\ndef calculate_fid_given_paths(paths, batch_size, device, dims, num_workers=1, num_samples=None, res=None, dataset_name=None):\n    \"\"\"Calculates the FID of two paths\"\"\"\n    for p in paths:\n        if not os.path.exists(p):\n            raise RuntimeError('Invalid path: %s' % p)\n\n    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]\n\n    model = InceptionV3([block_idx]).to(device)\n\n    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n    m2, s2 = compute_statistics_of_path(paths[1], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n    fid_value = calculate_frechet_distance(m1, s1, m2, s2)\n\n    return fid_value\n\n\ndef save_fid_stats(paths, batch_size, device, dims, num_workers=1, num_samples=None, res=None, dataset_name=None):\n    \"\"\"Calculates the FID of two paths\"\"\"\n    if not os.path.exists(paths[0]):\n        raise RuntimeError('Invalid path: %s' % paths[0])\n\n    if os.path.exists(paths[1]):\n        raise RuntimeError('Existing output file: %s' % paths[1])\n\n    block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]\n\n    model = InceptionV3([block_idx]).to(device)\n\n    print(f\"Saving statistics for 
{paths[0]}\")\n\n    m1, s1 = compute_statistics_of_path(paths[0], model, batch_size,\n                                        dims, device, num_workers, num_samples=num_samples, res=res, dataset_name=dataset_name)\n\n    np.savez_compressed(paths[1], mu=m1, sigma=s1)\n\n\ndef main():\n    args = parser.parse_args()\n\n    if args.device is None:\n        device = torch.device('cuda' if (torch.cuda.is_available()) else 'cpu')\n    else:\n        device = torch.device(args.device)\n\n    if args.num_workers is None:\n        try:\n            num_cpus = len(os.sched_getaffinity(0))\n        except AttributeError:\n            # os.sched_getaffinity is not available under Windows, use\n            # os.cpu_count instead (which may not return the *available* number\n            # of CPUs).\n            num_cpus = os.cpu_count()\n\n        num_workers = min(num_cpus, 8) if num_cpus is not None else 0\n    else:\n        num_workers = args.num_workers\n\n    if args.save_stats:\n        save_fid_stats(args.path, args.batch_size, device, args.dims, num_workers, num_samples=args.num_samples, res=args.res, dataset_name=args.dataset_name)\n        return\n\n    fid_value = calculate_fid_given_paths(args.path,\n                                          args.batch_size,\n                                          device,\n                                          args.dims,\n                                          num_workers,\n                                          num_samples=args.num_samples,\n                                          res = args.res, dataset_name=args.dataset_name)\n    print('FID: ', fid_value)\n\n\nif __name__ == '__main__':\n    main()"
  },
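`fid_score.py` can be run as a CLI with two positional paths, or used programmatically through `calculate_fid_given_paths`. A minimal programmatic sketch using the functions defined above; the folder paths are placeholders, and the import assumes the script is run from the `ldm_exp` directory (so that `inception.py` resolves):

```python
# Sketch: compute FID between two image folders with the helpers from fid_score.py.
# Folder paths are placeholders; run from ldm_exp/ so "from inception import ..." works.
import torch
from fid_score import calculate_fid_given_paths

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
fid = calculate_fid_given_paths(
    ["samples/generated", "data/reference"],  # generated images vs. reference images
    batch_size=50,
    device=device,
    dims=2048,          # pool3 features of InceptionV3
    num_workers=4,
)
print("FID:", fid)
```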
  {
    "path": "ldm_exp/inception.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torchvision\n\ntry:\n    from torchvision.models.utils import load_state_dict_from_url\nexcept ImportError:\n    from torch.utils.model_zoo import load_url as load_state_dict_from_url\n\n# Inception weights ported to Pytorch from\n# http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz\nFID_WEIGHTS_URL = 'https://github.com/mseitzer/pytorch-fid/releases/download/fid_weights/pt_inception-2015-12-05-6726825d.pth'  # noqa: E501\n\n\nclass InceptionV3(nn.Module):\n    \"\"\"Pretrained InceptionV3 network returning feature maps\"\"\"\n\n    # Index of default block of inception to return,\n    # corresponds to output of final average pooling\n    DEFAULT_BLOCK_INDEX = 3\n\n    # Maps feature dimensionality to their output blocks indices\n    BLOCK_INDEX_BY_DIM = {\n        64: 0,   # First max pooling features\n        192: 1,  # Second max pooling featurs\n        768: 2,  # Pre-aux classifier features\n        2048: 3  # Final average pooling features\n    }\n\n    def __init__(self,\n                 output_blocks=(DEFAULT_BLOCK_INDEX,),\n                 resize_input=True,\n                 normalize_input=True,\n                 requires_grad=False,\n                 use_fid_inception=True):\n        \"\"\"Build pretrained InceptionV3\n\n        Parameters\n        ----------\n        output_blocks : list of int\n            Indices of blocks to return features of. Possible values are:\n                - 0: corresponds to output of first max pooling\n                - 1: corresponds to output of second max pooling\n                - 2: corresponds to output which is fed to aux classifier\n                - 3: corresponds to output of final average pooling\n        resize_input : bool\n            If true, bilinearly resizes input to width and height 299 before\n            feeding input to model. As the network without fully connected\n            layers is fully convolutional, it should be able to handle inputs\n            of arbitrary size, so resizing might not be strictly needed\n        normalize_input : bool\n            If true, scales the input from range (0, 1) to the range the\n            pretrained Inception network expects, namely (-1, 1)\n        requires_grad : bool\n            If true, parameters of the model require gradients. Possibly useful\n            for finetuning the network\n        use_fid_inception : bool\n            If true, uses the pretrained Inception model used in Tensorflow's\n            FID implementation. If false, uses the pretrained Inception model\n            available in torchvision. The FID Inception model has different\n            weights and a slightly different structure from torchvision's\n            Inception model. 
If you want to compute FID scores, you are\n            strongly advised to set this parameter to true to get comparable\n            results.\n        \"\"\"\n        super(InceptionV3, self).__init__()\n\n        self.resize_input = resize_input\n        self.normalize_input = normalize_input\n        self.output_blocks = sorted(output_blocks)\n        self.last_needed_block = max(output_blocks)\n\n        assert self.last_needed_block <= 3, \\\n            'Last possible output block index is 3'\n\n        self.blocks = nn.ModuleList()\n\n        if use_fid_inception:\n            inception = fid_inception_v3()\n        else:\n            inception = _inception_v3(weights='DEFAULT')\n\n        # Block 0: input to maxpool1\n        block0 = [\n            inception.Conv2d_1a_3x3,\n            inception.Conv2d_2a_3x3,\n            inception.Conv2d_2b_3x3,\n            nn.MaxPool2d(kernel_size=3, stride=2)\n        ]\n        self.blocks.append(nn.Sequential(*block0))\n\n        # Block 1: maxpool1 to maxpool2\n        if self.last_needed_block >= 1:\n            block1 = [\n                inception.Conv2d_3b_1x1,\n                inception.Conv2d_4a_3x3,\n                nn.MaxPool2d(kernel_size=3, stride=2)\n            ]\n            self.blocks.append(nn.Sequential(*block1))\n\n        # Block 2: maxpool2 to aux classifier\n        if self.last_needed_block >= 2:\n            block2 = [\n                inception.Mixed_5b,\n                inception.Mixed_5c,\n                inception.Mixed_5d,\n                inception.Mixed_6a,\n                inception.Mixed_6b,\n                inception.Mixed_6c,\n                inception.Mixed_6d,\n                inception.Mixed_6e,\n            ]\n            self.blocks.append(nn.Sequential(*block2))\n\n        # Block 3: aux classifier to final avgpool\n        if self.last_needed_block >= 3:\n            block3 = [\n                inception.Mixed_7a,\n                inception.Mixed_7b,\n                inception.Mixed_7c,\n                nn.AdaptiveAvgPool2d(output_size=(1, 1))\n            ]\n            self.blocks.append(nn.Sequential(*block3))\n\n        for param in self.parameters():\n            param.requires_grad = requires_grad\n\n    def forward(self, inp):\n        \"\"\"Get Inception feature maps\n\n        Parameters\n        ----------\n        inp : torch.autograd.Variable\n            Input tensor of shape Bx3xHxW. 
Values are expected to be in\n            range (0, 1)\n\n        Returns\n        -------\n        List of torch.autograd.Variable, corresponding to the selected output\n        block, sorted ascending by index\n        \"\"\"\n        outp = []\n        x = inp\n\n        if self.resize_input:\n            x = F.interpolate(x,\n                              size=(299, 299),\n                              mode='bilinear',\n                              align_corners=False)\n\n        if self.normalize_input:\n            x = 2 * x - 1  # Scale from range (0, 1) to range (-1, 1)\n\n        for idx, block in enumerate(self.blocks):\n            x = block(x)\n            if idx in self.output_blocks:\n                outp.append(x)\n\n            if idx == self.last_needed_block:\n                break\n\n        return outp\n\n\ndef _inception_v3(*args, **kwargs):\n    \"\"\"Wraps `torchvision.models.inception_v3`\"\"\"\n    try:\n        version = tuple(map(int, torchvision.__version__.split('.')[:2]))\n    except ValueError:\n        # Just a caution against weird version strings\n        version = (0,)\n\n    # Skips default weight inititialization if supported by torchvision\n    # version. See https://github.com/mseitzer/pytorch-fid/issues/28.\n    if version >= (0, 6):\n        kwargs['init_weights'] = False\n\n    # Backwards compatibility: `weights` argument was handled by `pretrained`\n    # argument prior to version 0.13.\n    if version < (0, 13) and 'weights' in kwargs:\n        if kwargs['weights'] == 'DEFAULT':\n            kwargs['pretrained'] = True\n        elif kwargs['weights'] is None:\n            kwargs['pretrained'] = False\n        else:\n            raise ValueError(\n                'weights=={} not supported in torchvision {}'.format(\n                    kwargs['weights'], torchvision.__version__\n                )\n            )\n        del kwargs['weights']\n\n    return torchvision.models.inception_v3(*args, **kwargs)\n\n\ndef fid_inception_v3():\n    \"\"\"Build pretrained Inception model for FID computation\n\n    The Inception model for FID computation uses a different set of weights\n    and has a slightly different structure than torchvision's Inception.\n\n    This method first constructs torchvision's Inception and then patches the\n    necessary parts that are different in the FID Inception model.\n    \"\"\"\n    inception = _inception_v3(num_classes=1008,\n                              aux_logits=False,\n                              weights=None)\n    inception.Mixed_5b = FIDInceptionA(192, pool_features=32)\n    inception.Mixed_5c = FIDInceptionA(256, pool_features=64)\n    inception.Mixed_5d = FIDInceptionA(288, pool_features=64)\n    inception.Mixed_6b = FIDInceptionC(768, channels_7x7=128)\n    inception.Mixed_6c = FIDInceptionC(768, channels_7x7=160)\n    inception.Mixed_6d = FIDInceptionC(768, channels_7x7=160)\n    inception.Mixed_6e = FIDInceptionC(768, channels_7x7=192)\n    inception.Mixed_7b = FIDInceptionE_1(1280)\n    inception.Mixed_7c = FIDInceptionE_2(2048)\n\n    state_dict = load_state_dict_from_url(FID_WEIGHTS_URL, progress=True)\n    inception.load_state_dict(state_dict)\n    return inception\n\n\nclass FIDInceptionA(torchvision.models.inception.InceptionA):\n    \"\"\"InceptionA block patched for FID computation\"\"\"\n    def __init__(self, in_channels, pool_features):\n        super(FIDInceptionA, self).__init__(in_channels, pool_features)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        
branch5x5 = self.branch5x5_1(x)\n        branch5x5 = self.branch5x5_2(branch5x5)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionC(torchvision.models.inception.InceptionC):\n    \"\"\"InceptionC block patched for FID computation\"\"\"\n    def __init__(self, in_channels, channels_7x7):\n        super(FIDInceptionC, self).__init__(in_channels, channels_7x7)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch7x7 = self.branch7x7_1(x)\n        branch7x7 = self.branch7x7_2(branch7x7)\n        branch7x7 = self.branch7x7_3(branch7x7)\n\n        branch7x7dbl = self.branch7x7dbl_1(x)\n        branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)\n        branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionE_1(torchvision.models.inception.InceptionE):\n    \"\"\"First InceptionE block patched for FID computation\"\"\"\n    def __init__(self, in_channels):\n        super(FIDInceptionE_1, self).__init__(in_channels)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        # Patch: Tensorflow's average pool does not use the padded zero's in\n        # its average calculation\n        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1,\n                                   count_include_pad=False)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)\n\n\nclass FIDInceptionE_2(torchvision.models.inception.InceptionE):\n    \"\"\"Second InceptionE block patched for FID computation\"\"\"\n    def __init__(self, in_channels):\n        super(FIDInceptionE_2, self).__init__(in_channels)\n\n    def forward(self, x):\n        branch1x1 = self.branch1x1(x)\n\n        branch3x3 = self.branch3x3_1(x)\n        branch3x3 = [\n            self.branch3x3_2a(branch3x3),\n            self.branch3x3_2b(branch3x3),\n        ]\n        branch3x3 = torch.cat(branch3x3, 1)\n\n        
branch3x3dbl = self.branch3x3dbl_1(x)\n        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)\n        branch3x3dbl = [\n            self.branch3x3dbl_3a(branch3x3dbl),\n            self.branch3x3dbl_3b(branch3x3dbl),\n        ]\n        branch3x3dbl = torch.cat(branch3x3dbl, 1)\n\n        # Patch: The FID Inception model uses max pooling instead of average\n        # pooling. This is likely an error in this specific Inception\n        # implementation, as other Inception models use average pooling here\n        # (which matches the description in the paper).\n        branch_pool = F.max_pool2d(x, kernel_size=3, stride=1, padding=1)\n        branch_pool = self.branch_pool(branch_pool)\n\n        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]\n        return torch.cat(outputs, 1)"
  },
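A short usage sketch for the `InceptionV3` wrapper above: inputs are expected in the (0, 1) range with shape Bx3xHxW, and the single requested block here is the 2048-d pool3 feature map. The random batch is a stand-in for real images; the FID Inception weights are downloaded on first use:

```python
# Sketch: extract 2048-d pool3 features with the InceptionV3 wrapper above.
# The random batch is a stand-in for real images in the (0, 1) range.
import torch
from inception import InceptionV3

block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[2048]    # final average pooling block
model = InceptionV3([block_idx]).eval()             # downloads FID weights on first use

images = torch.rand(4, 3, 299, 299)                 # B x 3 x H x W in (0, 1)
with torch.no_grad():
    features = model(images)[0]                     # (4, 2048, 1, 1)
features = features.squeeze(-1).squeeze(-1)         # (4, 2048)
```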
  {
    "path": "ldm_exp/ldm/lr_scheduler.py",
    "content": "import numpy as np\n\n\nclass LambdaWarmUpCosineScheduler:\n    \"\"\"\n    note: use with a base_lr of 1.0\n    \"\"\"\n    def __init__(self, warm_up_steps, lr_min, lr_max, lr_start, max_decay_steps, verbosity_interval=0):\n        self.lr_warm_up_steps = warm_up_steps\n        self.lr_start = lr_start\n        self.lr_min = lr_min\n        self.lr_max = lr_max\n        self.lr_max_decay_steps = max_decay_steps\n        self.last_lr = 0.\n        self.verbosity_interval = verbosity_interval\n\n    def schedule(self, n, **kwargs):\n        if self.verbosity_interval > 0:\n            if n % self.verbosity_interval == 0: print(f\"current step: {n}, recent lr-multiplier: {self.last_lr}\")\n        if n < self.lr_warm_up_steps:\n            lr = (self.lr_max - self.lr_start) / self.lr_warm_up_steps * n + self.lr_start\n            self.last_lr = lr\n            return lr\n        else:\n            t = (n - self.lr_warm_up_steps) / (self.lr_max_decay_steps - self.lr_warm_up_steps)\n            t = min(t, 1.0)\n            lr = self.lr_min + 0.5 * (self.lr_max - self.lr_min) * (\n                    1 + np.cos(t * np.pi))\n            self.last_lr = lr\n            return lr\n\n    def __call__(self, n, **kwargs):\n        return self.schedule(n,**kwargs)\n\n\nclass LambdaWarmUpCosineScheduler2:\n    \"\"\"\n    supports repeated iterations, configurable via lists\n    note: use with a base_lr of 1.0.\n    \"\"\"\n    def __init__(self, warm_up_steps, f_min, f_max, f_start, cycle_lengths, verbosity_interval=0):\n        assert len(warm_up_steps) == len(f_min) == len(f_max) == len(f_start) == len(cycle_lengths)\n        self.lr_warm_up_steps = warm_up_steps\n        self.f_start = f_start\n        self.f_min = f_min\n        self.f_max = f_max\n        self.cycle_lengths = cycle_lengths\n        self.cum_cycles = np.cumsum([0] + list(self.cycle_lengths))\n        self.last_f = 0.\n        self.verbosity_interval = verbosity_interval\n\n    def find_in_interval(self, n):\n        interval = 0\n        for cl in self.cum_cycles[1:]:\n            if n <= cl:\n                return interval\n            interval += 1\n\n    def schedule(self, n, **kwargs):\n        cycle = self.find_in_interval(n)\n        n = n - self.cum_cycles[cycle]\n        if self.verbosity_interval > 0:\n            if n % self.verbosity_interval == 0: print(f\"current step: {n}, recent lr-multiplier: {self.last_f}, \"\n                                                       f\"current cycle {cycle}\")\n        if n < self.lr_warm_up_steps[cycle]:\n            f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[cycle] * n + self.f_start[cycle]\n            self.last_f = f\n            return f\n        else:\n            t = (n - self.lr_warm_up_steps[cycle]) / (self.cycle_lengths[cycle] - self.lr_warm_up_steps[cycle])\n            t = min(t, 1.0)\n            f = self.f_min[cycle] + 0.5 * (self.f_max[cycle] - self.f_min[cycle]) * (\n                    1 + np.cos(t * np.pi))\n            self.last_f = f\n            return f\n\n    def __call__(self, n, **kwargs):\n        return self.schedule(n, **kwargs)\n\n\nclass LambdaLinearScheduler(LambdaWarmUpCosineScheduler2):\n\n    def schedule(self, n, **kwargs):\n        cycle = self.find_in_interval(n)\n        n = n - self.cum_cycles[cycle]\n        if self.verbosity_interval > 0:\n            if n % self.verbosity_interval == 0: print(f\"current step: {n}, recent lr-multiplier: {self.last_f}, \"\n                                             
          f\"current cycle {cycle}\")\n\n        if n < self.lr_warm_up_steps[cycle]:\n            f = (self.f_max[cycle] - self.f_start[cycle]) / self.lr_warm_up_steps[cycle] * n + self.f_start[cycle]\n            self.last_f = f\n            return f\n        else:\n            f = self.f_min[cycle] + (self.f_max[cycle] - self.f_min[cycle]) * (self.cycle_lengths[cycle] - n) / (self.cycle_lengths[cycle])\n            self.last_f = f\n            return f\n\n"
  },
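These schedulers return multiplicative factors (hence the "use with a base_lr of 1.0" notes) and are meant to be wrapped in `torch.optim.lr_scheduler.LambdaLR`, which is how the `scheduler_config` of the LSUN-Churches config above gets applied. A minimal sketch with those same parameters; the dummy parameter and optimizer are illustrative:

```python
# Sketch: drive a LambdaLR with LambdaLinearScheduler, using the parameters
# from the lsun_churches scheduler_config above (10000 warm-up steps).
import torch
from torch.optim.lr_scheduler import LambdaLR
from ldm.lr_scheduler import LambdaLinearScheduler

params = [torch.nn.Parameter(torch.zeros(1))]       # dummy parameter for illustration
optimizer = torch.optim.Adam(params, lr=5.0e-5)     # base_learning_rate from the config

schedule = LambdaLinearScheduler(
    warm_up_steps=[10000],
    f_min=[1.0],
    f_max=[1.0],
    f_start=[1.0e-6],
    cycle_lengths=[10000000000000],
)
scheduler = LambdaLR(optimizer, lr_lambda=schedule.schedule)

for step in range(3):
    optimizer.step()
    scheduler.step()    # lr = base lr * schedule(step)
```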
  {
    "path": "ldm_exp/ldm/models/autoencoder.py",
    "content": "import torch\nimport pytorch_lightning as pl\nimport torch.nn.functional as F\nfrom contextlib import contextmanager\n\nfrom taming.modules.vqvae.quantize import VectorQuantizer2 as VectorQuantizer\n\nfrom ldm.modules.diffusionmodules.model import Encoder, Decoder\nfrom ldm.modules.distributions.distributions import DiagonalGaussianDistribution\n\nfrom ldm.util import instantiate_from_config\n\n\nclass VQModel(pl.LightningModule):\n    def __init__(self,\n                 ddconfig,\n                 lossconfig,\n                 n_embed,\n                 embed_dim,\n                 ckpt_path=None,\n                 ignore_keys=[],\n                 image_key=\"image\",\n                 colorize_nlabels=None,\n                 monitor=None,\n                 batch_resize_range=None,\n                 scheduler_config=None,\n                 lr_g_factor=1.0,\n                 remap=None,\n                 sane_index_shape=False, # tell vector quantizer to return indices as bhw\n                 use_ema=False\n                 ):\n        super().__init__()\n        self.embed_dim = embed_dim\n        self.n_embed = n_embed\n        self.image_key = image_key\n        self.encoder = Encoder(**ddconfig)\n        self.decoder = Decoder(**ddconfig)\n        self.loss = instantiate_from_config(lossconfig)\n        self.quantize = VectorQuantizer(n_embed, embed_dim, beta=0.25,\n                                        remap=remap,\n                                        sane_index_shape=sane_index_shape)\n        self.quant_conv = torch.nn.Conv2d(ddconfig[\"z_channels\"], embed_dim, 1)\n        self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig[\"z_channels\"], 1)\n        if colorize_nlabels is not None:\n            assert type(colorize_nlabels)==int\n            self.register_buffer(\"colorize\", torch.randn(3, colorize_nlabels, 1, 1))\n        if monitor is not None:\n            self.monitor = monitor\n        self.batch_resize_range = batch_resize_range\n        if self.batch_resize_range is not None:\n            print(f\"{self.__class__.__name__}: Using per-batch resizing in range {batch_resize_range}.\")\n\n        self.use_ema = use_ema\n        if self.use_ema:\n            self.model_ema = LitEma(self)\n            print(f\"Keeping EMAs of {len(list(self.model_ema.buffers()))}.\")\n\n        if ckpt_path is not None:\n            self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys)\n        self.scheduler_config = scheduler_config\n        self.lr_g_factor = lr_g_factor\n\n    @contextmanager\n    def ema_scope(self, context=None):\n        if self.use_ema:\n            self.model_ema.store(self.parameters())\n            self.model_ema.copy_to(self)\n            if context is not None:\n                print(f\"{context}: Switched to EMA weights\")\n        try:\n            yield None\n        finally:\n            if self.use_ema:\n                self.model_ema.restore(self.parameters())\n                if context is not None:\n                    print(f\"{context}: Restored training weights\")\n\n    def init_from_ckpt(self, path, ignore_keys=list()):\n        sd = torch.load(path, map_location=\"cpu\")[\"state_dict\"]\n        keys = list(sd.keys())\n        for k in keys:\n            for ik in ignore_keys:\n                if k.startswith(ik):\n                    print(\"Deleting key {} from state_dict.\".format(k))\n                    del sd[k]\n        missing, unexpected = self.load_state_dict(sd, strict=False)\n        print(f\"Restored 
from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys\")\n        if len(missing) > 0:\n            print(f\"Missing Keys: {missing}\")\n            print(f\"Unexpected Keys: {unexpected}\")\n\n    def on_train_batch_end(self, *args, **kwargs):\n        if self.use_ema:\n            self.model_ema(self)\n\n    def encode(self, x):\n        h = self.encoder(x)\n        h = self.quant_conv(h)\n        quant, emb_loss, info = self.quantize(h)\n        return quant, emb_loss, info\n\n    def encode_to_prequant(self, x):\n        h = self.encoder(x)\n        h = self.quant_conv(h)\n        return h\n\n    def decode(self, quant):\n        quant = self.post_quant_conv(quant)\n        dec = self.decoder(quant)\n        return dec\n\n    def decode_code(self, code_b):\n        quant_b = self.quantize.embed_code(code_b)\n        dec = self.decode(quant_b)\n        return dec\n\n    def forward(self, input, return_pred_indices=False):\n        quant, diff, (_,_,ind) = self.encode(input)\n        dec = self.decode(quant)\n        if return_pred_indices:\n            return dec, diff, ind\n        return dec, diff\n\n    def get_input(self, batch, k):\n        x = batch[k]\n        if len(x.shape) == 3:\n            x = x[..., None]\n        x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float()\n        if self.batch_resize_range is not None:\n            lower_size = self.batch_resize_range[0]\n            upper_size = self.batch_resize_range[1]\n            if self.global_step <= 4:\n                # do the first few batches with max size to avoid later oom\n                new_resize = upper_size\n            else:\n                new_resize = np.random.choice(np.arange(lower_size, upper_size+16, 16))\n            if new_resize != x.shape[2]:\n                x = F.interpolate(x, size=new_resize, mode=\"bicubic\")\n            x = x.detach()\n        return x\n\n    def training_step(self, batch, batch_idx, optimizer_idx):\n        # https://github.com/pytorch/pytorch/issues/37142\n        # try not to fool the heuristics\n        x = self.get_input(batch, self.image_key)\n        xrec, qloss, ind = self(x, return_pred_indices=True)\n\n        if optimizer_idx == 0:\n            # autoencode\n            aeloss, log_dict_ae = self.loss(qloss, x, xrec, optimizer_idx, self.global_step,\n                                            last_layer=self.get_last_layer(), split=\"train\",\n                                            predicted_indices=ind)\n\n            self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=True)\n            return aeloss\n\n        if optimizer_idx == 1:\n            # discriminator\n            discloss, log_dict_disc = self.loss(qloss, x, xrec, optimizer_idx, self.global_step,\n                                            last_layer=self.get_last_layer(), split=\"train\")\n            self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=True)\n            return discloss\n\n    def validation_step(self, batch, batch_idx):\n        log_dict = self._validation_step(batch, batch_idx)\n        with self.ema_scope():\n            log_dict_ema = self._validation_step(batch, batch_idx, suffix=\"_ema\")\n        return log_dict\n\n    def _validation_step(self, batch, batch_idx, suffix=\"\"):\n        x = self.get_input(batch, self.image_key)\n        xrec, qloss, ind = self(x, return_pred_indices=True)\n        aeloss, log_dict_ae = self.loss(qloss, x, xrec, 0,\n                    
                    self.global_step,\n                                        last_layer=self.get_last_layer(),\n                                        split=\"val\"+suffix,\n                                        predicted_indices=ind\n                                        )\n\n        discloss, log_dict_disc = self.loss(qloss, x, xrec, 1,\n                                            self.global_step,\n                                            last_layer=self.get_last_layer(),\n                                            split=\"val\"+suffix,\n                                            predicted_indices=ind\n                                            )\n        rec_loss = log_dict_ae[f\"val{suffix}/rec_loss\"]\n        self.log(f\"val{suffix}/rec_loss\", rec_loss,\n                   prog_bar=True, logger=True, on_step=False, on_epoch=True, sync_dist=True)\n        self.log(f\"val{suffix}/aeloss\", aeloss,\n                   prog_bar=True, logger=True, on_step=False, on_epoch=True, sync_dist=True)\n        if version.parse(pl.__version__) >= version.parse('1.4.0'):\n            del log_dict_ae[f\"val{suffix}/rec_loss\"]\n        self.log_dict(log_dict_ae)\n        self.log_dict(log_dict_disc)\n        return self.log_dict\n\n    def configure_optimizers(self):\n        lr_d = self.learning_rate\n        lr_g = self.lr_g_factor*self.learning_rate\n        print(\"lr_d\", lr_d)\n        print(\"lr_g\", lr_g)\n        opt_ae = torch.optim.Adam(list(self.encoder.parameters())+\n                                  list(self.decoder.parameters())+\n                                  list(self.quantize.parameters())+\n                                  list(self.quant_conv.parameters())+\n                                  list(self.post_quant_conv.parameters()),\n                                  lr=lr_g, betas=(0.5, 0.9))\n        opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(),\n                                    lr=lr_d, betas=(0.5, 0.9))\n\n        if self.scheduler_config is not None:\n            scheduler = instantiate_from_config(self.scheduler_config)\n\n            print(\"Setting up LambdaLR scheduler...\")\n            scheduler = [\n                {\n                    'scheduler': LambdaLR(opt_ae, lr_lambda=scheduler.schedule),\n                    'interval': 'step',\n                    'frequency': 1\n                },\n                {\n                    'scheduler': LambdaLR(opt_disc, lr_lambda=scheduler.schedule),\n                    'interval': 'step',\n                    'frequency': 1\n                },\n            ]\n            return [opt_ae, opt_disc], scheduler\n        return [opt_ae, opt_disc], []\n\n    def get_last_layer(self):\n        return self.decoder.conv_out.weight\n\n    def log_images(self, batch, only_inputs=False, plot_ema=False, **kwargs):\n        log = dict()\n        x = self.get_input(batch, self.image_key)\n        x = x.to(self.device)\n        if only_inputs:\n            log[\"inputs\"] = x\n            return log\n        xrec, _ = self(x)\n        if x.shape[1] > 3:\n            # colorize with random projection\n            assert xrec.shape[1] > 3\n            x = self.to_rgb(x)\n            xrec = self.to_rgb(xrec)\n        log[\"inputs\"] = x\n        log[\"reconstructions\"] = xrec\n        if plot_ema:\n            with self.ema_scope():\n                xrec_ema, _ = self(x)\n                if x.shape[1] > 3: xrec_ema = self.to_rgb(xrec_ema)\n                log[\"reconstructions_ema\"] = xrec_ema\n   
     return log\n\n    def to_rgb(self, x):\n        assert self.image_key == \"segmentation\"\n        if not hasattr(self, \"colorize\"):\n            self.register_buffer(\"colorize\", torch.randn(3, x.shape[1], 1, 1).to(x))\n        x = F.conv2d(x, weight=self.colorize)\n        x = 2.*(x-x.min())/(x.max()-x.min()) - 1.\n        return x\n\n\nclass VQModelInterface(VQModel):\n    def __init__(self, embed_dim, *args, **kwargs):\n        super().__init__(embed_dim=embed_dim, *args, **kwargs)\n        self.embed_dim = embed_dim\n\n    def encode(self, x):\n        h = self.encoder(x)\n        h = self.quant_conv(h)\n        return h\n\n    def decode(self, h, force_not_quantize=False):\n        # also go through quantization layer\n        if not force_not_quantize:\n            quant, emb_loss, info = self.quantize(h)\n        else:\n            quant = h\n        quant = self.post_quant_conv(quant)\n        dec = self.decoder(quant)\n        return dec\n\n\nclass AutoencoderKL(pl.LightningModule):\n    def __init__(self,\n                 ddconfig,\n                 lossconfig,\n                 embed_dim,\n                 ckpt_path=None,\n                 ignore_keys=[],\n                 image_key=\"image\",\n                 colorize_nlabels=None,\n                 monitor=None,\n                 ):\n        super().__init__()\n        self.image_key = image_key\n        self.encoder = Encoder(**ddconfig)\n        self.decoder = Decoder(**ddconfig)\n        self.loss = instantiate_from_config(lossconfig)\n        assert ddconfig[\"double_z\"]\n        self.quant_conv = torch.nn.Conv2d(2*ddconfig[\"z_channels\"], 2*embed_dim, 1)\n        self.post_quant_conv = torch.nn.Conv2d(embed_dim, ddconfig[\"z_channels\"], 1)\n        self.embed_dim = embed_dim\n        if colorize_nlabels is not None:\n            assert type(colorize_nlabels)==int\n            self.register_buffer(\"colorize\", torch.randn(3, colorize_nlabels, 1, 1))\n        if monitor is not None:\n            self.monitor = monitor\n        if ckpt_path is not None:\n            self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys)\n\n    def init_from_ckpt(self, path, ignore_keys=list()):\n        sd = torch.load(path, map_location=\"cpu\")[\"state_dict\"]\n        keys = list(sd.keys())\n        for k in keys:\n            for ik in ignore_keys:\n                if k.startswith(ik):\n                    print(\"Deleting key {} from state_dict.\".format(k))\n                    del sd[k]\n        self.load_state_dict(sd, strict=False)\n        print(f\"Restored from {path}\")\n\n    def encode(self, x):\n        h = self.encoder(x)\n        moments = self.quant_conv(h)\n        posterior = DiagonalGaussianDistribution(moments)\n        return posterior\n\n    def decode(self, z):\n        z = self.post_quant_conv(z)\n        dec = self.decoder(z)\n        return dec\n\n    def forward(self, input, sample_posterior=True):\n        posterior = self.encode(input)\n        if sample_posterior:\n            z = posterior.sample()\n        else:\n            z = posterior.mode()\n        dec = self.decode(z)\n        return dec, posterior\n\n    def get_input(self, batch, k):\n        x = batch[k]\n        if len(x.shape) == 3:\n            x = x[..., None]\n        x = x.permute(0, 3, 1, 2).to(memory_format=torch.contiguous_format).float()\n        return x\n\n    def training_step(self, batch, batch_idx, optimizer_idx):\n        inputs = self.get_input(batch, self.image_key)\n        reconstructions, posterior = 
self(inputs)\n\n        if optimizer_idx == 0:\n            # train encoder+decoder+logvar\n            aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step,\n                                            last_layer=self.get_last_layer(), split=\"train\")\n            self.log(\"aeloss\", aeloss, prog_bar=True, logger=True, on_step=True, on_epoch=True)\n            self.log_dict(log_dict_ae, prog_bar=False, logger=True, on_step=True, on_epoch=False)\n            return aeloss\n\n        if optimizer_idx == 1:\n            # train the discriminator\n            discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, optimizer_idx, self.global_step,\n                                                last_layer=self.get_last_layer(), split=\"train\")\n\n            self.log(\"discloss\", discloss, prog_bar=True, logger=True, on_step=True, on_epoch=True)\n            self.log_dict(log_dict_disc, prog_bar=False, logger=True, on_step=True, on_epoch=False)\n            return discloss\n\n    def validation_step(self, batch, batch_idx):\n        inputs = self.get_input(batch, self.image_key)\n        reconstructions, posterior = self(inputs)\n        aeloss, log_dict_ae = self.loss(inputs, reconstructions, posterior, 0, self.global_step,\n                                        last_layer=self.get_last_layer(), split=\"val\")\n\n        discloss, log_dict_disc = self.loss(inputs, reconstructions, posterior, 1, self.global_step,\n                                            last_layer=self.get_last_layer(), split=\"val\")\n\n        self.log(\"val/rec_loss\", log_dict_ae[\"val/rec_loss\"])\n        self.log_dict(log_dict_ae)\n        self.log_dict(log_dict_disc)\n        return self.log_dict\n\n    def configure_optimizers(self):\n        lr = self.learning_rate\n        opt_ae = torch.optim.Adam(list(self.encoder.parameters())+\n                                  list(self.decoder.parameters())+\n                                  list(self.quant_conv.parameters())+\n                                  list(self.post_quant_conv.parameters()),\n                                  lr=lr, betas=(0.5, 0.9))\n        opt_disc = torch.optim.Adam(self.loss.discriminator.parameters(),\n                                    lr=lr, betas=(0.5, 0.9))\n        return [opt_ae, opt_disc], []\n\n    def get_last_layer(self):\n        return self.decoder.conv_out.weight\n\n    @torch.no_grad()\n    def log_images(self, batch, only_inputs=False, **kwargs):\n        log = dict()\n        x = self.get_input(batch, self.image_key)\n        x = x.to(self.device)\n        if not only_inputs:\n            xrec, posterior = self(x)\n            if x.shape[1] > 3:\n                # colorize with random projection\n                assert xrec.shape[1] > 3\n                x = self.to_rgb(x)\n                xrec = self.to_rgb(xrec)\n            log[\"samples\"] = self.decode(torch.randn_like(posterior.sample()))\n            log[\"reconstructions\"] = xrec\n        log[\"inputs\"] = x\n        return log\n\n    def to_rgb(self, x):\n        assert self.image_key == \"segmentation\"\n        if not hasattr(self, \"colorize\"):\n            self.register_buffer(\"colorize\", torch.randn(3, x.shape[1], 1, 1).to(x))\n        x = F.conv2d(x, weight=self.colorize)\n        x = 2.*(x-x.min())/(x.max()-x.min()) - 1.\n        return x\n\n\nclass IdentityFirstStage(torch.nn.Module):\n    def __init__(self, *args, vq_interface=False, **kwargs):\n        self.vq_interface = 
vq_interface  # TODO: Should be true by default but check to not break older stuff\n        super().__init__()\n\n    def encode(self, x, *args, **kwargs):\n        return x\n\n    def decode(self, x, *args, **kwargs):\n        return x\n\n    def quantize(self, x, *args, **kwargs):\n        if self.vq_interface:\n            return x, None, [None, None, None]\n        return x\n\n    def forward(self, x, *args, **kwargs):\n        return x\n"
  },
  {
    "path": "ldm_exp/ldm/models/diffusion/__init__.py",
    "content": ""
  },
  {
    "path": "ldm_exp/ldm/models/diffusion/classifier.py",
    "content": "import os\nimport torch\nimport pytorch_lightning as pl\nfrom omegaconf import OmegaConf\nfrom torch.nn import functional as F\nfrom torch.optim import AdamW\nfrom torch.optim.lr_scheduler import LambdaLR\nfrom copy import deepcopy\nfrom einops import rearrange\nfrom glob import glob\nfrom natsort import natsorted\n\nfrom ldm.modules.diffusionmodules.openaimodel import EncoderUNetModel, UNetModel\nfrom ldm.util import log_txt_as_img, default, ismap, instantiate_from_config\n\n__models__ = {\n    'class_label': EncoderUNetModel,\n    'segmentation': UNetModel\n}\n\n\ndef disabled_train(self, mode=True):\n    \"\"\"Overwrite model.train with this function to make sure train/eval mode\n    does not change anymore.\"\"\"\n    return self\n\n\nclass NoisyLatentImageClassifier(pl.LightningModule):\n\n    def __init__(self,\n                 diffusion_path,\n                 num_classes,\n                 ckpt_path=None,\n                 pool='attention',\n                 label_key=None,\n                 diffusion_ckpt_path=None,\n                 scheduler_config=None,\n                 weight_decay=1.e-2,\n                 log_steps=10,\n                 monitor='val/loss',\n                 *args,\n                 **kwargs):\n        super().__init__(*args, **kwargs)\n        self.num_classes = num_classes\n        # get latest config of diffusion model\n        diffusion_config = natsorted(glob(os.path.join(diffusion_path, 'configs', '*-project.yaml')))[-1]\n        self.diffusion_config = OmegaConf.load(diffusion_config).model\n        self.diffusion_config.params.ckpt_path = diffusion_ckpt_path\n        self.load_diffusion()\n\n        self.monitor = monitor\n        self.numd = self.diffusion_model.first_stage_model.encoder.num_resolutions - 1\n        self.log_time_interval = self.diffusion_model.num_timesteps // log_steps\n        self.log_steps = log_steps\n\n        self.label_key = label_key if not hasattr(self.diffusion_model, 'cond_stage_key') \\\n            else self.diffusion_model.cond_stage_key\n\n        assert self.label_key is not None, 'label_key neither in diffusion model nor in model.params'\n\n        if self.label_key not in __models__:\n            raise NotImplementedError()\n\n        self.load_classifier(ckpt_path, pool)\n\n        self.scheduler_config = scheduler_config\n        self.use_scheduler = self.scheduler_config is not None\n        self.weight_decay = weight_decay\n\n    def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):\n        sd = torch.load(path, map_location=\"cpu\")\n        if \"state_dict\" in list(sd.keys()):\n            sd = sd[\"state_dict\"]\n        keys = list(sd.keys())\n        for k in keys:\n            for ik in ignore_keys:\n                if k.startswith(ik):\n                    print(\"Deleting key {} from state_dict.\".format(k))\n                    del sd[k]\n        missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(\n            sd, strict=False)\n        print(f\"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys\")\n        if len(missing) > 0:\n            print(f\"Missing Keys: {missing}\")\n        if len(unexpected) > 0:\n            print(f\"Unexpected Keys: {unexpected}\")\n\n    def load_diffusion(self):\n        model = instantiate_from_config(self.diffusion_config)\n        self.diffusion_model = model.eval()\n        self.diffusion_model.train = disabled_train\n        for param 
in self.diffusion_model.parameters():\n            param.requires_grad = False\n\n    def load_classifier(self, ckpt_path, pool):\n        model_config = deepcopy(self.diffusion_config.params.unet_config.params)\n        model_config.in_channels = self.diffusion_config.params.unet_config.params.out_channels\n        model_config.out_channels = self.num_classes\n        if self.label_key == 'class_label':\n            model_config.pool = pool\n\n        self.model = __models__[self.label_key](**model_config)\n        if ckpt_path is not None:\n            print('#####################################################################')\n            print(f'load from ckpt \"{ckpt_path}\"')\n            print('#####################################################################')\n            self.init_from_ckpt(ckpt_path)\n\n    @torch.no_grad()\n    def get_x_noisy(self, x, t, noise=None):\n        noise = default(noise, lambda: torch.randn_like(x))\n        continuous_sqrt_alpha_cumprod = None\n        if self.diffusion_model.use_continuous_noise:\n            continuous_sqrt_alpha_cumprod = self.diffusion_model.sample_continuous_noise_level(x.shape[0], t + 1)\n            # todo: make sure t+1 is correct here\n\n        return self.diffusion_model.q_sample(x_start=x, t=t, noise=noise,\n                                             continuous_sqrt_alpha_cumprod=continuous_sqrt_alpha_cumprod)\n\n    def forward(self, x_noisy, t, *args, **kwargs):\n        return self.model(x_noisy, t)\n\n    @torch.no_grad()\n    def get_input(self, batch, k):\n        x = batch[k]\n        if len(x.shape) == 3:\n            x = x[..., None]\n        x = rearrange(x, 'b h w c -> b c h w')\n        x = x.to(memory_format=torch.contiguous_format).float()\n        return x\n\n    @torch.no_grad()\n    def get_conditioning(self, batch, k=None):\n        if k is None:\n            k = self.label_key\n        assert k is not None, 'Needs to provide label key'\n\n        targets = batch[k].to(self.device)\n\n        if self.label_key == 'segmentation':\n            targets = rearrange(targets, 'b h w c -> b c h w')\n            for down in range(self.numd):\n                h, w = targets.shape[-2:]\n                targets = F.interpolate(targets, size=(h // 2, w // 2), mode='nearest')\n\n            # targets = rearrange(targets,'b c h w -> b h w c')\n\n        return targets\n\n    def compute_top_k(self, logits, labels, k, reduction=\"mean\"):\n        _, top_ks = torch.topk(logits, k, dim=1)\n        if reduction == \"mean\":\n            return (top_ks == labels[:, None]).float().sum(dim=-1).mean().item()\n        elif reduction == \"none\":\n            return (top_ks == labels[:, None]).float().sum(dim=-1)\n\n    def on_train_epoch_start(self):\n        # save some memory\n        self.diffusion_model.model.to('cpu')\n\n    @torch.no_grad()\n    def write_logs(self, loss, logits, targets):\n        log_prefix = 'train' if self.training else 'val'\n        log = {}\n        log[f\"{log_prefix}/loss\"] = loss.mean()\n        log[f\"{log_prefix}/acc@1\"] = self.compute_top_k(\n            logits, targets, k=1, reduction=\"mean\"\n        )\n        log[f\"{log_prefix}/acc@5\"] = self.compute_top_k(\n            logits, targets, k=5, reduction=\"mean\"\n        )\n\n        self.log_dict(log, prog_bar=False, logger=True, on_step=self.training, on_epoch=True)\n        self.log('loss', log[f\"{log_prefix}/loss\"], prog_bar=True, logger=False)\n        self.log('global_step', self.global_step, logger=False, 
on_epoch=False, prog_bar=True)\n        lr = self.optimizers().param_groups[0]['lr']\n        self.log('lr_abs', lr, on_step=True, logger=True, on_epoch=False, prog_bar=True)\n\n    def shared_step(self, batch, t=None):\n        x, *_ = self.diffusion_model.get_input(batch, k=self.diffusion_model.first_stage_key)\n        targets = self.get_conditioning(batch)\n        if targets.dim() == 4:\n            targets = targets.argmax(dim=1)\n        if t is None:\n            t = torch.randint(0, self.diffusion_model.num_timesteps, (x.shape[0],), device=self.device).long()\n        else:\n            t = torch.full(size=(x.shape[0],), fill_value=t, device=self.device).long()\n        x_noisy = self.get_x_noisy(x, t)\n        logits = self(x_noisy, t)\n\n        loss = F.cross_entropy(logits, targets, reduction='none')\n\n        self.write_logs(loss.detach(), logits.detach(), targets.detach())\n\n        loss = loss.mean()\n        return loss, logits, x_noisy, targets\n\n    def training_step(self, batch, batch_idx):\n        loss, *_ = self.shared_step(batch)\n        return loss\n\n    def reset_noise_accs(self):\n        self.noisy_acc = {t: {'acc@1': [], 'acc@5': []} for t in\n                          range(0, self.diffusion_model.num_timesteps, self.diffusion_model.log_every_t)}\n\n    def on_validation_start(self):\n        self.reset_noise_accs()\n\n    @torch.no_grad()\n    def validation_step(self, batch, batch_idx):\n        loss, *_ = self.shared_step(batch)\n\n        for t in self.noisy_acc:\n            _, logits, _, targets = self.shared_step(batch, t)\n            self.noisy_acc[t]['acc@1'].append(self.compute_top_k(logits, targets, k=1, reduction='mean'))\n            self.noisy_acc[t]['acc@5'].append(self.compute_top_k(logits, targets, k=5, reduction='mean'))\n\n        return loss\n\n    def configure_optimizers(self):\n        optimizer = AdamW(self.model.parameters(), lr=self.learning_rate, weight_decay=self.weight_decay)\n\n        if self.use_scheduler:\n            scheduler = instantiate_from_config(self.scheduler_config)\n\n            print(\"Setting up LambdaLR scheduler...\")\n            scheduler = [\n                {\n                    'scheduler': LambdaLR(optimizer, lr_lambda=scheduler.schedule),\n                    'interval': 'step',\n                    'frequency': 1\n                }]\n            return [optimizer], scheduler\n\n        return optimizer\n\n    @torch.no_grad()\n    def log_images(self, batch, N=8, *args, **kwargs):\n        log = dict()\n        x = self.get_input(batch, self.diffusion_model.first_stage_key)\n        log['inputs'] = x\n\n        y = self.get_conditioning(batch)\n\n        if self.label_key == 'class_label':\n            y = log_txt_as_img((x.shape[2], x.shape[3]), batch[\"human_label\"])\n            log['labels'] = y\n\n        if ismap(y):\n            log['labels'] = self.diffusion_model.to_rgb(y)\n\n            for step in range(self.log_steps):\n                current_time = step * self.log_time_interval\n\n                _, logits, x_noisy, _ = self.shared_step(batch, t=current_time)\n\n                log[f'inputs@t{current_time}'] = x_noisy\n\n                pred = F.one_hot(logits.argmax(dim=1), num_classes=self.num_classes)\n                pred = rearrange(pred, 'b h w c -> b c h w')\n\n                log[f'pred@t{current_time}'] = self.diffusion_model.to_rgb(pred)\n\n        for key in log:\n            log[key] = log[key][:N]\n\n        return log\n"
  },
  {
    "path": "ldm_exp/ldm/models/diffusion/ddim.py",
    "content": "\"\"\"SAMPLING ONLY.\"\"\"\n\nimport torch\nimport numpy as np\nfrom tqdm import tqdm\nfrom functools import partial\n\nfrom ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like\n\n\nclass DDIMSampler(object):\n    def __init__(self, model, schedule=\"linear\", **kwargs):\n        super().__init__()\n        self.model = model\n        self.ddpm_num_timesteps = model.num_timesteps\n        self.schedule = schedule\n\n    def register_buffer(self, name, attr):\n        if type(attr) == torch.Tensor:\n            if attr.device != torch.device(\"cuda\"):\n                attr = attr.to(torch.device(\"cuda\"))\n        setattr(self, name, attr)\n\n    def make_schedule(self, ddim_num_steps, ddim_discretize=\"uniform\", ddim_eta=0., verbose=True):\n        self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,\n                                                  num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)\n        alphas_cumprod = self.model.alphas_cumprod\n        assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'\n        to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)\n\n        self.register_buffer('betas', to_torch(self.model.betas))\n        self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))\n        self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))\n\n        # calculations for diffusion q(x_t | x_{t-1}) and others\n        self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))\n        self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())))\n        self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())))\n        self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())))\n        self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)))\n\n        # ddim sampling parameters\n        ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),\n                                                                                   ddim_timesteps=self.ddim_timesteps,\n                                                                                   eta=ddim_eta,verbose=verbose)\n        self.register_buffer('ddim_sigmas', ddim_sigmas)\n        self.register_buffer('ddim_alphas', ddim_alphas)\n        self.register_buffer('ddim_alphas_prev', ddim_alphas_prev)\n        self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. 
- ddim_alphas))\n        sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(\n            (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (\n                        1 - self.alphas_cumprod / self.alphas_cumprod_prev))\n        self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps)\n\n    @torch.no_grad()\n    def sample(self,\n               S,\n               batch_size,\n               shape,\n               conditioning=None,\n               callback=None,\n               normals_sequence=None,\n               img_callback=None,\n               quantize_x0=False,\n               eta=0.,\n               mask=None,\n               x0=None,\n               temperature=1.,\n               noise_dropout=0.,\n               score_corrector=None,\n               corrector_kwargs=None,\n               verbose=True,\n               x_T=None,\n               log_every_t=100,\n               unconditional_guidance_scale=1.,\n               unconditional_conditioning=None,\n               # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ...\n               **kwargs\n               ):\n        if conditioning is not None:\n            if isinstance(conditioning, dict):\n                cbs = conditioning[list(conditioning.keys())[0]].shape[0]\n                if cbs != batch_size:\n                    print(f\"Warning: Got {cbs} conditionings but batch-size is {batch_size}\")\n            else:\n                if conditioning.shape[0] != batch_size:\n                    print(f\"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}\")\n\n        self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)\n        # sampling\n        C, H, W = shape\n        size = (batch_size, C, H, W)\n        print(f'Data shape for DDIM sampling is {size}, eta {eta}')\n\n        samples, intermediates = self.ddim_sampling(conditioning, size,\n                                                    callback=callback,\n                                                    img_callback=img_callback,\n                                                    quantize_denoised=quantize_x0,\n                                                    mask=mask, x0=x0,\n                                                    ddim_use_original_steps=False,\n                                                    noise_dropout=noise_dropout,\n                                                    temperature=temperature,\n                                                    score_corrector=score_corrector,\n                                                    corrector_kwargs=corrector_kwargs,\n                                                    x_T=x_T,\n                                                    log_every_t=log_every_t,\n                                                    unconditional_guidance_scale=unconditional_guidance_scale,\n                                                    unconditional_conditioning=unconditional_conditioning,\n                                                    )\n        return samples, intermediates\n\n    @torch.no_grad()\n    def ddim_sampling(self, cond, shape,\n                      x_T=None, ddim_use_original_steps=False,\n                      callback=None, timesteps=None, quantize_denoised=False,\n                      mask=None, x0=None, img_callback=None, log_every_t=100,\n                      temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,\n  
                    unconditional_guidance_scale=1., unconditional_conditioning=None,):\n        device = self.model.betas.device\n        b = shape[0]\n        if x_T is None:\n            img = torch.randn(shape, device=device)\n        else:\n            img = x_T\n\n        if timesteps is None:\n            timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps\n        elif timesteps is not None and not ddim_use_original_steps:\n            subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1\n            timesteps = self.ddim_timesteps[:subset_end]\n\n        intermediates = {'x_inter': [img], 'pred_x0': [img]}\n        time_range = reversed(range(0,timesteps)) if ddim_use_original_steps else np.flip(timesteps)\n        total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]\n        print(f\"Running DDIM Sampling with {total_steps} timesteps\")\n\n        iterator = tqdm(time_range, desc='DDIM Sampler', total=total_steps)\n\n        for i, step in enumerate(iterator):\n            index = total_steps - i - 1\n            ts = torch.full((b,), step, device=device, dtype=torch.long)\n\n            if mask is not None:\n                assert x0 is not None\n                img_orig = self.model.q_sample(x0, ts)  # TODO: deterministic forward pass?\n                img = img_orig * mask + (1. - mask) * img\n\n            outs = self.p_sample_ddim(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,\n                                      quantize_denoised=quantize_denoised, temperature=temperature,\n                                      noise_dropout=noise_dropout, score_corrector=score_corrector,\n                                      corrector_kwargs=corrector_kwargs,\n                                      unconditional_guidance_scale=unconditional_guidance_scale,\n                                      unconditional_conditioning=unconditional_conditioning)\n            img, pred_x0 = outs\n            if callback: callback(i)\n            if img_callback: img_callback(pred_x0, i)\n\n            if index % log_every_t == 0 or index == total_steps - 1:\n                intermediates['x_inter'].append(img)\n                intermediates['pred_x0'].append(pred_x0)\n\n        return img, intermediates\n\n    @torch.no_grad()\n    def p_sample_ddim(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,\n                      temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,\n                      unconditional_guidance_scale=1., unconditional_conditioning=None):\n        b, *_, device = *x.shape, x.device\n\n        if unconditional_conditioning is None or unconditional_guidance_scale == 1.:\n            e_t = self.model.apply_model(x, t, c)\n        else:\n            x_in = torch.cat([x] * 2)\n            t_in = torch.cat([t] * 2)\n            c_in = torch.cat([unconditional_conditioning, c])\n            e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2)\n            e_t = e_t_uncond + unconditional_guidance_scale * (e_t - e_t_uncond)\n\n        if score_corrector is not None:\n            assert self.model.parameterization == \"eps\"\n            e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)\n\n        alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas\n        alphas_prev = self.model.alphas_cumprod_prev if 
use_original_steps else self.ddim_alphas_prev\n        sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas\n        sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas\n        # select parameters corresponding to the currently considered timestep\n        a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)\n        a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)\n        sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)\n        sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)\n\n        # current prediction for x_0\n        pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()\n        if quantize_denoised:\n            pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)\n        # direction pointing to x_t\n        dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t\n        noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature\n        if noise_dropout > 0.:\n            noise = torch.nn.functional.dropout(noise, p=noise_dropout)\n        x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise\n        return x_prev, pred_x0\n"
  },
  {
    "path": "ldm_exp/ldm/models/diffusion/ddpm.py",
    "content": "\"\"\"\nwild mixture of\nhttps://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py\nhttps://github.com/openai/improved-diffusion/blob/e94489283bb876ac1477d5dd7709bbbd2d9902ce/improved_diffusion/gaussian_diffusion.py\nhttps://github.com/CompVis/taming-transformers\n-- merci\n\"\"\"\n\nimport torch\nimport torch.nn as nn\nimport numpy as np\nimport pytorch_lightning as pl\nfrom torch.optim.lr_scheduler import LambdaLR\nfrom einops import rearrange, repeat\nfrom contextlib import contextmanager\nfrom functools import partial\nfrom tqdm import tqdm\nfrom torchvision.utils import make_grid\nfrom pytorch_lightning.utilities.distributed import rank_zero_only\n\nfrom ldm.util import log_txt_as_img, exists, default, ismap, isimage, mean_flat, count_params, instantiate_from_config\nfrom ldm.modules.ema import LitEma\nfrom ldm.modules.distributions.distributions import normal_kl, DiagonalGaussianDistribution\nfrom ldm.models.autoencoder import VQModelInterface, IdentityFirstStage, AutoencoderKL\nfrom ldm.modules.diffusionmodules.util import make_beta_schedule, extract_into_tensor, noise_like\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\n\n__conditioning_keys__ = {'concat': 'c_concat',\n                         'crossattn': 'c_crossattn',\n                         'adm': 'y'}\n\n\ndef disabled_train(self, mode=True):\n    \"\"\"Overwrite model.train with this function to make sure train/eval mode\n    does not change anymore.\"\"\"\n    return self\n\n\ndef uniform_on_device(r1, r2, shape, device):\n    return (r1 - r2) * torch.rand(*shape, device=device) + r2\n\n\nclass DDPM(pl.LightningModule):\n    # classic DDPM with Gaussian diffusion, in image space\n    def __init__(self,\n                 unet_config,\n                 timesteps=1000,\n                 beta_schedule=\"linear\",\n                 loss_type=\"l2\",\n                 ckpt_path=None,\n                 ignore_keys=[],\n                 load_only_unet=False,\n                 monitor=\"val/loss\",\n                 use_ema=True,\n                 first_stage_key=\"image\",\n                 image_size=256,\n                 channels=3,\n                 log_every_t=100,\n                 clip_denoised=True,\n                 linear_start=1e-4,\n                 linear_end=2e-2,\n                 cosine_s=8e-3,\n                 given_betas=None,\n                 original_elbo_weight=0.,\n                 v_posterior=0.,  # weight for choosing posterior variance as sigma = (1-v) * beta_tilde + v * beta\n                 l_simple_weight=1.,\n                 conditioning_key=None,\n                 parameterization=\"eps\",  # all assuming fixed variance schedules\n                 scheduler_config=None,\n                 use_positional_encodings=False,\n                 learn_logvar=False,\n                 logvar_init=0.,\n                 ):\n        super().__init__()\n        assert parameterization in [\"eps\", \"x0\"], 'currently only supporting \"eps\" and \"x0\"'\n        self.parameterization = parameterization\n        print(f\"{self.__class__.__name__}: Running in {self.parameterization}-prediction mode\")\n        self.cond_stage_model = None\n        self.clip_denoised = clip_denoised\n        self.log_every_t = log_every_t\n        self.first_stage_key = first_stage_key\n        self.image_size = image_size  # try conv?\n        self.channels = channels\n        self.use_positional_encodings 
= use_positional_encodings\n        self.model = DiffusionWrapper(unet_config, conditioning_key)\n        count_params(self.model, verbose=True)\n        self.use_ema = use_ema\n        if self.use_ema:\n            self.model_ema = LitEma(self.model)\n            print(f\"Keeping EMAs of {len(list(self.model_ema.buffers()))}.\")\n\n        self.use_scheduler = scheduler_config is not None\n        if self.use_scheduler:\n            self.scheduler_config = scheduler_config\n\n        self.v_posterior = v_posterior\n        self.original_elbo_weight = original_elbo_weight\n        self.l_simple_weight = l_simple_weight\n\n        if monitor is not None:\n            self.monitor = monitor\n        if ckpt_path is not None:\n            self.init_from_ckpt(ckpt_path, ignore_keys=ignore_keys, only_model=load_only_unet)\n\n        self.register_schedule(given_betas=given_betas, beta_schedule=beta_schedule, timesteps=timesteps,\n                               linear_start=linear_start, linear_end=linear_end, cosine_s=cosine_s)\n\n        self.loss_type = loss_type\n\n        self.learn_logvar = learn_logvar\n        self.logvar = torch.full(fill_value=logvar_init, size=(self.num_timesteps,))\n        if self.learn_logvar:\n            self.logvar = nn.Parameter(self.logvar, requires_grad=True)\n\n\n    def register_schedule(self, given_betas=None, beta_schedule=\"linear\", timesteps=1000,\n                          linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):\n        if exists(given_betas):\n            betas = given_betas\n        else:\n            betas = make_beta_schedule(beta_schedule, timesteps, linear_start=linear_start, linear_end=linear_end,\n                                       cosine_s=cosine_s)\n        alphas = 1. - betas\n        alphas_cumprod = np.cumprod(alphas, axis=0)\n        alphas_cumprod_prev = np.append(1., alphas_cumprod[:-1])\n\n        timesteps, = betas.shape\n        self.num_timesteps = int(timesteps)\n        self.linear_start = linear_start\n        self.linear_end = linear_end\n        assert alphas_cumprod.shape[0] == self.num_timesteps, 'alphas have to be defined for each timestep'\n\n        to_torch = partial(torch.tensor, dtype=torch.float32)\n\n        self.register_buffer('betas', to_torch(betas))\n        self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))\n        self.register_buffer('alphas_cumprod_prev', to_torch(alphas_cumprod_prev))\n\n        # calculations for diffusion q(x_t | x_{t-1}) and others\n        self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod)))\n        self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod)))\n        self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod)))\n        self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod)))\n        self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod - 1)))\n\n        # calculations for posterior q(x_{t-1} | x_t, x_0)\n        posterior_variance = (1 - self.v_posterior) * betas * (1. - alphas_cumprod_prev) / (\n                    1. - alphas_cumprod) + self.v_posterior * betas\n        # above: equal to 1. / (1. / (1. 
- alpha_cumprod_tm1) + alpha_t / beta_t)\n        self.register_buffer('posterior_variance', to_torch(posterior_variance))\n        # below: log calculation clipped because the posterior variance is 0 at the beginning of the diffusion chain\n        self.register_buffer('posterior_log_variance_clipped', to_torch(np.log(np.maximum(posterior_variance, 1e-20))))\n        self.register_buffer('posterior_mean_coef1', to_torch(\n            betas * np.sqrt(alphas_cumprod_prev) / (1. - alphas_cumprod)))\n        self.register_buffer('posterior_mean_coef2', to_torch(\n            (1. - alphas_cumprod_prev) * np.sqrt(alphas) / (1. - alphas_cumprod)))\n\n        if self.parameterization == \"eps\":\n            lvlb_weights = self.betas ** 2 / (\n                        2 * self.posterior_variance * to_torch(alphas) * (1 - self.alphas_cumprod))\n        elif self.parameterization == \"x0\":\n            lvlb_weights = 0.5 * np.sqrt(torch.Tensor(alphas_cumprod)) / (2. * 1 - torch.Tensor(alphas_cumprod))\n        else:\n            raise NotImplementedError(\"mu not supported\")\n        # TODO how to choose this term\n        lvlb_weights[0] = lvlb_weights[1]\n        self.register_buffer('lvlb_weights', lvlb_weights, persistent=False)\n        assert not torch.isnan(self.lvlb_weights).all()\n\n    @contextmanager\n    def ema_scope(self, context=None):\n        if self.use_ema:\n            self.model_ema.store(self.model.parameters())\n            self.model_ema.copy_to(self.model)\n            if context is not None:\n                print(f\"{context}: Switched to EMA weights\")\n        try:\n            yield None\n        finally:\n            if self.use_ema:\n                self.model_ema.restore(self.model.parameters())\n                if context is not None:\n                    print(f\"{context}: Restored training weights\")\n\n    def init_from_ckpt(self, path, ignore_keys=list(), only_model=False):\n        sd = torch.load(path, map_location=\"cpu\")\n        if \"state_dict\" in list(sd.keys()):\n            sd = sd[\"state_dict\"]\n        keys = list(sd.keys())\n        for k in keys:\n            for ik in ignore_keys:\n                if k.startswith(ik):\n                    print(\"Deleting key {} from state_dict.\".format(k))\n                    del sd[k]\n        missing, unexpected = self.load_state_dict(sd, strict=False) if not only_model else self.model.load_state_dict(\n            sd, strict=False)\n        print(f\"Restored from {path} with {len(missing)} missing and {len(unexpected)} unexpected keys\")\n        if len(missing) > 0:\n            print(f\"Missing Keys: {missing}\")\n        if len(unexpected) > 0:\n            print(f\"Unexpected Keys: {unexpected}\")\n\n    def q_mean_variance(self, x_start, t):\n        \"\"\"\n        Get the distribution q(x_t | x_0).\n        :param x_start: the [N x C x ...] tensor of noiseless inputs.\n        :param t: the number of diffusion steps (minus 1). 
Here, 0 means one step.\n        :return: A tuple (mean, variance, log_variance), all of x_start's shape.\n        \"\"\"\n        mean = (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start)\n        variance = extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape)\n        log_variance = extract_into_tensor(self.log_one_minus_alphas_cumprod, t, x_start.shape)\n        return mean, variance, log_variance\n\n    def predict_start_from_noise(self, x_t, t, noise):\n        return (\n                extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t -\n                extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * noise\n        )\n\n    def q_posterior(self, x_start, x_t, t):\n        posterior_mean = (\n                extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start +\n                extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t\n        )\n        posterior_variance = extract_into_tensor(self.posterior_variance, t, x_t.shape)\n        posterior_log_variance_clipped = extract_into_tensor(self.posterior_log_variance_clipped, t, x_t.shape)\n        return posterior_mean, posterior_variance, posterior_log_variance_clipped\n\n    def p_mean_variance(self, x, t, clip_denoised: bool):\n        model_out = self.model(x, t)\n        if self.parameterization == \"eps\":\n            x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)\n        elif self.parameterization == \"x0\":\n            x_recon = model_out\n        if clip_denoised:\n            x_recon.clamp_(-1., 1.)\n\n        model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)\n        return model_mean, posterior_variance, posterior_log_variance\n\n    @torch.no_grad()\n    def p_sample(self, x, t, clip_denoised=True, repeat_noise=False):\n        b, *_, device = *x.shape, x.device\n        model_mean, _, model_log_variance = self.p_mean_variance(x=x, t=t, clip_denoised=clip_denoised)\n        noise = noise_like(x.shape, device, repeat_noise)\n        # no noise when t == 0\n        nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))\n        return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise\n\n    @torch.no_grad()\n    def p_sample_loop(self, shape, return_intermediates=False):\n        device = self.betas.device\n        b = shape[0]\n        img = torch.randn(shape, device=device)\n        intermediates = [img]\n        for i in tqdm(reversed(range(0, self.num_timesteps)), desc='Sampling t', total=self.num_timesteps):\n            img = self.p_sample(img, torch.full((b,), i, device=device, dtype=torch.long),\n                                clip_denoised=self.clip_denoised)\n            if i % self.log_every_t == 0 or i == self.num_timesteps - 1:\n                intermediates.append(img)\n        if return_intermediates:\n            return img, intermediates\n        return img\n\n    @torch.no_grad()\n    def sample(self, batch_size=16, return_intermediates=False):\n        image_size = self.image_size\n        channels = self.channels\n        return self.p_sample_loop((batch_size, channels, image_size, image_size),\n                                  return_intermediates=return_intermediates)\n\n    def q_sample(self, x_start, t, noise=None):\n        noise = default(noise, lambda: torch.randn_like(x_start))\n        return (extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start 
+\n                extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) * noise)\n\n    def get_loss(self, pred, target, mean=True):\n        if self.loss_type == 'l1':\n            loss = (target - pred).abs()\n            if mean:\n                loss = loss.mean()\n        elif self.loss_type == 'l2':\n            if mean:\n                loss = torch.nn.functional.mse_loss(target, pred)\n            else:\n                loss = torch.nn.functional.mse_loss(target, pred, reduction='none')\n        else:\n            raise NotImplementedError(f\"unknown loss type '{self.loss_type}'\")\n\n        return loss\n\n    def p_losses(self, x_start, t, noise=None):\n        noise = default(noise, lambda: torch.randn_like(x_start))\n        x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)\n        model_out = self.model(x_noisy, t)\n\n        loss_dict = {}\n        if self.parameterization == \"eps\":\n            target = noise\n        elif self.parameterization == \"x0\":\n            target = x_start\n        else:\n            raise NotImplementedError(f\"Parameterization {self.parameterization} not yet supported\")\n\n        loss = self.get_loss(model_out, target, mean=False).mean(dim=[1, 2, 3])\n\n        log_prefix = 'train' if self.training else 'val'\n\n        loss_dict.update({f'{log_prefix}/loss_simple': loss.mean()})\n        loss_simple = loss.mean() * self.l_simple_weight\n\n        loss_vlb = (self.lvlb_weights[t] * loss).mean()\n        loss_dict.update({f'{log_prefix}/loss_vlb': loss_vlb})\n\n        loss = loss_simple + self.original_elbo_weight * loss_vlb\n\n        loss_dict.update({f'{log_prefix}/loss': loss})\n\n        return loss, loss_dict\n\n    def forward(self, x, *args, **kwargs):\n        # b, c, h, w, device, img_size, = *x.shape, x.device, self.image_size\n        # assert h == img_size and w == img_size, f'height and width of image must be {img_size}'\n        t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()\n        return self.p_losses(x, t, *args, **kwargs)\n\n    def get_input(self, batch, k):\n        x = batch[k]\n        if len(x.shape) == 3:\n            x = x[..., None]\n        x = rearrange(x, 'b h w c -> b c h w')\n        x = x.to(memory_format=torch.contiguous_format).float()\n        return x\n\n    def shared_step(self, batch):\n        x = self.get_input(batch, self.first_stage_key)\n        loss, loss_dict = self(x)\n        return loss, loss_dict\n\n    def training_step(self, batch, batch_idx):\n        loss, loss_dict = self.shared_step(batch)\n\n        self.log_dict(loss_dict, prog_bar=True,\n                      logger=True, on_step=True, on_epoch=True)\n\n        self.log(\"global_step\", self.global_step,\n                 prog_bar=True, logger=True, on_step=True, on_epoch=False)\n\n        if self.use_scheduler:\n            lr = self.optimizers().param_groups[0]['lr']\n            self.log('lr_abs', lr, prog_bar=True, logger=True, on_step=True, on_epoch=False)\n\n        return loss\n\n    @torch.no_grad()\n    def validation_step(self, batch, batch_idx):\n        _, loss_dict_no_ema = self.shared_step(batch)\n        with self.ema_scope():\n            _, loss_dict_ema = self.shared_step(batch)\n            loss_dict_ema = {key + '_ema': loss_dict_ema[key] for key in loss_dict_ema}\n        self.log_dict(loss_dict_no_ema, prog_bar=False, logger=True, on_step=False, on_epoch=True)\n        self.log_dict(loss_dict_ema, prog_bar=False, logger=True, on_step=False, 
on_epoch=True)\n\n    def on_train_batch_end(self, *args, **kwargs):\n        if self.use_ema:\n            self.model_ema(self.model)\n\n    def _get_rows_from_list(self, samples):\n        n_imgs_per_row = len(samples)\n        denoise_grid = rearrange(samples, 'n b c h w -> b n c h w')\n        denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')\n        denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)\n        return denoise_grid\n\n    @torch.no_grad()\n    def log_images(self, batch, N=8, n_row=2, sample=True, return_keys=None, **kwargs):\n        log = dict()\n        x = self.get_input(batch, self.first_stage_key)\n        N = min(x.shape[0], N)\n        n_row = min(x.shape[0], n_row)\n        x = x.to(self.device)[:N]\n        log[\"inputs\"] = x\n\n        # get diffusion row\n        diffusion_row = list()\n        x_start = x[:n_row]\n\n        for t in range(self.num_timesteps):\n            if t % self.log_every_t == 0 or t == self.num_timesteps - 1:\n                t = repeat(torch.tensor([t]), '1 -> b', b=n_row)\n                t = t.to(self.device).long()\n                noise = torch.randn_like(x_start)\n                x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)\n                diffusion_row.append(x_noisy)\n\n        log[\"diffusion_row\"] = self._get_rows_from_list(diffusion_row)\n\n        if sample:\n            # get denoise row\n            with self.ema_scope(\"Plotting\"):\n                samples, denoise_row = self.sample(batch_size=N, return_intermediates=True)\n\n            log[\"samples\"] = samples\n            log[\"denoise_row\"] = self._get_rows_from_list(denoise_row)\n\n        if return_keys:\n            if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:\n                return log\n            else:\n                return {key: log[key] for key in return_keys}\n        return log\n\n    def configure_optimizers(self):\n        lr = self.learning_rate\n        params = list(self.model.parameters())\n        if self.learn_logvar:\n            params = params + [self.logvar]\n        opt = torch.optim.AdamW(params, lr=lr)\n        return opt\n\n\nclass LatentDiffusion(DDPM):\n    \"\"\"main class\"\"\"\n    def __init__(self,\n                 first_stage_config,\n                 cond_stage_config,\n                 num_timesteps_cond=None,\n                 cond_stage_key=\"image\",\n                 cond_stage_trainable=False,\n                 concat_mode=True,\n                 cond_stage_forward=None,\n                 conditioning_key=None,\n                 scale_factor=1.0,\n                 scale_by_std=False,\n                 *args, **kwargs):\n        self.num_timesteps_cond = default(num_timesteps_cond, 1)\n        self.scale_by_std = scale_by_std\n        assert self.num_timesteps_cond <= kwargs['timesteps']\n        # for backwards compatibility after implementation of DiffusionWrapper\n        if conditioning_key is None:\n            conditioning_key = 'concat' if concat_mode else 'crossattn'\n        if cond_stage_config == '__is_unconditional__':\n            conditioning_key = None\n        ckpt_path = kwargs.pop(\"ckpt_path\", None)\n        ignore_keys = kwargs.pop(\"ignore_keys\", [])\n        super().__init__(conditioning_key=conditioning_key, *args, **kwargs)\n        self.concat_mode = concat_mode\n        self.cond_stage_trainable = cond_stage_trainable\n        self.cond_stage_key = cond_stage_key\n        try:\n            self.num_downs = 
len(first_stage_config.params.ddconfig.ch_mult) - 1\n        except:\n            self.num_downs = 0\n        if not scale_by_std:\n            self.scale_factor = scale_factor\n        else:\n            self.register_buffer('scale_factor', torch.tensor(scale_factor))\n        self.instantiate_first_stage(first_stage_config)\n        self.instantiate_cond_stage(cond_stage_config)\n        self.cond_stage_forward = cond_stage_forward\n        self.clip_denoised = False\n        self.bbox_tokenizer = None  \n\n        self.restarted_from_ckpt = False\n        if ckpt_path is not None:\n            self.init_from_ckpt(ckpt_path, ignore_keys)\n            self.restarted_from_ckpt = True\n\n    def make_cond_schedule(self, ):\n        self.cond_ids = torch.full(size=(self.num_timesteps,), fill_value=self.num_timesteps - 1, dtype=torch.long)\n        ids = torch.round(torch.linspace(0, self.num_timesteps - 1, self.num_timesteps_cond)).long()\n        self.cond_ids[:self.num_timesteps_cond] = ids\n\n    @rank_zero_only\n    @torch.no_grad()\n    def on_train_batch_start(self, batch, batch_idx, dataloader_idx):\n        # only for very first batch\n        if self.scale_by_std and self.current_epoch == 0 and self.global_step == 0 and batch_idx == 0 and not self.restarted_from_ckpt:\n            assert self.scale_factor == 1., 'rather not use custom rescaling and std-rescaling simultaneously'\n            # set rescale weight to 1./std of encodings\n            print(\"### USING STD-RESCALING ###\")\n            x = super().get_input(batch, self.first_stage_key)\n            x = x.to(self.device)\n            encoder_posterior = self.encode_first_stage(x)\n            z = self.get_first_stage_encoding(encoder_posterior).detach()\n            del self.scale_factor\n            self.register_buffer('scale_factor', 1. 
/ z.flatten().std())\n            print(f\"setting self.scale_factor to {self.scale_factor}\")\n            print(\"### USING STD-RESCALING ###\")\n\n    def register_schedule(self,\n                          given_betas=None, beta_schedule=\"linear\", timesteps=1000,\n                          linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):\n        super().register_schedule(given_betas, beta_schedule, timesteps, linear_start, linear_end, cosine_s)\n\n        self.shorten_cond_schedule = self.num_timesteps_cond > 1\n        if self.shorten_cond_schedule:\n            self.make_cond_schedule()\n\n    def instantiate_first_stage(self, config):\n        model = instantiate_from_config(config)\n        self.first_stage_model = model.eval()\n        self.first_stage_model.train = disabled_train\n        for param in self.first_stage_model.parameters():\n            param.requires_grad = False\n\n    def instantiate_cond_stage(self, config):\n        if not self.cond_stage_trainable:\n            if config == \"__is_first_stage__\":\n                print(\"Using first stage also as cond stage.\")\n                self.cond_stage_model = self.first_stage_model\n            elif config == \"__is_unconditional__\":\n                print(f\"Training {self.__class__.__name__} as an unconditional model.\")\n                self.cond_stage_model = None\n                # self.be_unconditional = True\n            else:\n                model = instantiate_from_config(config)\n                self.cond_stage_model = model.eval()\n                self.cond_stage_model.train = disabled_train\n                for param in self.cond_stage_model.parameters():\n                    param.requires_grad = False\n        else:\n            assert config != '__is_first_stage__'\n            assert config != '__is_unconditional__'\n            model = instantiate_from_config(config)\n            self.cond_stage_model = model\n\n    def _get_denoise_row_from_list(self, samples, desc='', force_no_decoder_quantization=False):\n        denoise_row = []\n        for zd in tqdm(samples, desc=desc):\n            denoise_row.append(self.decode_first_stage(zd.to(self.device),\n                                                            force_not_quantize=force_no_decoder_quantization))\n        n_imgs_per_row = len(denoise_row)\n        denoise_row = torch.stack(denoise_row)  # n_log_step, n_row, C, H, W\n        denoise_grid = rearrange(denoise_row, 'n b c h w -> b n c h w')\n        denoise_grid = rearrange(denoise_grid, 'b n c h w -> (b n) c h w')\n        denoise_grid = make_grid(denoise_grid, nrow=n_imgs_per_row)\n        return denoise_grid\n\n    def get_first_stage_encoding(self, encoder_posterior):\n        if isinstance(encoder_posterior, DiagonalGaussianDistribution):\n            z = encoder_posterior.sample()\n        elif isinstance(encoder_posterior, torch.Tensor):\n            z = encoder_posterior\n        else:\n            raise NotImplementedError(f\"encoder_posterior of type '{type(encoder_posterior)}' not yet implemented\")\n        return self.scale_factor * z\n\n    def get_learned_conditioning(self, c):\n        if self.cond_stage_forward is None:\n            if hasattr(self.cond_stage_model, 'encode') and callable(self.cond_stage_model.encode):\n                c = self.cond_stage_model.encode(c)\n                if isinstance(c, DiagonalGaussianDistribution):\n                    c = c.mode()\n            else:\n                c = self.cond_stage_model(c)\n        else:\n            assert 
hasattr(self.cond_stage_model, self.cond_stage_forward)\n            c = getattr(self.cond_stage_model, self.cond_stage_forward)(c)\n        return c\n\n    def meshgrid(self, h, w):\n        y = torch.arange(0, h).view(h, 1, 1).repeat(1, w, 1)\n        x = torch.arange(0, w).view(1, w, 1).repeat(h, 1, 1)\n\n        arr = torch.cat([y, x], dim=-1)\n        return arr\n\n    def delta_border(self, h, w):\n        \"\"\"\n        :param h: height\n        :param w: width\n        :return: normalized distance to image border,\n         with min distance = 0 at border and max dist = 0.5 at image center\n        \"\"\"\n        lower_right_corner = torch.tensor([h - 1, w - 1]).view(1, 1, 2)\n        arr = self.meshgrid(h, w) / lower_right_corner\n        dist_left_up = torch.min(arr, dim=-1, keepdims=True)[0]\n        dist_right_down = torch.min(1 - arr, dim=-1, keepdims=True)[0]\n        edge_dist = torch.min(torch.cat([dist_left_up, dist_right_down], dim=-1), dim=-1)[0]\n        return edge_dist\n\n    def get_weighting(self, h, w, Ly, Lx, device):\n        weighting = self.delta_border(h, w)\n        weighting = torch.clip(weighting, self.split_input_params[\"clip_min_weight\"],\n                               self.split_input_params[\"clip_max_weight\"], )\n        weighting = weighting.view(1, h * w, 1).repeat(1, 1, Ly * Lx).to(device)\n\n        if self.split_input_params[\"tie_braker\"]:\n            L_weighting = self.delta_border(Ly, Lx)\n            L_weighting = torch.clip(L_weighting,\n                                     self.split_input_params[\"clip_min_tie_weight\"],\n                                     self.split_input_params[\"clip_max_tie_weight\"])\n\n            L_weighting = L_weighting.view(1, 1, Ly * Lx).to(device)\n            weighting = weighting * L_weighting\n        return weighting\n\n    def get_fold_unfold(self, x, kernel_size, stride, uf=1, df=1):  # todo load once not every time, shorten code\n        \"\"\"\n        :param x: img of size (bs, c, h, w)\n        :return: n img crops of size (n, bs, c, kernel_size[0], kernel_size[1])\n        \"\"\"\n        bs, nc, h, w = x.shape\n\n        # number of crops in image\n        Ly = (h - kernel_size[0]) // stride[0] + 1\n        Lx = (w - kernel_size[1]) // stride[1] + 1\n\n        if uf == 1 and df == 1:\n            fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)\n            unfold = torch.nn.Unfold(**fold_params)\n\n            fold = torch.nn.Fold(output_size=x.shape[2:], **fold_params)\n\n            weighting = self.get_weighting(kernel_size[0], kernel_size[1], Ly, Lx, x.device).to(x.dtype)\n            normalization = fold(weighting).view(1, 1, h, w)  # normalizes the overlap\n            weighting = weighting.view((1, 1, kernel_size[0], kernel_size[1], Ly * Lx))\n\n        elif uf > 1 and df == 1:\n            fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)\n            unfold = torch.nn.Unfold(**fold_params)\n\n            fold_params2 = dict(kernel_size=(kernel_size[0] * uf, kernel_size[0] * uf),\n                                dilation=1, padding=0,\n                                stride=(stride[0] * uf, stride[1] * uf))\n            fold = torch.nn.Fold(output_size=(x.shape[2] * uf, x.shape[3] * uf), **fold_params2)\n\n            weighting = self.get_weighting(kernel_size[0] * uf, kernel_size[1] * uf, Ly, Lx, x.device).to(x.dtype)\n            normalization = fold(weighting).view(1, 1, h * uf, w * uf)  # normalizes the overlap\n 
           weighting = weighting.view((1, 1, kernel_size[0] * uf, kernel_size[1] * uf, Ly * Lx))\n\n        elif df > 1 and uf == 1:\n            fold_params = dict(kernel_size=kernel_size, dilation=1, padding=0, stride=stride)\n            unfold = torch.nn.Unfold(**fold_params)\n\n            fold_params2 = dict(kernel_size=(kernel_size[0] // df, kernel_size[0] // df),\n                                dilation=1, padding=0,\n                                stride=(stride[0] // df, stride[1] // df))\n            fold = torch.nn.Fold(output_size=(x.shape[2] // df, x.shape[3] // df), **fold_params2)\n\n            weighting = self.get_weighting(kernel_size[0] // df, kernel_size[1] // df, Ly, Lx, x.device).to(x.dtype)\n            normalization = fold(weighting).view(1, 1, h // df, w // df)  # normalizes the overlap\n            weighting = weighting.view((1, 1, kernel_size[0] // df, kernel_size[1] // df, Ly * Lx))\n\n        else:\n            raise NotImplementedError\n\n        return fold, unfold, normalization, weighting\n\n    @torch.no_grad()\n    def get_input(self, batch, k, return_first_stage_outputs=False, force_c_encode=False,\n                  cond_key=None, return_original_cond=False, bs=None):\n        x = super().get_input(batch, k)\n        if bs is not None:\n            x = x[:bs]\n        x = x.to(self.device)\n        encoder_posterior = self.encode_first_stage(x)\n        z = self.get_first_stage_encoding(encoder_posterior).detach()\n\n        if self.model.conditioning_key is not None:\n            if cond_key is None:\n                cond_key = self.cond_stage_key\n            if cond_key != self.first_stage_key:\n                if cond_key in ['caption', 'coordinates_bbox']:\n                    xc = batch[cond_key]\n                elif cond_key == 'class_label':\n                    xc = batch\n                else:\n                    xc = super().get_input(batch, cond_key).to(self.device)\n            else:\n                xc = x\n            if not self.cond_stage_trainable or force_c_encode:\n                if isinstance(xc, dict) or isinstance(xc, list):\n                    # import pudb; pudb.set_trace()\n                    c = self.get_learned_conditioning(xc)\n                else:\n                    c = self.get_learned_conditioning(xc.to(self.device))\n            else:\n                c = xc\n            if bs is not None:\n                c = c[:bs]\n\n            if self.use_positional_encodings:\n                pos_x, pos_y = self.compute_latent_shifts(batch)\n                ckey = __conditioning_keys__[self.model.conditioning_key]\n                c = {ckey: c, 'pos_x': pos_x, 'pos_y': pos_y}\n\n        else:\n            c = None\n            xc = None\n            if self.use_positional_encodings:\n                pos_x, pos_y = self.compute_latent_shifts(batch)\n                c = {'pos_x': pos_x, 'pos_y': pos_y}\n        out = [z, c]\n        if return_first_stage_outputs:\n            xrec = self.decode_first_stage(z)\n            out.extend([x, xrec])\n        if return_original_cond:\n            out.append(xc)\n        return out\n\n    @torch.no_grad()\n    def decode_first_stage(self, z, predict_cids=False, force_not_quantize=False):\n        if predict_cids:\n            if z.dim() == 4:\n                z = torch.argmax(z.exp(), dim=1).long()\n            z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None)\n            z = rearrange(z, 'b h w c -> b c h w').contiguous()\n\n        z = 1. 
/ self.scale_factor * z\n\n        if hasattr(self, \"split_input_params\"):\n            if self.split_input_params[\"patch_distributed_vq\"]:\n                ks = self.split_input_params[\"ks\"]  # eg. (128, 128)\n                stride = self.split_input_params[\"stride\"]  # eg. (64, 64)\n                uf = self.split_input_params[\"vqf\"]\n                bs, nc, h, w = z.shape\n                if ks[0] > h or ks[1] > w:\n                    ks = (min(ks[0], h), min(ks[1], w))\n                    print(\"reducing Kernel\")\n\n                if stride[0] > h or stride[1] > w:\n                    stride = (min(stride[0], h), min(stride[1], w))\n                    print(\"reducing stride\")\n\n                fold, unfold, normalization, weighting = self.get_fold_unfold(z, ks, stride, uf=uf)\n\n                z = unfold(z)  # (bn, nc * prod(**ks), L)\n                # 1. Reshape to img shape\n                z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1]))  # (bn, nc, ks[0], ks[1], L )\n\n                # 2. apply model loop over last dim\n                if isinstance(self.first_stage_model, VQModelInterface):\n                    output_list = [self.first_stage_model.decode(z[:, :, :, :, i],\n                                                                 force_not_quantize=predict_cids or force_not_quantize)\n                                   for i in range(z.shape[-1])]\n                else:\n\n                    output_list = [self.first_stage_model.decode(z[:, :, :, :, i])\n                                   for i in range(z.shape[-1])]\n\n                o = torch.stack(output_list, axis=-1)  # # (bn, nc, ks[0], ks[1], L)\n                o = o * weighting\n                # Reverse 1. reshape to img shape\n                o = o.view((o.shape[0], -1, o.shape[-1]))  # (bn, nc * ks[0] * ks[1], L)\n                # stitch crops together\n                decoded = fold(o)\n                decoded = decoded / normalization  # norm is shape (1, 1, h, w)\n                return decoded\n            else:\n                if isinstance(self.first_stage_model, VQModelInterface):\n                    return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize)\n                else:\n                    return self.first_stage_model.decode(z)\n\n        else:\n            if isinstance(self.first_stage_model, VQModelInterface):\n                return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize)\n            else:\n                return self.first_stage_model.decode(z)\n\n    # same as above but without decorator\n    def differentiable_decode_first_stage(self, z, predict_cids=False, force_not_quantize=False):\n        if predict_cids:\n            if z.dim() == 4:\n                z = torch.argmax(z.exp(), dim=1).long()\n            z = self.first_stage_model.quantize.get_codebook_entry(z, shape=None)\n            z = rearrange(z, 'b h w c -> b c h w').contiguous()\n\n        z = 1. / self.scale_factor * z\n\n        if hasattr(self, \"split_input_params\"):\n            if self.split_input_params[\"patch_distributed_vq\"]:\n                ks = self.split_input_params[\"ks\"]  # eg. (128, 128)\n                stride = self.split_input_params[\"stride\"]  # eg. 
(64, 64)\n                uf = self.split_input_params[\"vqf\"]\n                bs, nc, h, w = z.shape\n                if ks[0] > h or ks[1] > w:\n                    ks = (min(ks[0], h), min(ks[1], w))\n                    print(\"reducing Kernel\")\n\n                if stride[0] > h or stride[1] > w:\n                    stride = (min(stride[0], h), min(stride[1], w))\n                    print(\"reducing stride\")\n\n                fold, unfold, normalization, weighting = self.get_fold_unfold(z, ks, stride, uf=uf)\n\n                z = unfold(z)  # (bn, nc * prod(**ks), L)\n                # 1. Reshape to img shape\n                z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1]))  # (bn, nc, ks[0], ks[1], L )\n\n                # 2. apply model loop over last dim\n                if isinstance(self.first_stage_model, VQModelInterface):  \n                    output_list = [self.first_stage_model.decode(z[:, :, :, :, i],\n                                                                 force_not_quantize=predict_cids or force_not_quantize)\n                                   for i in range(z.shape[-1])]\n                else:\n\n                    output_list = [self.first_stage_model.decode(z[:, :, :, :, i])\n                                   for i in range(z.shape[-1])]\n\n                o = torch.stack(output_list, axis=-1)  # # (bn, nc, ks[0], ks[1], L)\n                o = o * weighting\n                # Reverse 1. reshape to img shape\n                o = o.view((o.shape[0], -1, o.shape[-1]))  # (bn, nc * ks[0] * ks[1], L)\n                # stitch crops together\n                decoded = fold(o)\n                decoded = decoded / normalization  # norm is shape (1, 1, h, w)\n                return decoded\n            else:\n                if isinstance(self.first_stage_model, VQModelInterface):\n                    return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize)\n                else:\n                    return self.first_stage_model.decode(z)\n\n        else:\n            if isinstance(self.first_stage_model, VQModelInterface):\n                return self.first_stage_model.decode(z, force_not_quantize=predict_cids or force_not_quantize)\n            else:\n                return self.first_stage_model.decode(z)\n\n    @torch.no_grad()\n    def encode_first_stage(self, x):\n        if hasattr(self, \"split_input_params\"):\n            if self.split_input_params[\"patch_distributed_vq\"]:\n                ks = self.split_input_params[\"ks\"]  # eg. (128, 128)\n                stride = self.split_input_params[\"stride\"]  # eg. 
(64, 64)\n                df = self.split_input_params[\"vqf\"]\n                self.split_input_params['original_image_size'] = x.shape[-2:]\n                bs, nc, h, w = x.shape\n                if ks[0] > h or ks[1] > w:\n                    ks = (min(ks[0], h), min(ks[1], w))\n                    print(\"reducing Kernel\")\n\n                if stride[0] > h or stride[1] > w:\n                    stride = (min(stride[0], h), min(stride[1], w))\n                    print(\"reducing stride\")\n\n                fold, unfold, normalization, weighting = self.get_fold_unfold(x, ks, stride, df=df)\n                z = unfold(x)  # (bn, nc * prod(**ks), L)\n                # Reshape to img shape\n                z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1]))  # (bn, nc, ks[0], ks[1], L )\n\n                output_list = [self.first_stage_model.encode(z[:, :, :, :, i])\n                               for i in range(z.shape[-1])]\n\n                o = torch.stack(output_list, axis=-1)\n                o = o * weighting\n\n                # Reverse reshape to img shape\n                o = o.view((o.shape[0], -1, o.shape[-1]))  # (bn, nc * ks[0] * ks[1], L)\n                # stitch crops together\n                decoded = fold(o)\n                decoded = decoded / normalization\n                return decoded\n\n            else:\n                return self.first_stage_model.encode(x)\n        else:\n            return self.first_stage_model.encode(x)\n\n    def shared_step(self, batch, **kwargs):\n        x, c = self.get_input(batch, self.first_stage_key)\n        loss = self(x, c)\n        return loss\n\n    def forward(self, x, c, *args, **kwargs):\n        t = torch.randint(0, self.num_timesteps, (x.shape[0],), device=self.device).long()\n        if self.model.conditioning_key is not None:\n            assert c is not None\n            if self.cond_stage_trainable:\n                c = self.get_learned_conditioning(c)\n            if self.shorten_cond_schedule:  # TODO: drop this option\n                tc = self.cond_ids[t].to(self.device)\n                c = self.q_sample(x_start=c, t=tc, noise=torch.randn_like(c.float()))\n        return self.p_losses(x, c, t, *args, **kwargs)\n    \n    def get_loss_at_t(self, x, c, t, *args, **kwargs):\n        if self.model.conditioning_key is not None:\n            assert c is not None\n            if self.cond_stage_trainable:\n                c = self.get_learned_conditioning(c)\n            if self.shorten_cond_schedule:  # TODO: drop this option\n                tc = self.cond_ids[t].to(self.device)\n                c = self.q_sample(x_start=c, t=tc, noise=torch.randn_like(c.float()))\n        return self.p_losses(x, c, t, *args, **kwargs)\n\n    def _rescale_annotations(self, bboxes, crop_coordinates):  # TODO: move to dataset\n        def rescale_bbox(bbox):\n            x0 = clamp((bbox[0] - crop_coordinates[0]) / crop_coordinates[2])\n            y0 = clamp((bbox[1] - crop_coordinates[1]) / crop_coordinates[3])\n            w = min(bbox[2] / crop_coordinates[2], 1 - x0)\n            h = min(bbox[3] / crop_coordinates[3], 1 - y0)\n            return x0, y0, w, h\n\n        return [rescale_bbox(b) for b in bboxes]\n\n    def apply_model(self, x_noisy, t, cond, return_ids=False):\n\n        if isinstance(cond, dict):\n            # hybrid case, cond is exptected to be a dict\n            pass\n        else:\n            if not isinstance(cond, list):\n                cond = [cond]\n            key = 'c_concat' if 
self.model.conditioning_key == 'concat' else 'c_crossattn'\n            cond = {key: cond}\n\n        if hasattr(self, \"split_input_params\"):\n            assert len(cond) == 1  # todo can only deal with one conditioning atm\n            assert not return_ids  \n            ks = self.split_input_params[\"ks\"]  # eg. (128, 128)\n            stride = self.split_input_params[\"stride\"]  # eg. (64, 64)\n\n            h, w = x_noisy.shape[-2:]\n\n            fold, unfold, normalization, weighting = self.get_fold_unfold(x_noisy, ks, stride)\n\n            z = unfold(x_noisy)  # (bn, nc * prod(**ks), L)\n            # Reshape to img shape\n            z = z.view((z.shape[0], -1, ks[0], ks[1], z.shape[-1]))  # (bn, nc, ks[0], ks[1], L )\n            z_list = [z[:, :, :, :, i] for i in range(z.shape[-1])]\n\n            if self.cond_stage_key in [\"image\", \"LR_image\", \"segmentation\",\n                                       'bbox_img'] and self.model.conditioning_key:  # todo check for completeness\n                c_key = next(iter(cond.keys()))  # get key\n                c = next(iter(cond.values()))  # get value\n                assert (len(c) == 1)  # todo extend to list with more than one elem\n                c = c[0]  # get element\n\n                c = unfold(c)\n                c = c.view((c.shape[0], -1, ks[0], ks[1], c.shape[-1]))  # (bn, nc, ks[0], ks[1], L )\n\n                cond_list = [{c_key: [c[:, :, :, :, i]]} for i in range(c.shape[-1])]\n\n            elif self.cond_stage_key == 'coordinates_bbox':\n                assert 'original_image_size' in self.split_input_params, 'BoudingBoxRescaling is missing original_image_size'\n\n                # assuming padding of unfold is always 0 and its dilation is always 1\n                n_patches_per_row = int((w - ks[0]) / stride[0] + 1)\n                full_img_h, full_img_w = self.split_input_params['original_image_size']\n                # as we are operating on latents, we need the factor from the original image size to the\n                # spatial latent size to properly rescale the crops for regenerating the bbox annotations\n                num_downs = self.first_stage_model.encoder.num_resolutions - 1\n                rescale_latent = 2 ** (num_downs)\n\n                # get top left postions of patches as conforming for the bbbox tokenizer, therefore we\n                # need to rescale the tl patch coordinates to be in between (0,1)\n                tl_patch_coordinates = [(rescale_latent * stride[0] * (patch_nr % n_patches_per_row) / full_img_w,\n                                         rescale_latent * stride[1] * (patch_nr // n_patches_per_row) / full_img_h)\n                                        for patch_nr in range(z.shape[-1])]\n\n                # patch_limits are tl_coord, width and height coordinates as (x_tl, y_tl, h, w)\n                patch_limits = [(x_tl, y_tl,\n                                 rescale_latent * ks[0] / full_img_w,\n                                 rescale_latent * ks[1] / full_img_h) for x_tl, y_tl in tl_patch_coordinates]\n                # patch_values = [(np.arange(x_tl,min(x_tl+ks, 1.)),np.arange(y_tl,min(y_tl+ks, 1.))) for x_tl, y_tl in tl_patch_coordinates]\n\n                # tokenize crop coordinates for the bounding boxes of the respective patches\n                patch_limits_tknzd = [torch.LongTensor(self.bbox_tokenizer._crop_encoder(bbox))[None].to(self.device)\n                                      for bbox in patch_limits]  # list of length l with tensors of 
shape (1, 2)\n                print(patch_limits_tknzd[0].shape)\n                # cut tknzd crop position from conditioning\n                assert isinstance(cond, dict), 'cond must be dict to be fed into model'\n                cut_cond = cond['c_crossattn'][0][..., :-2].to(self.device)\n                print(cut_cond.shape)\n\n                adapted_cond = torch.stack([torch.cat([cut_cond, p], dim=1) for p in patch_limits_tknzd])\n                adapted_cond = rearrange(adapted_cond, 'l b n -> (l b) n')\n                print(adapted_cond.shape)\n                adapted_cond = self.get_learned_conditioning(adapted_cond)\n                print(adapted_cond.shape)\n                adapted_cond = rearrange(adapted_cond, '(l b) n d -> l b n d', l=z.shape[-1])\n                print(adapted_cond.shape)\n\n                cond_list = [{'c_crossattn': [e]} for e in adapted_cond]\n\n            else:\n                cond_list = [cond for i in range(z.shape[-1])]  # Todo make this more efficient\n\n            # apply model by loop over crops\n            output_list = [self.model(z_list[i], t, **cond_list[i]) for i in range(z.shape[-1])]\n            assert not isinstance(output_list[0],\n                                  tuple)  # todo cant deal with multiple model outputs check this never happens\n\n            o = torch.stack(output_list, axis=-1)\n            o = o * weighting\n            # Reverse reshape to img shape\n            o = o.view((o.shape[0], -1, o.shape[-1]))  # (bn, nc * ks[0] * ks[1], L)\n            # stitch crops together\n            x_recon = fold(o) / normalization\n\n        else:\n            x_recon = self.model(x_noisy, t, **cond)\n\n        if isinstance(x_recon, tuple) and not return_ids:\n            return x_recon[0]\n        else:\n            return x_recon\n\n    def _predict_eps_from_xstart(self, x_t, t, pred_xstart):\n        return (extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t - pred_xstart) / \\\n               extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape)\n\n    def _prior_bpd(self, x_start):\n        \"\"\"\n        Get the prior KL term for the variational lower-bound, measured in\n        bits-per-dim.\n        This term can't be optimized, as it only depends on the encoder.\n        :param x_start: the [N x C x ...] 
tensor of inputs.\n        :return: a batch of [N] KL values (in bits), one per batch element.\n        \"\"\"\n        batch_size = x_start.shape[0]\n        t = torch.tensor([self.num_timesteps - 1] * batch_size, device=x_start.device)\n        qt_mean, _, qt_log_variance = self.q_mean_variance(x_start, t)\n        kl_prior = normal_kl(mean1=qt_mean, logvar1=qt_log_variance, mean2=0.0, logvar2=0.0)\n        return mean_flat(kl_prior) / np.log(2.0)\n\n    def p_losses(self, x_start, cond, t, noise=None):\n        noise = default(noise, lambda: torch.randn_like(x_start))\n        x_noisy = self.q_sample(x_start=x_start, t=t, noise=noise)\n        model_output = self.apply_model(x_noisy, t, cond)\n\n        loss_dict = {}\n        prefix = 'train' if self.training else 'val'\n\n        if self.parameterization == \"x0\":\n            target = x_start\n        elif self.parameterization == \"eps\":\n            target = noise\n        else:\n            raise NotImplementedError()\n\n        loss_simple = self.get_loss(model_output, target, mean=False).mean([1, 2, 3])\n        loss_dict.update({f'{prefix}/loss_simple': loss_simple.mean()})\n\n        self.logvar = self.logvar.to(self.device)\n        logvar_t = self.logvar[t].to(self.device)\n        loss = loss_simple / torch.exp(logvar_t) + logvar_t\n        # loss = loss_simple / torch.exp(self.logvar) + self.logvar\n        if self.learn_logvar:\n            loss_dict.update({f'{prefix}/loss_gamma': loss.mean()})\n            loss_dict.update({'logvar': self.logvar.data.mean()})\n\n        loss = self.l_simple_weight * loss.mean()\n\n        loss_vlb = self.get_loss(model_output, target, mean=False).mean(dim=(1, 2, 3))\n        loss_vlb = (self.lvlb_weights[t] * loss_vlb).mean()\n        loss_dict.update({f'{prefix}/loss_vlb': loss_vlb})\n        loss += (self.original_elbo_weight * loss_vlb)\n        loss_dict.update({f'{prefix}/loss': loss})\n\n        return loss, loss_dict\n\n    def p_mean_variance(self, x, c, t, clip_denoised: bool, return_codebook_ids=False, quantize_denoised=False,\n                        return_x0=False, score_corrector=None, corrector_kwargs=None):\n        t_in = t\n        model_out = self.apply_model(x, t_in, c, return_ids=return_codebook_ids)\n\n        if score_corrector is not None:\n            assert self.parameterization == \"eps\"\n            model_out = score_corrector.modify_score(self, model_out, x, t, c, **corrector_kwargs)\n\n        if return_codebook_ids:\n            model_out, logits = model_out\n\n        if self.parameterization == \"eps\":\n            x_recon = self.predict_start_from_noise(x, t=t, noise=model_out)\n        elif self.parameterization == \"x0\":\n            x_recon = model_out\n        else:\n            raise NotImplementedError()\n\n        if clip_denoised:\n            x_recon.clamp_(-1., 1.)\n        if quantize_denoised:\n            x_recon, _, [_, _, indices] = self.first_stage_model.quantize(x_recon)\n        model_mean, posterior_variance, posterior_log_variance = self.q_posterior(x_start=x_recon, x_t=x, t=t)\n        if return_codebook_ids:\n            return model_mean, posterior_variance, posterior_log_variance, logits\n        elif return_x0:\n            return model_mean, posterior_variance, posterior_log_variance, x_recon\n        else:\n            return model_mean, posterior_variance, posterior_log_variance\n\n    @torch.no_grad()\n    def p_sample(self, x, c, t, clip_denoised=False, repeat_noise=False,\n                 return_codebook_ids=False, 
quantize_denoised=False, return_x0=False,\n                 temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None):\n        b, *_, device = *x.shape, x.device\n        outputs = self.p_mean_variance(x=x, c=c, t=t, clip_denoised=clip_denoised,\n                                       return_codebook_ids=return_codebook_ids,\n                                       quantize_denoised=quantize_denoised,\n                                       return_x0=return_x0,\n                                       score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)\n        if return_codebook_ids:\n            raise DeprecationWarning(\"Support dropped.\")\n            model_mean, _, model_log_variance, logits = outputs\n        elif return_x0:\n            model_mean, _, model_log_variance, x0 = outputs\n        else:\n            model_mean, _, model_log_variance = outputs\n\n        noise = noise_like(x.shape, device, repeat_noise) * temperature\n        if noise_dropout > 0.:\n            noise = torch.nn.functional.dropout(noise, p=noise_dropout)\n        # no noise when t == 0\n        nonzero_mask = (1 - (t == 0).float()).reshape(b, *((1,) * (len(x.shape) - 1)))\n\n        if return_codebook_ids:\n            return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, logits.argmax(dim=1)\n        if return_x0:\n            return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise, x0\n        else:\n            return model_mean + nonzero_mask * (0.5 * model_log_variance).exp() * noise\n\n    @torch.no_grad()\n    def progressive_denoising(self, cond, shape, verbose=True, callback=None, quantize_denoised=False,\n                              img_callback=None, mask=None, x0=None, temperature=1., noise_dropout=0.,\n                              score_corrector=None, corrector_kwargs=None, batch_size=None, x_T=None, start_T=None,\n                              log_every_t=None):\n        if not log_every_t:\n            log_every_t = self.log_every_t\n        timesteps = self.num_timesteps\n        if batch_size is not None:\n            b = batch_size if batch_size is not None else shape[0]\n            shape = [batch_size] + list(shape)\n        else:\n            b = batch_size = shape[0]\n        if x_T is None:\n            img = torch.randn(shape, device=self.device)\n        else:\n            img = x_T\n        intermediates = []\n        if cond is not None:\n            if isinstance(cond, dict):\n                cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else\n                list(map(lambda x: x[:batch_size], cond[key])) for key in cond}\n            else:\n                cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]\n\n        if start_T is not None:\n            timesteps = min(timesteps, start_T)\n        iterator = tqdm(reversed(range(0, timesteps)), desc='Progressive Generation',\n                        total=timesteps) if verbose else reversed(\n            range(0, timesteps))\n        if type(temperature) == float:\n            temperature = [temperature] * timesteps\n\n        for i in iterator:\n            ts = torch.full((b,), i, device=self.device, dtype=torch.long)\n            if self.shorten_cond_schedule:\n                assert self.model.conditioning_key != 'hybrid'\n                tc = self.cond_ids[ts].to(cond.device)\n                cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))\n\n            img, 
x0_partial = self.p_sample(img, cond, ts,\n                                            clip_denoised=self.clip_denoised,\n                                            quantize_denoised=quantize_denoised, return_x0=True,\n                                            temperature=temperature[i], noise_dropout=noise_dropout,\n                                            score_corrector=score_corrector, corrector_kwargs=corrector_kwargs)\n            if mask is not None:\n                assert x0 is not None\n                img_orig = self.q_sample(x0, ts)\n                img = img_orig * mask + (1. - mask) * img\n\n            if i % log_every_t == 0 or i == timesteps - 1:\n                intermediates.append(x0_partial)\n            if callback: callback(i)\n            if img_callback: img_callback(img, i)\n        return img, intermediates\n\n    @torch.no_grad()\n    def p_sample_loop(self, cond, shape, return_intermediates=False,\n                      x_T=None, verbose=True, callback=None, timesteps=None, quantize_denoised=False,\n                      mask=None, x0=None, img_callback=None, start_T=None,\n                      log_every_t=None):\n\n        if not log_every_t:\n            log_every_t = self.log_every_t\n        device = self.betas.device\n        b = shape[0]\n        if x_T is None:\n            img = torch.randn(shape, device=device)\n        else:\n            img = x_T\n\n        intermediates = [img]\n        if timesteps is None:\n            timesteps = self.num_timesteps\n\n        if start_T is not None:\n            timesteps = min(timesteps, start_T)\n        iterator = tqdm(reversed(range(0, timesteps)), desc='Sampling t', total=timesteps) if verbose else reversed(\n            range(0, timesteps))\n\n        if mask is not None:\n            assert x0 is not None\n            assert x0.shape[2:3] == mask.shape[2:3]  # spatial size has to match\n\n        for i in iterator:\n            ts = torch.full((b,), i, device=device, dtype=torch.long)\n            if self.shorten_cond_schedule:\n                assert self.model.conditioning_key != 'hybrid'\n                tc = self.cond_ids[ts].to(cond.device)\n                cond = self.q_sample(x_start=cond, t=tc, noise=torch.randn_like(cond))\n\n            img = self.p_sample(img, cond, ts,\n                                clip_denoised=self.clip_denoised,\n                                quantize_denoised=quantize_denoised)\n            if mask is not None:\n                img_orig = self.q_sample(x0, ts)\n                img = img_orig * mask + (1. 
- mask) * img\n\n            if i % log_every_t == 0 or i == timesteps - 1:\n                intermediates.append(img)\n            if callback: callback(i)\n            if img_callback: img_callback(img, i)\n\n        if return_intermediates:\n            return img, intermediates\n        return img\n\n    @torch.no_grad()\n    def sample(self, cond, batch_size=16, return_intermediates=False, x_T=None,\n               verbose=True, timesteps=None, quantize_denoised=False,\n               mask=None, x0=None, shape=None,**kwargs):\n        if shape is None:\n            shape = (batch_size, self.channels, self.image_size, self.image_size)\n        if cond is not None:\n            if isinstance(cond, dict):\n                cond = {key: cond[key][:batch_size] if not isinstance(cond[key], list) else\n                list(map(lambda x: x[:batch_size], cond[key])) for key in cond}\n            else:\n                cond = [c[:batch_size] for c in cond] if isinstance(cond, list) else cond[:batch_size]\n        return self.p_sample_loop(cond,\n                                  shape,\n                                  return_intermediates=return_intermediates, x_T=x_T,\n                                  verbose=verbose, timesteps=timesteps, quantize_denoised=quantize_denoised,\n                                  mask=mask, x0=x0)\n\n    @torch.no_grad()\n    def sample_log(self,cond,batch_size,ddim, ddim_steps,**kwargs):\n\n        if ddim:\n            ddim_sampler = DDIMSampler(self)\n            shape = (self.channels, self.image_size, self.image_size)\n            samples, intermediates =ddim_sampler.sample(ddim_steps,batch_size,\n                                                        shape,cond,verbose=False,**kwargs)\n\n        else:\n            samples, intermediates = self.sample(cond=cond, batch_size=batch_size,\n                                                 return_intermediates=True,**kwargs)\n\n        return samples, intermediates\n\n\n    @torch.no_grad()\n    def log_images(self, batch, N=8, n_row=4, sample=True, ddim_steps=200, ddim_eta=1., return_keys=None,\n                   quantize_denoised=True, inpaint=True, plot_denoise_rows=False, plot_progressive_rows=True,\n                   plot_diffusion_rows=True, **kwargs):\n\n        use_ddim = ddim_steps is not None\n\n        log = dict()\n        z, c, x, xrec, xc = self.get_input(batch, self.first_stage_key,\n                                           return_first_stage_outputs=True,\n                                           force_c_encode=True,\n                                           return_original_cond=True,\n                                           bs=N)\n        N = min(x.shape[0], N)\n        n_row = min(x.shape[0], n_row)\n        log[\"inputs\"] = x\n        log[\"reconstruction\"] = xrec\n        if self.model.conditioning_key is not None:\n            if hasattr(self.cond_stage_model, \"decode\"):\n                xc = self.cond_stage_model.decode(c)\n                log[\"conditioning\"] = xc\n            elif self.cond_stage_key in [\"caption\"]:\n                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[\"caption\"])\n                log[\"conditioning\"] = xc\n            elif self.cond_stage_key == 'class_label':\n                xc = log_txt_as_img((x.shape[2], x.shape[3]), batch[\"human_label\"])\n                log['conditioning'] = xc\n            elif isimage(xc):\n                log[\"conditioning\"] = xc\n            if ismap(xc):\n                log[\"original_conditioning\"] = 
self.to_rgb(xc)\n\n        if plot_diffusion_rows:\n            # get diffusion row\n            diffusion_row = list()\n            z_start = z[:n_row]\n            for t in range(self.num_timesteps):\n                if t % self.log_every_t == 0 or t == self.num_timesteps - 1:\n                    t = repeat(torch.tensor([t]), '1 -> b', b=n_row)\n                    t = t.to(self.device).long()\n                    noise = torch.randn_like(z_start)\n                    z_noisy = self.q_sample(x_start=z_start, t=t, noise=noise)\n                    diffusion_row.append(self.decode_first_stage(z_noisy))\n\n            diffusion_row = torch.stack(diffusion_row)  # n_log_step, n_row, C, H, W\n            diffusion_grid = rearrange(diffusion_row, 'n b c h w -> b n c h w')\n            diffusion_grid = rearrange(diffusion_grid, 'b n c h w -> (b n) c h w')\n            diffusion_grid = make_grid(diffusion_grid, nrow=diffusion_row.shape[0])\n            log[\"diffusion_row\"] = diffusion_grid\n\n        if sample:\n            # get denoise row\n            with self.ema_scope(\"Plotting\"):\n                samples, z_denoise_row = self.sample_log(cond=c,batch_size=N,ddim=use_ddim,\n                                                         ddim_steps=ddim_steps,eta=ddim_eta)\n                # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True)\n            x_samples = self.decode_first_stage(samples)\n            log[\"samples\"] = x_samples\n            if plot_denoise_rows:\n                denoise_grid = self._get_denoise_row_from_list(z_denoise_row)\n                log[\"denoise_row\"] = denoise_grid\n\n            if quantize_denoised and not isinstance(self.first_stage_model, AutoencoderKL) and not isinstance(\n                    self.first_stage_model, IdentityFirstStage):\n                # also display when quantizing x0 while sampling\n                with self.ema_scope(\"Plotting Quantized Denoised\"):\n                    samples, z_denoise_row = self.sample_log(cond=c,batch_size=N,ddim=use_ddim,\n                                                             ddim_steps=ddim_steps,eta=ddim_eta,\n                                                             quantize_denoised=True)\n                    # samples, z_denoise_row = self.sample(cond=c, batch_size=N, return_intermediates=True,\n                    #                                      quantize_denoised=True)\n                x_samples = self.decode_first_stage(samples.to(self.device))\n                log[\"samples_x0_quantized\"] = x_samples\n\n            if inpaint:\n                # make a simple center square\n                b, h, w = z.shape[0], z.shape[2], z.shape[3]\n                mask = torch.ones(N, h, w).to(self.device)\n                # zeros will be filled in\n                mask[:, h // 4:3 * h // 4, w // 4:3 * w // 4] = 0.\n                mask = mask[:, None, ...]\n                with self.ema_scope(\"Plotting Inpaint\"):\n\n                    samples, _ = self.sample_log(cond=c,batch_size=N,ddim=use_ddim, eta=ddim_eta,\n                                                ddim_steps=ddim_steps, x0=z[:N], mask=mask)\n                x_samples = self.decode_first_stage(samples.to(self.device))\n                log[\"samples_inpainting\"] = x_samples\n                log[\"mask\"] = mask\n\n                # outpaint\n                with self.ema_scope(\"Plotting Outpaint\"):\n                    samples, _ = self.sample_log(cond=c, batch_size=N, 
ddim=use_ddim,eta=ddim_eta,\n                                                ddim_steps=ddim_steps, x0=z[:N], mask=mask)\n                x_samples = self.decode_first_stage(samples.to(self.device))\n                log[\"samples_outpainting\"] = x_samples\n\n        if plot_progressive_rows:\n            with self.ema_scope(\"Plotting Progressives\"):\n                img, progressives = self.progressive_denoising(c,\n                                                               shape=(self.channels, self.image_size, self.image_size),\n                                                               batch_size=N)\n            prog_row = self._get_denoise_row_from_list(progressives, desc=\"Progressive Generation\")\n            log[\"progressive_row\"] = prog_row\n\n        if return_keys:\n            if np.intersect1d(list(log.keys()), return_keys).shape[0] == 0:\n                return log\n            else:\n                return {key: log[key] for key in return_keys}\n        return log\n\n    def configure_optimizers(self):\n        lr = self.learning_rate\n        params = list(self.model.parameters())\n        if self.cond_stage_trainable:\n            print(f\"{self.__class__.__name__}: Also optimizing conditioner params!\")\n            params = params + list(self.cond_stage_model.parameters())\n        if self.learn_logvar:\n            print('Diffusion model optimizing logvar')\n            params.append(self.logvar)\n        opt = torch.optim.AdamW(params, lr=lr)\n        if self.use_scheduler:\n            assert 'target' in self.scheduler_config\n            scheduler = instantiate_from_config(self.scheduler_config)\n\n            print(\"Setting up LambdaLR scheduler...\")\n            scheduler = [\n                {\n                    'scheduler': LambdaLR(opt, lr_lambda=scheduler.schedule),\n                    'interval': 'step',\n                    'frequency': 1\n                }]\n            return [opt], scheduler\n        return opt\n\n    @torch.no_grad()\n    def to_rgb(self, x):\n        x = x.float()\n        if not hasattr(self, \"colorize\"):\n            self.colorize = torch.randn(3, x.shape[1], 1, 1).to(x)\n        x = nn.functional.conv2d(x, weight=self.colorize)\n        x = 2. 
* (x - x.min()) / (x.max() - x.min()) - 1.\n        return x\n\n\nclass DiffusionWrapper(pl.LightningModule):\n    def __init__(self, diff_model_config, conditioning_key):\n        super().__init__()\n        self.diffusion_model = instantiate_from_config(diff_model_config)\n        self.conditioning_key = conditioning_key\n        assert self.conditioning_key in [None, 'concat', 'crossattn', 'hybrid', 'adm']\n\n    def forward(self, x, t, c_concat: list = None, c_crossattn: list = None):\n        if self.conditioning_key is None:\n            out = self.diffusion_model(x, t)\n        elif self.conditioning_key == 'concat':\n            xc = torch.cat([x] + c_concat, dim=1)\n            out = self.diffusion_model(xc, t)\n        elif self.conditioning_key == 'crossattn':\n            cc = torch.cat(c_crossattn, 1)\n            out = self.diffusion_model(x, t, context=cc)\n        elif self.conditioning_key == 'hybrid':\n            xc = torch.cat([x] + c_concat, dim=1)\n            cc = torch.cat(c_crossattn, 1)\n            out = self.diffusion_model(xc, t, context=cc)\n        elif self.conditioning_key == 'adm':\n            cc = c_crossattn[0]\n            out = self.diffusion_model(x, t, y=cc)\n        else:\n            raise NotImplementedError()\n\n        return out\n\n\nclass Layout2ImgDiffusion(LatentDiffusion):\n    # TODO: move all layout-specific hacks to this class\n    def __init__(self, cond_stage_key, *args, **kwargs):\n        assert cond_stage_key == 'coordinates_bbox', 'Layout2ImgDiffusion only for cond_stage_key=\"coordinates_bbox\"'\n        super().__init__(cond_stage_key=cond_stage_key, *args, **kwargs)\n\n    def log_images(self, batch, N=8, *args, **kwargs):\n        logs = super().log_images(batch=batch, N=N, *args, **kwargs)\n\n        key = 'train' if self.training else 'validation'\n        dset = self.trainer.datamodule.datasets[key]\n        mapper = dset.conditional_builders[self.cond_stage_key]\n\n        bbox_imgs = []\n        map_fn = lambda catno: dset.get_textual_label(dset.get_category_id(catno))\n        for tknzd_bbox in batch[self.cond_stage_key][:N]:\n            bboximg = mapper.plot(tknzd_bbox.detach().cpu(), map_fn, (256, 256))\n            bbox_imgs.append(bboximg)\n\n        cond_img = torch.stack(bbox_imgs, dim=0)\n        logs['bbox_image'] = cond_img\n        return logs\n"
  },
  {
    "path": "ldm_exp/ldm/models/diffusion/plms.py",
    "content": "\"\"\"SAMPLING ONLY.\"\"\"\n\nimport torch\nimport numpy as np\nfrom tqdm import tqdm\nfrom functools import partial\n\nfrom ldm.modules.diffusionmodules.util import make_ddim_sampling_parameters, make_ddim_timesteps, noise_like\n\n\nclass PLMSSampler(object):\n    def __init__(self, model, schedule=\"linear\", **kwargs):\n        super().__init__()\n        self.model = model\n        self.ddpm_num_timesteps = model.num_timesteps\n        self.schedule = schedule\n\n    def register_buffer(self, name, attr):\n        if type(attr) == torch.Tensor:\n            if attr.device != torch.device(\"cuda\"):\n                attr = attr.to(torch.device(\"cuda\"))\n        setattr(self, name, attr)\n\n    def make_schedule(self, ddim_num_steps, ddim_discretize=\"uniform\", ddim_eta=0., verbose=True):\n        if ddim_eta != 0:\n            raise ValueError('ddim_eta must be 0 for PLMS')\n        self.ddim_timesteps = make_ddim_timesteps(ddim_discr_method=ddim_discretize, num_ddim_timesteps=ddim_num_steps,\n                                                  num_ddpm_timesteps=self.ddpm_num_timesteps,verbose=verbose)\n        alphas_cumprod = self.model.alphas_cumprod\n        assert alphas_cumprod.shape[0] == self.ddpm_num_timesteps, 'alphas have to be defined for each timestep'\n        to_torch = lambda x: x.clone().detach().to(torch.float32).to(self.model.device)\n\n        self.register_buffer('betas', to_torch(self.model.betas))\n        self.register_buffer('alphas_cumprod', to_torch(alphas_cumprod))\n        self.register_buffer('alphas_cumprod_prev', to_torch(self.model.alphas_cumprod_prev))\n\n        # calculations for diffusion q(x_t | x_{t-1}) and others\n        self.register_buffer('sqrt_alphas_cumprod', to_torch(np.sqrt(alphas_cumprod.cpu())))\n        self.register_buffer('sqrt_one_minus_alphas_cumprod', to_torch(np.sqrt(1. - alphas_cumprod.cpu())))\n        self.register_buffer('log_one_minus_alphas_cumprod', to_torch(np.log(1. - alphas_cumprod.cpu())))\n        self.register_buffer('sqrt_recip_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu())))\n        self.register_buffer('sqrt_recipm1_alphas_cumprod', to_torch(np.sqrt(1. / alphas_cumprod.cpu() - 1)))\n\n        # ddim sampling parameters\n        ddim_sigmas, ddim_alphas, ddim_alphas_prev = make_ddim_sampling_parameters(alphacums=alphas_cumprod.cpu(),\n                                                                                   ddim_timesteps=self.ddim_timesteps,\n                                                                                   eta=ddim_eta,verbose=verbose)\n        self.register_buffer('ddim_sigmas', ddim_sigmas)\n        self.register_buffer('ddim_alphas', ddim_alphas)\n        self.register_buffer('ddim_alphas_prev', ddim_alphas_prev)\n        self.register_buffer('ddim_sqrt_one_minus_alphas', np.sqrt(1. 
- ddim_alphas))\n        sigmas_for_original_sampling_steps = ddim_eta * torch.sqrt(\n            (1 - self.alphas_cumprod_prev) / (1 - self.alphas_cumprod) * (\n                        1 - self.alphas_cumprod / self.alphas_cumprod_prev))\n        self.register_buffer('ddim_sigmas_for_original_num_steps', sigmas_for_original_sampling_steps)\n\n    @torch.no_grad()\n    def sample(self,\n               S,\n               batch_size,\n               shape,\n               conditioning=None,\n               callback=None,\n               normals_sequence=None,\n               img_callback=None,\n               quantize_x0=False,\n               eta=0.,\n               mask=None,\n               x0=None,\n               temperature=1.,\n               noise_dropout=0.,\n               score_corrector=None,\n               corrector_kwargs=None,\n               verbose=True,\n               x_T=None,\n               log_every_t=100,\n               unconditional_guidance_scale=1.,\n               unconditional_conditioning=None,\n               # this has to come in the same format as the conditioning, # e.g. as encoded tokens, ...\n               **kwargs\n               ):\n        if conditioning is not None:\n            if isinstance(conditioning, dict):\n                cbs = conditioning[list(conditioning.keys())[0]].shape[0]\n                if cbs != batch_size:\n                    print(f\"Warning: Got {cbs} conditionings but batch-size is {batch_size}\")\n            else:\n                if conditioning.shape[0] != batch_size:\n                    print(f\"Warning: Got {conditioning.shape[0]} conditionings but batch-size is {batch_size}\")\n\n        self.make_schedule(ddim_num_steps=S, ddim_eta=eta, verbose=verbose)\n        # sampling\n        C, H, W = shape\n        size = (batch_size, C, H, W)\n        print(f'Data shape for PLMS sampling is {size}')\n\n        samples, intermediates = self.plms_sampling(conditioning, size,\n                                                    callback=callback,\n                                                    img_callback=img_callback,\n                                                    quantize_denoised=quantize_x0,\n                                                    mask=mask, x0=x0,\n                                                    ddim_use_original_steps=False,\n                                                    noise_dropout=noise_dropout,\n                                                    temperature=temperature,\n                                                    score_corrector=score_corrector,\n                                                    corrector_kwargs=corrector_kwargs,\n                                                    x_T=x_T,\n                                                    log_every_t=log_every_t,\n                                                    unconditional_guidance_scale=unconditional_guidance_scale,\n                                                    unconditional_conditioning=unconditional_conditioning,\n                                                    )\n        return samples, intermediates\n\n    @torch.no_grad()\n    def plms_sampling(self, cond, shape,\n                      x_T=None, ddim_use_original_steps=False,\n                      callback=None, timesteps=None, quantize_denoised=False,\n                      mask=None, x0=None, img_callback=None, log_every_t=100,\n                      temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,\n             
         unconditional_guidance_scale=1., unconditional_conditioning=None,):\n        device = self.model.betas.device\n        b = shape[0]\n        if x_T is None:\n            img = torch.randn(shape, device=device)\n        else:\n            img = x_T\n\n        if timesteps is None:\n            timesteps = self.ddpm_num_timesteps if ddim_use_original_steps else self.ddim_timesteps\n        elif timesteps is not None and not ddim_use_original_steps:\n            subset_end = int(min(timesteps / self.ddim_timesteps.shape[0], 1) * self.ddim_timesteps.shape[0]) - 1\n            timesteps = self.ddim_timesteps[:subset_end]\n\n        intermediates = {'x_inter': [img], 'pred_x0': [img]}\n        time_range = list(reversed(range(0,timesteps))) if ddim_use_original_steps else np.flip(timesteps)\n        total_steps = timesteps if ddim_use_original_steps else timesteps.shape[0]\n        print(f\"Running PLMS Sampling with {total_steps} timesteps\")\n\n        iterator = tqdm(time_range, desc='PLMS Sampler', total=total_steps)\n        old_eps = []\n\n        for i, step in enumerate(iterator):\n            index = total_steps - i - 1\n            ts = torch.full((b,), step, device=device, dtype=torch.long)\n            ts_next = torch.full((b,), time_range[min(i + 1, len(time_range) - 1)], device=device, dtype=torch.long)\n\n            if mask is not None:\n                assert x0 is not None\n                img_orig = self.model.q_sample(x0, ts)  # TODO: deterministic forward pass?\n                img = img_orig * mask + (1. - mask) * img\n\n            outs = self.p_sample_plms(img, cond, ts, index=index, use_original_steps=ddim_use_original_steps,\n                                      quantize_denoised=quantize_denoised, temperature=temperature,\n                                      noise_dropout=noise_dropout, score_corrector=score_corrector,\n                                      corrector_kwargs=corrector_kwargs,\n                                      unconditional_guidance_scale=unconditional_guidance_scale,\n                                      unconditional_conditioning=unconditional_conditioning,\n                                      old_eps=old_eps, t_next=ts_next)\n            img, pred_x0, e_t = outs\n            old_eps.append(e_t)\n            if len(old_eps) >= 4:\n                old_eps.pop(0)\n            if callback: callback(i)\n            if img_callback: img_callback(pred_x0, i)\n\n            if index % log_every_t == 0 or index == total_steps - 1:\n                intermediates['x_inter'].append(img)\n                intermediates['pred_x0'].append(pred_x0)\n\n        return img, intermediates\n\n    @torch.no_grad()\n    def p_sample_plms(self, x, c, t, index, repeat_noise=False, use_original_steps=False, quantize_denoised=False,\n                      temperature=1., noise_dropout=0., score_corrector=None, corrector_kwargs=None,\n                      unconditional_guidance_scale=1., unconditional_conditioning=None, old_eps=None, t_next=None):\n        b, *_, device = *x.shape, x.device\n\n        def get_model_output(x, t):\n            if unconditional_conditioning is None or unconditional_guidance_scale == 1.:\n                e_t = self.model.apply_model(x, t, c)\n            else:\n                x_in = torch.cat([x] * 2)\n                t_in = torch.cat([t] * 2)\n                c_in = torch.cat([unconditional_conditioning, c])\n                e_t_uncond, e_t = self.model.apply_model(x_in, t_in, c_in).chunk(2)\n                e_t = e_t_uncond + 
unconditional_guidance_scale * (e_t - e_t_uncond)\n\n            if score_corrector is not None:\n                assert self.model.parameterization == \"eps\"\n                e_t = score_corrector.modify_score(self.model, e_t, x, t, c, **corrector_kwargs)\n\n            return e_t\n\n        alphas = self.model.alphas_cumprod if use_original_steps else self.ddim_alphas\n        alphas_prev = self.model.alphas_cumprod_prev if use_original_steps else self.ddim_alphas_prev\n        sqrt_one_minus_alphas = self.model.sqrt_one_minus_alphas_cumprod if use_original_steps else self.ddim_sqrt_one_minus_alphas\n        sigmas = self.model.ddim_sigmas_for_original_num_steps if use_original_steps else self.ddim_sigmas\n\n        def get_x_prev_and_pred_x0(e_t, index):\n            # select parameters corresponding to the currently considered timestep\n            a_t = torch.full((b, 1, 1, 1), alphas[index], device=device)\n            a_prev = torch.full((b, 1, 1, 1), alphas_prev[index], device=device)\n            sigma_t = torch.full((b, 1, 1, 1), sigmas[index], device=device)\n            sqrt_one_minus_at = torch.full((b, 1, 1, 1), sqrt_one_minus_alphas[index],device=device)\n\n            # current prediction for x_0\n            pred_x0 = (x - sqrt_one_minus_at * e_t) / a_t.sqrt()\n            if quantize_denoised:\n                pred_x0, _, *_ = self.model.first_stage_model.quantize(pred_x0)\n            # direction pointing to x_t\n            dir_xt = (1. - a_prev - sigma_t**2).sqrt() * e_t\n            noise = sigma_t * noise_like(x.shape, device, repeat_noise) * temperature\n            if noise_dropout > 0.:\n                noise = torch.nn.functional.dropout(noise, p=noise_dropout)\n            x_prev = a_prev.sqrt() * pred_x0 + dir_xt + noise\n            return x_prev, pred_x0\n\n        e_t = get_model_output(x, t)\n        if len(old_eps) == 0:\n            # Pseudo Improved Euler (2nd order)\n            x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t, index)\n            e_t_next = get_model_output(x_prev, t_next)\n            e_t_prime = (e_t + e_t_next) / 2\n        elif len(old_eps) == 1:\n            # 2nd order Pseudo Linear Multistep (Adams-Bashforth)\n            e_t_prime = (3 * e_t - old_eps[-1]) / 2\n        elif len(old_eps) == 2:\n            # 3rd order Pseudo Linear Multistep (Adams-Bashforth)\n            e_t_prime = (23 * e_t - 16 * old_eps[-1] + 5 * old_eps[-2]) / 12\n        elif len(old_eps) >= 3:\n            # 4th order Pseudo Linear Multistep (Adams-Bashforth)\n            e_t_prime = (55 * e_t - 59 * old_eps[-1] + 37 * old_eps[-2] - 9 * old_eps[-3]) / 24\n\n        x_prev, pred_x0 = get_x_prev_and_pred_x0(e_t_prime, index)\n\n        return x_prev, pred_x0, e_t\n"
  },
  {
    "path": "ldm_exp/ldm/modules/__init__.py",
    "content": "from . import attention"
  },
  {
    "path": "ldm_exp/ldm/modules/attention.py",
    "content": "from inspect import isfunction\nimport math\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn, einsum\nfrom einops import rearrange, repeat\n\nfrom ldm.modules.diffusionmodules.util import checkpoint\n\n\ndef exists(val):\n    return val is not None\n\n\ndef uniq(arr):\n    return{el: True for el in arr}.keys()\n\n\ndef default(val, d):\n    if exists(val):\n        return val\n    return d() if isfunction(d) else d\n\n\ndef max_neg_value(t):\n    return -torch.finfo(t.dtype).max\n\n\ndef init_(tensor):\n    dim = tensor.shape[-1]\n    std = 1 / math.sqrt(dim)\n    tensor.uniform_(-std, std)\n    return tensor\n\n\n# feedforward\nclass GEGLU(nn.Module):\n    def __init__(self, dim_in, dim_out):\n        super().__init__()\n        self.proj = nn.Linear(dim_in, dim_out * 2)\n\n    def forward(self, x):\n        x, gate = self.proj(x).chunk(2, dim=-1)\n        return x * F.gelu(gate)\n\n\nclass FeedForward(nn.Module):\n    def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):\n        super().__init__()\n        inner_dim = int(dim * mult)\n        dim_out = default(dim_out, dim)\n        project_in = nn.Sequential(\n            nn.Linear(dim, inner_dim),\n            nn.GELU()\n        ) if not glu else GEGLU(dim, inner_dim)\n\n        self.net = nn.Sequential(\n            project_in,\n            nn.Dropout(dropout),\n            nn.Linear(inner_dim, dim_out)\n        )\n\n    def forward(self, x):\n        return self.net(x)\n\n\ndef zero_module(module):\n    \"\"\"\n    Zero out the parameters of a module and return it.\n    \"\"\"\n    for p in module.parameters():\n        p.detach().zero_()\n    return module\n\n\ndef Normalize(in_channels):\n    return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True)\n\n\nclass LinearAttention(nn.Module):\n    def __init__(self, dim, heads=4, dim_head=32):\n        super().__init__()\n        self.heads = heads\n        hidden_dim = dim_head * heads\n        self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias = False)\n        self.to_out = nn.Conv2d(hidden_dim, dim, 1)\n\n    def forward(self, x):\n        b, c, h, w = x.shape\n        qkv = self.to_qkv(x)\n        q, k, v = rearrange(qkv, 'b (qkv heads c) h w -> qkv b heads c (h w)', heads = self.heads, qkv=3)\n        k = k.softmax(dim=-1)  \n        context = torch.einsum('bhdn,bhen->bhde', k, v)\n        out = torch.einsum('bhde,bhdn->bhen', context, q)\n        out = rearrange(out, 'b heads c (h w) -> b (heads c) h w', heads=self.heads, h=h, w=w)\n        return self.to_out(out)\n\n\nclass SpatialSelfAttention(nn.Module):\n    def __init__(self, in_channels):\n        super().__init__()\n        self.in_channels = in_channels\n\n        self.norm = Normalize(in_channels)\n        self.q = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.k = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.v = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.proj_out = torch.nn.Conv2d(in_channels,\n  
                                      in_channels,\n                                        kernel_size=1,\n                                        stride=1,\n                                        padding=0)\n\n    def forward(self, x):\n        h_ = x\n        h_ = self.norm(h_)\n        q = self.q(h_)\n        k = self.k(h_)\n        v = self.v(h_)\n\n        # compute attention\n        b,c,h,w = q.shape\n        q = rearrange(q, 'b c h w -> b (h w) c')\n        k = rearrange(k, 'b c h w -> b c (h w)')\n        w_ = torch.einsum('bij,bjk->bik', q, k)\n\n        w_ = w_ * (int(c)**(-0.5))\n        w_ = torch.nn.functional.softmax(w_, dim=2)\n\n        # attend to values\n        v = rearrange(v, 'b c h w -> b c (h w)')\n        w_ = rearrange(w_, 'b i j -> b j i')\n        h_ = torch.einsum('bij,bjk->bik', v, w_)\n        h_ = rearrange(h_, 'b c (h w) -> b c h w', h=h)\n        h_ = self.proj_out(h_)\n\n        return x+h_\n\n\nclass CrossAttention(nn.Module):\n    def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.):\n        super().__init__()\n        inner_dim = dim_head * heads\n        context_dim = default(context_dim, query_dim)\n\n        self.scale = dim_head ** -0.5\n        self.heads = heads\n\n        self.to_q = nn.Linear(query_dim, inner_dim, bias=False)\n        self.to_k = nn.Linear(context_dim, inner_dim, bias=False)\n        self.to_v = nn.Linear(context_dim, inner_dim, bias=False)\n\n        self.to_out = nn.Sequential(\n            nn.Linear(inner_dim, query_dim),\n            nn.Dropout(dropout)\n        )\n\n    def forward(self, x, context=None, mask=None):\n        h = self.heads\n\n        q = self.to_q(x)\n        context = default(context, x)\n        k = self.to_k(context)\n        v = self.to_v(context)\n\n        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v))\n\n        sim = einsum('b i d, b j d -> b i j', q, k) * self.scale\n\n        if exists(mask):\n            mask = rearrange(mask, 'b ... 
-> b (...)')\n            max_neg_value = -torch.finfo(sim.dtype).max\n            mask = repeat(mask, 'b j -> (b h) () j', h=h)\n            sim.masked_fill_(~mask, max_neg_value)\n\n        # attention, what we cannot get enough of\n        attn = sim.softmax(dim=-1)\n\n        out = einsum('b i j, b j d -> b i d', attn, v)\n        out = rearrange(out, '(b h) n d -> b n (h d)', h=h)\n        return self.to_out(out)\n\n\nclass BasicTransformerBlock(nn.Module):\n    def __init__(self, dim, n_heads, d_head, dropout=0., context_dim=None, gated_ff=True, checkpoint=True):\n        super().__init__()\n        self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout)  # is a self-attention\n        self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)\n        self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim,\n                                    heads=n_heads, dim_head=d_head, dropout=dropout)  # is self-attn if context is none\n        self.norm1 = nn.LayerNorm(dim)\n        self.norm2 = nn.LayerNorm(dim)\n        self.norm3 = nn.LayerNorm(dim)\n        self.checkpoint = checkpoint\n\n    def forward(self, x, context=None):\n        x = self.attn1(self.norm1(x)) + x\n        x = self.attn2(self.norm2(x), context=context) + x\n        x = self.ff(self.norm3(x)) + x\n        return x\n\n\nclass SpatialTransformer(nn.Module):\n    \"\"\"\n    Transformer block for image-like data.\n    First, project the input (aka embedding)\n    and reshape to b, t, d.\n    Then apply standard transformer action.\n    Finally, reshape to image\n    \"\"\"\n    def __init__(self, in_channels, n_heads, d_head,\n                 depth=1, dropout=0., context_dim=None):\n        super().__init__()\n        self.in_channels = in_channels\n        inner_dim = n_heads * d_head\n        self.norm = Normalize(in_channels)\n\n        self.proj_in = nn.Conv2d(in_channels,\n                                 inner_dim,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n\n        self.transformer_blocks = nn.ModuleList(\n            [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim)\n                for d in range(depth)]\n        )\n\n        self.proj_out = zero_module(nn.Conv2d(inner_dim,\n                                              in_channels,\n                                              kernel_size=1,\n                                              stride=1,\n                                              padding=0))\n\n    def forward(self, x, context=None):\n        # note: if no context is given, cross-attention defaults to self-attention\n        b, c, h, w = x.shape\n        x_in = x\n        x = self.norm(x)\n        x = self.proj_in(x)\n        x = rearrange(x, 'b c h w -> b (h w) c')\n        for block in self.transformer_blocks:\n            x = block(x, context=context)\n        x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w)\n        x = self.proj_out(x)\n        return x + x_in"
  },
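  {
    "path": "ldm_exp/ldm/modules/attention_usage_sketch.py",
    "content": "\"\"\"Minimal usage sketch for ldm.modules.attention (illustrative only; not imported by the training code,\nand the file name/location is just a documentation convenience).\n\nSpatialTransformer projects an image-like feature map, flattens it to (b, h*w, c), applies\nBasicTransformerBlocks, and reshapes back to (b, c, h, w); with context=None its cross-attention\nreduces to self-attention. All shapes and hyperparameters below are arbitrary.\n\"\"\"\nimport torch\n\nfrom ldm.modules.attention import CrossAttention, SpatialTransformer\n\nif __name__ == '__main__':\n    # image-like features: batch 2, 64 channels, 32x32 spatial grid\n    x = torch.randn(2, 64, 32, 32)\n\n    # inner_dim = n_heads * d_head = 64, matching in_channels here\n    st = SpatialTransformer(in_channels=64, n_heads=4, d_head=16, depth=1, context_dim=None)\n    out = st(x)\n    assert out.shape == x.shape  # residual output keeps the input shape\n\n    # cross-attention on sequence data: 1024 query tokens attend to a 77-token context\n    attn = CrossAttention(query_dim=64, context_dim=128, heads=4, dim_head=16)\n    tokens = torch.randn(2, 1024, 64)\n    context = torch.randn(2, 77, 128)\n    y = attn(tokens, context=context)\n    assert y.shape == (2, 1024, 64)\n"
  },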
  {
    "path": "ldm_exp/ldm/modules/diffusionmodules/__init__.py",
    "content": ""
  },
  {
    "path": "ldm_exp/ldm/modules/diffusionmodules/model.py",
    "content": "# pytorch_diffusion + derived encoder decoder\nimport math\nimport torch\nimport torch.nn as nn\nimport numpy as np\nfrom einops import rearrange\n\nfrom ldm.util import instantiate_from_config\nfrom ldm.modules.attention import LinearAttention\n\n\ndef get_timestep_embedding(timesteps, embedding_dim):\n    \"\"\"\n    This matches the implementation in Denoising Diffusion Probabilistic Models:\n    From Fairseq.\n    Build sinusoidal embeddings.\n    This matches the implementation in tensor2tensor, but differs slightly\n    from the description in Section 3.5 of \"Attention Is All You Need\".\n    \"\"\"\n    assert len(timesteps.shape) == 1\n\n    half_dim = embedding_dim // 2\n    emb = math.log(10000) / (half_dim - 1)\n    emb = torch.exp(torch.arange(half_dim, dtype=torch.float32) * -emb)\n    emb = emb.to(device=timesteps.device)\n    emb = timesteps.float()[:, None] * emb[None, :]\n    emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1)\n    if embedding_dim % 2 == 1:  # zero pad\n        emb = torch.nn.functional.pad(emb, (0,1,0,0))\n    return emb\n\n\ndef nonlinearity(x):\n    # swish\n    return x*torch.sigmoid(x)\n\n\ndef Normalize(in_channels, num_groups=32):\n    return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True)\n\n\nclass Upsample(nn.Module):\n    def __init__(self, in_channels, with_conv):\n        super().__init__()\n        self.with_conv = with_conv\n        if self.with_conv:\n            self.conv = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x):\n        x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode=\"nearest\")\n        if self.with_conv:\n            x = self.conv(x)\n        return x\n\n\nclass Downsample(nn.Module):\n    def __init__(self, in_channels, with_conv):\n        super().__init__()\n        self.with_conv = with_conv\n        if self.with_conv:\n            # no asymmetric padding in torch conv, must do it ourselves\n            self.conv = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=3,\n                                        stride=2,\n                                        padding=0)\n\n    def forward(self, x):\n        if self.with_conv:\n            pad = (0,1,0,1)\n            x = torch.nn.functional.pad(x, pad, mode=\"constant\", value=0)\n            x = self.conv(x)\n        else:\n            x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2)\n        return x\n\n\nclass ResnetBlock(nn.Module):\n    def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False,\n                 dropout, temb_channels=512):\n        super().__init__()\n        self.in_channels = in_channels\n        out_channels = in_channels if out_channels is None else out_channels\n        self.out_channels = out_channels\n        self.use_conv_shortcut = conv_shortcut\n\n        self.norm1 = Normalize(in_channels)\n        self.conv1 = torch.nn.Conv2d(in_channels,\n                                     out_channels,\n                                     kernel_size=3,\n                                     stride=1,\n                                     padding=1)\n        if temb_channels > 0:\n            self.temb_proj = torch.nn.Linear(temb_channels,\n  
                                           out_channels)\n        self.norm2 = Normalize(out_channels)\n        self.dropout = torch.nn.Dropout(dropout)\n        self.conv2 = torch.nn.Conv2d(out_channels,\n                                     out_channels,\n                                     kernel_size=3,\n                                     stride=1,\n                                     padding=1)\n        if self.in_channels != self.out_channels:\n            if self.use_conv_shortcut:\n                self.conv_shortcut = torch.nn.Conv2d(in_channels,\n                                                     out_channels,\n                                                     kernel_size=3,\n                                                     stride=1,\n                                                     padding=1)\n            else:\n                self.nin_shortcut = torch.nn.Conv2d(in_channels,\n                                                    out_channels,\n                                                    kernel_size=1,\n                                                    stride=1,\n                                                    padding=0)\n\n    def forward(self, x, temb):\n        h = x\n        h = self.norm1(h)\n        h = nonlinearity(h)\n        h = self.conv1(h)\n\n        if temb is not None:\n            h = h + self.temb_proj(nonlinearity(temb))[:,:,None,None]\n\n        h = self.norm2(h)\n        h = nonlinearity(h)\n        h = self.dropout(h)\n        h = self.conv2(h)\n\n        if self.in_channels != self.out_channels:\n            if self.use_conv_shortcut:\n                x = self.conv_shortcut(x)\n            else:\n                x = self.nin_shortcut(x)\n\n        return x+h\n\n\nclass LinAttnBlock(LinearAttention):\n    \"\"\"to match AttnBlock usage\"\"\"\n    def __init__(self, in_channels):\n        super().__init__(dim=in_channels, heads=1, dim_head=in_channels)\n\n\nclass AttnBlock(nn.Module):\n    def __init__(self, in_channels):\n        super().__init__()\n        self.in_channels = in_channels\n\n        self.norm = Normalize(in_channels)\n        self.q = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.k = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.v = torch.nn.Conv2d(in_channels,\n                                 in_channels,\n                                 kernel_size=1,\n                                 stride=1,\n                                 padding=0)\n        self.proj_out = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=1,\n                                        stride=1,\n                                        padding=0)\n\n\n    def forward(self, x):\n        h_ = x\n        h_ = self.norm(h_)\n        q = self.q(h_)\n        k = self.k(h_)\n        v = self.v(h_)\n\n        # compute attention\n        b,c,h,w = q.shape\n        q = q.reshape(b,c,h*w)\n        q = q.permute(0,2,1)   # b,hw,c\n        k = k.reshape(b,c,h*w) # b,c,hw\n        w_ = torch.bmm(q,k)     # b,hw,hw    w[b,i,j]=sum_c q[b,i,c]k[b,c,j]\n        w_ = w_ * (int(c)**(-0.5))\n        w_ = 
torch.nn.functional.softmax(w_, dim=2)\n\n        # attend to values\n        v = v.reshape(b,c,h*w)\n        w_ = w_.permute(0,2,1)   # b,hw,hw (first hw of k, second of q)\n        h_ = torch.bmm(v,w_)     # b, c,hw (hw of q) h_[b,c,j] = sum_i v[b,c,i] w_[b,i,j]\n        h_ = h_.reshape(b,c,h,w)\n\n        h_ = self.proj_out(h_)\n\n        return x+h_\n\n\ndef make_attn(in_channels, attn_type=\"vanilla\"):\n    assert attn_type in [\"vanilla\", \"linear\", \"none\"], f'attn_type {attn_type} unknown'\n    print(f\"making attention of type '{attn_type}' with {in_channels} in_channels\")\n    if attn_type == \"vanilla\":\n        return AttnBlock(in_channels)\n    elif attn_type == \"none\":\n        return nn.Identity(in_channels)\n    else:\n        return LinAttnBlock(in_channels)\n\n\nclass Model(nn.Module):\n    def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,\n                 attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,\n                 resolution, use_timestep=True, use_linear_attn=False, attn_type=\"vanilla\"):\n        super().__init__()\n        if use_linear_attn: attn_type = \"linear\"\n        self.ch = ch\n        self.temb_ch = self.ch*4\n        self.num_resolutions = len(ch_mult)\n        self.num_res_blocks = num_res_blocks\n        self.resolution = resolution\n        self.in_channels = in_channels\n\n        self.use_timestep = use_timestep\n        if self.use_timestep:\n            # timestep embedding\n            self.temb = nn.Module()\n            self.temb.dense = nn.ModuleList([\n                torch.nn.Linear(self.ch,\n                                self.temb_ch),\n                torch.nn.Linear(self.temb_ch,\n                                self.temb_ch),\n            ])\n\n        # downsampling\n        self.conv_in = torch.nn.Conv2d(in_channels,\n                                       self.ch,\n                                       kernel_size=3,\n                                       stride=1,\n                                       padding=1)\n\n        curr_res = resolution\n        in_ch_mult = (1,)+tuple(ch_mult)\n        self.down = nn.ModuleList()\n        for i_level in range(self.num_resolutions):\n            block = nn.ModuleList()\n            attn = nn.ModuleList()\n            block_in = ch*in_ch_mult[i_level]\n            block_out = ch*ch_mult[i_level]\n            for i_block in range(self.num_res_blocks):\n                block.append(ResnetBlock(in_channels=block_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n                if curr_res in attn_resolutions:\n                    attn.append(make_attn(block_in, attn_type=attn_type))\n            down = nn.Module()\n            down.block = block\n            down.attn = attn\n            if i_level != self.num_resolutions-1:\n                down.downsample = Downsample(block_in, resamp_with_conv)\n                curr_res = curr_res // 2\n            self.down.append(down)\n\n        # middle\n        self.mid = nn.Module()\n        self.mid.block_1 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n        self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)\n        
self.mid.block_2 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n\n        # upsampling\n        self.up = nn.ModuleList()\n        for i_level in reversed(range(self.num_resolutions)):\n            block = nn.ModuleList()\n            attn = nn.ModuleList()\n            block_out = ch*ch_mult[i_level]\n            skip_in = ch*ch_mult[i_level]\n            for i_block in range(self.num_res_blocks+1):\n                if i_block == self.num_res_blocks:\n                    skip_in = ch*in_ch_mult[i_level]\n                block.append(ResnetBlock(in_channels=block_in+skip_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n                if curr_res in attn_resolutions:\n                    attn.append(make_attn(block_in, attn_type=attn_type))\n            up = nn.Module()\n            up.block = block\n            up.attn = attn\n            if i_level != 0:\n                up.upsample = Upsample(block_in, resamp_with_conv)\n                curr_res = curr_res * 2\n            self.up.insert(0, up) # prepend to get consistent order\n\n        # end\n        self.norm_out = Normalize(block_in)\n        self.conv_out = torch.nn.Conv2d(block_in,\n                                        out_ch,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x, t=None, context=None):\n        #assert x.shape[2] == x.shape[3] == self.resolution\n        if context is not None:\n            # assume aligned context, cat along channel axis\n            x = torch.cat((x, context), dim=1)\n        if self.use_timestep:\n            # timestep embedding\n            assert t is not None\n            temb = get_timestep_embedding(t, self.ch)\n            temb = self.temb.dense[0](temb)\n            temb = nonlinearity(temb)\n            temb = self.temb.dense[1](temb)\n        else:\n            temb = None\n\n        # downsampling\n        hs = [self.conv_in(x)]\n        for i_level in range(self.num_resolutions):\n            for i_block in range(self.num_res_blocks):\n                h = self.down[i_level].block[i_block](hs[-1], temb)\n                if len(self.down[i_level].attn) > 0:\n                    h = self.down[i_level].attn[i_block](h)\n                hs.append(h)\n            if i_level != self.num_resolutions-1:\n                hs.append(self.down[i_level].downsample(hs[-1]))\n\n        # middle\n        h = hs[-1]\n        h = self.mid.block_1(h, temb)\n        h = self.mid.attn_1(h)\n        h = self.mid.block_2(h, temb)\n\n        # upsampling\n        for i_level in reversed(range(self.num_resolutions)):\n            for i_block in range(self.num_res_blocks+1):\n                h = self.up[i_level].block[i_block](\n                    torch.cat([h, hs.pop()], dim=1), temb)\n                if len(self.up[i_level].attn) > 0:\n                    h = self.up[i_level].attn[i_block](h)\n            if i_level != 0:\n                h = self.up[i_level].upsample(h)\n\n        # end\n        h = self.norm_out(h)\n        h = nonlinearity(h)\n        h = self.conv_out(h)\n        return h\n\n    def 
get_last_layer(self):\n        return self.conv_out.weight\n\n\nclass Encoder(nn.Module):\n    def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,\n                 attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,\n                 resolution, z_channels, double_z=True, use_linear_attn=False, attn_type=\"vanilla\",\n                 **ignore_kwargs):\n        super().__init__()\n        if use_linear_attn: attn_type = \"linear\"\n        self.ch = ch\n        self.temb_ch = 0\n        self.num_resolutions = len(ch_mult)\n        self.num_res_blocks = num_res_blocks\n        self.resolution = resolution\n        self.in_channels = in_channels\n\n        # downsampling\n        self.conv_in = torch.nn.Conv2d(in_channels,\n                                       self.ch,\n                                       kernel_size=3,\n                                       stride=1,\n                                       padding=1)\n\n        curr_res = resolution\n        in_ch_mult = (1,)+tuple(ch_mult)\n        self.in_ch_mult = in_ch_mult\n        self.down = nn.ModuleList()\n        for i_level in range(self.num_resolutions):\n            block = nn.ModuleList()\n            attn = nn.ModuleList()\n            block_in = ch*in_ch_mult[i_level]\n            block_out = ch*ch_mult[i_level]\n            for i_block in range(self.num_res_blocks):\n                block.append(ResnetBlock(in_channels=block_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n                if curr_res in attn_resolutions:\n                    attn.append(make_attn(block_in, attn_type=attn_type))\n            down = nn.Module()\n            down.block = block\n            down.attn = attn\n            if i_level != self.num_resolutions-1:\n                down.downsample = Downsample(block_in, resamp_with_conv)\n                curr_res = curr_res // 2\n            self.down.append(down)\n\n        # middle\n        self.mid = nn.Module()\n        self.mid.block_1 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n        self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)\n        self.mid.block_2 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n\n        # end\n        self.norm_out = Normalize(block_in)\n        self.conv_out = torch.nn.Conv2d(block_in,\n                                        2*z_channels if double_z else z_channels,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x):\n        # timestep embedding\n        temb = None\n\n        # downsampling\n        hs = [self.conv_in(x)]\n        for i_level in range(self.num_resolutions):\n            for i_block in range(self.num_res_blocks):\n                h = self.down[i_level].block[i_block](hs[-1], temb)\n                if len(self.down[i_level].attn) > 0:\n                    h = self.down[i_level].attn[i_block](h)\n                hs.append(h)\n     
       if i_level != self.num_resolutions-1:\n                hs.append(self.down[i_level].downsample(hs[-1]))\n\n        # middle\n        h = hs[-1]\n        h = self.mid.block_1(h, temb)\n        h = self.mid.attn_1(h)\n        h = self.mid.block_2(h, temb)\n\n        # end\n        h = self.norm_out(h)\n        h = nonlinearity(h)\n        h = self.conv_out(h)\n        return h\n\n\nclass Decoder(nn.Module):\n    def __init__(self, *, ch, out_ch, ch_mult=(1,2,4,8), num_res_blocks,\n                 attn_resolutions, dropout=0.0, resamp_with_conv=True, in_channels,\n                 resolution, z_channels, give_pre_end=False, tanh_out=False, use_linear_attn=False,\n                 attn_type=\"vanilla\", **ignorekwargs):\n        super().__init__()\n        if use_linear_attn: attn_type = \"linear\"\n        self.ch = ch\n        self.temb_ch = 0\n        self.num_resolutions = len(ch_mult)\n        self.num_res_blocks = num_res_blocks\n        self.resolution = resolution\n        self.in_channels = in_channels\n        self.give_pre_end = give_pre_end\n        self.tanh_out = tanh_out\n\n        # compute in_ch_mult, block_in and curr_res at lowest res\n        in_ch_mult = (1,)+tuple(ch_mult)\n        block_in = ch*ch_mult[self.num_resolutions-1]\n        curr_res = resolution // 2**(self.num_resolutions-1)\n        self.z_shape = (1,z_channels,curr_res,curr_res)\n        print(\"Working with z of shape {} = {} dimensions.\".format(\n            self.z_shape, np.prod(self.z_shape)))\n\n        # z to block_in\n        self.conv_in = torch.nn.Conv2d(z_channels,\n                                       block_in,\n                                       kernel_size=3,\n                                       stride=1,\n                                       padding=1)\n\n        # middle\n        self.mid = nn.Module()\n        self.mid.block_1 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n        self.mid.attn_1 = make_attn(block_in, attn_type=attn_type)\n        self.mid.block_2 = ResnetBlock(in_channels=block_in,\n                                       out_channels=block_in,\n                                       temb_channels=self.temb_ch,\n                                       dropout=dropout)\n\n        # upsampling\n        self.up = nn.ModuleList()\n        for i_level in reversed(range(self.num_resolutions)):\n            block = nn.ModuleList()\n            attn = nn.ModuleList()\n            block_out = ch*ch_mult[i_level]\n            for i_block in range(self.num_res_blocks+1):\n                block.append(ResnetBlock(in_channels=block_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n                if curr_res in attn_resolutions:\n                    attn.append(make_attn(block_in, attn_type=attn_type))\n            up = nn.Module()\n            up.block = block\n            up.attn = attn\n            if i_level != 0:\n                up.upsample = Upsample(block_in, resamp_with_conv)\n                curr_res = curr_res * 2\n            self.up.insert(0, up) # prepend to get consistent order\n\n        # end\n        self.norm_out = Normalize(block_in)\n        self.conv_out = torch.nn.Conv2d(block_in,\n     
                                   out_ch,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, z):\n        #assert z.shape[1:] == self.z_shape[1:]\n        self.last_z_shape = z.shape\n\n        # timestep embedding\n        temb = None\n\n        # z to block_in\n        h = self.conv_in(z)\n\n        # middle\n        h = self.mid.block_1(h, temb)\n        h = self.mid.attn_1(h)\n        h = self.mid.block_2(h, temb)\n\n        # upsampling\n        for i_level in reversed(range(self.num_resolutions)):\n            for i_block in range(self.num_res_blocks+1):\n                h = self.up[i_level].block[i_block](h, temb)\n                if len(self.up[i_level].attn) > 0:\n                    h = self.up[i_level].attn[i_block](h)\n            if i_level != 0:\n                h = self.up[i_level].upsample(h)\n\n        # end\n        if self.give_pre_end:\n            return h\n\n        h = self.norm_out(h)\n        h = nonlinearity(h)\n        h = self.conv_out(h)\n        if self.tanh_out:\n            h = torch.tanh(h)\n        return h\n\n\nclass SimpleDecoder(nn.Module):\n    def __init__(self, in_channels, out_channels, *args, **kwargs):\n        super().__init__()\n        self.model = nn.ModuleList([nn.Conv2d(in_channels, in_channels, 1),\n                                     ResnetBlock(in_channels=in_channels,\n                                                 out_channels=2 * in_channels,\n                                                 temb_channels=0, dropout=0.0),\n                                     ResnetBlock(in_channels=2 * in_channels,\n                                                out_channels=4 * in_channels,\n                                                temb_channels=0, dropout=0.0),\n                                     ResnetBlock(in_channels=4 * in_channels,\n                                                out_channels=2 * in_channels,\n                                                temb_channels=0, dropout=0.0),\n                                     nn.Conv2d(2*in_channels, in_channels, 1),\n                                     Upsample(in_channels, with_conv=True)])\n        # end\n        self.norm_out = Normalize(in_channels)\n        self.conv_out = torch.nn.Conv2d(in_channels,\n                                        out_channels,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x):\n        for i, layer in enumerate(self.model):\n            if i in [1,2,3]:\n                x = layer(x, None)\n            else:\n                x = layer(x)\n\n        h = self.norm_out(x)\n        h = nonlinearity(h)\n        x = self.conv_out(h)\n        return x\n\n\nclass UpsampleDecoder(nn.Module):\n    def __init__(self, in_channels, out_channels, ch, num_res_blocks, resolution,\n                 ch_mult=(2,2), dropout=0.0):\n        super().__init__()\n        # upsampling\n        self.temb_ch = 0\n        self.num_resolutions = len(ch_mult)\n        self.num_res_blocks = num_res_blocks\n        block_in = in_channels\n        curr_res = resolution // 2 ** (self.num_resolutions - 1)\n        self.res_blocks = nn.ModuleList()\n        self.upsample_blocks = nn.ModuleList()\n        for i_level in range(self.num_resolutions):\n            res_block = []\n            block_out = ch * 
ch_mult[i_level]\n            for i_block in range(self.num_res_blocks + 1):\n                res_block.append(ResnetBlock(in_channels=block_in,\n                                         out_channels=block_out,\n                                         temb_channels=self.temb_ch,\n                                         dropout=dropout))\n                block_in = block_out\n            self.res_blocks.append(nn.ModuleList(res_block))\n            if i_level != self.num_resolutions - 1:\n                self.upsample_blocks.append(Upsample(block_in, True))\n                curr_res = curr_res * 2\n\n        # end\n        self.norm_out = Normalize(block_in)\n        self.conv_out = torch.nn.Conv2d(block_in,\n                                        out_channels,\n                                        kernel_size=3,\n                                        stride=1,\n                                        padding=1)\n\n    def forward(self, x):\n        # upsampling\n        h = x\n        for k, i_level in enumerate(range(self.num_resolutions)):\n            for i_block in range(self.num_res_blocks + 1):\n                h = self.res_blocks[i_level][i_block](h, None)\n            if i_level != self.num_resolutions - 1:\n                h = self.upsample_blocks[k](h)\n        h = self.norm_out(h)\n        h = nonlinearity(h)\n        h = self.conv_out(h)\n        return h\n\n\nclass LatentRescaler(nn.Module):\n    def __init__(self, factor, in_channels, mid_channels, out_channels, depth=2):\n        super().__init__()\n        # residual block, interpolate, residual block\n        self.factor = factor\n        self.conv_in = nn.Conv2d(in_channels,\n                                 mid_channels,\n                                 kernel_size=3,\n                                 stride=1,\n                                 padding=1)\n        self.res_block1 = nn.ModuleList([ResnetBlock(in_channels=mid_channels,\n                                                     out_channels=mid_channels,\n                                                     temb_channels=0,\n                                                     dropout=0.0) for _ in range(depth)])\n        self.attn = AttnBlock(mid_channels)\n        self.res_block2 = nn.ModuleList([ResnetBlock(in_channels=mid_channels,\n                                                     out_channels=mid_channels,\n                                                     temb_channels=0,\n                                                     dropout=0.0) for _ in range(depth)])\n\n        self.conv_out = nn.Conv2d(mid_channels,\n                                  out_channels,\n                                  kernel_size=1,\n                                  )\n\n    def forward(self, x):\n        x = self.conv_in(x)\n        for block in self.res_block1:\n            x = block(x, None)\n        x = torch.nn.functional.interpolate(x, size=(int(round(x.shape[2]*self.factor)), int(round(x.shape[3]*self.factor))))\n        x = self.attn(x)\n        for block in self.res_block2:\n            x = block(x, None)\n        x = self.conv_out(x)\n        return x\n\n\nclass MergedRescaleEncoder(nn.Module):\n    def __init__(self, in_channels, ch, resolution, out_ch, num_res_blocks,\n                 attn_resolutions, dropout=0.0, resamp_with_conv=True,\n                 ch_mult=(1,2,4,8), rescale_factor=1.0, rescale_module_depth=1):\n        super().__init__()\n        intermediate_chn = ch * ch_mult[-1]\n        self.encoder = Encoder(in_channels=in_channels, 
num_res_blocks=num_res_blocks, ch=ch, ch_mult=ch_mult,\n                               z_channels=intermediate_chn, double_z=False, resolution=resolution,\n                               attn_resolutions=attn_resolutions, dropout=dropout, resamp_with_conv=resamp_with_conv,\n                               out_ch=None)\n        self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=intermediate_chn,\n                                       mid_channels=intermediate_chn, out_channels=out_ch, depth=rescale_module_depth)\n\n    def forward(self, x):\n        x = self.encoder(x)\n        x = self.rescaler(x)\n        return x\n\n\nclass MergedRescaleDecoder(nn.Module):\n    def __init__(self, z_channels, out_ch, resolution, num_res_blocks, attn_resolutions, ch, ch_mult=(1,2,4,8),\n                 dropout=0.0, resamp_with_conv=True, rescale_factor=1.0, rescale_module_depth=1):\n        super().__init__()\n        tmp_chn = z_channels*ch_mult[-1]\n        self.decoder = Decoder(out_ch=out_ch, z_channels=tmp_chn, attn_resolutions=attn_resolutions, dropout=dropout,\n                               resamp_with_conv=resamp_with_conv, in_channels=None, num_res_blocks=num_res_blocks,\n                               ch_mult=ch_mult, resolution=resolution, ch=ch)\n        self.rescaler = LatentRescaler(factor=rescale_factor, in_channels=z_channels, mid_channels=tmp_chn,\n                                       out_channels=tmp_chn, depth=rescale_module_depth)\n\n    def forward(self, x):\n        x = self.rescaler(x)\n        x = self.decoder(x)\n        return x\n\n\n
class Upsampler(nn.Module):\n    def __init__(self, in_size, out_size, in_channels, out_channels, ch_mult=2):\n        super().__init__()\n        assert out_size >= in_size\n        num_blocks = int(np.log2(out_size//in_size))+1\n        factor_up = 1.+ (out_size % in_size)\n        print(f\"Building {self.__class__.__name__} with in_size: {in_size} --> out_size {out_size} and factor {factor_up}\")\n        self.rescaler = LatentRescaler(factor=factor_up, in_channels=in_channels, mid_channels=2*in_channels,\n                                       out_channels=in_channels)\n        self.decoder = Decoder(out_ch=out_channels, resolution=out_size, z_channels=in_channels, num_res_blocks=2,\n                               attn_resolutions=[], in_channels=None, ch=in_channels,\n                               ch_mult=[ch_mult for _ in range(num_blocks)])\n\n    def forward(self, x):\n        x = self.rescaler(x)\n        x = self.decoder(x)\n        return x\n\n\n
class Resize(nn.Module):\n    def __init__(self, in_channels=None, learned=False, mode=\"bilinear\"):\n        super().__init__()\n        self.with_conv = learned\n        self.mode = mode\n        if self.with_conv:\n            print(f\"Note: {self.__class__.__name__} uses learned downsampling and will ignore the fixed {mode} mode\")\n            raise NotImplementedError()\n            assert in_channels is not None\n            # no asymmetric padding in torch conv, must do it ourselves\n            self.conv = torch.nn.Conv2d(in_channels,\n                                        in_channels,\n                                        kernel_size=4,\n                                        stride=2,\n                                        padding=1)\n\n    def forward(self, x, scale_factor=1.0):\n        if scale_factor==1.0:\n            return x\n        else:\n            x = torch.nn.functional.interpolate(x, mode=self.mode, align_corners=False, scale_factor=scale_factor)\n        return x\n\n
class FirstStagePostProcessor(nn.Module):\n\n    def __init__(self, ch_mult:list, in_channels,\n                 pretrained_model:nn.Module=None,\n                 reshape=False,\n                 n_channels=None,\n                 dropout=0.,\n                 pretrained_config=None):\n        super().__init__()\n        if pretrained_config is None:\n            assert pretrained_model is not None, 'Either \"pretrained_model\" or \"pretrained_config\" must not be None'\n            self.pretrained_model = pretrained_model\n        else:\n            assert pretrained_config is not None, 'Either \"pretrained_model\" or \"pretrained_config\" must not be None'\n            self.instantiate_pretrained(pretrained_config)\n\n        self.do_reshape = reshape\n\n        if n_channels is None:\n            n_channels = self.pretrained_model.encoder.ch\n\n        self.proj_norm = Normalize(in_channels,num_groups=in_channels//2)\n        self.proj = nn.Conv2d(in_channels,n_channels,kernel_size=3,\n                            stride=1,padding=1)\n\n        blocks = []\n        downs = []\n        ch_in = n_channels\n        for m in ch_mult:\n            blocks.append(ResnetBlock(in_channels=ch_in,out_channels=m*n_channels,dropout=dropout))\n            ch_in = m * n_channels\n            downs.append(Downsample(ch_in, with_conv=False))\n\n        self.model = nn.ModuleList(blocks)\n        self.downsampler = nn.ModuleList(downs)\n\n\n
    def instantiate_pretrained(self, config):\n        model = instantiate_from_config(config)\n        self.pretrained_model = model.eval()\n        # self.pretrained_model.train = False\n        for param in self.pretrained_model.parameters():\n            param.requires_grad = False\n\n\n    @torch.no_grad()\n    def encode_with_pretrained(self,x):\n        # DiagonalGaussianDistribution is not imported at the top of this file; import it locally to avoid a NameError\n        from ldm.modules.distributions.distributions import DiagonalGaussianDistribution\n        c = self.pretrained_model.encode(x)\n        if isinstance(c, DiagonalGaussianDistribution):\n            c = c.mode()\n        return  c\n\n    def forward(self,x):\n        z_fs = self.encode_with_pretrained(x)\n        z = self.proj_norm(z_fs)\n        z = self.proj(z)\n        z = nonlinearity(z)\n\n        for submodel, downmodel in zip(self.model,self.downsampler):\n            z = submodel(z,temb=None)\n            z = downmodel(z)\n\n        if self.do_reshape:\n            z = rearrange(z,'b c h w -> b (h w) c')\n        return z\n\n"
  },
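  {
    "path": "ldm_exp/ldm/modules/diffusionmodules/model_usage_sketch.py",
    "content": "\"\"\"Minimal usage sketch for ldm.modules.diffusionmodules.model (illustrative only; not imported by the\ntraining code, and the file name/location is just a documentation convenience).\n\nRound-trips a dummy image through the convolutional Encoder/Decoder pair. With ch_mult=(1, 2, 4) the\nencoder downsamples twice (64 -> 16) and, with double_z=True, emits 2*z_channels output channels\n(mean and log-variance of the latent). All sizes below are arbitrary.\n\"\"\"\nimport torch\n\nfrom ldm.modules.diffusionmodules.model import Decoder, Encoder\n\nif __name__ == '__main__':\n    enc = Encoder(ch=32, out_ch=3, ch_mult=(1, 2, 4), num_res_blocks=1,\n                  attn_resolutions=[], in_channels=3, resolution=64,\n                  z_channels=4, double_z=True, dropout=0.0)\n    dec = Decoder(ch=32, out_ch=3, ch_mult=(1, 2, 4), num_res_blocks=1,\n                  attn_resolutions=[], in_channels=3, resolution=64,\n                  z_channels=4, dropout=0.0)\n\n    x = torch.randn(1, 3, 64, 64)\n    moments = enc(x)    # (1, 8, 16, 16): mean and log-variance stacked along channels\n    z = moments[:, :4]  # keep the mean half as a stand-in for sampling\n    rec = dec(z)        # (1, 3, 64, 64)\n    assert rec.shape == x.shape\n"
  },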
  {
    "path": "ldm_exp/ldm/modules/diffusionmodules/openaimodel.py",
    "content": "from abc import abstractmethod\nfrom functools import partial\nimport math\nfrom typing import Iterable\n\nimport numpy as np\nimport torch as th\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom ldm.modules.diffusionmodules.util import (\n    checkpoint,\n    conv_nd,\n    linear,\n    avg_pool_nd,\n    zero_module,\n    normalization,\n    timestep_embedding,\n)\nfrom ldm.modules.attention import SpatialTransformer\n\n\n# dummy replace\ndef convert_module_to_f16(x):\n    pass\n\ndef convert_module_to_f32(x):\n    pass\n\n\n## go\nclass AttentionPool2d(nn.Module):\n    \"\"\"\n    Adapted from CLIP: https://github.com/openai/CLIP/blob/main/clip/model.py\n    \"\"\"\n\n    def __init__(\n        self,\n        spacial_dim: int,\n        embed_dim: int,\n        num_heads_channels: int,\n        output_dim: int = None,\n    ):\n        super().__init__()\n        self.positional_embedding = nn.Parameter(th.randn(embed_dim, spacial_dim ** 2 + 1) / embed_dim ** 0.5)\n        self.qkv_proj = conv_nd(1, embed_dim, 3 * embed_dim, 1)\n        self.c_proj = conv_nd(1, embed_dim, output_dim or embed_dim, 1)\n        self.num_heads = embed_dim // num_heads_channels\n        self.attention = QKVAttention(self.num_heads)\n\n    def forward(self, x):\n        b, c, *_spatial = x.shape\n        x = x.reshape(b, c, -1)  # NC(HW)\n        x = th.cat([x.mean(dim=-1, keepdim=True), x], dim=-1)  # NC(HW+1)\n        x = x + self.positional_embedding[None, :, :].to(x.dtype)  # NC(HW+1)\n        x = self.qkv_proj(x)\n        x = self.attention(x)\n        x = self.c_proj(x)\n        return x[:, :, 0]\n\n\nclass TimestepBlock(nn.Module):\n    \"\"\"\n    Any module where forward() takes timestep embeddings as a second argument.\n    \"\"\"\n\n    @abstractmethod\n    def forward(self, x, emb):\n        \"\"\"\n        Apply the module to `x` given `emb` timestep embeddings.\n        \"\"\"\n\n\nclass TimestepEmbedSequential(nn.Sequential, TimestepBlock):\n    \"\"\"\n    A sequential module that passes timestep embeddings to the children that\n    support it as an extra input.\n    \"\"\"\n\n    def forward(self, x, emb, context=None):\n        for layer in self:\n            if isinstance(layer, TimestepBlock):\n                x = layer(x, emb)\n            elif isinstance(layer, SpatialTransformer):\n                x = layer(x, context)\n            else:\n                x = layer(x)\n        return x\n\n\nclass Upsample(nn.Module):\n    \"\"\"\n    An upsampling layer with an optional convolution.\n    :param channels: channels in the inputs and outputs.\n    :param use_conv: a bool determining if a convolution is applied.\n    :param dims: determines if the signal is 1D, 2D, or 3D. 
If 3D, then\n                 upsampling occurs in the inner-two dimensions.\n    \"\"\"\n\n    def __init__(self, channels, use_conv, dims=2, out_channels=None, padding=1):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.dims = dims\n        if use_conv:\n            self.conv = conv_nd(dims, self.channels, self.out_channels, 3, padding=padding)\n\n    def forward(self, x):\n        #assert x.shape[1] == self.channels\n        if self.dims == 3:\n            x = F.interpolate(\n                x, (x.shape[2], x.shape[3] * 2, x.shape[4] * 2), mode=\"nearest\"\n            )\n        else:\n            x = F.interpolate(x, scale_factor=2, mode=\"nearest\")\n        if self.use_conv:\n            x = self.conv(x)\n        return x\n\nclass TransposedUpsample(nn.Module):\n    'Learned 2x upsampling without padding'\n    def __init__(self, channels, out_channels=None, ks=5):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n\n        self.up = nn.ConvTranspose2d(self.channels,self.out_channels,kernel_size=ks,stride=2)\n\n    def forward(self,x):\n        return self.up(x)\n\n\nclass Downsample(nn.Module):\n    \"\"\"\n    A downsampling layer with an optional convolution.\n    :param channels: channels in the inputs and outputs.\n    :param use_conv: a bool determining if a convolution is applied.\n    :param dims: determines if the signal is 1D, 2D, or 3D. If 3D, then\n                 downsampling occurs in the inner-two dimensions.\n    \"\"\"\n\n    def __init__(self, channels, use_conv, dims=2, out_channels=None,padding=1):\n        super().__init__()\n        self.channels = channels\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.dims = dims\n        stride = 2 if dims != 3 else (1, 2, 2)\n        if use_conv:\n            self.op = conv_nd(\n                dims, self.channels, self.out_channels, 3, stride=stride, padding=padding\n            )\n        else:\n            assert self.channels == self.out_channels\n            self.op = avg_pool_nd(dims, kernel_size=stride, stride=stride)\n\n    def forward(self, x):\n        #assert x.shape[1] == self.channels\n        return self.op(x)\n\n\nclass ResBlock(TimestepBlock):\n    \"\"\"\n    A residual block that can optionally change the number of channels.\n    :param channels: the number of input channels.\n    :param emb_channels: the number of timestep embedding channels.\n    :param dropout: the rate of dropout.\n    :param out_channels: if specified, the number of out channels.\n    :param use_conv: if True and out_channels is specified, use a spatial\n        convolution instead of a smaller 1x1 convolution to change the\n        channels in the skip connection.\n    :param dims: determines if the signal is 1D, 2D, or 3D.\n    :param use_checkpoint: if True, use gradient checkpointing on this module.\n    :param up: if True, use this block for upsampling.\n    :param down: if True, use this block for downsampling.\n    \"\"\"\n\n    def __init__(\n        self,\n        channels,\n        emb_channels,\n        dropout,\n        out_channels=None,\n        use_conv=False,\n        use_scale_shift_norm=False,\n        dims=2,\n        use_checkpoint=False,\n        up=False,\n        down=False,\n    ):\n        super().__init__()\n        self.channels = channels\n        self.emb_channels = 
emb_channels\n        self.dropout = dropout\n        self.out_channels = out_channels or channels\n        self.use_conv = use_conv\n        self.use_checkpoint = use_checkpoint\n        self.use_scale_shift_norm = use_scale_shift_norm\n\n        self.in_layers = nn.Sequential(\n            normalization(channels),\n            nn.SiLU(),\n            conv_nd(dims, channels, self.out_channels, 3, padding=1),\n        )\n\n        self.updown = up or down\n\n        if up:\n            self.h_upd = Upsample(channels, False, dims)\n            self.x_upd = Upsample(channels, False, dims)\n        elif down:\n            self.h_upd = Downsample(channels, False, dims)\n            self.x_upd = Downsample(channels, False, dims)\n        else:\n            self.h_upd = self.x_upd = nn.Identity()\n\n        self.emb_layers = nn.Sequential(\n            nn.SiLU(),\n            linear(\n                emb_channels,\n                2 * self.out_channels if use_scale_shift_norm else self.out_channels,\n            ),\n        )\n        self.out_layers = nn.Sequential(\n            normalization(self.out_channels),\n            nn.SiLU(),\n            nn.Dropout(p=dropout),\n            zero_module(\n                conv_nd(dims, self.out_channels, self.out_channels, 3, padding=1)\n            ),\n        )\n\n        if self.out_channels == channels:\n            self.skip_connection = nn.Identity()\n        elif use_conv:\n            self.skip_connection = conv_nd(\n                dims, channels, self.out_channels, 3, padding=1\n            )\n        else:\n            self.skip_connection = conv_nd(dims, channels, self.out_channels, 1)\n\n    def forward(self, x, emb):\n        \"\"\"\n        Apply the block to a Tensor, conditioned on a timestep embedding.\n        :param x: an [N x C x ...] Tensor of features.\n        :param emb: an [N x emb_channels] Tensor of timestep embeddings.\n        :return: an [N x C x ...] 
Tensor of outputs.\n        \"\"\"\n        return checkpoint(\n            self._forward, (x, emb), self.parameters(), self.use_checkpoint\n        )\n\n\n    def _forward(self, x, emb):\n        if self.updown:\n            in_rest, in_conv = self.in_layers[:-1], self.in_layers[-1]\n            h = in_rest(x)\n            h = self.h_upd(h)\n            x = self.x_upd(x)\n            h = in_conv(h)\n        else:\n            h = self.in_layers(x)\n        emb_out = self.emb_layers(emb).type(h.dtype)\n        while len(emb_out.shape) < len(h.shape):\n            emb_out = emb_out[..., None]\n        if self.use_scale_shift_norm:\n            out_norm, out_rest = self.out_layers[0], self.out_layers[1:]\n            scale, shift = th.chunk(emb_out, 2, dim=1)\n            h = out_norm(h) * (1 + scale) + shift\n            h = out_rest(h)\n        else:\n            h = h + emb_out\n            h = self.out_layers(h)\n        return self.skip_connection(x) + h\n\n\nclass AttentionBlock(nn.Module):\n    \"\"\"\n    An attention block that allows spatial positions to attend to each other.\n    Originally ported from here, but adapted to the N-d case.\n    https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/models/unet.py#L66.\n    \"\"\"\n\n    def __init__(\n        self,\n        channels,\n        num_heads=1,\n        num_head_channels=-1,\n        use_checkpoint=False,\n        use_new_attention_order=False,\n    ):\n        super().__init__()\n        self.channels = channels\n        if num_head_channels == -1:\n            self.num_heads = num_heads\n        else:\n            assert (\n                channels % num_head_channels == 0\n            ), f\"q,k,v channels {channels} is not divisible by num_head_channels {num_head_channels}\"\n            self.num_heads = channels // num_head_channels\n        self.use_checkpoint = use_checkpoint\n        self.norm = normalization(channels)\n        self.qkv = conv_nd(1, channels, channels * 3, 1)\n        if use_new_attention_order:\n            # split qkv before split heads\n            self.attention = QKVAttention(self.num_heads)\n        else:\n            # split heads before split qkv\n            self.attention = QKVAttentionLegacy(self.num_heads)\n\n        self.proj_out = zero_module(conv_nd(1, channels, channels, 1))\n\n    def forward(self, x):\n        return checkpoint(self._forward, (x,), self.parameters(), True)   # TODO: check checkpoint usage, is True # TODO: fix the .half call!!!\n        #return pt_checkpoint(self._forward, x)  # pytorch\n\n    def _forward(self, x):\n        b, c, *spatial = x.shape\n        x = x.reshape(b, c, -1)\n        qkv = self.qkv(self.norm(x))\n        h = self.attention(qkv)\n        h = self.proj_out(h)\n        return (x + h).reshape(b, c, *spatial)\n\n\ndef count_flops_attn(model, _x, y):\n    \"\"\"\n    A counter for the `thop` package to count the operations in an\n    attention operation.\n    Meant to be used like:\n        macs, params = thop.profile(\n            model,\n            inputs=(inputs, timestamps),\n            custom_ops={QKVAttention: QKVAttention.count_flops},\n        )\n    \"\"\"\n    b, c, *spatial = y[0].shape\n    num_spatial = int(np.prod(spatial))\n    # We perform two matmuls with the same number of ops.\n    # The first computes the weight matrix, the second computes\n    # the combination of the value vectors.\n    matmul_ops = 2 * b * (num_spatial ** 2) * c\n    model.total_ops += 
th.DoubleTensor([matmul_ops])\n\n\nclass QKVAttentionLegacy(nn.Module):\n    \"\"\"\n    A module which performs QKV attention. Matches legacy QKVAttention + input/ouput heads shaping\n    \"\"\"\n\n    def __init__(self, n_heads):\n        super().__init__()\n        self.n_heads = n_heads\n\n    def forward(self, qkv):\n        \"\"\"\n        Apply QKV attention.\n        :param qkv: an [N x (H * 3 * C) x T] tensor of Qs, Ks, and Vs.\n        :return: an [N x (H * C) x T] tensor after attention.\n        \"\"\"\n        bs, width, length = qkv.shape\n        assert width % (3 * self.n_heads) == 0\n        ch = width // (3 * self.n_heads)\n        q, k, v = qkv.reshape(bs * self.n_heads, ch * 3, length).split(ch, dim=1)\n        scale = 1 / math.sqrt(math.sqrt(ch))\n        weight = th.einsum(\n            \"bct,bcs->bts\", q * scale, k * scale\n        )  # More stable with f16 than dividing afterwards\n        weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)\n        a = th.einsum(\"bts,bcs->bct\", weight, v)\n        return a.reshape(bs, -1, length)\n\n    @staticmethod\n    def count_flops(model, _x, y):\n        return count_flops_attn(model, _x, y)\n\n\nclass QKVAttention(nn.Module):\n    \"\"\"\n    A module which performs QKV attention and splits in a different order.\n    \"\"\"\n\n    def __init__(self, n_heads):\n        super().__init__()\n        self.n_heads = n_heads\n\n    def forward(self, qkv):\n        \"\"\"\n        Apply QKV attention.\n        :param qkv: an [N x (3 * H * C) x T] tensor of Qs, Ks, and Vs.\n        :return: an [N x (H * C) x T] tensor after attention.\n        \"\"\"\n        bs, width, length = qkv.shape\n        assert width % (3 * self.n_heads) == 0\n        ch = width // (3 * self.n_heads)\n        q, k, v = qkv.chunk(3, dim=1)\n        scale = 1 / math.sqrt(math.sqrt(ch))\n        weight = th.einsum(\n            \"bct,bcs->bts\",\n            (q * scale).view(bs * self.n_heads, ch, length),\n            (k * scale).view(bs * self.n_heads, ch, length),\n        )  # More stable with f16 than dividing afterwards\n        weight = th.softmax(weight.float(), dim=-1).type(weight.dtype)\n        a = th.einsum(\"bts,bcs->bct\", weight, v.reshape(bs * self.n_heads, ch, length))\n        return a.reshape(bs, -1, length)\n\n    @staticmethod\n    def count_flops(model, _x, y):\n        return count_flops_attn(model, _x, y)\n\n\nclass UNetModel(nn.Module):\n    \"\"\"\n    The full UNet model with attention and timestep embedding.\n    :param in_channels: channels in the input Tensor.\n    :param model_channels: base channel count for the model.\n    :param out_channels: channels in the output Tensor.\n    :param num_res_blocks: number of residual blocks per downsample.\n    :param attention_resolutions: a collection of downsample rates at which\n        attention will take place. 
May be a set, list, or tuple.\n        For example, if this contains 4, then at 4x downsampling, attention\n        will be used.\n    :param dropout: the dropout probability.\n    :param channel_mult: channel multiplier for each level of the UNet.\n    :param conv_resample: if True, use learned convolutions for upsampling and\n        downsampling.\n    :param dims: determines if the signal is 1D, 2D, or 3D.\n    :param num_classes: if specified (as an int), then this model will be\n        class-conditional with `num_classes` classes.\n    :param use_checkpoint: use gradient checkpointing to reduce memory usage.\n    :param num_heads: the number of attention heads in each attention layer.\n    :param num_heads_channels: if specified, ignore num_heads and instead use\n                               a fixed channel width per attention head.\n    :param num_heads_upsample: works with num_heads to set a different number\n                               of heads for upsampling. Deprecated.\n    :param use_scale_shift_norm: use a FiLM-like conditioning mechanism.\n    :param resblock_updown: use residual blocks for up/downsampling.\n    :param use_new_attention_order: use a different attention pattern for potentially\n                                    increased efficiency.\n    \"\"\"\n\n    def __init__(\n        self,\n        image_size,\n        in_channels,\n        model_channels,\n        out_channels,\n        num_res_blocks,\n        attention_resolutions,\n        dropout=0,\n        channel_mult=(1, 2, 4, 8),\n        conv_resample=True,\n        dims=2,\n        num_classes=None,\n        use_checkpoint=False,\n        use_fp16=False,\n        num_heads=-1,\n        num_head_channels=-1,\n        num_heads_upsample=-1,\n        use_scale_shift_norm=False,\n        resblock_updown=False,\n        use_new_attention_order=False,\n        use_spatial_transformer=False,    # custom transformer support\n        transformer_depth=1,              # custom transformer support\n        context_dim=None,                 # custom transformer support\n        n_embed=None,                     # custom support for prediction of discrete ids into codebook of first stage vq model\n        legacy=True,\n    ):\n        super().__init__()\n        if use_spatial_transformer:\n            assert context_dim is not None, 'Fool!! You forgot to include the dimension of your cross-attention conditioning...'\n\n        if context_dim is not None:\n            assert use_spatial_transformer, 'Fool!! 
You forgot to use the spatial transformer for your cross-attention conditioning...'\n            from omegaconf.listconfig import ListConfig\n            if type(context_dim) == ListConfig:\n                context_dim = list(context_dim)\n\n        if num_heads_upsample == -1:\n            num_heads_upsample = num_heads\n\n        if num_heads == -1:\n            assert num_head_channels != -1, 'Either num_heads or num_head_channels has to be set'\n\n        if num_head_channels == -1:\n            assert num_heads != -1, 'Either num_heads or num_head_channels has to be set'\n\n        self.image_size = image_size\n        self.in_channels = in_channels\n        self.model_channels = model_channels\n        self.out_channels = out_channels\n        self.num_res_blocks = num_res_blocks\n        self.attention_resolutions = attention_resolutions\n        self.dropout = dropout\n        self.channel_mult = channel_mult\n        self.conv_resample = conv_resample\n        self.num_classes = num_classes\n        self.use_checkpoint = use_checkpoint\n        self.dtype = th.float16 if use_fp16 else th.float32\n        self.num_heads = num_heads\n        self.num_head_channels = num_head_channels\n        self.num_heads_upsample = num_heads_upsample\n        self.predict_codebook_ids = n_embed is not None\n\n        time_embed_dim = model_channels * 4\n        self.time_embed = nn.Sequential(\n            linear(model_channels, time_embed_dim),\n            nn.SiLU(),\n            linear(time_embed_dim, time_embed_dim),\n        )\n\n        if self.num_classes is not None:\n            self.label_emb = nn.Embedding(num_classes, time_embed_dim)\n\n        self.input_blocks = nn.ModuleList(\n            [\n                TimestepEmbedSequential(\n                    conv_nd(dims, in_channels, model_channels, 3, padding=1)\n                )\n            ]\n        )\n        self._feature_size = model_channels\n        input_block_chans = [model_channels]\n        ch = model_channels\n        ds = 1\n        for level, mult in enumerate(channel_mult):\n            for _ in range(num_res_blocks):\n                layers = [\n                    ResBlock(\n                        ch,\n                        time_embed_dim,\n                        dropout,\n                        out_channels=mult * model_channels,\n                        dims=dims,\n                        use_checkpoint=use_checkpoint,\n                        use_scale_shift_norm=use_scale_shift_norm,\n                    )\n                ]\n                ch = mult * model_channels\n                if ds in attention_resolutions:\n                    if num_head_channels == -1:\n                        dim_head = ch // num_heads\n                    else:\n                        num_heads = ch // num_head_channels\n                        dim_head = num_head_channels\n                    if legacy:\n                        #num_heads = 1\n                        dim_head = ch // num_heads if use_spatial_transformer else num_head_channels\n                    layers.append(\n                        AttentionBlock(\n                            ch,\n                            use_checkpoint=use_checkpoint,\n                            num_heads=num_heads,\n                            num_head_channels=dim_head,\n                            use_new_attention_order=use_new_attention_order,\n                        ) if not use_spatial_transformer else SpatialTransformer(\n                            ch, num_heads, dim_head, 
depth=transformer_depth, context_dim=context_dim\n                        )\n                    )\n                self.input_blocks.append(TimestepEmbedSequential(*layers))\n                self._feature_size += ch\n                input_block_chans.append(ch)\n            if level != len(channel_mult) - 1:\n                out_ch = ch\n                self.input_blocks.append(\n                    TimestepEmbedSequential(\n                        ResBlock(\n                            ch,\n                            time_embed_dim,\n                            dropout,\n                            out_channels=out_ch,\n                            dims=dims,\n                            use_checkpoint=use_checkpoint,\n                            use_scale_shift_norm=use_scale_shift_norm,\n                            down=True,\n                        )\n                        if resblock_updown\n                        else Downsample(\n                            ch, conv_resample, dims=dims, out_channels=out_ch\n                        )\n                    )\n                )\n                ch = out_ch\n                input_block_chans.append(ch)\n                ds *= 2\n                self._feature_size += ch\n\n        if num_head_channels == -1:\n            dim_head = ch // num_heads\n        else:\n            num_heads = ch // num_head_channels\n            dim_head = num_head_channels\n        if legacy:\n            #num_heads = 1\n            dim_head = ch // num_heads if use_spatial_transformer else num_head_channels\n        self.middle_block = TimestepEmbedSequential(\n            ResBlock(\n                ch,\n                time_embed_dim,\n                dropout,\n                dims=dims,\n                use_checkpoint=use_checkpoint,\n                use_scale_shift_norm=use_scale_shift_norm,\n            ),\n            AttentionBlock(\n                ch,\n                use_checkpoint=use_checkpoint,\n                num_heads=num_heads,\n                num_head_channels=dim_head,\n                use_new_attention_order=use_new_attention_order,\n            ) if not use_spatial_transformer else SpatialTransformer(\n                            ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim\n                        ),\n            ResBlock(\n                ch,\n                time_embed_dim,\n                dropout,\n                dims=dims,\n                use_checkpoint=use_checkpoint,\n                use_scale_shift_norm=use_scale_shift_norm,\n            ),\n        )\n        self._feature_size += ch\n\n        self.output_blocks = nn.ModuleList([])\n        for level, mult in list(enumerate(channel_mult))[::-1]:\n            for i in range(num_res_blocks + 1):\n                ich = input_block_chans.pop()\n                layers = [\n                    ResBlock(\n                        ch + ich,\n                        time_embed_dim,\n                        dropout,\n                        out_channels=model_channels * mult,\n                        dims=dims,\n                        use_checkpoint=use_checkpoint,\n                        use_scale_shift_norm=use_scale_shift_norm,\n                    )\n                ]\n                ch = model_channels * mult\n                if ds in attention_resolutions:\n                    if num_head_channels == -1:\n                        dim_head = ch // num_heads\n                    else:\n                        num_heads = ch // num_head_channels\n   
                     dim_head = num_head_channels\n                    if legacy:\n                        #num_heads = 1\n                        dim_head = ch // num_heads if use_spatial_transformer else num_head_channels\n                    layers.append(\n                        AttentionBlock(\n                            ch,\n                            use_checkpoint=use_checkpoint,\n                            num_heads=num_heads_upsample,\n                            num_head_channels=dim_head,\n                            use_new_attention_order=use_new_attention_order,\n                        ) if not use_spatial_transformer else SpatialTransformer(\n                            ch, num_heads, dim_head, depth=transformer_depth, context_dim=context_dim\n                        )\n                    )\n                if level and i == num_res_blocks:\n                    out_ch = ch\n                    layers.append(\n                        ResBlock(\n                            ch,\n                            time_embed_dim,\n                            dropout,\n                            out_channels=out_ch,\n                            dims=dims,\n                            use_checkpoint=use_checkpoint,\n                            use_scale_shift_norm=use_scale_shift_norm,\n                            up=True,\n                        )\n                        if resblock_updown\n                        else Upsample(ch, conv_resample, dims=dims, out_channels=out_ch)\n                    )\n                    ds //= 2\n                self.output_blocks.append(TimestepEmbedSequential(*layers))\n                self._feature_size += ch\n\n        self.out = nn.Sequential(\n            normalization(ch),\n            nn.SiLU(),\n            zero_module(conv_nd(dims, model_channels, out_channels, 3, padding=1)),\n        )\n        if self.predict_codebook_ids:\n            self.id_predictor = nn.Sequential(\n            normalization(ch),\n            conv_nd(dims, model_channels, n_embed, 1),\n            #nn.LogSoftmax(dim=1)  # change to cross_entropy and produce non-normalized logits\n        )\n\n    def convert_to_fp16(self):\n        \"\"\"\n        Convert the torso of the model to float16.\n        \"\"\"\n        self.input_blocks.apply(convert_module_to_f16)\n        self.middle_block.apply(convert_module_to_f16)\n        self.output_blocks.apply(convert_module_to_f16)\n\n    def convert_to_fp32(self):\n        \"\"\"\n        Convert the torso of the model to float32.\n        \"\"\"\n        self.input_blocks.apply(convert_module_to_f32)\n        self.middle_block.apply(convert_module_to_f32)\n        self.output_blocks.apply(convert_module_to_f32)\n\n    def forward(self, x, timesteps=None, context=None, y=None,**kwargs):\n        \"\"\"\n        Apply the model to an input batch.\n        :param x: an [N x C x ...] Tensor of inputs.\n        :param timesteps: a 1-D batch of timesteps.\n        :param context: conditioning plugged in via crossattn\n        :param y: an [N] Tensor of labels, if class-conditional.\n        :return: an [N x C x ...] 
Tensor of outputs.\n        \"\"\"\n        assert (y is not None) == (\n            self.num_classes is not None\n        ), \"must specify y if and only if the model is class-conditional\"\n        hs = []\n        t_emb = timestep_embedding(timesteps, self.model_channels, repeat_only=False)\n        emb = self.time_embed(t_emb)\n\n        if self.num_classes is not None:\n            assert y.shape == (x.shape[0],)\n            emb = emb + self.label_emb(y)\n\n        h = x.type(self.dtype)\n        for module in self.input_blocks:\n            h = module(h, emb, context)\n            hs.append(h)\n        h = self.middle_block(h, emb, context)\n        for module in self.output_blocks:\n            h = th.cat([h, hs.pop()], dim=1)\n            h = module(h, emb, context)\n        h = h.type(x.dtype)\n        if self.predict_codebook_ids:\n            return self.id_predictor(h)\n        else:\n            return self.out(h)\n\n\nclass EncoderUNetModel(nn.Module):\n    \"\"\"\n    The half UNet model with attention and timestep embedding.\n    For usage, see UNet.\n    \"\"\"\n\n    def __init__(\n        self,\n        image_size,\n        in_channels,\n        model_channels,\n        out_channels,\n        num_res_blocks,\n        attention_resolutions,\n        dropout=0,\n        channel_mult=(1, 2, 4, 8),\n        conv_resample=True,\n        dims=2,\n        use_checkpoint=False,\n        use_fp16=False,\n        num_heads=1,\n        num_head_channels=-1,\n        num_heads_upsample=-1,\n        use_scale_shift_norm=False,\n        resblock_updown=False,\n        use_new_attention_order=False,\n        pool=\"adaptive\",\n        *args,\n        **kwargs\n    ):\n        super().__init__()\n\n        if num_heads_upsample == -1:\n            num_heads_upsample = num_heads\n\n        self.in_channels = in_channels\n        self.model_channels = model_channels\n        self.out_channels = out_channels\n        self.num_res_blocks = num_res_blocks\n        self.attention_resolutions = attention_resolutions\n        self.dropout = dropout\n        self.channel_mult = channel_mult\n        self.conv_resample = conv_resample\n        self.use_checkpoint = use_checkpoint\n        self.dtype = th.float16 if use_fp16 else th.float32\n        self.num_heads = num_heads\n        self.num_head_channels = num_head_channels\n        self.num_heads_upsample = num_heads_upsample\n\n        time_embed_dim = model_channels * 4\n        self.time_embed = nn.Sequential(\n            linear(model_channels, time_embed_dim),\n            nn.SiLU(),\n            linear(time_embed_dim, time_embed_dim),\n        )\n\n        self.input_blocks = nn.ModuleList(\n            [\n                TimestepEmbedSequential(\n                    conv_nd(dims, in_channels, model_channels, 3, padding=1)\n                )\n            ]\n        )\n        self._feature_size = model_channels\n        input_block_chans = [model_channels]\n        ch = model_channels\n        ds = 1\n        for level, mult in enumerate(channel_mult):\n            for _ in range(num_res_blocks):\n                layers = [\n                    ResBlock(\n                        ch,\n                        time_embed_dim,\n                        dropout,\n                        out_channels=mult * model_channels,\n                        dims=dims,\n                        use_checkpoint=use_checkpoint,\n                        use_scale_shift_norm=use_scale_shift_norm,\n                    )\n                ]\n                ch = 
mult * model_channels\n                if ds in attention_resolutions:\n                    layers.append(\n                        AttentionBlock(\n                            ch,\n                            use_checkpoint=use_checkpoint,\n                            num_heads=num_heads,\n                            num_head_channels=num_head_channels,\n                            use_new_attention_order=use_new_attention_order,\n                        )\n                    )\n                self.input_blocks.append(TimestepEmbedSequential(*layers))\n                self._feature_size += ch\n                input_block_chans.append(ch)\n            if level != len(channel_mult) - 1:\n                out_ch = ch\n                self.input_blocks.append(\n                    TimestepEmbedSequential(\n                        ResBlock(\n                            ch,\n                            time_embed_dim,\n                            dropout,\n                            out_channels=out_ch,\n                            dims=dims,\n                            use_checkpoint=use_checkpoint,\n                            use_scale_shift_norm=use_scale_shift_norm,\n                            down=True,\n                        )\n                        if resblock_updown\n                        else Downsample(\n                            ch, conv_resample, dims=dims, out_channels=out_ch\n                        )\n                    )\n                )\n                ch = out_ch\n                input_block_chans.append(ch)\n                ds *= 2\n                self._feature_size += ch\n\n        self.middle_block = TimestepEmbedSequential(\n            ResBlock(\n                ch,\n                time_embed_dim,\n                dropout,\n                dims=dims,\n                use_checkpoint=use_checkpoint,\n                use_scale_shift_norm=use_scale_shift_norm,\n            ),\n            AttentionBlock(\n                ch,\n                use_checkpoint=use_checkpoint,\n                num_heads=num_heads,\n                num_head_channels=num_head_channels,\n                use_new_attention_order=use_new_attention_order,\n            ),\n            ResBlock(\n                ch,\n                time_embed_dim,\n                dropout,\n                dims=dims,\n                use_checkpoint=use_checkpoint,\n                use_scale_shift_norm=use_scale_shift_norm,\n            ),\n        )\n        self._feature_size += ch\n        self.pool = pool\n        if pool == \"adaptive\":\n            self.out = nn.Sequential(\n                normalization(ch),\n                nn.SiLU(),\n                nn.AdaptiveAvgPool2d((1, 1)),\n                zero_module(conv_nd(dims, ch, out_channels, 1)),\n                nn.Flatten(),\n            )\n        elif pool == \"attention\":\n            assert num_head_channels != -1\n            self.out = nn.Sequential(\n                normalization(ch),\n                nn.SiLU(),\n                AttentionPool2d(\n                    (image_size // ds), ch, num_head_channels, out_channels\n                ),\n            )\n        elif pool == \"spatial\":\n            self.out = nn.Sequential(\n                nn.Linear(self._feature_size, 2048),\n                nn.ReLU(),\n                nn.Linear(2048, self.out_channels),\n            )\n        elif pool == \"spatial_v2\":\n            self.out = nn.Sequential(\n                nn.Linear(self._feature_size, 2048),\n                
normalization(2048),\n                nn.SiLU(),\n                nn.Linear(2048, self.out_channels),\n            )\n        else:\n            raise NotImplementedError(f\"Unexpected {pool} pooling\")\n\n    def convert_to_fp16(self):\n        \"\"\"\n        Convert the torso of the model to float16.\n        \"\"\"\n        self.input_blocks.apply(convert_module_to_f16)\n        self.middle_block.apply(convert_module_to_f16)\n\n    def convert_to_fp32(self):\n        \"\"\"\n        Convert the torso of the model to float32.\n        \"\"\"\n        self.input_blocks.apply(convert_module_to_f32)\n        self.middle_block.apply(convert_module_to_f32)\n\n    def forward(self, x, timesteps):\n        \"\"\"\n        Apply the model to an input batch.\n        :param x: an [N x C x ...] Tensor of inputs.\n        :param timesteps: a 1-D batch of timesteps.\n        :return: an [N x K] Tensor of outputs.\n        \"\"\"\n        emb = self.time_embed(timestep_embedding(timesteps, self.model_channels))\n\n        results = []\n        h = x.type(self.dtype)\n        for module in self.input_blocks:\n            h = module(h, emb)\n            if self.pool.startswith(\"spatial\"):\n                results.append(h.type(x.dtype).mean(dim=(2, 3)))\n        h = self.middle_block(h, emb)\n        if self.pool.startswith(\"spatial\"):\n            results.append(h.type(x.dtype).mean(dim=(2, 3)))\n            h = th.cat(results, axis=-1)\n            return self.out(h)\n        else:\n            h = h.type(x.dtype)\n            return self.out(h)\n\n"
  },
  {
    "path": "ldm_exp/ldm/modules/diffusionmodules/util.py",
    "content": "# adopted from\n# https://github.com/openai/improved-diffusion/blob/main/improved_diffusion/gaussian_diffusion.py\n# and\n# https://github.com/lucidrains/denoising-diffusion-pytorch/blob/7706bdfc6f527f58d33f84b7b522e61e6e3164b3/denoising_diffusion_pytorch/denoising_diffusion_pytorch.py\n# and\n# https://github.com/openai/guided-diffusion/blob/0ba878e517b276c45d1195eb29f6f5f72659a05b/guided_diffusion/nn.py\n#\n# thanks!\n\n\nimport os\nimport math\nimport torch\nimport torch.nn as nn\nimport numpy as np\nfrom einops import repeat\n\nfrom ldm.util import instantiate_from_config\n\n\ndef make_beta_schedule(schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3):\n    if schedule == \"linear\":\n        betas = (\n                torch.linspace(linear_start ** 0.5, linear_end ** 0.5, n_timestep, dtype=torch.float64) ** 2\n        )\n\n    elif schedule == \"cosine\":\n        timesteps = (\n                torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s\n        )\n        alphas = timesteps / (1 + cosine_s) * np.pi / 2\n        alphas = torch.cos(alphas).pow(2)\n        alphas = alphas / alphas[0]\n        betas = 1 - alphas[1:] / alphas[:-1]\n        betas = np.clip(betas, a_min=0, a_max=0.999)\n\n    elif schedule == \"sqrt_linear\":\n        betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64)\n    elif schedule == \"sqrt\":\n        betas = torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) ** 0.5\n    else:\n        raise ValueError(f\"schedule '{schedule}' unknown.\")\n    return betas.numpy()\n\n\ndef make_ddim_timesteps(ddim_discr_method, num_ddim_timesteps, num_ddpm_timesteps, verbose=True):\n    if ddim_discr_method == 'uniform':\n        c = num_ddpm_timesteps // num_ddim_timesteps\n        ddim_timesteps = np.asarray(list(range(0, num_ddpm_timesteps, c)))\n    elif ddim_discr_method == 'quad':\n        ddim_timesteps = ((np.linspace(0, np.sqrt(num_ddpm_timesteps * .8), num_ddim_timesteps)) ** 2).astype(int)\n    else:\n        raise NotImplementedError(f'There is no ddim discretization method called \"{ddim_discr_method}\"')\n\n    # assert ddim_timesteps.shape[0] == num_ddim_timesteps\n    # add one to get the final alpha values right (the ones from first scale to data during sampling)\n    steps_out = ddim_timesteps + 1\n    if verbose:\n        print(f'Selected timesteps for ddim sampler: {steps_out}')\n    return steps_out\n\n\ndef make_ddim_sampling_parameters(alphacums, ddim_timesteps, eta, verbose=True):\n    # select alphas for computing the variance schedule\n    alphas = alphacums[ddim_timesteps]\n    alphas_prev = np.asarray([alphacums[0]] + alphacums[ddim_timesteps[:-1]].tolist())\n\n    # according the the formula provided in https://arxiv.org/abs/2010.02502\n    sigmas = eta * np.sqrt((1 - alphas_prev) / (1 - alphas) * (1 - alphas / alphas_prev))\n    if verbose:\n        print(f'Selected alphas for ddim sampler: a_t: {alphas}; a_(t-1): {alphas_prev}')\n        print(f'For the chosen value of eta, which is {eta}, '\n              f'this results in the following sigma_t schedule for ddim sampler {sigmas}')\n    return sigmas, alphas, alphas_prev\n\n\ndef betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999):\n    \"\"\"\n    Create a beta schedule that discretizes the given alpha_t_bar function,\n    which defines the cumulative product of (1-beta) over time from t = [0,1].\n    :param num_diffusion_timesteps: the number of betas to 
produce.\n    :param alpha_bar: a lambda that takes an argument t from 0 to 1 and\n                      produces the cumulative product of (1-beta) up to that\n                      part of the diffusion process.\n    :param max_beta: the maximum beta to use; use values lower than 1 to\n                     prevent singularities.\n    \"\"\"\n    betas = []\n    for i in range(num_diffusion_timesteps):\n        t1 = i / num_diffusion_timesteps\n        t2 = (i + 1) / num_diffusion_timesteps\n        betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta))\n    return np.array(betas)\n\n\ndef extract_into_tensor(a, t, x_shape):\n    b, *_ = t.shape\n    out = a.gather(-1, t)\n    return out.reshape(b, *((1,) * (len(x_shape) - 1)))\n\n\ndef checkpoint(func, inputs, params, flag):\n    \"\"\"\n    Evaluate a function without caching intermediate activations, allowing for\n    reduced memory at the expense of extra compute in the backward pass.\n    :param func: the function to evaluate.\n    :param inputs: the argument sequence to pass to `func`.\n    :param params: a sequence of parameters `func` depends on but does not\n                   explicitly take as arguments.\n    :param flag: if False, disable gradient checkpointing.\n    \"\"\"\n    if flag:\n        args = tuple(inputs) + tuple(params)\n        return CheckpointFunction.apply(func, len(inputs), *args)\n    else:\n        return func(*inputs)\n\n\nclass CheckpointFunction(torch.autograd.Function):\n    @staticmethod\n    def forward(ctx, run_function, length, *args):\n        ctx.run_function = run_function\n        ctx.input_tensors = list(args[:length])\n        ctx.input_params = list(args[length:])\n\n        with torch.no_grad():\n            output_tensors = ctx.run_function(*ctx.input_tensors)\n        return output_tensors\n\n    @staticmethod\n    def backward(ctx, *output_grads):\n        ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors]\n        with torch.enable_grad():\n            # Fixes a bug where the first op in run_function modifies the\n            # Tensor storage in place, which is not allowed for detach()'d\n            # Tensors.\n            shallow_copies = [x.view_as(x) for x in ctx.input_tensors]\n            output_tensors = ctx.run_function(*shallow_copies)\n        input_grads = torch.autograd.grad(\n            output_tensors,\n            ctx.input_tensors + ctx.input_params,\n            output_grads,\n            allow_unused=True,\n        )\n        del ctx.input_tensors\n        del ctx.input_params\n        del output_tensors\n        return (None, None) + input_grads\n\n\ndef timestep_embedding(timesteps, dim, max_period=10000, repeat_only=False):\n    \"\"\"\n    Create sinusoidal timestep embeddings.\n    :param timesteps: a 1-D Tensor of N indices, one per batch element.\n                      These may be fractional.\n    :param dim: the dimension of the output.\n    :param max_period: controls the minimum frequency of the embeddings.\n    :return: an [N x dim] Tensor of positional embeddings.\n    \"\"\"\n    if not repeat_only:\n        half = dim // 2\n        freqs = torch.exp(\n            -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half\n        ).to(device=timesteps.device)\n        args = timesteps[:, None].float() * freqs[None]\n        embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)\n        if dim % 2:\n            embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], 
dim=-1)\n    else:\n        embedding = repeat(timesteps, 'b -> b d', d=dim)\n    return embedding\n\n\ndef zero_module(module):\n    \"\"\"\n    Zero out the parameters of a module and return it.\n    \"\"\"\n    for p in module.parameters():\n        p.detach().zero_()\n    return module\n\n\ndef scale_module(module, scale):\n    \"\"\"\n    Scale the parameters of a module and return it.\n    \"\"\"\n    for p in module.parameters():\n        p.detach().mul_(scale)\n    return module\n\n\ndef mean_flat(tensor):\n    \"\"\"\n    Take the mean over all non-batch dimensions.\n    \"\"\"\n    return tensor.mean(dim=list(range(1, len(tensor.shape))))\n\n\ndef normalization(channels):\n    \"\"\"\n    Make a standard normalization layer.\n    :param channels: number of input channels.\n    :return: an nn.Module for normalization.\n    \"\"\"\n    return GroupNorm32(32, channels)\n\n\n# PyTorch 1.7 has SiLU, but we support PyTorch 1.5.\nclass SiLU(nn.Module):\n    def forward(self, x):\n        return x * torch.sigmoid(x)\n\n\nclass GroupNorm32(nn.GroupNorm):\n    def forward(self, x):\n        return super().forward(x.float()).type(x.dtype)\n\ndef conv_nd(dims, *args, **kwargs):\n    \"\"\"\n    Create a 1D, 2D, or 3D convolution module.\n    \"\"\"\n    if dims == 1:\n        return nn.Conv1d(*args, **kwargs)\n    elif dims == 2:\n        return nn.Conv2d(*args, **kwargs)\n    elif dims == 3:\n        return nn.Conv3d(*args, **kwargs)\n    raise ValueError(f\"unsupported dimensions: {dims}\")\n\n\ndef linear(*args, **kwargs):\n    \"\"\"\n    Create a linear module.\n    \"\"\"\n    return nn.Linear(*args, **kwargs)\n\n\ndef avg_pool_nd(dims, *args, **kwargs):\n    \"\"\"\n    Create a 1D, 2D, or 3D average pooling module.\n    \"\"\"\n    if dims == 1:\n        return nn.AvgPool1d(*args, **kwargs)\n    elif dims == 2:\n        return nn.AvgPool2d(*args, **kwargs)\n    elif dims == 3:\n        return nn.AvgPool3d(*args, **kwargs)\n    raise ValueError(f\"unsupported dimensions: {dims}\")\n\n\nclass HybridConditioner(nn.Module):\n\n    def __init__(self, c_concat_config, c_crossattn_config):\n        super().__init__()\n        self.concat_conditioner = instantiate_from_config(c_concat_config)\n        self.crossattn_conditioner = instantiate_from_config(c_crossattn_config)\n\n    def forward(self, c_concat, c_crossattn):\n        c_concat = self.concat_conditioner(c_concat)\n        c_crossattn = self.crossattn_conditioner(c_crossattn)\n        return {'c_concat': [c_concat], 'c_crossattn': [c_crossattn]}\n\n\ndef noise_like(shape, device, repeat=False):\n    repeat_noise = lambda: torch.randn((1, *shape[1:]), device=device).repeat(shape[0], *((1,) * (len(shape) - 1)))\n    noise = lambda: torch.randn(shape, device=device)\n    return repeat_noise() if repeat else noise()"
  },
  {
    "path": "ldm_exp/ldm/modules/distributions/__init__.py",
    "content": ""
  },
  {
    "path": "ldm_exp/ldm/modules/distributions/distributions.py",
    "content": "import torch\nimport numpy as np\n\n\nclass AbstractDistribution:\n    def sample(self):\n        raise NotImplementedError()\n\n    def mode(self):\n        raise NotImplementedError()\n\n\nclass DiracDistribution(AbstractDistribution):\n    def __init__(self, value):\n        self.value = value\n\n    def sample(self):\n        return self.value\n\n    def mode(self):\n        return self.value\n\n\nclass DiagonalGaussianDistribution(object):\n    def __init__(self, parameters, deterministic=False):\n        self.parameters = parameters\n        self.mean, self.logvar = torch.chunk(parameters, 2, dim=1)\n        self.logvar = torch.clamp(self.logvar, -30.0, 20.0)\n        self.deterministic = deterministic\n        self.std = torch.exp(0.5 * self.logvar)\n        self.var = torch.exp(self.logvar)\n        if self.deterministic:\n            self.var = self.std = torch.zeros_like(self.mean).to(device=self.parameters.device)\n\n    def sample(self):\n        x = self.mean + self.std * torch.randn(self.mean.shape).to(device=self.parameters.device)\n        return x\n\n    def kl(self, other=None):\n        if self.deterministic:\n            return torch.Tensor([0.])\n        else:\n            if other is None:\n                return 0.5 * torch.sum(torch.pow(self.mean, 2)\n                                       + self.var - 1.0 - self.logvar,\n                                       dim=[1, 2, 3])\n            else:\n                return 0.5 * torch.sum(\n                    torch.pow(self.mean - other.mean, 2) / other.var\n                    + self.var / other.var - 1.0 - self.logvar + other.logvar,\n                    dim=[1, 2, 3])\n\n    def nll(self, sample, dims=[1,2,3]):\n        if self.deterministic:\n            return torch.Tensor([0.])\n        logtwopi = np.log(2.0 * np.pi)\n        return 0.5 * torch.sum(\n            logtwopi + self.logvar + torch.pow(sample - self.mean, 2) / self.var,\n            dim=dims)\n\n    def mode(self):\n        return self.mean\n\n\ndef normal_kl(mean1, logvar1, mean2, logvar2):\n    \"\"\"\n    source: https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/losses.py#L12\n    Compute the KL divergence between two gaussians.\n    Shapes are automatically broadcasted, so batches can be compared to\n    scalars, among other use cases.\n    \"\"\"\n    tensor = None\n    for obj in (mean1, logvar1, mean2, logvar2):\n        if isinstance(obj, torch.Tensor):\n            tensor = obj\n            break\n    assert tensor is not None, \"at least one argument must be a Tensor\"\n\n    # Force variances to be Tensors. Broadcasting helps convert scalars to\n    # Tensors, but it does not work for torch.exp().\n    logvar1, logvar2 = [\n        x if isinstance(x, torch.Tensor) else torch.tensor(x).to(tensor)\n        for x in (logvar1, logvar2)\n    ]\n\n    return 0.5 * (\n        -1.0\n        + logvar2\n        - logvar1\n        + torch.exp(logvar1 - logvar2)\n        + ((mean1 - mean2) ** 2) * torch.exp(-logvar2)\n    )\n"
  },
  {
    "path": "ldm_exp/ldm/modules/ema.py",
    "content": "import torch\nfrom torch import nn\n\n\nclass LitEma(nn.Module):\n    def __init__(self, model, decay=0.9999, use_num_upates=True):\n        super().__init__()\n        if decay < 0.0 or decay > 1.0:\n            raise ValueError('Decay must be between 0 and 1')\n\n        self.m_name2s_name = {}\n        self.register_buffer('decay', torch.tensor(decay, dtype=torch.float32))\n        self.register_buffer('num_updates', torch.tensor(0,dtype=torch.int) if use_num_upates\n                             else torch.tensor(-1,dtype=torch.int))\n\n        for name, p in model.named_parameters():\n            if p.requires_grad:\n                #remove as '.'-character is not allowed in buffers\n                s_name = name.replace('.','')\n                self.m_name2s_name.update({name:s_name})\n                self.register_buffer(s_name,p.clone().detach().data)\n\n        self.collected_params = []\n\n    def forward(self,model):\n        decay = self.decay\n\n        if self.num_updates >= 0:\n            self.num_updates += 1\n            decay = min(self.decay,(1 + self.num_updates) / (10 + self.num_updates))\n\n        one_minus_decay = 1.0 - decay\n\n        with torch.no_grad():\n            m_param = dict(model.named_parameters())\n            shadow_params = dict(self.named_buffers())\n\n            for key in m_param:\n                if m_param[key].requires_grad:\n                    sname = self.m_name2s_name[key]\n                    shadow_params[sname] = shadow_params[sname].type_as(m_param[key])\n                    shadow_params[sname].sub_(one_minus_decay * (shadow_params[sname] - m_param[key]))\n                else:\n                    assert not key in self.m_name2s_name\n\n    def copy_to(self, model):\n        m_param = dict(model.named_parameters())\n        shadow_params = dict(self.named_buffers())\n        for key in m_param:\n            if m_param[key].requires_grad:\n                m_param[key].data.copy_(shadow_params[self.m_name2s_name[key]].data)\n            else:\n                assert not key in self.m_name2s_name\n\n    def store(self, parameters):\n        \"\"\"\n        Save the current parameters for restoring later.\n        Args:\n          parameters: Iterable of `torch.nn.Parameter`; the parameters to be\n            temporarily stored.\n        \"\"\"\n        self.collected_params = [param.clone() for param in parameters]\n\n    def restore(self, parameters):\n        \"\"\"\n        Restore the parameters stored with the `store` method.\n        Useful to validate the model with EMA parameters without affecting the\n        original optimization process. Store the parameters before the\n        `copy_to` method. After validation (or model saving), use this to\n        restore the former parameters.\n        Args:\n          parameters: Iterable of `torch.nn.Parameter`; the parameters to be\n            updated with the stored parameters.\n        \"\"\"\n        for c_param, param in zip(self.collected_params, parameters):\n            param.data.copy_(c_param.data)\n"
  },
  {
    "path": "ldm_exp/ldm/modules/encoders/__init__.py",
    "content": ""
  },
  {
    "path": "ldm_exp/ldm/modules/encoders/modules.py",
    "content": "import torch\nimport torch.nn as nn\nfrom functools import partial\nimport clip\nfrom einops import rearrange, repeat\nimport kornia\n\n\nfrom ldm.modules.x_transformer import Encoder, TransformerWrapper  # TODO: can we directly rely on lucidrains code and simply add this as a reuirement? --> test\n\n\nclass AbstractEncoder(nn.Module):\n    def __init__(self):\n        super().__init__()\n\n    def encode(self, *args, **kwargs):\n        raise NotImplementedError\n\n\n\nclass ClassEmbedder(nn.Module):\n    def __init__(self, embed_dim, n_classes=1000, key='class'):\n        super().__init__()\n        self.key = key\n        self.embedding = nn.Embedding(n_classes, embed_dim)\n\n    def forward(self, batch, key=None):\n        if key is None:\n            key = self.key\n        # this is for use in crossattn\n        c = batch[key][:, None]\n        c = self.embedding(c)\n        return c\n\n\nclass TransformerEmbedder(AbstractEncoder):\n    \"\"\"Some transformer encoder layers\"\"\"\n    def __init__(self, n_embed, n_layer, vocab_size, max_seq_len=77, device=\"cuda\"):\n        super().__init__()\n        self.device = device\n        self.transformer = TransformerWrapper(num_tokens=vocab_size, max_seq_len=max_seq_len,\n                                              attn_layers=Encoder(dim=n_embed, depth=n_layer))\n\n    def forward(self, tokens):\n        tokens = tokens.to(self.device)  # meh\n        z = self.transformer(tokens, return_embeddings=True)\n        return z\n\n    def encode(self, x):\n        return self(x)\n\n\nclass BERTTokenizer(AbstractEncoder):\n    \"\"\" Uses a pretrained BERT tokenizer by huggingface. Vocab size: 30522 (?)\"\"\"\n    def __init__(self, device=\"cuda\", vq_interface=True, max_length=77):\n        super().__init__()\n        from transformers import BertTokenizerFast  # TODO: add to reuquirements\n        self.tokenizer = BertTokenizerFast.from_pretrained(\"bert-base-uncased\")\n        self.device = device\n        self.vq_interface = vq_interface\n        self.max_length = max_length\n\n    def forward(self, text):\n        batch_encoding = self.tokenizer(text, truncation=True, max_length=self.max_length, return_length=True,\n                                        return_overflowing_tokens=False, padding=\"max_length\", return_tensors=\"pt\")\n        tokens = batch_encoding[\"input_ids\"].to(self.device)\n        return tokens\n\n    @torch.no_grad()\n    def encode(self, text):\n        tokens = self(text)\n        if not self.vq_interface:\n            return tokens\n        return None, None, [None, None, tokens]\n\n    def decode(self, text):\n        return text\n\n\nclass BERTEmbedder(AbstractEncoder):\n    \"\"\"Uses the BERT tokenizr model and add some transformer encoder layers\"\"\"\n    def __init__(self, n_embed, n_layer, vocab_size=30522, max_seq_len=77,\n                 device=\"cuda\",use_tokenizer=True, embedding_dropout=0.0):\n        super().__init__()\n        self.use_tknz_fn = use_tokenizer\n        if self.use_tknz_fn:\n            self.tknz_fn = BERTTokenizer(vq_interface=False, max_length=max_seq_len)\n        self.device = device\n        self.transformer = TransformerWrapper(num_tokens=vocab_size, max_seq_len=max_seq_len,\n                                              attn_layers=Encoder(dim=n_embed, depth=n_layer),\n                                              emb_dropout=embedding_dropout)\n\n    def forward(self, text):\n        if self.use_tknz_fn:\n            tokens = 
self.tknz_fn(text)#.to(self.device)\n        else:\n            tokens = text\n        z = self.transformer(tokens, return_embeddings=True)\n        return z\n\n    def encode(self, text):\n        # output of length 77\n        return self(text)\n\n\nclass SpatialRescaler(nn.Module):\n    def __init__(self,\n                 n_stages=1,\n                 method='bilinear',\n                 multiplier=0.5,\n                 in_channels=3,\n                 out_channels=None,\n                 bias=False):\n        super().__init__()\n        self.n_stages = n_stages\n        assert self.n_stages >= 0\n        assert method in ['nearest','linear','bilinear','trilinear','bicubic','area']\n        self.multiplier = multiplier\n        self.interpolator = partial(torch.nn.functional.interpolate, mode=method)\n        self.remap_output = out_channels is not None\n        if self.remap_output:\n            print(f'Spatial Rescaler mapping from {in_channels} to {out_channels} channels after resizing.')\n            self.channel_mapper = nn.Conv2d(in_channels,out_channels,1,bias=bias)\n\n    def forward(self,x):\n        for stage in range(self.n_stages):\n            x = self.interpolator(x, scale_factor=self.multiplier)\n\n\n        if self.remap_output:\n            x = self.channel_mapper(x)\n        return x\n\n    def encode(self, x):\n        return self(x)\n\n\nclass FrozenCLIPTextEmbedder(nn.Module):\n    \"\"\"\n    Uses the CLIP transformer encoder for text.\n    \"\"\"\n    def __init__(self, version='ViT-L/14', device=\"cuda\", max_length=77, n_repeat=1, normalize=True):\n        super().__init__()\n        self.model, _ = clip.load(version, jit=False, device=\"cpu\")\n        self.device = device\n        self.max_length = max_length\n        self.n_repeat = n_repeat\n        self.normalize = normalize\n\n    def freeze(self):\n        self.model = self.model.eval()\n        for param in self.parameters():\n            param.requires_grad = False\n\n    def forward(self, text):\n        tokens = clip.tokenize(text).to(self.device)\n        z = self.model.encode_text(tokens)\n        if self.normalize:\n            z = z / torch.linalg.norm(z, dim=1, keepdim=True)\n        return z\n\n    def encode(self, text):\n        z = self(text)\n        if z.ndim==2:\n            z = z[:, None, :]\n        z = repeat(z, 'b 1 d -> b k d', k=self.n_repeat)\n        return z\n\n\nclass FrozenClipImageEmbedder(nn.Module):\n    \"\"\"\n        Uses the CLIP image encoder.\n        \"\"\"\n    def __init__(\n            self,\n            model,\n            jit=False,\n            device='cuda' if torch.cuda.is_available() else 'cpu',\n            antialias=False,\n        ):\n        super().__init__()\n        self.model, _ = clip.load(name=model, device=device, jit=jit)\n\n        self.antialias = antialias\n\n        self.register_buffer('mean', torch.Tensor([0.48145466, 0.4578275, 0.40821073]), persistent=False)\n        self.register_buffer('std', torch.Tensor([0.26862954, 0.26130258, 0.27577711]), persistent=False)\n\n    def preprocess(self, x):\n        # normalize to [0,1]\n        x = kornia.geometry.resize(x, (224, 224),\n                                   interpolation='bicubic',align_corners=True,\n                                   antialias=self.antialias)\n        x = (x + 1.) 
/ 2.\n        # renormalize according to clip\n        x = kornia.enhance.normalize(x, self.mean, self.std)\n        return x\n\n    def forward(self, x):\n        # x is assumed to be in range [-1,1]\n        return self.model.encode_image(self.preprocess(x))\n\n"
  },
  {
    "path": "ldm_exp/ldm/modules/image_degradation/__init__.py",
    "content": "from ldm.modules.image_degradation.bsrgan import degradation_bsrgan_variant as degradation_fn_bsr\nfrom ldm.modules.image_degradation.bsrgan_light import degradation_bsrgan_variant as degradation_fn_bsr_light\n"
  },
  {
    "path": "ldm_exp/ldm/modules/image_degradation/bsrgan.py",
    "content": "# -*- coding: utf-8 -*-\n\"\"\"\n# --------------------------------------------\n# Super-Resolution\n# --------------------------------------------\n#\n# Kai Zhang (cskaizhang@gmail.com)\n# https://github.com/cszn\n# From 2019/03--2021/08\n# --------------------------------------------\n\"\"\"\n\nimport numpy as np\nimport cv2\nimport torch\n\nfrom functools import partial\nimport random\nfrom scipy import ndimage\nimport scipy\nimport scipy.stats as ss\nfrom scipy.interpolate import interp2d\nfrom scipy.linalg import orth\nimport albumentations\n\nimport ldm.modules.image_degradation.utils_image as util\n\n\ndef modcrop_np(img, sf):\n    '''\n    Args:\n        img: numpy image, WxH or WxHxC\n        sf: scale factor\n    Return:\n        cropped image\n    '''\n    w, h = img.shape[:2]\n    im = np.copy(img)\n    return im[:w - w % sf, :h - h % sf, ...]\n\n\n\"\"\"\n# --------------------------------------------\n# anisotropic Gaussian kernels\n# --------------------------------------------\n\"\"\"\n\n\ndef analytic_kernel(k):\n    \"\"\"Calculate the X4 kernel from the X2 kernel (for proof see appendix in paper)\"\"\"\n    k_size = k.shape[0]\n    # Calculate the big kernels size\n    big_k = np.zeros((3 * k_size - 2, 3 * k_size - 2))\n    # Loop over the small kernel to fill the big one\n    for r in range(k_size):\n        for c in range(k_size):\n            big_k[2 * r:2 * r + k_size, 2 * c:2 * c + k_size] += k[r, c] * k\n    # Crop the edges of the big kernel to ignore very small values and increase run time of SR\n    crop = k_size // 2\n    cropped_big_k = big_k[crop:-crop, crop:-crop]\n    # Normalize to 1\n    return cropped_big_k / cropped_big_k.sum()\n\n\ndef anisotropic_Gaussian(ksize=15, theta=np.pi, l1=6, l2=6):\n    \"\"\" generate an anisotropic Gaussian kernel\n    Args:\n        ksize : e.g., 15, kernel size\n        theta : [0,  pi], rotation angle range\n        l1    : [0.1,50], scaling of eigenvalues\n        l2    : [0.1,l1], scaling of eigenvalues\n        If l1 = l2, will get an isotropic Gaussian kernel.\n    Returns:\n        k     : kernel\n    \"\"\"\n\n    v = np.dot(np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]]), np.array([1., 0.]))\n    V = np.array([[v[0], v[1]], [v[1], -v[0]]])\n    D = np.array([[l1, 0], [0, l2]])\n    Sigma = np.dot(np.dot(V, D), np.linalg.inv(V))\n    k = gm_blur_kernel(mean=[0, 0], cov=Sigma, size=ksize)\n\n    return k\n\n\ndef gm_blur_kernel(mean, cov, size=15):\n    center = size / 2.0 + 0.5\n    k = np.zeros([size, size])\n    for y in range(size):\n        for x in range(size):\n            cy = y - center + 1\n            cx = x - center + 1\n            k[y, x] = ss.multivariate_normal.pdf([cx, cy], mean=mean, cov=cov)\n\n    k = k / np.sum(k)\n    return k\n\n\ndef shift_pixel(x, sf, upper_left=True):\n    \"\"\"shift pixel for super-resolution with different scale factors\n    Args:\n        x: WxHxC or WxH\n        sf: scale factor\n        upper_left: shift direction\n    \"\"\"\n    h, w = x.shape[:2]\n    shift = (sf - 1) * 0.5\n    xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0)\n    if upper_left:\n        x1 = xv + shift\n        y1 = yv + shift\n    else:\n        x1 = xv - shift\n        y1 = yv - shift\n\n    x1 = np.clip(x1, 0, w - 1)\n    y1 = np.clip(y1, 0, h - 1)\n\n    if x.ndim == 2:\n        x = interp2d(xv, yv, x)(x1, y1)\n    if x.ndim == 3:\n        for i in range(x.shape[-1]):\n            x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1)\n\n    return 
x\n\n\ndef blur(x, k):\n    '''\n    x: image, NxcxHxW\n    k: kernel, Nx1xhxw\n    '''\n    n, c = x.shape[:2]\n    p1, p2 = (k.shape[-2] - 1) // 2, (k.shape[-1] - 1) // 2\n    x = torch.nn.functional.pad(x, pad=(p1, p2, p1, p2), mode='replicate')\n    k = k.repeat(1, c, 1, 1)\n    k = k.view(-1, 1, k.shape[2], k.shape[3])\n    x = x.view(1, -1, x.shape[2], x.shape[3])\n    x = torch.nn.functional.conv2d(x, k, bias=None, stride=1, padding=0, groups=n * c)\n    x = x.view(n, c, x.shape[2], x.shape[3])\n\n    return x\n\n\ndef gen_kernel(k_size=np.array([15, 15]), scale_factor=np.array([4, 4]), min_var=0.6, max_var=10., noise_level=0):\n    \"\"\"\"\n    # modified version of https://github.com/assafshocher/BlindSR_dataset_generator\n    # Kai Zhang\n    # min_var = 0.175 * sf  # variance of the gaussian kernel will be sampled between min_var and max_var\n    # max_var = 2.5 * sf\n    \"\"\"\n    # Set random eigen-vals (lambdas) and angle (theta) for COV matrix\n    lambda_1 = min_var + np.random.rand() * (max_var - min_var)\n    lambda_2 = min_var + np.random.rand() * (max_var - min_var)\n    theta = np.random.rand() * np.pi  # random theta\n    noise = -noise_level + np.random.rand(*k_size) * noise_level * 2\n\n    # Set COV matrix using Lambdas and Theta\n    LAMBDA = np.diag([lambda_1, lambda_2])\n    Q = np.array([[np.cos(theta), -np.sin(theta)],\n                  [np.sin(theta), np.cos(theta)]])\n    SIGMA = Q @ LAMBDA @ Q.T\n    INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]\n\n    # Set expectation position (shifting kernel for aligned image)\n    MU = k_size // 2 - 0.5 * (scale_factor - 1)  # - 0.5 * (scale_factor - k_size % 2)\n    MU = MU[None, None, :, None]\n\n    # Create meshgrid for Gaussian\n    [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))\n    Z = np.stack([X, Y], 2)[:, :, :, None]\n\n    # Calcualte Gaussian for every pixel of the kernel\n    ZZ = Z - MU\n    ZZ_t = ZZ.transpose(0, 1, 3, 2)\n    raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ)) * (1 + noise)\n\n    # shift the kernel so it will be centered\n    # raw_kernel_centered = kernel_shift(raw_kernel, scale_factor)\n\n    # Normalize the kernel and return\n    # kernel = raw_kernel_centered / np.sum(raw_kernel_centered)\n    kernel = raw_kernel / np.sum(raw_kernel)\n    return kernel\n\n\ndef fspecial_gaussian(hsize, sigma):\n    hsize = [hsize, hsize]\n    siz = [(hsize[0] - 1.0) / 2.0, (hsize[1] - 1.0) / 2.0]\n    std = sigma\n    [x, y] = np.meshgrid(np.arange(-siz[1], siz[1] + 1), np.arange(-siz[0], siz[0] + 1))\n    arg = -(x * x + y * y) / (2 * std * std)\n    h = np.exp(arg)\n    h[h < scipy.finfo(float).eps * h.max()] = 0\n    sumh = h.sum()\n    if sumh != 0:\n        h = h / sumh\n    return h\n\n\ndef fspecial_laplacian(alpha):\n    alpha = max([0, min([alpha, 1])])\n    h1 = alpha / (alpha + 1)\n    h2 = (1 - alpha) / (alpha + 1)\n    h = [[h1, h2, h1], [h2, -4 / (alpha + 1), h2], [h1, h2, h1]]\n    h = np.array(h)\n    return h\n\n\ndef fspecial(filter_type, *args, **kwargs):\n    '''\n    python code from:\n    https://github.com/ronaldosena/imagens-medicas-2/blob/40171a6c259edec7827a6693a93955de2bd39e76/Aulas/aula_2_-_uniform_filter/matlab_fspecial.py\n    '''\n    if filter_type == 'gaussian':\n        return fspecial_gaussian(*args, **kwargs)\n    if filter_type == 'laplacian':\n        return fspecial_laplacian(*args, **kwargs)\n\n\n\"\"\"\n# --------------------------------------------\n# degradation models\n# 
--------------------------------------------\n\"\"\"\n\n\ndef bicubic_degradation(x, sf=3):\n    '''\n    Args:\n        x: HxWxC image, [0, 1]\n        sf: down-scale factor\n    Return:\n        bicubicly downsampled LR image\n    '''\n    x = util.imresize_np(x, scale=1 / sf)\n    return x\n\n\ndef srmd_degradation(x, k, sf=3):\n    ''' blur + bicubic downsampling\n    Args:\n        x: HxWxC image, [0, 1]\n        k: hxw, double\n        sf: down-scale factor\n    Return:\n        downsampled LR image\n    Reference:\n        @inproceedings{zhang2018learning,\n          title={Learning a single convolutional super-resolution network for multiple degradations},\n          author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},\n          booktitle={IEEE Conference on Computer Vision and Pattern Recognition},\n          pages={3262--3271},\n          year={2018}\n        }\n    '''\n    x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')  # 'nearest' | 'mirror'\n    x = bicubic_degradation(x, sf=sf)\n    return x\n\n\ndef dpsr_degradation(x, k, sf=3):\n    ''' bicubic downsampling + blur\n    Args:\n        x: HxWxC image, [0, 1]\n        k: hxw, double\n        sf: down-scale factor\n    Return:\n        downsampled LR image\n    Reference:\n        @inproceedings{zhang2019deep,\n          title={Deep Plug-and-Play Super-Resolution for Arbitrary Blur Kernels},\n          author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},\n          booktitle={IEEE Conference on Computer Vision and Pattern Recognition},\n          pages={1671--1681},\n          year={2019}\n        }\n    '''\n    x = bicubic_degradation(x, sf=sf)\n    x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')\n    return x\n\n\ndef classical_degradation(x, k, sf=3):\n    ''' blur + downsampling\n    Args:\n        x: HxWxC image, [0, 1]/[0, 255]\n        k: hxw, double\n        sf: down-scale factor\n    Return:\n        downsampled LR image\n    '''\n    x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')\n    # x = filters.correlate(x, np.expand_dims(np.flip(k), axis=2))\n    st = 0\n    return x[st::sf, st::sf, ...]\n\n\ndef add_sharpening(img, weight=0.5, radius=50, threshold=10):\n    \"\"\"USM sharpening. borrowed from real-ESRGAN\n    Input image: I; Blurry image: B.\n    1. K = I + weight * (I - B)\n    2. Mask = 1 if abs(I - B) > threshold, else: 0\n    3. Blur mask:\n    4. Out = Mask * K + (1 - Mask) * I\n    Args:\n        img (Numpy array): Input image, HWC, BGR; float32, [0, 1].\n        weight (float): Sharp weight. Default: 1.\n        radius (float): Kernel size of Gaussian blur. 
Default: 50.\n        threshold (int):\n    \"\"\"\n    if radius % 2 == 0:\n        radius += 1\n    blur = cv2.GaussianBlur(img, (radius, radius), 0)\n    residual = img - blur\n    mask = np.abs(residual) * 255 > threshold\n    mask = mask.astype('float32')\n    soft_mask = cv2.GaussianBlur(mask, (radius, radius), 0)\n\n    K = img + weight * residual\n    K = np.clip(K, 0, 1)\n    return soft_mask * K + (1 - soft_mask) * img\n\n\ndef add_blur(img, sf=4):\n    wd2 = 4.0 + sf\n    wd = 2.0 + 0.2 * sf\n    if random.random() < 0.5:\n        l1 = wd2 * random.random()\n        l2 = wd2 * random.random()\n        k = anisotropic_Gaussian(ksize=2 * random.randint(2, 11) + 3, theta=random.random() * np.pi, l1=l1, l2=l2)\n    else:\n        k = fspecial('gaussian', 2 * random.randint(2, 11) + 3, wd * random.random())\n    img = ndimage.filters.convolve(img, np.expand_dims(k, axis=2), mode='mirror')\n\n    return img\n\n\ndef add_resize(img, sf=4):\n    rnum = np.random.rand()\n    if rnum > 0.8:  # up\n        sf1 = random.uniform(1, 2)\n    elif rnum < 0.7:  # down\n        sf1 = random.uniform(0.5 / sf, 1)\n    else:\n        sf1 = 1.0\n    img = cv2.resize(img, (int(sf1 * img.shape[1]), int(sf1 * img.shape[0])), interpolation=random.choice([1, 2, 3]))\n    img = np.clip(img, 0.0, 1.0)\n\n    return img\n\n\n# def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):\n#     noise_level = random.randint(noise_level1, noise_level2)\n#     rnum = np.random.rand()\n#     if rnum > 0.6:  # add color Gaussian noise\n#         img += np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)\n#     elif rnum < 0.4:  # add grayscale Gaussian noise\n#         img += np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)\n#     else:  # add  noise\n#         L = noise_level2 / 255.\n#         D = np.diag(np.random.rand(3))\n#         U = orth(np.random.rand(3, 3))\n#         conv = np.dot(np.dot(np.transpose(U), D), U)\n#         img += np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)\n#     img = np.clip(img, 0.0, 1.0)\n#     return img\n\ndef add_Gaussian_noise(img, noise_level1=2, noise_level2=25):\n    noise_level = random.randint(noise_level1, noise_level2)\n    rnum = np.random.rand()\n    if rnum > 0.6:  # add color Gaussian noise\n        img = img + np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)\n    elif rnum < 0.4:  # add grayscale Gaussian noise\n        img = img + np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)\n    else:  # add  noise\n        L = noise_level2 / 255.\n        D = np.diag(np.random.rand(3))\n        U = orth(np.random.rand(3, 3))\n        conv = np.dot(np.dot(np.transpose(U), D), U)\n        img = img + np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)\n    img = np.clip(img, 0.0, 1.0)\n    return img\n\n\ndef add_speckle_noise(img, noise_level1=2, noise_level2=25):\n    noise_level = random.randint(noise_level1, noise_level2)\n    img = np.clip(img, 0.0, 1.0)\n    rnum = random.random()\n    if rnum > 0.6:\n        img += img * np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)\n    elif rnum < 0.4:\n        img += img * np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)\n    else:\n        L = noise_level2 / 255.\n        D = np.diag(np.random.rand(3))\n        U = orth(np.random.rand(3, 3))\n        conv = 
np.dot(np.dot(np.transpose(U), D), U)\n        img += img * np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)\n    img = np.clip(img, 0.0, 1.0)\n    return img\n\n\ndef add_Poisson_noise(img):\n    img = np.clip((img * 255.0).round(), 0, 255) / 255.\n    vals = 10 ** (2 * random.random() + 2.0)  # [2, 4]\n    if random.random() < 0.5:\n        img = np.random.poisson(img * vals).astype(np.float32) / vals\n    else:\n        img_gray = np.dot(img[..., :3], [0.299, 0.587, 0.114])\n        img_gray = np.clip((img_gray * 255.0).round(), 0, 255) / 255.\n        noise_gray = np.random.poisson(img_gray * vals).astype(np.float32) / vals - img_gray\n        img += noise_gray[:, :, np.newaxis]\n    img = np.clip(img, 0.0, 1.0)\n    return img\n\n\ndef add_JPEG_noise(img):\n    quality_factor = random.randint(30, 95)\n    img = cv2.cvtColor(util.single2uint(img), cv2.COLOR_RGB2BGR)\n    result, encimg = cv2.imencode('.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), quality_factor])\n    img = cv2.imdecode(encimg, 1)\n    img = cv2.cvtColor(util.uint2single(img), cv2.COLOR_BGR2RGB)\n    return img\n\n\ndef random_crop(lq, hq, sf=4, lq_patchsize=64):\n    h, w = lq.shape[:2]\n    rnd_h = random.randint(0, h - lq_patchsize)\n    rnd_w = random.randint(0, w - lq_patchsize)\n    lq = lq[rnd_h:rnd_h + lq_patchsize, rnd_w:rnd_w + lq_patchsize, :]\n\n    rnd_h_H, rnd_w_H = int(rnd_h * sf), int(rnd_w * sf)\n    hq = hq[rnd_h_H:rnd_h_H + lq_patchsize * sf, rnd_w_H:rnd_w_H + lq_patchsize * sf, :]\n    return lq, hq\n\n\ndef degradation_bsrgan(img, sf=4, lq_patchsize=72, isp_model=None):\n    \"\"\"\n    This is the degradation model of BSRGAN from the paper\n    \"Designing a Practical Degradation Model for Deep Blind Image Super-Resolution\"\n    ----------\n    img: HXWXC, [0, 1], its size should be large than (lq_patchsizexsf)x(lq_patchsizexsf)\n    sf: scale factor\n    isp_model: camera ISP model\n    Returns\n    -------\n    img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]\n    hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]\n    \"\"\"\n    isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25\n    sf_ori = sf\n\n    h1, w1 = img.shape[:2]\n    img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...]  
# mod crop\n    h, w = img.shape[:2]\n\n    if h < lq_patchsize * sf or w < lq_patchsize * sf:\n        raise ValueError(f'img size ({h1}X{w1}) is too small!')\n\n    hq = img.copy()\n\n    if sf == 4 and random.random() < scale2_prob:  # downsample1\n        if np.random.rand() < 0.5:\n            img = cv2.resize(img, (int(1 / 2 * img.shape[1]), int(1 / 2 * img.shape[0])),\n                             interpolation=random.choice([1, 2, 3]))\n        else:\n            img = util.imresize_np(img, 1 / 2, True)\n        img = np.clip(img, 0.0, 1.0)\n        sf = 2\n\n    shuffle_order = random.sample(range(7), 7)\n    idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)\n    if idx1 > idx2:  # keep downsample3 last\n        shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]\n\n    for i in shuffle_order:\n\n        if i == 0:\n            img = add_blur(img, sf=sf)\n\n        elif i == 1:\n            img = add_blur(img, sf=sf)\n\n        elif i == 2:\n            a, b = img.shape[1], img.shape[0]\n            # downsample2\n            if random.random() < 0.75:\n                sf1 = random.uniform(1, 2 * sf)\n                img = cv2.resize(img, (int(1 / sf1 * img.shape[1]), int(1 / sf1 * img.shape[0])),\n                                 interpolation=random.choice([1, 2, 3]))\n            else:\n                k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))\n                k_shifted = shift_pixel(k, sf)\n                k_shifted = k_shifted / k_shifted.sum()  # blur with shifted kernel\n                img = ndimage.filters.convolve(img, np.expand_dims(k_shifted, axis=2), mode='mirror')\n                img = img[0::sf, 0::sf, ...]  # nearest downsampling\n            img = np.clip(img, 0.0, 1.0)\n\n        elif i == 3:\n            # downsample3\n            img = cv2.resize(img, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))\n            img = np.clip(img, 0.0, 1.0)\n\n        elif i == 4:\n            # add Gaussian noise\n            img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)\n\n        elif i == 5:\n            # add JPEG noise\n            if random.random() < jpeg_prob:\n                img = add_JPEG_noise(img)\n\n        elif i == 6:\n            # add processed camera sensor noise\n            if random.random() < isp_prob and isp_model is not None:\n                with torch.no_grad():\n                    img, hq = isp_model.forward(img.copy(), hq)\n\n    # add final JPEG compression noise\n    img = add_JPEG_noise(img)\n\n    # random crop\n    img, hq = random_crop(img, hq, sf_ori, lq_patchsize)\n\n    return img, hq\n\n\n# todo no isp_model?\ndef degradation_bsrgan_variant(image, sf=4, isp_model=None):\n    \"\"\"\n    This is the degradation model of BSRGAN from the paper\n    \"Designing a Practical Degradation Model for Deep Blind Image Super-Resolution\"\n    ----------\n    sf: scale factor\n    isp_model: camera ISP model\n    Returns\n    -------\n    img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]\n    hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]\n    \"\"\"\n    image = util.uint2single(image)\n    isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25\n    sf_ori = sf\n\n    h1, w1 = image.shape[:2]\n    image = image.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...]  
# mod crop\n    h, w = image.shape[:2]\n\n    hq = image.copy()\n\n    if sf == 4 and random.random() < scale2_prob:  # downsample1\n        if np.random.rand() < 0.5:\n            image = cv2.resize(image, (int(1 / 2 * image.shape[1]), int(1 / 2 * image.shape[0])),\n                               interpolation=random.choice([1, 2, 3]))\n        else:\n            image = util.imresize_np(image, 1 / 2, True)\n        image = np.clip(image, 0.0, 1.0)\n        sf = 2\n\n    shuffle_order = random.sample(range(7), 7)\n    idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)\n    if idx1 > idx2:  # keep downsample3 last\n        shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]\n\n    for i in shuffle_order:\n\n        if i == 0:\n            image = add_blur(image, sf=sf)\n\n        elif i == 1:\n            image = add_blur(image, sf=sf)\n\n        elif i == 2:\n            a, b = image.shape[1], image.shape[0]\n            # downsample2\n            if random.random() < 0.75:\n                sf1 = random.uniform(1, 2 * sf)\n                image = cv2.resize(image, (int(1 / sf1 * image.shape[1]), int(1 / sf1 * image.shape[0])),\n                                   interpolation=random.choice([1, 2, 3]))\n            else:\n                k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))\n                k_shifted = shift_pixel(k, sf)\n                k_shifted = k_shifted / k_shifted.sum()  # blur with shifted kernel\n                image = ndimage.filters.convolve(image, np.expand_dims(k_shifted, axis=2), mode='mirror')\n                image = image[0::sf, 0::sf, ...]  # nearest downsampling\n            image = np.clip(image, 0.0, 1.0)\n\n        elif i == 3:\n            # downsample3\n            image = cv2.resize(image, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))\n            image = np.clip(image, 0.0, 1.0)\n\n        elif i == 4:\n            # add Gaussian noise\n            image = add_Gaussian_noise(image, noise_level1=2, noise_level2=25)\n\n        elif i == 5:\n            # add JPEG noise\n            if random.random() < jpeg_prob:\n                image = add_JPEG_noise(image)\n\n        # elif i == 6:\n        #     # add processed camera sensor noise\n        #     if random.random() < isp_prob and isp_model is not None:\n        #         with torch.no_grad():\n        #             img, hq = isp_model.forward(img.copy(), hq)\n\n    # add final JPEG compression noise\n    image = add_JPEG_noise(image)\n    image = util.single2uint(image)\n    example = {\"image\":image}\n    return example\n\n\n# TODO incase there is a pickle error one needs to replace a += x with a = a + x in add_speckle_noise etc...\ndef degradation_bsrgan_plus(img, sf=4, shuffle_prob=0.5, use_sharp=True, lq_patchsize=64, isp_model=None):\n    \"\"\"\n    This is an extended degradation model by combining\n    the degradation models of BSRGAN and Real-ESRGAN\n    ----------\n    img: HXWXC, [0, 1], its size should be large than (lq_patchsizexsf)x(lq_patchsizexsf)\n    sf: scale factor\n    use_shuffle: the degradation shuffle\n    use_sharp: sharpening the img\n    Returns\n    -------\n    img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]\n    hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]\n    \"\"\"\n\n    h1, w1 = img.shape[:2]\n    img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...]  
# mod crop\n    h, w = img.shape[:2]\n\n    if h < lq_patchsize * sf or w < lq_patchsize * sf:\n        raise ValueError(f'img size ({h1}X{w1}) is too small!')\n\n    if use_sharp:\n        img = add_sharpening(img)\n    hq = img.copy()\n\n    if random.random() < shuffle_prob:\n        shuffle_order = random.sample(range(13), 13)\n    else:\n        shuffle_order = list(range(13))\n        # local shuffle for noise, JPEG is always the last one\n        shuffle_order[2:6] = random.sample(shuffle_order[2:6], len(range(2, 6)))\n        shuffle_order[9:13] = random.sample(shuffle_order[9:13], len(range(9, 13)))\n\n    poisson_prob, speckle_prob, isp_prob = 0.1, 0.1, 0.1\n\n    for i in shuffle_order:\n        if i == 0:\n            img = add_blur(img, sf=sf)\n        elif i == 1:\n            img = add_resize(img, sf=sf)\n        elif i == 2:\n            img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)\n        elif i == 3:\n            if random.random() < poisson_prob:\n                img = add_Poisson_noise(img)\n        elif i == 4:\n            if random.random() < speckle_prob:\n                img = add_speckle_noise(img)\n        elif i == 5:\n            if random.random() < isp_prob and isp_model is not None:\n                with torch.no_grad():\n                    img, hq = isp_model.forward(img.copy(), hq)\n        elif i == 6:\n            img = add_JPEG_noise(img)\n        elif i == 7:\n            img = add_blur(img, sf=sf)\n        elif i == 8:\n            img = add_resize(img, sf=sf)\n        elif i == 9:\n            img = add_Gaussian_noise(img, noise_level1=2, noise_level2=25)\n        elif i == 10:\n            if random.random() < poisson_prob:\n                img = add_Poisson_noise(img)\n        elif i == 11:\n            if random.random() < speckle_prob:\n                img = add_speckle_noise(img)\n        elif i == 12:\n            if random.random() < isp_prob and isp_model is not None:\n                with torch.no_grad():\n                    img, hq = isp_model.forward(img.copy(), hq)\n        else:\n            print('check the shuffle!')\n\n    # resize to desired size\n    img = cv2.resize(img, (int(1 / sf * hq.shape[1]), int(1 / sf * hq.shape[0])),\n                     interpolation=random.choice([1, 2, 3]))\n\n    # add final JPEG compression noise\n    img = add_JPEG_noise(img)\n\n    # random crop\n    img, hq = random_crop(img, hq, sf, lq_patchsize)\n\n    return img, hq\n\n\nif __name__ == '__main__':\n\tprint(\"hey\")\n\t# keep img as uint8 here; degradation_bsrgan_variant converts to [0, 1] internally\n\timg = util.imread_uint('utils/test.png', 3)\n\tprint(img)\n\timg = img[:448, :448]\n\th = img.shape[0] // 4\n\tprint(\"resizing to\", h)\n\tsf = 4\n\tdeg_fn = partial(degradation_bsrgan_variant, sf=sf)\n\tfor i in range(20):\n\t\tprint(i)\n\t\timg_hq = img\n\t\t# the variant returns a dict; take the degraded uint8 image and map both images to [0, 1]\n\t\timg_lq = deg_fn(img)[\"image\"]\n\t\timg_hq, img_lq = util.uint2single(img_hq), util.uint2single(img_lq)\n\t\tprint(img_lq)\n\t\timg_lq_bicubic = albumentations.SmallestMaxSize(max_size=h, interpolation=cv2.INTER_CUBIC)(image=img_hq)[\"image\"]\n\t\tprint(img_lq.shape)\n\t\tprint(\"bicubic\", img_lq_bicubic.shape)\n\t\tprint(img_hq.shape)\n\t\tlq_nearest = cv2.resize(util.single2uint(img_lq), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),\n\t\t                        interpolation=0)\n\t\tlq_bicubic_nearest = cv2.resize(util.single2uint(img_lq_bicubic), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),\n\t\t                        interpolation=0)\n\t\timg_concat = np.concatenate([lq_bicubic_nearest, lq_nearest, util.single2uint(img_hq)], axis=1)\n\t\tutil.imsave(img_concat, str(i) + '.png')\n\n\n"
  },
  {
    "path": "ldm_exp/ldm/modules/image_degradation/bsrgan_light.py",
    "content": "# -*- coding: utf-8 -*-\nimport numpy as np\nimport cv2\nimport torch\n\nfrom functools import partial\nimport random\nfrom scipy import ndimage\nimport scipy\nimport scipy.stats as ss\nfrom scipy.interpolate import interp2d\nfrom scipy.linalg import orth\nimport albumentations\n\nimport ldm.modules.image_degradation.utils_image as util\n\n\"\"\"\n# --------------------------------------------\n# Super-Resolution\n# --------------------------------------------\n#\n# Kai Zhang (cskaizhang@gmail.com)\n# https://github.com/cszn\n# From 2019/03--2021/08\n# --------------------------------------------\n\"\"\"\n\n\ndef modcrop_np(img, sf):\n    '''\n    Args:\n        img: numpy image, WxH or WxHxC\n        sf: scale factor\n    Return:\n        cropped image\n    '''\n    w, h = img.shape[:2]\n    im = np.copy(img)\n    return im[:w - w % sf, :h - h % sf, ...]\n\n\n\"\"\"\n# --------------------------------------------\n# anisotropic Gaussian kernels\n# --------------------------------------------\n\"\"\"\n\n\ndef analytic_kernel(k):\n    \"\"\"Calculate the X4 kernel from the X2 kernel (for proof see appendix in paper)\"\"\"\n    k_size = k.shape[0]\n    # Calculate the big kernels size\n    big_k = np.zeros((3 * k_size - 2, 3 * k_size - 2))\n    # Loop over the small kernel to fill the big one\n    for r in range(k_size):\n        for c in range(k_size):\n            big_k[2 * r:2 * r + k_size, 2 * c:2 * c + k_size] += k[r, c] * k\n    # Crop the edges of the big kernel to ignore very small values and increase run time of SR\n    crop = k_size // 2\n    cropped_big_k = big_k[crop:-crop, crop:-crop]\n    # Normalize to 1\n    return cropped_big_k / cropped_big_k.sum()\n\n\ndef anisotropic_Gaussian(ksize=15, theta=np.pi, l1=6, l2=6):\n    \"\"\" generate an anisotropic Gaussian kernel\n    Args:\n        ksize : e.g., 15, kernel size\n        theta : [0,  pi], rotation angle range\n        l1    : [0.1,50], scaling of eigenvalues\n        l2    : [0.1,l1], scaling of eigenvalues\n        If l1 = l2, will get an isotropic Gaussian kernel.\n    Returns:\n        k     : kernel\n    \"\"\"\n\n    v = np.dot(np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]]), np.array([1., 0.]))\n    V = np.array([[v[0], v[1]], [v[1], -v[0]]])\n    D = np.array([[l1, 0], [0, l2]])\n    Sigma = np.dot(np.dot(V, D), np.linalg.inv(V))\n    k = gm_blur_kernel(mean=[0, 0], cov=Sigma, size=ksize)\n\n    return k\n\n\ndef gm_blur_kernel(mean, cov, size=15):\n    center = size / 2.0 + 0.5\n    k = np.zeros([size, size])\n    for y in range(size):\n        for x in range(size):\n            cy = y - center + 1\n            cx = x - center + 1\n            k[y, x] = ss.multivariate_normal.pdf([cx, cy], mean=mean, cov=cov)\n\n    k = k / np.sum(k)\n    return k\n\n\ndef shift_pixel(x, sf, upper_left=True):\n    \"\"\"shift pixel for super-resolution with different scale factors\n    Args:\n        x: WxHxC or WxH\n        sf: scale factor\n        upper_left: shift direction\n    \"\"\"\n    h, w = x.shape[:2]\n    shift = (sf - 1) * 0.5\n    xv, yv = np.arange(0, w, 1.0), np.arange(0, h, 1.0)\n    if upper_left:\n        x1 = xv + shift\n        y1 = yv + shift\n    else:\n        x1 = xv - shift\n        y1 = yv - shift\n\n    x1 = np.clip(x1, 0, w - 1)\n    y1 = np.clip(y1, 0, h - 1)\n\n    if x.ndim == 2:\n        x = interp2d(xv, yv, x)(x1, y1)\n    if x.ndim == 3:\n        for i in range(x.shape[-1]):\n            x[:, :, i] = interp2d(xv, yv, x[:, :, i])(x1, y1)\n\n    return 
x\n\n\ndef blur(x, k):\n    '''\n    x: image, NxcxHxW\n    k: kernel, Nx1xhxw\n    '''\n    n, c = x.shape[:2]\n    p1, p2 = (k.shape[-2] - 1) // 2, (k.shape[-1] - 1) // 2\n    x = torch.nn.functional.pad(x, pad=(p1, p2, p1, p2), mode='replicate')\n    k = k.repeat(1, c, 1, 1)\n    k = k.view(-1, 1, k.shape[2], k.shape[3])\n    x = x.view(1, -1, x.shape[2], x.shape[3])\n    x = torch.nn.functional.conv2d(x, k, bias=None, stride=1, padding=0, groups=n * c)\n    x = x.view(n, c, x.shape[2], x.shape[3])\n\n    return x\n\n\ndef gen_kernel(k_size=np.array([15, 15]), scale_factor=np.array([4, 4]), min_var=0.6, max_var=10., noise_level=0):\n    \"\"\"\"\n    # modified version of https://github.com/assafshocher/BlindSR_dataset_generator\n    # Kai Zhang\n    # min_var = 0.175 * sf  # variance of the gaussian kernel will be sampled between min_var and max_var\n    # max_var = 2.5 * sf\n    \"\"\"\n    # Set random eigen-vals (lambdas) and angle (theta) for COV matrix\n    lambda_1 = min_var + np.random.rand() * (max_var - min_var)\n    lambda_2 = min_var + np.random.rand() * (max_var - min_var)\n    theta = np.random.rand() * np.pi  # random theta\n    noise = -noise_level + np.random.rand(*k_size) * noise_level * 2\n\n    # Set COV matrix using Lambdas and Theta\n    LAMBDA = np.diag([lambda_1, lambda_2])\n    Q = np.array([[np.cos(theta), -np.sin(theta)],\n                  [np.sin(theta), np.cos(theta)]])\n    SIGMA = Q @ LAMBDA @ Q.T\n    INV_SIGMA = np.linalg.inv(SIGMA)[None, None, :, :]\n\n    # Set expectation position (shifting kernel for aligned image)\n    MU = k_size // 2 - 0.5 * (scale_factor - 1)  # - 0.5 * (scale_factor - k_size % 2)\n    MU = MU[None, None, :, None]\n\n    # Create meshgrid for Gaussian\n    [X, Y] = np.meshgrid(range(k_size[0]), range(k_size[1]))\n    Z = np.stack([X, Y], 2)[:, :, :, None]\n\n    # Calcualte Gaussian for every pixel of the kernel\n    ZZ = Z - MU\n    ZZ_t = ZZ.transpose(0, 1, 3, 2)\n    raw_kernel = np.exp(-0.5 * np.squeeze(ZZ_t @ INV_SIGMA @ ZZ)) * (1 + noise)\n\n    # shift the kernel so it will be centered\n    # raw_kernel_centered = kernel_shift(raw_kernel, scale_factor)\n\n    # Normalize the kernel and return\n    # kernel = raw_kernel_centered / np.sum(raw_kernel_centered)\n    kernel = raw_kernel / np.sum(raw_kernel)\n    return kernel\n\n\ndef fspecial_gaussian(hsize, sigma):\n    hsize = [hsize, hsize]\n    siz = [(hsize[0] - 1.0) / 2.0, (hsize[1] - 1.0) / 2.0]\n    std = sigma\n    [x, y] = np.meshgrid(np.arange(-siz[1], siz[1] + 1), np.arange(-siz[0], siz[0] + 1))\n    arg = -(x * x + y * y) / (2 * std * std)\n    h = np.exp(arg)\n    h[h < scipy.finfo(float).eps * h.max()] = 0\n    sumh = h.sum()\n    if sumh != 0:\n        h = h / sumh\n    return h\n\n\ndef fspecial_laplacian(alpha):\n    alpha = max([0, min([alpha, 1])])\n    h1 = alpha / (alpha + 1)\n    h2 = (1 - alpha) / (alpha + 1)\n    h = [[h1, h2, h1], [h2, -4 / (alpha + 1), h2], [h1, h2, h1]]\n    h = np.array(h)\n    return h\n\n\ndef fspecial(filter_type, *args, **kwargs):\n    '''\n    python code from:\n    https://github.com/ronaldosena/imagens-medicas-2/blob/40171a6c259edec7827a6693a93955de2bd39e76/Aulas/aula_2_-_uniform_filter/matlab_fspecial.py\n    '''\n    if filter_type == 'gaussian':\n        return fspecial_gaussian(*args, **kwargs)\n    if filter_type == 'laplacian':\n        return fspecial_laplacian(*args, **kwargs)\n\n\n\"\"\"\n# --------------------------------------------\n# degradation models\n# 
--------------------------------------------\n\"\"\"\n\n\ndef bicubic_degradation(x, sf=3):\n    '''\n    Args:\n        x: HxWxC image, [0, 1]\n        sf: down-scale factor\n    Return:\n        bicubicly downsampled LR image\n    '''\n    x = util.imresize_np(x, scale=1 / sf)\n    return x\n\n\ndef srmd_degradation(x, k, sf=3):\n    ''' blur + bicubic downsampling\n    Args:\n        x: HxWxC image, [0, 1]\n        k: hxw, double\n        sf: down-scale factor\n    Return:\n        downsampled LR image\n    Reference:\n        @inproceedings{zhang2018learning,\n          title={Learning a single convolutional super-resolution network for multiple degradations},\n          author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},\n          booktitle={IEEE Conference on Computer Vision and Pattern Recognition},\n          pages={3262--3271},\n          year={2018}\n        }\n    '''\n    x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')  # 'nearest' | 'mirror'\n    x = bicubic_degradation(x, sf=sf)\n    return x\n\n\ndef dpsr_degradation(x, k, sf=3):\n    ''' bicubic downsampling + blur\n    Args:\n        x: HxWxC image, [0, 1]\n        k: hxw, double\n        sf: down-scale factor\n    Return:\n        downsampled LR image\n    Reference:\n        @inproceedings{zhang2019deep,\n          title={Deep Plug-and-Play Super-Resolution for Arbitrary Blur Kernels},\n          author={Zhang, Kai and Zuo, Wangmeng and Zhang, Lei},\n          booktitle={IEEE Conference on Computer Vision and Pattern Recognition},\n          pages={1671--1681},\n          year={2019}\n        }\n    '''\n    x = bicubic_degradation(x, sf=sf)\n    x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')\n    return x\n\n\ndef classical_degradation(x, k, sf=3):\n    ''' blur + downsampling\n    Args:\n        x: HxWxC image, [0, 1]/[0, 255]\n        k: hxw, double\n        sf: down-scale factor\n    Return:\n        downsampled LR image\n    '''\n    x = ndimage.filters.convolve(x, np.expand_dims(k, axis=2), mode='wrap')\n    # x = filters.correlate(x, np.expand_dims(np.flip(k), axis=2))\n    st = 0\n    return x[st::sf, st::sf, ...]\n\n\ndef add_sharpening(img, weight=0.5, radius=50, threshold=10):\n    \"\"\"USM sharpening. borrowed from real-ESRGAN\n    Input image: I; Blurry image: B.\n    1. K = I + weight * (I - B)\n    2. Mask = 1 if abs(I - B) > threshold, else: 0\n    3. Blur mask:\n    4. Out = Mask * K + (1 - Mask) * I\n    Args:\n        img (Numpy array): Input image, HWC, BGR; float32, [0, 1].\n        weight (float): Sharp weight. Default: 1.\n        radius (float): Kernel size of Gaussian blur. 
Default: 50.\n        threshold (int):\n    \"\"\"\n    if radius % 2 == 0:\n        radius += 1\n    blur = cv2.GaussianBlur(img, (radius, radius), 0)\n    residual = img - blur\n    mask = np.abs(residual) * 255 > threshold\n    mask = mask.astype('float32')\n    soft_mask = cv2.GaussianBlur(mask, (radius, radius), 0)\n\n    K = img + weight * residual\n    K = np.clip(K, 0, 1)\n    return soft_mask * K + (1 - soft_mask) * img\n\n\ndef add_blur(img, sf=4):\n    wd2 = 4.0 + sf\n    wd = 2.0 + 0.2 * sf\n\n    wd2 = wd2/4\n    wd = wd/4\n\n    if random.random() < 0.5:\n        l1 = wd2 * random.random()\n        l2 = wd2 * random.random()\n        k = anisotropic_Gaussian(ksize=random.randint(2, 11) + 3, theta=random.random() * np.pi, l1=l1, l2=l2)\n    else:\n        k = fspecial('gaussian', random.randint(2, 4) + 3, wd * random.random())\n    img = ndimage.filters.convolve(img, np.expand_dims(k, axis=2), mode='mirror')\n\n    return img\n\n\ndef add_resize(img, sf=4):\n    rnum = np.random.rand()\n    if rnum > 0.8:  # up\n        sf1 = random.uniform(1, 2)\n    elif rnum < 0.7:  # down\n        sf1 = random.uniform(0.5 / sf, 1)\n    else:\n        sf1 = 1.0\n    img = cv2.resize(img, (int(sf1 * img.shape[1]), int(sf1 * img.shape[0])), interpolation=random.choice([1, 2, 3]))\n    img = np.clip(img, 0.0, 1.0)\n\n    return img\n\n\n# def add_Gaussian_noise(img, noise_level1=2, noise_level2=25):\n#     noise_level = random.randint(noise_level1, noise_level2)\n#     rnum = np.random.rand()\n#     if rnum > 0.6:  # add color Gaussian noise\n#         img += np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)\n#     elif rnum < 0.4:  # add grayscale Gaussian noise\n#         img += np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)\n#     else:  # add  noise\n#         L = noise_level2 / 255.\n#         D = np.diag(np.random.rand(3))\n#         U = orth(np.random.rand(3, 3))\n#         conv = np.dot(np.dot(np.transpose(U), D), U)\n#         img += np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)\n#     img = np.clip(img, 0.0, 1.0)\n#     return img\n\ndef add_Gaussian_noise(img, noise_level1=2, noise_level2=25):\n    noise_level = random.randint(noise_level1, noise_level2)\n    rnum = np.random.rand()\n    if rnum > 0.6:  # add color Gaussian noise\n        img = img + np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)\n    elif rnum < 0.4:  # add grayscale Gaussian noise\n        img = img + np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)\n    else:  # add  noise\n        L = noise_level2 / 255.\n        D = np.diag(np.random.rand(3))\n        U = orth(np.random.rand(3, 3))\n        conv = np.dot(np.dot(np.transpose(U), D), U)\n        img = img + np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)\n    img = np.clip(img, 0.0, 1.0)\n    return img\n\n\ndef add_speckle_noise(img, noise_level1=2, noise_level2=25):\n    noise_level = random.randint(noise_level1, noise_level2)\n    img = np.clip(img, 0.0, 1.0)\n    rnum = random.random()\n    if rnum > 0.6:\n        img += img * np.random.normal(0, noise_level / 255.0, img.shape).astype(np.float32)\n    elif rnum < 0.4:\n        img += img * np.random.normal(0, noise_level / 255.0, (*img.shape[:2], 1)).astype(np.float32)\n    else:\n        L = noise_level2 / 255.\n        D = np.diag(np.random.rand(3))\n        U = orth(np.random.rand(3, 3))\n        
conv = np.dot(np.dot(np.transpose(U), D), U)\n        img += img * np.random.multivariate_normal([0, 0, 0], np.abs(L ** 2 * conv), img.shape[:2]).astype(np.float32)\n    img = np.clip(img, 0.0, 1.0)\n    return img\n\n\ndef add_Poisson_noise(img):\n    img = np.clip((img * 255.0).round(), 0, 255) / 255.\n    vals = 10 ** (2 * random.random() + 2.0)  # [2, 4]\n    if random.random() < 0.5:\n        img = np.random.poisson(img * vals).astype(np.float32) / vals\n    else:\n        img_gray = np.dot(img[..., :3], [0.299, 0.587, 0.114])\n        img_gray = np.clip((img_gray * 255.0).round(), 0, 255) / 255.\n        noise_gray = np.random.poisson(img_gray * vals).astype(np.float32) / vals - img_gray\n        img += noise_gray[:, :, np.newaxis]\n    img = np.clip(img, 0.0, 1.0)\n    return img\n\n\ndef add_JPEG_noise(img):\n    quality_factor = random.randint(80, 95)\n    img = cv2.cvtColor(util.single2uint(img), cv2.COLOR_RGB2BGR)\n    result, encimg = cv2.imencode('.jpg', img, [int(cv2.IMWRITE_JPEG_QUALITY), quality_factor])\n    img = cv2.imdecode(encimg, 1)\n    img = cv2.cvtColor(util.uint2single(img), cv2.COLOR_BGR2RGB)\n    return img\n\n\ndef random_crop(lq, hq, sf=4, lq_patchsize=64):\n    h, w = lq.shape[:2]\n    rnd_h = random.randint(0, h - lq_patchsize)\n    rnd_w = random.randint(0, w - lq_patchsize)\n    lq = lq[rnd_h:rnd_h + lq_patchsize, rnd_w:rnd_w + lq_patchsize, :]\n\n    rnd_h_H, rnd_w_H = int(rnd_h * sf), int(rnd_w * sf)\n    hq = hq[rnd_h_H:rnd_h_H + lq_patchsize * sf, rnd_w_H:rnd_w_H + lq_patchsize * sf, :]\n    return lq, hq\n\n\ndef degradation_bsrgan(img, sf=4, lq_patchsize=72, isp_model=None):\n    \"\"\"\n    This is the degradation model of BSRGAN from the paper\n    \"Designing a Practical Degradation Model for Deep Blind Image Super-Resolution\"\n    ----------\n    img: HXWXC, [0, 1], its size should be large than (lq_patchsizexsf)x(lq_patchsizexsf)\n    sf: scale factor\n    isp_model: camera ISP model\n    Returns\n    -------\n    img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]\n    hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]\n    \"\"\"\n    isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25\n    sf_ori = sf\n\n    h1, w1 = img.shape[:2]\n    img = img.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...]  
# mod crop\n    h, w = img.shape[:2]\n\n    if h < lq_patchsize * sf or w < lq_patchsize * sf:\n        raise ValueError(f'img size ({h1}X{w1}) is too small!')\n\n    hq = img.copy()\n\n    if sf == 4 and random.random() < scale2_prob:  # downsample1\n        if np.random.rand() < 0.5:\n            img = cv2.resize(img, (int(1 / 2 * img.shape[1]), int(1 / 2 * img.shape[0])),\n                             interpolation=random.choice([1, 2, 3]))\n        else:\n            img = util.imresize_np(img, 1 / 2, True)\n        img = np.clip(img, 0.0, 1.0)\n        sf = 2\n\n    shuffle_order = random.sample(range(7), 7)\n    idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)\n    if idx1 > idx2:  # keep downsample3 last\n        shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]\n\n    for i in shuffle_order:\n\n        if i == 0:\n            img = add_blur(img, sf=sf)\n\n        elif i == 1:\n            img = add_blur(img, sf=sf)\n\n        elif i == 2:\n            a, b = img.shape[1], img.shape[0]\n            # downsample2\n            if random.random() < 0.75:\n                sf1 = random.uniform(1, 2 * sf)\n                img = cv2.resize(img, (int(1 / sf1 * img.shape[1]), int(1 / sf1 * img.shape[0])),\n                                 interpolation=random.choice([1, 2, 3]))\n            else:\n                k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))\n                k_shifted = shift_pixel(k, sf)\n                k_shifted = k_shifted / k_shifted.sum()  # blur with shifted kernel\n                img = ndimage.filters.convolve(img, np.expand_dims(k_shifted, axis=2), mode='mirror')\n                img = img[0::sf, 0::sf, ...]  # nearest downsampling\n            img = np.clip(img, 0.0, 1.0)\n\n        elif i == 3:\n            # downsample3\n            img = cv2.resize(img, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))\n            img = np.clip(img, 0.0, 1.0)\n\n        elif i == 4:\n            # add Gaussian noise\n            img = add_Gaussian_noise(img, noise_level1=2, noise_level2=8)\n\n        elif i == 5:\n            # add JPEG noise\n            if random.random() < jpeg_prob:\n                img = add_JPEG_noise(img)\n\n        elif i == 6:\n            # add processed camera sensor noise\n            if random.random() < isp_prob and isp_model is not None:\n                with torch.no_grad():\n                    img, hq = isp_model.forward(img.copy(), hq)\n\n    # add final JPEG compression noise\n    img = add_JPEG_noise(img)\n\n    # random crop\n    img, hq = random_crop(img, hq, sf_ori, lq_patchsize)\n\n    return img, hq\n\n\n# todo no isp_model?\ndef degradation_bsrgan_variant(image, sf=4, isp_model=None):\n    \"\"\"\n    This is the degradation model of BSRGAN from the paper\n    \"Designing a Practical Degradation Model for Deep Blind Image Super-Resolution\"\n    ----------\n    sf: scale factor\n    isp_model: camera ISP model\n    Returns\n    -------\n    img: low-quality patch, size: lq_patchsizeXlq_patchsizeXC, range: [0, 1]\n    hq: corresponding high-quality patch, size: (lq_patchsizexsf)X(lq_patchsizexsf)XC, range: [0, 1]\n    \"\"\"\n    image = util.uint2single(image)\n    isp_prob, jpeg_prob, scale2_prob = 0.25, 0.9, 0.25\n    sf_ori = sf\n\n    h1, w1 = image.shape[:2]\n    image = image.copy()[:w1 - w1 % sf, :h1 - h1 % sf, ...]  
# mod crop\n    h, w = image.shape[:2]\n\n    hq = image.copy()\n\n    if sf == 4 and random.random() < scale2_prob:  # downsample1\n        if np.random.rand() < 0.5:\n            image = cv2.resize(image, (int(1 / 2 * image.shape[1]), int(1 / 2 * image.shape[0])),\n                               interpolation=random.choice([1, 2, 3]))\n        else:\n            image = util.imresize_np(image, 1 / 2, True)\n        image = np.clip(image, 0.0, 1.0)\n        sf = 2\n\n    shuffle_order = random.sample(range(7), 7)\n    idx1, idx2 = shuffle_order.index(2), shuffle_order.index(3)\n    if idx1 > idx2:  # keep downsample3 last\n        shuffle_order[idx1], shuffle_order[idx2] = shuffle_order[idx2], shuffle_order[idx1]\n\n    for i in shuffle_order:\n\n        if i == 0:\n            image = add_blur(image, sf=sf)\n\n        # elif i == 1:\n        #     image = add_blur(image, sf=sf)\n\n        if i == 0:\n            pass\n\n        elif i == 2:\n            a, b = image.shape[1], image.shape[0]\n            # downsample2\n            if random.random() < 0.8:\n                sf1 = random.uniform(1, 2 * sf)\n                image = cv2.resize(image, (int(1 / sf1 * image.shape[1]), int(1 / sf1 * image.shape[0])),\n                                   interpolation=random.choice([1, 2, 3]))\n            else:\n                k = fspecial('gaussian', 25, random.uniform(0.1, 0.6 * sf))\n                k_shifted = shift_pixel(k, sf)\n                k_shifted = k_shifted / k_shifted.sum()  # blur with shifted kernel\n                image = ndimage.filters.convolve(image, np.expand_dims(k_shifted, axis=2), mode='mirror')\n                image = image[0::sf, 0::sf, ...]  # nearest downsampling\n\n            image = np.clip(image, 0.0, 1.0)\n\n        elif i == 3:\n            # downsample3\n            image = cv2.resize(image, (int(1 / sf * a), int(1 / sf * b)), interpolation=random.choice([1, 2, 3]))\n            image = np.clip(image, 0.0, 1.0)\n\n        elif i == 4:\n            # add Gaussian noise\n            image = add_Gaussian_noise(image, noise_level1=1, noise_level2=2)\n\n        elif i == 5:\n            # add JPEG noise\n            if random.random() < jpeg_prob:\n                image = add_JPEG_noise(image)\n        #\n        # elif i == 6:\n        #     # add processed camera sensor noise\n        #     if random.random() < isp_prob and isp_model is not None:\n        #         with torch.no_grad():\n        #             img, hq = isp_model.forward(img.copy(), hq)\n\n    # add final JPEG compression noise\n    image = add_JPEG_noise(image)\n    image = util.single2uint(image)\n    example = {\"image\": image}\n    return example\n\n\n\n\nif __name__ == '__main__':\n    print(\"hey\")\n    img = util.imread_uint('utils/test.png', 3)\n    img = img[:448, :448]\n    h = img.shape[0] // 4\n    print(\"resizing to\", h)\n    sf = 4\n    deg_fn = partial(degradation_bsrgan_variant, sf=sf)\n    for i in range(20):\n        print(i)\n        img_hq = img\n        img_lq = deg_fn(img)[\"image\"]\n        img_hq, img_lq = util.uint2single(img_hq), util.uint2single(img_lq)\n        print(img_lq)\n        img_lq_bicubic = albumentations.SmallestMaxSize(max_size=h, interpolation=cv2.INTER_CUBIC)(image=img_hq)[\"image\"]\n        print(img_lq.shape)\n        print(\"bicubic\", img_lq_bicubic.shape)\n        print(img_hq.shape)\n        lq_nearest = cv2.resize(util.single2uint(img_lq), (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),\n                                
interpolation=0)\n        lq_bicubic_nearest = cv2.resize(util.single2uint(img_lq_bicubic),\n                                        (int(sf * img_lq.shape[1]), int(sf * img_lq.shape[0])),\n                                        interpolation=0)\n        img_concat = np.concatenate([lq_bicubic_nearest, lq_nearest, util.single2uint(img_hq)], axis=1)\n        util.imsave(img_concat, str(i) + '.png')\n"
  },
  {
    "path": "ldm_exp/ldm/modules/image_degradation/utils_image.py",
    "content": "import os\nimport math\nimport random\nimport numpy as np\nimport torch\nimport cv2\nfrom torchvision.utils import make_grid\nfrom datetime import datetime\n#import matplotlib.pyplot as plt   # TODO: check with Dominik, also bsrgan.py vs bsrgan_light.py\n\n\nos.environ[\"KMP_DUPLICATE_LIB_OK\"]=\"TRUE\"\n\n\n'''\n# --------------------------------------------\n# Kai Zhang (github: https://github.com/cszn)\n# 03/Mar/2019\n# --------------------------------------------\n# https://github.com/twhui/SRGAN-pyTorch\n# https://github.com/xinntao/BasicSR\n# --------------------------------------------\n'''\n\n\nIMG_EXTENSIONS = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP', '.tif']\n\n\ndef is_image_file(filename):\n    return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)\n\n\ndef get_timestamp():\n    return datetime.now().strftime('%y%m%d-%H%M%S')\n\n\ndef imshow(x, title=None, cbar=False, figsize=None):\n    plt.figure(figsize=figsize)\n    plt.imshow(np.squeeze(x), interpolation='nearest', cmap='gray')\n    if title:\n        plt.title(title)\n    if cbar:\n        plt.colorbar()\n    plt.show()\n\n\ndef surf(Z, cmap='rainbow', figsize=None):\n    plt.figure(figsize=figsize)\n    ax3 = plt.axes(projection='3d')\n\n    w, h = Z.shape[:2]\n    xx = np.arange(0,w,1)\n    yy = np.arange(0,h,1)\n    X, Y = np.meshgrid(xx, yy)\n    ax3.plot_surface(X,Y,Z,cmap=cmap)\n    #ax3.contour(X,Y,Z, zdim='z',offset=-2，cmap=cmap)\n    plt.show()\n\n\n'''\n# --------------------------------------------\n# get image pathes\n# --------------------------------------------\n'''\n\n\ndef get_image_paths(dataroot):\n    paths = None  # return None if dataroot is None\n    if dataroot is not None:\n        paths = sorted(_get_paths_from_images(dataroot))\n    return paths\n\n\ndef _get_paths_from_images(path):\n    assert os.path.isdir(path), '{:s} is not a valid directory'.format(path)\n    images = []\n    for dirpath, _, fnames in sorted(os.walk(path)):\n        for fname in sorted(fnames):\n            if is_image_file(fname):\n                img_path = os.path.join(dirpath, fname)\n                images.append(img_path)\n    assert images, '{:s} has no valid image file'.format(path)\n    return images\n\n\n'''\n# --------------------------------------------\n# split large images into small images \n# --------------------------------------------\n'''\n\n\ndef patches_from_image(img, p_size=512, p_overlap=64, p_max=800):\n    w, h = img.shape[:2]\n    patches = []\n    if w > p_max and h > p_max:\n        w1 = list(np.arange(0, w-p_size, p_size-p_overlap, dtype=np.int))\n        h1 = list(np.arange(0, h-p_size, p_size-p_overlap, dtype=np.int))\n        w1.append(w-p_size)\n        h1.append(h-p_size)\n#        print(w1)\n#        print(h1)\n        for i in w1:\n            for j in h1:\n                patches.append(img[i:i+p_size, j:j+p_size,:])\n    else:\n        patches.append(img)\n\n    return patches\n\n\ndef imssave(imgs, img_path):\n    \"\"\"\n    imgs: list, N images of size WxHxC\n    \"\"\"\n    img_name, ext = os.path.splitext(os.path.basename(img_path))\n\n    for i, img in enumerate(imgs):\n        if img.ndim == 3:\n            img = img[:, :, [2, 1, 0]]\n        new_path = os.path.join(os.path.dirname(img_path), img_name+str('_s{:04d}'.format(i))+'.png')\n        cv2.imwrite(new_path, img)\n\n\ndef split_imageset(original_dataroot, taget_dataroot, n_channels=3, p_size=800, p_overlap=96, p_max=1000):\n    \"\"\"\n    split 
the large images from original_dataroot into small overlapped images with size (p_size)x(p_size),\n    and save them into taget_dataroot; only the images with larger size than (p_max)x(p_max)\n    will be splitted.\n    Args:\n        original_dataroot:\n        taget_dataroot:\n        p_size: size of small images\n        p_overlap: patch size in training is a good choice\n        p_max: images with smaller size than (p_max)x(p_max) keep unchanged.\n    \"\"\"\n    paths = get_image_paths(original_dataroot)\n    for img_path in paths:\n        # img_name, ext = os.path.splitext(os.path.basename(img_path))\n        img = imread_uint(img_path, n_channels=n_channels)\n        patches = patches_from_image(img, p_size, p_overlap, p_max)\n        imssave(patches, os.path.join(taget_dataroot,os.path.basename(img_path)))\n        #if original_dataroot == taget_dataroot:\n        #del img_path\n\n'''\n# --------------------------------------------\n# makedir\n# --------------------------------------------\n'''\n\n\ndef mkdir(path):\n    if not os.path.exists(path):\n        os.makedirs(path)\n\n\ndef mkdirs(paths):\n    if isinstance(paths, str):\n        mkdir(paths)\n    else:\n        for path in paths:\n            mkdir(path)\n\n\ndef mkdir_and_rename(path):\n    if os.path.exists(path):\n        new_name = path + '_archived_' + get_timestamp()\n        print('Path already exists. Rename it to [{:s}]'.format(new_name))\n        os.rename(path, new_name)\n    os.makedirs(path)\n\n\n'''\n# --------------------------------------------\n# read image from path\n# opencv is fast, but read BGR numpy image\n# --------------------------------------------\n'''\n\n\n# --------------------------------------------\n# get uint8 image of size HxWxn_channles (RGB)\n# --------------------------------------------\ndef imread_uint(path, n_channels=3):\n    #  input: path\n    # output: HxWx3(RGB or GGG), or HxWx1 (G)\n    if n_channels == 1:\n        img = cv2.imread(path, 0)  # cv2.IMREAD_GRAYSCALE\n        img = np.expand_dims(img, axis=2)  # HxWx1\n    elif n_channels == 3:\n        img = cv2.imread(path, cv2.IMREAD_UNCHANGED)  # BGR or G\n        if img.ndim == 2:\n            img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)  # GGG\n        else:\n            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # RGB\n    return img\n\n\n# --------------------------------------------\n# matlab's imwrite\n# --------------------------------------------\ndef imsave(img, img_path):\n    img = np.squeeze(img)\n    if img.ndim == 3:\n        img = img[:, :, [2, 1, 0]]\n    cv2.imwrite(img_path, img)\n\ndef imwrite(img, img_path):\n    img = np.squeeze(img)\n    if img.ndim == 3:\n        img = img[:, :, [2, 1, 0]]\n    cv2.imwrite(img_path, img)\n\n\n\n# --------------------------------------------\n# get single image of size HxWxn_channles (BGR)\n# --------------------------------------------\ndef read_img(path):\n    # read image by cv2\n    # return: Numpy float32, HWC, BGR, [0,1]\n    img = cv2.imread(path, cv2.IMREAD_UNCHANGED)  # cv2.IMREAD_GRAYSCALE\n    img = img.astype(np.float32) / 255.\n    if img.ndim == 2:\n        img = np.expand_dims(img, axis=2)\n    # some images have 4 channels\n    if img.shape[2] > 3:\n        img = img[:, :, :3]\n    return img\n\n\n'''\n# --------------------------------------------\n# image format conversion\n# --------------------------------------------\n# numpy(single) <--->  numpy(unit)\n# numpy(single) <--->  tensor\n# numpy(unit)   <--->  tensor\n# 
--------------------------------------------\n'''\n\n\n# --------------------------------------------\n# numpy(single) [0, 1] <--->  numpy(unit)\n# --------------------------------------------\n\n\ndef uint2single(img):\n\n    return np.float32(img/255.)\n\n\ndef single2uint(img):\n\n    return np.uint8((img.clip(0, 1)*255.).round())\n\n\ndef uint162single(img):\n\n    return np.float32(img/65535.)\n\n\ndef single2uint16(img):\n\n    return np.uint16((img.clip(0, 1)*65535.).round())\n\n\n# --------------------------------------------\n# numpy(unit) (HxWxC or HxW) <--->  tensor\n# --------------------------------------------\n\n\n# convert uint to 4-dimensional torch tensor\ndef uint2tensor4(img):\n    if img.ndim == 2:\n        img = np.expand_dims(img, axis=2)\n    return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().div(255.).unsqueeze(0)\n\n\n# convert uint to 3-dimensional torch tensor\ndef uint2tensor3(img):\n    if img.ndim == 2:\n        img = np.expand_dims(img, axis=2)\n    return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().div(255.)\n\n\n# convert 2/3/4-dimensional torch tensor to uint\ndef tensor2uint(img):\n    img = img.data.squeeze().float().clamp_(0, 1).cpu().numpy()\n    if img.ndim == 3:\n        img = np.transpose(img, (1, 2, 0))\n    return np.uint8((img*255.0).round())\n\n\n# --------------------------------------------\n# numpy(single) (HxWxC) <--->  tensor\n# --------------------------------------------\n\n\n# convert single (HxWxC) to 3-dimensional torch tensor\ndef single2tensor3(img):\n    return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float()\n\n\n# convert single (HxWxC) to 4-dimensional torch tensor\ndef single2tensor4(img):\n    return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1).float().unsqueeze(0)\n\n\n# convert torch tensor to single\ndef tensor2single(img):\n    img = img.data.squeeze().float().cpu().numpy()\n    if img.ndim == 3:\n        img = np.transpose(img, (1, 2, 0))\n\n    return img\n\n# convert torch tensor to single\ndef tensor2single3(img):\n    img = img.data.squeeze().float().cpu().numpy()\n    if img.ndim == 3:\n        img = np.transpose(img, (1, 2, 0))\n    elif img.ndim == 2:\n        img = np.expand_dims(img, axis=2)\n    return img\n\n\ndef single2tensor5(img):\n    return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1, 3).float().unsqueeze(0)\n\n\ndef single32tensor5(img):\n    return torch.from_numpy(np.ascontiguousarray(img)).float().unsqueeze(0).unsqueeze(0)\n\n\ndef single42tensor4(img):\n    return torch.from_numpy(np.ascontiguousarray(img)).permute(2, 0, 1, 3).float()\n\n\n# from skimage.io import imread, imsave\ndef tensor2img(tensor, out_type=np.uint8, min_max=(0, 1)):\n    '''\n    Converts a torch Tensor into an image Numpy array of BGR channel order\n    Input: 4D(B,(3/1),H,W), 3D(C,H,W), or 2D(H,W), any range, RGB channel order\n    Output: 3D(H,W,C) or 2D(H,W), [0,255], np.uint8 (default)\n    '''\n    tensor = tensor.squeeze().float().cpu().clamp_(*min_max)  # squeeze first, then clamp\n    tensor = (tensor - min_max[0]) / (min_max[1] - min_max[0])  # to range [0,1]\n    n_dim = tensor.dim()\n    if n_dim == 4:\n        n_img = len(tensor)\n        img_np = make_grid(tensor, nrow=int(math.sqrt(n_img)), normalize=False).numpy()\n        img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0))  # HWC, BGR\n    elif n_dim == 3:\n        img_np = tensor.numpy()\n        img_np = np.transpose(img_np[[2, 1, 0], :, :], (1, 2, 0))  
# HWC, BGR\n    elif n_dim == 2:\n        img_np = tensor.numpy()\n    else:\n        raise TypeError(\n            'Only support 4D, 3D and 2D tensor. But received with dimension: {:d}'.format(n_dim))\n    if out_type == np.uint8:\n        img_np = (img_np * 255.0).round()\n        # Important. Unlike matlab, numpy.unit8() WILL NOT round by default.\n    return img_np.astype(out_type)\n\n\n'''\n# --------------------------------------------\n# Augmentation, flipe and/or rotate\n# --------------------------------------------\n# The following two are enough.\n# (1) augmet_img: numpy image of WxHxC or WxH\n# (2) augment_img_tensor4: tensor image 1xCxWxH\n# --------------------------------------------\n'''\n\n\ndef augment_img(img, mode=0):\n    '''Kai Zhang (github: https://github.com/cszn)\n    '''\n    if mode == 0:\n        return img\n    elif mode == 1:\n        return np.flipud(np.rot90(img))\n    elif mode == 2:\n        return np.flipud(img)\n    elif mode == 3:\n        return np.rot90(img, k=3)\n    elif mode == 4:\n        return np.flipud(np.rot90(img, k=2))\n    elif mode == 5:\n        return np.rot90(img)\n    elif mode == 6:\n        return np.rot90(img, k=2)\n    elif mode == 7:\n        return np.flipud(np.rot90(img, k=3))\n\n\ndef augment_img_tensor4(img, mode=0):\n    '''Kai Zhang (github: https://github.com/cszn)\n    '''\n    if mode == 0:\n        return img\n    elif mode == 1:\n        return img.rot90(1, [2, 3]).flip([2])\n    elif mode == 2:\n        return img.flip([2])\n    elif mode == 3:\n        return img.rot90(3, [2, 3])\n    elif mode == 4:\n        return img.rot90(2, [2, 3]).flip([2])\n    elif mode == 5:\n        return img.rot90(1, [2, 3])\n    elif mode == 6:\n        return img.rot90(2, [2, 3])\n    elif mode == 7:\n        return img.rot90(3, [2, 3]).flip([2])\n\n\ndef augment_img_tensor(img, mode=0):\n    '''Kai Zhang (github: https://github.com/cszn)\n    '''\n    img_size = img.size()\n    img_np = img.data.cpu().numpy()\n    if len(img_size) == 3:\n        img_np = np.transpose(img_np, (1, 2, 0))\n    elif len(img_size) == 4:\n        img_np = np.transpose(img_np, (2, 3, 1, 0))\n    img_np = augment_img(img_np, mode=mode)\n    img_tensor = torch.from_numpy(np.ascontiguousarray(img_np))\n    if len(img_size) == 3:\n        img_tensor = img_tensor.permute(2, 0, 1)\n    elif len(img_size) == 4:\n        img_tensor = img_tensor.permute(3, 2, 0, 1)\n\n    return img_tensor.type_as(img)\n\n\ndef augment_img_np3(img, mode=0):\n    if mode == 0:\n        return img\n    elif mode == 1:\n        return img.transpose(1, 0, 2)\n    elif mode == 2:\n        return img[::-1, :, :]\n    elif mode == 3:\n        img = img[::-1, :, :]\n        img = img.transpose(1, 0, 2)\n        return img\n    elif mode == 4:\n        return img[:, ::-1, :]\n    elif mode == 5:\n        img = img[:, ::-1, :]\n        img = img.transpose(1, 0, 2)\n        return img\n    elif mode == 6:\n        img = img[:, ::-1, :]\n        img = img[::-1, :, :]\n        return img\n    elif mode == 7:\n        img = img[:, ::-1, :]\n        img = img[::-1, :, :]\n        img = img.transpose(1, 0, 2)\n        return img\n\n\ndef augment_imgs(img_list, hflip=True, rot=True):\n    # horizontal flip OR rotate\n    hflip = hflip and random.random() < 0.5\n    vflip = rot and random.random() < 0.5\n    rot90 = rot and random.random() < 0.5\n\n    def _augment(img):\n        if hflip:\n            img = img[:, ::-1, :]\n        if vflip:\n            img = img[::-1, :, :]\n        if rot90:\n      
      img = img.transpose(1, 0, 2)\n        return img\n\n    return [_augment(img) for img in img_list]\n\n\n'''\n# --------------------------------------------\n# modcrop and shave\n# --------------------------------------------\n'''\n\n\ndef modcrop(img_in, scale):\n    # img_in: Numpy, HWC or HW\n    img = np.copy(img_in)\n    if img.ndim == 2:\n        H, W = img.shape\n        H_r, W_r = H % scale, W % scale\n        img = img[:H - H_r, :W - W_r]\n    elif img.ndim == 3:\n        H, W, C = img.shape\n        H_r, W_r = H % scale, W % scale\n        img = img[:H - H_r, :W - W_r, :]\n    else:\n        raise ValueError('Wrong img ndim: [{:d}].'.format(img.ndim))\n    return img\n\n\ndef shave(img_in, border=0):\n    # img_in: Numpy, HWC or HW\n    img = np.copy(img_in)\n    h, w = img.shape[:2]\n    img = img[border:h-border, border:w-border]\n    return img\n\n\n'''\n# --------------------------------------------\n# image processing process on numpy image\n# channel_convert(in_c, tar_type, img_list):\n# rgb2ycbcr(img, only_y=True):\n# bgr2ycbcr(img, only_y=True):\n# ycbcr2rgb(img):\n# --------------------------------------------\n'''\n\n\ndef rgb2ycbcr(img, only_y=True):\n    '''same as matlab rgb2ycbcr\n    only_y: only return Y channel\n    Input:\n        uint8, [0, 255]\n        float, [0, 1]\n    '''\n    in_img_type = img.dtype\n    img.astype(np.float32)\n    if in_img_type != np.uint8:\n        img *= 255.\n    # convert\n    if only_y:\n        rlt = np.dot(img, [65.481, 128.553, 24.966]) / 255.0 + 16.0\n    else:\n        rlt = np.matmul(img, [[65.481, -37.797, 112.0], [128.553, -74.203, -93.786],\n                              [24.966, 112.0, -18.214]]) / 255.0 + [16, 128, 128]\n    if in_img_type == np.uint8:\n        rlt = rlt.round()\n    else:\n        rlt /= 255.\n    return rlt.astype(in_img_type)\n\n\ndef ycbcr2rgb(img):\n    '''same as matlab ycbcr2rgb\n    Input:\n        uint8, [0, 255]\n        float, [0, 1]\n    '''\n    in_img_type = img.dtype\n    img.astype(np.float32)\n    if in_img_type != np.uint8:\n        img *= 255.\n    # convert\n    rlt = np.matmul(img, [[0.00456621, 0.00456621, 0.00456621], [0, -0.00153632, 0.00791071],\n                          [0.00625893, -0.00318811, 0]]) * 255.0 + [-222.921, 135.576, -276.836]\n    if in_img_type == np.uint8:\n        rlt = rlt.round()\n    else:\n        rlt /= 255.\n    return rlt.astype(in_img_type)\n\n\ndef bgr2ycbcr(img, only_y=True):\n    '''bgr version of rgb2ycbcr\n    only_y: only return Y channel\n    Input:\n        uint8, [0, 255]\n        float, [0, 1]\n    '''\n    in_img_type = img.dtype\n    img.astype(np.float32)\n    if in_img_type != np.uint8:\n        img *= 255.\n    # convert\n    if only_y:\n        rlt = np.dot(img, [24.966, 128.553, 65.481]) / 255.0 + 16.0\n    else:\n        rlt = np.matmul(img, [[24.966, 112.0, -18.214], [128.553, -74.203, -93.786],\n                              [65.481, -37.797, 112.0]]) / 255.0 + [16, 128, 128]\n    if in_img_type == np.uint8:\n        rlt = rlt.round()\n    else:\n        rlt /= 255.\n    return rlt.astype(in_img_type)\n\n\ndef channel_convert(in_c, tar_type, img_list):\n    # conversion among BGR, gray and y\n    if in_c == 3 and tar_type == 'gray':  # BGR to gray\n        gray_list = [cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) for img in img_list]\n        return [np.expand_dims(img, axis=2) for img in gray_list]\n    elif in_c == 3 and tar_type == 'y':  # BGR to y\n        y_list = [bgr2ycbcr(img, only_y=True) for img in img_list]\n        return 
[np.expand_dims(img, axis=2) for img in y_list]\n    elif in_c == 1 and tar_type == 'RGB':  # gray/y to BGR\n        return [cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) for img in img_list]\n    else:\n        return img_list\n\n\n'''\n# --------------------------------------------\n# metric, PSNR and SSIM\n# --------------------------------------------\n'''\n\n\n# --------------------------------------------\n# PSNR\n# --------------------------------------------\ndef calculate_psnr(img1, img2, border=0):\n    # img1 and img2 have range [0, 255]\n    #img1 = img1.squeeze()\n    #img2 = img2.squeeze()\n    if not img1.shape == img2.shape:\n        raise ValueError('Input images must have the same dimensions.')\n    h, w = img1.shape[:2]\n    img1 = img1[border:h-border, border:w-border]\n    img2 = img2[border:h-border, border:w-border]\n\n    img1 = img1.astype(np.float64)\n    img2 = img2.astype(np.float64)\n    mse = np.mean((img1 - img2)**2)\n    if mse == 0:\n        return float('inf')\n    return 20 * math.log10(255.0 / math.sqrt(mse))\n\n\n# --------------------------------------------\n# SSIM\n# --------------------------------------------\ndef calculate_ssim(img1, img2, border=0):\n    '''calculate SSIM\n    the same outputs as MATLAB's\n    img1, img2: [0, 255]\n    '''\n    #img1 = img1.squeeze()\n    #img2 = img2.squeeze()\n    if not img1.shape == img2.shape:\n        raise ValueError('Input images must have the same dimensions.')\n    h, w = img1.shape[:2]\n    img1 = img1[border:h-border, border:w-border]\n    img2 = img2[border:h-border, border:w-border]\n\n    if img1.ndim == 2:\n        return ssim(img1, img2)\n    elif img1.ndim == 3:\n        if img1.shape[2] == 3:\n            ssims = []\n            for i in range(3):\n                ssims.append(ssim(img1[:,:,i], img2[:,:,i]))\n            return np.array(ssims).mean()\n        elif img1.shape[2] == 1:\n            return ssim(np.squeeze(img1), np.squeeze(img2))\n    else:\n        raise ValueError('Wrong input image dimensions.')\n\n\ndef ssim(img1, img2):\n    C1 = (0.01 * 255)**2\n    C2 = (0.03 * 255)**2\n\n    img1 = img1.astype(np.float64)\n    img2 = img2.astype(np.float64)\n    kernel = cv2.getGaussianKernel(11, 1.5)\n    window = np.outer(kernel, kernel.transpose())\n\n    mu1 = cv2.filter2D(img1, -1, window)[5:-5, 5:-5]  # valid\n    mu2 = cv2.filter2D(img2, -1, window)[5:-5, 5:-5]\n    mu1_sq = mu1**2\n    mu2_sq = mu2**2\n    mu1_mu2 = mu1 * mu2\n    sigma1_sq = cv2.filter2D(img1**2, -1, window)[5:-5, 5:-5] - mu1_sq\n    sigma2_sq = cv2.filter2D(img2**2, -1, window)[5:-5, 5:-5] - mu2_sq\n    sigma12 = cv2.filter2D(img1 * img2, -1, window)[5:-5, 5:-5] - mu1_mu2\n\n    ssim_map = ((2 * mu1_mu2 + C1) * (2 * sigma12 + C2)) / ((mu1_sq + mu2_sq + C1) *\n                                                            (sigma1_sq + sigma2_sq + C2))\n    return ssim_map.mean()\n\n\n'''\n# --------------------------------------------\n# matlab's bicubic imresize (numpy and torch) [0, 1]\n# --------------------------------------------\n'''\n\n\n# matlab 'imresize' function, now only support 'bicubic'\ndef cubic(x):\n    absx = torch.abs(x)\n    absx2 = absx**2\n    absx3 = absx**3\n    return (1.5*absx3 - 2.5*absx2 + 1) * ((absx <= 1).type_as(absx)) + \\\n        (-0.5*absx3 + 2.5*absx2 - 4*absx + 2) * (((absx > 1)*(absx <= 2)).type_as(absx))\n\n\ndef calculate_weights_indices(in_length, out_length, scale, kernel, kernel_width, antialiasing):\n    if (scale < 1) and (antialiasing):\n        # Use a modified kernel to 
simultaneously interpolate and antialias- larger kernel width\n        kernel_width = kernel_width / scale\n\n    # Output-space coordinates\n    x = torch.linspace(1, out_length, out_length)\n\n    # Input-space coordinates. Calculate the inverse mapping such that 0.5\n    # in output space maps to 0.5 in input space, and 0.5+scale in output\n    # space maps to 1.5 in input space.\n    u = x / scale + 0.5 * (1 - 1 / scale)\n\n    # What is the left-most pixel that can be involved in the computation?\n    left = torch.floor(u - kernel_width / 2)\n\n    # What is the maximum number of pixels that can be involved in the\n    # computation?  Note: it's OK to use an extra pixel here; if the\n    # corresponding weights are all zero, it will be eliminated at the end\n    # of this function.\n    P = math.ceil(kernel_width) + 2\n\n    # The indices of the input pixels involved in computing the k-th output\n    # pixel are in row k of the indices matrix.\n    indices = left.view(out_length, 1).expand(out_length, P) + torch.linspace(0, P - 1, P).view(\n        1, P).expand(out_length, P)\n\n    # The weights used to compute the k-th output pixel are in row k of the\n    # weights matrix.\n    distance_to_center = u.view(out_length, 1).expand(out_length, P) - indices\n    # apply cubic kernel\n    if (scale < 1) and (antialiasing):\n        weights = scale * cubic(distance_to_center * scale)\n    else:\n        weights = cubic(distance_to_center)\n    # Normalize the weights matrix so that each row sums to 1.\n    weights_sum = torch.sum(weights, 1).view(out_length, 1)\n    weights = weights / weights_sum.expand(out_length, P)\n\n    # If a column in weights is all zero, get rid of it. only consider the first and last column.\n    weights_zero_tmp = torch.sum((weights == 0), 0)\n    if not math.isclose(weights_zero_tmp[0], 0, rel_tol=1e-6):\n        indices = indices.narrow(1, 1, P - 2)\n        weights = weights.narrow(1, 1, P - 2)\n    if not math.isclose(weights_zero_tmp[-1], 0, rel_tol=1e-6):\n        indices = indices.narrow(1, 0, P - 2)\n        weights = weights.narrow(1, 0, P - 2)\n    weights = weights.contiguous()\n    indices = indices.contiguous()\n    sym_len_s = -indices.min() + 1\n    sym_len_e = indices.max() - in_length\n    indices = indices + sym_len_s - 1\n    return weights, indices, int(sym_len_s), int(sym_len_e)\n\n\n# --------------------------------------------\n# imresize for tensor image [0, 1]\n# --------------------------------------------\ndef imresize(img, scale, antialiasing=True):\n    # Now the scale should be the same for H and W\n    # input: img: pytorch tensor, CHW or HW [0,1]\n    # output: CHW or HW [0,1] w/o round\n    need_squeeze = True if img.dim() == 2 else False\n    if need_squeeze:\n        img.unsqueeze_(0)\n    in_C, in_H, in_W = img.size()\n    out_C, out_H, out_W = in_C, math.ceil(in_H * scale), math.ceil(in_W * scale)\n    kernel_width = 4\n    kernel = 'cubic'\n\n    # Return the desired dimension order for performing the resize.  
The\n    # strategy is to perform the resize first along the dimension with the\n    # smallest scale factor.\n    # Now we do not support this.\n\n    # get weights and indices\n    weights_H, indices_H, sym_len_Hs, sym_len_He = calculate_weights_indices(\n        in_H, out_H, scale, kernel, kernel_width, antialiasing)\n    weights_W, indices_W, sym_len_Ws, sym_len_We = calculate_weights_indices(\n        in_W, out_W, scale, kernel, kernel_width, antialiasing)\n    # process H dimension\n    # symmetric copying\n    img_aug = torch.FloatTensor(in_C, in_H + sym_len_Hs + sym_len_He, in_W)\n    img_aug.narrow(1, sym_len_Hs, in_H).copy_(img)\n\n    sym_patch = img[:, :sym_len_Hs, :]\n    inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(1, inv_idx)\n    img_aug.narrow(1, 0, sym_len_Hs).copy_(sym_patch_inv)\n\n    sym_patch = img[:, -sym_len_He:, :]\n    inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(1, inv_idx)\n    img_aug.narrow(1, sym_len_Hs + in_H, sym_len_He).copy_(sym_patch_inv)\n\n    out_1 = torch.FloatTensor(in_C, out_H, in_W)\n    kernel_width = weights_H.size(1)\n    for i in range(out_H):\n        idx = int(indices_H[i][0])\n        for j in range(out_C):\n            out_1[j, i, :] = img_aug[j, idx:idx + kernel_width, :].transpose(0, 1).mv(weights_H[i])\n\n    # process W dimension\n    # symmetric copying\n    out_1_aug = torch.FloatTensor(in_C, out_H, in_W + sym_len_Ws + sym_len_We)\n    out_1_aug.narrow(2, sym_len_Ws, in_W).copy_(out_1)\n\n    sym_patch = out_1[:, :, :sym_len_Ws]\n    inv_idx = torch.arange(sym_patch.size(2) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(2, inv_idx)\n    out_1_aug.narrow(2, 0, sym_len_Ws).copy_(sym_patch_inv)\n\n    sym_patch = out_1[:, :, -sym_len_We:]\n    inv_idx = torch.arange(sym_patch.size(2) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(2, inv_idx)\n    out_1_aug.narrow(2, sym_len_Ws + in_W, sym_len_We).copy_(sym_patch_inv)\n\n    out_2 = torch.FloatTensor(in_C, out_H, out_W)\n    kernel_width = weights_W.size(1)\n    for i in range(out_W):\n        idx = int(indices_W[i][0])\n        for j in range(out_C):\n            out_2[j, :, i] = out_1_aug[j, :, idx:idx + kernel_width].mv(weights_W[i])\n    if need_squeeze:\n        out_2.squeeze_()\n    return out_2\n\n\n# --------------------------------------------\n# imresize for numpy image [0, 1]\n# --------------------------------------------\ndef imresize_np(img, scale, antialiasing=True):\n    # Now the scale should be the same for H and W\n    # input: img: Numpy, HWC or HW [0,1]\n    # output: HWC or HW [0,1] w/o round\n    img = torch.from_numpy(img)\n    need_squeeze = True if img.dim() == 2 else False\n    if need_squeeze:\n        img.unsqueeze_(2)\n\n    in_H, in_W, in_C = img.size()\n    out_C, out_H, out_W = in_C, math.ceil(in_H * scale), math.ceil(in_W * scale)\n    kernel_width = 4\n    kernel = 'cubic'\n\n    # Return the desired dimension order for performing the resize.  
The\n    # strategy is to perform the resize first along the dimension with the\n    # smallest scale factor.\n    # Now we do not support this.\n\n    # get weights and indices\n    weights_H, indices_H, sym_len_Hs, sym_len_He = calculate_weights_indices(\n        in_H, out_H, scale, kernel, kernel_width, antialiasing)\n    weights_W, indices_W, sym_len_Ws, sym_len_We = calculate_weights_indices(\n        in_W, out_W, scale, kernel, kernel_width, antialiasing)\n    # process H dimension\n    # symmetric copying\n    img_aug = torch.FloatTensor(in_H + sym_len_Hs + sym_len_He, in_W, in_C)\n    img_aug.narrow(0, sym_len_Hs, in_H).copy_(img)\n\n    sym_patch = img[:sym_len_Hs, :, :]\n    inv_idx = torch.arange(sym_patch.size(0) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(0, inv_idx)\n    img_aug.narrow(0, 0, sym_len_Hs).copy_(sym_patch_inv)\n\n    sym_patch = img[-sym_len_He:, :, :]\n    inv_idx = torch.arange(sym_patch.size(0) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(0, inv_idx)\n    img_aug.narrow(0, sym_len_Hs + in_H, sym_len_He).copy_(sym_patch_inv)\n\n    out_1 = torch.FloatTensor(out_H, in_W, in_C)\n    kernel_width = weights_H.size(1)\n    for i in range(out_H):\n        idx = int(indices_H[i][0])\n        for j in range(out_C):\n            out_1[i, :, j] = img_aug[idx:idx + kernel_width, :, j].transpose(0, 1).mv(weights_H[i])\n\n    # process W dimension\n    # symmetric copying\n    out_1_aug = torch.FloatTensor(out_H, in_W + sym_len_Ws + sym_len_We, in_C)\n    out_1_aug.narrow(1, sym_len_Ws, in_W).copy_(out_1)\n\n    sym_patch = out_1[:, :sym_len_Ws, :]\n    inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(1, inv_idx)\n    out_1_aug.narrow(1, 0, sym_len_Ws).copy_(sym_patch_inv)\n\n    sym_patch = out_1[:, -sym_len_We:, :]\n    inv_idx = torch.arange(sym_patch.size(1) - 1, -1, -1).long()\n    sym_patch_inv = sym_patch.index_select(1, inv_idx)\n    out_1_aug.narrow(1, sym_len_Ws + in_W, sym_len_We).copy_(sym_patch_inv)\n\n    out_2 = torch.FloatTensor(out_H, out_W, in_C)\n    kernel_width = weights_W.size(1)\n    for i in range(out_W):\n        idx = int(indices_W[i][0])\n        for j in range(out_C):\n            out_2[:, i, j] = out_1_aug[:, idx:idx + kernel_width, j].mv(weights_W[i])\n    if need_squeeze:\n        out_2.squeeze_()\n\n    return out_2.numpy()\n\n\nif __name__ == '__main__':\n    print('---')\n#    img = imread_uint('test.bmp', 3)\n#    img = uint2single(img)\n#    img_bicubic = imresize_np(img, 1/4)"
  },
  {
    "path": "ldm_exp/ldm/modules/losses/__init__.py",
    "content": "from ldm.modules.losses.contperceptual import LPIPSWithDiscriminator"
  },
  {
    "path": "ldm_exp/ldm/modules/losses/contperceptual.py",
    "content": "import torch\nimport torch.nn as nn\n\nfrom taming.modules.losses.vqperceptual import *  # TODO: taming dependency yes/no?\n\n\nclass LPIPSWithDiscriminator(nn.Module):\n    def __init__(self, disc_start, logvar_init=0.0, kl_weight=1.0, pixelloss_weight=1.0,\n                 disc_num_layers=3, disc_in_channels=3, disc_factor=1.0, disc_weight=1.0,\n                 perceptual_weight=1.0, use_actnorm=False, disc_conditional=False,\n                 disc_loss=\"hinge\"):\n\n        super().__init__()\n        assert disc_loss in [\"hinge\", \"vanilla\"]\n        self.kl_weight = kl_weight\n        self.pixel_weight = pixelloss_weight\n        self.perceptual_loss = LPIPS().eval()\n        self.perceptual_weight = perceptual_weight\n        # output log variance\n        self.logvar = nn.Parameter(torch.ones(size=()) * logvar_init)\n\n        self.discriminator = NLayerDiscriminator(input_nc=disc_in_channels,\n                                                 n_layers=disc_num_layers,\n                                                 use_actnorm=use_actnorm\n                                                 ).apply(weights_init)\n        self.discriminator_iter_start = disc_start\n        self.disc_loss = hinge_d_loss if disc_loss == \"hinge\" else vanilla_d_loss\n        self.disc_factor = disc_factor\n        self.discriminator_weight = disc_weight\n        self.disc_conditional = disc_conditional\n\n    def calculate_adaptive_weight(self, nll_loss, g_loss, last_layer=None):\n        if last_layer is not None:\n            nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]\n            g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]\n        else:\n            nll_grads = torch.autograd.grad(nll_loss, self.last_layer[0], retain_graph=True)[0]\n            g_grads = torch.autograd.grad(g_loss, self.last_layer[0], retain_graph=True)[0]\n\n        d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4)\n        d_weight = torch.clamp(d_weight, 0.0, 1e4).detach()\n        d_weight = d_weight * self.discriminator_weight\n        return d_weight\n\n    def forward(self, inputs, reconstructions, posteriors, optimizer_idx,\n                global_step, last_layer=None, cond=None, split=\"train\",\n                weights=None):\n        rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous())\n        if self.perceptual_weight > 0:\n            p_loss = self.perceptual_loss(inputs.contiguous(), reconstructions.contiguous())\n            rec_loss = rec_loss + self.perceptual_weight * p_loss\n\n        nll_loss = rec_loss / torch.exp(self.logvar) + self.logvar\n        weighted_nll_loss = nll_loss\n        if weights is not None:\n            weighted_nll_loss = weights*nll_loss\n        weighted_nll_loss = torch.sum(weighted_nll_loss) / weighted_nll_loss.shape[0]\n        nll_loss = torch.sum(nll_loss) / nll_loss.shape[0]\n        kl_loss = posteriors.kl()\n        kl_loss = torch.sum(kl_loss) / kl_loss.shape[0]\n\n        # now the GAN part\n        if optimizer_idx == 0:\n            # generator update\n            if cond is None:\n                assert not self.disc_conditional\n                logits_fake = self.discriminator(reconstructions.contiguous())\n            else:\n                assert self.disc_conditional\n                logits_fake = self.discriminator(torch.cat((reconstructions.contiguous(), cond), dim=1))\n            g_loss = -torch.mean(logits_fake)\n\n            if self.disc_factor > 
0.0:\n                try:\n                    d_weight = self.calculate_adaptive_weight(nll_loss, g_loss, last_layer=last_layer)\n                except RuntimeError:\n                    assert not self.training\n                    d_weight = torch.tensor(0.0)\n            else:\n                d_weight = torch.tensor(0.0)\n\n            disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)\n            loss = weighted_nll_loss + self.kl_weight * kl_loss + d_weight * disc_factor * g_loss\n\n            log = {\"{}/total_loss\".format(split): loss.clone().detach().mean(), \"{}/logvar\".format(split): self.logvar.detach(),\n                   \"{}/kl_loss\".format(split): kl_loss.detach().mean(), \"{}/nll_loss\".format(split): nll_loss.detach().mean(),\n                   \"{}/rec_loss\".format(split): rec_loss.detach().mean(),\n                   \"{}/d_weight\".format(split): d_weight.detach(),\n                   \"{}/disc_factor\".format(split): torch.tensor(disc_factor),\n                   \"{}/g_loss\".format(split): g_loss.detach().mean(),\n                   }\n            return loss, log\n\n        if optimizer_idx == 1:\n            # second pass for discriminator update\n            if cond is None:\n                logits_real = self.discriminator(inputs.contiguous().detach())\n                logits_fake = self.discriminator(reconstructions.contiguous().detach())\n            else:\n                logits_real = self.discriminator(torch.cat((inputs.contiguous().detach(), cond), dim=1))\n                logits_fake = self.discriminator(torch.cat((reconstructions.contiguous().detach(), cond), dim=1))\n\n            disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)\n            d_loss = disc_factor * self.disc_loss(logits_real, logits_fake)\n\n            log = {\"{}/disc_loss\".format(split): d_loss.clone().detach().mean(),\n                   \"{}/logits_real\".format(split): logits_real.detach().mean(),\n                   \"{}/logits_fake\".format(split): logits_fake.detach().mean()\n                   }\n            return d_loss, log\n\n"
  },
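The adaptive weight in calculate_adaptive_weight above balances the GAN term against the reconstruction term by the ratio of their gradient norms at the decoder's last layer. A self-contained toy illustration of that formula; the linear "last layer" and the two scalar losses are made up for the example and are not part of the repo:

import torch

last_layer = torch.nn.Parameter(torch.randn(4, 4))
x = torch.randn(4)
nll_loss = (last_layer @ x).abs().mean()   # stands in for the reconstruction/NLL term
g_loss = -(last_layer @ x).mean()          # stands in for the generator (GAN) term

nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]
g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]
d_weight = (torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4)).clamp(0.0, 1e4)
print(d_weight)   # the factor that scales the GAN loss in the total objective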
  {
    "path": "ldm_exp/ldm/modules/losses/vqperceptual.py",
    "content": "import torch\nfrom torch import nn\nimport torch.nn.functional as F\nfrom einops import repeat\n\nfrom taming.modules.discriminator.model import NLayerDiscriminator, weights_init\nfrom taming.modules.losses.lpips import LPIPS\nfrom taming.modules.losses.vqperceptual import hinge_d_loss, vanilla_d_loss\n\n\ndef hinge_d_loss_with_exemplar_weights(logits_real, logits_fake, weights):\n    assert weights.shape[0] == logits_real.shape[0] == logits_fake.shape[0]\n    loss_real = torch.mean(F.relu(1. - logits_real), dim=[1,2,3])\n    loss_fake = torch.mean(F.relu(1. + logits_fake), dim=[1,2,3])\n    loss_real = (weights * loss_real).sum() / weights.sum()\n    loss_fake = (weights * loss_fake).sum() / weights.sum()\n    d_loss = 0.5 * (loss_real + loss_fake)\n    return d_loss\n\ndef adopt_weight(weight, global_step, threshold=0, value=0.):\n    if global_step < threshold:\n        weight = value\n    return weight\n\n\ndef measure_perplexity(predicted_indices, n_embed):\n    # src: https://github.com/karpathy/deep-vector-quantization/blob/main/model.py\n    # eval cluster perplexity. when perplexity == num_embeddings then all clusters are used exactly equally\n    encodings = F.one_hot(predicted_indices, n_embed).float().reshape(-1, n_embed)\n    avg_probs = encodings.mean(0)\n    perplexity = (-(avg_probs * torch.log(avg_probs + 1e-10)).sum()).exp()\n    cluster_use = torch.sum(avg_probs > 0)\n    return perplexity, cluster_use\n\ndef l1(x, y):\n    return torch.abs(x-y)\n\n\ndef l2(x, y):\n    return torch.pow((x-y), 2)\n\n\nclass VQLPIPSWithDiscriminator(nn.Module):\n    def __init__(self, disc_start, codebook_weight=1.0, pixelloss_weight=1.0,\n                 disc_num_layers=3, disc_in_channels=3, disc_factor=1.0, disc_weight=1.0,\n                 perceptual_weight=1.0, use_actnorm=False, disc_conditional=False,\n                 disc_ndf=64, disc_loss=\"hinge\", n_classes=None, perceptual_loss=\"lpips\",\n                 pixel_loss=\"l1\"):\n        super().__init__()\n        assert disc_loss in [\"hinge\", \"vanilla\"]\n        assert perceptual_loss in [\"lpips\", \"clips\", \"dists\"]\n        assert pixel_loss in [\"l1\", \"l2\"]\n        self.codebook_weight = codebook_weight\n        self.pixel_weight = pixelloss_weight\n        if perceptual_loss == \"lpips\":\n            print(f\"{self.__class__.__name__}: Running with LPIPS.\")\n            self.perceptual_loss = LPIPS().eval()\n        else:\n            raise ValueError(f\"Unknown perceptual loss: >> {perceptual_loss} <<\")\n        self.perceptual_weight = perceptual_weight\n\n        if pixel_loss == \"l1\":\n            self.pixel_loss = l1\n        else:\n            self.pixel_loss = l2\n\n        self.discriminator = NLayerDiscriminator(input_nc=disc_in_channels,\n                                                 n_layers=disc_num_layers,\n                                                 use_actnorm=use_actnorm,\n                                                 ndf=disc_ndf\n                                                 ).apply(weights_init)\n        self.discriminator_iter_start = disc_start\n        if disc_loss == \"hinge\":\n            self.disc_loss = hinge_d_loss\n        elif disc_loss == \"vanilla\":\n            self.disc_loss = vanilla_d_loss\n        else:\n            raise ValueError(f\"Unknown GAN loss '{disc_loss}'.\")\n        print(f\"VQLPIPSWithDiscriminator running with {disc_loss} loss.\")\n        self.disc_factor = disc_factor\n        self.discriminator_weight = disc_weight\n     
   self.disc_conditional = disc_conditional\n        self.n_classes = n_classes\n\n    def calculate_adaptive_weight(self, nll_loss, g_loss, last_layer=None):\n        if last_layer is not None:\n            nll_grads = torch.autograd.grad(nll_loss, last_layer, retain_graph=True)[0]\n            g_grads = torch.autograd.grad(g_loss, last_layer, retain_graph=True)[0]\n        else:\n            nll_grads = torch.autograd.grad(nll_loss, self.last_layer[0], retain_graph=True)[0]\n            g_grads = torch.autograd.grad(g_loss, self.last_layer[0], retain_graph=True)[0]\n\n        d_weight = torch.norm(nll_grads) / (torch.norm(g_grads) + 1e-4)\n        d_weight = torch.clamp(d_weight, 0.0, 1e4).detach()\n        d_weight = d_weight * self.discriminator_weight\n        return d_weight\n\n    def forward(self, codebook_loss, inputs, reconstructions, optimizer_idx,\n                global_step, last_layer=None, cond=None, split=\"train\", predicted_indices=None):\n        if not exists(codebook_loss):\n            codebook_loss = torch.tensor([0.]).to(inputs.device)\n        #rec_loss = torch.abs(inputs.contiguous() - reconstructions.contiguous())\n        rec_loss = self.pixel_loss(inputs.contiguous(), reconstructions.contiguous())\n        if self.perceptual_weight > 0:\n            p_loss = self.perceptual_loss(inputs.contiguous(), reconstructions.contiguous())\n            rec_loss = rec_loss + self.perceptual_weight * p_loss\n        else:\n            p_loss = torch.tensor([0.0])\n\n        nll_loss = rec_loss\n        #nll_loss = torch.sum(nll_loss) / nll_loss.shape[0]\n        nll_loss = torch.mean(nll_loss)\n\n        # now the GAN part\n        if optimizer_idx == 0:\n            # generator update\n            if cond is None:\n                assert not self.disc_conditional\n                logits_fake = self.discriminator(reconstructions.contiguous())\n            else:\n                assert self.disc_conditional\n                logits_fake = self.discriminator(torch.cat((reconstructions.contiguous(), cond), dim=1))\n            g_loss = -torch.mean(logits_fake)\n\n            try:\n                d_weight = self.calculate_adaptive_weight(nll_loss, g_loss, last_layer=last_layer)\n            except RuntimeError:\n                assert not self.training\n                d_weight = torch.tensor(0.0)\n\n            disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)\n            loss = nll_loss + d_weight * disc_factor * g_loss + self.codebook_weight * codebook_loss.mean()\n\n            log = {\"{}/total_loss\".format(split): loss.clone().detach().mean(),\n                   \"{}/quant_loss\".format(split): codebook_loss.detach().mean(),\n                   \"{}/nll_loss\".format(split): nll_loss.detach().mean(),\n                   \"{}/rec_loss\".format(split): rec_loss.detach().mean(),\n                   \"{}/p_loss\".format(split): p_loss.detach().mean(),\n                   \"{}/d_weight\".format(split): d_weight.detach(),\n                   \"{}/disc_factor\".format(split): torch.tensor(disc_factor),\n                   \"{}/g_loss\".format(split): g_loss.detach().mean(),\n                   }\n            if predicted_indices is not None:\n                assert self.n_classes is not None\n                with torch.no_grad():\n                    perplexity, cluster_usage = measure_perplexity(predicted_indices, self.n_classes)\n                log[f\"{split}/perplexity\"] = perplexity\n                
log[f\"{split}/cluster_usage\"] = cluster_usage\n            return loss, log\n\n        if optimizer_idx == 1:\n            # second pass for discriminator update\n            if cond is None:\n                logits_real = self.discriminator(inputs.contiguous().detach())\n                logits_fake = self.discriminator(reconstructions.contiguous().detach())\n            else:\n                logits_real = self.discriminator(torch.cat((inputs.contiguous().detach(), cond), dim=1))\n                logits_fake = self.discriminator(torch.cat((reconstructions.contiguous().detach(), cond), dim=1))\n\n            disc_factor = adopt_weight(self.disc_factor, global_step, threshold=self.discriminator_iter_start)\n            d_loss = disc_factor * self.disc_loss(logits_real, logits_fake)\n\n            log = {\"{}/disc_loss\".format(split): d_loss.clone().detach().mean(),\n                   \"{}/logits_real\".format(split): logits_real.detach().mean(),\n                   \"{}/logits_fake\".format(split): logits_fake.detach().mean()\n                   }\n            return d_loss, log\n"
  },
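measure_perplexity above is a small codebook diagnostic: perplexity equals n_embed when every codebook entry is used equally often and collapses toward 1 when only one code is ever predicted. A quick check, assuming the function from that file is in scope; the index tensors are toy data:

import torch

n_embed = 4
uniform_idx = torch.arange(16) % n_embed            # all four codes used equally
collapsed_idx = torch.zeros(16, dtype=torch.long)   # only code 0 used

print(measure_perplexity(uniform_idx, n_embed))     # perplexity ~ 4.0, cluster_use = 4
print(measure_perplexity(collapsed_idx, n_embed))   # perplexity ~ 1.0, cluster_use = 1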
  {
    "path": "ldm_exp/ldm/modules/x_transformer.py",
    "content": "\"\"\"shout-out to https://github.com/lucidrains/x-transformers/tree/main/x_transformers\"\"\"\nimport torch\nfrom torch import nn, einsum\nimport torch.nn.functional as F\nfrom functools import partial\nfrom inspect import isfunction\nfrom collections import namedtuple\nfrom einops import rearrange, repeat, reduce\n\n# constants\n\nDEFAULT_DIM_HEAD = 64\n\nIntermediates = namedtuple('Intermediates', [\n    'pre_softmax_attn',\n    'post_softmax_attn'\n])\n\nLayerIntermediates = namedtuple('Intermediates', [\n    'hiddens',\n    'attn_intermediates'\n])\n\n\nclass AbsolutePositionalEmbedding(nn.Module):\n    def __init__(self, dim, max_seq_len):\n        super().__init__()\n        self.emb = nn.Embedding(max_seq_len, dim)\n        self.init_()\n\n    def init_(self):\n        nn.init.normal_(self.emb.weight, std=0.02)\n\n    def forward(self, x):\n        n = torch.arange(x.shape[1], device=x.device)\n        return self.emb(n)[None, :, :]\n\n\nclass FixedPositionalEmbedding(nn.Module):\n    def __init__(self, dim):\n        super().__init__()\n        inv_freq = 1. / (10000 ** (torch.arange(0, dim, 2).float() / dim))\n        self.register_buffer('inv_freq', inv_freq)\n\n    def forward(self, x, seq_dim=1, offset=0):\n        t = torch.arange(x.shape[seq_dim], device=x.device).type_as(self.inv_freq) + offset\n        sinusoid_inp = torch.einsum('i , j -> i j', t, self.inv_freq)\n        emb = torch.cat((sinusoid_inp.sin(), sinusoid_inp.cos()), dim=-1)\n        return emb[None, :, :]\n\n\n# helpers\n\ndef exists(val):\n    return val is not None\n\n\ndef default(val, d):\n    if exists(val):\n        return val\n    return d() if isfunction(d) else d\n\n\ndef always(val):\n    def inner(*args, **kwargs):\n        return val\n    return inner\n\n\ndef not_equals(val):\n    def inner(x):\n        return x != val\n    return inner\n\n\ndef equals(val):\n    def inner(x):\n        return x == val\n    return inner\n\n\ndef max_neg_value(tensor):\n    return -torch.finfo(tensor.dtype).max\n\n\n# keyword argument helpers\n\ndef pick_and_pop(keys, d):\n    values = list(map(lambda key: d.pop(key), keys))\n    return dict(zip(keys, values))\n\n\ndef group_dict_by_key(cond, d):\n    return_val = [dict(), dict()]\n    for key in d.keys():\n        match = bool(cond(key))\n        ind = int(not match)\n        return_val[ind][key] = d[key]\n    return (*return_val,)\n\n\ndef string_begins_with(prefix, str):\n    return str.startswith(prefix)\n\n\ndef group_by_key_prefix(prefix, d):\n    return group_dict_by_key(partial(string_begins_with, prefix), d)\n\n\ndef groupby_prefix_and_trim(prefix, d):\n    kwargs_with_prefix, kwargs = group_dict_by_key(partial(string_begins_with, prefix), d)\n    kwargs_without_prefix = dict(map(lambda x: (x[0][len(prefix):], x[1]), tuple(kwargs_with_prefix.items())))\n    return kwargs_without_prefix, kwargs\n\n\n# classes\nclass Scale(nn.Module):\n    def __init__(self, value, fn):\n        super().__init__()\n        self.value = value\n        self.fn = fn\n\n    def forward(self, x, **kwargs):\n        x, *rest = self.fn(x, **kwargs)\n        return (x * self.value, *rest)\n\n\nclass Rezero(nn.Module):\n    def __init__(self, fn):\n        super().__init__()\n        self.fn = fn\n        self.g = nn.Parameter(torch.zeros(1))\n\n    def forward(self, x, **kwargs):\n        x, *rest = self.fn(x, **kwargs)\n        return (x * self.g, *rest)\n\n\nclass ScaleNorm(nn.Module):\n    def __init__(self, dim, eps=1e-5):\n        super().__init__()\n        
self.scale = dim ** -0.5\n        self.eps = eps\n        self.g = nn.Parameter(torch.ones(1))\n\n    def forward(self, x):\n        norm = torch.norm(x, dim=-1, keepdim=True) * self.scale\n        return x / norm.clamp(min=self.eps) * self.g\n\n\nclass RMSNorm(nn.Module):\n    def __init__(self, dim, eps=1e-8):\n        super().__init__()\n        self.scale = dim ** -0.5\n        self.eps = eps\n        self.g = nn.Parameter(torch.ones(dim))\n\n    def forward(self, x):\n        norm = torch.norm(x, dim=-1, keepdim=True) * self.scale\n        return x / norm.clamp(min=self.eps) * self.g\n\n\nclass Residual(nn.Module):\n    def forward(self, x, residual):\n        return x + residual\n\n\nclass GRUGating(nn.Module):\n    def __init__(self, dim):\n        super().__init__()\n        self.gru = nn.GRUCell(dim, dim)\n\n    def forward(self, x, residual):\n        gated_output = self.gru(\n            rearrange(x, 'b n d -> (b n) d'),\n            rearrange(residual, 'b n d -> (b n) d')\n        )\n\n        return gated_output.reshape_as(x)\n\n\n# feedforward\n\nclass GEGLU(nn.Module):\n    def __init__(self, dim_in, dim_out):\n        super().__init__()\n        self.proj = nn.Linear(dim_in, dim_out * 2)\n\n    def forward(self, x):\n        x, gate = self.proj(x).chunk(2, dim=-1)\n        return x * F.gelu(gate)\n\n\nclass FeedForward(nn.Module):\n    def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):\n        super().__init__()\n        inner_dim = int(dim * mult)\n        dim_out = default(dim_out, dim)\n        project_in = nn.Sequential(\n            nn.Linear(dim, inner_dim),\n            nn.GELU()\n        ) if not glu else GEGLU(dim, inner_dim)\n\n        self.net = nn.Sequential(\n            project_in,\n            nn.Dropout(dropout),\n            nn.Linear(inner_dim, dim_out)\n        )\n\n    def forward(self, x):\n        return self.net(x)\n\n\n# attention.\nclass Attention(nn.Module):\n    def __init__(\n            self,\n            dim,\n            dim_head=DEFAULT_DIM_HEAD,\n            heads=8,\n            causal=False,\n            mask=None,\n            talking_heads=False,\n            sparse_topk=None,\n            use_entmax15=False,\n            num_mem_kv=0,\n            dropout=0.,\n            on_attn=False\n    ):\n        super().__init__()\n        if use_entmax15:\n            raise NotImplementedError(\"Check out entmax activation instead of softmax activation!\")\n        self.scale = dim_head ** -0.5\n        self.heads = heads\n        self.causal = causal\n        self.mask = mask\n\n        inner_dim = dim_head * heads\n\n        self.to_q = nn.Linear(dim, inner_dim, bias=False)\n        self.to_k = nn.Linear(dim, inner_dim, bias=False)\n        self.to_v = nn.Linear(dim, inner_dim, bias=False)\n        self.dropout = nn.Dropout(dropout)\n\n        # talking heads\n        self.talking_heads = talking_heads\n        if talking_heads:\n            self.pre_softmax_proj = nn.Parameter(torch.randn(heads, heads))\n            self.post_softmax_proj = nn.Parameter(torch.randn(heads, heads))\n\n        # explicit topk sparse attention\n        self.sparse_topk = sparse_topk\n\n        # entmax\n        #self.attn_fn = entmax15 if use_entmax15 else F.softmax\n        self.attn_fn = F.softmax\n\n        # add memory key / values\n        self.num_mem_kv = num_mem_kv\n        if num_mem_kv > 0:\n            self.mem_k = nn.Parameter(torch.randn(heads, num_mem_kv, dim_head))\n            self.mem_v = nn.Parameter(torch.randn(heads, 
num_mem_kv, dim_head))\n\n        # attention on attention\n        self.attn_on_attn = on_attn\n        self.to_out = nn.Sequential(nn.Linear(inner_dim, dim * 2), nn.GLU()) if on_attn else nn.Linear(inner_dim, dim)\n\n    def forward(\n            self,\n            x,\n            context=None,\n            mask=None,\n            context_mask=None,\n            rel_pos=None,\n            sinusoidal_emb=None,\n            prev_attn=None,\n            mem=None\n    ):\n        b, n, _, h, talking_heads, device = *x.shape, self.heads, self.talking_heads, x.device\n        kv_input = default(context, x)\n\n        q_input = x\n        k_input = kv_input\n        v_input = kv_input\n\n        if exists(mem):\n            k_input = torch.cat((mem, k_input), dim=-2)\n            v_input = torch.cat((mem, v_input), dim=-2)\n\n        if exists(sinusoidal_emb):\n            # in shortformer, the query would start at a position offset depending on the past cached memory\n            offset = k_input.shape[-2] - q_input.shape[-2]\n            q_input = q_input + sinusoidal_emb(q_input, offset=offset)\n            k_input = k_input + sinusoidal_emb(k_input)\n\n        q = self.to_q(q_input)\n        k = self.to_k(k_input)\n        v = self.to_v(v_input)\n\n        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=h), (q, k, v))\n\n        input_mask = None\n        if any(map(exists, (mask, context_mask))):\n            q_mask = default(mask, lambda: torch.ones((b, n), device=device).bool())\n            k_mask = q_mask if not exists(context) else context_mask\n            k_mask = default(k_mask, lambda: torch.ones((b, k.shape[-2]), device=device).bool())\n            q_mask = rearrange(q_mask, 'b i -> b () i ()')\n            k_mask = rearrange(k_mask, 'b j -> b () () j')\n            input_mask = q_mask * k_mask\n\n        if self.num_mem_kv > 0:\n            mem_k, mem_v = map(lambda t: repeat(t, 'h n d -> b h n d', b=b), (self.mem_k, self.mem_v))\n            k = torch.cat((mem_k, k), dim=-2)\n            v = torch.cat((mem_v, v), dim=-2)\n            if exists(input_mask):\n                input_mask = F.pad(input_mask, (self.num_mem_kv, 0), value=True)\n\n        dots = einsum('b h i d, b h j d -> b h i j', q, k) * self.scale\n        mask_value = max_neg_value(dots)\n\n        if exists(prev_attn):\n            dots = dots + prev_attn\n\n        pre_softmax_attn = dots\n\n        if talking_heads:\n            dots = einsum('b h i j, h k -> b k i j', dots, self.pre_softmax_proj).contiguous()\n\n        if exists(rel_pos):\n            dots = rel_pos(dots)\n\n        if exists(input_mask):\n            dots.masked_fill_(~input_mask, mask_value)\n            del input_mask\n\n        if self.causal:\n            i, j = dots.shape[-2:]\n            r = torch.arange(i, device=device)\n            mask = rearrange(r, 'i -> () () i ()') < rearrange(r, 'j -> () () () j')\n            mask = F.pad(mask, (j - i, 0), value=False)\n            dots.masked_fill_(mask, mask_value)\n            del mask\n\n        if exists(self.sparse_topk) and self.sparse_topk < dots.shape[-1]:\n            top, _ = dots.topk(self.sparse_topk, dim=-1)\n            vk = top[..., -1].unsqueeze(-1).expand_as(dots)\n            mask = dots < vk\n            dots.masked_fill_(mask, mask_value)\n            del mask\n\n        attn = self.attn_fn(dots, dim=-1)\n        post_softmax_attn = attn\n\n        attn = self.dropout(attn)\n\n        if talking_heads:\n            attn = einsum('b h i j, h k -> b k i j', 
attn, self.post_softmax_proj).contiguous()\n\n        out = einsum('b h i j, b h j d -> b h i d', attn, v)\n        out = rearrange(out, 'b h n d -> b n (h d)')\n\n        intermediates = Intermediates(\n            pre_softmax_attn=pre_softmax_attn,\n            post_softmax_attn=post_softmax_attn\n        )\n\n        return self.to_out(out), intermediates\n\n\nclass AttentionLayers(nn.Module):\n    def __init__(\n            self,\n            dim,\n            depth,\n            heads=8,\n            causal=False,\n            cross_attend=False,\n            only_cross=False,\n            use_scalenorm=False,\n            use_rmsnorm=False,\n            use_rezero=False,\n            rel_pos_num_buckets=32,\n            rel_pos_max_distance=128,\n            position_infused_attn=False,\n            custom_layers=None,\n            sandwich_coef=None,\n            par_ratio=None,\n            residual_attn=False,\n            cross_residual_attn=False,\n            macaron=False,\n            pre_norm=True,\n            gate_residual=False,\n            **kwargs\n    ):\n        super().__init__()\n        ff_kwargs, kwargs = groupby_prefix_and_trim('ff_', kwargs)\n        attn_kwargs, _ = groupby_prefix_and_trim('attn_', kwargs)\n\n        dim_head = attn_kwargs.get('dim_head', DEFAULT_DIM_HEAD)\n\n        self.dim = dim\n        self.depth = depth\n        self.layers = nn.ModuleList([])\n\n        self.has_pos_emb = position_infused_attn\n        self.pia_pos_emb = FixedPositionalEmbedding(dim) if position_infused_attn else None\n        self.rotary_pos_emb = always(None)\n\n        assert rel_pos_num_buckets <= rel_pos_max_distance, 'number of relative position buckets must be less than the relative position max distance'\n        self.rel_pos = None\n\n        self.pre_norm = pre_norm\n\n        self.residual_attn = residual_attn\n        self.cross_residual_attn = cross_residual_attn\n\n        norm_class = ScaleNorm if use_scalenorm else nn.LayerNorm\n        norm_class = RMSNorm if use_rmsnorm else norm_class\n        norm_fn = partial(norm_class, dim)\n\n        norm_fn = nn.Identity if use_rezero else norm_fn\n        branch_fn = Rezero if use_rezero else None\n\n        if cross_attend and not only_cross:\n            default_block = ('a', 'c', 'f')\n        elif cross_attend and only_cross:\n            default_block = ('c', 'f')\n        else:\n            default_block = ('a', 'f')\n\n        if macaron:\n            default_block = ('f',) + default_block\n\n        if exists(custom_layers):\n            layer_types = custom_layers\n        elif exists(par_ratio):\n            par_depth = depth * len(default_block)\n            assert 1 < par_ratio <= par_depth, 'par ratio out of range'\n            default_block = tuple(filter(not_equals('f'), default_block))\n            par_attn = par_depth // par_ratio\n            depth_cut = par_depth * 2 // 3  # 2 / 3 attention layer cutoff suggested by PAR paper\n            par_width = (depth_cut + depth_cut // par_attn) // par_attn\n            assert len(default_block) <= par_width, 'default block is too large for par_ratio'\n            par_block = default_block + ('f',) * (par_width - len(default_block))\n            par_head = par_block * par_attn\n            layer_types = par_head + ('f',) * (par_depth - len(par_head))\n        elif exists(sandwich_coef):\n            assert sandwich_coef > 0 and sandwich_coef <= depth, 'sandwich coefficient should be less than the depth'\n            layer_types = ('a',) * sandwich_coef 
+ default_block * (depth - sandwich_coef) + ('f',) * sandwich_coef\n        else:\n            layer_types = default_block * depth\n\n        self.layer_types = layer_types\n        self.num_attn_layers = len(list(filter(equals('a'), layer_types)))\n\n        for layer_type in self.layer_types:\n            if layer_type == 'a':\n                layer = Attention(dim, heads=heads, causal=causal, **attn_kwargs)\n            elif layer_type == 'c':\n                layer = Attention(dim, heads=heads, **attn_kwargs)\n            elif layer_type == 'f':\n                layer = FeedForward(dim, **ff_kwargs)\n                layer = layer if not macaron else Scale(0.5, layer)\n            else:\n                raise Exception(f'invalid layer type {layer_type}')\n\n            if isinstance(layer, Attention) and exists(branch_fn):\n                layer = branch_fn(layer)\n\n            if gate_residual:\n                residual_fn = GRUGating(dim)\n            else:\n                residual_fn = Residual()\n\n            self.layers.append(nn.ModuleList([\n                norm_fn(),\n                layer,\n                residual_fn\n            ]))\n\n    def forward(\n            self,\n            x,\n            context=None,\n            mask=None,\n            context_mask=None,\n            mems=None,\n            return_hiddens=False\n    ):\n        hiddens = []\n        intermediates = []\n        prev_attn = None\n        prev_cross_attn = None\n\n        mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers\n\n        for ind, (layer_type, (norm, block, residual_fn)) in enumerate(zip(self.layer_types, self.layers)):\n            is_last = ind == (len(self.layers) - 1)\n\n            if layer_type == 'a':\n                hiddens.append(x)\n                layer_mem = mems.pop(0)\n\n            residual = x\n\n            if self.pre_norm:\n                x = norm(x)\n\n            if layer_type == 'a':\n                out, inter = block(x, mask=mask, sinusoidal_emb=self.pia_pos_emb, rel_pos=self.rel_pos,\n                                   prev_attn=prev_attn, mem=layer_mem)\n            elif layer_type == 'c':\n                out, inter = block(x, context=context, mask=mask, context_mask=context_mask, prev_attn=prev_cross_attn)\n            elif layer_type == 'f':\n                out = block(x)\n\n            x = residual_fn(out, residual)\n\n            if layer_type in ('a', 'c'):\n                intermediates.append(inter)\n\n            if layer_type == 'a' and self.residual_attn:\n                prev_attn = inter.pre_softmax_attn\n            elif layer_type == 'c' and self.cross_residual_attn:\n                prev_cross_attn = inter.pre_softmax_attn\n\n            if not self.pre_norm and not is_last:\n                x = norm(x)\n\n        if return_hiddens:\n            intermediates = LayerIntermediates(\n                hiddens=hiddens,\n                attn_intermediates=intermediates\n            )\n\n            return x, intermediates\n\n        return x\n\n\nclass Encoder(AttentionLayers):\n    def __init__(self, **kwargs):\n        assert 'causal' not in kwargs, 'cannot set causality on encoder'\n        super().__init__(causal=False, **kwargs)\n\n\n\nclass TransformerWrapper(nn.Module):\n    def __init__(\n            self,\n            *,\n            num_tokens,\n            max_seq_len,\n            attn_layers,\n            emb_dim=None,\n            max_mem_len=0.,\n            emb_dropout=0.,\n            num_memory_tokens=None,\n  
          tie_embedding=False,\n            use_pos_emb=True\n    ):\n        super().__init__()\n        assert isinstance(attn_layers, AttentionLayers), 'attention layers must be one of Encoder or Decoder'\n\n        dim = attn_layers.dim\n        emb_dim = default(emb_dim, dim)\n\n        self.max_seq_len = max_seq_len\n        self.max_mem_len = max_mem_len\n        self.num_tokens = num_tokens\n\n        self.token_emb = nn.Embedding(num_tokens, emb_dim)\n        self.pos_emb = AbsolutePositionalEmbedding(emb_dim, max_seq_len) if (\n                    use_pos_emb and not attn_layers.has_pos_emb) else always(0)\n        self.emb_dropout = nn.Dropout(emb_dropout)\n\n        self.project_emb = nn.Linear(emb_dim, dim) if emb_dim != dim else nn.Identity()\n        self.attn_layers = attn_layers\n        self.norm = nn.LayerNorm(dim)\n\n        self.init_()\n\n        self.to_logits = nn.Linear(dim, num_tokens) if not tie_embedding else lambda t: t @ self.token_emb.weight.t()\n\n        # memory tokens (like [cls]) from Memory Transformers paper\n        num_memory_tokens = default(num_memory_tokens, 0)\n        self.num_memory_tokens = num_memory_tokens\n        if num_memory_tokens > 0:\n            self.memory_tokens = nn.Parameter(torch.randn(num_memory_tokens, dim))\n\n            # let funnel encoder know number of memory tokens, if specified\n            if hasattr(attn_layers, 'num_memory_tokens'):\n                attn_layers.num_memory_tokens = num_memory_tokens\n\n    def init_(self):\n        nn.init.normal_(self.token_emb.weight, std=0.02)\n\n    def forward(\n            self,\n            x,\n            return_embeddings=False,\n            mask=None,\n            return_mems=False,\n            return_attn=False,\n            mems=None,\n            **kwargs\n    ):\n        b, n, device, num_mem = *x.shape, x.device, self.num_memory_tokens\n        x = self.token_emb(x)\n        x += self.pos_emb(x)\n        x = self.emb_dropout(x)\n\n        x = self.project_emb(x)\n\n        if num_mem > 0:\n            mem = repeat(self.memory_tokens, 'n d -> b n d', b=b)\n            x = torch.cat((mem, x), dim=1)\n\n            # auto-handle masking after appending memory tokens\n            if exists(mask):\n                mask = F.pad(mask, (num_mem, 0), value=True)\n\n        x, intermediates = self.attn_layers(x, mask=mask, mems=mems, return_hiddens=True, **kwargs)\n        x = self.norm(x)\n\n        mem, x = x[:, :num_mem], x[:, num_mem:]\n\n        out = self.to_logits(x) if not return_embeddings else x\n\n        if return_mems:\n            hiddens = intermediates.hiddens\n            new_mems = list(map(lambda pair: torch.cat(pair, dim=-2), zip(mems, hiddens))) if exists(mems) else hiddens\n            new_mems = list(map(lambda t: t[..., -self.max_mem_len:, :].detach(), new_mems))\n            return out, new_mems\n\n        if return_attn:\n            attn_maps = list(map(lambda t: t.post_softmax_attn, intermediates.attn_intermediates))\n            return out, attn_maps\n\n        return out\n\n"
  },
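A minimal sketch of how the pieces in x_transformer.py fit together as a token encoder (the vocabulary size, sequence length and layer sizes below are illustrative values, not taken from the repo's configs):

import torch

enc = TransformerWrapper(
    num_tokens=1000,
    max_seq_len=77,
    attn_layers=Encoder(dim=128, depth=2, heads=4),
)
tokens = torch.randint(0, 1000, (2, 77))     # batch of token ids
feats = enc(tokens, return_embeddings=True)  # per-token features, shape (2, 77, 128)
print(feats.shape)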
  {
    "path": "ldm_exp/ldm/util.py",
    "content": "import importlib\n\nimport torch\nimport numpy as np\nfrom collections import abc\nfrom einops import rearrange\nfrom functools import partial\n\nimport multiprocessing as mp\nfrom threading import Thread\nfrom queue import Queue\n\nfrom inspect import isfunction\nfrom PIL import Image, ImageDraw, ImageFont\n\n\ndef log_txt_as_img(wh, xc, size=10):\n    # wh a tuple of (width, height)\n    # xc a list of captions to plot\n    b = len(xc)\n    txts = list()\n    for bi in range(b):\n        txt = Image.new(\"RGB\", wh, color=\"white\")\n        draw = ImageDraw.Draw(txt)\n        font = ImageFont.truetype('data/DejaVuSans.ttf', size=size)\n        nc = int(40 * (wh[0] / 256))\n        lines = \"\\n\".join(xc[bi][start:start + nc] for start in range(0, len(xc[bi]), nc))\n\n        try:\n            draw.text((0, 0), lines, fill=\"black\", font=font)\n        except UnicodeEncodeError:\n            print(\"Cant encode string for logging. Skipping.\")\n\n        txt = np.array(txt).transpose(2, 0, 1) / 127.5 - 1.0\n        txts.append(txt)\n    txts = np.stack(txts)\n    txts = torch.tensor(txts)\n    return txts\n\n\ndef ismap(x):\n    if not isinstance(x, torch.Tensor):\n        return False\n    return (len(x.shape) == 4) and (x.shape[1] > 3)\n\n\ndef isimage(x):\n    if not isinstance(x, torch.Tensor):\n        return False\n    return (len(x.shape) == 4) and (x.shape[1] == 3 or x.shape[1] == 1)\n\n\ndef exists(x):\n    return x is not None\n\n\ndef default(val, d):\n    if exists(val):\n        return val\n    return d() if isfunction(d) else d\n\n\ndef mean_flat(tensor):\n    \"\"\"\n    https://github.com/openai/guided-diffusion/blob/27c20a8fab9cb472df5d6bdd6c8d11c8f430b924/guided_diffusion/nn.py#L86\n    Take the mean over all non-batch dimensions.\n    \"\"\"\n    return tensor.mean(dim=list(range(1, len(tensor.shape))))\n\n\ndef count_params(model, verbose=False):\n    total_params = sum(p.numel() for p in model.parameters())\n    if verbose:\n        print(f\"{model.__class__.__name__} has {total_params * 1.e-6:.2f} M params.\")\n    return total_params\n\n\ndef instantiate_from_config(config):\n    if not \"target\" in config:\n        if config == '__is_first_stage__':\n            return None\n        elif config == \"__is_unconditional__\":\n            return None\n        raise KeyError(\"Expected key `target` to instantiate.\")\n    return get_obj_from_str(config[\"target\"])(**config.get(\"params\", dict()))\n\n\ndef get_obj_from_str(string, reload=False):\n    module, cls = string.rsplit(\".\", 1)\n    if reload:\n        module_imp = importlib.import_module(module)\n        importlib.reload(module_imp)\n    return getattr(importlib.import_module(module, package=None), cls)\n\n\ndef _do_parallel_data_prefetch(func, Q, data, idx, idx_to_fn=False):\n    # create dummy dataset instance\n\n    # run prefetching\n    if idx_to_fn:\n        res = func(data, worker_id=idx)\n    else:\n        res = func(data)\n    Q.put([idx, res])\n    Q.put(\"Done\")\n\n\ndef parallel_data_prefetch(\n        func: callable, data, n_proc, target_data_type=\"ndarray\", cpu_intensive=True, use_worker_id=False\n):\n    # if target_data_type not in [\"ndarray\", \"list\"]:\n    #     raise ValueError(\n    #         \"Data, which is passed to parallel_data_prefetch has to be either of type list or ndarray.\"\n    #     )\n    if isinstance(data, np.ndarray) and target_data_type == \"list\":\n        raise ValueError(\"list expected but function got ndarray.\")\n    elif 
isinstance(data, abc.Iterable):\n        if isinstance(data, dict):\n            print(\n                f'WARNING:\"data\" argument passed to parallel_data_prefetch is a dict: Using only its values and disregarding keys.'\n            )\n            data = list(data.values())\n        if target_data_type == \"ndarray\":\n            data = np.asarray(data)\n        else:\n            data = list(data)\n    else:\n        raise TypeError(\n            f\"The data, that shall be processed parallel has to be either an np.ndarray or an Iterable, but is actually {type(data)}.\"\n        )\n\n    if cpu_intensive:\n        Q = mp.Queue(1000)\n        proc = mp.Process\n    else:\n        Q = Queue(1000)\n        proc = Thread\n    # spawn processes\n    if target_data_type == \"ndarray\":\n        arguments = [\n            [func, Q, part, i, use_worker_id]\n            for i, part in enumerate(np.array_split(data, n_proc))\n        ]\n    else:\n        step = (\n            int(len(data) / n_proc + 1)\n            if len(data) % n_proc != 0\n            else int(len(data) / n_proc)\n        )\n        arguments = [\n            [func, Q, part, i, use_worker_id]\n            for i, part in enumerate(\n                [data[i: i + step] for i in range(0, len(data), step)]\n            )\n        ]\n    processes = []\n    for i in range(n_proc):\n        p = proc(target=_do_parallel_data_prefetch, args=arguments[i])\n        processes += [p]\n\n    # start processes\n    print(f\"Start prefetching...\")\n    import time\n\n    start = time.time()\n    gather_res = [[] for _ in range(n_proc)]\n    try:\n        for p in processes:\n            p.start()\n\n        k = 0\n        while k < n_proc:\n            # get result\n            res = Q.get()\n            if res == \"Done\":\n                k += 1\n            else:\n                gather_res[res[0]] = res[1]\n\n    except Exception as e:\n        print(\"Exception: \", e)\n        for p in processes:\n            p.terminate()\n\n        raise e\n    finally:\n        for p in processes:\n            p.join()\n        print(f\"Prefetching complete. [{time.time() - start} sec.]\")\n\n    if target_data_type == 'ndarray':\n        if not isinstance(gather_res[0], np.ndarray):\n            return np.concatenate([np.asarray(r) for r in gather_res], axis=0)\n\n        # order outputs\n        return np.concatenate(gather_res, axis=0)\n    elif target_data_type == 'list':\n        out = []\n        for r in gather_res:\n            out.extend(r)\n        return out\n    else:\n        return gather_res\n"
  },
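instantiate_from_config above is the glue used throughout the configs: it imports whatever dotted path sits under "target" and calls it with the dict under "params". A small runnable illustration; torch.nn.Linear is just a stand-in for the model and dataset classes the YAML files normally point at:

from ldm.util import instantiate_from_config

cfg = {"target": "torch.nn.Linear", "params": {"in_features": 8, "out_features": 4}}
layer = instantiate_from_config(cfg)
print(type(layer))   # <class 'torch.nn.modules.linear.Linear'>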
  {
    "path": "ldm_exp/main.py",
    "content": "import argparse, os, sys, datetime, glob, importlib, csv\nimport numpy as np\nimport time\nimport torch\nimport torchvision\nimport pytorch_lightning as pl\n\nfrom packaging import version\nfrom omegaconf import OmegaConf\nfrom torch.utils.data import random_split, DataLoader, Dataset, Subset\nfrom functools import partial\nfrom PIL import ImageFile\nImageFile.LOAD_TRUNCATED_IMAGES = True\nfrom PIL import Image\n\n\n\nfrom pytorch_lightning import seed_everything\nfrom pytorch_lightning.trainer import Trainer\nfrom pytorch_lightning.callbacks import ModelCheckpoint, Callback, LearningRateMonitor\nfrom pytorch_lightning.utilities.distributed import rank_zero_only\nfrom pytorch_lightning.utilities import rank_zero_info\n\nfrom ldm.data.base import Txt2ImgIterableBaseDataset\nfrom ldm.util import instantiate_from_config\n\n\ndef get_parser(**parser_kwargs):\n    def str2bool(v):\n        if isinstance(v, bool):\n            return v\n        if v.lower() in (\"yes\", \"true\", \"t\", \"y\", \"1\"):\n            return True\n        elif v.lower() in (\"no\", \"false\", \"f\", \"n\", \"0\"):\n            return False\n        else:\n            raise argparse.ArgumentTypeError(\"Boolean value expected.\")\n\n    parser = argparse.ArgumentParser(**parser_kwargs)\n    parser.add_argument(\n        \"-n\",\n        \"--name\",\n        type=str,\n        const=True,\n        default=\"\",\n        nargs=\"?\",\n        help=\"postfix for logdir\",\n    )\n    parser.add_argument(\n        \"-r\",\n        \"--resume\",\n        type=str,\n        const=True,\n        default=\"\",\n        nargs=\"?\",\n        help=\"resume from logdir or checkpoint in logdir\",\n    )\n    parser.add_argument(\n        \"--load_pruned_model\",\n        type=str,\n        default=None,\n    )\n    parser.add_argument(\n        \"-b\",\n        \"--base\",\n        nargs=\"*\",\n        metavar=\"base_config.yaml\",\n        help=\"paths to base configs. Loaded from left-to-right. 
\"\n             \"Parameters can be overwritten or added with command-line options of the form `--key value`.\",\n        default=list(),\n    )\n    parser.add_argument(\n        \"-t\",\n        \"--train\",\n        type=str2bool,\n        const=True,\n        default=False,\n        nargs=\"?\",\n        help=\"train\",\n    )\n    parser.add_argument(\n        \"--no-test\",\n        type=str2bool,\n        const=True,\n        default=False,\n        nargs=\"?\",\n        help=\"disable test\",\n    )\n    parser.add_argument(\n        \"-p\",\n        \"--project\",\n        help=\"name of new or path to existing project\"\n    )\n    parser.add_argument(\n        \"-d\",\n        \"--debug\",\n        type=str2bool,\n        nargs=\"?\",\n        const=True,\n        default=False,\n        help=\"enable post-mortem debugging\",\n    )\n    parser.add_argument(\n        \"-s\",\n        \"--seed\",\n        type=int,\n        default=23,\n        help=\"seed for seed_everything\",\n    )\n    parser.add_argument(\n        \"-f\",\n        \"--postfix\",\n        type=str,\n        default=\"\",\n        help=\"post-postfix for default name\",\n    )\n    parser.add_argument(\n        \"-l\",\n        \"--logdir\",\n        type=str,\n        default=\"logs\",\n        help=\"directory for logging dat shit\",\n    )\n    parser.add_argument(\n        \"--scale_lr\",\n        type=str2bool,\n        nargs=\"?\",\n        const=True,\n        default=True,\n        help=\"scale base-lr by ngpu * batch_size * n_accumulate\",\n    )\n    return parser\n\n\ndef nondefault_trainer_args(opt):\n    parser = argparse.ArgumentParser()\n    parser = Trainer.add_argparse_args(parser)\n    args = parser.parse_args([])\n    return sorted(k for k in vars(args) if getattr(opt, k) != getattr(args, k))\n\n\nclass WrappedDataset(Dataset):\n    \"\"\"Wraps an arbitrary object with __len__ and __getitem__ into a pytorch dataset\"\"\"\n\n    def __init__(self, dataset):\n        self.data = dataset\n\n    def __len__(self):\n        return len(self.data)\n\n    def __getitem__(self, idx):\n        return self.data[idx]\n\n\ndef worker_init_fn(_):\n    worker_info = torch.utils.data.get_worker_info()\n\n    dataset = worker_info.dataset\n    worker_id = worker_info.id\n\n    if isinstance(dataset, Txt2ImgIterableBaseDataset):\n        split_size = dataset.num_records // worker_info.num_workers\n        # reset num_records to the true number to retain reliable length information\n        dataset.sample_ids = dataset.valid_ids[worker_id * split_size:(worker_id + 1) * split_size]\n        current_id = np.random.choice(len(np.random.get_state()[1]), 1)\n        return np.random.seed(np.random.get_state()[1][current_id] + worker_id)\n    else:\n        return np.random.seed(np.random.get_state()[1][0] + worker_id)\n\n\nclass DataModuleFromConfig(pl.LightningDataModule):\n    def __init__(self, batch_size, train=None, validation=None, test=None, predict=None,\n                 wrap=False, num_workers=None, shuffle_test_loader=False, use_worker_init_fn=False,\n                 shuffle_val_dataloader=False):\n        super().__init__()\n        self.batch_size = batch_size\n        self.dataset_configs = dict()\n        self.num_workers = num_workers if num_workers is not None else batch_size * 2\n        self.use_worker_init_fn = use_worker_init_fn\n        if train is not None:\n            self.dataset_configs[\"train\"] = train\n            self.train_dataloader = self._train_dataloader\n        if 
validation is not None:\n            self.dataset_configs[\"validation\"] = validation\n            self.val_dataloader = partial(self._val_dataloader, shuffle=shuffle_val_dataloader)\n        if test is not None:\n            self.dataset_configs[\"test\"] = test\n            self.test_dataloader = partial(self._test_dataloader, shuffle=shuffle_test_loader)\n        if predict is not None:\n            self.dataset_configs[\"predict\"] = predict\n            self.predict_dataloader = self._predict_dataloader\n        self.wrap = wrap\n\n    def prepare_data(self):\n        for data_cfg in self.dataset_configs.values():\n            instantiate_from_config(data_cfg)\n\n    def setup(self, stage=None):\n        self.datasets = dict(\n            (k, instantiate_from_config(self.dataset_configs[k]))\n            for k in self.dataset_configs)\n        if self.wrap:\n            for k in self.datasets:\n                self.datasets[k] = WrappedDataset(self.datasets[k])\n\n    def _train_dataloader(self):\n        is_iterable_dataset = isinstance(self.datasets['train'], Txt2ImgIterableBaseDataset)\n        if is_iterable_dataset or self.use_worker_init_fn:\n            init_fn = worker_init_fn\n        else:\n            init_fn = None\n        return DataLoader(self.datasets[\"train\"], batch_size=self.batch_size,\n                          num_workers=self.num_workers, shuffle=False if is_iterable_dataset else True,\n                          worker_init_fn=init_fn)\n\n    def _val_dataloader(self, shuffle=False):\n        if isinstance(self.datasets['validation'], Txt2ImgIterableBaseDataset) or self.use_worker_init_fn:\n            init_fn = worker_init_fn\n        else:\n            init_fn = None\n        return DataLoader(self.datasets[\"validation\"],\n                          batch_size=self.batch_size,\n                          num_workers=self.num_workers,\n                          worker_init_fn=init_fn,\n                          shuffle=shuffle)\n\n    def _test_dataloader(self, shuffle=False):\n        is_iterable_dataset = isinstance(self.datasets['train'], Txt2ImgIterableBaseDataset)\n        if is_iterable_dataset or self.use_worker_init_fn:\n            init_fn = worker_init_fn\n        else:\n            init_fn = None\n\n        # do not shuffle dataloader for iterable dataset\n        shuffle = shuffle and (not is_iterable_dataset)\n\n        return DataLoader(self.datasets[\"test\"], batch_size=self.batch_size,\n                          num_workers=self.num_workers, worker_init_fn=init_fn, shuffle=shuffle)\n\n    def _predict_dataloader(self, shuffle=False):\n        if isinstance(self.datasets['predict'], Txt2ImgIterableBaseDataset) or self.use_worker_init_fn:\n            init_fn = worker_init_fn\n        else:\n            init_fn = None\n        return DataLoader(self.datasets[\"predict\"], batch_size=self.batch_size,\n                          num_workers=self.num_workers, worker_init_fn=init_fn)\n\n\nclass SetupCallback(Callback):\n    def __init__(self, resume, now, logdir, ckptdir, cfgdir, config, lightning_config):\n        super().__init__()\n        self.resume = resume\n        self.now = now\n        self.logdir = logdir\n        self.ckptdir = ckptdir\n        self.cfgdir = cfgdir\n        self.config = config\n        self.lightning_config = lightning_config\n\n    def on_keyboard_interrupt(self, trainer, pl_module):\n        if trainer.global_rank == 0:\n            print(\"Summoning checkpoint.\")\n            ckpt_path = os.path.join(self.ckptdir, 
\"last.ckpt\")\n            trainer.save_checkpoint(ckpt_path)\n\n    def on_pretrain_routine_start(self, trainer, pl_module):\n        if trainer.global_rank == 0:\n            # Create logdirs and save configs\n            os.makedirs(self.logdir, exist_ok=True)\n            os.makedirs(self.ckptdir, exist_ok=True)\n            os.makedirs(self.cfgdir, exist_ok=True)\n\n            with open(os.path.join(self.ckptdir, 'train.sh'), 'w') as f:\n                f.write('python ' + ' '.join(sys.argv))\n                \n            if \"callbacks\" in self.lightning_config:\n                if 'metrics_over_trainsteps_checkpoint' in self.lightning_config['callbacks']:\n                    os.makedirs(os.path.join(self.ckptdir, 'trainstep_checkpoints'), exist_ok=True)\n            print(\"Project config\")\n            print(OmegaConf.to_yaml(self.config))\n            OmegaConf.save(self.config,\n                           os.path.join(self.cfgdir, \"{}-project.yaml\".format(self.now)))\n\n            print(\"Lightning config\")\n            print(OmegaConf.to_yaml(self.lightning_config))\n            OmegaConf.save(OmegaConf.create({\"lightning\": self.lightning_config}),\n                           os.path.join(self.cfgdir, \"{}-lightning.yaml\".format(self.now)))\n\n        else:\n            # ModelCheckpoint callback created log directory --- remove it\n            if not self.resume and os.path.exists(self.logdir):\n                dst, name = os.path.split(self.logdir)\n                dst = os.path.join(dst, \"child_runs\", name)\n                os.makedirs(os.path.split(dst)[0], exist_ok=True)\n                try:\n                    os.rename(self.logdir, dst)\n                except FileNotFoundError:\n                    pass\n\n\nclass ImageLogger(Callback):\n    def __init__(self, batch_frequency, max_images, clamp=True, increase_log_steps=False,\n                 rescale=True, disabled=False, log_on_batch_idx=False, log_first_step=False,\n                 log_images_kwargs=None):\n        super().__init__()\n        self.rescale = rescale\n        self.batch_freq = batch_frequency\n        self.max_images = max_images\n        self.logger_log_images = {\n            pl.loggers.TestTubeLogger: self._testtube,\n        }\n        self.log_steps = [2 ** n for n in range(int(np.log2(self.batch_freq)) + 1)]\n        if not increase_log_steps:\n            self.log_steps = [self.batch_freq]\n        self.clamp = clamp\n        self.disabled = disabled\n        self.log_on_batch_idx = log_on_batch_idx\n        self.log_images_kwargs = log_images_kwargs if log_images_kwargs else {}\n        self.log_first_step = log_first_step\n\n    @rank_zero_only\n    def _testtube(self, pl_module, images, batch_idx, split):\n        for k in images:\n            grid = torchvision.utils.make_grid(images[k])\n            grid = (grid + 1.0) / 2.0  # -1,1 -> 0,1; c,h,w\n\n            tag = f\"{split}/{k}\"\n            pl_module.logger.experiment.add_image(\n                tag, grid,\n                global_step=pl_module.global_step)\n\n    @rank_zero_only\n    def log_local(self, save_dir, split, images,\n                  global_step, current_epoch, batch_idx):\n        root = os.path.join(save_dir, \"images\", split)\n        for k in images:\n            grid = torchvision.utils.make_grid(images[k], nrow=4)\n            if self.rescale:\n                grid = (grid + 1.0) / 2.0  # -1,1 -> 0,1; c,h,w\n            grid = grid.transpose(0, 1).transpose(1, 2).squeeze(-1)\n            grid = 
grid.numpy()\n            grid = (grid * 255).astype(np.uint8)\n            filename = \"{}_gs-{:06}_e-{:06}_b-{:06}.png\".format(\n                k,\n                global_step,\n                current_epoch,\n                batch_idx)\n            path = os.path.join(root, filename)\n            os.makedirs(os.path.split(path)[0], exist_ok=True)\n            Image.fromarray(grid).save(path)\n\n    def log_img(self, pl_module, batch, batch_idx, split=\"train\"):\n        check_idx = batch_idx if self.log_on_batch_idx else pl_module.global_step\n        if (self.check_frequency(check_idx) and  # batch_idx % self.batch_freq == 0\n                hasattr(pl_module, \"log_images\") and\n                callable(pl_module.log_images) and\n                self.max_images > 0):\n            logger = type(pl_module.logger)\n\n            is_train = pl_module.training\n            if is_train:\n                pl_module.eval()\n\n            with torch.no_grad():\n                images = pl_module.log_images(batch, split=split, **self.log_images_kwargs)\n\n            for k in images:\n                N = min(images[k].shape[0], self.max_images)\n                images[k] = images[k][:N]\n                if isinstance(images[k], torch.Tensor):\n                    images[k] = images[k].detach().cpu()\n                    if self.clamp:\n                        images[k] = torch.clamp(images[k], -1., 1.)\n\n            self.log_local(pl_module.logger.save_dir, split, images,\n                           pl_module.global_step, pl_module.current_epoch, batch_idx)\n\n            logger_log_images = self.logger_log_images.get(logger, lambda *args, **kwargs: None)\n            logger_log_images(pl_module, images, pl_module.global_step, split)\n\n            if is_train:\n                pl_module.train()\n\n    def check_frequency(self, check_idx):\n        if ((check_idx % self.batch_freq) == 0 or (check_idx in self.log_steps)) and (\n                check_idx > 0 or self.log_first_step):\n            try:\n                self.log_steps.pop(0)\n            except IndexError as e:\n                print(e)\n                pass\n            return True\n        return False\n\n    def on_train_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):\n        if not self.disabled and (pl_module.global_step > 0 or self.log_first_step):\n            self.log_img(pl_module, batch, batch_idx, split=\"train\")\n\n    def on_validation_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):\n        if not self.disabled and pl_module.global_step > 0:\n            self.log_img(pl_module, batch, batch_idx, split=\"val\")\n        if hasattr(pl_module, 'calibrate_grad_norm'):\n            if (pl_module.calibrate_grad_norm and batch_idx % 25 == 0) and batch_idx > 0:\n                self.log_gradients(trainer, pl_module, batch_idx=batch_idx)\n\n\nclass CUDACallback(Callback):\n    # see https://github.com/SeanNaren/minGPT/blob/master/mingpt/callback.py\n    def on_train_epoch_start(self, trainer, pl_module):\n        # Reset the memory use counter\n        torch.cuda.reset_peak_memory_stats(trainer.root_gpu)\n        torch.cuda.synchronize(trainer.root_gpu)\n        self.start_time = time.time()\n\n    def on_train_epoch_end(self, trainer, pl_module, outputs):\n        torch.cuda.synchronize(trainer.root_gpu)\n        max_memory = torch.cuda.max_memory_allocated(trainer.root_gpu) / 2 ** 20\n        epoch_time = time.time() - self.start_time\n\n        try:\n      
      max_memory = trainer.training_type_plugin.reduce(max_memory)\n            epoch_time = trainer.training_type_plugin.reduce(epoch_time)\n\n            rank_zero_info(f\"Average Epoch time: {epoch_time:.2f} seconds\")\n            rank_zero_info(f\"Average Peak memory {max_memory:.2f}MiB\")\n        except AttributeError:\n            pass\n\n\nif __name__ == \"__main__\":\n    # custom parser to specify config files, train, test and debug mode,\n    # postfix, resume.\n    # `--key value` arguments are interpreted as arguments to the trainer.\n    # `nested.key=value` arguments are interpreted as config parameters.\n    # configs are merged from left-to-right followed by command line parameters.\n\n    # model:\n    #   base_learning_rate: float\n    #   target: path to lightning module\n    #   params:\n    #       key: value\n    # data:\n    #   target: main.DataModuleFromConfig\n    #   params:\n    #      batch_size: int\n    #      wrap: bool\n    #      train:\n    #          target: path to train dataset\n    #          params:\n    #              key: value\n    #      validation:\n    #          target: path to validation dataset\n    #          params:\n    #              key: value\n    #      test:\n    #          target: path to test dataset\n    #          params:\n    #              key: value\n    # lightning: (optional, has sane defaults and can be specified on cmdline)\n    #   trainer:\n    #       additional arguments to trainer\n    #   logger:\n    #       logger to instantiate\n    #   modelcheckpoint:\n    #       modelcheckpoint to instantiate\n    #   callbacks:\n    #       callback1:\n    #           target: importpath\n    #           params:\n    #               key: value\n\n    now = datetime.datetime.now().strftime(\"%Y-%m-%dT%H-%M-%S\")\n\n    # add cwd for convenience and to make classes in this file available when\n    # running as `python main.py`\n    # (in particular `main.DataModuleFromConfig`)\n    sys.path.append(os.getcwd())\n\n    parser = get_parser()\n    parser = Trainer.add_argparse_args(parser)\n\n    opt, unknown = parser.parse_known_args()\n    if opt.name and opt.resume:\n        raise ValueError(\n            \"-n/--name and -r/--resume cannot be specified both.\"\n            \"If you want to resume training in a new log folder, \"\n            \"use -n/--name in combination with --resume_from_checkpoint\"\n        )\n    if opt.resume:\n        if not os.path.exists(opt.resume):\n            raise ValueError(\"Cannot find {}\".format(opt.resume))\n        if os.path.isfile(opt.resume):\n            paths = opt.resume.split(\"/\")\n            # idx = len(paths)-paths[::-1].index(\"logs\")+1\n            # logdir = \"/\".join(paths[:idx])\n            logdir = \"/\".join(paths[:-2])\n            ckpt = opt.resume\n        else:\n            assert os.path.isdir(opt.resume), opt.resume\n            logdir = opt.resume.rstrip(\"/\")\n            ckpt = os.path.join(logdir, \"checkpoints\", \"last.ckpt\")\n\n        opt.resume_from_checkpoint = ckpt\n        base_configs = sorted(glob.glob(os.path.join(logdir, \"configs/*.yaml\")))\n        opt.base = base_configs + opt.base\n        _tmp = logdir.split(\"/\")\n        nowname = _tmp[-1]\n    else:\n        if opt.name:\n            name = \"_\" + opt.name\n        elif opt.base:\n            cfg_fname = os.path.split(opt.base[0])[-1]\n            cfg_name = os.path.splitext(cfg_fname)[0]\n            name = \"_\" + cfg_name\n        else:\n            name = \"\"\n        nowname = 
now + name + opt.postfix\n        logdir = os.path.join(opt.logdir, nowname)\n\n    ckptdir = os.path.join(logdir, \"checkpoints\")\n    cfgdir = os.path.join(logdir, \"configs\")\n    seed_everything(opt.seed)\n\n    try:\n        # init and save configs\n        configs = [OmegaConf.load(cfg) for cfg in opt.base]\n        cli = OmegaConf.from_dotlist(unknown)\n        config = OmegaConf.merge(*configs, cli)\n        lightning_config = config.pop(\"lightning\", OmegaConf.create())\n        # merge trainer cli with config\n        trainer_config = lightning_config.get(\"trainer\", OmegaConf.create())\n        # default to ddp\n        trainer_config[\"accelerator\"] = \"ddp\"\n        for k in nondefault_trainer_args(opt):\n            trainer_config[k] = getattr(opt, k)\n        trainer_config.pop('load_pruned_model', None)\n        if not \"gpus\" in trainer_config:\n            del trainer_config[\"accelerator\"]\n            cpu = True\n        else:\n            gpuinfo = trainer_config[\"gpus\"]\n            print(f\"Running on GPUs {gpuinfo}\")\n            cpu = False\n        trainer_opt = argparse.Namespace(**trainer_config)\n        lightning_config.trainer = trainer_config\n\n        # model\n        def load_model_from_config(config, ckpt):\n            print(f\"Loading model from {ckpt}\")\n            pl_sd = torch.load(ckpt)#, map_location=\"cpu\")\n            sd = pl_sd[\"state_dict\"]\n            model = instantiate_from_config(config.model)\n            m, u = model.load_state_dict(sd, strict=False)\n            model.cuda()\n            model.eval()\n            return model\n        \n        def get_model():\n            config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n            model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n            return model\n        \n\n        if opt.load_pruned_model is not None:\n            print(\"Loading pruned model from {}\".format(opt.load_pruned_model))\n            model = torch.load(opt.load_pruned_model)\n        else:\n            model = get_model()\n\n        # trainer and callbacks\n        trainer_kwargs = dict()\n\n        # default logger configs\n        default_logger_cfgs = {\n            \"wandb\": {\n                \"target\": \"pytorch_lightning.loggers.WandbLogger\",\n                \"params\": {\n                    \"name\": nowname,\n                    \"save_dir\": logdir,\n                    \"offline\": opt.debug,\n                    \"id\": nowname,\n                }\n            },\n            \"testtube\": {\n                \"target\": \"pytorch_lightning.loggers.TestTubeLogger\",\n                \"params\": {\n                    \"name\": \"testtube\",\n                    \"save_dir\": logdir,\n                }\n            },\n        }\n        default_logger_cfg = default_logger_cfgs[\"testtube\"]\n        if \"logger\" in lightning_config:\n            logger_cfg = lightning_config.logger\n        else:\n            logger_cfg = OmegaConf.create()\n        logger_cfg = OmegaConf.merge(default_logger_cfg, logger_cfg)\n        trainer_kwargs[\"logger\"] = instantiate_from_config(logger_cfg)\n\n        # modelcheckpoint - use TrainResult/EvalResult(checkpoint_on=metric) to\n        # specify which metric is used to determine best models\n        default_modelckpt_cfg = {\n            \"target\": \"pytorch_lightning.callbacks.ModelCheckpoint\",\n            \"params\": {\n                \"dirpath\": ckptdir,\n                
\"filename\": \"{epoch:06}\",\n                \"verbose\": True,\n                \"save_last\": True,\n            }\n        }\n        if hasattr(model, \"monitor\"):\n            print(f\"Monitoring {model.monitor} as checkpoint metric.\")\n            default_modelckpt_cfg[\"params\"][\"monitor\"] = model.monitor\n            default_modelckpt_cfg[\"params\"][\"save_top_k\"] = 3\n\n        if \"modelcheckpoint\" in lightning_config:\n            modelckpt_cfg = lightning_config.modelcheckpoint\n        else:\n            modelckpt_cfg =  OmegaConf.create()\n        modelckpt_cfg = OmegaConf.merge(default_modelckpt_cfg, modelckpt_cfg)\n        print(f\"Merged modelckpt-cfg: \\n{modelckpt_cfg}\")\n        if version.parse(pl.__version__) < version.parse('1.4.0'):\n            trainer_kwargs[\"checkpoint_callback\"] = instantiate_from_config(modelckpt_cfg)\n\n        # add callback which sets up log directory\n        default_callbacks_cfg = {\n            \"setup_callback\": {\n                \"target\": \"main.SetupCallback\",\n                \"params\": {\n                    \"resume\": opt.resume,\n                    \"now\": now,\n                    \"logdir\": logdir,\n                    \"ckptdir\": ckptdir,\n                    \"cfgdir\": cfgdir,\n                    \"config\": config,\n                    \"lightning_config\": lightning_config,\n                }\n            },\n            \"image_logger\": {\n                \"target\": \"main.ImageLogger\",\n                \"params\": {\n                    \"batch_frequency\": 750,\n                    \"max_images\": 4,\n                    \"clamp\": True\n                }\n            },\n            \"learning_rate_logger\": {\n                \"target\": \"main.LearningRateMonitor\",\n                \"params\": {\n                    \"logging_interval\": \"step\",\n                    # \"log_momentum\": True\n                }\n            },\n            \"cuda_callback\": {\n                \"target\": \"main.CUDACallback\"\n            },\n        }\n        if version.parse(pl.__version__) >= version.parse('1.4.0'):\n            default_callbacks_cfg.update({'checkpoint_callback': modelckpt_cfg})\n\n        if \"callbacks\" in lightning_config:\n            callbacks_cfg = lightning_config.callbacks\n        else:\n            callbacks_cfg = OmegaConf.create()\n\n        if 'metrics_over_trainsteps_checkpoint' in callbacks_cfg:\n            print(\n                'Caution: Saving checkpoints every n train steps without deleting. 
This might require some free space.')\n            default_metrics_over_trainsteps_ckpt_dict = {\n                'metrics_over_trainsteps_checkpoint':\n                    {\"target\": 'pytorch_lightning.callbacks.ModelCheckpoint',\n                     'params': {\n                         \"dirpath\": os.path.join(ckptdir, 'trainstep_checkpoints'),\n                         \"filename\": \"{epoch:06}-{step:09}\",\n                         \"verbose\": True,\n                         'save_top_k': -1,\n                         'every_n_train_steps': 10000,\n                         'save_weights_only': True\n                     }\n                     }\n            }\n            default_callbacks_cfg.update(default_metrics_over_trainsteps_ckpt_dict)\n\n        callbacks_cfg = OmegaConf.merge(default_callbacks_cfg, callbacks_cfg)\n        if 'ignore_keys_callback' in callbacks_cfg and hasattr(trainer_opt, 'resume_from_checkpoint'):\n            callbacks_cfg.ignore_keys_callback.params['ckpt_path'] = trainer_opt.resume_from_checkpoint\n        elif 'ignore_keys_callback' in callbacks_cfg:\n            del callbacks_cfg['ignore_keys_callback']\n\n        trainer_kwargs[\"callbacks\"] = [instantiate_from_config(callbacks_cfg[k]) for k in callbacks_cfg]\n\n        trainer = Trainer.from_argparse_args(trainer_opt, **trainer_kwargs)\n        trainer.logdir = logdir  ###\n\n        # data\n        data = instantiate_from_config(config.data)\n        # NOTE according to https://pytorch-lightning.readthedocs.io/en/latest/datamodules.html\n        # calling these ourselves should not be necessary but it is.\n        # lightning still takes care of proper multiprocessing though\n        data.prepare_data()\n        data.setup()\n        print(\"#### Data #####\")\n        for k in data.datasets:\n            print(f\"{k}, {data.datasets[k].__class__.__name__}, {len(data.datasets[k])}\")\n\n        # configure learning rate\n        bs, base_lr = config.data.params.batch_size, config.model.base_learning_rate\n        if not cpu:\n            ngpu = len(lightning_config.trainer.gpus.strip(\",\").split(','))\n        else:\n            ngpu = 1\n        if 'accumulate_grad_batches' in lightning_config.trainer:\n            accumulate_grad_batches = lightning_config.trainer.accumulate_grad_batches\n        else:\n            accumulate_grad_batches = 1\n        print(f\"accumulate_grad_batches = {accumulate_grad_batches}\")\n        lightning_config.trainer.accumulate_grad_batches = accumulate_grad_batches\n        if opt.scale_lr:\n            model.learning_rate = accumulate_grad_batches * ngpu * bs * base_lr\n            print(\n                \"Setting learning rate to {:.2e} = {} (accumulate_grad_batches) * {} (num_gpus) * {} (batchsize) * {:.2e} (base_lr)\".format(\n                    model.learning_rate, accumulate_grad_batches, ngpu, bs, base_lr))\n        else:\n            model.learning_rate = base_lr\n            print(\"++++ NOT USING LR SCALING ++++\")\n            print(f\"Setting learning rate to {model.learning_rate:.2e}\")\n\n\n        # allow checkpointing via USR1\n        def melk(*args, **kwargs):\n            # run all checkpoint hooks\n            if trainer.global_rank == 0:\n                print(\"Summoning checkpoint.\")\n                ckpt_path = os.path.join(ckptdir, \"last.ckpt\")\n                trainer.save_checkpoint(ckpt_path)\n\n\n        def divein(*args, **kwargs):\n            if trainer.global_rank == 0:\n                import pudb;\n                
pudb.set_trace()\n\n\n        import signal\n\n        signal.signal(signal.SIGUSR1, melk)\n        signal.signal(signal.SIGUSR2, divein)\n\n        # run\n        if opt.train:\n            try:\n                trainer.fit(model, data)\n            except Exception:\n                melk()\n                raise\n        if not opt.no_test and not trainer.interrupted:\n            trainer.test(model, data)\n    except Exception:\n        if opt.debug and trainer.global_rank == 0:\n            try:\n                import pudb as debugger\n            except ImportError:\n                import pdb as debugger\n            debugger.post_mortem()\n        raise\n    finally:\n        # move newly created debug project to debug_runs\n        if opt.debug and not opt.resume and trainer.global_rank == 0:\n            dst, name = os.path.split(logdir)\n            dst = os.path.join(dst, \"debug_runs\", name)\n            os.makedirs(os.path.split(dst)[0], exist_ok=True)\n            os.rename(logdir, dst)\n        if trainer.global_rank == 0:\n            print(trainer.profiler.summary())\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/kl-f16/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: val/rec_loss\n    embed_dim: 16\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 1.0e-06\n        disc_weight: 0.5\n    ddconfig:\n      double_z: true\n      z_channels: 16\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 1\n      - 2\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions:\n      - 16\n      dropout: 0.0\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 6\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/kl-f32/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: val/rec_loss\n    embed_dim: 64\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 1.0e-06\n        disc_weight: 0.5\n    ddconfig:\n      double_z: true\n      z_channels: 64\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 1\n      - 2\n      - 2\n      - 4\n      - 4\n      num_res_blocks: 2\n      attn_resolutions:\n      - 16\n      - 8\n      dropout: 0.0\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 6\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/kl-f4/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: val/rec_loss\n    embed_dim: 3\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 1.0e-06\n        disc_weight: 0.5\n    ddconfig:\n      double_z: true\n      z_channels: 3\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions: []\n      dropout: 0.0\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 10\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/kl-f8/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.AutoencoderKL\n  params:\n    monitor: val/rec_loss\n    embed_dim: 4\n    lossconfig:\n      target: ldm.modules.losses.LPIPSWithDiscriminator\n      params:\n        disc_start: 50001\n        kl_weight: 1.0e-06\n        disc_weight: 0.5\n    ddconfig:\n      double_z: true\n      z_channels: 4\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 2\n      - 4\n      - 4\n      num_res_blocks: 2\n      attn_resolutions: []\n      dropout: 0.0\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 4\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/vq-f16/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.VQModel\n  params:\n    embed_dim: 8\n    n_embed: 16384\n    ddconfig:\n      double_z: false\n      z_channels: 8\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 1\n      - 2\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions:\n      - 16\n      dropout: 0.0\n    lossconfig:\n      target: taming.modules.losses.vqperceptual.VQLPIPSWithDiscriminator\n      params:\n        disc_conditional: false\n        disc_in_channels: 3\n        disc_start: 250001\n        disc_weight: 0.75\n        disc_num_layers: 2\n        codebook_weight: 1.0\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 14\n    num_workers: 20\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/vq-f4/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.VQModel\n  params:\n    embed_dim: 3\n    n_embed: 8192\n    monitor: val/rec_loss\n\n    ddconfig:\n      double_z: false\n      z_channels: 3\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions: []\n      dropout: 0.0\n    lossconfig:\n      target: taming.modules.losses.vqperceptual.VQLPIPSWithDiscriminator\n      params:\n        disc_conditional: false\n        disc_in_channels: 3\n        disc_start: 0\n        disc_weight: 0.75\n        codebook_weight: 1.0\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 8\n    num_workers: 16\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/vq-f4-noattn/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.VQModel\n  params:\n    embed_dim: 3\n    n_embed: 8192\n    monitor: val/rec_loss\n\n    ddconfig:\n      attn_type: none\n      double_z: false\n      z_channels: 3\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions: []\n      dropout: 0.0\n    lossconfig:\n      target: taming.modules.losses.vqperceptual.VQLPIPSWithDiscriminator\n      params:\n        disc_conditional: false\n        disc_in_channels: 3\n        disc_start: 11\n        disc_weight: 0.75\n        codebook_weight: 1.0\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 8\n    num_workers: 12\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/vq-f8/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.VQModel\n  params:\n    embed_dim: 4\n    n_embed: 16384\n    monitor: val/rec_loss\n    ddconfig:\n      double_z: false\n      z_channels: 4\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 2\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions:\n      - 32\n      dropout: 0.0\n    lossconfig:\n      target: taming.modules.losses.vqperceptual.VQLPIPSWithDiscriminator\n      params:\n        disc_conditional: false\n        disc_in_channels: 3\n        disc_num_layers: 2\n        disc_start: 1\n        disc_weight: 0.6\n        codebook_weight: 1.0\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 10\n    num_workers: 20\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/first_stage_models/vq-f8-n256/config.yaml",
    "content": "model:\n  base_learning_rate: 4.5e-06\n  target: ldm.models.autoencoder.VQModel\n  params:\n    embed_dim: 4\n    n_embed: 256\n    monitor: val/rec_loss\n    ddconfig:\n      double_z: false\n      z_channels: 4\n      resolution: 256\n      in_channels: 3\n      out_ch: 3\n      ch: 128\n      ch_mult:\n      - 1\n      - 2\n      - 2\n      - 4\n      num_res_blocks: 2\n      attn_resolutions:\n      - 32\n      dropout: 0.0\n    lossconfig:\n      target: taming.modules.losses.vqperceptual.VQLPIPSWithDiscriminator\n      params:\n        disc_conditional: false\n        disc_in_channels: 3\n        disc_start: 250001\n        disc_weight: 0.75\n        codebook_weight: 1.0\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 10\n    num_workers: 20\n    wrap: true\n    train:\n      target: ldm.data.openimages.FullOpenImagesTrain\n      params:\n        size: 384\n        crop_size: 256\n    validation:\n      target: ldm.data.openimages.FullOpenImagesValidation\n      params:\n        size: 384\n        crop_size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/bsr_sr/config.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0155\n    log_every_t: 100\n    timesteps: 1000\n    loss_type: l2\n    first_stage_key: image\n    cond_stage_key: LR_image\n    image_size: 64\n    channels: 3\n    concat_mode: true\n    cond_stage_trainable: false\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 6\n        out_channels: 3\n        model_channels: 160\n        attention_resolutions:\n        - 16\n        - 8\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 2\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        monitor: val/rec_loss\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: torch.nn.Identity\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 64\n    wrap: false\n    num_workers: 12\n    train:\n      target: ldm.data.openimages.SuperresOpenImagesAdvancedTrain\n      params:\n        size: 256\n        degradation: bsrgan_light\n        downscale_f: 4\n        min_crop_f: 0.5\n        max_crop_f: 1.0\n        random_crop: true\n    validation:\n      target: ldm.data.openimages.SuperresOpenImagesAdvancedValidation\n      params:\n        size: 256\n        degradation: bsrgan_light\n        downscale_f: 4\n        min_crop_f: 0.5\n        max_crop_f: 1.0\n        random_crop: true\n"
  },
  {
    "path": "ldm_exp/models/ldm/celeba256/config.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: class_label\n    image_size: 64\n    channels: 3\n    cond_stage_trainable: false\n    concat_mode: false\n    monitor: val/loss\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 224\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config: __is_unconditional__\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 48\n    num_workers: 5\n    wrap: false\n    train:\n      target: ldm.data.faceshq.CelebAHQTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.faceshq.CelebAHQValidation\n      params:\n        size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/cin256/config.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: class_label\n    image_size: 32\n    channels: 4\n    cond_stage_trainable: true\n    conditioning_key: crossattn\n    monitor: val/loss_simple_ema\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 32\n        in_channels: 4\n        out_channels: 4\n        model_channels: 256\n        attention_resolutions:\n        - 4\n        - 2\n        - 1\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 4\n        num_head_channels: 32\n        use_spatial_transformer: true\n        transformer_depth: 1\n        context_dim: 512\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 4\n        n_embed: 16384\n        ddconfig:\n          double_z: false\n          z_channels: 4\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions:\n          - 32\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.ClassEmbedder\n      params:\n        embed_dim: 512\n        key: class_label\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 64\n    num_workers: 12\n    wrap: false\n    train:\n      target: ldm.data.imagenet.ImageNetTrain\n      params:\n        config:\n          size: 256\n    validation:\n      target: ldm.data.imagenet.ImageNetValidation\n      params:\n        config:\n          size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/ffhq256/config.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: class_label\n    image_size: 64\n    channels: 3\n    cond_stage_trainable: false\n    concat_mode: false\n    monitor: val/loss\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 224\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config: __is_unconditional__\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 42\n    num_workers: 5\n    wrap: false\n    train:\n      target: ldm.data.faceshq.FFHQTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.faceshq.FFHQValidation\n      params:\n        size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/inpainting_big/config.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0205\n    log_every_t: 100\n    timesteps: 1000\n    loss_type: l1\n    first_stage_key: image\n    cond_stage_key: masked_image\n    image_size: 64\n    channels: 3\n    concat_mode: true\n    monitor: val/loss\n    scheduler_config:\n      target: ldm.lr_scheduler.LambdaWarmUpCosineScheduler\n      params:\n        verbosity_interval: 0\n        warm_up_steps: 1000\n        max_decay_steps: 50000\n        lr_start: 0.001\n        lr_max: 0.1\n        lr_min: 0.0001\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 7\n        out_channels: 3\n        model_channels: 256\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_heads: 8\n        resblock_updown: true\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        monitor: val/rec_loss\n        ddconfig:\n          attn_type: none\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: ldm.modules.losses.contperceptual.DummyLoss\n    cond_stage_config: __is_first_stage__\n"
  },
  {
    "path": "ldm_exp/models/ldm/layout2img-openimages256/config.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0205\n    log_every_t: 100\n    timesteps: 1000\n    loss_type: l1\n    first_stage_key: image\n    cond_stage_key: coordinates_bbox\n    image_size: 64\n    channels: 3\n    conditioning_key: crossattn\n    cond_stage_trainable: true\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 128\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n        use_spatial_transformer: true\n        transformer_depth: 3\n        context_dim: 512\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        monitor: val/rec_loss\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.BERTEmbedder\n      params:\n        n_embed: 512\n        n_layer: 16\n        vocab_size: 8192\n        max_seq_len: 92\n        use_tokenizer: false\n    monitor: val/loss_simple_ema\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 24\n    wrap: false\n    num_workers: 10\n    train:\n      target: ldm.data.openimages.OpenImagesBBoxTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.openimages.OpenImagesBBoxValidation\n      params:\n        size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/lsun_beds256/config.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: class_label\n    image_size: 64\n    channels: 3\n    cond_stage_trainable: false\n    concat_mode: false\n    monitor: val/loss\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 224\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 4\n        num_head_channels: 32\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config: __is_unconditional__\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 48\n    num_workers: 5\n    wrap: false\n    train:\n      target: ldm.data.lsun.LSUNBedroomsTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.lsun.LSUNBedroomsValidation\n      params:\n        size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/lsun_churches256/config.yaml",
    "content": "model:\n  base_learning_rate: 5.0e-05\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0155\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    loss_type: l1\n    first_stage_key: image\n    cond_stage_key: image\n    image_size: 32\n    channels: 4\n    cond_stage_trainable: false\n    concat_mode: false\n    scale_by_std: true\n    monitor: val/loss_simple_ema\n    scheduler_config:\n      target: ldm.lr_scheduler.LambdaLinearScheduler\n      params:\n        warm_up_steps:\n        - 10000\n        cycle_lengths:\n        - 10000000000000\n        f_start:\n        - 1.0e-06\n        f_max:\n        - 1.0\n        f_min:\n        - 1.0\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 32\n        in_channels: 4\n        out_channels: 4\n        model_channels: 192\n        attention_resolutions:\n        - 1\n        - 2\n        - 4\n        - 8\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 2\n        - 4\n        - 4\n        num_heads: 8\n        use_scale_shift_norm: true\n        resblock_updown: true\n    first_stage_config:\n      target: ldm.models.autoencoder.AutoencoderKL\n      params:\n        embed_dim: 4\n        monitor: val/rec_loss\n        ddconfig:\n          double_z: true\n          z_channels: 4\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n\n    cond_stage_config: '__is_unconditional__'\n\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 96\n    num_workers: 5\n    wrap: false\n    train:\n      target: ldm.data.lsun.LSUNChurchesTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.lsun.LSUNChurchesValidation\n      params:\n        size: 256\n"
  },
  {
    "path": "ldm_exp/models/ldm/semantic_synthesis256/config.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0205\n    log_every_t: 100\n    timesteps: 1000\n    loss_type: l1\n    first_stage_key: image\n    cond_stage_key: segmentation\n    image_size: 64\n    channels: 3\n    concat_mode: true\n    cond_stage_trainable: true\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 6\n        out_channels: 3\n        model_channels: 128\n        attention_resolutions:\n        - 32\n        - 16\n        - 8\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 4\n        - 8\n        num_heads: 8\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.SpatialRescaler\n      params:\n        n_stages: 2\n        in_channels: 182\n        out_channels: 3\n"
  },
  {
    "path": "ldm_exp/models/ldm/semantic_synthesis512/config.yaml",
    "content": "model:\n  base_learning_rate: 1.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0205\n    log_every_t: 100\n    timesteps: 1000\n    loss_type: l1\n    first_stage_key: image\n    cond_stage_key: segmentation\n    image_size: 128\n    channels: 3\n    concat_mode: true\n    cond_stage_trainable: true\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 128\n        in_channels: 6\n        out_channels: 3\n        model_channels: 128\n        attention_resolutions:\n        - 32\n        - 16\n        - 8\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 4\n        - 8\n        num_heads: 8\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        monitor: val/rec_loss\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.SpatialRescaler\n      params:\n        n_stages: 2\n        in_channels: 182\n        out_channels: 3\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 8\n    wrap: false\n    num_workers: 10\n    train:\n      target: ldm.data.landscapes.RFWTrain\n      params:\n        size: 768\n        crop_size: 512\n        segmentation_to_float32: true\n    validation:\n      target: ldm.data.landscapes.RFWValidation\n      params:\n        size: 768\n        crop_size: 512\n        segmentation_to_float32: true\n"
  },
  {
    "path": "ldm_exp/models/ldm/text2img256/config.yaml",
    "content": "model:\n  base_learning_rate: 2.0e-06\n  target: ldm.models.diffusion.ddpm.LatentDiffusion\n  params:\n    linear_start: 0.0015\n    linear_end: 0.0195\n    num_timesteps_cond: 1\n    log_every_t: 200\n    timesteps: 1000\n    first_stage_key: image\n    cond_stage_key: caption\n    image_size: 64\n    channels: 3\n    cond_stage_trainable: true\n    conditioning_key: crossattn\n    monitor: val/loss_simple_ema\n    unet_config:\n      target: ldm.modules.diffusionmodules.openaimodel.UNetModel\n      params:\n        image_size: 64\n        in_channels: 3\n        out_channels: 3\n        model_channels: 192\n        attention_resolutions:\n        - 8\n        - 4\n        - 2\n        num_res_blocks: 2\n        channel_mult:\n        - 1\n        - 2\n        - 3\n        - 5\n        num_head_channels: 32\n        use_spatial_transformer: true\n        transformer_depth: 1\n        context_dim: 640\n    first_stage_config:\n      target: ldm.models.autoencoder.VQModelInterface\n      params:\n        embed_dim: 3\n        n_embed: 8192\n        ddconfig:\n          double_z: false\n          z_channels: 3\n          resolution: 256\n          in_channels: 3\n          out_ch: 3\n          ch: 128\n          ch_mult:\n          - 1\n          - 2\n          - 4\n          num_res_blocks: 2\n          attn_resolutions: []\n          dropout: 0.0\n        lossconfig:\n          target: torch.nn.Identity\n    cond_stage_config:\n      target: ldm.modules.encoders.modules.BERTEmbedder\n      params:\n        n_embed: 640\n        n_layer: 32\ndata:\n  target: main.DataModuleFromConfig\n  params:\n    batch_size: 28\n    num_workers: 10\n    wrap: false\n    train:\n      target: ldm.data.previews.pytorch_dataset.PreviewsTrain\n      params:\n        size: 256\n    validation:\n      target: ldm.data.previews.pytorch_dataset.PreviewsValidation\n      params:\n        size: 256\n"
  },
  {
    "path": "ldm_exp/notebook_helpers.py",
    "content": "from torchvision.datasets.utils import download_url\nfrom ldm.util import instantiate_from_config\nimport torch\nimport os\n# todo ?\nfrom google.colab import files\nfrom IPython.display import Image as ipyimg\nimport ipywidgets as widgets\nfrom PIL import Image\nfrom numpy import asarray\nfrom einops import rearrange, repeat\nimport torch, torchvision\nfrom ldm.models.diffusion.ddim import DDIMSampler\nfrom ldm.util import ismap\nimport time\nfrom omegaconf import OmegaConf\n\n\ndef download_models(mode):\n\n    if mode == \"superresolution\":\n        # this is the small bsr light model\n        url_conf = 'https://heibox.uni-heidelberg.de/f/31a76b13ea27482981b4/?dl=1'\n        url_ckpt = 'https://heibox.uni-heidelberg.de/f/578df07c8fc04ffbadf3/?dl=1'\n\n        path_conf = 'logs/diffusion/superresolution_bsr/configs/project.yaml'\n        path_ckpt = 'logs/diffusion/superresolution_bsr/checkpoints/last.ckpt'\n\n        download_url(url_conf, path_conf)\n        download_url(url_ckpt, path_ckpt)\n\n        path_conf = path_conf + '/?dl=1' # fix it\n        path_ckpt = path_ckpt + '/?dl=1' # fix it\n        return path_conf, path_ckpt\n\n    else:\n        raise NotImplementedError\n\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    global_step = pl_sd[\"global_step\"]\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return {\"model\": model}, global_step\n\n\ndef get_model(mode):\n    path_conf, path_ckpt = download_models(mode)\n    config = OmegaConf.load(path_conf)\n    model, step = load_model_from_config(config, path_ckpt)\n    return model\n\n\ndef get_custom_cond(mode):\n    dest = \"data/example_conditioning\"\n\n    if mode == \"superresolution\":\n        uploaded_img = files.upload()\n        filename = next(iter(uploaded_img))\n        name, filetype = filename.split(\".\") # todo assumes just one dot in name !\n        os.rename(f\"{filename}\", f\"{dest}/{mode}/custom_{name}.{filetype}\")\n\n    elif mode == \"text_conditional\":\n        w = widgets.Text(value='A cake with cream!', disabled=True)\n        display(w)\n\n        with open(f\"{dest}/{mode}/custom_{w.value[:20]}.txt\", 'w') as f:\n            f.write(w.value)\n\n    elif mode == \"class_conditional\":\n        w = widgets.IntSlider(min=0, max=1000)\n        display(w)\n        with open(f\"{dest}/{mode}/custom.txt\", 'w') as f:\n            f.write(w.value)\n\n    else:\n        raise NotImplementedError(f\"cond not implemented for mode{mode}\")\n\n\ndef get_cond_options(mode):\n    path = \"data/example_conditioning\"\n    path = os.path.join(path, mode)\n    onlyfiles = [f for f in sorted(os.listdir(path))]\n    return path, onlyfiles\n\n\ndef select_cond_path(mode):\n    path = \"data/example_conditioning\"  # todo\n    path = os.path.join(path, mode)\n    onlyfiles = [f for f in sorted(os.listdir(path))]\n\n    selected = widgets.RadioButtons(\n        options=onlyfiles,\n        description='Select conditioning:',\n        disabled=False\n    )\n    display(selected)\n    selected_path = os.path.join(path, selected.value)\n    return selected_path\n\n\ndef get_cond(mode, selected_path):\n    example = dict()\n    if mode == \"superresolution\":\n        up_f = 4\n        visualize_cond_img(selected_path)\n\n        c = Image.open(selected_path)\n        c = 
torch.unsqueeze(torchvision.transforms.ToTensor()(c), 0)\n        c_up = torchvision.transforms.functional.resize(c, size=[up_f * c.shape[2], up_f * c.shape[3]], antialias=True)\n        c_up = rearrange(c_up, '1 c h w -> 1 h w c')\n        c = rearrange(c, '1 c h w -> 1 h w c')\n        c = 2. * c - 1.\n\n        c = c.to(torch.device(\"cuda\"))\n        example[\"LR_image\"] = c\n        example[\"image\"] = c_up\n\n    return example\n\n\ndef visualize_cond_img(path):\n    display(ipyimg(filename=path))\n\n\ndef run(model, selected_path, task, custom_steps, resize_enabled=False, classifier_ckpt=None, global_step=None):\n\n    example = get_cond(task, selected_path)\n\n    save_intermediate_vid = False\n    n_runs = 1\n    masked = False\n    guider = None\n    ckwargs = None\n    mode = 'ddim'\n    ddim_use_x0_pred = False\n    temperature = 1.\n    eta = 1.\n    make_progrow = True\n    custom_shape = None\n\n    height, width = example[\"image\"].shape[1:3]\n    split_input = height >= 128 and width >= 128\n\n    if split_input:\n        ks = 128\n        stride = 64\n        vqf = 4  #\n        model.split_input_params = {\"ks\": (ks, ks), \"stride\": (stride, stride),\n                                    \"vqf\": vqf,\n                                    \"patch_distributed_vq\": True,\n                                    \"tie_braker\": False,\n                                    \"clip_max_weight\": 0.5,\n                                    \"clip_min_weight\": 0.01,\n                                    \"clip_max_tie_weight\": 0.5,\n                                    \"clip_min_tie_weight\": 0.01}\n    else:\n        if hasattr(model, \"split_input_params\"):\n            delattr(model, \"split_input_params\")\n\n    invert_mask = False\n\n    x_T = None\n    for n in range(n_runs):\n        if custom_shape is not None:\n            x_T = torch.randn(1, custom_shape[1], custom_shape[2], custom_shape[3]).to(model.device)\n            x_T = repeat(x_T, '1 c h w -> b c h w', b=custom_shape[0])\n\n        logs = make_convolutional_sample(example, model,\n                                         mode=mode, custom_steps=custom_steps,\n                                         eta=eta, swap_mode=False , masked=masked,\n                                         invert_mask=invert_mask, quantize_x0=False,\n                                         custom_schedule=None, decode_interval=10,\n                                         resize_enabled=resize_enabled, custom_shape=custom_shape,\n                                         temperature=temperature, noise_dropout=0.,\n                                         corrector=guider, corrector_kwargs=ckwargs, x_T=x_T, save_intermediate_vid=save_intermediate_vid,\n                                         make_progrow=make_progrow,ddim_use_x0_pred=ddim_use_x0_pred\n                                         )\n    return logs\n\n\n@torch.no_grad()\ndef convsample_ddim(model, cond, steps, shape, eta=1.0, callback=None, normals_sequence=None,\n                    mask=None, x0=None, quantize_x0=False, img_callback=None,\n                    temperature=1., noise_dropout=0., score_corrector=None,\n                    corrector_kwargs=None, x_T=None, log_every_t=None\n                    ):\n\n    ddim = DDIMSampler(model)\n    bs = shape[0]  # dont know where this comes from but wayne\n    shape = shape[1:]  # cut batch dim\n    print(f\"Sampling with eta = {eta}; steps: {steps}\")\n    samples, intermediates = ddim.sample(steps, batch_size=bs, 
shape=shape, conditioning=cond, callback=callback,\n                                         normals_sequence=normals_sequence, quantize_x0=quantize_x0, eta=eta,\n                                         mask=mask, x0=x0, temperature=temperature, verbose=False,\n                                         score_corrector=score_corrector,\n                                         corrector_kwargs=corrector_kwargs, x_T=x_T)\n\n    return samples, intermediates\n\n\n@torch.no_grad()\ndef make_convolutional_sample(batch, model, mode=\"vanilla\", custom_steps=None, eta=1.0, swap_mode=False, masked=False,\n                              invert_mask=True, quantize_x0=False, custom_schedule=None, decode_interval=1000,\n                              resize_enabled=False, custom_shape=None, temperature=1., noise_dropout=0., corrector=None,\n                              corrector_kwargs=None, x_T=None, save_intermediate_vid=False, make_progrow=True,ddim_use_x0_pred=False):\n    log = dict()\n\n    z, c, x, xrec, xc = model.get_input(batch, model.first_stage_key,\n                                        return_first_stage_outputs=True,\n                                        force_c_encode=not (hasattr(model, 'split_input_params')\n                                                            and model.cond_stage_key == 'coordinates_bbox'),\n                                        return_original_cond=True)\n\n    log_every_t = 1 if save_intermediate_vid else None\n\n    if custom_shape is not None:\n        z = torch.randn(custom_shape)\n        print(f\"Generating {custom_shape[0]} samples of shape {custom_shape[1:]}\")\n\n    z0 = None\n\n    log[\"input\"] = x\n    log[\"reconstruction\"] = xrec\n\n    if ismap(xc):\n        log[\"original_conditioning\"] = model.to_rgb(xc)\n        if hasattr(model, 'cond_stage_key'):\n            log[model.cond_stage_key] = model.to_rgb(xc)\n\n    else:\n        log[\"original_conditioning\"] = xc if xc is not None else torch.zeros_like(x)\n        if model.cond_stage_model:\n            log[model.cond_stage_key] = xc if xc is not None else torch.zeros_like(x)\n            if model.cond_stage_key =='class_label':\n                log[model.cond_stage_key] = xc[model.cond_stage_key]\n\n    with model.ema_scope(\"Plotting\"):\n        t0 = time.time()\n        img_cb = None\n\n        sample, intermediates = convsample_ddim(model, c, steps=custom_steps, shape=z.shape,\n                                                eta=eta,\n                                                quantize_x0=quantize_x0, img_callback=img_cb, mask=None, x0=z0,\n                                                temperature=temperature, noise_dropout=noise_dropout,\n                                                score_corrector=corrector, corrector_kwargs=corrector_kwargs,\n                                                x_T=x_T, log_every_t=log_every_t)\n        t1 = time.time()\n\n        if ddim_use_x0_pred:\n            sample = intermediates['pred_x0'][-1]\n\n    x_sample = model.decode_first_stage(sample)\n\n    try:\n        x_sample_noquant = model.decode_first_stage(sample, force_not_quantize=True)\n        log[\"sample_noquant\"] = x_sample_noquant\n        log[\"sample_diff\"] = torch.abs(x_sample_noquant - x_sample)\n    except:\n        pass\n\n    log[\"sample\"] = x_sample\n    log[\"time\"] = t1 - t0\n\n    return log"
  },
  {
    "path": "ldm_exp/profile_ldm.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--pruned_model\", type=str)\nparser.add_argument(\"--finetuned_ckpt\", type=str)\n\nargs = parser.parse_args()\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nprint(\"Loading model from \", args.pruned_model)\nmodel = torch.load(args.pruned_model, map_location=\"cpu\")\nprint(\"Loading finetuned parameters from \", args.finetuned_ckpt)\npl_sd = torch.load(args.finetuned_ckpt, map_location=\"cpu\")\nsd = pl_sd[\"state_dict\"]\nm, u = model.load_state_dict(sd, strict=False)\nmodel.cuda()\nprint(model)\nsampler = DDIMSampler(model)\n\nn_samples_per_class=1\nxc = torch.tensor(n_samples_per_class*[0])\nc = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\nexample_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\nnum_macs, num_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\nprint(\"Number of parameters: {}\", num_params/1000000, \"M\")\nprint(\"Number of MACs: {}\", num_macs/1e9, \"G\")\n\nused = torch.cuda.max_memory_allocated()\nprint(\"gpu used {} MB\".format(used/1024**2))"
  },
  {
    "path": "ldm_exp/profile_ldm_pretrained.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport torch\nimport torchvision.models as models\nfrom torch.profiler import profile, record_function, ProfilerActivity\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nmodel = get_model()\nsampler = DDIMSampler(model)\n\nn_samples_per_class=1\nxc = torch.tensor(n_samples_per_class*[0])\nc = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\nexample_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\nnum_macs, num_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\n\n\nwith profile(activities=[ProfilerActivity.CPU, ProfilerActivity.CUDA],\n        profile_memory=True, record_shapes=True) as prof:\n    model.model.diffusion_model(**example_inputs)\n\nprint(prof.key_averages().table(sort_by=\"self_cpu_memory_usage\", row_limit=10))"
  },
  {
    "path": "ldm_exp/profile_model.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport argparse\nfrom ldm.modules.attention import CrossAttention\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--sparsity\", type=float, default=0.0)\nparser.add_argument(\"--pruner\", type=str, choices=[\"magnitude\", \"random\", \"taylor\", \"diff-pruning\", \"reinit\", \"diff0\"], default=\"magnitude\")\nargs = parser.parse_args()\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nmodel = get_model()\nsampler = DDIMSampler(model)\n\nimport numpy as np \nfrom PIL import Image\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\n\nclasses = [25, 187, 448, 992]   # define classes to be sampled here\nn_samples_per_class = 6\n\nddim_steps = 20\nddim_eta = 0.0\nscale = 3.0   # for unconditional guidance\n\nprint(model)\n\nprint(\"Pruning ...\")\nmodel.eval()\n\nif args.pruner == \"magnitude\":\n    imp = tp.importance.MagnitudeImportance()\nelif args.pruner == \"random\":\n    imp = tp.importance.RandomImportance()\nelif args.pruner == 'taylor':\n    imp = tp.importance.TaylorImportance(multivariable=True) # standard first-order taylor expansion\nelif args.pruner == 'diff-pruning' or args.pruner == 'diff0':\n    imp = tp.importance.TaylorImportance(multivariable=False) # a modified version, estimating the accumulated error of weight removal\nelse:\n    raise ValueError(f\"Unknown pruner '{args.pruner}'\")\n\nignored_layers = [model.model.diffusion_model.out]\nchannel_groups = {}\niterative_steps = 1\nuc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\n\n\nfor m in model.model.diffusion_model.modules():\n    if isinstance(m, CrossAttention):\n        channel_groups[m.to_q] = m.heads\n        channel_groups[m.to_k] = m.heads\n        channel_groups[m.to_v] = m.heads\n\n\nxc = torch.tensor(n_samples_per_class*[classes[0]])\nc = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\nexample_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\nbase_macs, base_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\npruner = tp.pruner.MagnitudePruner(\n    model.model.diffusion_model,\n    example_inputs,\n    importance=imp,\n    iterative_steps=1,\n    channel_groups =channel_groups,\n    ch_sparsity=args.sparsity, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n    ignored_layers=ignored_layers,\n    root_module_types=[torch.nn.Conv2d, torch.nn.Linear],\n    round_to=2\n)\nmodel.zero_grad()\n\nimport random\nmax_loss = -1\nfor t in 
range(1000):\n    if args.pruner not in ['diff-pruning', 'taylor', 'diff0']:\n        break\n    xc = torch.tensor(random.sample(range(1000), n_samples_per_class))\n    #xc = torch.tensor(n_samples_per_class*[class_label])\n    c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n    samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                    conditioning=c,\n                                    batch_size=n_samples_per_class,\n                                    shape=[3, 64, 64],\n                                    verbose=False,\n                                    unconditional_guidance_scale=scale,\n                                    unconditional_conditioning=uc, \n                                    eta=ddim_eta)\n\n    encoded = model.encode_first_stage(samples_ddim)\n    example_inputs = {\"x\": encoded.to(model.device), \"timesteps\": torch.full((n_samples_per_class,), t, device=model.device, dtype=torch.long), \"context\": c}\n    loss = model.get_loss_at_t(example_inputs['x'], {model.cond_stage_key: xc.to(model.device)}, example_inputs['timesteps'])\n    loss = loss[0]\n    if loss > max_loss:\n        max_loss = loss\n    thres = 0.1 if args.pruner == 'diff-pruning' else 0.0\n    if args.pruner == 'diff-pruning' or args.pruner == 'diff0':\n        if loss / max_loss<thres:\n            break\n    print(t, (loss / max_loss).item(), loss.item(), max_loss.item())\n    loss.backward()\npruner.step() \n\nprint(\"After pruning\")\nprint(model)\n\npruend_macs, pruned_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\nprint(f\"MACs: {pruend_macs / base_macs * 100:.2f}%, {base_macs / 1e9:.2f}G => {pruend_macs / 1e9:.2f}G\")\nprint(f\"Params: {pruned_params / base_params * 100:.2f}%, {base_params / 1e6:.2f}M => {pruned_params / 1e6:.2f}M\")\n\ndevice = torch.device(\"cuda\")\nmodel.to(device)\n\n#base_macs, base_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\n#print(\"Number of parameters: {}\", base_params/1000000, \"M\")\n#print(\"Number of MACs: {}\", base_macs/1e9, \"G\")\n\n# INIT LOGGERS\nstarter, ender = torch.cuda.Event(enable_timing=True), torch.cuda.Event(enable_timing=True)\nrepetitions = 30\ntimings=np.zeros((repetitions,1))\n\n#GPU-WARM-UP\nfor _ in range(10):\n    _ = model.model.diffusion_model(**example_inputs)\n\n# MEASURE PERFORMANCE\nwith torch.no_grad():\n    for rep in range(repetitions):\n        \n        starter.record()\n        _ = model.model.diffusion_model(**example_inputs)\n        ender.record()\n        # WAIT FOR GPU SYNC\n        torch.cuda.synchronize()\n        if rep == 0:\n            print(torch.cuda.max_memory_allocated() / 1024 / 1024 / 1024, \"GB\")\n        curr_time = starter.elapsed_time(ender)\n        timings[rep] = curr_time\n\nmean_syn = np.sum(timings) / repetitions\nstd_syn = np.std(timings)\nprint(\"Inference:\")\nprint(\"Latency:\", mean_syn)\nprint(\"FPS:\", 1000/mean_syn)\nprint(\"-----\")\n\n\nfor _ in range(10):\n    _ = model.model.diffusion_model(**example_inputs)\n\nstarter, ender = torch.cuda.Event(enable_timing=True), torch.cuda.Event(enable_timing=True)\nrepetitions = 30\ntimings=np.zeros((repetitions,1))\n\nfor rep in range(repetitions):\n    c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n    example_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), 
\"context\": c}\n    starter.record()\n    model.model.diffusion_model.zero_grad()\n    output = model.model.diffusion_model(**example_inputs)\n    loss = torch.nn.functional.mse_loss(output, torch.randn_like(output))\n    loss.backward()\n    if rep == 0:\n        print(torch.cuda.max_memory_allocated() / 1024 / 1024 / 1024, \"GB\")\n    \n    ender.record()\n    # WAIT FOR GPU SYNC\n    torch.cuda.synchronize()\n    curr_time = starter.elapsed_time(ender)\n    timings[rep] = curr_time\n\nmean_syn = np.sum(timings) / repetitions\nstd_syn = np.std(timings)\nprint(\"Training:\")\nprint(\"Latency:\", mean_syn)\nprint(\"FPS:\", 1000/mean_syn)\nprint(\"-----\")\n\n"
  },
  {
    "path": "ldm_exp/prune_ldm.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport argparse\nfrom ldm.modules.attention import CrossAttention\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--sparsity\", type=float, default=0.0)\nparser.add_argument(\"--pruner\", type=str, choices=[\"magnitude\", \"random\", \"taylor\", \"diff-pruning\", \"reinit\", \"diff0\"], default=\"magnitude\")\nargs = parser.parse_args()\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nmodel = get_model()\nsampler = DDIMSampler(model)\n\nimport numpy as np \nfrom PIL import Image\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\n\nclasses = [25, 187, 448, 992]   # define classes to be sampled here\nn_samples_per_class = 6\n\nddim_steps = 20\nddim_eta = 0.0\nscale = 3.0   # for unconditional guidance\n\nprint(model)\n\nprint(\"Pruning ...\")\nmodel.eval()\n\nif args.pruner == \"magnitude\":\n    imp = tp.importance.MagnitudeImportance()\nelif args.pruner == \"random\":\n    imp = tp.importance.RandomImportance()\nelif args.pruner == 'taylor':\n    imp = tp.importance.TaylorImportance(multivariable=True) # standard first-order taylor expansion\nelif args.pruner == 'diff-pruning' or args.pruner == 'diff0':\n    imp = tp.importance.TaylorImportance(multivariable=False) # a modified version, estimating the accumulated error of weight removal\nelse:\n    raise ValueError(f\"Unknown pruner '{args.pruner}'\")\n\nignored_layers = [model.model.diffusion_model.out]\nchannel_groups = {}\niterative_steps = 1\nuc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\n\n\nfor m in model.model.diffusion_model.modules():\n    if isinstance(m, CrossAttention):\n        channel_groups[m.to_q] = m.heads\n        channel_groups[m.to_k] = m.heads\n        channel_groups[m.to_v] = m.heads\n\n\nxc = torch.tensor(n_samples_per_class*[classes[0]])\nc = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\nexample_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\nbase_macs, base_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\npruner = tp.pruner.MagnitudePruner(\n    model.model.diffusion_model,\n    example_inputs,\n    importance=imp,\n    iterative_steps=1,\n    channel_groups =channel_groups,\n    ch_sparsity=args.sparsity, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n    ignored_layers=ignored_layers,\n    target_layer_types=[torch.nn.Conv2d, torch.nn.Linear],\n    round_to=2\n)\nmodel.zero_grad()\n\nimport random\nmax_loss = -1\nfor t in 
range(1000):\n    if args.pruner not in ['diff-pruning', 'taylor', 'diff0']:\n        break\n    xc = torch.tensor(random.sample(range(1000), n_samples_per_class))\n    #xc = torch.tensor(n_samples_per_class*[class_label])\n    c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n    samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                    conditioning=c,\n                                    batch_size=n_samples_per_class,\n                                    shape=[3, 64, 64],\n                                    verbose=False,\n                                    unconditional_guidance_scale=scale,\n                                    unconditional_conditioning=uc, \n                                    eta=ddim_eta)\n\n    #encoded = model.encode_first_stage(samples_ddim)\n    example_inputs = {\"x\": samples_ddim.to(model.device), \"timesteps\": torch.full((n_samples_per_class,), t, device=model.device, dtype=torch.long), \"context\": c}\n    loss = model.get_loss_at_t(example_inputs['x'], {model.cond_stage_key: xc.to(model.device)}, example_inputs['timesteps'])\n    loss = loss[0]\n    if loss > max_loss:\n        max_loss = loss\n    thres = 0.1 if args.pruner == 'diff-pruning' else 0.0\n    if args.pruner == 'diff-pruning' or args.pruner == 'diff0':\n        if loss / max_loss<thres:\n            break\n    print(t, (loss / max_loss).item(), loss.item(), max_loss.item())\n    loss.backward()\npruner.step() \n\nprint(\"After pruning\")\nprint(model)\n\npruend_macs, pruned_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\nprint(f\"MACs: {pruend_macs / base_macs * 100:.2f}%, {base_macs / 1e9:.2f}G => {pruend_macs / 1e9:.2f}G\")\nprint(f\"Params: {pruned_params / base_params * 100:.2f}%, {base_params / 1e6:.2f}M => {pruned_params / 1e6:.2f}M\")\n\nall_samples = list()\n\nwith torch.no_grad():\n    with model.ema_scope():\n        uc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\n        \n        for class_label in classes:\n            print(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\n            xc = torch.tensor(n_samples_per_class*[class_label])\n            c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n            \n            samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                             conditioning=c,\n                                             batch_size=n_samples_per_class,\n                                             shape=[3, 64, 64],\n                                             verbose=False,\n                                             unconditional_guidance_scale=scale,\n                                             unconditional_conditioning=uc, \n                                             eta=ddim_eta)\n\n            x_samples_ddim = model.decode_first_stage(samples_ddim)\n            x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \n                                         min=0.0, max=1.0)\n            all_samples.append(x_samples_ddim)\n\n\n# display as grid\ngrid = torch.stack(all_samples, 0)\ngrid = rearrange(grid, 'n b c h w -> (n b) c h w')\ngrid = make_grid(grid, nrow=n_samples_per_class)\n\n# to image\ngrid = 255. 
* rearrange(grid, 'c h w -> h w c').cpu().numpy()\nimg = Image.fromarray(grid.astype(np.uint8))\nimg.save(\"samples.png\")\n\n\n\nprint(\"Saving pruned model ...\")\ntorch.save(model, \"logs/pruned_model_{}_{}.pt\".format(args.sparsity, args.pruner))"
  },
  {
    "path": "ldm_exp/prune_ldm_no_grad.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \n\nimport argparse\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--sparsity\", type=float, default=0.0)\nparser.add_argument(\"--pruner\", type=str, choices=[\"magnitude\", \"random\"], default=\"magnitude\")\n\n\nargs = parser.parse_args()\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt)#, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nmodel = get_model()\nsampler = DDIMSampler(model)\n\nimport numpy as np \nfrom PIL import Image\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\n\nclasses = [25, 187, 448, 992]   # define classes to be sampled here\nn_samples_per_class = 6\n\nddim_steps = 20\nddim_eta = 0.0\nscale = 3.0   # for unconditional guidance\n\nprint(model)\n\nprint(\"Pruning ...\")\nmodel.eval()\n\nif args.pruner == \"magnitude\":\n    imp = tp.importance.MagnitudeImportance()\nelif args.pruner == \"random\":\n    imp = tp.importance.RandomImportance()\nelif args.pruner == 'taylor':\n    imp = tp.importance.TaylorImportance(multivariable=True) # standard first-order taylor expansion\nelif args.pruner == 'diff-pruning':\n    imp = tp.importance.TaylorImportance(multivariable=False) # a modified version, estimating the accumulated error of weight removal\nelse:\n    raise ValueError(f\"Unknown pruner '{args.pruner}'\")\n\nignored_layers = [model.model.diffusion_model.out]\nchannel_groups = {}\niterative_steps = 1\nuc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\nclass_label = classes[0]\nprint(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\nxc = torch.tensor(n_samples_per_class*[class_label])\nc = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\nsamples_ddim, _ = sampler.sample(S=ddim_steps,\n                                conditioning=c,\n                                batch_size=n_samples_per_class,\n                                shape=[3, 64, 64],\n                                verbose=False,\n                                unconditional_guidance_scale=scale,\n                                unconditional_conditioning=uc, \n                                eta=ddim_eta)\n\n\nexample_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\nbase_macs, base_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\npruner = tp.pruner.MagnitudePruner(\n    model.model.diffusion_model,\n    example_inputs,\n    importance=imp,\n    iterative_steps=1,\n    channel_groups =channel_groups,\n    ch_sparsity=args.sparsity, # remove 50% channels, 
ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n    ignored_layers=ignored_layers,\n    root_module_types=[torch.nn.Conv2d, torch.nn.Linear]\n)\n\npruner.step() \n\nprint(\"After pruning\")\nprint(model)\n\npruend_macs, pruned_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\nprint(f\"MACs: {pruend_macs / base_macs * 100:.2f}%, {base_macs / 1e9:.2f}G => {pruend_macs / 1e9:.2f}G\")\nprint(f\"Params: {pruned_params / base_params * 100:.2f}%, {base_params / 1e6:.2f}M => {pruned_params / 1e6:.2f}M\")\n\nall_samples = list()\n\nwith torch.no_grad():\n    with model.ema_scope():\n        uc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\n        \n        for class_label in classes:\n            print(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\n            xc = torch.tensor(n_samples_per_class*[class_label])\n            c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n            \n            samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                             conditioning=c,\n                                             batch_size=n_samples_per_class,\n                                             shape=[3, 64, 64],\n                                             verbose=False,\n                                             unconditional_guidance_scale=scale,\n                                             unconditional_conditioning=uc, \n                                             eta=ddim_eta)\n\n            x_samples_ddim = model.decode_first_stage(samples_ddim)\n            x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \n                                         min=0.0, max=1.0)\n            all_samples.append(x_samples_ddim)\n\n\n# display as grid\ngrid = torch.stack(all_samples, 0)\ngrid = rearrange(grid, 'n b c h w -> (n b) c h w')\ngrid = make_grid(grid, nrow=n_samples_per_class)\n\n# to image\ngrid = 255. * rearrange(grid, 'c h w -> h w c').cpu().numpy()\nimg = Image.fromarray(grid.astype(np.uint8))\nimg.save(\"samples.png\")\n\n\n\nprint(\"Saving pruned model ...\")\ntorch.save(model, \"logs/pruned_model_{}_{}.pt\".format(args.sparsity, args.pruner))"
  },
  {
    "path": "ldm_exp/run.sh",
    "content": "CUDA_VISIBLE_DEVICES=0 python prune_ldm.py --sparsity 0.3 --pruner diff-pruning\npython main.py --base configs/latent-diffusion/cin256-v2.yaml  -t --gpus 0,1,2,3 --load_pruned_model logs/pruned_model_0.3_diff-pruning.pt"
  },
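  {
    "path": "ldm_exp/README.md",
    "content": "# ldm_exp\n\nRough working notes for the pruning experiments in this folder; the commands and paths below are taken from `run.sh` and the argparse options of the scripts, and may need adjusting to your checkout. The scripts appear to assume the repository root as the working directory, with `taming-transformers` cloned alongside and the class-conditional ImageNet LDM checkpoint at `models/ldm/cin256-v2/model.ckpt`.\n\n- `prune_ldm.py`: structurally prunes the LDM UNet with `torch_pruning`; `--pruner` selects the importance criterion (magnitude, random, taylor, diff-pruning, diff0) and `--sparsity` the channel sparsity. Taylor-style criteria accumulate gradients from DDIM samples before `pruner.step()`. Saves a sample grid to `samples.png` and the pruned model to `logs/pruned_model_<sparsity>_<pruner>.pt`.\n- `prune_ldm_no_grad.py`: gradient-free variant (magnitude / random importance only).\n- `profile_ldm.py`, `profile_ldm_pretrained.py`, `profile_model.py`: report parameter counts, MACs, latency and GPU memory for the pruned or pretrained diffusion UNet.\n- `sample_pruned.py`, `sample_imagenet.py`, `sample_for_FID.py`: sample image grids from a pruned-and-finetuned or pretrained model, and dump per-class images for FID evaluation.\n- `run.sh`: example prune-then-finetune pipeline.\n\nExample usage (mirrors `run.sh`; the finetuned checkpoint path is a placeholder):\n\n```bash\nCUDA_VISIBLE_DEVICES=0 python prune_ldm.py --sparsity 0.3 --pruner diff-pruning\npython main.py --base configs/latent-diffusion/cin256-v2.yaml -t --gpus 0,1,2,3 --load_pruned_model logs/pruned_model_0.3_diff-pruning.pt\npython sample_pruned.py --pruned_model logs/pruned_model_0.3_diff-pruning.pt --finetuned_ckpt <path/to/finetuned.ckpt>\n```\n"
  },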
  {
    "path": "ldm_exp/sample_for_FID.py",
    "content": "import sys, os\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--pruned_model\", type=str, default=None)\nparser.add_argument(\"--finetuned_ckpt\", type=str, default=None)\nparser.add_argument(\"--ipc\", type=int, default=50)\nparser.add_argument(\"--output\", type=str, default='run')\nparser.add_argument(\"--batch_size\", type=int, default=50)\n\nargs = parser.parse_args()\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nif args.pruned_model is None:\n    model = get_model()\nelse:\n    print(\"Loading model from \", args.pruned_model)\n    model = torch.load(args.pruned_model, map_location=\"cpu\")\n    print(\"Loading finetuned parameters from \", args.finetuned_ckpt)\n    pl_sd = torch.load(args.finetuned_ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    m, u = model.load_state_dict(sd, strict=False)\nmodel.cuda()\nprint(model)\nsampler = DDIMSampler(model)\n\nnum_params = sum(p.numel() for p in model.parameters())\nprint(\"Number of parameters: {}\", num_params/1000000, \"M\")\n\nimport numpy as np \nfrom PIL import Image\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\n\nclasses = range(1000)   # define classes to be sampled here\nn_samples_per_class = args.batch_size\nn_batch_per_class = args.ipc // args.batch_size\n\nddim_steps = 250\nddim_eta = 0.0\nscale = 3.0   # for unconditional guidance\n\nall_samples = list()\n\nfrom torchvision import utils as tvu\nos.makedirs(args.output, exist_ok=True)\n\nimg_id = 0\nwith torch.no_grad():\n    with model.ema_scope():\n        uc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n        )\n        \n        for _ in range(n_batch_per_class):\n            for class_label in classes:\n                print(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\n                xc = torch.tensor(n_samples_per_class*[class_label])\n                c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n                \n                samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                                conditioning=c,\n                                                batch_size=n_samples_per_class,\n                                                shape=[3, 64, 64],\n                                                verbose=False,\n                                                unconditional_guidance_scale=scale,\n                                                unconditional_conditioning=uc, \n                                                eta=ddim_eta)\n\n                x_samples_ddim = 
model.decode_first_stage(samples_ddim)\n                x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \n                                            min=0.0, max=1.0)\n                #all_samples.append(x_samples_ddim)\n                for i in range(len(x_samples_ddim)):\n                    tvu.save_image(\n                        x_samples_ddim[i], os.path.join(args.output, f\"{class_label}_{img_id}.png\")\n                    )\n                    img_id += 1\n                "
  },
  {
    "path": "ldm_exp/sample_imagenet.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\ndef load_model_from_config(config, ckpt):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nmodel = get_model()\nsampler = DDIMSampler(model)\n\nprint(model)\n\nnum_params = sum(p.numel() for p in model.parameters())\nprint(\"Number of parameters: {}\", num_params/1000000, \"M\")\n\nimport numpy as np \nfrom PIL import Image\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\n\nclasses = [0, 1, 2, 3]#[25, 187, 448, 992]   # define classes to be sampled here\nn_samples_per_class = 6\n\nddim_steps = 20\nddim_eta = 0.0\nscale = 1.5   # for unconditional guidance\n\nall_samples = list()\n\nwith torch.no_grad():\n    with model.ema_scope():\n        uc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\n        \n        for class_label in classes:\n            print(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\n            xc = torch.tensor(n_samples_per_class*[class_label])\n            c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n            \n            samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                             conditioning=c,\n                                             batch_size=n_samples_per_class,\n                                             shape=[3, 64, 64],\n                                             verbose=False,\n                                             unconditional_guidance_scale=scale,\n                                             unconditional_conditioning=uc, \n                                             eta=ddim_eta)\n\n            x_samples_ddim = model.decode_first_stage(samples_ddim)\n            x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \n                                         min=0.0, max=1.0)\n            all_samples.append(x_samples_ddim)\n\n\n# display as grid\ngrid = torch.stack(all_samples, 0)\ngrid = rearrange(grid, 'n b c h w -> (n b) c h w')\ngrid = make_grid(grid, nrow=n_samples_per_class)\n\n# to image\ngrid = 255. * rearrange(grid, 'c h w -> h w c').cpu().numpy()\nimg = Image.fromarray(grid.astype(np.uint8))\nimg.save(\"samples_pretrained.png\")"
  },
  {
    "path": "ldm_exp/sample_pruned.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--pruned_model\", type=str)\nparser.add_argument(\"--finetuned_ckpt\", type=str)\n\nargs = parser.parse_args()\n\n#@title loading utils\nimport torch\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nprint(\"Loading model from \", args.pruned_model)\nmodel = torch.load(args.pruned_model, map_location=\"cpu\")\nprint(\"Loading finetuned parameters from \", args.finetuned_ckpt)\npl_sd = torch.load(args.finetuned_ckpt, map_location=\"cpu\")\nsd = pl_sd[\"state_dict\"]\nm, u = model.load_state_dict(sd, strict=False)\nmodel.cuda()\nprint(model)\nsampler = DDIMSampler(model)\n\nn_samples_per_class=1\nxc = torch.tensor(n_samples_per_class*[0])\nc = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\nexample_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\nnum_macs, num_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\nprint(\"Number of parameters: {}\", num_params/1000000, \"M\")\nprint(\"Number of MACs: {}\", num_macs/1e9, \"G\")\n\nimport numpy as np \nfrom PIL import Image\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\n\nclasses = [0, 1, 2, 3] #[25, 187, 448, 992]   # define classes to be sampled here\nn_samples_per_class = 6\n\nddim_steps = 250\nddim_eta = 0.0\nscale = 3   # for unconditional guidance\n\nall_samples = list()\n\nwith torch.no_grad():\n    with model.ema_scope():\n        uc = model.get_learned_conditioning(\n            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n            )\n        \n        for class_label in classes:\n            print(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\n            xc = torch.tensor(n_samples_per_class*[class_label])\n            c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n            \n            samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                             conditioning=c,\n                                             batch_size=n_samples_per_class,\n                                             shape=[3, 64, 64],\n                                             verbose=False,\n                                             unconditional_guidance_scale=scale,\n                                             unconditional_conditioning=uc, \n                                             eta=ddim_eta)\n\n            x_samples_ddim = model.decode_first_stage(samples_ddim)\n            x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \n                                         min=0.0, max=1.0)\n            all_samples.append(x_samples_ddim)\n\n\n# display as grid\ngrid = torch.stack(all_samples, 0)\ngrid = rearrange(grid, 'n b c h w -> (n b) c h w')\ngrid = make_grid(grid, nrow=n_samples_per_class)\n\n# to image\ngrid = 255. * rearrange(grid, 'c h w -> h w c').cpu().numpy()\nimg = Image.fromarray(grid.astype(np.uint8))\nimg.save(\"samples.png\")"
  },
  {
    "path": "ldm_exp/scripts/download_first_stages.sh",
    "content": "#!/bin/bash\nwget -O models/first_stage_models/kl-f4/model.zip https://ommer-lab.com/files/latent-diffusion/kl-f4.zip\nwget -O models/first_stage_models/kl-f8/model.zip https://ommer-lab.com/files/latent-diffusion/kl-f8.zip\nwget -O models/first_stage_models/kl-f16/model.zip https://ommer-lab.com/files/latent-diffusion/kl-f16.zip\nwget -O models/first_stage_models/kl-f32/model.zip https://ommer-lab.com/files/latent-diffusion/kl-f32.zip\nwget -O models/first_stage_models/vq-f4/model.zip https://ommer-lab.com/files/latent-diffusion/vq-f4.zip\nwget -O models/first_stage_models/vq-f4-noattn/model.zip https://ommer-lab.com/files/latent-diffusion/vq-f4-noattn.zip\nwget -O models/first_stage_models/vq-f8/model.zip https://ommer-lab.com/files/latent-diffusion/vq-f8.zip\nwget -O models/first_stage_models/vq-f8-n256/model.zip https://ommer-lab.com/files/latent-diffusion/vq-f8-n256.zip\nwget -O models/first_stage_models/vq-f16/model.zip https://ommer-lab.com/files/latent-diffusion/vq-f16.zip\n\n\n\ncd models/first_stage_models/kl-f4\nunzip -o model.zip\n\ncd ../kl-f8\nunzip -o model.zip\n\ncd ../kl-f16\nunzip -o model.zip\n\ncd ../kl-f32\nunzip -o model.zip\n\ncd ../vq-f4\nunzip -o model.zip\n\ncd ../vq-f4-noattn\nunzip -o model.zip\n\ncd ../vq-f8\nunzip -o model.zip\n\ncd ../vq-f8-n256\nunzip -o model.zip\n\ncd ../vq-f16\nunzip -o model.zip\n\ncd ../.."
  },
  {
    "path": "ldm_exp/scripts/download_models.sh",
    "content": "#!/bin/bash\nwget -O models/ldm/celeba256/celeba-256.zip https://ommer-lab.com/files/latent-diffusion/celeba.zip\nwget -O models/ldm/ffhq256/ffhq-256.zip https://ommer-lab.com/files/latent-diffusion/ffhq.zip\nwget -O models/ldm/lsun_churches256/lsun_churches-256.zip https://ommer-lab.com/files/latent-diffusion/lsun_churches.zip\nwget -O models/ldm/lsun_beds256/lsun_beds-256.zip https://ommer-lab.com/files/latent-diffusion/lsun_bedrooms.zip\nwget -O models/ldm/text2img256/model.zip https://ommer-lab.com/files/latent-diffusion/text2img.zip\nwget -O models/ldm/cin256/model.zip https://ommer-lab.com/files/latent-diffusion/cin.zip\nwget -O models/ldm/semantic_synthesis512/model.zip https://ommer-lab.com/files/latent-diffusion/semantic_synthesis.zip\nwget -O models/ldm/semantic_synthesis256/model.zip https://ommer-lab.com/files/latent-diffusion/semantic_synthesis256.zip\nwget -O models/ldm/bsr_sr/model.zip https://ommer-lab.com/files/latent-diffusion/sr_bsr.zip\nwget -O models/ldm/layout2img-openimages256/model.zip https://ommer-lab.com/files/latent-diffusion/layout2img_model.zip\nwget -O models/ldm/inpainting_big/model.zip https://ommer-lab.com/files/latent-diffusion/inpainting_big.zip\n\n\n\ncd models/ldm/celeba256\nunzip -o celeba-256.zip\n\ncd ../ffhq256\nunzip -o ffhq-256.zip\n\ncd ../lsun_churches256\nunzip -o lsun_churches-256.zip\n\ncd ../lsun_beds256\nunzip -o lsun_beds-256.zip\n\ncd ../text2img256\nunzip -o model.zip\n\ncd ../cin256\nunzip -o model.zip\n\ncd ../semantic_synthesis512\nunzip -o model.zip\n\ncd ../semantic_synthesis256\nunzip -o model.zip\n\ncd ../bsr_sr\nunzip -o model.zip\n\ncd ../layout2img-openimages256\nunzip -o model.zip\n\ncd ../inpainting_big\nunzip -o model.zip\n\ncd ../..\n"
  },
  {
    "path": "ldm_exp/scripts/inpaint.py",
    "content": "import argparse, os, sys, glob\nfrom omegaconf import OmegaConf\nfrom PIL import Image\nfrom tqdm import tqdm\nimport numpy as np\nimport torch\nfrom main import instantiate_from_config\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\n\ndef make_batch(image, mask, device):\n    image = np.array(Image.open(image).convert(\"RGB\"))\n    image = image.astype(np.float32)/255.0\n    image = image[None].transpose(0,3,1,2)\n    image = torch.from_numpy(image)\n\n    mask = np.array(Image.open(mask).convert(\"L\"))\n    mask = mask.astype(np.float32)/255.0\n    mask = mask[None,None]\n    mask[mask < 0.5] = 0\n    mask[mask >= 0.5] = 1\n    mask = torch.from_numpy(mask)\n\n    masked_image = (1-mask)*image\n\n    batch = {\"image\": image, \"mask\": mask, \"masked_image\": masked_image}\n    for k in batch:\n        batch[k] = batch[k].to(device=device)\n        batch[k] = batch[k]*2.0-1.0\n    return batch\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\n        \"--indir\",\n        type=str,\n        nargs=\"?\",\n        help=\"dir containing image-mask pairs (`example.png` and `example_mask.png`)\",\n    )\n    parser.add_argument(\n        \"--outdir\",\n        type=str,\n        nargs=\"?\",\n        help=\"dir to write results to\",\n    )\n    parser.add_argument(\n        \"--steps\",\n        type=int,\n        default=50,\n        help=\"number of ddim sampling steps\",\n    )\n    opt = parser.parse_args()\n\n    masks = sorted(glob.glob(os.path.join(opt.indir, \"*_mask.png\")))\n    images = [x.replace(\"_mask.png\", \".png\") for x in masks]\n    print(f\"Found {len(masks)} inputs.\")\n\n    config = OmegaConf.load(\"models/ldm/inpainting_big/config.yaml\")\n    model = instantiate_from_config(config.model)\n    model.load_state_dict(torch.load(\"models/ldm/inpainting_big/last.ckpt\")[\"state_dict\"],\n                          strict=False)\n\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    model = model.to(device)\n    sampler = DDIMSampler(model)\n\n    os.makedirs(opt.outdir, exist_ok=True)\n    with torch.no_grad():\n        with model.ema_scope():\n            for image, mask in tqdm(zip(images, masks)):\n                outpath = os.path.join(opt.outdir, os.path.split(image)[1])\n                batch = make_batch(image, mask, device=device)\n\n                # encode masked image and concat downsampled mask\n                c = model.cond_stage_model.encode(batch[\"masked_image\"])\n                cc = torch.nn.functional.interpolate(batch[\"mask\"],\n                                                     size=c.shape[-2:])\n                c = torch.cat((c, cc), dim=1)\n\n                shape = (c.shape[1]-1,)+c.shape[2:]\n                samples_ddim, _ = sampler.sample(S=opt.steps,\n                                                 conditioning=c,\n                                                 batch_size=c.shape[0],\n                                                 shape=shape,\n                                                 verbose=False)\n                x_samples_ddim = model.decode_first_stage(samples_ddim)\n\n                image = torch.clamp((batch[\"image\"]+1.0)/2.0,\n                                    min=0.0, max=1.0)\n                mask = torch.clamp((batch[\"mask\"]+1.0)/2.0,\n                                   min=0.0, max=1.0)\n                predicted_image = torch.clamp((x_samples_ddim+1.0)/2.0,\n                             
                 min=0.0, max=1.0)\n\n                inpainted = (1-mask)*image+mask*predicted_image\n                inpainted = inpainted.cpu().numpy().transpose(0,2,3,1)[0]*255\n                Image.fromarray(inpainted.astype(np.uint8)).save(outpath)\n"
  },
  {
    "path": "ldm_exp/scripts/knn2img.py",
    "content": "import argparse, os, sys, glob\nimport clip\nimport torch\nimport torch.nn as nn\nimport numpy as np\nfrom omegaconf import OmegaConf\nfrom PIL import Image\nfrom tqdm import tqdm, trange\nfrom itertools import islice\nfrom einops import rearrange, repeat\nfrom torchvision.utils import make_grid\nimport scann\nimport time\nfrom multiprocessing import cpu_count\n\nfrom ldm.util import instantiate_from_config, parallel_data_prefetch\nfrom ldm.models.diffusion.ddim import DDIMSampler\nfrom ldm.models.diffusion.plms import PLMSSampler\nfrom ldm.modules.encoders.modules import FrozenClipImageEmbedder, FrozenCLIPTextEmbedder\n\nDATABASES = [\n    \"openimages\",\n    \"artbench-art_nouveau\",\n    \"artbench-baroque\",\n    \"artbench-expressionism\",\n    \"artbench-impressionism\",\n    \"artbench-post_impressionism\",\n    \"artbench-realism\",\n    \"artbench-romanticism\",\n    \"artbench-renaissance\",\n    \"artbench-surrealism\",\n    \"artbench-ukiyo_e\",\n]\n\n\ndef chunk(it, size):\n    it = iter(it)\n    return iter(lambda: tuple(islice(it, size)), ())\n\n\ndef load_model_from_config(config, ckpt, verbose=False):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    if \"global_step\" in pl_sd:\n        print(f\"Global Step: {pl_sd['global_step']}\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    if len(m) > 0 and verbose:\n        print(\"missing keys:\")\n        print(m)\n    if len(u) > 0 and verbose:\n        print(\"unexpected keys:\")\n        print(u)\n\n    model.cuda()\n    model.eval()\n    return model\n\n\nclass Searcher(object):\n    def __init__(self, database, retriever_version='ViT-L/14'):\n        assert database in DATABASES\n        # self.database = self.load_database(database)\n        self.database_name = database\n        self.searcher_savedir = f'data/rdm/searchers/{self.database_name}'\n        self.database_path = f'data/rdm/retrieval_databases/{self.database_name}'\n        self.retriever = self.load_retriever(version=retriever_version)\n        self.database = {'embedding': [],\n                         'img_id': [],\n                         'patch_coords': []}\n        self.load_database()\n        self.load_searcher()\n\n    def train_searcher(self, k,\n                       metric='dot_product',\n                       searcher_savedir=None):\n\n        print('Start training searcher')\n        searcher = scann.scann_ops_pybind.builder(self.database['embedding'] /\n                                                  np.linalg.norm(self.database['embedding'], axis=1)[:, np.newaxis],\n                                                  k, metric)\n        self.searcher = searcher.score_brute_force().build()\n        print('Finish training searcher')\n\n        if searcher_savedir is not None:\n            print(f'Save trained searcher under \"{searcher_savedir}\"')\n            os.makedirs(searcher_savedir, exist_ok=True)\n            self.searcher.serialize(searcher_savedir)\n\n    def load_single_file(self, saved_embeddings):\n        compressed = np.load(saved_embeddings)\n        self.database = {key: compressed[key] for key in compressed.files}\n        print('Finished loading of clip embeddings.')\n\n    def load_multi_files(self, data_archive):\n        out_data = {key: [] for key in self.database}\n        for d in tqdm(data_archive, desc=f'Loading datapool from {len(data_archive)} individual 
files.'):\n            for key in d.files:\n                out_data[key].append(d[key])\n\n        return out_data\n\n    def load_database(self):\n\n        print(f'Load saved patch embedding from \"{self.database_path}\"')\n        file_content = glob.glob(os.path.join(self.database_path, '*.npz'))\n\n        if len(file_content) == 1:\n            self.load_single_file(file_content[0])\n        elif len(file_content) > 1:\n            data = [np.load(f) for f in file_content]\n            prefetched_data = parallel_data_prefetch(self.load_multi_files, data,\n                                                     n_proc=min(len(data), cpu_count()), target_data_type='dict')\n\n            self.database = {key: np.concatenate([od[key] for od in prefetched_data], axis=1)[0] for key in\n                             self.database}\n        else:\n            raise ValueError(f'No npz-files in specified path \"{self.database_path}\" is this directory existing?')\n\n        print(f'Finished loading of retrieval database of length {self.database[\"embedding\"].shape[0]}.')\n\n    def load_retriever(self, version='ViT-L/14', ):\n        model = FrozenClipImageEmbedder(model=version)\n        if torch.cuda.is_available():\n            model.cuda()\n        model.eval()\n        return model\n\n    def load_searcher(self):\n        print(f'load searcher for database {self.database_name} from {self.searcher_savedir}')\n        self.searcher = scann.scann_ops_pybind.load_searcher(self.searcher_savedir)\n        print('Finished loading searcher.')\n\n    def search(self, x, k):\n        if self.searcher is None and self.database['embedding'].shape[0] < 2e4:\n            self.train_searcher(k)   # quickly fit searcher on the fly for small databases\n        assert self.searcher is not None, 'Cannot search with uninitialized searcher'\n        if isinstance(x, torch.Tensor):\n            x = x.detach().cpu().numpy()\n        if len(x.shape) == 3:\n            x = x[:, 0]\n        query_embeddings = x / np.linalg.norm(x, axis=1)[:, np.newaxis]\n\n        start = time.time()\n        nns, distances = self.searcher.search_batched(query_embeddings, final_num_neighbors=k)\n        end = time.time()\n\n        out_embeddings = self.database['embedding'][nns]\n        out_img_ids = self.database['img_id'][nns]\n        out_pc = self.database['patch_coords'][nns]\n\n        out = {'nn_embeddings': out_embeddings / np.linalg.norm(out_embeddings, axis=-1)[..., np.newaxis],\n               'img_ids': out_img_ids,\n               'patch_coords': out_pc,\n               'queries': x,\n               'exec_time': end - start,\n               'nns': nns,\n               'q_embeddings': query_embeddings}\n\n        return out\n\n    def __call__(self, x, n):\n        return self.search(x, n)\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n    # TODO: add n_neighbors and modes (text-only, text-image-retrieval, image-image retrieval etc)\n    # TODO: add 'image variation' mode when knn=0 but a single image is given instead of a text prompt?\n    parser.add_argument(\n        \"--prompt\",\n        type=str,\n        nargs=\"?\",\n        default=\"a painting of a virus monster playing guitar\",\n        help=\"the prompt to render\"\n    )\n\n    parser.add_argument(\n        \"--outdir\",\n        type=str,\n        nargs=\"?\",\n        help=\"dir to write results to\",\n        default=\"outputs/txt2img-samples\"\n    )\n\n    parser.add_argument(\n        \"--skip_grid\",\n        
action='store_true',\n        help=\"do not save a grid, only individual samples. Helpful when evaluating lots of samples\",\n    )\n\n    parser.add_argument(\n        \"--ddim_steps\",\n        type=int,\n        default=50,\n        help=\"number of ddim sampling steps\",\n    )\n\n    parser.add_argument(\n        \"--n_repeat\",\n        type=int,\n        default=1,\n        help=\"number of repeats in CLIP latent space\",\n    )\n\n    parser.add_argument(\n        \"--plms\",\n        action='store_true',\n        help=\"use plms sampling\",\n    )\n\n    parser.add_argument(\n        \"--ddim_eta\",\n        type=float,\n        default=0.0,\n        help=\"ddim eta (eta=0.0 corresponds to deterministic sampling\",\n    )\n    parser.add_argument(\n        \"--n_iter\",\n        type=int,\n        default=1,\n        help=\"sample this often\",\n    )\n\n    parser.add_argument(\n        \"--H\",\n        type=int,\n        default=768,\n        help=\"image height, in pixel space\",\n    )\n\n    parser.add_argument(\n        \"--W\",\n        type=int,\n        default=768,\n        help=\"image width, in pixel space\",\n    )\n\n    parser.add_argument(\n        \"--n_samples\",\n        type=int,\n        default=3,\n        help=\"how many samples to produce for each given prompt. A.k.a batch size\",\n    )\n\n    parser.add_argument(\n        \"--n_rows\",\n        type=int,\n        default=0,\n        help=\"rows in the grid (default: n_samples)\",\n    )\n\n    parser.add_argument(\n        \"--scale\",\n        type=float,\n        default=5.0,\n        help=\"unconditional guidance scale: eps = eps(x, empty) + scale * (eps(x, cond) - eps(x, empty))\",\n    )\n\n    parser.add_argument(\n        \"--from-file\",\n        type=str,\n        help=\"if specified, load prompts from this file\",\n    )\n\n    parser.add_argument(\n        \"--config\",\n        type=str,\n        default=\"configs/retrieval-augmented-diffusion/768x768.yaml\",\n        help=\"path to config which constructs model\",\n    )\n\n    parser.add_argument(\n        \"--ckpt\",\n        type=str,\n        default=\"models/rdm/rdm768x768/model.ckpt\",\n        help=\"path to checkpoint of model\",\n    )\n\n    parser.add_argument(\n        \"--clip_type\",\n        type=str,\n        default=\"ViT-L/14\",\n        help=\"which CLIP model to use for retrieval and NN encoding\",\n    )\n    parser.add_argument(\n        \"--database\",\n        type=str,\n        default='artbench-surrealism',\n        choices=DATABASES,\n        help=\"The database used for the search, only applied when --use_neighbors=True\",\n    )\n    parser.add_argument(\n        \"--use_neighbors\",\n        default=False,\n        action='store_true',\n        help=\"Include neighbors in addition to text prompt for conditioning\",\n    )\n    parser.add_argument(\n        \"--knn\",\n        default=10,\n        type=int,\n        help=\"The number of included neighbors, only applied when --use_neighbors=True\",\n    )\n\n    opt = parser.parse_args()\n\n    config = OmegaConf.load(f\"{opt.config}\")\n    model = load_model_from_config(config, f\"{opt.ckpt}\")\n\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    model = model.to(device)\n\n    clip_text_encoder = FrozenCLIPTextEmbedder(opt.clip_type).to(device)\n\n    if opt.plms:\n        sampler = PLMSSampler(model)\n    else:\n        sampler = DDIMSampler(model)\n\n    os.makedirs(opt.outdir, exist_ok=True)\n    outpath = 
opt.outdir\n\n    batch_size = opt.n_samples\n    n_rows = opt.n_rows if opt.n_rows > 0 else batch_size\n    if not opt.from_file:\n        prompt = opt.prompt\n        assert prompt is not None\n        data = [batch_size * [prompt]]\n\n    else:\n        print(f\"reading prompts from {opt.from_file}\")\n        with open(opt.from_file, \"r\") as f:\n            data = f.read().splitlines()\n            data = list(chunk(data, batch_size))\n\n    sample_path = os.path.join(outpath, \"samples\")\n    os.makedirs(sample_path, exist_ok=True)\n    base_count = len(os.listdir(sample_path))\n    grid_count = len(os.listdir(outpath)) - 1\n\n    print(f\"sampling scale for cfg is {opt.scale:.2f}\")\n\n    searcher = None\n    if opt.use_neighbors:\n        searcher = Searcher(opt.database)\n\n    with torch.no_grad():\n        with model.ema_scope():\n            for n in trange(opt.n_iter, desc=\"Sampling\"):\n                all_samples = list()\n                for prompts in tqdm(data, desc=\"data\"):\n                    print(\"sampling prompts:\", prompts)\n                    if isinstance(prompts, tuple):\n                        prompts = list(prompts)\n                    c = clip_text_encoder.encode(prompts)\n                    uc = None\n                    if searcher is not None:\n                        nn_dict = searcher(c, opt.knn)\n                        c = torch.cat([c, torch.from_numpy(nn_dict['nn_embeddings']).cuda()], dim=1)\n                    if opt.scale != 1.0:\n                        uc = torch.zeros_like(c)\n                    if isinstance(prompts, tuple):\n                        prompts = list(prompts)\n                    shape = [16, opt.H // 16, opt.W // 16]  # note: currently hardcoded for f16 model\n                    samples_ddim, _ = sampler.sample(S=opt.ddim_steps,\n                                                     conditioning=c,\n                                                     batch_size=c.shape[0],\n                                                     shape=shape,\n                                                     verbose=False,\n                                                     unconditional_guidance_scale=opt.scale,\n                                                     unconditional_conditioning=uc,\n                                                     eta=opt.ddim_eta,\n                                                     )\n\n                    x_samples_ddim = model.decode_first_stage(samples_ddim)\n                    x_samples_ddim = torch.clamp((x_samples_ddim + 1.0) / 2.0, min=0.0, max=1.0)\n\n                    for x_sample in x_samples_ddim:\n                        x_sample = 255. * rearrange(x_sample.cpu().numpy(), 'c h w -> h w c')\n                        Image.fromarray(x_sample.astype(np.uint8)).save(\n                            os.path.join(sample_path, f\"{base_count:05}.png\"))\n                        base_count += 1\n                    all_samples.append(x_samples_ddim)\n\n                if not opt.skip_grid:\n                    # additionally, save as grid\n                    grid = torch.stack(all_samples, 0)\n                    grid = rearrange(grid, 'n b c h w -> (n b) c h w')\n                    grid = make_grid(grid, nrow=n_rows)\n\n                    # to image\n                    grid = 255. 
* rearrange(grid, 'c h w -> h w c').cpu().numpy()\n                    Image.fromarray(grid.astype(np.uint8)).save(os.path.join(outpath, f'grid-{grid_count:04}.png'))\n                    grid_count += 1\n\n    print(f\"Your samples are ready and waiting for you here: \\n{outpath} \\nEnjoy.\")\n"
  },
  {
    "path": "ldm_exp/scripts/latent_imagenet_diffusion.ipynb",
    "content": "{\n \"nbformat\": 4,\n \"nbformat_minor\": 0,\n \"metadata\": {\n  \"colab\": {\n   \"name\": \"latent-imagenet-diffusion.ipynb\",\n   \"provenance\": [],\n   \"collapsed_sections\": []\n  },\n  \"kernelspec\": {\n   \"name\": \"python3\",\n   \"display_name\": \"Python 3\"\n  },\n  \"language_info\": {\n   \"name\": \"python\"\n  },\n  \"accelerator\": \"GPU\"\n },\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"# Class-Conditional Synthesis with Latent Diffusion Models\"\n   ],\n   \"metadata\": {\n    \"id\": \"NUmmV5ZvrPbP\"\n   }\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"Install all the requirements\"\n   ],\n   \"metadata\": {\n    \"id\": \"zh7u8gOx0ivw\"\n   }\n  },\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": null,\n   \"metadata\": {\n    \"id\": \"NHgUAp48qwoG\",\n    \"colab\": {\n     \"base_uri\": \"https://localhost:8080/\"\n    },\n    \"outputId\": \"411d4df6-d91a-42d4-819e-9cf641c12248\",\n    \"cellView\": \"form\"\n   },\n   \"outputs\": [\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"Cloning into 'latent-diffusion'...\\n\",\n      \"remote: Enumerating objects: 992, done.\\u001B[K\\n\",\n      \"remote: Counting objects: 100% (695/695), done.\\u001B[K\\n\",\n      \"remote: Compressing objects: 100% (397/397), done.\\u001B[K\\n\",\n      \"remote: Total 992 (delta 375), reused 564 (delta 253), pack-reused 297\\u001B[K\\n\",\n      \"Receiving objects: 100% (992/992), 30.78 MiB | 29.43 MiB/s, done.\\n\",\n      \"Resolving deltas: 100% (510/510), done.\\n\",\n      \"Cloning into 'taming-transformers'...\\n\",\n      \"remote: Enumerating objects: 1335, done.\\u001B[K\\n\",\n      \"remote: Counting objects: 100% (525/525), done.\\u001B[K\\n\",\n      \"remote: Compressing objects: 100% (493/493), done.\\u001B[K\\n\",\n      \"remote: Total 1335 (delta 58), reused 481 (delta 30), pack-reused 810\\u001B[K\\n\",\n      \"Receiving objects: 100% (1335/1335), 412.35 MiB | 30.53 MiB/s, done.\\n\",\n      \"Resolving deltas: 100% (267/267), done.\\n\",\n      \"Obtaining file:///content/taming-transformers\\n\",\n      \"Requirement already satisfied: torch in /usr/local/lib/python3.7/dist-packages (from taming-transformers==0.0.1) (1.10.0+cu111)\\n\",\n      \"Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from taming-transformers==0.0.1) (1.21.5)\\n\",\n      \"Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from taming-transformers==0.0.1) (4.63.0)\\n\",\n      \"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch->taming-transformers==0.0.1) (3.10.0.2)\\n\",\n      \"Installing collected packages: taming-transformers\\n\",\n      \"  Running setup.py develop for taming-transformers\\n\",\n      \"Successfully installed taming-transformers-0.0.1\\n\",\n      \"\\u001B[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\\n\",\n      \"tensorflow 2.8.0 requires tf-estimator-nightly==2.8.0.dev2021122109, which is not installed.\\n\",\n      \"arviz 0.11.4 requires typing-extensions<4,>=3.7.4.3, but you have typing-extensions 4.1.1 which is incompatible.\\u001B[0m\\n\"\n     ]\n    }\n   ],\n   \"source\": [\n    \"#@title Installation\\n\",\n    \"!git clone https://github.com/CompVis/latent-diffusion.git\\n\",\n    \"!git clone https://github.com/CompVis/taming-transformers\\n\",\n    \"!pip install -e ./taming-transformers\\n\",\n    \"!pip install omegaconf>=2.0.0 pytorch-lightning>=1.0.8 torch-fidelity einops\\n\",\n    \"\\n\",\n    \"import sys\\n\",\n    \"sys.path.append(\\\".\\\")\\n\",\n    \"sys.path.append('./taming-transformers')\\n\",\n    \"from taming.models import vqgan \"\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"Now, download the checkpoint (~1.7 GB). This will usually take 1-2 minutes.\"\n   ],\n   \"metadata\": {\n    \"id\": \"fNqCqQDoyZmq\"\n   }\n  },\n  {\n   \"cell_type\": \"code\",\n   \"source\": [\n    \"#@title Download\\n\",\n    \"%cd latent-diffusion/ \\n\",\n    \"\\n\",\n    \"!mkdir -p models/ldm/cin256-v2/\\n\",\n    \"!wget -O models/ldm/cin256-v2/model.ckpt https://ommer-lab.com/files/latent-diffusion/nitro/cin/model.ckpt \"\n   ],\n   \"metadata\": {\n    \"colab\": {\n     \"base_uri\": \"https://localhost:8080/\"\n    },\n    \"id\": \"cNHvQBhzyXCI\",\n    \"outputId\": \"0a79e979-8484-4c62-96d9-7c79b1835162\",\n    \"cellView\": \"form\"\n   },\n   \"execution_count\": null,\n   \"outputs\": [\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"/content/latent-diffusion\\n\",\n      \"--2022-04-03 13:04:51--  https://ommer-lab.com/files/latent-diffusion/nitro/cin/model.ckpt\\n\",\n      \"Resolving ommer-lab.com (ommer-lab.com)... 141.84.41.65\\n\",\n      \"Connecting to ommer-lab.com (ommer-lab.com)|141.84.41.65|:443... connected.\\n\",\n      \"HTTP request sent, awaiting response... 200 OK\\n\",\n      \"Length: 1827378153 (1.7G)\\n\",\n      \"Saving to: ‘models/ldm/cin256-v2/model.ckpt’\\n\",\n      \"\\n\",\n      \"models/ldm/cin256-v 100%[===================>]   1.70G  24.9MB/s    in 70s     \\n\",\n      \"\\n\",\n      \"2022-04-03 13:06:02 (24.9 MB/s) - ‘models/ldm/cin256-v2/model.ckpt’ saved [1827378153/1827378153]\\n\",\n      \"\\n\"\n     ]\n    }\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"Let's also check what type of GPU we've got.\"\n   ],\n   \"metadata\": {\n    \"id\": \"ThxmCePqt1mt\"\n   }\n  },\n  {\n   \"cell_type\": \"code\",\n   \"source\": [\n    \"!nvidia-smi\"\n   ],\n   \"metadata\": {\n    \"colab\": {\n     \"base_uri\": \"https://localhost:8080/\"\n    },\n    \"id\": \"jbL2zJ7Pt7Jl\",\n    \"outputId\": \"c8242be9-dba2-4a9f-da44-a294a70bb449\"\n   },\n   \"execution_count\": null,\n   \"outputs\": [\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"Sun Apr  3 13:06:21 2022       \\n\",\n      \"+-----------------------------------------------------------------------------+\\n\",\n      \"| NVIDIA-SMI 460.32.03    Driver Version: 460.32.03    CUDA Version: 11.2     |\\n\",\n      \"|-------------------------------+----------------------+----------------------+\\n\",\n      \"| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. 
ECC |\\n\",\n      \"| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |\\n\",\n      \"|                               |                      |               MIG M. |\\n\",\n      \"|===============================+======================+======================|\\n\",\n      \"|   0  Tesla K80           Off  | 00000000:00:04.0 Off |                    0 |\\n\",\n      \"| N/A   66C    P8    33W / 149W |      0MiB / 11441MiB |      0%      Default |\\n\",\n      \"|                               |                      |                  N/A |\\n\",\n      \"+-------------------------------+----------------------+----------------------+\\n\",\n      \"                                                                               \\n\",\n      \"+-----------------------------------------------------------------------------+\\n\",\n      \"| Processes:                                                                  |\\n\",\n      \"|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |\\n\",\n      \"|        ID   ID                                                   Usage      |\\n\",\n      \"|=============================================================================|\\n\",\n      \"|  No running processes found                                                 |\\n\",\n      \"+-----------------------------------------------------------------------------+\\n\"\n     ]\n    }\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"Load it.\"\n   ],\n   \"metadata\": {\n    \"id\": \"1tWAqdwk0Nrn\"\n   }\n  },\n  {\n   \"cell_type\": \"code\",\n   \"source\": [\n    \"#@title loading utils\\n\",\n    \"import torch\\n\",\n    \"from omegaconf import OmegaConf\\n\",\n    \"\\n\",\n    \"from ldm.util import instantiate_from_config\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def load_model_from_config(config, ckpt):\\n\",\n    \"    print(f\\\"Loading model from {ckpt}\\\")\\n\",\n    \"    pl_sd = torch.load(ckpt)#, map_location=\\\"cpu\\\")\\n\",\n    \"    sd = pl_sd[\\\"state_dict\\\"]\\n\",\n    \"    model = instantiate_from_config(config.model)\\n\",\n    \"    m, u = model.load_state_dict(sd, strict=False)\\n\",\n    \"    model.cuda()\\n\",\n    \"    model.eval()\\n\",\n    \"    return model\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"def get_model():\\n\",\n    \"    config = OmegaConf.load(\\\"configs/latent-diffusion/cin256-v2.yaml\\\")  \\n\",\n    \"    model = load_model_from_config(config, \\\"models/ldm/cin256-v2/model.ckpt\\\")\\n\",\n    \"    return model\"\n   ],\n   \"metadata\": {\n    \"id\": \"fnGwQRhtyBhb\",\n    \"cellView\": \"form\"\n   },\n   \"execution_count\": null,\n   \"outputs\": []\n  },\n  {\n   \"cell_type\": \"code\",\n   \"source\": [\n    \"from ldm.models.diffusion.ddim import DDIMSampler\\n\",\n    \"\\n\",\n    \"model = get_model()\\n\",\n    \"sampler = DDIMSampler(model)\"\n   ],\n   \"metadata\": {\n    \"colab\": {\n     \"base_uri\": \"https://localhost:8080/\"\n    },\n    \"id\": \"BPnyd-XUKbfE\",\n    \"outputId\": \"0fcd10e4-0df2-4ab9-cbf5-f08f4902c954\"\n   },\n   \"execution_count\": null,\n   \"outputs\": [\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"Loading model from models/ldm/cin256-v2/model.ckpt\\n\",\n      \"LatentDiffusion: Running in eps-prediction mode\\n\",\n      \"DiffusionWrapper has 400.92 M params.\\n\",\n      \"making attention of type 'vanilla' with 512 in_channels\\n\",\n      \"Working with z of 
shape (1, 3, 64, 64) = 12288 dimensions.\\n\",\n      \"making attention of type 'vanilla' with 512 in_channels\\n\"\n     ]\n    }\n   ]\n  },\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"And go. Quality, sampling speed and diversity are best controlled via the `scale`, `ddim_steps` and `ddim_eta` variables. As a rule of thumb, higher values of `scale` produce better samples at the cost of a reduced output diversity. Furthermore, increasing `ddim_steps` generally also gives higher quality samples, but returns are diminishing for values > 250. Fast sampling (i e. low values of `ddim_steps`) while retaining good quality can be achieved by using `ddim_eta = 0.0`.\"\n   ],\n   \"metadata\": {\n    \"id\": \"iIEAhY8AhUrh\"\n   }\n  },\n  {\n   \"cell_type\": \"code\",\n   \"source\": [\n    \"import numpy as np \\n\",\n    \"from PIL import Image\\n\",\n    \"from einops import rearrange\\n\",\n    \"from torchvision.utils import make_grid\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"classes = [25, 187, 448, 992]   # define classes to be sampled here\\n\",\n    \"n_samples_per_class = 6\\n\",\n    \"\\n\",\n    \"ddim_steps = 20\\n\",\n    \"ddim_eta = 0.0\\n\",\n    \"scale = 3.0   # for unconditional guidance\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"all_samples = list()\\n\",\n    \"\\n\",\n    \"with torch.no_grad():\\n\",\n    \"    with model.ema_scope():\\n\",\n    \"        uc = model.get_learned_conditioning(\\n\",\n    \"            {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\\n\",\n    \"            )\\n\",\n    \"        \\n\",\n    \"        for class_label in classes:\\n\",\n    \"            print(f\\\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\\\")\\n\",\n    \"            xc = torch.tensor(n_samples_per_class*[class_label])\\n\",\n    \"            c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\\n\",\n    \"            \\n\",\n    \"            samples_ddim, _ = sampler.sample(S=ddim_steps,\\n\",\n    \"                                             conditioning=c,\\n\",\n    \"                                             batch_size=n_samples_per_class,\\n\",\n    \"                                             shape=[3, 64, 64],\\n\",\n    \"                                             verbose=False,\\n\",\n    \"                                             unconditional_guidance_scale=scale,\\n\",\n    \"                                             unconditional_conditioning=uc, \\n\",\n    \"                                             eta=ddim_eta)\\n\",\n    \"\\n\",\n    \"            x_samples_ddim = model.decode_first_stage(samples_ddim)\\n\",\n    \"            x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \\n\",\n    \"                                         min=0.0, max=1.0)\\n\",\n    \"            all_samples.append(x_samples_ddim)\\n\",\n    \"\\n\",\n    \"\\n\",\n    \"# display as grid\\n\",\n    \"grid = torch.stack(all_samples, 0)\\n\",\n    \"grid = rearrange(grid, 'n b c h w -> (n b) c h w')\\n\",\n    \"grid = make_grid(grid, nrow=n_samples_per_class)\\n\",\n    \"\\n\",\n    \"# to image\\n\",\n    \"grid = 255. 
* rearrange(grid, 'c h w -> h w c').cpu().numpy()\\n\",\n    \"Image.fromarray(grid.astype(np.uint8))\"\n   ],\n   \"metadata\": {\n    \"id\": \"jcbqWX2Ytu9t\",\n    \"colab\": {\n     \"base_uri\": \"https://localhost:8080/\",\n     \"height\": 1000\n    },\n    \"outputId\": \"3b7adde0-d80e-4c01-82d2-bf988aee7455\"\n   },\n   \"execution_count\": null,\n   \"outputs\": [\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"rendering 6 examples of class '25' in 20 steps and using s=3.00.\\n\",\n      \"Data shape for DDIM sampling is (6, 3, 64, 64), eta 0.0\\n\",\n      \"Running DDIM Sampling with 20 timesteps\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stderr\",\n     \"text\": [\n      \"DDIM Sampler: 100%|██████████| 20/20 [00:37<00:00,  1.89s/it]\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"rendering 6 examples of class '187' in 20 steps and using s=3.00.\\n\",\n      \"Data shape for DDIM sampling is (6, 3, 64, 64), eta 0.0\\n\",\n      \"Running DDIM Sampling with 20 timesteps\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stderr\",\n     \"text\": [\n      \"DDIM Sampler: 100%|██████████| 20/20 [00:37<00:00,  1.87s/it]\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"rendering 6 examples of class '448' in 20 steps and using s=3.00.\\n\",\n      \"Data shape for DDIM sampling is (6, 3, 64, 64), eta 0.0\\n\",\n      \"Running DDIM Sampling with 20 timesteps\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stderr\",\n     \"text\": [\n      \"DDIM Sampler: 100%|██████████| 20/20 [00:37<00:00,  1.86s/it]\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stdout\",\n     \"text\": [\n      \"rendering 6 examples of class '992' in 20 steps and using s=3.00.\\n\",\n      \"Data shape for DDIM sampling is (6, 3, 64, 64), eta 0.0\\n\",\n      \"Running DDIM Sampling with 20 timesteps\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"stream\",\n     \"name\": \"stderr\",\n     \"text\": [\n      \"DDIM Sampler: 100%|██████████| 20/20 [00:37<00:00,  1.86s/it]\\n\"\n     ]\n    },\n    {\n     \"output_type\": \"execute_result\",\n     \"data\": {\n      \"text/plain\": [\n       \"<PIL.Image.Image image mode=RGB size=1550x1034 at 0x7FF8B6840F50>\"\n      ],\n      \"image/png\": 
\"iVBORw0KGgoAAAANSUhEUgAABg4AAAQKCAIAAAAXQRbAAAEAAElEQVR4nOz915Nt2Zbeh40xpll2m8yd7vhzyt57q+p617fR3WgAjWiAEKEgqSAhECAfJFKBBwYYwVAoQm+QieCDvMRQhKgAJQgSCME0TANNROMCbW/fvra8PaeOTZ+57XLTjKGHtXeegv6GmlWRJ3Obteaca61pfvP7xgT4PH2ePk+fp8/T5+nz9Hn6PH2ePk+fp8/T5+nz9Hn6PH2ePk+fp8/T5+nz9Hn6PH2ePk+fp8/T5+nz9Hn6PH2ePk+fp88m7P959YUDcMiaR7ltHRbD3FjLJrEaYlhVres8HhzsjkdDrSBWq9ViVjetTckaU9Wua32IUQDKPE1T45x3UazVolReZr72i1nd+sBAmbEhQjHIdgZZvVwCYDHKq9oL4yClotSxDYtFJcrcuLNXLZ1rwmR76/qNvUTpqgouqtFkJI5CHYvBqBxOSNPZybNBni1XSyBvsyRyRHJts4hNB6gO7k3qOhyfnO3vaYrx9HKxPSkMqrZrrNI2UdNZpYzSJNYgoRhrfBsZRBM2XccCJArI6MR0deNCk+dGoYTA2mgA8S4qA0ar6ONq5T99fF4Oh9akTePuXN9TQMvZbLCbmUSzC3madF0Acnmq5tNKkJPEVFWrNSGiDyCIibWgkBmcc6mxbdt1nbNpqrW1qe6WKw6xC8ygQSuK3LZeIGqGeVXfur3/7Mn85HSWFwnp1GMcZsCedaKSXM2nq6Zy29vlzYOtqgrONSJmXncH2yPAkGcJQ+hcq1QR0eQDG1375PEzdl1aDr/6tS8uZ8vDYz9b+cPjs0ePzhbT6pUX9+5ez5NMXIiCirSyVhVlUtedEAGBNZrZN65lQBScTZum8ghGSO3ubu3vjuu6U+kgH28xQ2JpsjWArtXaXFxeJgm1q/mjp08jh8cPjsyw3J+MQIxHWwwTHyhJ9O2DLYyxrUPT1nmmjU7rjsloiWE2XXhfJ6mpVzWCRPH5UOVJYo09PqlRJylJdC4hEEz+y//zvwaA//X//K8hkNJIgESIiIAAgEgoAsICICIQRfo3hEUQAYCIkJQiFI7MMYYom+cLQQQAkQiBlEJErRSu34H+FNifqv9BAAAijIDCLCACCCICyDFGZh+C9yGyKEJCUkRKKURERBaOMQJIiMwiIiDCINwfPDE6TYzVVmtNiNQXCgAAiRAEBQAR+xeg/7YwiMimqaB1BvtPIYsAMPSVAoJIhLR+D1EAhYX7jIgIQGQREAEWBgEg7GsFNqfFzQdF+oaprzsRAGBhEQABJCQk6d8Ukf68pLRRKIiE6/yICEhfw0iA6//Wh15njJkFBIQQrxpEYZH1dcH+FmAWRACBdf0gKkJEREJhFoF1XQOICAvDZ3KAz7/UF7YvaV8+FJHNTSVERESbo/A694Dre66/x5AQBAEYhJl5XRtISH1VAEiMMTIjoggQIhCiCBICEgAQEiH2F73P4NUlZub/yX/xvwSAhx/83aQpGuc++OPf06F+cnLx5OiiO188vX+OmX3jSzev723PpmdGu3fun779eN4C1i1XTdRG7YwSC5KSymxq81KsevDp06r2aaLv7G/nSQYxVC5EkjKhYZbsDotRQpm1aRrZ89llNRooAJjNu6OLubHqhZdub5fF+dHpvRdvmGEWs2LZ4f13HsQQVqvu6Hx2sZw7jGhUbvyt/eLk6GK8P37xC7dffO0OCdfdUrimEE+PVu++ffz0qFEhJsw5S6LQR946OPirf/0/27r7DbB5gfWPfuPv/P5v/ubR03MwKlVcZpBaGOWKAJoYCcU5nFZh2cUuImnDSntBbXVR5Dvb6d2Xt154de/W3euDYZGnKjEISqmkcISDYS6gVJFCtwAJwS1WgjHKR7/7zt//f/3gBAafHraPH8+BtXMxMpiECMUHQLO+KxQBCwJpRQBI3kdhtkYRAAuCAAjmFlOFCsLI4kHCr06ygbSTnMapVk6MUYpAojRNFzvv2pBqRmAFrElrSrxgkRthQJ2cLrp0Zz89uP3iF18fjEbvv/n2b/7jf/nmuXeTiU34crpcdNGUBgCcY6PJpgTMTeuRAEjFIAqwSK0IARL72LjgY8QYSosHJb1+I9vJaXeohla0jnmRGTvOy/FwWLTzWQgtkEQRDhBF7+1NqqbNClvP27qq062cRMWIZTkcDjICaruV6+oqdHmOCSWEiTLF+No1SIvV0+M3f/tfxSid89F5Zu8lYGpu3dmx2l5czL33hiNLbDrvhpO/+rffA4D/y3/+HylmL53EGBlc5Mn+cLX0nYtpnmgA9pIPUpsmgDp0XI7Hw/FkeXL24bsf3Xv11tHDDx9/enS5aIe5Mai1iqOtHICePDva2d0/uHvnO7/0J2bzlWN38fT88mIxGfHTRx+98+ZHD49nXmcg0HahCqwVJQSd940XVkoRIvBWbva28hi4yG23aLrG7QzSXOFi1trUDIfpqg13XrwmgBFw7+bBrRs3kiwlhUoppQ0LI0JwTIjKaEBBkAhECo0xro4iZIvEaBUjKGOFpW26vLC+7Zz3iFEpLQAxBGFQRhmVeFDlaCgRg+esxMXFhTCHwGQURN9UnYDWKVmjQVAZQlRKK0U0v1wCxnJQok41cT2d+ujPT8/btgXB+XSRFxYQTy9mJMECoeTD/Wtf+NpXgujJ7p5j0qnmgKCS8dYodDEszz59/837b//kt7//gxdfe+mbr77SreqD2zug3agcnZ9U5SQ5eno8ngy2r28fHc3amm/euHb+dDGbLhbN8otffaGtYPps+sHbP9maDO9/dP/Z6UUXQJDHZZJySGJUJPlQr2q/crw7LAVVMRpO9iaTye7jJ9NBmQ0Kruu4bJ22WtliUOS7u3uD0eDifDacbA0mxfGTYyB1cG1PGetcUGSQxc3OLpaL3ZvXg3dKJVpDDL4Y5L7zAqItSdi07ajzcshAXdNVlxdki52b16vTp9Vybq1mhjS34mPXtABMKrRdxCQPHMfjyWqx6lzbOEqSMLSKtJpfzmP0yaB47QtfOjta2IQgt7/0l/9nAPBr3/0qRHQcWaIxFAOXRZomlhTGGNq2Cz5yFKVVnqVGKSKFikgRiwhI4BhiiAh5YgdFopVSiJ33TdM1dVNXrvXCItYaa02WGqVUYizHCAxkdJJoRWo4yLMsFSSlCCLHGAgVADILAJGCGGPwgUEiR45BKdRWm0QTUfAeMQoCE81XdUDvQg3GJUOlcgwCidYoLDGmxiq0Eg2qJLRdqJ1voyZtySRGB+AQgQQJVVN5RQZCgOiWy3q6bBbLmpmt1VmZDQfDIs3KLEdUViuBsFp2nXeLqgLE2XQ5r2pllIioJLFlsrMzSjVEiaQBxUfvus6TVoxoU40gbeeMsWlqu8CMIs65tkWtEFEYYoyEKER9tccQPcg4TxUIKOUgutASRwJhTQJYr9q6rl1gVJQaEzzEwIW2CWpgdD4wRa3JKsxykyS6aZ0xRkQ3tbCXdtGGIKlSmUn6EVXoQtsGRLJGt42LMexsbxd5cXj49Hy+MIlJb
AJBgo8mMUAgQRQRkdZK5UWqAEIQBlJKEak0sZnVIII6XswWnQuRhVAprUUwxuB9ABTsx1lKaW20MSDiXVu3rTZ6e7xFZICFUZQCTUpZY40OPnRtF4J33nNkEUgTgyzMjBIjswsxhIhKaQGOISGExPy/f/uPAODu94aKQIgRGVBAUPrR4Xr8iST9mGo95sN+yIwI6wH2evyMAIJXk275zPy7H+iu/18P+z7zGbyarGM/FN28BfDZU2wOhoj4bx7+6ryfGV2vh+lXQ2MEBAK1Pjn2o20BeV4C+cwP2bxydYr+UFfv4/rrKP1HZF03CCSbygKU54eQfnyOAIiwHs9gP7xdzylgPXjux/gggMDS15ZcZen5UB4AoT/780p/XtR/Y7bTn5TWr69/rvO8LtF6IN6fZj1HwOeXYXOZ1pPGvg77Ml5dx+cXcj3FkPW1kM/MfdZDfhRBQBQhBhJAARIgQBRAQAIiAARaX+z1rUEItD4NrXPGAAIiIAwgwOs/+2mKgMB6egb9RFaEcH1ToAggASKbn/32IQDoPu9kqQ3RJnpWtVliFIaqCtPV8rWvv5CXW4cfP5vkpkjtYjYfTYaUJmGOkTF4WCwabbCLULexyG1A6GIERQgcQVzTOhcIwWaKgZXSsQvACOzmc4+Rs8J0rQ8dI4Xh1ihNiQ1pRQ5I0DaN961EF1Xspqfno+GOcIgtt5dNouzg7rjcyuppMxkNXVUtz44m+6WGorlYaqmMWpluSUS4wsVp25yfVyph5oEZ1OfS1svdPZ0pNmgHNtU2JQkEXrjDrqYQ2QlYSo2YNAk1NU3s2ghOBnkeutrkyiTYzza14mKQzc+XsfUo5uUXbj17Mm/bICp5+mx174Xb1+5uPzt8TNBgtRjdvj67OM/ypG1IMQrharpQGPIyZzJh4YmUd42g7lqXJuibbjjM1Pa4rmK9aiQaANXVSw+gtCmyYex8DGGwUzQXVVzxj99+TIzFYJRmab1qtidqZ1JU83r3+mhrdzQ/rtpFE3ysLqogvLtbnJ7MD/a3u7bTIQgGic2ta5P5rNOFzcrs8NPVQbLTabd7bQyXJy/f2z159Mk2qclL16s2Ng7yLPVtk+bWGup8GI9y77zv2lQzJYoUsffp0GwpXM7qKIS5zdCsarhcNKWZu2FiSSjE+ryqljAc5UbRYDBYdTEvdm7c2p5PL3du3Votmkx/zNKUBV6cVJ2bxyY7vQgH10dPXVckVjyMR7kxanYxQzKp3RKr3eI8Om/bsDcp5tOqWnU2NRAazDITWFkLxEluytKupt36KUAgQsT+MUNhEBAikAj9TFsEQgiIGCX2PQIRIZCECEpYdIyRhWOIRNQzFeZ+fk59u6IQ188nACAoRGEhQmFRChkEI2yaxnWHQ4jMwhIFMEQOkSOLiMQopIlFNCACBh9YODIDSowcWaAHAQjAbLQWgOhDRDJKIQIICgshATDw1clU5L5HRBHGDacQlr5WeigmLABASCIMgMDrDlIg9gUFxDWGYUEUYPGRhbBHWAAAQoKCLACilQbEno5wXJ+UQZQiQmSRGJiUEmZCAAFa9xASQ+xb/SAhBOyzsGk5of86EhIDEInIms6JMAAzX5GsCNB/FhBZQJh7psOIRCTMkSORWl80Qo4AgKrHUohEFH1EgMiM1Be3Z0B9x9IzGwAWQekPDgDMAogSuc8qiABHBEQiUKrvR1gwhkj9vceRJRIRi/jg++oy1gKCkBBSiI4FOHJk7sETIGpl1qyNpb8le4CoiBhAOCJClIiCuBkT5INQtfUqNdd++RuHf/B7f/yv/8Da3JBBCTGo80VzuHx84yD9rT+8/+i8njUxH6WNiwwQAc6XzUBTkquD3fH2zuCw6nZ2B5O22RnluwejrdF2XXd18OORvpYriiHP7PZk9/xsNS6zf/Hbf+iAdrC4nIWLRbN9bXTvlduHT6dN5K27+5/Muvf+6OOLylfOtRdzjaFZNVlutg6KkcVr10f//n/8508ePnnnj384ur370ve+CIPs+PEZKCpH44/f+vBHP3/cdoYTVgq1MikhhbB//eDVb/9iefOF2rnm8uLo+J0HP/5D08zu7qdtgKLEa9eS8STDiKlVWZGkiVpNfUTNBM7TbOHPF63X6nIZ6to/Pl59dDz1v/eR61rFPM5z68g1XmmTDlRpSaWkMjrYK3a2y2g8jjM1MJcfzGw5fPHaniqXwvH83KHGzkdlSRgQBYg4MiFFASQ0lgQgBlYGGQgVKaTQeKWJIwSFLULoApPaG5rpsikSOr/wNASYNfdeOGi6rqsandrTmfMdpSlaqy27QimOwKQhSX3LOktTSpfBjMbbjVB31u0kOxgwLbKYsEPFRieZ8SJN49LUKEtkdFN1YtAkynesLGkEoGBQGSWiwSYKEQ62hlu5ujOIL+wZS3GYm7iqEcOg1D6CSSjL7fy4bhbLpEiTUYmp6irftW2embZurIZkK7EjW180GKUskrzQ9z940taNyqxKDRnLzmut891xlQyLgzvX0q0//G9/A5I0GpTMtpWI0k0bpqvVoEiCEtdB3UWloyptfTntn4LJdjafrYbDrfPjS0WSKt3UngTyJMvyUbNcEDOR7rwkGqXjTKXWWizS26/evXW3cGeDQzmfjHQxTKv59MatHWb8+L2H125sPXz46f6uXZw+WizqcrKbDYb39kbD3Cm7enz/4Xir8CadTqtOJBKa1FaN61hFYGt1CKyV7gSbEA2haGJFZHXrY+eA8rTTeinaSahdZzQt6rCnjUMA5mGZhcoTR8ZojAGWYpjXTZOXWezi8nI+HJd5mqocQuQszQGjayoSUalOLGiNjn2SKGPMar7M8iwfDpbTmTXI4mLgIt3tgpfgLeQYPLumrerh7sSmWlyIDFaRtXo1XyjKbGaNVr71XK2GW8Pcpo0LWaZPLy+9hNC1iTVpaQhbVOqdtz+1idrbGxtJtRnfeOlOtrWV2kJpuzhfbOdbnoS0To2dnj989s7b9fnjpr648/K1b/2FXz8wWi3P3/3Zj19948Xh/uhyKb4NNkqR2Hd//NHWMKvPLltj68vLva19bVMHmVNxsLX96qs3X/jC9uHT++UgbRddi+qpD9tK3RynXVVbZYMmpcRbzcyFgazMGpZyb0dzt7Wbn39yXLUddH6wle3f21s2sj0utw22TTe/aGPX7Ax3ZNktobr10r3pclUkucrhD/6/v/2ll/6DKgHnxSiKUcfOh65RpNJi0PjaKDKJbb2zlkaT3cXZ+Z2dW//8N38zNq/v7uSmVEjYtXGQUFQoHnzjwDs3W9Tt02s3d7doIj5YYyi6G9eK2bNL0MadL8rtslsu3n77Jy+89qV25cNyPSKyVrEgkYohCsiozPI8ARGJ7BDIKAIgg5rIWK2RANAmhhHERxYEguCjsRZQMyvS5H1gxsZJF3C5aEQZQem6YLPgos+SpKq7LEuMMZ2LLJgn6H0EdKC00gqC1yBCEAMgojCzi4yslPKe2QcMATUiBhDNqBR7ndBy1a2WTbAGTGd1TEot5IExVcpSZAckZJBIa9AKNHXLToJT
BFqhMHZtjLHTVnkn0TMp7YKTNnZde36xrJ3XRjOQyWxWJEmuilKTiYPMcudSTeiAWEYHO5fzNmZIrBmiCw6AS42pBPIRDEWJCr1JmAx6Ee+4qmP0LjHIzJ13ERC08nWrIJJEFvIuKq2ZQ+jEFokAdM53LliG8ShBLatVbTOFPnZtA5A4BwwCmlBAkD2HEIREIaPWCjS2HFwQRQKaujZ2HStNwQdgUaiYsA0CgVEZEUYSpXQdQ+RgrK3bDjUpmwWhLC+3treXrlOalFJk0WCilE6s0UiaFKJKE0MiIhKCKGsIlVKYpBoEZtNFs1zVras7n6apAAXHNkkVIgMH7wEEtCKtGFVaFCjACOS9Qvbe5bmlxCAhcoQYhUNdee+CSGiapnMOAYhIGZAgCtEY7VxHmpLEhMDCUVsbkNnH9bwgWS/pAaIAk4J+6r2epm+WF/vVtzXHWJOCNZEQkDVWwA1p+Swkgqtl0Q0tumIoiCDSLygDPf/4Zmz5/I/+CIib08oGTKyHxFdfkSt0ASBA0KMWBKAecWyOIrLBPZ/NXl+QPmObA8F6xXwzaVlPGwRBpF/bBBAWgvUsQQD7j9A6M7jOW3+KfigqnwE8uC77urC4riQBkH48C7Ae+YsIAspmuVo+m+PNr5sL0fM0BGGg54WRK8KHsrlK/SxKALE/wGYmscnS8+ql9eRDNmjw+SUC2mRysy7+/Bjr6eB64RZQsF9KXkMiEBIg6IERIggBI6zXjWG9KHyVXbxiXQJXi+BXyoR16dfihyuS1ksFBBEEQfoJB2JEUOj7I6xR0aKSyf4osUG85S4+fXh+86Wb2y/uDLfz6eHTF+8NR3l5dNEVQzubrZzriixViSb2SMgArZfBsACEEDjLkrJMXOOWqypN9XJej7ZKhXo0zhObdSvnvB+NjKs9aHXn1f3zp8vxUO3sZ++89+n29nirHFRd1bjoA+Zpeea7C9+9sTvuVjUaeeObr73z3sObL9ys583l1HW+mx3NfbMqBzrNRqFlsWb/xq3H7/1gZz/Jdm02zI/OLorhOC32Wjfj3JaTPWJ7dHq5c2uvWxy7zhejIesUiEe5np4/SsvEd6Ccnl1OGbAQbBqvyKRFAj4YiwaHqAMZY3Rez89c12FFSV5SYaany3ZaDzM72CtXNVzMWhzsLNrGe9jfLZIRsInXX7rpOjirfFnY6KtmucrSdFlDkuus1EmKnfNdxyah4XbW1k4QlLGguRznoMtlNb9+b7KoZgisM+M11DVXlT8/qy7reHTevvrF66+8dKer29V8fu16oYhuv/ZCUeDydHn75iQv8kdPL7yC2ayiglTVPrlc6UJ95Y2Xn95/d5jbeVW1LOKqZRVvfOlOezofH1yz+2nWTH/y5vs/vf/pF+/d3Mr42ovXPz6+CCnt3BrOplNA0wl0DAGpXa3KMtGBrTEdOwycaIncsdBgJ782GN9/sGw1QIIffPjR3RdvliMLwimmycC+9+Dhwa1rIcr+uPz06VSjGY4Hgcwb3/nW7OLJ8uxpPlSDcXl+0u3slB2H41nIbScRO2vccrk1TJfT5vjZeZIbuzMSn3hs9l+/d/7H75flPo6YeFWtXJqWHVPTNGmRUkdo7OZxUiGw0kiESBRjRAEGFAGO61k6A8YYhVlpDSAxBkUUoxCARBd8EOGe9UgUJFBKAZLRipkVremC9NwAyUdGgBCj6htcIiBkEGEBESLq1TTMEqIIcAix63xk7vsfbVApxSLMDETOxRBiL8FRWvW9VBRRSgkRKR2ZEZUAbWg9A0Kv7+mZNgvGDbXnwP2nYowgohABMcYoAogKCViAuRdAAQIiKY5xLa0RBqQegQFCjBwDC5GPzMJa6zUMAwDBEFmCAKDWipQiROYoAAzAPgCS0ooB+iV1RIJNjwyoBGKMkUFIeji1KbgAgBJCBRRBkJ/rowghxIiA3Eu3AAQginCMPZhDBCIFIpHFhQAcAUEhcWREFBZhUZoEMISgSCESEnJci5QQMUYRZqI+w0BILIwgCMSMwAzCLKAUMTP0ME/EWL3mbgDCrIgAxGgFIAgYIgNCEA6h/1IP5VAQBSDEGKPEGHDdzTOQNkYDEiHFyIQCINz3GSw+BBEU6TElxsiA3D8Fjz9+1C6Ib770sz98p/7Jh5Px+Ozk8vYX7nAMl4v2/YeHh20LH/HhaeNYbGLOlz5RNBjapvaXyzbdy7757Vc//vl7d2/aJ5fTzIQsdndG+c19eHL4OATcmiR3r29ffvBQYpwjPfr08cllOJrOGM1FE+5frkjZycH4yWX96e+9D0wRXJINpsu2ni8BcHs7zQp5/Qu3Dm7sv/rlLx7cvjYslMWZx/PfffcPfuXf+266N5n77HS+zK+PhpPb//D/9v0HH8we35csgzLRmYHdieXL1e7O4NXXv/z6934lL9OzTz85f++nuPh4Mpzlt2gwsGST4TjJB7YcjwTs1u6ImdIiDZ7zQc4xKmVCG5CcSeHicAWIHKSqW5vao6Np1frlKnz6yfmsCXXtSMTNu7iCDuXwwcIHnjeOiZPStqcN2GTmnygj6XDAnc/KxLKqVp4UZZl1PqaJiczes1IQQkRAH9gYojXYFUWoEJVBBoki2Va6qrtxYb68lx4k+qMPD7UxX/jOPRa9vIjNzHfOKQPaKkMQ2L+8P1hdLKzV02WzglCMCzDRe3ftlS/q8dgOh9Vhffv29v6WfXjROZLat3dvbD+dLiBENcyUoegCoDYZYERh0IoSg7FxOceXrxdfemXbCtpUj7YyrJwlt5362NVdCxTId37rIE2LhFeCbX366czXTtk0H21zkqR5uVycU2dUWixW1e61cWzry8PpsMxaHY5PzrqHT+oqmKREKrRGrYgyHl+f1MnoSAY3h7d4XusyF4VkYNV6MpjkVFf+claxhHxQtBLGo/xyOm+dz9P12MoHo+2grfxge0TgL08X4KUcZns39lY1l2YsIfgoddXQQNmhWbSLIrk+a3gVZP72+9Pzh9/789/98J2Pm7q++6UXE9Igdve6y0fwla+/dPb02b94+Buvf+P1VXVhym1lx3/wBz8eJXp39/rKnbhcD0b64f2LatGqXGOiVm1nEwMxWAWDwg5SvTVI2Yd6uuI2KiVbRXlyNtvZn1zMVxy7cpicn5xcuzEpMhOCZ8HgRJNRmRIB7yOD0plZdd5Hwk4U6Hy8xQaWzFkxdsu66gQiK0q9Z0pMFJzNOglkSJGQHYxY+HJaoacYo4BK8uLkfIpKl0lxcT5HUIjamDwE612IZPJh2nWd81GZDEBPF357qNMsLYaZSpQHjko9Ozy/nNaRQvC8WkwHUKKYxXmTmrwc6xu3DupFsOlWvr0XpFg2aI2gstWqA002dWdPPjn66IPLo8M7X7j2+Mm71+68GLbvPH12MgmFr9Vo59bJ1HeUK4k3v/DGp08fmmJwcHcvdvHs/OTOF++C6JOn8dGzWXUxHYa4PUy+/1t/dH42+8q3Xzk/Xv7sg09u3dq5eLK8WIQEokS/PUgb55dns2xM3/6lb33y0WqQG1Z2vnTV0WIVOGi+fefm0dH
y/LJu0BZeLy67UM93t8rJ7sGtF148efapDurk6HS2crv72oK6+8WvnM6roCKhAZYomGhjNEbPq9q3i84kKpLqvHc+HD18Uk8v6tywpg8efjgcf529DrFWyta1c02rlMmGZZpnDhKqV7Pzs0fv/Phr3/p1GO0sz2cXp0tywkZe+MbLh09Oj5/WZZk/eHg5Gg3G28V6diOotdYKGhatqbRqNEhdiE3VJNqwoi52RIq0QiRBVEqJCAMGAQYx1ihjBDG1RltJc9M14lpfu1jXAYxtOg8IHLkTbjrfJVEpHRhIBSATIpvERODoPZEW56VzmVUYMUa0RgFKjDEgWI2swJAxqW7aTiF5MrVjdt5X3sUWDKpSZ2NSCVGCXeCglLXYuRAi5knekaY0hdD5bmETmC27NM1VVtSVsGOMJka6OJ0BYGIVIV6eLz1D56HzgCqqhLYnpdZoSIU2dg2j0xK6Zde5LiIq3wRSyWQvHXZZZLeoqlndMEjt2jzTjXQ6TXzXgXdC6JAgNTEyoOgECdC3IQSOLbnWaQMYAIgBEBQhsiGKzsfWSYCuapYhmASDuAARvJfYeXapTnJTXqxqH1uTaGOV79xoKw81W8DJzqBqwtS3nUBmjEqUIHgGlSWoyFfeeW4aIdJJQpFBhLrOx+iiYwHpegolEBSO07QcDIKvJnWpFKR5SqSrKqRJkmd5liYKtY+AwOK9sSrEmKS6bVyIYbZqOgfzRb1qKuZIiBSjRkrSNM0LJbFeBaCoreo8kzLGJkqnqbEhhuV86mI0xkUW77y1BoVD6NhLZFwtG+bY1E1kp4lYpKoqrQ0KoyLmiAhaJywgMaDCGILijbaHGamff28EIwJAa2IBBBuO8m/Qmg2JWItbPiO/Wb+/4T7yWSWLXP12BR0++8tzDrGmHlcE6gqq9LoTwefn/Yxofq0putK3rJU8vQsAhbBnLRvNDa6hlmwEOriGRkAo60lID9A2aAZxA1KeH37NMBDl6sDP60lkI7650kJ9pli4hiwg/Th3c0We198VdJPndbOubn5e1J4WiQit2dcVMyPcXCjc+Algkz15fmTE3nXRy3PkuTmgZ0ciSP0L65nJhgluqpnW4qd12TeUiAjXYp8NIcR+pI5rCZb0d9z6p2yG/M8vqQisXRZX3Kp3KgACb/7cqM43oiXY3BdryPm84Ff1K4K0von619aoaFBaiV1X+1SXg62ts9Pl1sEEy/H7b74t1ckXfvX1s8OnMRY7ezsKa5bUQIydb2PXXTQSAaIM8/T8ZJ5laUQdSYVG9nf3dFaUSVWWo8vLJXpE0kWGeUq39obHJ7Pas221CU1YcWfa3XLSNnC4mFvDX/7SzaNn08k27b146w9//+fv/rS9c3Cwmk1nnz6m+dyxSXTK81ViyhyjL9V4d18rVFI/evpRcXdv+1oy2E4+ffzBC+mt7YGuWxcJ967vvfKFOz//6XsKRmXulosTBdGUSZrrql6E0LZdTJJA6K1RrYsREo1lagpIp5RwkXfsG4lAarDyXrMOrkak4ShvVkuVmvmyMuPi3s1777/5caLMvL6Mrn33ow9/+U/++dicGVslSO1qNt6dkKUiMxbFTy+3JqnVKBHbZsXMFFXwnGaZNXY57YhMau1yOg9epzZz3SLEuu04S7R3TaiX1bLb2x2dzpaPjy5R53u7u3fuvtRUyyzRe69cG2wXy7l3nPiLFljX0TUrFxUGEVT06f3z1KZuUfmu+ejNNzNLTdfU0bFCjCrlcUymGFaZmnz87uHtnb0P3z784r3bKbnDx+8Ntu/e2d+eFOXp4aPRZKSUbZYLVy2Z2WqbqbTM8/PFCWpNOFguT4fjhCi5PJ/V2qnMTo8rrYp0ND5ftCez893dkqtQrzSG7MlH52m+neDe1u6ter40jTJ2CFyN8m2XLLQO3/raKz/6wc+62rPBT57M46BUqI4vzxih44aiLB2otAAIdbckN6+PwsG2eIGnZ8tBpu++8NIn775T5qPJ9t6Tk8uubuaXi/4pYIlK0VpIIrFfLmAfRQCJfIjMzMIAGJmD88CROSqiXrIBoHq9j9ZEhICgCZWivjFU2E/VQSniyDFEJOIeKKwNasAxoPSCS2HuFyuYSCnCwBCYPXMEiSDAopUKIYKIVjqEEAA650IMa+mNIiL0MUgPcTg4z7lNEKEnS8xBKUJUKLJu8oXXbSkzAvaNGCJphSJCCBxD36wwR2TsUYhCFIksLJGBuXfnKVKRue9MFCkA0QJeWDgi0XodpgflgFGEkIiwZzGgeogFIAxEIsyAIIKkhBkRAFEhRBbmwCIxBABgBE2ECnoNqyKFwhJFhDZYbC21jOsFCUYAIoyRRYRIXfUpHAWBEYGZow9EyMwiHgC01j3GiiFGAKV6gAaIgIqirJ1hAEBrJVCkdfcIwjFKr8dHrfsbhLWi3jlIADHGEPyVAA2VAPTuuL7djwCoe+CkSSkFAkgQQpCePCFQj/2QlNYCCEDCIgSbPrj3jDCC9P26VoaFtSYvfLUw9Bv/4J/kDX3yoDk5c/f2zCBJL1dy//E5OC8+mCRpWq6qlhmv7xTTaaMj5WU+LkvFq8RYF9TbP3t426TTj55Vs8WNG7u/8JVb7/70w8NmFjxpMM25//GHT5oqVqvOkkqtWjgfgCURiGKKdF5Huqy82JbofFUNRrmp9db44IVbN7/znRff+PKr4XJ+89714uAm5ePZ8UVC0w9//HuPz3/6nb/wC1XIq9MoAwAvi7Pm3e8/eO93PonB7NgiQUoixLOZ8e2d25Nf/4//Ek2+OG/yT37vp83hm/7igQ6ntw/s8Iv3Lk9W+bjMt4Z1I5Bta1tAPoqeK9HJqGiNamunQIsNSlYRYzJWRmuDdhQlK/KDW1ElFhUtLiqbJBQxGZh2WQlEQupWjU5sNau1xs53l0eHreAnH595Ds8W9UdExyt/3gToYhQkzxJJKdu1rdEqMbpzARFaH0Qlmsi1XhESKKM0g/jGW4VaBV17XYdfeuPeyf0HX3pxvDhZ6W718HipytH5vLNWF4lEHzFqUepo5hOnJcThyDKo4ONyNq0IX9zdcSrzddssKru9rUPSXi5WIVSkp7jwi04TZqP0bFppwMViJcQE3oSYodnbTl95dffmyA6g3ktCrH1GySRJZpdLdg0FKYfWG2DEXBWhaWp/WQ52uF1QiPu3tlpP1pbTy8iBbrz29SzLzh8/yPLy4rxKQEssyWB1ehK9hxgPdnfMYFcg7ZpzbnxEZqVUXly/dmdrXLbPyoW3oap0pqrKpdYmlpKawMemDtkAnPfeIhHWS18Uaf8UkFEHNw4W02XdLPMs56h8jBIxNrFdNPkgs3nquti13nvXteGHv/OP/of/478+2r7+zlt/iPNzqN0nb36yeHq0f/3gzp1Xz+bV1nBwbe/Gv/inf+8Xvvd1aG3TxrMnp8fPjuywfPm1129Oblwsnn75m9/Q+YdvfvSM2JVA3iTVMnY+BlCu8QeDQZrQsqqH1ozHW0dPjrbGg2rWVnV7PqsC4sXlNM+HLoTjw8Wt29ureZPkfHF8sbd/fefa2H
cegQbD0nqcXS6Go7FK0948gooyZZaL+Wp1eOfeG4M8baMDokRT8NEaCj5aq0ye1tWKTBZ9G6MjiOUw8RCaqsnyLfSB0aAAxyZLKdsan59MQ7MKURiiMSiBI3JeFonO4sprm7TLCw5du2qlme/d/cLxg/shNMVWeTldDspsnOWPjh4B5XdfumnTeHp0ViTjrcnk2p27FxeNMibN87Kwy/PTk8dPywLr8xONFgtFk92Xv/Hdt95599WbyZPzUC9W3/2T337wzgev/MIv51tmcX6J27tfefnlt37yZrNo6rOp76r0xf3LpU9sWU5G+7vbN4fD/+f/9f8wKNPJ4Hq8ADnjrNNHjzt2ijS1jovUXJzNtraS4Wi0fX3n/v2WOZ/sbCsXL5rF6eHMaKWEpseXqZTY6cGweOsPf/rqKzfvfPHO2ScPdweD42ePfFtdv7796NPTvf2JWx7aNP3WN197dP/+7s3brYdmWYNAQSkpqNvGdT4tM++dsL5367WzwzeV4ms3zcXp9E/9uX/rvY8epeV4Fc4NGtc5aSOBKEU+yLxyQDrLi2JLP33vD44//kF29xfsYPegHM6fPmxD++TBh9lga+/aXrVsDen54ZOBvbWZFySpNj4Eq02W2cyoKMKty5RZtm2qlbEpKRNBAMi5kKWaI4fAuLFnWKOb4JHEKNBKWAMhJxZ9YowhsAoRmjZEjj4y+ai9sBdCRCW2SFeLSok3xopzipQSXNYrUonCBKJI9EohkmqqToDFKC+BFDFT0/jz+ZJ9x8rlEzIDwcTFAh0G51kbwESEaqtEoQ2hc84hIoZQnZ0Zh+JVXS9D4jtvKKb13IsLTQXMISgGCK6OpDUEyIyZbOdkAkKnQFv2bumYzYW4pllmSWmsAQmaUGeEmZIV+iZQhpPRICK3oXNVFQGxbRF8alWaWNe5EBujjHCMbStKk00RCCOYNBWORlGMnVbAbUcQFEKIwgF8FywEhbJaTcuciLheLIo0yRMjsQaI1jOHmGIklpJ0EgIQZ8ZkwEyQM0MI2Aqg0okNPoi2Ssh1vl114pQ4RmW8i+CibxyB1iTinUnSJLFCyawjRDudzq1Od0c7gT0CKtLFJM2KMYJO8ox9RM/WEgTf1DWw844jx8V8XjWuE1p1jgEF0Pso4KzCfKBJaYqU2lyiSGSJ4H20qQkBWowhsiItkZu66RwgaqWV0sCu6VcpQ4hd59gHZicKnfNICpWVGIUFgJFAqUaAFAIpij5AWKuK1lROsDfo9PN27CMwbFQhGzMTXi0G95/f0I6eHPTjrv5zyM/FP1fICDchEkQ21AA3bAo3TGkzoNyIZ57DIkFARALuaULvDMC1vgY3OGTDQPAqWkR/Roa1uJxxc8QrodTmxGuZDay5WL/KHJ8zIUDq8cZzIAHPKwAEgAWoD49AwmENqaB3lMFzvdPaBIXPXwUEYKRNhQmIiOCVjW1TN1emrp7xIa1VNCwggtILg/i5fayfDSADKOz/6KM09IXpR++bABOwCRXSVzYzIyLQVSmvxEFAgEIIwLiuCdncJ9Svq0OfHb6yj61f6MtGiPxcHfS8/vAK88kVx1kXaSPk4ueVzwj91IgBAYWFgIDXrjTGtVoKn+cd1kBOhKi3jNCGl25QkQdXZPnk5st3X/7Vf/RP/8k3/sx3fvF7d//5P/5hs2yzZPT2B0cHo0JhXM3OxatV14rzXQi2MJCnl8ezjqFTkfLdYE2wmd0qK3ceYkakA4RFvQgSbt266RdV52rv45OjKXNM02S6aPRgMNzTw3GWeYmeWi8c6lmz1AYWl8t6Pt3ZKowyxSAdWF2fLHHOi+rCDHNtt5bTU61aY2V1+UyaJVO4e2MU/dKkacty98XX8lQrY0ZBtXV3+fToh09PXA1dN0+HgxBZlxaMXqzmBinTCfCSIbSBfd25Ni2LMUiyXC1MQp2vZBVzYyIHVC7PUqYkCokXrSMITS/nojNPybwxeZpdnJ8DhPFWej47L2I7HOxdPnvbpVEZs1o089N5zarTUE3PkoE2ZdJ2VUCymQocV4u2SLLpxUI8KeWU5mKUxQ4j+6LMUAbOt+y70HZJYgaj8QfvPzyer/ZujA9uHmRJtj2MLDlAjAHmMyHIq0tvMWQDEyF08/PxeCsoEXCT6+V82pkEfECGXBRMxtnJ6aPhVlEUafB2tbgclaV38WDvdjlOkUK1PCt3sr3d0bJzg9gtzk9u3xy2Tc3YKImua/LhSOk0G4xY3Hiy5VzTxirNbdNOdRaKSeFczEt54d52veys1q5eIXE9bzXGxep079pdIrVcnj/86PTx47MvfuFLq0U72imvX7s2PT/a0fvF9vaj+6dlua2k89rvXyudJ9LauQAIFxdL8iHLithKVzvSdLB/cHk8TbJSZzIYDtpF+/TJqbVJ08589MQKmZVaTw9CiIpASBDEBUaAyL3GQ7QhEYiRexIQYwghgjACBEWKkIMAci8wAcQQ2WhF63ZTJEpvFRGRtUiHkOEqmA2wMHHfMq37C0WISCIoQMwhxuhD8CFGkcAsUYiQASMDSBSREGNkZhFA1EqFyOJjLwj1woSoED0zhhhYFJFWRCA9pOAYEUApJYC81sUEot6C3beuiCBK9SsU616xh+u9drN3ySEBoe7lUWsN0DqBAMbAxigGXNvCmEFEEfXOMkW48cuu3Vtrr5paUyeWSNLTK1yLJ0UAgRT1SxWEqLQChF5BJCJ6bRIT5v5KrnvdtTpJBASIUKHqORL1bxJyjCLC6yg/fftPItEH33MnAeo7fkJCIhaIwUvsoda604T1yg9G7tXKIMK4js5EiqgX+CAiAUWJHPuWnglQqR7EAQByZABRSgkLKDJqvVbU1xJsFmcICbVSQEhERMwiMQL19RxFRJiZowiAMClSSpFCQuV9eG6jBrj7hV04vvz4vRNw+EdvLxof9yf5vf0t7Fw7m+0dFM/qGiNFJ1UbBmUiLJNJPizzzOLFrPOBd7fNa9dgNb2sddpUix+9c36wZbUVneoiwQ8/XF40xkfYGaohkSZIC8zK7GzGSWZdor1NZx3/5//T/9Rg8uH77+4c5N1F+Orr1/f2k8XpBUFT3t3Lt7fbzjJwPswnevXbv/Uvv/cXvnrrtW8eP6lWywUoKnN18eHFP/ib38+Scdu1g1yHpdst9LVd+/Uv337jV/8UbH+h5nygfXN5v+iOIFvlg7Kah7amrb1dUbyY1eloJy+3mbGrRCWpsQmLiV50WnIIyigVZbWqrdkKSoOyKrchITGRESPHZBuzcgBBma0yR3F1l6QJhwAAWyFqADD4Cn3VufZXtQ0cI8vFcvnTH73/9//Zj58cd53QdM5VFGUxI9u1XeeZQSRymlH/pCpDWiv24jkqhaQVoGjnbmZWd+Gf/uZ7HPygVCOB08eLPLGHp2d5Ya0hLQGFvfdpaqNTHq0H3jJKIXkUAR6O9gbpIKBE14wm+cVyde3OdnZ8bDWmmpuuFXHQYuCwLVGHLrdqsqOv7eSvvHDt3q27q/mF1e3FozMCVsLaMHXcLmE8VoiJACzmjTGJGEhHBWDSLeu2a
7e2R8wUTZKWZeiynVdvcbqlRteca5ar+zZJdl/8Qrfk6vjJdHXEGJJCQdQ6TULsQmiV1ZSgSo2rK62e2mU4//QtXOqta5OTpxUT6MR2LNNZG30M7FwAZVEZnM0qiawTWzWhfwqMVQ4kprm2yfxyxpjbQrrG1TFmg6xpXFSqWnWBI1Jsq0UMq9/9rb/9F/7SX7t1ey+cXi7Plq6d3r67wzE+e/JUyvJ0deofn9956U42Lq6Xo3w4zovyg7feTCw9++RJ3XmvuhfuDnRqxtfGRrRnB623NRxfrgAwAmaZubE3/uRhTYjnF7PBoKzrOnJAgeCZtF56/rU//VfefOun7vDjrjPFwCepBgjVbKlu3yBE3wYkNbu87Oq2Ta2wBdE6wnK2HGwPi2KolZ7NTvJ8YIthW1Vt5xRStVoIKJPkTdMgqdl0BSqmKYUInUTEON4eOtdyiKhxtVyBghBj1TSoo7VZpm3VtaQTIhGiwIgx6ARb3wTn6lWVlWlis2cffxjr2hgdOG5tF82y+fijB7uTneNFvZNbkpCn5Wg0aubN7GJpbJkk+exkqng+P3rg/Gy0f+vk4TTPR8Px+MG7n04fPLidFc9+618Mi13XheWqy8r0g7c//tKf/vWFx/vn7pu3Xrr1xVu/83f+q1/75T/xg9//Z2++897ewQu4atmt6q7+f/z4d99/8mxo8z/zZ/7k7Oj01a8e2D35ow+fgkYxRmnqON64s7V3fYu5bHWhs+FwpA6fPF4sVl3rVIpZalbTbjTevnbvdSf+6bNTYn16eOi7enu0XTs/vr7bNfTs6dO2bvIq93U7n7lu3rqmqxezbLAFmXExdm3HwaOGZJByFNepWIenjx6184sQLuP2lq+6y4c/3jFZN3+apYk1RT1fURSbKtSWBBxra4Z+tWTffOMXvnv25Hw7KQeDnfd/52+Pc5tcL0b7w8V5pfP9+uj09PHjOzcndVX1T0GRZTFKXiRdjNZaAmBmIsMS0ixXSiepij6qvkfTmjSSIiYPhKh0b4g2SMCstA3Oi7BN1QBTSkLwXjsFAmkKrfPBCzIqhVoRR0DEEAIhtxV75URYaSUxAgihNykwsAIJnWetjNYB0AtFx45jy6EDjwmj4tTarAzDfdvELigHSJFUZG+VCm1jFAEqwIiAGH03a6nSvhatjFVKWkHXxcgS2PtOaU6NyrRqG1cObRAQz6bQJmFjIS9yYY2S2hS6hp0PUfSydX5VDcuiWZ0U+8O0GKdlsjqv25VkGWoWBSCRxYtrWDh4RUtxogkFuq41Slrpxtu2HOcuBCItjNFH4hg8atXHh2SR4Np2aC1AwuLT3CKxNeg975R7KFGjaTtndJYN0VnlXCshJjbnAJG9UXl02gDsjbZc3jF7ZVXneJAUkVG6mNkUM4wG6uhC8FmW+s4hRqO1Nqosh1qbGABI7wyTVCEAaGOK0UhYjLHshYxVOtNKQQSlUJNBAh+C1sYzL1fLwB1DjBy8E4miLHFkJEVEXiSwOO8yZbO8jMG56LQmNEYEnHcCiCRplkQvznvnKkSSoBBESSSEwOC9N9ZEZOeUAGhjADDSOohLHzyGQWLsWFCcIIASWpOi3hGFQILrqC5rZxmsKcfazbRREfXz7XVYnM/yItwIceQz839Z693XspyNWmWNcXp1yXMr0XM9CV4plT4z1+9DS9DVX1ckaY191iIjvOIOGwJCCEDQD0/7OJwbqUnvcrpSrQDA1SC2zzwSXBW8/8BGuwRrmf1VjjeAZk3U+lzxVc77OsCrY+OmcEgbldAaBkmvtREAIdlU22cMV7Bxe21quP9sP5vrX10HIe1DFQEK8poNySY/0of06U/ESCjIV4AJEVCtiwFrAc9GsrU2EPbXaO00Y5C1YmhTQZ+VBgmvZyd9AA2+Upz1y7/rQEVXV/zqElyJ3NbEaQ3VNuF0cWNtg7VDEjYKMVgbAWUzWVm/3yumel8IAgr9/6mKSGdJcuOFu7/+1ocP/+Jf+LND/egP//E/KptkdGd0Pl0MM2N1DOIhYLtoGenyrLo88fdefjm27d7BjWS0C2iaJpQ5LM9mMe69/OUvpRp/97d/5/LoSCVha2fy8x9++NJL+8Od7cuzExG+cbA72SofPnm8f+P6IAmpTUcx2FSP9rer8+n0bAUT1cz8SrNkWHXu8uh0WNimM7GYcFtNtsZJnkxPQ1f7rUGRFPri6fFqtdobbpeZWVRAISnz9OTp5bAcUlhML1arTulsnBizt59cTmeJKYKPJqA1SXQ1idFJBuIZo2iikDSNz3IyWhQxodYKBVlpJgoQ0SBGMi4xbXMRmdNMRRXufemr5/ePbuylT09BnEEys2r65K0/3r13pzAvhrjg0B4+WIV5YzI9c8vhKLWKSCTNk4BmkCBKuH37pTZuvfjKvSePHq2qk+iXKOS6FSClaZIjdhKBxGblatq+df/h8eHiW995LdNhnJvom/lRdfPOdaG4mreedTnZStN4+eycWzfezfKt5PDp/YBmUYemVpenzXCwPdjeeXp20rgqARfAhc6IpoCVSYfZeDsfjtP8YFnNmq62STKvkSMLJfvbxeX03CQFJtA0jWYzLrYFiQBC6DhympUhhvl0qUkbncxn03KwA0AZcefby4saymJrkjfNIkanE72zt9+1y7qeab3dVX45fXbv3gul1s2inmKYLd2Nmy+eTy8G47KtPU9mAVf2ZDk7a1vP5bBcNeHsdJqiyQ1X7bTMBqvWnS/89jA/O59PdkfRG+exq9syS72whJAVNk1s7Vf9U8ACKLKOQSUgICF6ESQi9g7WcZqFhUMIIUYQicwUcB0WR0TrdSurjSJSvVaIsGcywn0LgKiUIuEQGUS0RkDQQD2BFoCeoQCiIhSJwszMHAPHKBI5RoiMSDEEBAAFUTiEGIWBRQEgP7euhhhFQBGgohAigufIiMoYBU7yxBKRBlBawUZy2XMr3YteRPrg2hwDIygEIurjZG8WRiKsBZLSR2VmYSJadwqCIhJDBKK1LFairDsK7h12IH0EIl7HCMee+K/jWIsIRwEQAuyV7aq3AQojolIEMQApXEf4RllH/WQkEg7rgL8svQ97o4ECEOjlzc81QbJuNSNHAUFZO395nWHywWFPxBARCVBAGBgFAZg3VvZeVtW7pplIISJvGFWvIl2DHCBhed6P9V0gMwOIiDK6f6EXpSKtjc2AxCGSNr0QLQL70PXf1UYrY7RWEJn6UNwKAXqr3bpXln5UhABESmlFa7mRUipCjHFtQHvhjbunEq7f2f/k8QMsktl5085c8bT62osHtu6W58tXro+nbXu6dB5JK31xWp2ertpluPXy7qw9n2j82m37+nWoB7p+dzF1icm1srSdm6qpP35WH079EtQo1S8dZOhjYjPZzicH9vQivvNktYzUVM2q9f/1/+7/+F/81f/oT33l1ntvv3lQ5rYlv8gSq7LxJJ9cU/lEWp3k1p/P/5v/+n8/2dodjO8uV4JZPipSF1qL1U9+9GOweRN8noFNYCzhSweD7/3SV1/55V9ZZLcDDOTspF7erw4/wPbC
mI7K4Wg8TMuyaVqtdKqDVB3olVZWWYPE0TeRGm04Lbfreul8jYCZLdsqZFsDUJqdiIesGOhE1bNLk5okycBoiEnwoIsiYDRWORcx0Z33aHLfOmVBEFijc3H72vXvfKc4Ojpuow6cfXh/+tHj6emy1blhkLr2WmNkVICOowZm4SawRo0kgyxpvMMQdgwOWzfiAhrHpKtOdrayV168+3s/frf1QRzYQYaEGlglapTkI5U11Wpy86Y27uT4kJVYxZP9bb9amdGWHWWrhURIXv/SC//8d36ymw9VDDXRiwdmL9N7Y/PivdF2orfH6Xg/q5bLNCvn58fL5SFaKZWPrHKT2IKVUs5VMUCaGSc0Gg8ZKJ3srFYtQGeHSTE+SIZbAtYHTEdDoCzqod2+IbqgmL/0ta8+fO/3q9X8zhe/w828OXs4LjKbjVgNknzQLS61ieXWmHg5KHRoLxfHT82TT06frG6+/PXJ9s6Djz4dah4M09llIySEbEvrPfvWD69vL30NNmpt5pfL/iloqqadrkxWTu7u16l58smxsYZsrEMTa59S4kMwCXAXL05PL58cciX/+p/+ZBT+XnJt7/7798dbxXB7UpRp28mSsRxuv/7lF/4/v/NfJSDX9vZVxs8ePR7sTO597QuLi9n9h4c7N7bv3n7x9GJaloNy0RmJo8HY42qY54dncyCFRCens7s3doaTohFJosuU2dsdvv/Ow8KmZW4cyKrtfv93//b2YGtUwJ0XDk4+ft8qr3Vol/P5xbxMk3xYts5fv37v+NkTt6woj0m57Ver0WTAGIy2KhmsqhVH364qrTA4MVm2mp7ZpPDgtbFpkTV6YRJlCZq6iZ0DkJVrrC0IFQmCVoAWXIMogzxr66DYDnOjLdWLzmQlEtRVZ4yOXQ0cB6PB0ycPdnZv6MSOhvnlPMwupuNx2bSrfDDIs/yFcsdQcHVbFFmIYbRluG0TlS7Ojh4+ePjCZOjbOvr2/Xd+hpSgVhBgkg1X4hOdE0BeqmZK0+Wqc4enn37IgNdfujcZppJK5y5/6c/9UjHggzsvG4E7N6/97Cc/2x8OfuO3fvfhs+MvvP7a7Vs3HAkkKi8GOc13lTmqawG6c3s7LqtRmX39V37993708TArChOCW51cnDSVp1bBVn56Pt8aHtx7/Tsn09PxMH3jKzfPnjwrNR4dzsps5+DuLadRRdfN2+NHn4y3trb3xj4gAOZ+7GpGcgIhNVqRdS4kOm1jDIHTNAeUp4/feun6jRBst6qCl8X5DFS1u38rxFh1MXQxTTUTEIomzaKMLXTwzfmzMksMYnf20C9W3eliOkre+tkH3/u3/9rkelw0y+3dXKJniFu74/4pSJOEGJTRhhmJ2AdRJpALMaYm1YSEKFohqMjiGUQhISZJIgqMMiBCIJ6jAGMUlSSKYlfXRuGosIHRdzEGCC4mifZBggMQIUClUGkrEJQiz7G3WbddUAggmCY6CncsClWIjrs2Sy0Z8h2joZbD3LmAPk1MkOhVKDGpVxKhg9KIA2yR2fjAqVUKCBAEgzEUQtusHNfataqNSIqM0b4JIcQYmRQkOZIKrLvBmBoXFGoDVrSORgFEL6SYhFGZNEUSg+WIVtWKPToAUdC03eLwiba2qQIEiMsGMfgQNBFHhggxqNgSkvGRQ9NYo1mDSopqBUR1MdCaojYEKQkjoEHACApIAYdhGRKD2lCIkUMsypQQfVAipIgQlAyNRBYmiVxXFaIhTENgAE6SzCgVvYvBR3YMIRB4BpNkqLVCFVtf6yY4V9gYgzBgWuQaCUApgESDVoYDI2jUhgUAsHPRKJ2kSZLlKP1qnxDE6EOMTmIAQhFRiGRUZ1MAo5I0UiWtI+YQQ8BIygApk1hQFAGrtk2UoCKDSpECmwAhIXnvYuc4xuAjAnKMgjE6JyyZ0TZPSBC1CiFEYZEQomitgSgGDoF1zwQYCEFrbRCYBYjCJlYR9APVtdOsl8Ij9OalzYJlP/6jfgESNsO3jZLnOcLYxJPhNUhYU6LPOIjWhq61wgXgM4gG1vauNWz5zPoeXoW72SCaNaN67ixay4M24GlzULlaykVB2Tih+iCn/Yhx7Um7Ihv959cqdFkrguCqBOuooJv3+lrbyKZwA4nWI9p1UZ6vPcOm0H0lb1RN/QrsZynZppY3/AfXRenVO5sa7IfVsC4YiPSjcYSrANACvFbd96avzQp9fzzYRBeSHk5dhS9aXwxZz2X6+pO1BW8NtPrP06aeNwN6AaDNReyXq3ses2Fb0m9Gs/adkQDyWqe1jkK+gXybe+95cCXA/gJw/+2+zp4b0AAZgEGgx259YKlNeKn1jbm+R69waIT1vGCNikqTJlF98uYPl36Z4GtuPt3duXHyeLpqZUXlo+NHg5cm4vnui6/crx4ff4rf+KX/kdHbDz99j0z70pe/BTkMC7M6nh59/CNGc304vHPvlRj1N7+5dfTxB09P3+c4bSO99/A8eXRyb88a8eSS05OqSOzdO3cGdrmYVj4IEdeXM+zanf1BvlU+fuckRF3kyScf3r/9xVF1dDkY7+7fyo4+PVZx27Xd5PqdZnVhrFutnqmUr+3uMAQ2pg5dt5Kqs6LueErPzqZJMtm//UbrYHr4sSy7JB/pdKBNs6zn1oJjn4HyQQSga+uwFAmlsRp1J9xanQDkSZZ4d5FnOYjqHHar2haWPbU1X5zORltjraM/Pqsvji6ap1mRrtoQOe7f2w2KwBTdKp9c3z6/eLr3Ynl0/9MIaOyonAxX7Wxn/zZrvnXz2vFHf+zbmBdK2m528XG9mpPCvd1XFqtHk73hxeV0tZwKS2QeDsvjZ/63/tVPb7306p/8U19RuvNt1bUBAb3o+4+PlenG+fZka2t6uuyWq8NHD5TmeD/s7Razi0Xl8dnhWZEfDIpdhvT9j8+SSSlNO1v4Mi9DF5eyaprzG9fz5fSyHNxzfutHP31r5/r+nRvZxeEpJUAF794Y4qO2ZQFhBWSy3AWXKG3IKCRNNF+svKDWI3YCqLeHWxHl5PR8K0uwddxxIzGx4ONKqyR0biWdscn5yfners3TpBhni+UhjSdbe/tlYa1Ct5Inxxff+fpXp4tu9uyj6KoslUtYQQtOq/mFGxfDBDFVnkUSBc/mlQMsS81GOMQ7e3fePv3wYlVlt7fmF5e7uweXq/rwwSkm+fppFgmRRWKMoW+BnHPMoo3WSomAUhoAhKEPxRNjFEAgiiyIkFgDAhKYkfvYfWtDFpJIIMI+0nKvXgHAXtojIGod1gd5jfKFkITjWkMQxUdmFkRaE2xmEBYEH4JWhIDMQqR6ExOLAAsReY4+BhGwRoGwD9w5r4iIlLFktSZkIjIEFo0hjUI9bOYQ+4DQKEBKAcT1NgQSOUSt9AZzMPY9CogAK2V6miYEzMIgau3q6sWf66ZNK83CEnv/XWQWQkDAyMIApJQIyMbodyUqDTEqpRBJ1oJQBiBFaKxeR/tnQQGlqK/wvumMvHaZEVIvf2JmBOwJDxIpUn0Uo/X6D/SqMUTAGGOfMRYRjpGZELQ2ABCjM8YSUoyBmdb
9glIh+L65FeH1msTGk80C/V51wP0mA8jrLeT6lRREVKgARYgUkSIURSrG0HcCESSKgEi/251EFkSWiKQEQGudJKkirRRF33EMsJZl8RUkYpbekN+rhrUiJFoHSBeWfmsEAAD43/6Nv7OjyvkstOAjUpZbMvTm/ceWw8u3x3DZVK4ZZtqLuWjiqu0Cok70yXIVnqJqmm/eKF9JdTg+sVwdWG/ITMk+W8ps5VZ1OD6JyaC4tTXGy+XRo9O97fTlFw5++PT40hdNRQbNL3/75mvfuvPmHz/9h3/vB++99f3v7X/15dfGB3s7nccgutjeaVutGmM4amVToKcPfvzgwUf/1l/5RnGtPDo6N3YLUOejMXfxww+cTgc2tnd2KI3ulRvFn/gTb0xe+uapO2AshtjY+oOnb/+rwjZO2qzY8j4xSVr7tI6SxsCRJHq/WqZFMRhfW3WdMpSmg6Y9X84W0dcsdZ6UHGORjQCsAGFCJNFJbGpnMrusFm2E7d3rnk3wHmvxLSuFAsQi1qTMRoIKgkGEgNhFhiSn9sXtbUR85etf+YMfvJ3F5TtP+JPzJorOEqsNtHVHyDtFulouOIrSdPf69nJRt1UFq+VLY/qlA/3SbhYbf/5kkYzHR124uAg/rB6u6pYYJ0WCwgnIrRvbw8F4e3TtvZ//ME103ZyHFqKi2byqXPfq7YModWiNSrLOUVOx7vSt7eRMuf2JwSH92T99o8SmTKHcUXHGCjvvAkYXoyu30+tqe3r4DMG5zqzmJh8me5PrMV4oQmUL6fzlsk6HgxDHNXcxnO/vTGx5nfW4iUqPi/zazebynJuqPn7c+rTc3rIq8xJSPObFT7r5u8yLEPTqdJaPlbKicGG0QTdH7s4fXgpL03RN7UmPn356FmkApOdVZQvLyPOqunF9e1WtmtY70G7aTsqDVXW6mlWta/unYDhIqkdn93/2tnqX3vjmVycDuTw7n61WWzcOJrs3n33wQZ5nUcJiPutWs8XZxWtfvmXzwc8++NnXd79x85XbVSNnM6mamFoNGanFxVv/8C03nf2pf/vPnFycTkbjvLBdy9PzVVs7nZuA4aPjp7sHOxDphbvXL04/DW3t5nVAPynyo9oT4vbYWA5WMB0MC+Rnh0+0zSIr77gGLyIDpfx8cT5bpYP07PgEgM4Pz/PUjLe3JTpUqqora9NlXds0i15HUcPh3jJ4kaCVXlVNNiy3dnYU6uW0Vak1iXZtvT0ZNZ0fjcaR43x+ISGItV0MQAEBAJXvQlYkyuT1atm0bZoYJKi7epSXiBCcU0QcoiI5P3lw7fZLZTmE6LomzKaXShFBsZq7civrgtMoo6K0msqyJJVH70M3j6h8F42l47NFPpgsjx9c1jUmeG13Oyrv2bOiduFHOwd6sLM8nWr0+3f2EqMOL+el3q1h+bOf/N6tW4O5xDs2f/DDn9y4sxfH7of/7d/bOdj5pT/3F7dufv30/oNH9x8szg+nl9NxgV9645Xv/qX/5PDd97i7fHz4QNru3ou3b16f/MHPP3xwWR8eL8ddO7k7nj49XEyr/cntxZP3meskode//PWffP/n47LcStLxzq0AYWBjfXpIo2wysouzpTXZ9u1bT2dLkyQ7iR3v3U6Gu+l4mzWiUiC0fXscu6ZrOmFoLueTLfv06cPBaFDu7htjlvNlmsDtu3e74LpO8jJTIsTJ42en+4pyQ6hSGWZt0zbeQdOleQoS29l5c37EzQwaHowHaVJhJi/+4v5ivvpz3/rLVf7yxw/fWh1/8uKNRCdZDPH06Lx/CqxNfOuCjyKgNQpgdAHJpiY1aCFWSkRpjUguBCFykbsIZZmRVUoEok81Va3nGNgBKFJaJUaxC4QSEQBiP35QWisit16Ii0YpTQQBgnNEgNKPcCgIA2pQYlWo65rDOlikxejbaMtMWbtsuxU1yQBmVR25zlAtfRdXbBVLVLyCUEVlNBQh2Uma1comiQBIG7oOQix8064WXhsj7EkLclTKkIlpAYOBrrm1QyqGCpbStth20UdJiNLEzJbzEeXCkqZJOswU6yCyPUiWFdX1nL3Drh3mql4tVaOCoElNwzEiIpFJrdJGah8ie4fBewQVAtbLNils6KCrGu5So70xrBKMHIoiy5Jy2qFSQ4gdsfgIbRcExJqk7QzEyJQwGQlEggaJIPiIiUoG+bbWuQsYfWNMTJIkRC4KkigSndbiIxBpQESbJEnqm86NHIrvqsazRKHAwBGAIXqv2Iv3aWYRVBCKAI1npQ1qA6iZiZQGDBIapIgYnFv6pkHEzrm8GKXZsCzLNrJj0S2kKUHwRqATqttARAmBjR36LnaessIa23ZRgQJA4MDg2XWubbVCrYxRmGRUNw0hKARtNGljSBsR570HTi3GwERKWDQqMWazIW5EUhIZURRCDDExZg0FaM1PYL1DyZV25mr+TxuhEONmTMcgVyPYDRhY44V+mC2wlq1cxcfp9R5X+iTZeJ02waE34Xg2UqGrg68JBAIQ8kZwgxulU89mnkOZjR6lJys9y9qgng3UWdOFq0/37GcdiEc2VALXgY3WjEN6CIGbcuCmnOs8IADwFbcAXLuv1rzluRCn5xe4MT08l2ddkSAC6Uf8PQ7byHSuIhhtjH6wJkM9kentYGt+s/YMMMha7oMo/RY4/SYvAgDrYBBrNxcKg9A6aBHKZieeDRjqKR0x8lpIRFcbyD2HYhsus975GJHW7AY27sDNxwkBkFkYSKDnWet44hsF1TocUV9O6KmSMAqQBhRG6I0ujJuVb8S1+0xU71+AzZVYX72+RrnfRwcJN4a9z6Ci5apxq7OXvzz88i/9YnfyzvHTd4MK+UB0Wd6696W03dVdlWj13/2r+/OZ/9U//5+ZbPDs8JOzs7ds2xz+6Kkv5Avf+tqTdz9azs9+4U/+6RdfO/jw8bwL+NGjjxu//Ot/42/8r/4X/4mIXTYh1PV3vvLN8QADuus37+gg82YWQ2PT+MLtm6fT82Xjt7aL82kbIt+4c/34PAxturd9pz09K9PBR29d3jRVun3z/U+XB+PUx2dE3WxexwBGDx3li+XFyBamNKaUyeSVp8+OHh09GBaqbqU+ml1cXOSWYQXeVzSd7x4USaZJZxbSyL7p2uicB5Wn5bOTubFqK5VsYEL0PtquA8AhBTbaeoAk1wA6hIYje7HjyW2ul08fftJ0XZGWFslB14ao6mQ6Cze+8cI4Hz149H5s3DA527qh0sHo7HB2NjtLrD5fNFqp+80DFUjEnpzMXYjFYHSwjYulf3byEFWXasyy1LkAhE3X/e6Pnv3+D55+69vf2L61e9m1OUme6r3rI5uZWeUu264otsL0cn7+uKtc8M3rX33lhz//ONpkfqm6KiOQ8bWbu8ODptN2mL1+d3+wPz76kJvLBeWFTkKULktSlaFKitsvfWXpxx98+DevG7ccBMqgLAxT59vlwfV8tYK2S1U6NmnWVMd74wnGOrEUg1stLyeja43OjUIDoQuO8fL6wbCdLRijIrOsOs8OZHHz1t3FasEAzOH2vZ0YKsCos+HZ48eri3
p7XDxd+K+89JXDJ/PJ8FvPjhPO0t1b312eP7TDh8Pd64cfP3x87oQEiC+8f2P3Orju+OnJ/mTyZDk7mVOeD5dtF+fPjHLRoQuk8m3SO03XPXp8/sLLW/1TwMyIiiVGkd7wFXzovV1IZLTut1cHQKONEIhiBCRFPnirlCIFIowsiCIYo0dlYmQARmIUBYCRI/ZhrUFEIqEWEebYy/x6MTGhQkGGXlaDkTlGiSyeo/euD2LdsxgB7EUvwkLEChAgEioBDgyBoxDFyIElAESJqJQGic7pgGWKAJDahBliFOGgSaA3XAn7XulN/f5ofTPICIBIClAIY+xD3qydt6QUokIAJGARicEzOPYoKJFBgGOMEiJyLy0lRQQYN25t3LjGIoistzZDAQkhbhYWZN2IR+m3WVZaEa63DCAkXmuJVR8Cj4WZmahfG1AIxMyIpFAFDooUKtW3kApVrwYiBEClFEDcKDKx15HFNbhCjBz7ltWH0HdfWhEAsAg7H2MEhMhstELZ6JWIFGlUtNY9IYtIiJ4BFSkBgRgQiDkAoFZESmulesuYUiRC3Iug+Eo5GgDQe4+I0u/GnqSZyYgwxIiopK9XBGTRRL1QSwBJYR/buw8+ui4hYvQhxijrhRc4f382y/Tx5Zk1mkPsGNGJC/zx8fFX3rjduIuLo7P93V3J5Ze/+sZv/NY7h2ertmtyq5Z19T/4ta/+u68Ozj987+D6NgUzX50fP+pWWXo6l1BVnFqlC2S1PJvfsnz7+vDLX7l23sze+MqNH7033T+4/uq1a2XB9QX84q/92td/+Rff+aN/ng86k6QuNhFSAYOcDAYFJlpnxiYD4+e/9c/+rp0IlCw6FJNBsyJr82cny7/7f/q7Y1G3dlPtMesWX3qp/NO/9vXRvW9f8h2T7ueR549+/uBf/gZ0l0VBOila5Y2HGJQioyy0be3rVoJPtQ+urlYxyYqqagF2XFN5cYlFFGlWC4IqGdum6jAdpoUO0TfLpTaGlW/qpSJD0fqqFd8WRRJ9yy4k+RBM3lWOTCsBGSEyZINSZbZuW5vufu3X/kNfzaYX0xzSr7x8886r8Df/0VtNgNozgo6IgcVG50VKa0OUuu3qbqli/KW7+V1VvzryQ+i8+Je/WdJ46/c+uJxfem6jLvNtI/tlOlv6lEwd+MmnT4AelgZd8LKoXIgdu1bpwY2d2i8zqNAx4nWTmYvj2bXx6IWXRtXR5XhIo5uphkWSRJuhJiNJzmwlpNkgFwir+RyE7732an16ePTJo8QG8eWqboTFe8/OR22LLNfo4+ykMDofGyMxLs4obaU1XKerdimcAXVpHglCkWV+iauLJpHw6NkhdsvJrsYo3DTt/FlX+aGxvhuKbqNbJdasVjUZazPTeLnz8u1PHzXZIA1te3a2JA0eYOUlzweeV2LVxfmCxvmXv/XGOz/9WQIMsAQAk4eb37zx8PBR6/n3f+9HW9vjxbQGMQvU9SwgYLVqmmo1vTgej1JP8Y9//Im345OnF+2//OP//l/+czIY2OEWtTT78H45SS4+/fDkk0+hrc/PPxyMD5KBGYyK6WWLXYuu+e43v/f06PRy1RX53unh2XmY1kafVRyizdP8xWv26JMTRNNFXw5t1trRqLy2f3PVhOl8rhPTdQG9QETmYIwKLhZE+5k+Xbgs1SF6abtnj54Nv3CbMO7tX3/67IHSSVHaCGpZT13otBhFpTC4GpiRMBpbpsX2sj5uOiBKULhpF22zMJRQWjShRWQtKstsFEDxrlugXwnoPMuNMb4LZVoohLxEItN08fL8MsuKyWRP61JZ41dnDF5pzFITPTDL00cPx5NCmBVB1cXLJtzZLas4KwuVaNXW6WhrdLxAR+ns7LDU6Z3b154enjZdl2VqXO6yjyE4YNIciD3Hrgowd+l7T8JiXr7yjX/v9p3izb/1t/KPf7Kt1PHH1fT88Obu7nzW/fAHf/Qn/uK/3zHCeXzlK9+qF4/Oz975S//pX/3+T57lwQ8mqfnOS0Bh5iLYwa/8+X/n3b/193RJt18ev/Clm7/1uz/55Bx/7d/9K9X1waN3HijwM0ddoe++/p20m7LPDz94f1TStf2thw8eWc0Icuf27fn5ezsvvBKrFoSSrUlqTBd9s1wmqXKti0Dg9eHjh5ODnb27d3/20x995YWvfnT/Z5Mb15OENJgYGm2VE7W1f4Mb//j+o8l46+bNmxpEhRq5AT3UJh0N75FzVofZ7FFTnxE4m5pmdZklaTWdN6dzGPi4dNdezb7/+I++++2v/84/+EBR0nURQruVb/d9QZanUaBru857FmCO2hhERagKRQHahExqMw9RI0TADp1KbZlY0rrIlHNV1TokcG2XmqHChEAUOkPeexd8IECtdXSta4L36D2QSZVSIsA+KsREFwLRIKJBIE2oSSGpEFzDbdXH4XMRE4Y0UUThYn6JnnMTUhajGRNjc2IVRVNgrGdeVWniNVcxVQO/8jrVVUQf0C1rCYPlyluhJFMY0Uvk6IvCWCOsQpIF1p22JIlddYEEU4uIqgvkfFdVkgIRiVbC5JswJ0RAYhcNOmHX+VqxG7AZlep0Mc9MapSF1Cwq3/nef47eBw5ojMlS1XWtRMmLggU4UAy4mEYi1omY3IjIcrlMtNMmbzkMdQ4hLrvORwzMIo4jZalhdCpVMaB0PtGiQIgUZwNNttP9sE0oqnbRJVmKYqLvOAqCiSEAhMRoXnXoMCOtI8cgHFMUQaVdjCxRBFGDQkXWdr7zzFHQB7Eq0UppbUCQA0ts27qOvoq+qptaYhdDhwjWGIkiIab5kAJSJ8m4WFYYV6s2BiaVDYY2S1II3XIl4o1GAQmiRoNB470X77qu5wT9imlqM4GASg2GWmvtWgcRQIwwJVmS5OTaBGKMLkiMFk0EHyWEXk2DrGwSnVMgwCEI00YQAptQzZsAOrAWzWziUMpzkYusx41wFc0ZNpqj/lfuReIbWiRXfqR+7L0mRT1poatA0Jv5PF4db5O1jbIG17oSBuwzvj73vxFrCDb6pt5Ht1ap4IZLgaAwSx9/ep0v6jfigivOtNEs0SaI0lqQJBs4soZXvKYTG4dDD24ErhjWuhbgimv1whxA6GX9VxwDaD1J+EyxpY+73IdY7XPXz6rW8As+E3qpr3zmDdfrcwebuu1Dcfd4RhCFJQLEPpIIr1VTQiSwLiUD0poD4pXFbX15BaKAbDZH3oCrvnhXPruriyjY+x420Z3WPGyzwMyCggpA9RNKBhQiULTZrhrgam82oPUqfS+SBCCQPkw5ovQ7DwGt1/QFAHu3hGC/Tr6JU4prL6H0YgUUWc8BP4OK7t4Zv/7Gl+LiovvwX7ezZxSmb3z7DXsRO1HJflpd5E2sT4/OMs6E8jQpmdrzk59893s3H7z1Noezto6rpx9gd3Ztd9xxfPDxYdPGF+6+HG/YTz558v2//1+i64JrEgUv3xq/+uJ1XUaT2nK4W50/SfPhqLi2WM7bTj1++HQ8vmaH27sTqOt6fn6BnR0nGYL62cNPX71259rdXRWDUeb27X0QtgN1fvZ0f
2+v88m8ptXK7O9/I8i8W52mXJ88+3ls6kQJRKIo1pjtkalWjdKpIrxcLA720jLTTWhQIPZE0FNmivPjWbsKycAkOiUIg6Koa25CZPAeKXhPRIhuNT9dTVtAlSbFbN5Zxq7p0mFOKC/cuzGr3h2Mxk0Y7N57xYNQu6L5GftV9JDlCTaSIYBAQmqkvES3OJoWBWepKvL07KLpVm2zdNokA1M2DdRtrBdzAUEtDz69+OiT+e2Dvd2tJMFVYjlNiH1sK4jeZMXwRq62xmWlHs7mKzsm43WrVq997eayqgLDxWUXYlSh2x+nVeN3b2jSzbOPH+Ze7CCr6256Xhc5TCb5yaOLey/f/eiPf/CP/8U/3sYwKqhpg/NA2A23cq2T45OLJNluAtSVkwijJO+auQZk7oQjSVovl0Ldqm63BtuJplXrWnazWSU6278z3Iog0hGaIF1qgUAuLp8Sb6fJcD5t0JwNBltFRseHp5Ph3o/f/iQ35fXdGy46T/bWwZ2ff/Tp7nhrsN/RUu3eHP7gx48dZ2LyWRXQSxCLIQCqOsDQFsgdmm68lZxzrKYtcPLJw4sPHi9SWywXbo2KBFki+xhj9N4LSOwxBzAykuoDHYtSGgEJQBsjMYqI1cZoBUAcPINopYUIieJmg3Nc6z6vgvswIQGpvgHu492AxL4p6XFSLyNywYtAkOg5uhgDb9SAmgJzFGGGyBEQdE9qBAVFaxNFFGlDBEZ6RaIC8hK7zgkLkHGRTVQ+rBEyATBFUoqZOUYRJkUgSgIbnXDkCJst5GFNZ4hUiAFBERlSRpgFWEAa1waJlQt9w2eAjNKRY4Q+zFNUvb6S+lWNXmHJMcYQmbHfiywKC0Mfu5qJwGitCQVYKaUUaVK632yFITJHiX3E6z46nwgo0thH4iEkov6Vnq8R6d6Y3QfyAwBZf703hREi99u8RY59gLee7K3tasAs7KMoJI0YkQUkBNc33TEKEERmRaCVQQQios0WBcwR1qGYRATiWjnL0vvpiHr7IvL6XL0FD3EdKSlyDOKsVlc9KxIiiAaR4FBR9D6E4NkbrfsBBYsAYM+5kKjfsq5XTz0PBrgxy69R0VKwbpWxRmOi9VaezKoWo6o8v/XewxI7g3q1akXbRx988ifu7fwH/73vPvz4w4vj89NlkKP3jiHfKnNWxjU42BZ5dDa9bJdLLAuLgti5DGBvr7yzO1jNl/efVDahYYrf+cKdg5vX9l59sVpeSmq7xZmW6rvf+iIBNjV2tTc2SYbZcDiMQE3nhGJoqr/5f//f3P/k2Z/9D1/bv3tr1aZJortFEIm//0++f/Tm0WRcUnO5m1vdyYt7t9L8pUu36/RA5vOzD3/87Mf/XXt0uH0zZbAAOslTA6r1wa1WOrVdE8RBVhaguGkXaV5ANEZrjGmZbC2dR16WedauasWMUlvK0arIkYUZmRIjQkmxpbNisZrm5cgtZovL48VyQSbdAiSrwDvvJckTATIEoZm6KAo1I5NWJiu2rydfH3xzfvTszZ998Noke/vZctXFNmZaq9AFYhxnWWmSLsTLs2luZSdTW4JUQ7MQo70miqzZwZe+fO2Pv//wourGJYqm2bxxTiBR81OHSGmWmhTYdwxRhB2rltX+eDR/+JSNUYUrh19ITJHm1vmYY6IW7fUv2Js386GlsKptNvGuyLZeXbX5aHz74sljkS7bvRsCN21dr8539yd5Zhczi8EPi63QVoIdKUgsPnv0YGt0U6ApxkNTZFWzVAZLypvlRd06M7y+/6WvfPr2D5Msb+aN9/mr3/7y8f0fWiUssZnF2HXVvIakIJSuyLbGr10uToc6X8xPFtPaDohVpkx5cTy/OJqzi4Nh2jS+g5glgxXj9mB7Pu+soqGVxfTk8DHt7A2rzACcAcDHb386GCTD3Xw2r45PVpgkAuFgZ/toPr25NWw9u7pm5MFouKiXTy6Xv/grvzrcvpFLvaXq5nL18VtP0kzmxxfV4ZM0N4rlxRf3zi5Wi5NuMqBEaHkxD4uYZqzarp2e74zT7YMRkgRuD9+/P5jsXr92YzGdzaZLz83A4spBtehc7TRAs1id4/FLr73y5o9/jq42BkqjXeC2I1J679q4gObTjz7VGb58Z/dsujQKdsbWzc8Q7bF4YhWDhyTjGLXxOktDFJ2aFDFJM0oS39Tsq+npZZKZ7e0BR9Km8L5G0P3CRGYzFPCxmc9W2iau6xRSkhnFYgwtZ2eJscqotq0FObVFiG5nd+SZdFLqLH12/8HIikIUlGVdqTQjoYRLVCZi8/DT+wcvvDbc2Z8tltJ5o3VCWlvbxjZL+dHTh7u3bvjGffrpCYJo1uxpMfUMalWtMKlD7VOKAPrJk08nL7y2UJjffunBk8eOLr/23W9fHD24+cUvn53Nzd7WQBt/ePno449ff/qEPTGZbFR+6xe/8f6Hpz/6/tuvfPFb17fMo2cfz2fQtaQCNA672fTf+bXvvP3WT37y8dFoRNa0Y5s9/uDjqLDD8enhfY7dqy/d4eqs7Rpxy0lmW+dn09np6dO9ySAxfPL4o3IwYZOQkFcl6MxkaAxKorVRWqvQRYV4+86Njz78qVbtq198QVs9vnZ71gjNV7FxSDHXxlX/P7L+68m2LUvvw8YY0yy3fZqTx197rilfXd3VXQ3XJACCoCQwgnKkQiLfpFf9E9KzIqTQA8VgiDKEIiiSgiASIBpAA4Xuqi5vr7/Hm7TbLzPdGHpYe+cpSHnvQ57M3GvPNdfac675m9/3DWyXy7bemOHMzG4mrpeLpZa2GlSSVqCY49Z3i8XqWaYHk6z0aUjAmigrR69Oaz2eHt872V6sn7y8+Mq9G83y0/ffuyNcl7m2eYai+rnAu+i9S8FbrYiAUJEywAnE+QSaSFD61AwGDDFCElLSdZ0L0tSoKHXeWWswK6zRCTD44DveuoCoggMfokuurZu28d6lmExeyWhUaKOstSQgCFVRFVqH6BVlWVaQFlJdvVnYLGNIWtsACIIJWCFC7aFNMXi3wmJYkpHMZikEGFoILYVoNQ9z1bVefNOtYlg31XRCnjDkzhPFxESUqbBtE6Ss0KpiZaJwq0vLyFVlE1nvUTh2XWQyKJhcRDGNSyGsh6WbzFKmlHdBWZsAUnSiQjYdb7t66Ygb9qx9F5oOJLcAme8anci5YLRBgs7VmkAYqK+bhBh8ipxsXpiMyELbpeAEEaPyh9PqRjmqVLHYLqtMN11U2kYfEYRiEJTUAQsghxjYBa+10UpRbowxEoLudRoJm02rbARx3XbZuti5qCSNykyJMjojZXwbyCAzOoZ+lrNGISrvWuZIimpfExogFSKTJSHF4IWl6TaSYky+6zZdaNarGiSVpR4NM0G/2G5KHyuwPoIIotZFUQlSkaILSfTAGuL2klFAaTQ6L4ZKa4VZpjSm0D+6kVKQsQhn/RTNVLed1rYY5ClxTGCKzNishx3JRwNOoyTnULiwmU+MgklYSJeVAU5d1xHH5Bn+ta8dNti7h3ivF7rOxNklzAjQnuzsbGE7mdCOHAhcK0l2OpidP2mfM4D7yKC9felf
UwNdG45kz6YA+oJs0pdQ73Nz6Fovgvvvdq3b5eHs6VbPUvq4CdnVY4G9aEh2iaq4Q1mIKDu/FFzzCsDr88U+7EH6AnEisA9SFoF+wQPXz517QLE/i77FIiLAffA1vhYa7VRZ12qXXbN35q9dOs/eDbdXZO2zhnaH6C0bO9NWL7W57sGenEgfJ9pjq71bbkcGeaeUQsA+L3Xna5Od2IdYePeM3S9tXsNCEYQ+C+laH7TzjCGB9DBMdoHcuL/gfXFjEgHuw0mwh2Z79re7dXrXmyCwKGFMAAxKAEWRIPJOHyYAIrwvf9a/OdOONsEedfY9hALAu9rfrw13O1R0/8F0OI2nzy/ycZG0PTq511213VWXaKBL6Tbucj4/vbw6P88OpydPH/0EpfOXL774aHFx8fzdB1+tzzdf/uJjjPjN7/0BDEKRZ7xial7+7C//0YfvHBxP8T/8H383hphc88a9mY+XpPLcmm5xpkKgVieVXLtwke+dvJ+cbpa6i267WA8LuXsr++RXv0R5e321/NWyHuSjPz758OrFS6lqg2CgKTtH9RVEHNjj+we3vnj4CapkQ5sMtX6Tl8ZFcZ4av/Z+a2fTwfj2q6fPqiIfjY+bbYuaxYjJch+bFGtO/mxecwvjbDAsdd1srRCwgRCq7D5gLpIisrExSnDB2UKttkx2pmmgeGWRXCdzvx02V4ez2dXS10G9c/vO6YuH7bOPyriqjrKD24f1urYIoyMcJtWsmsXFVWFL12BIuHS+8POqKlUKZSk+duVgqM1gu55ns0HtmocPX51d8WBwNB7NyHfrl8uDaaHzEtDEoGxVGsxD6JrlBjgfDo016erialmvFMHBsKg3QQ8yyKsbh7eef/wFMCxebSajogI/n9dA0xSx3nC9Cm2dwNNi/ZmkYMW9+97h5fmrrqWmiakj77egZTYa5XnebBpQWeqCyggIXZK22ZRlVcfAcTWoDmbDwXK5bmMS6zWhNsXpan50mBfWKlWmoLeBJwe32lUzyAcKiANYqwK5Nl6Rk0wVj+uXt2a3LG4pv1ienQddLMbZ3fduDfMD1u2tg+2jR5/fxfXwVhlQ1ZtlNjkgPFg1q9FBXpZmu1gdj3L2UQDa7XZ4Y0w4WnqX5eHb33hbqQ38IwAA770gphhjTCy89x1Ln663wzqq/0gxCIToFfYaQRRhowwS6j7IBjgm1qQUKUJCQhEWYaMyBGSOIqJ2GiIgon6YkP2oxpyYIXJ0IbFAiCmmGFJCICDRQABoFCJzikmYlVKSWIh2TVSkAAyiNQaFo48sEpgRJKIwsiVl+o0YkBASokaixBABIjMkEEHVVw4DCDEBCJBKIhwjkQIBo5ABkTLSGomANEMKAqumTsEzp8gMDIKIpBUxEmpRCfZiG4YYGQGFOQqkxIDAAiFFFk4xQW9JYxEEhUQJohJjlNbaGqN2AlAmxASQUtpv7vTRdLgrPyksqJmhp3S9GAuJiKUvjJCY+6FyN14KCicAlF7HhSz7ubOfOVKMPYsKklCZxJBiEEQkEsAYPQoSECjoAyexV4qlRLTD/inGlHrNVF9pDjT1QUqA0FvQGFgBAe+wIDIzp9DTMwJI3NeY28maiTAFJxBYa+89AChEENGkmbD34CnVxx312eG9sleQSDgxp37TY/92QJklFJNl27aLHEdFOcwyCSEmXq9W3/qDB88XL6aj8cvz9YThax8efvU796d/5w/q5fLZk0cvfvDDl589SYcn1YMTnN268+aD7JN/pBdprPKAarlt3j+x/+G//921nmxacvNLdvXIUEyYV1YXdrOdU4wp2SxHyyovqk0LprhjDJExZO22aTTlmspyMH74q4/+8hc/OLwzgNI+e7mdHJ+sT7d2OHp59WL+5NNb40ynEK+a3Kg//lvfffC9P9rkh8Pjt/Pt1tWnv/zZn7anz3OA7Ta1uj0+rDQBEnIIxhiFHADJ5pisRkOK600bIpMea22z/KD2C01du1gJpDzL6uWymIBE29f30wpC2xqiFONmuTBqDVz77appa2ASVq5zFK4ANUdOOimLKUTfbE05NMUguJZUoYxJLtl8cOute3kO1bg8+4//seS6Vay1qj20DiQGM7Yts7JaK8YUKqPK0uqRKYuKXXp22maHXXVQrjf18ejgcJoZ4vXVulRIKDbTPUvceN62DtGXVQEis9nsYDL0l2eKWDan7sVfpuo2sNq69sbNO18xm/ffwa69nN04wsHYGmr9vF2+rMZ/IHo0Ohx7t7VlGbGC2pXhyi9juz4fFgeEa3GN0lRU7bp2XS333rzdtWMLtp5fpdWyGB+2C4cpSwkD1F27zoZlZXPXdFv36sXp5tb9O9v5VtrFwEpkC2iYjC0GlgNvtvPupz5hUxBpUw2Hqiz09OjlqUs+Tu4eLk7Hy4vnOtN5Zher7XCmV6vFoMp0Drnh83V39vSiKs31p6Bdb7eLze03DjpJeqCZwvHJYL64cjG9OD2bDSsw6LdhejD77PLqzfe+/kff+7fXq6s7w/qjH/7T43fuJRc++tFvs+JYFdXx/aMHD+79+V/8+O3vfOfdr3+1W5+1Pmbjqm7n08PxgWSvXs4HxQy69vTF2fNHzx+8dWe72TRUb92cS1NmhbrsSpRRZlcXi3ygGte9enpaDu2DD97+/Ge/sULDwq5r3ynZsvj15kQno2zdhsW6qfL89OWzu+9+2KxXB8c3z149uv/eN6NP7bYtR5P5/HI6nWZl1viuLIeoch9iXlgvcbteFcVxcBGV3SwWJkOtbNcGaeu8LJMEEQdKlNJlroB0TA6jT5IGg+F2uSDUzJISBeK27fJ8ZLIyNG3A9cGois1y61pUhEICyhhVDgacWmX1g/cfPLvauPX60JYSwEDOgequxbgd5tOh5EfDk4vVl9nAENLG4/GNN89fvfKJfbt29aW27v4bRz/52bNidGj99sbQpqPq/ZNv/vRHP3nr7XeOZ5OXjx9t1kt7UD381ZeHR8fTbHTxm89blyR5RP3bnzzcXtTvPNAvn3wy6PAnf/rj4fTka3/0V+aLx3eLsV+s779x7zc/+3GN1T/8wdmNw+HxjfKP/uT9n/38V3d//6v+n2/bxfK7X3173S4YsCxzbeHVvJPxsMuHV0388smZ6/DO4Vr+4tf/1v/gr1c3hpCZ5EK3DczeZplRA9+tSEVU6s23P6hXGxZaxmc6r/SAyGVGK0DJSmJxTz774p0HH0zeeHtxem48DwYDbiAfDBan58GtrTJkrG9NnuXr9cOucbPBTJKB8fHgw3EcjNaxZiur88u62d66NSFums02p2l1NG03u5QWY7REkZRIIUg0ZCRGLeCjj2SNyaLE2jdAmDiVWda2XbN1LFQ70cTWUmZ17zrPc+29d63zLiWnklLe8WbDjfObTegd9Ug0NFoTDiqlreYQFWllSVCqrCjyEhEi++DdoDQx65fP2PkUY+KAbQxhi6ur4JntVkM/3DbGlINuUWcGM2uzUjp3hZkAKucQsAzLFBLGlryLKCgGOwg0RlaYcmh0JwYUyyqtKzTgmTGFqLALKkBuTAgCkAU
vTS1tl+p1vV77gwFPBsV2s3RMLsZEqI0FTs0maDLWDlKXUGxqpA0dCKQULKoUfUgxiAipPt9JOIE2RVGgNnpYZENLhQLXpZWSmERjiLSqLzulrFXWZqzZFHm9dYZJF1kHCChN0+QZaeLoweRFXmiUlBcsGF6+vFisN5FB2SImIXDRNUKEmpTgqqkzRZm1BColEYHoeetibgwEr1BprSExCyprkAxaq7VhSD7GxG0dgzUqeZbonYTTi6ut7xKQIk1cGKdyazxQiiI+KmNSYklRMMvzQWEodD4EJOBEyo7GjfPGKBFKjEppjmxUBlqR6uv8ciKyxUQplSFPJioyCEdGiQBGKxbUpLLCrDctKmW4i0CKFe5F+EqQU+pVFpp09EnS6/2zPZG5jubZhz3zjnjsF/j72J1rnQnsWJIA75gK7H+x/34vF9qDkX3Uz/WPEK+RwI5q7EiS7I5Dr41u1+8se7qC+1igayPX6297AQv/bkt6p9auzb07TAFQH4wjO4lPD576AyFA/4C9hzC7mO+eh8l1KOdeAtOvdAB3edKyC4HeEyS6VmntwolenwjC3oInfe0z+F2Wcd09O2HSvgDc62u4S1+Fa1UT4L4zIRGR9C4E2OVYv+Zs+ybtWZTsNmn7O4EZiAD7XBGGvhYR9GlIO1goAqhoR3Zw55Hb3wi7I9HrU4B91BADya4VO72Q7OHiLjtdYFcsDQWQCRkhEQhhAgQA7vNPZH9b/s7djDudFvZJFT1QSgJaIQAgEQL//xvQXl0t7rw9Obg/ePzq5Qd/+DfXT1+x29rhcUjV8/O26brhzYObTfnidPXWg/dfrurppLxz5+3N4nSgjySacjod3jh89Mnls1O78a+++d4hrM+en3/xb/73j+69OV2dbW8cZ9pqrcaJ/UAPIkM50NvgsmGV5flieaGNNsVBbk68btEUCHLvzbc//+2fvrhcZneG48HsbXf/p799WYP/81/96g/ef4MqmF90r7o2I9e6djydbRbny+fPU+fyyezozt1Xz74EBKO0seWNfLLu/HtfeeuTpw+NtrMP31pvgjJCLCHVhAlN5M4VZd5tabOYF7YknTa+Oz6aOGYnxTAfAGmA6Dls1xcERezi/GozHI3ZY2zDFy+ffPD2bDwbnG98Phg0nfgNGylPTt7Ufp67s+p4uLxcF9V0cPhB036JqZ1fPj48KFm8zUgZMQNCXT3edIe2nA3fTIuH7MO2dg7WSWLk0LY8v9z+8gePbt7+egZhXFKWCapMWxqVg67hvBpU4wIpZoyp89vOJWQEUwwmk9tH9fIqdD55LmzZYP784qo8GI8qEnbrZbsKKWiTq4rTZjAZN+KxMKxA5xlpm1NxsbjKCj0qRpuCE6saUrvpJKHETjO3nTPZ0IcgKMao8aAASlbH4XC02nYr0VGZdTDjYri+emn1cDoutBo1m85m5AKi0uw415PJyUGNVyzrbrmdHBal1Rcvz9cbd0Vk30VTjU5ftUc3D7pEr148sRS1Gb961r197+8cnnz76+999MWXP7hYrLTN150JTAfj6dPVXFTyjXcUx7OSlb9ze+zq9sm5g/Hwxq3x/OLKVjtVUUpREGOKvZQDQbRWhEpr0rqXsSSFuldi94oPpRUSxBQtaha2xhBI2gU2A/RabSLp5zeifTbbTtYoIolFSSQkpXRMMaWECDGyj5EBIrMLgbl3hIESsUpbqwjRxWg0sGEAICAEMUYrjX2QEQBoTUYhgbJaE1IIMSHEEGNM2uhca02AwkjEDAmBOaWU+pE6cqTIAkIAGgUAQwxISgRIRJMOjCBiNSnSfWxT55zD2AbHIUTXAZJGKoqSele5gA+etBIG72MflxNjVKT7yl8A0OeFp5SUUv1MRYoEQVEfBq32fH830KXEMaV+dbcriAaApBCJOYBIX4iKSAP3M+Iupc8YAywpMXNkTkRKmFmEQPdBSTGFXu0lwjFFASCixJxiSikSKU2KAAmJJSFACFEQ+iglhJ6xUUqBSPcBciwiKUVOPQUUAaU17mZ9FEksBByBSEAYGVLvpN5t8ezUKogsjBFIIYBKKSEKko6RFangQj+LMAuhSTGgkDADoNKasH+oAwbubZQp9esBJEJQsHe1Q2RRkiKnwSjv2rhaNyRiCKsyJxKt/Bsn1YM3Dz+p+J0P7j/41vvcbuerz7qL+c17WZWOf/TkySb6SOKCUyH/3rfeulh8/vgcQGezsbp1Z+SI1o0EzO9984PLL3599uTs1p03Dt64kxh8myRFMCnLYXtVP/78uWj+2u9/JbBmQEGqWyKJh0dg8ew//b/9b0YH6et/dD+FgcGT2FZZXj5/ePZf/1//X2rhTg5sc9lM743eefDW4XvfXOd3qDxq5j5++eNP/+IfdKfPp8fZqJxtW5kejDoW7VlTYiQAabd1YM4y2/lUeACKIGwLKxxTN9+4lQTwrvbdBgglBUnil53yzWA8Xc3X2ma5HW7W2/Fsghmt5+vQJGGPmbG5PbvcBCgQmqoaEEj00mwb4dg1mwooKwcIIWx9lpdaUXAoqAeD8dv3b/x7f/KNP/vk6S8e1g2BABhNopSLITGPSjPRMuzCarHJRiJcbmpGF6pJwUN84+vvmH/wkbZSDE1h7Mq1hzdGVxfLtglVrred15mSfCAxbj1iXoyHw7RuyYFncVy7za+L43W0B9nhjY7utdt5Mci785fJHaQooLyqbgDeSTQ00CT3iusmwbGtitp1WVbq8aE1xM6Vg7Q+v6ibuQAqymyWIY3U4GBYzObnP00419VyODq4eDkfH9xQNGja7WL+LC/GmGukbDTBsDozKqnSppS2HbQuaV0p0sG3bduowALUYQFWm9yKxefzswff/RttONg2sToYr9YvyVD0KQSerzs1KxlcaAExDceZDzxftHfeOOw/Bcvt8uj48MmXz19crJuYxmW+WXdIkOV2uWmvLi+PJtVoUj59+PjqxaP/+f/qf/2jP/sHn3/yBSznt25NfvLx86zzf/xvfO8bf/N/+slPfvji9FGb0rd//6u6Gm6bpTC1i/VkMq2qsnUdsNjJQVaW6+U60+bm+1+hLF+c/shtGuri5Gj6zq2ji6tgjUjrm6YNbapGw+PjSX21rsaj2eG0XTZnmw4YHEATRKH/ow8fXL04rWKILrrt+vY7J023LLNMFN699+Fy3RRFaUqLWTWYAJJJiZQ2LgIl5hB8Cj42g+GIlGoayUqdFyWgJy3KELKOHOuuHlUDBG1U6VILyEUx8J2bL+eDie4kpcaXw/FwPIwBB9ORC4wpjmaTxeIKUtx22/l6cXJ8glHWW7+rL8OGDGSFaV9czcbD4dQmZxMhp6hzVQ6KoZ79i+//kG3Rtessn7BKhDhfPDEkkOKd2/cLox4+/OTX7hPP8db9B6ob/PRnn7zxN76N1L713jvN5nS7aU5u3fzFb358+8G742pwcPugunX/atFObx5yh+t1OLwz+86//VfP2njn69/l1avG2WzZDDaLh0+/7IqiPa/PvvjswduH68fzrh282MbJVP3mL/7CZENlw1e/8/Wf/8WvVs
vUumYyKbu2NSihrhcL9c5Xv5ND+5sX/zKf5XGSTWfjBIXrhF1TVtaYPCZA9imuLQIpUpoms3ug5raALHaa7MsXZyfHN4rpcHm56NbeWvPVb/41Sd36xUtsW+5aGOdCsq4vzQDBlL4l13B2+CBTEsJVW3eu254+m7917/dUUaUYyEk1HY4OquXllW9DXuSjyahx/vLVBeBuXeB8B8CdC413NjOjwqBSEpNRVpRiUj6lnCFTGhW6EANIgCSK2IJPAgwYSHwcD2wIIYXU1G0XuA3QBR9DdIFDhCCYQBlFimyu1agwGQAEr5AiJ8XaWFPmBRIoYyCBcMGgCaPqS7VCUghZlrcu2bLMPRUAbes3y9an1G4hK1wxymCQO9cGB9aWCLBdd4oMkPGNdi6JkNG5xC74LhtqMClhkBxEJadEhQ4YgPM6WICySYE6lYFNyeRISTqj1Xg6WV4u2m37Yr1a5DAtfVlmnFpbFYrAbbs8kbWFMaYoBqnwTdOS0jYq0jp0tVZISrlIdeeUJhDFAThhVuXFoMhybXNbZuRBTJY51SRBZNVuPANzoTuIqCEq2nQbazIwRpdlidoClqaS4BCimCQslIgBVsv18mpxdn61qlsGGE1xOBxDRO9C6zy3MQRGANTk6nmRZXmuOSajCUhJiAaJNJFRthxEsDbLilwHH5SWBC5yCq51ITQOmtqF4LrQbdquSTEB5UYPQCdRWTYy4DzzJrSGoMjLjKzWVUzoYyyGgxS5dVuhgfcdJQZAl0QLC3tEAjRKa0EPjDHFxHrdtkRqUBTjYkIKASkiunalVOIoEiIiawLHKQSvFUZhFIiAETiz2iqTUsqzAimSUUq9pgw7SiHS58sIAPd1W+j1Mh8R9tW8dkgD/7U67rjP7UGG64ShHnHslv6yU3/v4NPOdwa9D2wPcnaaJpBdPs7ur/bCcNpJTnYt3mtQ9q/qiYH8a+fVl0y7Pir8ToQ1IFIvhMKdN6mX28juDX5XHcS9qU129drlWpOFvwNZ8Loh8jsh0SICqPou6IU613vK0FOfvR8N9x25T7C+PrudxqbnO3skRTuas7t6uIdPuFf67F/dgxJJ1JsNEVJvnYM9mUOEfVARvL6sAgCoAK5xXG8eoV2AKe1R3z4DqhdpYU94aCfx6VvYR0f1kiju76udkxFgJ6Tquw+RARFIdnKvnlAhJBBWyISgUHpf3U4OtncK4m4R9Ppm3qmk9l2+kxTt+OEeSvVfuynhvTuzn/zTH3Hyb79x1z07S1vOqhvl+M3jKjtdfb5+5hSoW0eTXF1Jau7duem8m2/98dtvWCy2bfvRb3594+iQhkd379wrVb48+8WXH/3gzXvZ7XfugmsPZkMBibG1Rd4s26Q0MDfCgzILrQfkcpQxjSAWrfMsMMhm3Wb+6tXZcrVdXl3O7tz+8smPP/z9e1+uLqiDq4uz2lU337x5FbqRnQyn4265Gg6hrq9smYlFF+LVYqXViL1IO70835RHMJ3e+PTzzwqjtK/n66X3QAbLsWpWa11ys1lAMH5TvHh6NRlNTc5IEUmnCCDAEJsUEp/ZHLTF8bCEpGJCxFG9hvH4ZtN0Dx9ePrKmyBGMAo/rFkMsf+/3/sjZQeqe1FdnUcdiUHIrX3788c3pRFm5+eYf+tW6yK6QtnXXdF1bVPoonwydCpfnmDhFzvQoRR2D29ZxDUmo/Ft/8u+dz/3jF1/Uy3VBGeTk2J6eLseTyubpcKjW9dKQJIOSWw+K0AilEAuUWmVpemAHpblyQplp17WorMyVa9L927N2sXn2+HRyqwxtl2o3PhxZY6GWR68uq9IMxqkyJBGVMRHs2SpYiG09b7atSpnhQumBzoedb5TOWbIUIqVp21hCEKUJ44HVhbVYDTONE6UdA1uDmOmsOjo+clcvE4Uoc6sgUYW0Th2s15vkk1A1zIZds0gTcK6+OmuLbMRJb1lJi9XNN54u3K0bt++990Z1+40//9P//Pb06LOXbuODUnAwPBgNZ8/mn25ysD651m1qV3fVz3/z9K/8tW9mOUMXbVH1n4IQQj/makUCqBVprbQ2RP3iXyXeO5ZEkBSScI8YELCfuIg4RU2KenOZABEhsLbUr8n7mgn9OADAwpwSM7BWWjglYRaJKYUQfUgM0IXoY+wHnsxoq5TRWgEopbTSvQ5TI/aaJ6UIeq8tMyCiIhE22vQKJkIEgqT7yrYILJyi0tiTL+6Jdu9sTYkQFPaFNkhICScRUUBIiKi0ssK7qveRBSQhCmnyXS+6AdI6Rk6JU0iADEhEymrTBRcSJwEfuR+INEWNymjFIDHui7kLCIBSivroaSIAUFr3018PeFB25eVZUv9DAFREiRlECASREifofbm9UIgTABCh876vhdBHFHHa1ZwT8Sml3i0YUyJmASGknatNJKbIAMIJJRJq6O+AfmMBiRSRJtOnICH0N8yu/hlijAlAuEd+RAjUQ6XdrCHQB6ojClHvhSbmCLvnC0GCyFHtFGoSOfUTDLGQosgRSaWUmJlIhxAyY2OKAIKkRdLO07yTsPW3MYqw6lVLzIg700FGkhvcBlpunEpKdBKIg9JkJR2NbYjdjcng8mx578P3qpPbDx8uzMVjla+287Piaggeh9NJ7cLLF5eTW7c2m+aDb7z18otnVYZPOrp1f5qywfNXbQKdLL544rrLtLhczGbjongzeFaldk2bQzuAQDjf0Hx9tWpe/KLBaXl8bzQ5ck00Vnf15f/pf/+/tbDIhtLVYXp0Y1gdRdao8Kd/+i+3n12VpJbSTir9nT9+/6v/xt/11e2ohxrt4otPfvj3/xPNzWyaM1mH2fSoImWYAxUDArGoRUiQfbfVVCija+8HlRFhIKvLYrveorLOJWM8ECQfQhJlMASWdrPYblBrRSxMk7IIrQNEaytgU5SjxWrNWTmsqq4Ok9Eg1h41gFCqnQhBVBzQresQvLD0+aDBS4ypW8Vu6f/6d9+DxCq8+PTVqgYKKSkURgghrusQIY1zHM8qq5zzqTS0WYVyaKLz/qoriJQL3ablSr313oOD42Ftn+EqpmbT1N4Ya1CzB8nt8VtvnkyPl48etYsoCGaimlTT5lJl7BUDHL/14G8m/3RQWJKD4HXky9HoHkRcPfx1azaYtjYbwrbzm5dYr7t2oZMvbb7ebLfzNvqgwBqaRjad20hcNPyFyb4yPPigqT8N9WrRnJaDceMvqoPbZT64utxcXcWjk3u2yidlXLx81c3nk3HZRQqe2iaBpODqQqdyUA6KsnWRdbZYdbA1+ZCSHodw9Osf/CqvaLFcJCUcvSQYj4srnxjCYGiXV1sJDEg+glJYb3ex1qurujL5Yllvto4VnJ3N8WBwPJgeDWcXy/NBVpy+OJ9Oirbuxvnwv/1H//DJF09uzaYH949eXSxAk8bg0+rxxRfr6ajpbm6fPx7r8NbJXW8y5Mib2C0vnV/FvMzzjENIgQajwmn46v2vffn4/MbJu2+9GX77y8+3Ch+/OgMlxtB0OLw8X2qTL+fNcDyYXy3rde22bQhxWOZn6zrP7dpFIPjtkyeHuopd6
zNyHfvQWtuZokDErBhQ8r6NqO2tewcXFxvf+dC00+OTZDPXpulwGNzV5fLZdHpTGRrNhi4AaZNch8oohQKegxSq4qQJVdN0wiAxKUgJ6Nadt9rGU6HzglzXbTbO2ioZnRXGb7ebq0VoXUqh2YQUyqYBA8ARIMXIHEJYrbfVdHA0mabQACMDKKO321ZlWinzo5//8MNvfNeORtWIuk0da19NR8kFDnxy637rYivmzht/8PMf/auU0vmTH9+58x0RLKU9GithtQbyWl21i8PbdzAkk2R7dWEOcL3ZjqajUVWt2uVZuzbD8ff/m3/wjhz/3oP3VprOL553/3gDdjM8HGVoF2evxrPZX33//Xe++tf++T/+sztjG19eHt4bfv6rn7z/9W8f3r13Nn9558bxrTuDzz/+NEZ1YzT2bELntpv0/sn7j758VI6rN998o2M7yofRKNc5bUErm5wPzvkQq1EeOpn7erHeHmbFcFyuF5sc0K0awix4Ahx4xm0nB2VOfjEaVl6zNfnV5dz79ujmTT0cK8i1D9uLs4hbQ2lyULV+ffLVd1M+KIxWIu3GI0OVmdwUFy+fDarRGx/efPr0MxfQVtP+U1Bv23rVdM6DgsigsCMdtck1ocksoJRRWUkuNF0XXMJIpLKMsiL6zqK1aCAm7xHQ+OivFqvWhSTYeGYhFpVlGilk1kRJSqvpsJpURaZIK7XerjObF0UGLCSBUKcEsV9+GWMEFeUKoe1WZVl6H6IkMqgzPR1PUOPIhXbddI2PQWrXpi4xUkLSibZbiEmUFIm0czDItTAxs4PgfUKDoXHZIB0MTJKEAEZZiWSNtc76LjNSuAQaACPH0CUlLGgxy42+MRurg9nzxapJsmg7KoajoS7KMoHKpPYuGmOKQS4SU54XgwpQee89CI+HmVZK6SAyC75uuhAjCSMLKNS5FmG/XYOHzgeFclAaECWBVOxzxFJdOx9bVDQYlpSxS64NVNmiKsuMMipHpDgm5zofYuy6drXauq5tnRMQrUlpnVmjtc6Vyr0LPm223aZtu8aJSB06FVRhi3FejifjKtNFZnKt8yxLHNAWRTkwKvONW15duVDXXSMc100XQnI+hhh9CF1MgKi19iHUm7WRUnBVVWbrgjaaLFFWkCoRCwI/LEpLuN5uY4fOMyYqskFmbSIdgutiTJFBOgI0OilFxtqMtE8Qne9Y1iBKgbLZpvGYnDUpzy0prtcbdsLJCzIpynXWuY6stXmmCDJjxCulRJDKYRVj3K2S944d2Eczy45EXPsAemPPHhZcgxbYyYWuRSR7zLP7f5fsg7sEG0IABEL+ndfKNaeC10FD17+FvaNN9qayHiBc45i9qalX9Oyat1vE7OHKTqsiINd5mLumyp4K7WiK7ORDO1UTXWuI9uXXez7UP9XK6wZeS4Z2fXfdr9f9gn3ONOxzjZCuyRju3rAPI9pzFt4by/Ybrjtz2Q627Pkc9Cv4nRVQ9g1NsO+E3gGGO14mQMTIO83QDtrBntgIv+Y1/Q92ByQE4OugV+jL6vQ+umtquBN2yT5LaeeUgz7uFaSvtwa4Vxz1Xc7XlAp6drb/bU/T+ustiEIEgKIIFOx+cg0XiXfqKtnTw970AAxCQLILQupbs/O37G6+a8h1jYp+/INPyEM10+++ebJdsbox0cODSWGb+efh6lXWuZk6+Wc//NHs6K0IkLpVUQ2aLrmNJwO+aT64e0dil6R58fE/w/UXYM//8G/cJCXLy2YyGGZT670TJkQoqiONVfDbztUp+ul4PF9fWDvJTcnA1hCo3CpfGOeazc3D4xzEqnI0m62fL/JIbXAHJwdt8t1yUQYj61XbGaD46vmzIClXY6Ws89vYwOTg1mrtN0nuv/vWUG8TrMfDcdd1QaTPJYHkV/OGzHZ2nK/nrt24iyU0dZwcUIC21BJFb1snwFVVaZsl3xKK0ToJCsH89CwvJ1U+WGxqxjAdk4t1YavZUbWdbxEIh0dNDL5+FFcvDsb56Obh44cPS4vT0rK7Yuwm974bS335+OdXyyeqzG1m63WdEpWHA5JgS52X1XoD821XDPWEbLcNP//k5d/99vc+f/QvUIVsPBycjHxIMbHjzTuHI2tW245i8ijgmmhtVujs8aOzZ1fn48H0eABkI+S4aTutqlznVGKzbVLrKpuHbfri8dPZ8FaRy+EgHwykxa519Qwn/jLBwE9GqvEx1HWeV1lZHR/q1enHKk9kNMRUxuz5+SLXUpQamDsXlM6Tts/mF/dv3NLc+HaTEtUtCiAqu7pcQVmSzi4uFtWg6JahzBgrQZIUJIjcPDnqfLtu/cuLi4NbYx063CzWr9rM5FafXGyvvDKQj+r14rYuQurmXZH0tBz98fvfkV/+8B8Mh4NN0whAaJurrr1354RUZ6xwCCMqPKsPH9yvKITQvPXgttOD3SLZ6N4gq7UiJK0IkfrS7ywiIIo0AKGwImJERcjMSoSU7kUowEKKgGU3YIBISqgVgeqD1vqKXElSDBFAYkoMQITCJDEBSIgxpBhTYoHEIhwVgiRWgFrQEhXW9JbhxIxECKAQ8sxqrQVkXxi+1y8jAHBK0NMfQqWU1YqF++ICKWFPrZIkRFSke7FpErULhAYEkCQRAQgxRq9JK03AgXBnyyMQrXWPwWw0DNxGH5LvXAChNsZMG2tUb+wIMYbIgYUBEwCL5Fpb1Y/inPp8pj7ZB9ForUkLMO0GtoQILOIRE8fdmEY7WzKh6rdYFPTwHmDXARAlxci005yyEuqL2wMSc0oCiNiLePq8cIRekSuBEwIqpfrUZ0FkEUQlAJxSSmyUUZqEmYhIadXDoj3E78Ff4khEiLsEcGEkIE16d2mIBKFnWCGFJKJQUf+wgLGfxHtZFQgqNIZIsK9dAP0NFjmlfhcipcSM0OdoiQu+3wQjEFIadzsGIryLrAMg3s28SELXMVhffTDZbsKL08Yq3TFnVgkrv402+Dc/OLh5PEibOrOD1pdZGr1xp3z66l/SYn5QMgnHblLaYtF0Ly63xW2EQbYS/Ft/73vvP3L/lz/9VFzIDrXWdnO5zAd1aqVbrYMPzWZ+8fjTcnCIUurYOD8Pq+bi9LwOm+Fstlg+o0q6zdBtu6rMDybV57/64YtXT2/eH+oS77x9uxwd+E6a9eov/um//OKXHw/KrIyYs7199+aHf/BX/eBo40HFxaOf/vTR9/+0Ij++VRyeHHaObFkWWQ6Cw1Elyvqu1ai2602Z6Wo8SihFkQGTyjJSCFr52OWDipVadG0FOiasqiHHVJTFtm26bWMzu1pezvTNhNhxHA6nACqyBs7QmOE4T97bUueGhdeocLtel1WlLCEoU5Qmg5gaETQmQ6WiDwyYV5X4bQh+oOFrbxxs14t3JzqW5b/8/PRimVonjApQ2sQboY5sARwjisbptFRWS5MWD9fkETMyeVEMRskMfvxicXh0a7V6OimoYruqOyc6S3B4fDQcjeq2rX0iW/gI3ao2I5M4hm6jlaHpbVUduMWrQVVgTiQ2LPTl06e5MbFxrKId5jpToKBdnaXU5RWELbOS
pBNR4TcqJGY3ANWVo9I1q2lhqPm0jZrFpxi588nHEFzcdknycnhQFZpXz+cvltK1XbNhobPT2loLpFABSQKEpk1AmknW27iNKwITtinmo7e/87eLw9uzw8vTZ1/Ol01RZLFzm207GGbC2G7DJkVJjCBFlcU2WoPNsuk/BUc3Js9fXiTAmzdHLnKmMbPw8OkXk8nJ2flpfnBoKUnyRc6//51v/eIXX944HrYpzOsEVr88W90d0NlHr77yV60guq3RN6bTKnNBJaWrPK/nZwLu5Maxa2Pj67ff+fDhoy90Nj6opg8/+vyN+3fxang2vzg5Pt4E/ulPf6uK4tWr+Y37b7DMUwKTm2bbFBq0BFYQOCDYYW4VcWbQEHET21LWTdKdsCA8W916gMnHtut4Nc8Gs0yVXRuXp099W7POh9Pq6uJldXTHKF1vzuvt08loPCyGm1U9PJwoo7rNUgF0Xdv5bmCzANC5usomijJhApDD2ezV2TMRbHXnuoDsHXFiVEyXVxfFrHKiwCfBqDQgak1ZkQEqUzdb0jQajM4XZ9rijZPpcrUtbNkxdC5tlnVrTVlmuTXed/fef//Gm+88evh4NlBlWapBVU1HoQvOs8ltORrXbWdN/uCrf7S6nN+8cbxY1LcK+S///v/uv/fX/87p48thiQc35L/6+//Pv/O3/l7bxHXdXpw9u/luRCjPHn0exgOxZjo9XjxZjY2ZUdOczj/+1cM//ub9YQ7f+zf/+OJ89auf/fTf/Z/9zRcPL8+v9IuP/vyPvnn49gd3nz8+65rV7WqcNfXtCdpstlotFpcYXDsZzbTKv/b+1z7+zccD4x986+6DA7NhF9zVJx99PJr8TWMPQZtt66pMRR9Z0FaGISidAcjhbALeN6tOOE2mRsBp40Yzi9oEThOA4WBw/uK58+uysNHw4PgoNCvmViC/WDnLdJhXv/xXf3bzUHeuGRyO2ear+hyaLB/k4AKIGU4ngwGjbrtt/fjpJ5evXg6mt2LY6ayFud+94MRdDImTIRoOwOY6ubrIsgTJc2y9Xy2XAioCD8pxSqk0urAZEQWUTRsWGycW143vM18zTaQUgrZZ1jWdhiAgwHh0MCmLrGm2dV1zgpiSSawBJKXoHBARKczyBFYLx9AxAOhSUkQEkJQVBo1gnm0228HYTie2XddtG1sfs0JLDAakXm2VyXJtuugTRptb0lGEgwsKsSyINGZDVQ6JwUlC17XEYCHXyeqI3jcptRmAtZkyOiQf0GQmt2KiMCkSq26ejC+uNi3aajA+Op64jQOGPFcKO+aoNZHOvQuIigG11jGlkFJQyEli2wzyzFb5pmlYSProQx+AI6fQuSQgVkNuiYAHw5mW8eMXL5ZXm3rTKKOyzDrPRZGhVl1d35zOJpmJyZGpFClG60GaGDofQgi5tVZbo8FaZRWz2wKo3ChEk2wmSHVbx+CcS4JKS55Ze3T4xqiwh7NMwMfgQUEGBiDlikARGN2mVHd+sd4GSdvgmNkqDQDAYHWWFZkkyYQ0S912TQydWG3y0phhOVBoQfrCZJBSt/WegREls7kh1EoBYURqujZ0TXQeAZ1zVWkLmxXFwOoqpc5YQfA+AEVqNpssz2NwbdvFVJKCmFgYjFFNFwOr/nnZpVRkwxxFQWKEmGIStHkeY9ojmz0a6MnD7sGzT5nZO5x2avKdPuS162r3GCqvvUXXX3vZyu+8CexSlnel1Xa+ox24wR3+uVYF4fVhAIVf6436Ff/esPWv1ZjviUyPf+BaIyP7HUnY06ydYof2jYI9men1UzvFCl63dY+x9iXKXjcPkGSXqnRt78bXv90HZRLQzk3WY6ie3uDe/9Q/p14Hbu9LrwGoa5TUH+ZaVLQ79I57yV4vs78MPZnbUa8dpOq9XNfyHkAGRTsoSIivRTb9XusuBfUa9u2bsJNG7QKnQPD65hHukyfkdSL2TpFFfcfKtdVtDyJ3V4BgV7S+7/DdvUf9KfcMjYFYqFdo9fnWu9ioXftwf4RrEVWfa7Xrk52AC/fXbv+6/YXaoaLJdNi1m5zS5x9/duftd1mlLq7Pzl52i8XF+UXdwMOHuG0Ho7zczDflOA6PBtNhXgyG3eJyNhtbfTR/9rnoen3+0YDWb907PjocXi2awWhclGVRTlUblcYkTsHAZoeE7NjlxRTtuByUhoer7VIBV9mBkrS5fCHcJFmPb44W9cWm2TRX28GwuvfW8bNX87qLj15cyrZ+5+4HWT57OV9zIVlecpmPbxyvzprB4WR2ePD4yWf3P7hp7WCx7M5evcyg1gbm9SJDpfOxJY4pnT6bg6yW7bxLaYJvrLbx3s1DxhY5aFN5nzSIgIWESDpTkrwPKmiEkIQgSQrttgbUs1np23axaaMz0g6XrnnnvfuD2RupPrdqe3i/PD+9iHV7Yzxbr88aHY33ZZl1m8tqfPv2uw/cZ5+bDNttZE7TYTXKfNdskrEJEmsbOJKH7Wp19nCtw3HynaH1O/dmLxfr23agU2Klilwf3qiQvTJdVQ5jSFFqprRYXty4M7519/i//e/+yYM//n2TSRe7bdspoykaZMiyosDhatF+/OLVML+bvEAItQvjssyxOD17Pj34YDO/OLQl+VAM81W33nSL0si8hmJQaF2Vw7LZLmIdMsg1a0SlTeFZ2Xzim8tpOQK3acJaQcyLrHFptfVE2hSDVRuHh+XXvnH4s1/8uMxGpcm8D4nbshhx45NopbO8HL79zujVKsb1pmpDd9Xceusu2LHzNCpmA2su5s+WKeWQfXF6YWaLUUUG7fjum6h9nQTTQM0lxGTIxC5wChypOLh/+eQsNSvF+eV81R4dmMmw/xRYa5FIqZ3cI7NGAHuPWA8fUop7KQgRUW9uUohKIQJpROltQwjMwhJVr4kRZBaliIWBIXGMiRNwYPbehyRaK6OkdzMzcxRhgZSYeU97CY0x1hhrFBIKw46eKMitMURaaQBAUkqpvniZQoo7mKJSDISolQJEo7TsKnqJg/1uiZAhRagUEQArRCSMKfb5RyEkpRSgkFLYF+3qcXOKIioK9bk5BJQZHaIX5q7zzgcWSkC5FRNVn82cAJioF0EZpZIIEjFiQkHsK4SJov4fu5wn2A3wKaVEgIpUYgYAVMgpISilCECIiEDhLkBPmBl2oW0sIpCYhSMzC2tSu2Fa4Q7l824zAoF2+XIsCZiTIEDqXwdCpBKz9NJjZlQ6siSfAMkoUoCaNHMSpH6UJ6I+UVqR4pgUKRZRBASESAJMiq7vrpRi7w9n4BCEdtl6RKRYdqUpYJ/W108VhJCSMDP2DYfd3ktPqHpZM/XpgikxIqES4esti35TgzkxAAvvUtUB3n2v8Bv95r3ql59czBkUJUlSjbNhTt7H5bqrlLa3xh/8yVfPnz09//IHFV5cnq8OjnRBcbtyx/m4iWm78ZvTpZ1MX5x2MBrc+urt/+DW9Gr5+PmLqxztdFK9enkxsJjndHJyFNt68fDL4uZmMjhoOaR27SBWmc2r48m9t7LJLZPfWp0H79fQzb//0aMf/vj7eli9WG/uTKrD27c
TjhkyqPlH//QX1hZd3VnQ3/yTb/3t/+h/sgyYTwo5P50/++I3f/aP8213463j8f1hiHJ4PDXlkAC5cwxK6YoVWU1ZLkBRUhd8m08mKagUEqmsadOoHHaxIGNObkyWVy+1saiMEgydU2KtHaBS49EQsDRlERmjGGNLv3FZOdZZYcqY3Dq5DUlQFGMIg0FhM+VdCCmNJpMkAilJIibe6bBRVvPl6tXl5M6sPT395S+/qJfNvTu3vvM3vjH74Uf/5Me/fbmJTYeJdfBwxvLbV/W7M2OZh4XOhurzR8vbb86szd//xv3zF5c370zaoLDM/Phk9uHvf+ur7p//Z/+HjPTRJJNK+UUcHh4bVbXhopwNrlYLrYd3b31jvvmoc37TtGW7Lct69vZofSnx5TKHWB7ejpCzD9kg21ytWakEHGOLhL5pBGOMSYJLwABZt3GTo7cXq+2yXsVUH0QeVSPfOsKrbZvy0YCyqqkdAqiscFufUgQGpHlousXFPMuUyZUiUNaMqupqscwKqgZmsejK0Wh2++S3Hz1d1rzatG+9eWt8Ml15GBxPvenu/94bFxePBSlGiFGQVXACSgmopm6LjEJIkgQT+xhp58qEAHxwc3J1sWw2ddP58e07aPXlZlGW9qtvveO3F1fb7fk83rox+/S3j33r7r5z+PDlOppsXFl75+igwKun5y9++aNsfIuurqJvpndvtQwuxM5xNRlShyGEPCtcCl88/gIid65WXMxUxsFVw2qoJJXZu3dOfvWbj1QG49nw3/9f/kf/yX/6f3xyur57NE1NqAaFr5tBqa0ZXa62bYKCcAA40JQAn726GI/LSNi1QK03ma1GWYihWV5WmmB4GKNmNuxX69UFzWbWaGQBDNbSebcsh8XV8mI8ueObJoIFCdpQcEEZq23lu001yCFxZE9WOZ/m9ZoFFHNGvOZNYYyxmkRc64qh0grZSdc2YHhc6bOrKxe9QV3Ykj0639YpiBVEtW3awSg3OkPiEB1RQqVE4ZOXV++8eWt2eIhxcTzWSkFmcgHs6tZam9j5LpBSdb3xRS7VeDSsJrdunf7iJ0fvvP2/+MPff/aTx/mk3C5fPvzp43J4bEYzYyXySkKGtcwOy5dffnr2sD24e+vhxZP56fab3/zg8GB4XOq//r0Pn52evTitv7b91np5yHDrt5/BZg1vvXNfJ9e5zofIrKyFm28crOeXk6HNLDfrVbdlK3Y8mClrnj7/xNeXHCMexjfuZx8/PKtGx7/35u8//ezh7bcMkANltGgRpcgihcSxczEvKySussHps8dgUkSIWt+e2spqFz13gSh1dWu1KvNKgUhKgCq1/uLxizvvfvtwXKqulvXZnTeO6uXV2Wk8/uq3Ywpt6CSE6nBiRqx0ttluQrsdVNXicnl08+bduw9+8P1/lc12S7rOe1KQXAQRrZX3QYx1scvEkGDXSds6n2JKiUl3iQPDZrkdFPl0WGYlNL7VCp33iVijBkUhRAEojM5zK0haY6mz6JglZabItfFRsS3bdbDGVGWBwAaR2YPYGLpMScbCqAzaDq0LkUCDIlNmKqTovUqswE2KzAXHkAY5EKRqaCIHAfABqdPCtG0cZpxPLJkImISjzUFAcUrFUOcjSuLZJS250ZnSxqBtVi2xuOiNJCElJCwEKkzKw9b5kbUdpnWXNqtGKexCIOJmc85jujGbrjo+v1h0PhojqWlIAaHxzhflQCnwTVvXNaKqinyUWwhR6TQZGVHILnKKm7pBwS5C49l7Z6xqIyiWWl9Z2rgYWCk7rEIKScFsWnWuE5EAvGnWcxOH+cC3iV1mDK43NQtoKlBFH5wxVik1LgwaEAwxhAQqxMgMKCk3GKPyXQRtFCgtkCtUyJ2LYsglCDGO82yUl5oAFDB4Vaj2Kjhm7uuiKLLGIqENMSVAhWWVFcq41icO29b5bZqOjCJVmUzQKG0znfvAXVN733Li6FutTJbZqsjrZtM1XVdv2dXJOURQBNGnwMkqSeIzVCm5ENnFuAMI6DEEIhUCg2dCnVlY1TUjdVE0oiJtKTNIoyJvQ5dSMGSMUs7565SWnUXs2oqzoyLcW6H6x7l+Md6rMSTBvixJ2v8x7NUasscKe70NIgjvl+r7nJ/fxQ+wC8WRHVzaKXR6DrEvpAV74cyeVvVimmthTX/wfbzOTjLUy0xoX3KL9zxoJ4XZw4/XcUDwesVw/QwJPXJiRMRd5fhd+bO9GKd/ztxFJu/Z0E671IuMFP4OkxDZnfc+Eoh3b7ITPCH1VOWaD+2hy3XfvP7hLh2qlwPtrHUIe6mP7Lq3VwS9trkhC/Sp8oJC+LtaLrnma7t7YC/Xv4Y8emcs6xPjd8wFiXbrGOGdqqsX/OwjJmAXO7RXhzHvSc11SwFkLzzbia52vYl9OsPOcSbUC5Ve3xe4fzFcQyHcGR15R5F20HIfWoW7qwVC/7+qoien27u3D148efjq2YrVdHY3qwy/fP4cEIw1h9X48aNtkU3eePOtxBsH/tGnj7bbpq637LflTClpl/Pf3r09fPvDdw9vv8nbdlsnLZWiMkSo2w7BlrZw0WsFTXcawiLLhoqm0SvFrIjyrBRRwQWfauDEzJ7x9NUCRKmC3v7K/fVme9NQPip+++mTD75y/8Wvn33x/NUgcxGHVZY3KVgcNmtOUG7mXnXrAY+6V6d1iNuuPZpMfCQhEzZZpsg5X/uG0SeLFZXiolvWt2+982j7M3VTulgrwHXreBcvAyCNc7VREDhC0qSBNA0nhVXD81d1B7jcOqSitMVmk6Et1eAtZUbrs8dHZXT+QqbToiiWV5daweRwMpiOuVlz4NXVsvY8rqpicNCsz2JyPnpNGjC/89adX3/8l3duz6qBMYpXq7WRdONG0bySX33xkyam9avz0WSShYS8+eDBu7bK3Posy3NtMh/WKcJggL1c9/Llk8fPn9+9P2jjpg2+GphhUdW+SxFSl7Q1BvIQVkbls+md+yfHjx79qBwUm6b23E7zMng3OS7LkazqLhEayooyF4BpNUacuMjLZa1SGk7yrpar9SbjCkhMWa3W52FbFyVxZqrR6Hz+Miw3+WBAhXp5uZiND8phRT5dPHkVtzUeVT56a1GAMePguuhVEF3k1VGVt3HeYZhMh60PoM1iU1sajjU367PpYDyZzC4vL4zymM6ePLo6PCrJTkJ9SaK3W58ND27euJmZ7uzRI0iIxv7X//Q3k+Kw0KZL/O6HdyUKh9B/CjJrABUCkFJKESKgUAJm4CSCIrvEnJ5WCyAQImrSu5xrhX34UB9EjYDSgx2JIqjFCEiKfQKQhMhd8L7zgooFEgNzUlohIjOw9MomZE79BoCxVgkAEgv3AcekSBtNSgsCkCKilFgYAZQAKzKa485XhorUjsLv7HLcp56pyBEEjcoUEQIyMyoUFBYRxESSkEUhA5AygpQEErCLjgS0IoPIzBIQI3FiRhGGzGROexb0IZHSSZGw9MXABEiRyjIrLCFCX2i+90kzR0WkEIWZSPXlFlhEIQmIMApjQhFm5iQAnLhHSTtVqWBfKTMxK6UUUQ
9FGBlRCCAmFmBGZBQCNEYLAJHuWVKPB2OMBKCVIpAYonMekQBYq55P7UrUaaUFKAqkGIRFa7PbD0nQ8zgi1aN7BYpBRFgRAYhw5N0s1nsYiSOHGASBBQiVSNpxK0UxJlKw9y9rUr1BTDgJECoiEEDZBRhxYkQySomwpATAqJTwbpxPzAB9OQQgUn3MU9pLlUOInPh1KVIr2VjGt4o3v/Lh0482Z+fLs8tNlokKDTllUB/cOi4PDl+dXpquO/14gV78Wr1o0ebdwbQ6ujN5eb4dKHKrhUm3jAEsdCr0jbdP3qXyXy6//+LLJ/n0HhkTkBfz2pDcP6w2L04/f3Z24+IgH5nDWyWWU5lMssnR87WuBgf1Aiqjx1n9T/7h/+OLizkbLVa0wbffvdXWkVVadJvPfvaj0ciG1k/H6u6tGx9877sXAX3X4YvPX/7lv/jFn//0qCzf+eM3q/E4lfm4zLcb7zs9m5Q+bVMdEUWrIvhGKTOazRaL0yovltsQOQxyIwzD8cGgmtbLJA4atwFUopRotJlJrZsMT5LY5XaeFcOsmgIFid67wMkNhqWkjd+e2UJnecbKtJto8kLbECUJQr//1Ky3beN1RkrndR1sIUDUbdcXz18G6V4sN09//TlPBw8+/OrbX/vGqtt85evf+OYff/U//y/+mx/8+FU0hc50RPi8kdOQJiodnScb4RDN/Q/Hw4NJev6UbXQRXl6EN2/fGU+mi/Pt5slHgxtHJnWuTSxmdHKzSXp7uVhcLLJhSYdHEYbnoWPD1TBPqbMqqm5VVnaelOY4Ox7MV0tj8xTD+atLsjlq9CE6v1SmQ4wiHGIKHadVN6iqbJiv4kYq0mKgLTlx16m+8CwCNU3YLJvYxDbRILd+7ROgAOaZAiVUkAuxmUdmMEZd1unS601M0HC7IZm38clDq5RkxfDO7Yt13aT6xo2DZz/8QXXjlsp1aRVZXC0bA1BUNjP6qm4b8TfuzBYX58TsG4+Jk4u8fyhenjejcQUAzsWm6UYHMzM6/vrsa1dPPj57+mRa5vOrZgnwann24O6RQdOt3FBpsnrTxsh4XvtSqcPbNx5+8jzLyuxg1oEwwuSgCF30gQ7vfCVslhxr8rXWYMsxUek83r53snHLalSag+zP/ttPzi/nJ3cPsRh++fjilZdv/+Gf/Oo/+/t3JoPZMEtJNut2MqgEkEjpJCjYJVGQsoJKq4i4Y4nMWisWxTEpxOC3inJlJsX0aLO8VKY4mY3Pz8/HoxuSxDfrTlbD4YnWh1qjqNI5rywMp5PL00ej4WHnY/BOI5VZWTcucYygynLYNrU21m8Wm7Wbjqt63WwWcdO0b7395naziW3jGj+dTk6vLuqN2q42N46OE3Pdrbu2Y4HtwifUthq9ePXk1s3JZrtaLVbTg8nh8cGTR4/Kdlo7vHXn7aePn5ytHh8e3oDSDI+H21Xn2oaAXNthJimOh9VUQDZd3dUXl9DdOCyuXj1qm1LsxGZ2Xc8/fvxkfHBy0UbfxdwMv/WVN2p3rsLcZuv3/uCD//t/8V+9/9b7r862f/zV74U2zdfP8yJ7dba6ef/+VasP7p9Mjya+jXY04IQJFWK2uvAKS5GwXLt8MAzz7upiMzu8UWZ5VR4mSEr8yfFh3DYXV+dXC8qVMkq7q+2r+tG2Xv3wX/32u3/w1wjN2dUT5xaT2Z0bk9uBaRVcYlGCZ6dXxXhoR/bV2bxu+XLeTXKT5eB9qwkJVFaOzGA4v5xTh8l1h7NZu76I3E1GN5bnT5fPvnCcll3GN97/9VU54Cc3bs2Sl+hqa2w1OdhcPDNaq3y2LfMNHsp2VVQVqtfLYp0ZdD71BUoRXfSEgByM0toaICIm1LBeuSaxtjohgdZNiGlbZ4YiiO/WKitd55hFaVtYXSg1tFkC7kKMLH0Aq7VaEeaVrZXNWbhLSRkLwftGIDTeG03BiyTvUyIg0JbBKlRRa0PKcJK2ZdclD4mYSVibZbNFEmafCKqiatoaQYQ5M2JHNL0JpshEoRkMksPoBCPaQqNSKQp4bjcJtVgj0HJG+WbrEQprWSutUGJwyKppNinhvAkdw6aLkRSH1LjUbrZVrsflxrdcd2m+rIUkAwidiyFkeQYCm80amH2KzOy6JK3PpzNjddP6rNCE4lL0zoWQ2i60QVxkQGzruO3SsMgIUscBcptnAyWQF9oqsYaohu2m1iQCetuFulsNh1PksHp51TadyjWjDiEYrUqdB++60KYkVpuU2Jgy9KU+tC3GUztOdtBtmsg+JddcXr06OTmWwAq0MCgiZB09ZwaC9/V641wdfUsQRbjQSivSpJXSHsBxpymzpEkZnWsQr4WUQiHtA28bR1ZZ0EoFJAYM3tcJMYFURa607iJ7BqvUeFBGTasQg3chRSKSLHXtajw4YCQXHSrKB4pFV0UpzEpZH0NfEVhp1ERWIoeAIQgzAMQU19uzUXFii6L2zgBxjEprfp22cy21eU1MdtQCVB/0uAM8LIC0K261fzBl3hu2Xr/6d81ZuBOt4LXZC/fyn54UiFzHYe/Ay17AA7KX/8CuYJhc62muCcG1KmmHknYZQLILFRLm63ft2ckeDuzENrhLaX4Nc65rr12fEu3NeLvdV9iV++rBEO40Mn17ZPfILvK6bQg9L+u7CXfK+J06qN+a35OdXa24vbRqB4TwmuX1aT68TyW6vn7wuj9wvzO8A2OAuOMrAChAKAhAiArxdZB5z/5Afoe8YJ9StQ9P2sEYSrQXEe0QDIJA2vUr7a5PD5R2pjvaS3oAkFkESb3ufRC+RlQiSL1kTAAZkFFEISAw7mxoOyPCvnwb9FWW5HdunL7nmftz6f0OfZhrf68BI+4iNYRe3w07VNRchS+Xc99Cbkf/7E9fvPuh2Dxzm66c6pBscPjw0xcnJwf5FC4u1sVwBCTL9VwxlLk6//KTGM+++nv5G2+PgEPbbbPMaMZypOwg77rah6vOu6KaViPTuQWmsQmHGocco5cWUoGVUkwIGikkALb5pDrs5gKuPZhUV5tFZrnKtW/TFMuJzQK3977y9unTtNokF9u/8u5bq+VFt3Htqimq0dHxdP3iYn61iMchKy2Frrm8Wtc+ZUZ8u00heWz81pR5XpRtHUQGR0fvnRy+kxc/zQakJIMEgbnKS0CsuzlCZsR49jbPtcIQQKKEBkyRc3CDg3GS1it591vvNM4aZY/fvHX5+FdlbEprBbrF5dWoumG0Cah8qhNAPrnZzS+qgm/d4s8++Wi52GAEF8Kbdw6sGS/m7aNfPjZ0cPoitN0Tq9VivvKO0Jj59iwFdXKjQgUpOjHr6YDZP2qkvv/Wg2YrigBCdNuuKEzg8PTz8xi4ygbT6SABKTCCeUipGBuBhKG1uX755BWwHaqBjptX82VWFUUmWFQvLi8Obkx++KsfNKbMxebaRmU5bkvSXb0aDWZIJeowuzvhVl4+u+ocMOZbFwjkqFJBd5gnJDidXxaDklnltjT2aNu68XiSDUof175ZhW17OLzz6Mn6vXduI3Vt7TbNgoAH+dgk44L3LpQkF/XV+eDg3tv3t
yFOEjs3v4yLXJfes9u4cmiUij7WZQ7LxWqzddCBa/HNO7fWW/vsyavC1DGm1WZrMx0Xfr66ypiDZ1Xkbr0is+0/BXlmAJWwkFJKKQAQZkSVhBUScz89ECEkSAhCRFbrXjCTkBkgpJRi3EUFIQZhQdZKIWAbPO4Rc4ipc7HfaOGY+s+0UsQsBKIQjVKalAibTKeUBEQr1QtmuDeaKk0IKJj2QXuUBARS8IlZKxIRFFGAhGiUTikBYWRJKD0D4xhTCv3U0Yt2CHdu2SjRxd6DBWav7sG+eDxgTClxIiTPSXwLIIXJtNLGKE4RJIkko1EEjLKstDKGEmpUlBWoNKKiPiKPMHJKMUJiBAaFivpxkEkpxl2tTJEYY2ARBFGo9ml2gAKSGJSOHLXqMToDktKqjw8SYRDUpJRCENYJFSvXqwYEAIQQWcRYA4BJRHYwvq+7FkESSEq9ezgBswAwESEjSxRCYgAEpRUqEGABFTghoQimJEYrtZ//iRTHlDgiICEE9hE0IRLvNkD6/YZ+2jCkBTExExGCpBQQSVBpoj6PCftSbgD9FCIgfG1jJEFAIgJB4L7qWeQkgLSzPyIyRyBCRBFOLClFYSbClHbA9O4HR5t1Pb+sm2Zz/FZ558HJ4eH7f/GjL+PSFKVqttvZ0bsmC5cvHv/lP/7+OLJissODSFivtkqHxj8eH9KjJ5tt8tmLZ2/dOuau5e6wWXAxGLwxu3d19mI6nual//yjLxnIZlRfLHMSLsuXTxsVG9vNRrcqX66K8eT45EiNJlknvlnRYEvTDdVRl3gxbyvEq/n8xv1FTDI6uPHZb39x553p4snV/ZsHf/ff/3sun7Xs0W1+8pff//Sf/+DWQXU0VdZ6LNL2coHDoWYCJVo8ZZpEVG4G+fjT3z62NrWdt9U0M4OEJkE4GOXtfK5QBXbFcMaOXdfYapDILdeX48JkoEBI60k1GiZZX1y8ODw+ycsppGCMkugjd22z0FSl4GPCohyCgq4RTqBKk1cYQ0qtW17NTQWD0WS7Jpmv88K6Zv3o4Sen28XwYLKJ5ujkRnly+zzxYpuGMoqN++vf+RYE/vRFc3baYVWAVeetnyt6Sjrb8u8fZkVZbVfbD759N8JqLeHkww+z8fRb7339yS9/3K5b0mVgVAXZo7eP77213Wzb+XMiyPNcjFHFZL0NwRWB2QxzbH3z8jRr6PnHl2M/f/Pdg3BWdww+ePadHRcCIL5OISHpzEoKod66Yji0RdV6TkRRYLual6YgVm4TUmGaJuVVSaQ777c1r7dciKk3AQLW22Y4iojifUzMw1FBmV6u/LaOwarLQBeeZpNKVdJsOmWLLPL8vNNmdVLgfP7y69995/0P3zxfDorB7KMffuICpgRKgwPpui6KOl3U01Gsyso3LRNJSnmVpb1E/vhoxil+8zvvvnh8ltJ4PT+7M6nGN0cDOHZXpynx6GBQ1+Gzx+dvPrj7nW9/AGGLLAcH1fNni5cvV6513vk//yd/9ub7X6FMjY8m6yaJDzrPok8++LOrM+2Dc1dllg1n44urlQBKpOA7Seyb+nKz9utw0WKOg6T0O+/d/+jzL/+dv/u3/+yf/2nXriQbk+iD2bhuXW6LyuallhhjA6xIcm11Jm3jwCgEsGWGebGtnUFB4vrqnCNlJ6PBjZPl2Vy5NBrOhtMZMAdRrrHj2VEiY61gZvLMcvCJcTg4SikZKiAF7xvOKhTJdB5QLKjAzhRmMj56/vTFeDaTUpqlL5S5ulgJhoPJWEFjckMk51dXKhYpZMrGGIOkWI6qesvPnj+N/HIyHHdt16zXvY399OIU0D749vdW6/D04Zcxbj/89t3Pfv1iBLP28kzz8P7tD08vL0+Ob5qcV88vbGbPFqdlOSRIP/rH/42S+M43/yjm9WhGN2+fPHn05yXrP/7uN2sgldDbTM2On//0s+mgSJL/+W9+Xt16Vx28NSyBph+6s1c6Xk1m1V/7t79zedpYa+2AaVTdOXhn8eh5vZ0f3pi0nSMw5WxgFZxdLjBCpiifTfJhxiGR5a6pB6Nxu1mDih98++3LszPw/uRkfHq2dV3zzje+NiynOSmwxdBVKPdenb7smnXnQ55Pk/damTfvv/mzX/2zMR8gmrfvHLHT7bLND1SVo80rDOBDw5jAyKictCusqtRVZn31dJDh+uWL0pa6xHj7nT/4zv/w7Oz0t//05xm749tvLJeLqigTxbzMNBgx6cFX3tCJ1Hpz897N2u0Su4ILKXKf0Nh1QWmltQlO6shlppXOiagsFXCSkaaua6NLkkAhKhMCE2DdbllCpnWbokKyxlRZaXsHNyatdUrcdj4BlSbLRyOlKSosdYtWytxG1xv5NQqQKOToQ2gTKFFax7wqqiLbSPTeQVfXmytk8QmClhYErcqskpBiDCrP2yYMjTED8D7aShc31OS48G1USg8zKyhQqoyhaaJzED2CKoupLof5NOerhy87xthGSABlVhSVcNRgHXJKnLrQdeS8cGIH3aZro6Siytqu++LxaUZZEmhiFOCiVDoDpYW7BJAAojYaCGIXMqMzwz512xYEyW29QOy8cykGF1MEAUWgFOnASaLUTlofslyNp9VkPBuVuVVqcXmxXG67kFznUWRex62Nou35RlRKOjpKKXQtWK0zGwFIo0ez9T461BGIgBufW50VOWkjwinGXOtkulwhh7h1m6uNNQ0pTUbxsChJGe8iEPjEnffrbR04GKOUMBEgRJAUIxiFeVECGETyIbQhKkU6zwBRa+N8WK4WWdYF26pB7lIAwFsnd7ch+M75LnRN7X0AVJkth0XZUBtcWseVa7qkM1E4wGy9cahSJEawECTPNClRmiBx18XEnBshyhgwLwYeupgwxlBUZfKubXzdhmpQVeUwxpRiIKUk7vnMa0HG74AaJABC6VEREu3sRgxAyAICzK9ZhexRwDUfgR1oQQQEtVO6YC9h6d+Q9ojnms2gyC7daCf62MmYrvXksCc4fWQO7hRCcO11Q0BgEJRdQnNPp0QYr/kV7k9zD0V2B5WdbAoRruu37EBUf6J9jWigvUamV1r1KRawVxnB73jzcB+wsGv1TjCPCCKEBH1iA2KfI36tstlRuuuW9auJ/Q5sH6mNtNdY9U/B2Avz971PAD2N2h1ob8CifXroTqPUp0MTIu5J0Oscqmuxj+z+434JIdDHZ++7R/YeNxa4vrg7iVMfZbu7xr3Gh/bXtt/Y36dZ7IAgiCRB7GmW9L4JRdJHWUMv6iIRACLEXaz2riAb7t4UXuvWeo6EKH3+d48L+7XTXpn0uoTcDhVVmWXEm/duvHw0l+B/+6svjm/f//o3Pvjisy+fPfmyyMvhsDo/fTWcmZs3J6fnp8v1ZnJYHh2M5p8/H1oa3x589w/f1lZW5+AZcqN92zq3BUyZLS3YQT5CcKv1siwPZpOj1WUnlIxNEghUxsm17UXr4+HJDFPkTlbtedusWrfppB6Pj+abjU441pnk8jd+716I66uL4OJ2PLqzflX/4ue/mIzHo+OxcKzdenO6ZB/Kw0HImpSU87R8daV1
vo2L6TCODwZdxyyYErlAygwWDY8z+cnz79+6XSmV8txuVy0ISPAJKHluyZnMAmpjdEqcAtYbIHN3WNw6ww1Ae/9utakcbp5DgNF4evHRRzaZpt4+nG9v3BwfHBxonTcpkcnazaLZXrHegLi4WX36g09SMgagLIcsWnw7nRhshf3oanVaHY26trs6W6y3AcAkiJOBbmuhjkNIPm59LUc3jzSm2umz8xSiCskHpuCiAn92dgnRKFFt46cx6dIrgaarQ5BuGXJL9bpNrKvp2OhsevMguvli0ybHOol3rtvCi2bbboJSrqyKhGmzacV1dXC5zq+u1kZDIu7qrbTrlDLhMBhmHaRhVTTuSmUUfWShw4PZpnYHkxvIuKrDtKgkhfXqtPabO4eDi/VmPD38yle+969+9P3ZoSYdXLe9eeP20eB21y3M2BK5UVbevPnGjz9a6YcwHOZb101ns9DycFZNB9PzzdxT3nbdYvVKbJ5XBa5929WE+OjTL4kn86vL2WGedPfG24cpxbfvuvMLxx3XW/jVby9MDLNZvht1GAFFKSIi4R6pIMeEOzmjRGFM3IuKEJVVBMIKCREiJ59iCFFYBEAr1bvUACgmBkgsgkAIRETSJx9pJRE0oVIEgFqpfhAzStPODq2ABBCUUpwAEYDQR6eVAtDYC3CElFIAGDmmkJgTpMSKqMfyRDvHFkIMkUEJEEMU4RBCTIxaCxIwC1IiEUm+8YiQBADQaKNIFcZopQQkCXDiGKNGTCl1IaLSRCDeG8NBMDI7F3wISmnLqJRJCYHRaJXbAnVGpIy1KMIikVMECYBBWCQpvdNBIqreXQfCO93qLgaORQiECShJHwKehAUUiPTiKiQURZqQQnLMolUf6a2RCJEQhDgxC0Cf8612VxZ3teL7YnBCwIDaWMO9jghEBDgSUA+uIggoRaQKm2Xa4i7xiVNix1FrrUilmPq/QURk6jWd+2gkQqS+9iWREoBdXVYEq7PdNImYuNcxQ68n6mVASeIOL/VpTMIgwLybUiRwfwRAYI47J3m/nyMgkBBJhJl3CrIknFJEgfR6wgXElA/0QVYhhnbLq7bmEO5+MMkD4bpmhk8ePd5cXoUQNXs9LENDL59v3Cy7cVCiErRsMgUD41X5Yl4/eAM2l5eoRlV12G0vj2/d/oPy8KPPzrXUX3swAzAffnBn/vBjbmBTa14I5fTyUbM5f3r/67PVr56pwzvZvXU+GLxafPabX/1oMAiz41THcHCENqf5fPH0058WVG3X5uZocPb04uQg/8o33uKhvVpcLF+uP/7+j+afPa+8soUuB9npo1M6X2mt6/PnIpyXprlia4u8GnCtG9RFEVkSJBiNpxar1WY9rCwmQaDtctXlhpUjUVlOqKTMrPgscsqtCr4l2CRJBP5wNMaQUAVlYFvPrS1tNTCCoLXKB6F1jUeVKdYFgzg2m2WdF9nl2cW/+Is/u//WmwcHq9jRjdvD07l78vmTddd8/vTFX7/31tFX7unB4NVl0saPb94apeyTv/zz+2/P/p2/9231//nZUPjFqougI0qZkwYqDD84Lu7fm6y36+3pBjdNty2LkcTLiy++/6cvv/ximPN4drTZ1tUbX33jT/6DxSe/Pvvyv5vmeHD7yJQVqeHswddezbv1Vdmun6duTs5tXlyF8+dHo/H5F+HXf/GlzkUPim3bbRZzs7FoGbhjn5CM1YiKtc3Xm64oR826wyTj2eRgqkPnko7CsFqurRmul7GLQbBrg8819sElHUsxKzbbxppMW6NId6zWzr3atjWY8TQ/vlXa5bZZNZsmDKs8hlYp8957t84ut+frF3/tTz68mF8cXBybatKucXG6GpaFptx3bRdjbJzWmdLGR8oYRcS5ZIwmA33+CQDkRheTittw6/Bws2q/+OxlXMRifLGZL/NCbVqHChF5OM604lFVbRZ+fDAkFY9vHI0OD2/dGf7sz358++3D0cm0degEiqGNLQmTIkyxQzJZljEMIsfVujU6I7CJsGnr1WKdxGP0s6PBirO//Sfv/5//4//y+P7N27cP1hebXDJmMUptW0/GBu+1UGW0BO8VbNuQQK3bToN0IaokClkzKFNkJo+NL4dmtbhE2uaHjsqjwURhOJcUgF0KqXPt9ODgar6c3LhrNJKyIbDSha833nFe2NYFgyzIrWuEExrDLmxdE1IrpFMy+WC42dQxBqUJuFO6AjQuRcrU5eLcmuLu3VvPHz9b1ZsiQnBhdjD5zUcf1b4+un2v20Qfo/PtoCpijIgpN+Hr3/mrElnWl6VRC8/bJgAoH7zQeLW+pGLWuU6UjEmhql+++BSUfuNrX//04/qP/s7f++gHPygm7ztqxrHbfvLzhz//+d/7N/4kCmNuRgg///zzfEY33jzmzl2e+YPD2ft/4/cPhjfbq/XTT79sTl/8u/+jP3z8//5cCeccfvrDn3zz977VODk6/pr3ndK4XazJ5NW03LRR6zIv8nyQ+cZlRelZbE6jg7ziY43o63U5Bd+FzFb5ePaLv/wpQBVM2i7OIHKTaHzzTpZn9daNBqNN3RDmGNgWRXTtw6cfF+NyMh3HZJAg+uC2bpOl8SAPrX/18jlxKrY12aLeXBZ51nWB8tH6apEHfOcb33z4059Egdnbd67azbZenty8ZaFpXbRZjgqVqUCZ+XI51jpzdbtqZLXuktOV7T8FbdtJFB+jIBMBIQEpQGV1ZoytytLkuTASozWVrdetax17kuRdk1vbNm1o62k1mQ3Gl81qt4EGYlEbTV5AmSx1XWGIFFEMHDoEk7ZunBFp7dzWaALRITDprOGOXQMcXYiDYoyifLsx2CrpdOdS05mUGEijmjdtrXWX4oiM6hyJH+RaoxTDKiGWWQ6qMyUntxkUGUSXNk1mysQ6RQqOo2fwQjrmhTadb5tWgQZwzm1yNQpJr0JiSApJFxZCTRwH2aDttk3bdpLQiITkPVtNMSXvtt6J0phZ1CiFtVllADH4lkxmtEJS1lgRKYuCRaaD0apxiaOPoXFtYkHAqsyJCZKQopRUEPKQElExzI8HBl3TrFcXLjWta0LMMjMYVl3dxJDmy7Uq8jJLqXHonWLonM/G+cAObGZd5OW2XS/aJKRTGE7zfDgQqzqFosUnVhqqaZ4NSAkHrzngMnZZoiEYCuy4Zh+MzZb1mpQKwafotdJERMqSNsHXzOCjy7AAoBi9Fq1IW6LIrLUhAIkhMrtuG6Hs/Jq5HI5neXFgaZzHdTGsa70+e7UllQ3Hs5PDW36zak5fEZphUSKHRgQleh+0wWFZ+BSUNuyC8zGjvEsQfCtolAIQca1LCEAYXPAhZta2wQsAKVu3PsFGaY2CnABQlDV7GrFPltmLRXZkQrDPi3md+gOAfe15BNkpbF47gfawYKfQufaLgQBgv7bfy3f2ZinZbXn2vqr9Mn8nr9kDi9/9uj7eTlMC+Lrt16irP5jsPWjXpqrdw+5ru9MO7Oy5VF9sS/b+NHXdDb+jt9progQAuP9Dkv0DaN/216HOu5Rm3K1lcJeisHdFQR+W1JedB0Lp40r7riD
cu7xwf7AeYu3IHlz39HVkUY+Hfkcatc9zAOirDe8LvO0uJPVyo57o7aFdL2vqr98+PUp24qedPqjPIWGRfcDQzoKyS5uCPYoCAJSeZe0MZbIrjrdXXf2u7bHnRQQgCLu9ceqTSEWj9FKo/v1519W9sXHPE/l3Tp0QlaKEDPg6yry/SHsrwp5p7huwQ0Wjg+mvP35M05vrbZczbVp6/OLTo+Fbb77/vZen6/l6267mx8dZkfHly+eFxperi0l189Vnj7Bb1an+vW995ezliyIbS8qi921wCEFb1AY0JgFSKjMiXedQx7p5AUWpMLq4Gk1uzRc+psbqrCrGoe2SQGSNoMflsQHr3blSIxrqcVlis23bdVaYFA0dKaRx15r7Byfpsn728uLD44PgIyuvNBubar9698E3nrz40lYH5Y1hpYvw4okSWi+7xi1GoxHmeTEstsuL4Y3cSJvEZ5o5xDZ1yKHUQ0VaWj8wlVLInQyH06aVweh2YabG2oOju1fPf/TOV24kAHTL0m+7tq1UPiHdbV4ezO5sUL28ikGqOlUHsxODq7pZ371x3G7nACGBQyUO0nbbgdZMJoovsoPLy6X45vatewqXXbfRmQVjTk5GbS1Xl5t265frNBscWuSRxUwPHz5fvnPvsBzf12o2mh1u5k8Fw+RgFNrzw1tjyGVQHLqni/l6VY54Nsudi6tO0BHHXMsoBZ/nZlzqtlm3vs2ysnaejZkvPcDRw4cvYjCDAnzTlJXVkK1TENYimjvu4twzxCTDnJTRBwfH88USC49BkZPZ8Kazed1u8+JICAIHD6hynerlanHlIHlNXz4+Vyzh4uL89F9dXW42rnjj/vFwMEqiV92ZlWB0YbUBdhpV1rSgeBXSHJemPCpUtVolo1KlB9GB+LRdLsVOUvTbi+cc/cnRjcXWO90d3qjYCLSyWUYDbqrji/nKlsODOwddrpurxcGd8W6Uoz4cBlW/uGZ2IcQoLCyAKTJzAoTIrElZQ33NzATAHEOMzAkFkIiIjNF96HUfcrTLxiESgV3BKRAUsYq0tgoxAWjdp1uLVogM/S6FIGiTKcREIsIhRlQE+5xkozUIgyRJkphZogAgSUoxIRileCdEYiQCwMQspDhyjCGkFIVBhIhyY1zoSFNiDrEXTKncaq0gy/rMagQkZGCSXCuGPmtHBNhqTSAIyYfkIyeJSpFVmkx/sgCA1pgizwVIKwXEiROAAAcQJox9vS9OCUn1oluUXpoMIqCQrotLoiD3s0UK/YDGwNhHQfU6VcC9eIeURiLMjVWoejGWpKQRhUSR6kXBhMTChIiKYkoxpX6Sstb2Xr4QEgOLCGnFMfWRDYl5N6UwM0dCTLuxFQAoxgQa+5xpJBIRZuHeNwgCiCGFDBFQS7/Bw8IMHEUTRkm/o0aCEKNRmnaa5t7oTMwBBIioL9bWO/UAkbB3LDJA7LOQdipXEZFERDvJsQiixBS5d7qxiIgidS0mbusm+jQYzCZHkxf1l6PxoPWRYmKMxWGRldVqvkiF8SluY1wta2CM1ryo00UMfB6++cbNccZmkOZOA0PXoAri5ov1vBlOMuJQFYPDQVMV7RtfPwQ79l3M35tOprPz0+7gos2mx2dfbHi+3rx6jsBnz07Pv/8LR6DG8eaD4jefLmvM1j6NpmZ2qGAT6kUdZVNfEgjdeav6w9+7OxqXy1ePVJIwv3j28VOqpSr02aVXIc5GRWjrjVvNbk04xuWyS0kBw407x9ODWbuqQ/TFIJPkIbamJK29MSQG88Nqu95s69VohrPZ9PnjKwv5drtlSJTZsjioGxqXJbsGEmiUYlA4F4KPeZYRMMZgSQNjSpFQyf+Xqz8Jti3JzjOxtZa77/50t7+vb6LLyMzIDkCiBwiChWLPUpmVxILMykzFkUZlppFME5lMZpqwNNFEpUGxVCoTKU5EAih2IAGQSACZyCYyo29f/25/+t16s5YG+5wbQYWFxbtx3rlnu++9z3b3z///X4LWIogRo1xQlW+X8/X51VU8Onx2Wb64XCuGT15IAGWdQ0q++a3fYh+dlEtbOmxhsvaPnn5kZ+skHc4Xg2pV//zX7t8dn7778XRpcWGzUNtU2r/xnRtfzeDi9AUaY6JRku0os+9r9fT509I75eta9Hzm7v76L333v/5vWnNDd0310Z+kMbSds0ENBoXeGed7UD1dDXLnF0sd02gva69OL5581jrVKUOxm15eOlAWwXbBd86Fltgoo8AGH6xKJC+MbQNIEkfm9KyM4xCCuMYmkQaOVosuG+RVVbXOKs2R4rJuichbHGMaxWlAbISenKxrNK0Kr9w7eLi/uyht8F0kwSEuWBKin/+F19/+8U+BC+UW+2myfr7WRdpcQDY2FOvbt/afvjjzdTXMzXoVjDImTpfr+dWiGh9k9apsuzpLYw7UuQ0qso7b0oKNhlHS1eVksjNfl7NlRcTrrjs9ncdGgzbI8tFHT37lW9+qylbFSRKRUkpJuLyqHv7ctyaTuO2UTqLLk+nu/qQuO0GP2EZGt13l89ioKImjyne2tXFikjxeNvXkRvH82aJxzWvfevB7f/T+R+9+lKH+/O0n+zef3Ltx+2/9r//2P/sf//vjG4dC5sNPn0/2d+yqk2C7rtUqirUhhYLIwsoorXW97hRSZDJNoIIJ3TqN8+A7NzvlkAeKFRnbNuvpzMQpKeUDpvmEwUQ68iLeujRPMUvYrU2ivEBo6yQukHQITRtqHWfAHEWDrmqTRNumUgnEUYqkPZphcety+iK4Kk7yxrlcJU3bKB2MUXFkqqr92WePLqaLmzdvpXl2efJ0UAwPjvfOTqfCcHZ5+dZrbyTJKKzKw1EkFMPOXpQfjHYG6YhUnJy9+GxVrr/65i9O1+dl2c1nZ7fu3qvr8md/8cc6H+s0czb++Cc/oUI9/Lk3/uB7P646cCaql61GryPAtrp7cHR28WxW+emFS9P90fBhff78KI8nD47Oktquz2/fulVB09TPxcQ+Mrcnr33/9/9g9yB++O1vBbuqlpa0HhUaIEx2imw0qqOKRRkgY1DFkbiAEtLhEIBAvDd5AH24/+p6fXVwe3R6cvLaX/25tnLeqo6DczjeOXCu6xpvIhXHpg02LyZ+5SFotIIsSZwkh4nJa6VDuaoHeTYcTn749o8fPvhaNhkECNYaHe8/eONwevJi2aGzVV3Gsw8eHX61MLR87dde/+hHbwPR4eGdy/lVmg+CD4NsmDGcP//kzqv3pzSYPlsc7G1mREqrzlsLgRQBAhM6b0lRamJMckpyr1ArZOeC2KKI8gyW6yBKWQcMQtooMoM0DtjoSKnAsSKFqMQHRkTkrou8g9AmSNh2vAYyJjOqXpYhoA9OR8jeCnM/q2EPSkUqSZwxTgidqy6WwG2oSsUUq6RsW9FRLBFJqlEiZcCA0iYejzzPuiQwaV0Yo6Gzcx+wsZ0KksbDmMQHSHQREmptl0ZotBbnOfR1HxSpMBnF3sOsWw2KiQIlBOneIIF8RdOrWVkHK0SxMiECrW2kIM/NqrERmh1UtmoHAzKZsswYRT4wxQ
lo7fs1YaJFsGZBCbZdOfbO2sAc6RhRA4dYKyRCli64LE5Lx6nRJoqM5246Z4yc5dOLUoDSYpDqJFJaA6jOgrR1Wy4WcwPGW0ijRFSST4b5mEyqlZPlovKtZyBSqEyc5iZKFEZsGYJtMXASqSwiALIahVJGE6E2ojQHBOpaCRCQsHH2cjF3Xaf6JF5RQQijlFi0iYEUBAliibRIsOLR6DiNUiTlOwkgIo3rALRj8thOVDg+Tg41fvrZyXy+DD6Mx/nh8UE+2olTEzioCOt2AZFyq7VrW/AsxC4AqcigYnZKmfWyLBsX2I2GoyxTSOht8B7atmtd10+ajdLggtE6BLaBNYlwiCLNIF1nv8RP5Hp1v1l8b8Q6W/aAyCJ9ykC/Pg8ACMhfSFlkS3yus6a3n7ZdkguqrUlok3HRi2Z69oHbKl+wlTbJNmMTN0EMW6vUtUoHrn1Im4P0pjj8gghtEj17Z5QAbEiJwMZ31m9Q9wffQDPq9UW4pU6beB0RxI2j6pqI9D/0jbiOQoKNbgi+xHKYiASkX9f0c9reGNavyXrSI9TX5IFNyaCevG0+b3uZUHpBFPOmi/2uO1yDna0vS66dbptZdC+Kwq0ra+M++xKM20qFNoqvrYVuEwF9ff43CxPZ7kNv1Tz9j73NbHuJZJMNTht2eF1Tr69SDbCtDLe5TwQVIApgEGQiIWECQQLVlzamrT+uB3u0bbb0O9Agm0wPZAQghCCI6jo7HOTauMbSA0X8/zOgRSb7nb/+V6d1q2z0+dvvJdnw5s3hz97/wf/+7//95x+8/+7V+2IiF+jqfD7SeHRj7/i7tx59Pl9efXLjXvGdX/p6HierxaSrlEZZ1fPxwe7ewQ4yAxrnwniwdzk9G46Igm46Sid79Xw1KGy9PF/UzsnB4f7IVu108WL3eHK1sKPJjSQbnp09GY3jKN6fruqyCRHUyoamCyJGANIs2ovlL/7sxfFgN4+im4cH77zzwc3JcTpsF242inO0/PKznwoESIq6KYPm4eS4Yx5PkJcU2EbQqK4x9cpg8vxkPR6NvPYacbA77uxSUxQlgxBWwdWC0HawM/56mh/cvPPt+dk70l29ePT9pnxxlGUJaa7rOCJDRWW1tX7/zp35at00893RAJGUSS2qOBvmaey6i2C74F2SwYuXL2YXc6CDF6f2/q3hjaPxYu3ysc6ywWz9EtF3XV02TotChsAhHZvjceyfu+ez9c6wuHkwYkETJ+P9SVW5IkFn11kcmVRfXZ1PhtFwkJbr1cXpiQuSTnKHprOh63wQHhbj+dWU4vTW8a354pwDTHaGOstVOvj0p58++vzF4yfT/cmhiQdOuXw37rrWOZsYEcYE1U46LP1cBe5K+/xkdvvGKDS2yMJ4Z4yRRVRaQ23bVcDx8KixvKrsIDbL0sYoOgQkS84e7u+vmYOT19+8f/p4HifjZHcHgJsAsaFZPTsqhgLibOfqhtG89bUbXdNWGDqUVVOa8bBBvrK2c01EkkT68Pj2xdIh860bx0+fvfjgs2ca1Gh/t21tlsU37t46e361O5Tjm8OPP60l6CKNqnpVZNCW5fZRgIJgne8jq73nEEIPf8QHQtRaO++10gjQB0WHEECQgQFBGUUkRMpojQJKKVEQAvfmWgLu44RYAgBoTQQISLTJxkMUUESRUoSgtArCW6ULCYNS5HzoI4SUMkiqDzhCEB/8RomjtPMOEQKyUjogsQhIABF2gSEAUBBmbwOLk+CYQUKq4j4amQMzh37AM1or0lqhIUVIhCSALAEQgRRLr5U0iAiCjCDMtnMMoJVmhiCCikQghCBIiCQcFKHnIIzMwQffG+tEmBB63dMXmX8IfcAQKcUcgHr5jBiMCMkFaz0H9pqUEpQgFBGwgIbAARGQFPBmBGWSXp1EvY03gCLT7wz0IA+JZLvzQJsxUZSmEID60m3Cnj2zBCJhASQKnogQqLeAIargPSna6IsBnfeBgwIMwERakULq2T8EEUM6BPbogveajAh7ZkTqK5ghUhD2HPpqawx9RDoLsAAEEe8dCIBCYEEQAkObFCoGASQVgidSgKAUkWwGfREmJOlvhl5KK9zvgiCRVjrwpjRsOk672q7Lde3WeaY723atB208EWdEBStPlJv8Rprf2lkune+cCYEbnq+bykftS05siUGvrAvKfPJien9vkqR6XTXapLBs7+4PXnktDV2dZ1cO7eWzKTS+nWOW08GwEsXmCMNokI722vmydeXcN5ilV1Uon9o4y0sXWEzFbJqgWqjb8ta90WB38NzP4jw6bcqTq+dHB29dnV38m9/7UUImzcL+btFWXTLZV7uDuqvzdDhvq/WsK/Ldk/O11lgvymnb7mRJmiar+TxKsVyFyeGeMWlTttPZdLK/s1o7EZDKt3bFZBrvy7LxXXl8eKOuPHNUrddAEqeJ9d16vVakhFS/r1TNFyYrwOLq5amJi/GNe6tVffb0GWNbh264W3zywcedbU6vlgEgH0bdvGTvdu8efuOXv/Kv/tm7q8/ej7PoldfvHtw8sJfPp08ezxfr2obb94umXYXGZiPsbH3nKK5qnJXw2u0kb+pvTjo3Ky8+rQeTvfRgdFV5E3dfuzf01eeRUvOL5SQ/mLzyhnrjVxsZuWbx9N1/lyQhK9R6uY5H6eBwYLKkWy4BDQfdrJpJHMzIpgezwZ34pz/qfvRZGO8RL60OsDsxGSUeYutj71BxzNIE4WrVvpoOA+7dePjttu4G95z3zaMPPlNcXj45v32wYxRVdX25Lj1KaqKKw7LqTBrtjIvzxpWNp4zqpqw6F0+KLI92dsYQyjzqTK4OdwZvv3Mxc2G96tx7HymCTx8/xdq/8eAG8Ojw/ld3H3zj4sqsLkMHxarhqgsm1sEHTVi1XWzixslqVSHizs6g6po4jsBstHXHd/ZXVVeuXJwlO7cOg1J/9Kd/+fxktbeXWQaKY4W0tze+aNpZ1UyXy1t3Dqp1neXFYJDMztciISJslq6r2nQYHRwdrFfLsqzzPBKxpKPbhzc+P3mZETsHKoo8s/KOPZVdw6qdzq8SE98cH/617w5XF08xMo7ks2dn33YIyVFlxs/Wzd39odZRlBDXIBACYRRpcVyHAJ0dpybW5Fk0wXCUjXfGV2fTVKfNumq9jQvqXLWTyLrzaZYhjy8vZ8OJGowK64PW2rcrJQmaaDwuQteheFTKem+SnAAk2KZaK5LZcn7z8Csi7D0bE3sPWVGQCAsRJsRytZibiNh7QZgM99DzfDX3TWd0fnU5Pz05CRp2d3fjJC6XJVjGIPPpUpM6PD4+uTz/5q/8lScffyp1m2bR84vTe2++5Vuf5PHV1Wyyk3zzF37z/Pmnn773zv2v3W8UGJURYNOd/uZv//JP3v5p2kwnRRyNRy6F+frsajX9O3//P8egH3/20cDpg9cfZnlaW97d32/m/Na3fuW1r956fjkdoW9n1fNnK5dRnTz87NH7WnUJyc/9+i+cP1/89ON//V/+F/+rn/3gzw93bsxKmxc7EkJksvVqjghdaxEhLxITJ8yBgULrBJQxEDC0Z
SVqkCTF5IbUz7rOFacXF//uez/8xitvMTSD3YHS3voQKKp9l7gg2ASWnYNxMkiUwaCFGZgBRVwn9eUCBX3oar/65i98i206na1uvnJkvH752aPEROLqfDweHA+N7Njh/lC5k/nFy06S9JAgfXZ+OdwZLxfzWzfuOj6fvnw2v7pSaeGw2L//hok3wDQdjuPM82Ltwduujg1EpOPIxIY0Bu+7wGBD0ALCwSMHBBXFLKA0us567tI8NYnp2g4dxlqDs8zgEJquNSbRhJrQCdSNBcDGrSJtMDbehRA4Ilyv1kkSaSRiLij3URpUaH0InQ+1BWfFOwbryiZF4yQwABNP0iGBuMBtYz0BRmh1a4xmcgiCoD0jQ6QoliAKTVNFDTtPUaSEQ6mMV6xEJATuWnKAbcWxT3cmo9mqOTzc9yJYNlx3J9OlFzLEznYgHgVyY3SiBdBoiNMon+wgaBOCTasArvJCJpaglIoArHUBECx7QlKA1rmWrXYqUTqAaBWhUaRTtsvGVey1Bmo6bqG1jrPIxCoJXhaNLcum7awg7e/uJVk6SJNgKdovBsLVomzL9cJepomGLAYwaaHygodD6kJNhia7hkJSdpimyiSiIu9cCxaBsiSwBIsSC4QQgiOJYgQRA2RYiw8dBwFsrW261nnXtNaQimPj2QsI6oQVCVIbAopQaGMddbYhNLHWOom0UYVOutoH8Z5dU3ccmIpx11az2aVS4NZX09nVYrkaT0YHe4PMaOdaidRgf7cDWS+4bUIyIAw+T2Ih5XwHoGvbaZA4Buuc4xAbbbQm0sxgrRWgfjfRkArs0MRotI5Ia/QCjbXsXDBqgzquGcEG9QB9CRv0kAW3ZifCL6QnAoC8kQP1b+s5zFaysjUC0bY4ek9beuizzRRi2cpBUKDPq/myzGQ7WRZA2uqWNn98qU7ZVr0jG6nTpnVf9kZtpr8bQNWndG8sS7T1tvG2ZtqWb4kAbMDENnQbYGuckj6gCa8DhngL12QLXAA3deL6BAwUpO0O6ebs9ieFYPOOPieo5x2CIMi0kfP0Jg+EvtjO5vg95UHBDfz6QoWFm1NDdA14+ldkY0Dru7GVZW36hQjCW6gnGyXWJqQbr22DG3q1gW0IKMDcF0/blJTpr7MI0MawB8iwtZch9taFnjb1XeubghudmSAEQQESQiEUItAKNPXyq36+v7me3JvvvpBCQR/BRIJ9Rze0sxerbeRPW8VTr5H64htwjYreeHgLnbz6tVfoa2/+hyT+s+/9ZMegQveP/h//x9/+zd/42s8/+Kf/9A8GRZbGxd7t4ydPXz5+/73dkfq1X3v1zV++dfXs5XrWJVE6nOw3JUeUkQi4oVHofcNc2bY53Dt2fJkNb3RhSFiMB8ni/MdxpLWOZ2u/qmauBhWGdtaqrgY+Z16OjO/qclkvI2N2I5XooI0nk0RmrKSurMtTOhqlGaimabLh5Ne/+5/+m3//e79w76vjSXF1fjaYjD786Wfj3WPK1WQwtB10TfPhk5cH+/kr9+4MinB28iRO3SgbTGdloQaaI00UEAMbTbpum8uyzlUyTAqb4Ve/+/OhGlfSPv38D6mZqYCK1WA4qLtuffnZq0cHdd0Nbz48uvXLz97+UeqbbAxGdXGUzZta100y0oAEwUamaHXhuXp+emLNyGeTNL09gFXtZhfz2TAdXE2v0jRdrUoInQves9sZj2JNHa9nV2WUxrsD+ehZ2bQhL/YSxuNEnV8u0ywr/UK8QgkmMglRXcqLFwulTRzFkxjGu6qIRZNU9TJKo8n+UFMAn6Q49mmbmEg8aCqQce8gRz9cX2Bbk0ri+bpLcyROlMY4iooIjLKr+jlBpUCzyM07uXfCIekCTiJyHHSUNF0NRiX5OI2SBlUWVZFOD0d4eflYJXYwTAgSH3wWQz4ePX36vMiy3TSBjJbLlhVTnCaKhBJluIiI9Ojpk8ujvclVWRoXBoP45eeP7GiRDVLKB4d3jlzb+a4JpJUhCtXJi3NxiRFM8iIbTcrV6fqy025W183d27tCkO2ne3n29ftx/MwGBX/77/3GP/4nP4aeHQQGBOucs67HwkGCYhQGoo35rP9SWWclBE3IQqSQSAMRGdVDn21xgb6oVz9OgAIgpUWEMIiQVhugzRw8sw9BASndm2VZKVREpFQfPROEiYlFQgDRQaB/W8A+hxshiHfe+wDYC8UBkAgFWXoJSb/XISABFYJwjyUibTQqAkIi6x0H6S1yGpRGBSw+CJreSYdKCwtw8NY55wMAK1LWs3N+Y7XjAAAMfbQTCAtpBUAC4oK3/enaWMpYiAMHz462g1gQJkCjNBEi9KnVG4UybULnwHPwwfc6qX7YVUr3Pq++CCgIiZCIeB8Cg0DQihCV97Yf6frLysKIRIR9eKF3fR1fYpE+GaoPeAp9rLhHRtakhcV63kqJWVic86Ck34Lo9boMQoihVyFjIAJt+ikACVGwDgACi+f+TrPe+35cJhCtFLsAgDYEAFREaFQQIUQg9H0AdWDoy2qiUkqJQOCgtQGQjV6XUCli6d3RILKhQ4gowogYvEPqZUaC1Keqh2sD2o3bh6t1tZy3AVzwVjTnkdaJQeFysWqunFJxFwRNBPtRPoqAKVZQpNnqqplO7fKqnc1kEKlJjoM0+fT52fHuxKFPdIhwdXRcvPzZB8FdHd/1i4/Oo8ODyX720391Ghr15uu74wFdTU8t06rGxhyKEBjYuxmfdS4xyfPLtgRIJlikogcKDEvSUhqfVHUzX8FIX64aa8x3vvrmH/7+zz59+6UGnaUoAcrGTcZjTCclJTOIhnFSTHZdXLVgB2Oo5henVfXuRyfHebw/TvNBlEdushfXtRfuIM0pP5qX4nGgcrOoQ2L0YJQvl+eWdL5zOF2JDFRkorpuh+NM0DMoRLValelgqNNUqShYw0Rd181tKJdXULZPHj2+ePkyBFeMiqhIlpdz50LtgVjWa5/lxZtffetk0V68SO7eerAsl5KZ+WJ+8vJRdXV282BysLMXD3cne6Pl/GL3TvL47PTKdZFTfhUmNrx+FO+NdI5lk9hoNOnYjncmR3tv/f7v/1FqBorbqgu3Xt2Zz8LZrP768UN21rgVVKeL2Xx+1WqVShuWa6uzwWGSLB69n9rF4atH1NWzs7PnP3t3fzwgwwtQT85ag7Q/NPM65BwUCImR1pPmoFhHUIzzmqlaX569/8cBzO03XhUyD3/557X38OnH686ePnmOTpRiSxQFm8caBvnKSrdu0jiukJl9upvfuxmvGxkMzMXJZVfOsyLKRunHnz8y6aQY6Mt5BUK7mYnieLQ/ljRLisHOrUl8kEUQHe6PZk9+BlpRltQ2AFMaJetFPU4jSRKHjdhQDBUFXdddL6oHgGefPLlzY7+15bsfXUTBB2DneTQwAjJdNSbSDx/cCE1QMdUd/Yt/95f/2//6P/eL5apuV+sGbTyvy4lVu5Po4HgyX/MoKhp/OUjTYZHX1cp3dL5Y63joQpvECoDyPEIQZh4Nxo9fPEtHt28dDF8+P4uz4a23HvgsfPbP/uyDdz46/gdw
vLN/fPTak2cf/vov/tKstqv1EowbDZPT6nJR14Dae0bE1gVUEpCaDsbDoQ8hGQ2ow2yw41d1lseKLbRlMTzqypkQ3nn4yvnlZSZqvp4PEsqywXreJCNO00HH3PngOocoepCJ1sDBOZeOdibRcF2utQmGtLXrNMkHw8HV5QkDA9GDr3/zxckzkmgnL54/PY9iFVwQsemQRLmW7aKpXvvKt8+m07KpQ9mlUW6t9UBGR08fn4zyg65aD8ZFcTQ5OzkNXWMvLozKsqzAwSgEPr+6iPK0LOv5VWddqyi+uGivXs5eZH854OXL9x8NiwMahuz44O0f/uhqarHBqa33bu3P1+7j508pMj/66U9/7Ve/1Tqe7B8UeTYup19//cGP//j7xdHgvc8/X/7Fv3/x+dkwznA4evnCzUu4WC9DZH/2wU/uvfENSpTKsq6s17Y0WRzFie9C1zplTBTF3jlQSqkYHELo8lEY7OjpZXt1dX764kNlzOnpaXZwcHI6/ezD3/t7f/2vzi6b0e4hKGpXVZFl6TDp4+1a5xExipPad0Bqd2+4nF3pOEsitZzNXnnj9U8+fBF7u7q4TAc7nRMb4OjO7XL2BDh78Xxetc3Nr92e+vHTj54WhWqWjYrUuq5B2/E4H6V5Pb/gehbnRqY0PV/kRxNQ0fzyov8WfOX1B4tFc+82XkyvpovZet3kxSCK8zRPUqWILQQGwMCBxXWdd97GJkURsU6Eq7oaFhGIt23XtQxGKfLsXGR0rEzjggcE13bO+hBMYgI4BmrKNgTUpANJniRxTEaZrraVb5xInMW5MRHwcrVuV0tnPUZakWYABkegjCbXzQYYQRNCa6IRjHZ3auWTtKBMt64X6pphMY7FKM61V/OqsdSVotKIUwOJtpmKvONla7sWLGPTehDV1G2aJBGEiF2c4OW8Wczd3IW8iMbjtMgZWMAzkkPkOE5FiNjGkfYc4mGyaqiqO/DBl1UWJ7EmQY2KIkWKASR4xEhFiBoYDBKBttaKbzVxouOAijAWEN/4VIEEnM4aZ7mtWwA1ygbZOB7vDSWwMoJkWlIaozgBAxADOA7sQEVmfz+TqGa/RrERmXgUCg2l1caQVzbwOtigsHDcBOkSgwG11iaNExQkbjViaBZxNKJYEanS2i5IY7vWCQMSgZWQRTrSiaMEUANSjNy2JTGBEGAkqIhUjJpQBeYAFFA5gMBMAtVqDd63Sbeql2QbH7q+PEwQQAptWwUWCna4O2wSrNuOa5uNMtc0sTFBpAnO+QBRZDsrQo70aDSJ8qFzTdfYECBQ6Lgj0BhAiahYTBprhaQI2Rsky9Q6652LTfQFKNpIfbYUYWuQQtzUI+4hyUa5I9BnA18nCW2dVxvxhmw5wBdIgrbqFNl4s2SbJyPcA4kNpZKtpOhaHwMCvW8Jt3qVbeYRAGwjk0E2LdiqRDa6lh5n9QSod7j1heC3SEs2tU+unW2ASL0WBrbqky3VkWuysPXkbaKDtk3uc4Rk434DwU3AE2K/ntkIrDYdoWuCtYFphBtGJdKX/d1WPMMeoclWLgWAhCibel60NW4h4TUT6X2DQEhfaIeuzx9sYq43zA02S6ftKYKNyGorvNnQMUDaYBfcRDAJ4iZPCED1C5brC75BZrJJIN+8tj2FWwi24Vj92eptbCBMqFAIUYEoFLUpngYk8EXEKwAC4SYwa3Oz9SuBrUIOpD+jgMhwHVRFG10TbDHZ9qJuUNHBJK8varNuKm9/97/633z3F3/pvb/80fPHL2C+/PDDH9UVfuXukQvy6bPp4+cLdovdw/S/+vu/tTOqq84OJgeR0a6eFUVYV3MyJktH1hnram2IMa5tZ+JYQoeoU2275aXtwmA0nM8/Wten6fjn15fTnZ2H4/Heony+e1B0lZ1fLa23wpaMJIVybdPUQgT5YHc175Qtmb12NI5hfbVM073HnzwtV+XNo53Tp4/0aVCKP736+MbhAcWNKYb1atm0+vbtVw52b7337jvv/fST3V2TF6jU5E//8kNG843Xb5bNZWq6mS1LqyZxosSMs/ze8WsfffDT3Iyr+Wk3f6rTOBKOhtBV7vN3n44Odgfjyb27P6f81cFu0dn20UfvxJRcvnj58JVRrWOlTTEoFBIBKwW+4XxYFDAMa2+awWB8FKXZ8qJ+9ebOajqPIm1yzaiuqnWxN24XZJnv33twdfZSo4sM7U5ySvRkDzp0Jy/XJ+fq8Oa+GYwCt3EaZakYZcRrQBnuDLjD1dwVxQhh0S4X1EWd53iU7e8PlCTz9TzPTOjCdH4BWsajUai71XrqbUd2Da7ay6H1NG8XN3YwU63VEBV5nOuqWhVKokju3Rh/9MmMWRMGg+LE1V24uGgS5c1o4K0LnSsi364WKhvsjbP1YsEcgi+7jpIoiaLo4vwyQo2BRVKWYlX70NadrUwSoFpl2ozybL06KXbzeoUvTxav3//abL6CAJcXSw5R1S1ns5MsH8wun3LQe7uTwWSnnK8pwYOjo3fefdK2eFmVNo2L4Wh9MVsvpEiKZx+vf/zJRWrSo10zn13sjKIQ4Ad//rP+W0BELBKYmblXLTKARhUphYA+eBe898F6C4geAbQRJlIYkQEAAlRIIGK0FhHpLUcgWikBCuxYhMUDAIEKmyLnIUAAYecDIQTU1kPPtY0xilBAWBiBvPcs3CcgIaD1TikFhCKsFQUU64P1ToAUUKwNAAgLS+BrGSspH5wmpZRhICMoIoa0IuO988KttcEHUloUWh+QMDa61w0FZthk82+fhb1WCMl621fMAMQ0S4n6IpPiHaMSbTQh+dAnOPVjQcAN5sYgfZh1L4/cRAP21IdI9UMLISqlwyZ4D0ghuA2GAwACinTUZwIF6KOyPYc+Z08A2HvxIRBij/h65zESYV8XQLAPlN6UHBABhMgYAuyLLvSPzETFITAgCrPywXnVuQ4BSaHW/cFBa419IBEAIgXvBMAHVoieGcH3AwUiBWbH7IJnCT6wCx6RDJFBBELnGQAZBJGcdSQatBIgoE1TCQGECElr3bMthaafrRilFCpB6UdAZvbM0l9CQBYhVCKMQAQqAPfxU/1f4XaV/PzjFZkoSfdHx5PZ9DzPotnlrK7XcYQ6SoRio2O2PhomrEAQbO290KqsLIS0YEIdWi0YPEljJc9Hz1/OXdUNU3Jn1UsafvCoikN1/DAhhrBcLZbV6hIvLvT8bP7Xf+f+4REtl6ddF7/z4fkgTQUwP1J1Va26rtCxVtTYUFeMHg9uh699ezAv48s1Yiyu8+0lf/i0/cm733MnTCSplhh47yi/df9wPNwbHtzJjm62T18Oc1ovlo5irWIvbNIdHaUAdHJ+sV619moGj58/eLDeKY7T0Z26TMd3Hhzs5h99/GHbyfHxgRH68bs/TXJv6yZrJNjURfnhaO/88YcyX2RF5Cx6gHpVzsvHk+PD1aLUWWwi1Tbt6dllE6BFFnEWazLkXN1drlKlXOD9O/vry3nbIencuyKTNNfR5KsPnp49jnfS048fdy2Mbt4Q1Kgjzb6enjTl7NmTyxdna9WFROROQge
jCJf1mpiGenxYrMqGMHB90a5Xh4Nkfzf+8N1pOhyhE+VF6qV69n1Zjd7//h8vPv48ycx6Ue0dxxQRDwZUDM4/fPf8w6dVffXgW69ePHu588rDKGrOTy93duGi86OhiclPRkECBrGIiKGPjLRaS5rgZDJ0XUhGiVjPtnzyyduO1dHtu3uHN++88er06uToQf7o0xeIeDlvQhwvLVsPLcA40cqQQGQGSTEquLb1fE5dV+zF2W56NV9QZCaj4awO1ao2SsdJJKCP9oqDwSBDlsWLT/9kxt9cY/pA6/XJhz9OOFBkwKKKMB9me15Wbbtq2/F+hKk+uVhlaeob3/lNuHse43w2++TjR2uM7x5P2rIj5Ns3xo2TFUvTuPc+eP67f+N3fvDJR1rFLy+ns1m5e7h7/vJsZ2fv5cX09W9/Zf7ysUIjyErjdPqiqi91NNQ4HGRx2dS2W3srg2JUtYsiKxAxuNBWrdGJXXXLevnq/f0ojtHQ9GxxtHv427/9zd//gx/8w//z/2l/7+hPv//hIDP/n9/719/89rdNPvLptF1M4zzxdZnqfpaJcURNY6NYmRxjo5t1GY8mXceKkiwfA4oNuLic5mrga0cIXV3ujEdIMByN0DNgZxLxXembzLVNbFDladM1wVM/qz+8cWc6u9JRbGJUKmEfsmwUgmtDbdLYeVs1jQ9+NBzU1Wq+rknrtmyauqnaFWOwrVvVTTIYXc4uhX1qFBcZMEa56bwXDuvqcnd8Z/byzFtvJnmk9MMHrwdr5tN1YQZxnHlXC4dipxABty5NEtFgVCTjrDA/+8kfBO+aldq9dRgu1l3olovZaH/n+GDv5dvv7e3F+UEy3CmerZe3j46Xs1XTuO/9+3+zS79aVtXbf/kiy9IQrVQ0ff78xd2vvX53dPNH7//kYDyZDBw1yeXVi7/xd/+e89YgxZEaHd25OD81hrz3nj0zs3UBGts6k8WGcL2cV6tzU6C33eJytVyutWs1ya/80jc+eP/daD997+RpKGs1HDW1JSWCFKyfXp3HUZ4WRXDBO8cBtDHW4XpRh9YlsV539cXlSWcb23KSFrs3dkyaa2PqtSPiYu/oorxswOvjNzu9AwjFJIm0DKKkLlf5IKYosm1ddlLspIdv3l9enh+C65x2wWo9ipO8/xbMp5fBBTRRrGmQpEYngQWViMLGd9AFQAaRql73MUaE4F0QEQIGQa0TAlQSiJl9AGUANQs7653iSkQ8YmeD60ysrG1JRf2SWKm+vCqRTmrXkjfO2rKzOtIpSGHALtfGWRHIs8hKQMIgIEDMgb2LYm205oCu8oiCZTuYRCiIpERxhGhIs3AUmZR2ltOz1q4D+qLIRkXiXOtcKF1nKFOUkHExIAZ0lQX2wIrr5XiUnb1cTeflxdwumDqNGtIsoog4WPY+ACNY4WAhsNE6juHyahGACEPjOkKsbeO9DiIqoixJFKJzHkViijQmVbsO7EU6TYqUjEeDLE7i0ai0XJWO183sYrauurLlqvFFke2M8/3dscmE2UmQpnZZFCeA7K0hwiQhDty2LGxSZOrEdBLb2HDHDWXGDJSt2PrGcjDaaDASKFJGJFapXkOpktwAUQihLoMLA507Z43JGcVj6GzDPhjQAUkrrQB6pYNG5ZmDd4Y9eB8ZU7YtEjJIABeZzBhFQBLFnWVNGo3y1kaRYWQJVXC1q23XteMiUQjVojKY+IDz2ZUP3e7+UcpuHGsbjO8U5elyWZnYWGdFACAysYnjBLVOI6MgECFGKqB4BIoSAmLn0jiP04hMHyXZZxJSHyfBLFXVwvU/vSRlaxbbLPi3i+7elSUbtYkIcF/iCvlLxjURhL4yzWYVjoQbbcrGo7QlPLjR6nzx7/Vr/EV8Dfbz8G3+kQBsgqB7GZBsWrm1v/XvwesfcVu+fhscBL3GB7a5QtLX3hKQbbOgn+eD4Cb+pq8Gj9JXIAYUDtu29e/5Uh22HjNtlUVAG0PbBg7RdYE0RNp4snoY9x+56DZ6LdjKjHhzqjfiqs1htuL+TUtkC4Jg46/akCigfhd742PoPR5b3rb5wM2J38iw+kiMvrP9WdoccEt/NpcCv3iZ+uWcbCnMtdqLN4forQnb1m+a/CW81r/YVzcDEAYFChmVkDAqIezvG1CICPwlYNfP52lzIyF+GatBbxJlZiDBPn+cUCRc96Jf0W1PwpdQUV1bEyWugbLqpldPbh7tvPpf/p1I13/yL/7wZLoCZZbLVePFem8Qf/XnX/3G1/YjmXVBW6bY5ATkfZhdfOyBdsb3xDdFNKibijSleSp+6EPnu6VJQgg20lQ75VISwlV1dngUAQ+C6y7t88g4QqjahSRpFk9sux5ksiqXobW+5SgxU/tcQaEwBOBiXDz82ugP/uCHt2j3xt1DH2B/sjO7XEIZddaOIAodr+x6Nz1YTK/2hkcnL95P8/HOUUY6d64tu5LWzrfRaGdsQ2WMH+fZbN0Fl7QCMSJS+PTlp7vHYxC/nl8Mk1HgioRns2p61WSDoWuSTuyZPXn9VuGkMxq/c//BD7/3H44PCx/K1q7Jt1Fi0NdKHDIgse0cKt21XWbS5bOng3xvMknr+XRxuqS5LxPIi0FTrzSSqypXLa84EESz5RpAkYq8U6niVw7SwzQ6vRQTwmI+u3087laLIt5N8qitvQ3c1s1k94a6aE8up223Otg3Qk5T1FnJh1G5qiO9o9Hl46issXRsSbxvmGccgoKgI//1t+79i3/7vlOiU8Ou2j8aFKNQV4thRsqi0sPzRWX1YNE0cST17GUx2Nm/ectw59t1A3WSJ1pljGS0uODnq3VMbJtmko+ViefLZV01ylPX2my/2BsMXEhQLLH32CVR5KvWUrMm7YLUNT15evnw6K5tFsWYksKEuZOqGWewBtG6FkflUl5erkUeFXk+B5skVMR8eTbV2ejRJ5VvbAbKNjCajMqybVz82it7x7czH+pgZTK8+dHnZ/23QGlFghh8b+ZCQhSIlBLui5ErZs+KNDMjBgCPqBQCUQAkxCAMwqp/JpKAAPUMGIRBiEiYqTcwQa8BhMDsw6Y+ujKGAwcE1JqoT24WF3qXkPI+EEGkjZegEIAUEhIpAAwC1rsQBFARwvVTt3/oMAcRBA4Cool6/aMCjBEBMHh2oXMiZdPawMKsFGjFSislSIENQf+IYwTuI/YQiZTSqvU2EjJRigh9PJNWxAG0iRAxxEIESpEEDr3XSzg4qwAEpXef9QY5BYhKSV/HHagP99dK9yJdAUKWEBgRWBgQhEiEfQiGVD/ObYYl5g2NwQ0c4+tSdCxKKRFGFqWUkj65nHtjORExByDVP66NNj1qCcT9sRAIkXsO3+dECbBC1Aa11sELbmoZgEIlIiyitO63TZTW/RjIwCF4EQ7snQ+d8yGEEJg2cXcCCN75HrQRoYgoon4/oL9FQvCaVD+piIxWpIg2YzYzw/ZDEFBC2ApcETaUTbjnTAIoECRwfzEBiAgFriugPXr/cu/wMKamPg/xXholxd5o0ERrhNbrThCjGFPmumxGgyLi8vnFxeTObYVqXi2zWO
3vjopC1WVTX87PZu3hKHKZxkEcgd07GP9f/vu39/PhL98fDrK2RGtMMsrMziGvaz5t8ff/4vT1u+6NhxPe4ei0e3beWaMmOU0O99fzmeu8eMoGidPYepauGU9IDTI7HpzPl6E5vX0PLy58TblKmuOd+M5O4lddOhmPH9wbjvZYjc4aHo+PfTddN/Vk7wBJFxEuz22ssr2jYTLO0sLUi/L5kxclq/X51f0br7319V/40//wlx/+9EU5myeDWKZX+4d3besHO8l8cfny5cXOzt2TT97NX+pxZFrb7cb7xWjYurC0hdPJp9MSIdQXl7OX08Eos65dlw0SOe+Ws3K8N9A6TGer0SA1UTIxGZlVva4GXp+fPf9P/sbfPHn/xx/+6N15fR4I0cFoPDi8dxsDHt5Inr/z6cXJ5fOTq+m80YNkqHTmuzs70Z1xRA1WDUzntmo773i8A4sXz2/de/jj7smfv/3RXpYu54tqvSCIbjxI1p9+72KxVKsqwZAkBRRFqJr8xtjcvNXa5rgw754911Hy4Y+fNgrPHp3+tb/y5nvvvxMlg51BdBnBN+/sradzSILJsbMhM6Zet6FmCZRO8lXX5nGeZ2ndziz6ZJRFTpXzU+IOFPlQT4rozp0RaHr9a3ebiqtV6zxIlHjfgvhb+6OD23vlGu3VktlL6E4vZoe7iVZpUzrfibNumGdl7VzpqsDpCE8eP9dZ8jd/5zecHb84VfvHYNef7+4WT84uVnVIDI3jCECQOCA2Xegqr8gXaZSmurRWq82M6PHzS6668Wi4CuHJ5QwtS/DrGQwHxU5enNqlzuk3fvPn/uUP/uT985WX8P3v/fi3f+u7cYzaUIDG2y5J8jwff/zhx+n+7eO9CRFXq/WqWlhuNcfMECM26+VwlNvGetvFWawSUgxppO7ceODb1WicqCjxbWfi+NXXvnJn/+Ozs/n5yqokXnv+/MV5Wf75d7/xjb3h6PPTUwm8vzdarlod6aqxACpPEw5BtNx85bYLnCqVDjMlWb2sy8V5lg0l9si+GA5ba53r0igCYAhOGeV8A0G0FtdNiYzvvCAS6jwaVeVL365LX8UJaelc1wYwKKiTwjvhTjAY8S5B7ebPPIfM4Kr2SoPOs5OzqxBaJ56AYh251oXlSgh3btw7vXoeZZmzXK2q8U5xePvug1e+Uq+bRJvVlfNKZ4pNHAZjGprU1pcorUmwXi8BiWLJisijqro2yseTwSu2u9zbmxy9cvzik0e0NquLs65tf/Tnf7534/anP/nZwdGeMaIUnV08fuMrv/rpR89F6qqcacV33rzx7/+/f3q5OEkH0YOHt5+fnWTr6d/9W7/4Z3/+vVGerC7arlbDndFqvlY+9o1rpYmLHYCgXAWEcRaJgO+sAurK1Xp67m29Xi6PhvfImNFQ37tzf3FR/ugvfpzAR6tnLyYHO9/95qsX09nr91+3BmKl2/m8a5t//af/5uuvvHW0dyvJi+FOsp6uDoeHTbs2lBpjFlfrOE1vHL5areq7d2+U7YVJVF2vB3FS5EOCalmXq6q+/cprcuObixdzrVw+SU8fPzs4OtzZ250vVgf7u5enl8nNg+GdW7UkVnD3MHv06HE6SpD03vFO/y14+vI0NmnXOkNIpIJ1cRIpYA6Wgnde2q4WliBEIiF4EJ9lpsji1krwjTADa9t1oWMNqElFJrG2BmHvXIeoUSMgg24t+xA0cZaRVqAVRnHkva/bxgXWiIaM0cDeVcuquXLKBUOEJjKEaDtgjpKREFjXBRFSqgveMwNLs5LyvLtztOONbQKYOMqKyEAQJAP+cn66cktPdpQkxmACHigBrZ1zHJDFsw9KFHbsq04DLZfLGGF2Vl7N6q4TAjXIdK4Bg01NnEW63CSCBe8DIoSgF2Wj0CIG2wTfZzcSSeCAQSOh78gJExJwF5wBcM4rIR+w5ZAXajQekudIk4EukjDZHXVRdPLsbDYvWyeI2rdBgDxy17aJQ61irUiBBuedba31xAwoaUJpzOmQOrWahzJCkxsMwQIiS+hisuJaFvZxLKgjJaiAgJWMi+EQjXSNVrIqHThh9EG8DY0y5G2Xa00U6s6GXrVsIh0pZktAhNwXO1HiWxuExHbW98g1dwABAABJREFUxEkSxYRKASoiIV11TKINCaZESklgkqC1psjESSQKPIsAdLVdrOblesrBSr2OlM4YlTLzTkBBnCSAFEdYd8F6GaRRlsRGkUIRZ6MoUwCln6OESJFSihIko6MsDQE0gCalCIIXMJpAvPN2OyO6VrvAVmmx8WJdS4w2pOdLCUTbCXgv7OhF3dLLVL7099LLvHs9+Ib4yKbo+jXYIexhgvSWLuyTh/v9VsHtNG+DJ3oWsiUs8AXCkJ6LbAwOPaShDYbqI5DwGpn0r3y5g9vfESSmTUEy2NCba/FRHyl6zbMYQGhjr0LeIq1rjVD/if0Wsur7QJtWXDvdeqFR7xXYnvJrjRP2kiXErWr+y5ANrqEZbLVXW6NY/yvbTGuh69Z8gdOu0VgvhPoSPNpQPbm+PtscKuh3w3v9GGJPafrgka2bS+h6VSabjKTNbrRspvFfeL564dk15oLroxAKohAIKlBfkhfBtjTe9qpuEFR/1pg3jafN5nifWkrYp4Oj2jJCkS9FOF0jvi+hopt3bkpD84vLB3eKFDxBXS1WdRIGO6Nn7z4nE58+OUsyfOP24cHu6PU3945uxMvVcjg4LkZDLZgafWZXLFWmTJYmGlMAdHGIoqCcoB5ozJfWo7oo0rHOdjUPULnxwU2dH+hkDMqgSTDw6bP3xuODLIkoiabrxfT8hHeLpNgdTEy3ngffsehYpa7FycH+orpAVb31i1//+O1ZEat6ukIqBuOdIKOTj18YJdPF6eHt3eFw1x86A/pwOCnXdZrpdJCdvagMyepiXq/Lr339tWV3UdftsuuYktFgR4uz3kpQdQfBW+O6pvO4ly2X1XB/d1mVXQnWw7Lj3dh1y+l8oCy4/VHywft/Hidh1i7zKKIkOnv2Yvc4L5Jx2SwRdKZ16Fh0unv0mm0W69X06cv3R8O9ZsFJsUuEzbpdt86Mk8VsJmW1buaedZqPQih3R2M2YrIBhDKNktu3BlW5Wl/5wP7uLZeiK0vLolwXXswvX7//2o++97O7N28vwTahHk5uZpkWl3rb5XH05OrR7aNjo3TVVC0HQLo8vzTsDu4On364+MEPTldokB/pYVTEfHicowXuKtN509VplqnEdF2onWq76MZ+osCG9CjJB634yCgD6cuTs8P9iZjYi/VIUaLBpJ1dp1naVSWGrkhV2Vqd6TgzJi8gisfJ3np5SUqlySDNpPTrddeRqydFkiVxKc1f+853nzz/qMhTJHz9a/f+7Pvvrsq6kwiaBhhRaGf/0Nbt7k786p27H7z3wdfeOH711T1jirKVF89OJ0VycdrGyeCjurUKwcjZdHnnzm7bNK1dNV2zfbAjAEYmEuxrpjMBKkIhCsD9I8AYccwMwISayPRlyFGcd4SEGgjIgwVEtcUKzCwiRCoIMDAhEXIABt58g4EwItSaC
JQ2GgCBiBFs2/WiICKMtNZKg4AhZbTxHBQiMHBvHRIEANJKIaCgF2D2ihQAEuogQRB88IAQKe2DB0ZFyjFbFwJCE3ztnQ+CwgkhInVdJxIkMgqRgbXSyhgUCMAIFJmIGOAaixBG2qRpRsBKR4gKBPuhobfRsUjTtWGT/sQszOx65ZbqAzMBaINIQKESCP2TnkgxCGEgQmZRyvgQeldVYNaktFZE/a5HX51TjDayCckmBubQG9mQEXqDMAKBiHAAEeaAhNjndoMAkupTiFCoj77uDeMARqmeLvngCbXW1A+LRhnQsH3EiwQBRRGpTaEEQNIKhIMPzlukDXRiZtIkwCgUGU2KDKkQvFIKkfqoLEKFiIH7OPWg1HZjZTNOboYTUnQ9cWHuN2pRKx1EGIAEAjOIsLALAQkRUJMKzIQKiQKLC35bAAEA4PGHi+kjGWgVD7Uexce3b4yP9ifDPVThsj0FwqbuKNJBmmbpfa3v33zIAZh5PBozh7oOJo0Pd1J9d+9H338+taKXbToaJ0n23/5Pf/o4DGikV8Z9+Ml6lGlEFxL46i8M4mL98Ut6OrXhhCbDTFN9K2aVyXOKl45mF3W+Oxrnajqtl86mUcyW5qfu0w9cfESHr7wa7VysLmVctAc34+VcXr9ZvHE4UB653R0c3k+ifVR5Vkz2852zxy+8RPfuvWZS9eTZ6eHkePjg9bPHpwqiw5v3O9u65urweEjGlKtmPlv+k//n/221KIP2h4cD55pO8l//O7/zL/+Hi6efvXd4K8n3boqanD17efPWwcmTZ9/81puXF6tP3n301W+8uma3bIND09VdbIwZxKbARCWtb4nErVySqMCOLBVJSqizqJifrBTG5Baz89n0pNS6ahdXofVxhL/wy7/08vGJynVZNnUXPvjgnZNHpyBBgYpMIqxD3exPkuNRpNmNh5lE0cI1AJagtbYxMZjYG8SLstnZyZtZMJoMhnp20V45t2iyyAxu7p2uW9vazMRxcSx6MMDm+Q/+sHlxcVns/Whev1eVtWpdnN/7yltvv//itPbNJH42tQOdJKlNM2m9W6OHgfHY2safdz4m47q2mU6lA+/A5Olkb2e9mikKk8Fu29DibOVdlw4KHWMkND7cjYx59PlZGuFoNFwsqzVpBp2k0WiSv3jWeEheTF0EkXWhXNt0nN+6f/PRxy+7yhZZzN7fvbEfj/bK0lxWTYiTozTWIZmvpsNRMusWEEeJyep1O127mRczHMbjqJ7Posysy4Z9aLffglu398XZueOnn11khpQN929Mlufr149u/O3f/M3/63/3Pzx6efLs8ZN7x/vvPpl6z5+enf293ZG/mH7+8Tu//N2/OvOEys1m8939G2tWZYse6d7dm1eXKy08mSQs0bOXz402EkhrA6KJKI2TLE3zVVTVLemsKqsihzwxy3U1L2f/i9/9a3/wT/7Ni2nbdkFATGKuyvWqXrzx8PXPH6eD4ywzkuf48tmJE2cdagVDTesa0CQKuJwv02ScDCdRN8zFCVrBNoqV8z7Nc2+jEELXNs53SZKz923dxFkU6i4rJmmcLVdzjWpdXYTQ6SgS5rpu9if7ddsG26ZJvihnwLJ/eL+u127NeZHFeVRfTevSgQTbNnVTt34WU+ytjEaZ9xJCFySwMmfrmRjFCkMd6uXy3o3j/Vv3dBJ3rRse7FcLLsvqzuHe9PJ5msvF+Xu2XuvY5KOjxWxtkmJ8MAbEdt3ZJuzcuht/45fml5/87J2f7N47cl2JIL/xW9/8k//w/Xcff14+Ots5KI4OD//w3//st3755/dk9a/+p39WTI6zNPnBn//06foUv28++vDpP/zf/R8WF8/+1R/90c//1ht4Wf/pH/7rYpzffvXG56fL0qpwfhWZSIusyko7USZFduV6YaIYggDoar4EkLxQtmzQ4M37D6vGGhbX8mLdUpE/+M43ysUTMxo7SNZXK5NmdYschFWn0Wdx9Fu/8jv7k7xetcwNar1zNPzR2396fPDQds3R4UTIR1mcDNN4bBuQLuDJk9MiHXbVaQjz47uDtCj2b3+lcpldtImyi/nZKtRtaE+vTkbD3Oh0Or8UAauyWt9KJCGyGuoHr94+P3fLq8vS+O0KE9quc96LVmKd9CsixrpqjULP6BiExQkSA4DRQASiQLQIsvQJGkppFWsJofFNJ5YB2QsDpElklGGEVpghRKhAVBJFidGkSEVawNedj+MsjTOCVjpZLcsQXLDeIEFntSAbBSpWSpRWgb1CUSoOwSMJatrbG5+u6kUtZtblO0pIx3kcJWEQqcWsW6zri6k1Wue5GuUUWDrnaud7535dLoi1cmCrjoJSrovJRCYqV5VHneRZ45YmxixXaapcZ6sycASgokjFHTaBxPvASgmowJoQFPooSoL4SEcI2LqSwYn41iEgJUolUUJknISuti6EbJhmGe5kKVSdLdtqsS4bflZdXF6tLqbrtgudDVmcQRTapll1hiIhRVqD877paqPB2craoFQkQKKJOTipREmjDaiUvORqhDBY+CtnImDP7CBoQ1FmChOrdXspGFJMY29cA5X1tlWd9wpAyCEBBCOsRJEgRbFOSPf+FeetYnKhDc61XRNrVdetIJKimHSMKjGGRVwIgdGGoHQUAiultBJAqG0DAdNcpcO4c37Vdau2zQceoHNt2bRlWVaxbrSKlIoGRVoMx1VXJzG1nVVIcSyRijVpQuWtE0Rl4tZ6QyZWGkSTQcuuSLPQh0eKCcLIARlIKM8KaDt2lVJfiCy+8HN9CQXhRpwhuHVC0VY/0oMSFASQfu9WtrkUuOVBuJn3bqd129U9APB2xb4Jv6FNTs7WU7UJX75uTx/209MARIT/uFTZFpf0gnfA3v62xRqbd2wCcrZZ0Vv60xu0YJsK3bvd+kAj2Gh8rlHRRnfTG6BwG32wZTtfLrK2FWTBVjG01c9srVgbbdAXFcv6fGbcyrhC7xrbcJB+5da3nTfMiPpebs7uF3PnjawGNxdvS6a+BAI3DZBrMxlenyrq/0IYtxaEjdRpo2L6Yhrdn5/+fPSH64OrNwfaZIhv9ohlq2TaqJSuNVH9f/tdYAYCJaJBEAVJVN95EgBg6kuybWxkm4uLsPFSyDarqre59GqxTYopbWnQhmQiCG2K3fzHt/sGFbkQguuYZF2uk7hYrdrnT668Tj58/2Q+az03hPDwMPrVb+V3Xt9979PPdycPFBlsnA9VmqXNuh6PD5JBfH511TkHOmM/7+r5MJm44NuuGU3u7CeT2fLtbjotdAdkllfteKChqp6dfW9vcsNkO+uyGe7sJib2YTXKB87C5P6NpuqaWtJhlviF0btHyf2Xzy6ApavL2JiuWhYqBamxi1LtQ9eG3dHoIN1tdyIiWrZZXHz2/LP9qNAmqlcurLumsatZLeh2j4qr09ZCfbWcei0Uxa3gKMtRSVmWOpY7D19dezvSYM/Phehq0dQVV56fvfC5i9Z1dxnsw9duTMa3VlztHQzXK98uMEmLZbuo5iFFszt+aNTEd1SYgQcBAp2Y9exMo7XVTBG/8ubD
nZ29l48uRNK9G6+8887bsQ+v3jt6+eyz2htSQ6CodbB/+OYoHa3aKXrHLmOv5ws+PJ4coL5aLebT9uY3Rutl4+uIfCR1Xl/hW3der8PiYE/fLO6Cs6GTunLj4R54f7z/hjaTOOYgYEQEBuVqXbYwfcqfPelqNwwEtatu38gPc5cqiwnbEBDceKgB2zQGBaFsTUSYJqq8ujg42s2L4VVt3LoU74fZDvskz4tkdLScnvDqqiurJMZ151yzNiZCJBHfAheD4ZR5R+muqdLJziDWT19+JsavmrWwMtaOlbma1zZI4C7NYis6dLicwc1br0+G5mTanr18sZNleWqKwxuRNtwtFuXZ3m7ubZskpKQl61+5PxqNcJDa87PV7m4EJKMMslbGSkc3d8uVXbebWGulCLEXqjAQolIg4gMEhsDM0kMHQCVKGUZWCOIlSAAk0oYAQ2BjCAA1ktZaQBTpwME57714FhAIwIEDCzOL94FBIqONwlgrbWLSemOdkqC04hC8d0SxiaJIG2EOAQRQkRLmfpjxPpBC0qS1EmYAYEEC3WfqCAdERYBBPAuzYkD07F3gjtlyaD1vFC4iCtF6T8AYRV1gLRAAdV9JUqjPlu6HMg4izKQUs3Tec2ClKY10rJF0xIx9ThOIEIgP7EmJ9yIcvJPtQ036GvBIRum+WCUKMDACBB8UkSgMHESCQqU1klZKKZIAwN6TEPYaVEAOLES9ZouofwT2iF0b6FOdEQNwX9Vek+5lTbgtbtlPBYIPqEApMloHERZmESTQqPvKC0gYkelsZwhJKUNGWEgpAWaR4IPWWm3sh2KMRoTAEhyjiNIaAJiDMWagtBUJIeh+MERCwKCM9OM4olJIqAAhADALIFnvCaWPWNKoOu8JJTJKfCDCXsjaT+UVQQCRPnsL0YfAEljA88YCrRQrpTwzcu/C22bh9WPBwus9E8CFmhfnq8WjZZo9jhJzdP/Wzq3x5MbearVQiZnZZZHC6Fg18/X5xcx2XZrHAgaCAlTV3CW5Ojjad6W1Qd59dDkfps9Wpo3p53/9Xjz9bDqXdpUQp3qnsWF+80Blo+jy3fa8Cv/2vfLVSbh7ODy8o/l0XUXRogLvZNVYjIla621969aE6ejli4Wu2oMIVLzblYunsxUaufMw+daDQVh49KNhcjwY7rRzrCvUJg5RzBhl2e5kN4c0XM6WhrGcVQdHr+8e3z6bN7oYhXphYvvs6cc6Os8GSufg5i0ZOjub5So+fvM7F1frfO+wefbTWKXF3t6f/If3RUUffvL49tHh++9fEcTF+OFqWmRx/urXHpw8//RHf/ZDA81oQK7ryrJenS2zPC3ihG2zO95LhJx3f+W7v0F1d7E6Dca9+Z/9yk9+/J4LAqHCVN3/6utXVT1tu3RoktS9ePni0fP5crpg1CKcR7rxbkj8yv5g1DSr07IywLleOyscsiKOFcznU2WkfP7kqKDny/DBi9k4pr08diDvPz5frucHe3mztDfu3uI4OX8xffPN10b3H7ap1s38x3/0l+s1S5FfZvzO05OkoH/6vc//m//0V89Pni0Vz6cLnKQ3bwywrIvCByO+s3mkR+NEj3lZ+TQeJID1sklUNBwNQgfr+YoCE1ZJ7pqqiYlc4HreiuXA4JdVniUpStfYtXVJqqltOsuSpXu7+3UF7ew0K4rV0qWDkSq0a6zq4iKNMqPbqgmUfuXbN1+c2Mg2OS6//Xd+4dMfn0bASZJdLueR1q71V+vy3u2jlcdmug6OT6s2N+b0shwPYirS2CBACQCHBweLk7O0av6Xv/rd995/p2ndYJKeXdafn89+MR187c0HLxez7/3kh6urdQJSG/18uX7no+f3DvMkSoOUCky+NwAZRumgfXZpK+/qZkVaUGXDbFEubfCHN3e59QDCYmMTIai29QG7ozu3fGmttVoxUjAqikj5VuqS/+7v/v3f++e/v358Cire2x2srpbPnj49GsVgECMTvBfUd4728Wq2cmJ98G332r0xeHGrVbITxbHYtkQ1KIbp7OIxaVlcvMjyoSRR11nnQpzlRmsQcD7keQJgAAAD182qyFJvHfvGJHEIzjUWBZbljAwak5qkSJIshKLFpAU7OZq0XXl+OdMAxphytu4qe3F5mQ9Hq3ndVRYHKH18HhNjaOo2y5KLl7PhaPCLv/FrRbYXfKeBTZFa25arVWjrJx9fScX57qgo9mi81/guz/ZCJ9Zasd4jJlGM4Gazl0Ek3r398E1VV951wKHcleSV3R3jF3/23tPnz+jZx48N5f/zH/1sfwQH+/lFffUbf/M3py8vf/yPPx7DYYiP/+H/6w9++95Xy1P34u2zh0e7tl6/8vXdn/3gz+qV7O+P2sUyzrKs0G3duWbdlnWklRIxQs53rWtVmjWtm+Sx0sCBnW9964pizAV3oS52Clh2qOGVrxzP5vWth0fNkmJ3JfFOu7YKzA9+9IcPX70r+X7wUR4l86fnjaVIJnvDG2y8840wuiaU9YKUXk6vXLU6PDqM4+Tq4rGotJy3KivAFEl+sHfzePHomeZ5xxIkaDGrqhVpRpNBUezmkdpJOfgwvrH39CfvR0nUrm3d+fFe1o8Fy+kyMYmOSKNQTJExpJglGKUVYSusU5PEERIhcO0UusC+9eJccOumMmneZ+sRUoCOEavWey+Z1kWaEoQYHMWqFtV2wkA6ihUBizcqbZsqcCBACqyU6jqniEaTkQuhLBvbNbE2wBIUKZU04jvgRCMp5QIkWYKivGUHLh1oyXULGDFELMb7atYu665ag+0EQA2KLMuiLI9dB03HJExexHLkERlC52NUZHh8PIQALXN8OHk5W1QN05A0IhsIWkiRF/ZonO2sahEDkFYGHFiEKItT37aRIUIBEsbWi9cGtEkRoHMMQJ7BaNNb5NNMJx7G450srtv50rahLr1t3Kz2i7Jdlm3dOlRo0shxcMGVVc1XIRvElClxbYSMSncdeOs0Gue6ynofSZJH2rAQE2puWsBgtBYPOQ7YIjfVwGFEkCkGNyVFuXCMceqTahm6JQNHcTCEwTM1gDujMQOGNtjKxpQRaJTQx+akgkGk6RpxDAE65o2eQwQYGISVYk1KIbDzoXHeAwQvgAKKOIoxJpPGFCk1iJUoCFVXV4tgV0ZznkSNbUWrNriYyIYQQJI00droKK7LirxNjEpjpTULITKKFw7eiuXgNCoW0to4H5QGhWJIBe9RfPCOAAEojkw8GlrfbZEQfkmus8EGWznOdcGwnqnAlyxi2ygiEWTYgpWtfQy3UUTXVjZh3GYMb4hKbx3akJOemOAXlqAvKT42uhi6pi7cLxJ6cxZe/wJtvFnXvrYNMICNGF+uHWEbAVBPoLavwhfpyPKFMAj60jR9QeAt6rgOpeg/Rq6NYVsBD20Pu2VdX5qE4rYr2OtbcCO1Atn6p7ZoadvVTUs2P/SN3FCYbS+vf4s2vev3jbccZ9NN2RIf2MQ1yabW2jb2E7eH3vaEt3Kwayh3HS5+rVECAWYBEgbqPx8FttnS130R2GYd0RdmNN4Y/FD6yGuNgChIoAD
gWowGKLj14vVJULKNKIX+f2kbSITQl5nu9USbk0abO/KLKnW9xOn6xrtGRU8/f1bEA0aqltXl1XzdtGDM5589eeenz3RCBvzd3ezrr+7nA1xV6wcP7yWUDoYxCyybKpEQWLTRvhvEEa0riOIMQjkYH3ZYNaEejY9twCS9d/OIVufvQnRgcJiqPQUXRqIhxoDQ2DaOzWQ/f/nkPEG/fvoiBJiMsxwT1/D5s8udAnNT2LbaGxdtINQuiiSiPMzaWIfl6TzVcWDvL2vfTQdRYNvuD6J0EK9qQkBrK6Xii7IkMDqKOu+vzmfTy7PdIj0cD59fneztD2trxXmT6qwY6gwf3Lvz6UcfNdO1CW5ytLOuBYUXV2e8rnB4cLQ/vH9YxO28qptcqfVstbtzHBWHq1WZxmCiuFtPV/NpFqV5Ory4utR5Pog1i2ZPZV0Pi50sAd/p6XmT6GJxvnhRvnc0VvXV+YuPXnTWGkoigzpVM4ZHV+v7N2+KVKNBsbq4yky8Wpex9h203K3WM3j+eJ4nyeTWiOvw4P7t5dnltGoO74xms+pomOk4AoFoWKDi+epMUJNu67oJXVuMCse+rqYierWw0MLeKHlxeTEeGWVCi2hbWS98Oh4DZ65eM7ejQbJer7wgB6had/vunfWiRKb1sj7aGy7OXyZa7+zuNFXXXr2opqdtV7LAcH+ilLKJSbKkq8oiksPD/SyLu6BjjJ1Pa48ra4fD4Xp1Vox2zy/mWIHvqkXZvXLn/nR2ZW3QRhlTjPXAQqkafyPJD+7dvHkwNEa9nK8jjFShsoOdy2cnXfAtmN3dog3LrusuL2yc6apesONXbudHhVjfKrv++GeXO8Mbh5Px9tHcf4WBN9RdRMC6nikAISmlADZ1xQXEaI0KHLMTht69RRiYr+WHCOB9EJAQvPPBek+KmAORIoXM3OfcxDqOI5Uk0WZXADCwcPB95A2KxJEx2gACCzOgeNaajNGEyN7pzagjyBCCYK/kBAgcAjOw9Nlp1jvYOIo5hOAZrAsOxPtAwkYpYYiMIoBYKyLsfcAhBNEGiIQZAZE0cu9UYwAJwQfvbWdZKUAOwYBCzaKVNmQISUDYc/C+319yzgUOLAFgk3+EQERKuH8gb4S6SKSJQg9GEJXSJEBK9ZJJUOgJiAQAPTOFACBESilCIhG85llIqJWWDSLZGLKh53vAIXgAVArZiwgDAvR52gx91FTP3fpNFUIMIRASgwCQ4EaZ2mdwI0ZBAm3KiIJWmjbJS0IIfbacJhAAraV/IuvA7EM/+QjeAyokcSwchJn7MKUQIAToq+Zx8MJeaYUIomMNhBqYmUEINRIKKGaPgCF44SCbQq3cp1YLYGCx3jOD0coYg/3+kaCIKKWv5x116Z+tzoqMhmMdmzg0tu2cQ3r7k7NiEt/7yp1bD2+ogWSShNIuWpeoSBsTFwMgyLKkXPmqKof5SCjs7OkVXJ5/fCUYD4bqtW/fTwbxyds/jaLm+P7wk8/9MIrfPJgsL54Od6MHr+cPlu3lC9c1+PmZTVO1E+lXjgcXNgx3B80Y575N0iw/LpSKrMOyapMsc6E5++CHg3GWJrh7Y5wNurNn9Q/fOfnOq1+lEKEn9DDICzXYGw7Gq7K+ceOYXX12/mK0l97YPazKdmd3P873rJMiyXcPb85P/Wz62Z2bw5Pm5eMPPkoHcZD6aGennDWvvnr76O7duFBR3r7+zVdWs+WHP/xAwK+sHQx3Zld1Nrr3xtfeena2GO4Mnb36/MOfJaa7cX/naK84OT09mdbf+bnv/Pb4cPry5dnTR7IPd9/6+aydh3IeLp8163r/cBztHJy9KNP4wEh9dPNgeXb24efPzfDg5boeKj75yw9OT0539sbxMDm/6oS99T4SOaDkUIcikmGku9C5eh2h2kli7hpPkFAUbFDOvnY7e7SowGJwcrJyL2s7X4S59b80UQ+OduJocO/BK+vLsHv3FS9uJM3puz+xFG68dfzRifv+yzOrQtcFJ+H//Sff3xkkrrXGkBV5dLp+426sSKI8jlLgJtSNTRCUxRC8VSbNc7uo0dZZkjpnXddyFn9aP1NalIYkjZu6CR68BGOSy9NLVqQiJd5Z2yZRFIGxS1lzbbQcHhXGIHTUehhm2dlV6+qA82q4l4+TdFGW77z7LDW7F815U5Xv/N6/HQ7uUpIND0YXixcYwiAxiYke3j344c8+riuqW2g6uj+Ky7o+PiqGk+TJxbz/Fty9e/z6qzvLp9Offvz8dpFMxS/O5kOFifWhWRzu72RR/JNPnx7tFvfu7n58ugCNf/aXP37tP/tN8Xj64jwb7+s8bTqOIimyGMDtjY/Wy3XZdBRHJh606wZNupy+mAzHSZQQKxFBioERBaI0IuDJ5KC1IQDvHeyMJsn3v/+jNL741quvTJ+fVOjDej1RcJDFV2cXl1fLV+7eyeLiw88f/frX7nsV7Ml8Moz9rDy+uW8htKtGDQYCoZhMXIiI7ZDLDr0T7loXj+M416FpWRiYvQ8miRECB27synOsACkT13WCoClO4qwMkuWjxlbDQU6CdeXTPLNONU0dvGvbSsfoLdngDJm2sdZ1RMrZkKZpluVeUWfdqlo/fPXh2dWlqKgs10qTirLdvRuz81VoSxpnRhmNZrw/Gu0dnHz+eRIPB3u7H3z43o3jfRK2dtG6Mk5GCJlzLk3jztZoXZEPkiLq6vTy+Sevfuu12XRqhW7c+srP/drht7/9yXIx/94P35+t3bJ0zqmbe+Obt2//29//t48uz7KC7u0OH8+aJ2dP9/+T737F33jrzYcfv/fZedV89OT8yeMrpqyazTWZtnVBOiIibUIIznmKiGIdmrVtq7gYZENTN4vnn35w8/Z9V5Fv/MJ3QYKJkmqxHiQD36zBuwzVINJeN7Pzx3vj3WKY2vXy1uHNQX4QKK6rerwb7w93lmV9eGNUtucRROACoA7eKgmZIZNjNMwD2sQko/GodRjHcVN2sYnR4uzx49XFlW/bYjAcHU6CNZbBQ8iNiSOeP3//YGxQsovpZTSIxUkxSKMCzWZZAKNBHqkoSlWaagKOjaIAbWfTOAL0+7sjrWNXN0mRZinNS3nx4nQwyhGZHDM7H4In9K1tu67pWmZ2ATQZEQFDCpGDCGKiVDaMoygJSiFBYCfoDCA6jtJoNCi87yjRbd32qYCUZVGR+KbTkWHmpqmVilScBOjEtqQoACqtRLCjkI6T4tY4GaXs18xdvSyrtmuWbfBxEse7ozjPUgEuSwuiRCA2CbN0lfMttav28mKdF6PRANNxPB4XAXxDtomcu6ryQR4IHENbNiEgI7kuGEIRjkilJgVhI1qhBgBlYiedD22sosQMKLQQApAGDpkxAOTYV21LoLMoAxEWf3F+Pk6VtBUpRUpX1r04X7SeWxtEIIm0VqRjpY0QemLlGq4CBkOlbQBYk4mUIeyLnTAVcX6Qoa5t0OjQdxwUtgFItA/cdd4EJaEJgK1Gomjd2FTn7CLPqXNehCORrqvjRCMpnQ08xJ6doEkizW0NECKNIhI4kEgI3BdwEdYcfKQjCd
bsXKsxc2qAJIjBG4GOuDjZwvVsvvfPf7b30LG/ji5zaGkL79raOd4Rg3x4lzi3mzu5WOD8bLyr/7pL1255psjigDX1+qaRCjUiNN8MyMpJSm1WxOSrtGhRYuLqumDqBsajS7KjGAZJzJF7NYrej0LH/4tK2mON55gUaTn3/j3d7RX5tdVMlrL1UFLs7mq3n0N7YfOfn+/cujyr+6rX/qlVf5yYfgzdHZKQKEJbuKFx6qkoSbL35Bv/RnbG2PX6hSPLbtMmy+GttJ325N2hWojCev7b3Ge7/7i2+8/uaD3//GL/+5f+vfePdxu7vfnH3wW9//ub+b/ORo5/ZrurBltfRt61V1+3PXt14evffg9RTSvcHWqx//FJpkNMbjef1/+L/+e/fPTv/S/+UvK1P9+IsHQwMfubb79oND3etlaZZmWagcoOpn/Y8dbDz6nddf6o3PPmy4Hy+Oz+eX3+4Pn0vz56WJuzeGp+ezB1//NZ5XblVJ8C9+ZO/9H94NXtvQZAMTWrP13DVXHbKLRltuWx9jZhQxitEa0bdeQhyPU8X47CG0Db6smtSmSDDqm73ru2czf31n//nt60193rTOe9e6+PzLt55c3H14ejjqb4uAE5gu6sVs6pYl0HCjmPh2hc5bRaCjNFHrFETNZmWW5WJENJgkrxufKkaB48PzVNnx7kCZkavqYS8jlTTBLRYzk6YioSgKkGg0RaBysbKpKtKELPaH+s0fvv7Crevnj+6+8JGPlqvGSz/LJwaocXXblCZptQUfql4vGwxHrSPhWNbzXn8YPBOGVVmmeb/IR843ZPLFqqyXi4rqflFEAGN0EAk6jxBSg43U9aphTcN8V+qVV6vgGquiMzxKs63N5MH79x3K1nPPJ8dnZQXDAd598uhTn35x6uvd65sHd55D1yyny2vXx5TAJVerdvH48fFs7pTP9rb3gN39u/f6o42dZNhcVqcfHpp+kkl2/517J5ez7Tzf39+7mJ3PT87C0m4Mis+99MnHT89WF5d7B71xfzw9PVNAi9nZvJzl23vqcXn9hZ9cnLz7+9/+/s07N37mz/yFBKkJbJVWAINRv5xNRdVnp3dHG9ezjcFkfKtu6svTp9cOBj/yE38ksh/s7u3EYPJF2kvvnh9muXZBRWHEYBR7X/cG4zzPy5WX2mnLwdVnT57qlMab49np3NVuUAzJaJtqaarBxha1Wlfl/uatX/z5nyM7ypD2JyPK9OtP3xyazTfeevv6jdu9vHjwxuGibPZ3R0OTvnLnhSPHjY+nxxcGc89NmiijwuZgdLC9n1uN4os8STQ0q6W0qJsyj26zz67xs7OF8fNkjukqDsi8+97x5uQgXoSnjy+Pj5+A+F/6Z//4oNj6xO291enF+xeXz4aTOk+8b1vXONcqpXxVlRSTnoJeEtBLBGFUBoueKZ0jsLZXVLU3SlV1o1qf5qHfg/lsWfoALbqqWLhW91KtdJIANCiinPNVCW3Dy3njfc0UHAVCg74ViZqo3y+G4zQbZsDr0VmUUK7K6eWqnC60tgkhCiRGCUQXmqpugg9Fmg7ShF3A1JjEtNGV85UGJvG5YF9w25JNTSDAJPdN62wKPV2yBKS2anzt6sq1WkJY4nCgFAXPWitlyaTaGLI6n/q4qhoOHgEzDalRHjjV2ntvUbdVqEKYLarLsnU++iCIYDWCiAJGwq7RFQkJsShyBNAQkQMIIrdKRIlBFwgYCbxjxyxaRaSe1b6qEiVaCZM0TSsCESAwa8TEGkSMQQDAcQQiDahQIQGKKAIiyhRZQ1nao8rPF0uyJlUgAOw9rOu9A/ugtTGoOApHBG09c3DRiUJATNIYQtk0IDGy1FVttNJaG2MtUXQOYwg+KEFCIYgiUSILSBTh6CA4S1pnCXMIwkZiarXKEhuETGoUE7o0s5lVLD4bQD50dliiqgK0HKUw+cAqlrJ2M+qRDj61XhRCdAmQRJ8orXuZyQZQ17EMpIQC9oyN/X70LgrbIgseFpdzBOgVSfDBaN20bVJkqdaurPHKZ70eSeOzoTlcZc/+YJT/7KWurFw6i9GVlwOfDe276Ffn3+nUIFnHr/DZH/AHUJ3OzERrmDRfWXzWn7Fm8nRTu2tpY021ucIgPYvJdbUutEY5d/ROAFj7hta3POxcM0CEqEA9e0T8g4ozWVMhkKgbHwnSWtO68inJ1ffvthASrIdR64ybrCWItUAF62dxlivgznos1uFF12u4fnGNBV9Dj3DthfkDta2D9axL4fBKf1rjd67MPIAiuM6WrUUiWn8zvFLz5EqrUevdBF2QrKsmwo70ikJEGjs3EHR11bi2TIkIsPBaZuz8TQgovI51UWfeucqHreUvwStQ01UEsMMjrQ+gzmYmSMBwJb8JrBVC6tQ3FAHh9d5cA5eebcK1Q0yEQISFcK14dVDxbhPQenndnr2CL11JRZu7o+V5qz2dP5lGH13pZnApdZ30N5p2mmd9Fh98tSyDoEFeLOZw88Zuj+BkMW8WTlxr+0k9b0bDnAFi64xq67IaHPSWl+YH7zyJpAaFSY7g2tZuD5ImzpcXVRqzsDyPwyOjvMmgjSGbpBAX/UFvMV2qAIbPDi+d55Gyo/3bo5MnJ67GRXVkbV9rigDOcdrvK84Prn2sn9589PCHm+P8xv44UPre3UOtinr2aNwjk9q37z8eJymJsung8OnTQcHCeSFJlltezpcXy2JvpMESNafnZT/PREvTLLNekts0QgPeQ1xZkeX54c7mpgtNgrYp5zpRCTIpc3Z8VC9Ibe2mKUsuIfiqDjGKa6cguavFbmzU5UlpTczMcGguY3p2MXeQfnj4eBV1ZobvPGj1YcXw9Hvf/21iOtjfi/H8+dvDDx+cnF42bTkd9pOzh2dAUmSmrRZ96/73/+oXP/1JjP6kt2XGZjSbYbMom3rpnY/Luj8pXAClNEf2viHMAPt5TzcrV9UuMSlJA8CsQ5ZatywppTQ3eWKPDw+Ho75btgqUTXossTccarIhiEUZbqZ1uQJQpGyej7Tp1YGJwOrWte3e1mZdl1qWbHj/pY2Um9Onh+XUN6tjQ21/vLc52D2pqYqzVO/EtmrLpmyrUa83yamE6snDJ/s3BqRMkfaic6Kqfqp0wNpJXYOhzaokbfjlF7YWs4cmzyOV/X7SH9K0mTOCxsok814vP7+cpsb2dnrvPpmzMcYiaiXilVGr1RGaJMt6WumzGRvPp1cPRqjQkO5iU0RaYZciZ0VahCIzgXSqvEEjGoSZierGg0CI4jlGZkK0WiEqBDHacBfukY6ODzHGJgYEUaS4u/QDBc+EZE1iCFlAOPoQQuTI7EUCx7ZxSlHkaDQRobUWWTSCsTb6oFEba5gjc7BWa1IhtkRaKx2DT4xiAa1UjBJ8QIWeY4yRwBhNXdkiAnnfcIxEwCxdMAolsgB1DzUCCDq1KogQSFRBG8MgEUEErTWJsVqpzrxIqIHXVzeWKPxs+kQUIhJFFE2KlBIBiYxrRBwk1rIwABCxiOrmDIgUKSLVXdJZAHnt6FEAAhGBUFkdY4gsIQQAXpfCMwCCJq2
IACTGoJVZY9u6GaIYfYzRBxQOMWiFWuluBgZJre/tAoRKofLRcQgi0hWlRQQjikiDgKb1U4HnQBIVqS4tFwiJOhQ1C4giMioyiw8chY21Rlml1BoShKSVZmFhiZE5tFopEWYQHwOj+MgYn92p18UKUSQEdj4qAKNVFz3krhSCSZFWhAxgSRNCjCECao2RIwo2TUtK+7YVJGNMvHowMjaRaLVWAC4iaiyygVm2s8lewkcVKQGiE7mU+dGnPv/RyZj/+l//L44X7b/9+e3B9ds//M6bL925PbSj+z9YvZLt/dX/5D/7mT/2Y+mtzRd/5itbr15vMTCoNKEY/XCcXCxOOC7K2WnW0/M3l22tJ5Oxv5hrwnffeLQ1LILnjY9tHd19+Lhq9p+7/snPvvIb/+TvPLlwJfJc0rNHfpQ3B7dHr33ppWV68Hff+q3T41UbKaHs9WPzrcO5tT3dg1XttFYc0AJmOkM99KFZVqs87edJ0koTwY1GEyQ/n11u7w7PT9yDi+rP/yv/ntWr/nC0tTl59OgUlHr5zosff/VHf/cf/7w/Oc6MPDcZ1pVbLdo4da9wu2fCAaejhSBB+251XLl6mpS1q63/8AO3VP3jD9QKnJNwvIIkWfh2Sirmub82VoOe8U0lBocjs2yiB1EK6yjLMtRslqVBn7QB6xaZBilj9EBEMSIAAUqLCSpPdNLyY6Ulug+LLEMU51tt0yjKxzLLDCoThH7/JP5bf+33vjixf+5H7nzuCy9npn79g6cfvH0ulHL0lhbbz8Fwd2+o+6AtoMDzW/DCBnCErAeFrnyMclmD++SffeE/+/FP3P/9377+wqqfNPMH//OTf/aNm9L++n/77//pf/f/YfyrF9PX00Q/dc0rH/m0eLq+/9qX/907Rw+e7G6/9vTx+/dX0zfe+e5P/uyfaXHOjz740l/46nf+7t/8T/7jn/7d//IHg2yo8vFS1UfL1Y989LXDkwvsb2xn/Z20SJUbJ/3Lw1PL/eMH75fJWR99UUxuf+xm9Xq4PJvv7m4qbdqyPj05dc0q1Fwv55QvbMw/+ZUv/uP/7q/e2Npavn9XJ0mWKkZFISZpPtjLj1ftydmsXYaZ991ZsKybhHTTYnCtTrVJih5ZZ9vV5YNvf++HBze3Xvnki9PzVR2ag93do98/z9NiNN66du3g8ug4yROc0bLxf+1v/9y//7/5i66pV3W7rFa9XpZnwzTLIQRrzfl0MdbZ7Wvjew/u6mw76xd53kck0IaUbI5t6Mtgcu2dt9/RpLa2tqezZW5zULZsm1XdWOwNBuPFcnZ5GTcn9sWdyaMP3ntx+/ri4qKuqiwrbG6beuUYoojJcmtNdB6zBJSOLCFWRNAbDXpZsVxcLqtVMRh4F4RImLhRuZqIAdIqyVKtcX6x9N4vhtM8L0gxaLCZIkvsmvPp0yHtFlm+cJfG0ubeRFfNm9/75r/5b/1rw4PNt7557i9k+4Vb+68+NyqSSbq0k/Ts6Hir3xv2sv7ItIESm/VHIyDz8S+9fO/77wWhslldXMyenpz/7re/PxkOD7Y28ry3d210Wld1o+bV8vDJGaT64vERqWQ+yZ/cPRxv3fjs5z7i/NxXLXl66+13Pvr511yYQrb541+7/r3f/qUnhydf+Mqr440bp8cnidISqVpWWpvQthgkkBtng9yowwf3rl1LdJFtbE84QYLs/htfH29kBy9+6t3v/1oyaxKib/7c3/nqz/y5djK8PP3wWrbbN/Z0dmoHmsgEDwc39s6evCdEgaFc+NDGalFJpPFkNJsv+pPtpMiD5t3d3dX58Usf+2p/e7DZj6f3Hs/OjzlWH/7wB0bv9ofPXb85Xs0Ol81pIzDZu6UAJ3lyVi8/9smPT09W5C9V3959eH+nt8WmQMwuzy43cvLtMsYAup2eTdM8Nca3TTWvj7RLdvqD6y+8cNK8/ZM//rPTx++lRfvP/+zX/tv/5v72KG+UUVWbDGzTxNBc3QsMCnCWJQ6i0Yl3gcCy0HxZo/HKkB3apgSH9ej6Jkq1vKzY2OEgi4RBPAffLivQOqS9RWBNVLaYJbmRJLYtNN57XxQ2ITYky2oZYhRiF9vgWqUSA6ABE2t7uZkUGSANe8YHXFar2XQ5nVVNy8gaGEEksPgYp1VVNo5FERKKypXKVNyfJFmWzS8AFmldl6RpOOxvDNXmUBR652pj00XbRGBlTN/kTcRW6doEYqyaSpFFUjpNgEKMcZTbPMs4hLKpCp2YTFdVY61WShk0ogXA9oZ9TaqqXLWq27hsQ2SQSBIBFQCKEAIKpkZbTZZQoWQSDIBVXGQWlWjyJnoSUhIVIVDUXhJQIQBHabRkWnOIQKAIc5O0TVsoFQAiKiYWEaUQGQNQZI7rHpGYaJ1ojCEKImiu3ByEip6d155ECmMROUoDwJpsRsjCzJEQnWu8qK7ALkQPzIjoGGJgQ9iEIJGJMLNKiatX0WoFIiG0idWkQKMXBoVGgCN7JNIgArH2JUEkJGYe9gd5nqAq2tXC86r2c0w5QIsJiWXGrKrZ+5VOy+F4MwtumBRtBZUQKtrI8oLYxVWDZTLoKYbYsCENutY5UaETZVwjiU1tPymr2lU+SxMwUJhsVTfSOu+5dky50tok2pg8tUpf6TVrxs/V3Jqs01LrYBmAiKzrtrrQkQBgp1HIHziJ1p1oVz4hAF77gq5cSV2kiK7IRFeL6hZG8Ie4RbJ2GCGySFeRAsAg0AUbr6SRNapY1pQcuhKSupYX+ANNCq9+OufQVd1a91/S8Vs70QG6Zi7sPnX9K2vlq9OJ8CraRNgFIzrZRhFI7MQQgK4+5lnaTl0tuNN6EOUZTaELxK2fodcjgLWqASJXccBOGgHpWtSAn70unUWI1upTZ31aCyydQrPW+xjWOcBnjh5c1/Rc7QpeA386lHcX1EIloBBRQCESIIFgF8xYa4JwJQny2nDW1WUj87NWt25R3cdfQc3XRrGrhjtkjN1nR+nWUK78ZICEwCTCa00O1vsfrw7aNY77qv1traTBusvuDw5OgK4iWmNX5AbUtbNdpSnXJ8PFzG/dPGAleHp5dPaQKGtrbuq20GXTVCBRaTKJhyjcxuf2tpNiVPmMddLbULERSTKXFD6bPL5c1q0b9IoP3rxfNt4Mi9WsPT+rg0DPknfqE6/ip15+rm3l4qIdb/QG4+T88jRLMyUxuGpzY3RxebZccjEa9tORa5ue6UPrqiX2ETb7m2d8fLY8HKrnY4tV3Vxe1mkxuvugUmZLcHXj5rhQajZri/Hged/zq0VvMl6spk3bnLdub+tmVS0PT6eairZNN0fD3GA7vcz7GW2OLlZet3o4HAI614SFO9PKuKCKJKtdU9hekY6VsXuTg9oftio8efJUZRNZYJHn1bweFBuUpqdl2uNmdnQpwSVmwCCvvPyZb/3OG1Uru1sH3moyjknm58dupRQMWhh+4/V36uZ+DN47iT4aTWQVAuhVPezvzY7PVaiLTNvh9F/56VePHpz+/psPP/3Vvb/wL7ySSK3lfFEuVxWEYrsRGo76HKvxlllN6+ViIUSgcl
BotOZoNgfD6mLhyri7c3MZovhLXy+ssTqxQZyxlNjC2H67ONvdH5NVQnk5nSP2iCUzaaqSyE0Zo4P6aPZks5i0qzLp5UCELHW5yghWvnWRIqhF2RpdJFAEUGKLWK/Gw8K3tujtcFTMrpckw55ZTENwUs1bW6RtEx88PkVM2MXQcD7ZryvFCKg9UWV1YuxupF4Ev2VhWc5J5Ug2S/qI9vhwlSf5YnkkSIMkqyoYjPdWq+qyqnvDRJOZNi1jsbG9OT95fzLKs3zw6OHx3rUbXjluWmufXRU74RwAKDAzskHVTRX4NWEHACIBkTCzcOSOKBxiy8IBRCOioFUGGETYGLOqmwiMQF1jWYwRUYzRtWuNUkDkJSRoFAmi6mK9AhgjkyJttA/Bc+guaiSQW5tYkxrTdc4DgDKmu1khdhRIhYgKTHcRsdaIgCHDMRittVI++ijCDD6ytQqRmCVepZe5SwgrTQolsNKktemspyiilIEQo8QsSVtyMYpSJESJtRaV0erZxY2FowhDBBatzLqDTKvuUmtEAQCSYhGGbhOvb6okpBVqLcwcBRSi1kopBSwM7CNI8LZrhVvT64hQA7IIMsTgPZFEAUKMHLpQoFZaa60UAkLXaxKYO/uY8HqGJsYAQJFZK0VrdWmd1GXmKCwxxhhobSJGozQiBu+0UowUopfuEUIRI0aBzpjU3dGNSYi6mYTogheIIbLWBhQqIkKKwsKxu/f5ECILiRCqEIMPnpRikBAjgyAoo1TwQSHmxiqGFXMIvo2SEFqk7paliJSiq1uTMDuOOoTgEQFakWiNUVqtmoYRIwixhCupaLEqR6O+W5ZJmm4NnrssV2zEpOBzDIov5rPmrG4nkQoTj94bz+H7x4+2N689fP+d2dsfzqvm1pj+6TffSRLe37/25//cn9OD+I3vfvtTP/qzFnVdtW3tev2sLpvD5dx7P74+KkbN0fcf7RfpVz/1idMVz84uy+k82cg29ocPPzhP7cN/8U/9uL/7YdlcnDw4Ee/ZO9cIKVPsjXmA7zyaLr/78Mb1mz/9ozd/7pfvizJtQl4xkbYZUhRmQJEsTayyq2qVauVja7RWKJ6XeV9F365Wjzl4TXRyuMKoN3f2CLheeWG8nM1ZYgz49L1H+Yd/+xO2/eitdHZ+kT6a90AdTEwK7Y5qJ33YvJXrjbRZrM6WMrN6v0gPZXbw0ub9zF9cwNMVna2AG8m3BipLfOOnVeV6+ulFqUszW6p+YeNJrNpAmlKrOXLgyAj9zDhxKqEaIU91LRgCm0QpUVXplRZtVAjKA+mkUIjSRLZaQHybiLGkLUmEhELTgCLIk1PKv5EWv/IbRzfujFS7QE4kDpOFzpvFX/lLn5j8RAoP/Yfv8j/4u/dPjpMPj44Gw/PrI/+Fz42/+JO3N+9swbXC108dH+Z9+viPpr79zaqe7n/mR/de/oqrt1+bVdcm6ezBt773j/7Ln/yzfwnOb5QDsv2telY9eWdx9yG99+B3J3u7H3v58z/8zW/+zf/w/1YuUv+w/Jt/9X/+mdvPv//Nxw8fNDG3PYtRUaKSs5MpquFFi7c3BradT6cLSAeDnY9FnpG7e3r4gX7nzc3N58EUxXBz0dTJjZd6QFgtH37vN4KXp4dPs50dXUwA8dorr42vbTXVcmNol4BekGPc2drUOHSsdra2Jo27DOUzlKlCidEnxmRp0fhGI6yWF+ybrb3NqIRFX54tV4vaaDo6OsqyZGNzbLL+5ckFCDfzZnE+R0ju3Hk5gKMEU0xE4Xjcu5yHRZy/uLeLQUcUC3F6fNZLzcHuweXybKjzSnSS5s1qfnRyNhkXh4f3stxIHUKImqhcVAFj27jj0+OtyXOj8biqZ+ONvpZ6Mki/+/qHf+agP6z36ujaUoGWIsu8r3XSIzuo61phijplpuUyCKqd7Y3L2XI5ewQQYgSb54qYSJXVVKEDjtYm2iRt23CANLNFry9ILXMkntdlaqWsK4uUDHoB4XK+aMrShzg/i48ePxzZLCnPv/mr3/rs5z6zobzXqIqtt7/33igf37w+8fUssNTOgebZrK1dUDY7X7TVe8ftol3NViqjIs8GOhOamSxrQS2m8xLn11+5ee36q9/9hb9vlJrsTgYD/eDu2UXldkfDtD+clquqrC7OnxxsTu589OWLhT8756UcHbnjjP2f/ue/0tubrGaVwaSerTavbXpoeoP+vQ8ONya7ZbVqmlr3mo3trbqcz+enAEmabw/SZHd7vzeekN3d2n2hXJzk2225rNmtsmFOdFDGUF+eg/DqYpGk46JnnSvvP3xrPNjuj4ez0/nWztgz+JaDljwdLM7mrgnMtlwt+nnSmwxDDPN5XVZ1sTl5boyD44vDcvv5z33q6IPvbvfsg2a+OIPJZHD/vadqc8+3Oub8/jvfMUn5qee+MOrvoc5UuuUAVRIrf764vD8cHgRUurcJNlldzgnw4Lnb44O9N3/z6zOXTTY2dHXaXJ6oVO4/vjuYDJfLNhrKjPbWJ1mxkw+7s6Bp2ixDRQrSxKa6KsuqbFhQW+WZ0UArskpk/+ZI9+vl5cz0N2NiMbdJak8PT3vK+BarhdQsNSU5KZVi8MGQMiBKC6GQDU2M7KNrXTeh7oN4BKtUQoZQmJQLMF16sMmiWZVLF6IvV2XtGI1NFIyHmW+ay2nV+rD03gVhJJvYUa+XWITQTi8WlV2uVjUBj3LJRvlg0CNsooirm2bl2lDGCIjo2pnOgxMejYZG69WijkGJtsAmeinSYjVbUHTDYmQHfdrans0uq6opk4Y0lGVdL1eVC0luYjQarbFWW9KmzDX7EAQ4tUqT4sggoBB7WvVtoiAaLVaJkpgalaG3xiZIBpUPThMopZiiaN1EHwSbSFYrRah1xkiICn20NnHeiTLL4EMILH5db66URyThlIUEWhfaiERQVU3GpDKIpIFVZgwKGp0SO0BNki3qFpXywQVmjYpINz5wjBENCmltfQAXnA8+qug5IGIrkVpnK20SpQhEKC/6la8ZIjMkVkcfYusIBckySBtqgxpiiIHztJCovEdunQ8BjSrSCWPt2A0HI8jA61i6WSTfs9S3OlWWyEblnEYkyvN+8BUJT4qebyH4djDpY+PL1hXDHVhJodPVvG2iKECbpXnWy5IiMcVitbRpOT85VhCM1unQbowK8AExzqYVPPvBNX600zqEpbOXXL109fd1qdUVDUeexc3WS+lCauvWdkTsvBtryPUfjvvA2oKE3bh/nf9ah6lIAQgIC5Jw7JqQOxjP+k3rt6/xRGsBCPEZrGZNrIY/cEghUtfy3kkV2Hlf4A8v7Sqv1ckasBYb1paXNQJb1j6ibg1AsEOHQheM4y571rGQ1lKQdJmDZw4ZlCtzTgeLXcfHQEjWjKMOIi6AwmtEtDA/g0WtBZq1wkadfvRsLyASsazbftcS0tXYb50yu1rlZ/+x/k2EToyBq2q6Tg4TRCLqoEBrrUeQhbrdKcjQEUKv/GDc8YnWO+KqaW3tTuuyd93BcUVVgq58hkUQqcMVdeYhuFoerLMSIMAdaqPb50LIsvazd
UIXXMlgDM8MWd2Okk7EXoOgulwG8x/SR6+kops3N87PpvPTk43dHTtWyIqrsigMoUlMnqUjYG961mTF/beOjy6XppHzyh09ePS5z7woyhw9bZbN+elFdfn0zKYpMDWzGqweBkxsr7+RCUBuzfxy+d1333/49OnnP7d5c7Nf+3Z/I0taD9JeLJ8GxGUzqNuqYSa1m4RJbrSQCe1iMkmqdrlczbe2bmm1uao8R6dMuru3W12a97/1Owfbm6Va9no0rf3KxYNr/OU/8dW7337Xp+2yXY2GufXZ6flF5fjazZtPnxwWWr20v+ure7dfGM8v5kfnU5X182IzG4yIYrNYDsmi6LYMUaLCYX+wm2b989l8Y7S5nJ65qr1xa+dsVh2fPO0Nt0bDQRA96I2nwefjXogiTaNtf9lMj89PTNJnaROlorIuhsAxEQxa/+D37v7Wdx8u5zWgZmQQMQZJIylmzxzZhVXSa3/mq6/9u3/xR289J0+/+4tHH7T/8p95ee+jWxePH0nAmhXY/x9V/x1tW3bn9aG/32+GlXY88Z6bQ+WkUk4t1FJL6oA60Ty/BgbQDLDNs3HAA2xsDLxhbPyeh7E9DNgYaJpg85qG7oZu0UEtNd3KUklVqlKle+veuuHce6IR2OAAAQAASURBVPLZecU55+/3/lh735LrD2mMO87ee601V5qf+Q3GxibumPnUL6ZjpaxRXcaiLErQvrNxSUlRF0W1UBlFhnDn2qVvvPDNp9/1TNf2J6NR3rjR/DRRlNo0NDQeFRmq7kBPpwvt1fam6W508twF0JNqvtbRmVIea9vBoIQYuAkYASm2EfSyzng6Gi2cA39267zJNhYSe9FsNyrlq1pp1TsZz6cRbuxcoPJBMRqRpIsSO/1OzQooun3/9c0+uzJPNvsMwgrzcl42NVdTS11B3c16s+J2N6Vy2rjQdNdiN58o3FxLtm2SgsqG3UKD2j/Abueqh1sIszNr0Xw8246pqifVVNgxIeWLspfGs8mRFX/xfBf1AOA+ACzzdJjbNqxWU+mca9PsAjIRGmU4tHoTBoQ6uLqpUcBoBNFaGUuoiVBh3XDpGi8siE3TIKo2iB8JvPdEOgiAgCJtjVVKoZIQhH0QFFDkmBvnvA8KUWtCxCiOksRGkbXKcOBW4EiqzUsSZvDCElhbrckwABIwC7MPzlkbheACQAjBsRBiCFzWNaISCKVrNKEghhCQtNJKKTKW2ltz8J6IEJiZV1pVIFIIgIrQmMhGFjShBPYM/DBAr30EBmGiNmKI2t2HFlYLtw4xAfHeMzNSeztlQlJKGULVSopICwsDh8DUJlmucv8RUJb3UkEArVR74w3es7AIllWjDURKIwhyAAAW8CEgoEhg4fYmjkozLBW+ilFouQTU7gf7ACyECoEUkiIiUAQCRALofcOCCKCQaKW6DSzOew+otUZUIMDCWmnPDCqgSO2DsJBBAAzADBi89yEsI5hQQVhGqYeWUmltlEJUChFEETMHrwATYzypmgNq5Vm8C1YTISEBB/YhaKPrxgdiQHCBQwCtKHchhND4gATaGNdU8nDQQJmkCyVHEsU6rS2Pqvs66h9NF2gNJrRgd7hY9DvD67d2DS+eePypRx95/OLj6fzl0wFH87v5ndcfzJrFuUvTw5O3sau+8r29abH1Y5/8OHOTJDpJdLWouFpkMR699e27r18vy3oe4Jtv3fu5f//P3hjt0ag6Pc3vPJiozL5691705d/5Ax/56Nd/9zVXL1w1H0QmKJ56MkP9zI/+wNvf/kYYizKjP/LjH/raN+/cOnExQ6W0aySAB5AoMk0TKl+joRpdaEpDxmhzvDhNkiRuOqGe16GCICpK68UizpL9Bw/Ig4kVl5WNCVVIENfi6qzc/9g56Nr59o8O9XCnPj6NehaGEaxF8MbBwa6bzQol0H8XYONntXz25y4t1swPRE/Ovrp/60Y+3Hzs+ldu/psv7L41700D5oKjUSVNSMlqrasaXRBrLSJU8+C9EIKxtKidCxLFyEEaCC60TYIMzI6ZWcQH54PSioCqRQ2MDXhjiLC96tB5aSpvkFzpkyTGiMsyj/vp8aiZThuSoNtT3vd/4q+/dvl/dDZs3b6jSkoog4Vk8UK/eup/+XsL+/MvPr7t/vhPbX3mZ7azDQfndL6Y+rW1/lNbZf09hVZtvyvtna+a2+bi/If+gz+yefVT3/uFf/Dtuyc/+h/+ZZM0gc9eOP/UX/zzf+7sU+/Wv/qbV8u5C/bt43Dhfe99/x94Vv/av77+5h5bPXbz+T033E7ttL6zf6rPnHvqx//AeuQmR2VVTlKhaDHtDLaGG2oIdvZg9/0f+OCXfu/bo7J65N3vE2XKSqdJmnY3y+nowpXNZnFMxRYTTovuU8995Gu/9v/rWLuYOdJqkvsnr6wp7Ixm1ebW9rVUl9+7EYyGkxkAgNBikaPFTqdvlXZcKKtdHb750vfWesMr5644V3vv+xvp8b2T6bwRpkHUEKGyvYLlzfujTre3uX0mz09ZKedcFCedJAtFXfmAQWdpejoFbWy336/KosgrhaooyVJWFSUC7h+NXFNfvrIxnUxQp0URNJlkYKez0Vo/nef9o+PDcjoeDJW4Ji+K4VqfO/a7N69HLNPRePPcRRvHGtHEKelub7ATXK5IKaUmoz3nGBXl01GV50JoFHnPjQ+hYVJRb7DWVE4IrUqNin2RO6e8D70stsqKb2YnY0XoahSfdYfbJBMTE0dVXs6ztY1Ep45mf+7f+5Ekqr/64mw+So4nzeal9flx8fiV8+PjyfZgUNfBN9ViXC5G1fF4MjizEXWHPLvfzCZ5tfBYIqoktahMVigSbuqmmteZubC7f/Tmd3/l/EavWlQHeyflokps0tne/JFPvicIiYTepoUIqqZ58PZ1ygZnL529s3/3fR944saXvx1ZLha1BGdB7HrPu5JIGjc9Ob7e7+o4Qso203hLxUXOIYE4X0A+ryWl7uDiydGtKhyup8Px4fG5nfW78wd3bnyje/Z5Q1qEaucHnbQuxFoK3IQGTEhOD04AQZukLOrhmcF8WlRu6hYhslHaSxalr+umm61Z0tODvawTx/2hTdBz/OjOI3wfHuzeH9+f19ZdurTZG/aO98t4sJF3ZO2MGqTzqFslKtm7dTfr9DPbWYxPtHEmswD1cH0DmaJe53QGcbI5PBsd79+b1WV92kTdizqPdFONyreGncHRgwMm11/b+N6t78X93mCgdr9zc2P7zNrj6p3pKmBe5qBRiOJBP+kODo6PVES6rwJxUXkzkPPnY25yzxInkGVUhjIQb5zt57kvFe7tF4lJNjZ2lCsavzDOs5RpZu0gKZvAmoqciyp4htIVWiMo0qIIoPSljeNeJwKtx1VzMspBAAOAb5ShpJfGiV5PTFUUlS9daGphsEiIIJRoIhBh0HEym9aKS9LU69DG0KIBogpcqGYNNF4CuFAFoNqxiWPHTeN5//DkdFb4Rsga3Ym7wzRU5fT0qFg0oZHx1GWdOOpaVxUhuCzrzBcLJDWfV4u8cpNS6bqT9ZUBENze3Ir1
pGmavCxbkQVq1FoRgCZEFUiCEk6JLAmJR+cInVKoQAjBKqOVeG6UMRE6IR2IXAigMLLaCXkfbKwo1qoU56WnTVEF79CzBJSg2AMQSxWWsm7lOFIqJVPmTShdJ8uQlAIksq5hBaCUrmongYsmRHGqDENQiKhZCdeKWJE2ti+WKW2kxjovAI0yqA0ChhB8DNA0AVDHOlLKOnEChKgKXxdNTYqMQmEgFXkGcdD4kMRx7XF6PErTjNEpRLJR44vEaBCnlQ5YB+1Yc81BfLlOGQfrg7KRRQnEgsoKO1QQqMmxZCps5AKqqWsGSbZoCq+VR1mUTUNmPev3srUAutftWFeToWY6qfOqqZvTk1LqPIiv5mF1FSyJ0FITs3pPWuKNh8G/K9SxfJFaVeQsXyRlGZkMq4joJbxYCkfabyNZRlNTq4hBbANkqHUZAcIqjUgYIazIlPDSadTSjSUJan90JY9fghYkQHgoKGlXx1uz14pmIeKyy2zFmWC1Lrv6k6WpaYlSltuOKACqTSaClmIs85dRuI0GUkQBuPVQScsfMLSOPGFeZXtja5RbMh8REaGVTqfNsV4ehbaypaUe7a4v69EQ2hKhZUIPvhMQjohtKc0yroiJWuXVSoqFvApCWkG0ZTR4q0Nq/XUKUT106i2NZ7CUNLWDqwBZSKSdVLRSMVmBvHZwpY0Taucu7cFdWtLgHXImSwrYaotWXEeYVgynxU7tIjgumZPAqu2tNUYwr1x/q5MHaXWWLq2Qq58X4NaV0DJNxFWM+woVvfSNVwf9td7a8M6Nu3GvE8CF3GWpifUwL2xRpzdu3rm3fzjL83JUKZN4d/jUkxd63XNfe2HvpC7HeZWQ66ddq6J+0gFL53e600WxmM7qwGkWN0zkODGqk1kmuX4y8bZ3ZSveu32QpYQ66C6kkKEZKLORGQPcizBNkuHp5GhW7M5yn/RSltqXC6vQgM+bpp4HAPvSS3eeffKZ0eF+Pi3W1npFbeuqePv6vTV4zXo4uHcvNLkYAZ83ZSlijk+Oux2rvezu3t3sqtt3Z9P9yWxRRUnhxoEXRztbvTi1sc3Yu0lR5rkbrG3MxvOyKpTSi8XMqlgwlqCySKu1rDTxXNJmVupQd4YYkdNCphMRoU2GaScd7miOOa8nZZNrY1whu3P+3L998a3bpwyGtCB4DgEVIYpICA4pSJZGx/eOfuQn3/f3fuEvfPfzfz9wKjzfP56vdbvu5tSwqnzormdx17jcnR4epOmFjfXs5DQ/PTpyjdOKJieTxu9rkiSiixfOLhaLUuC1gxuddXM6OzisTnwx0kaJFEkyGGZrp6U/e+7c/PCt+Xha5iVjFMeRr0qNpDR5SGtX5vNp1tcXtrdGU2U63dD4Zn6SpJFQOB4f59Ws1+mfLuos2xzESUXCkVXJmg/lnTt7m+s9pawAHp4eyeIACI3ZFuiVeSWkF+PFsI/rG7S2nnX6ae18EMg6UV3UZCJSZj4dTRejsj7udGNrY0smTjJh1rq/tnb+5OSWpVxxpbCbmO7Bg10Ozihd5LUEIGIFRWBY3zz3ys23LfL5nc2GF3Gkq7zp9dXq3gCBVw1czEio2usbRVg0aWn5soTa1W2Hl/OOSJGgVkoJCjACAGHduMa7yjXOsRMmRXpZi04sXilFyoCA0sZo0kppouUNT0PjPHPwITjXhCDKkHcujZLEmk6UIiEwKtKeGRERCZBbFQlLTai0NiISQhBGFnEcFKqyqdn5wNw0jWOvjVIQEUij9EOBJxBao8lYozQBGKWYJQQWRO8bYzQqJFLBiyBqQ4BIpElpQgUMhEhahxBYUAiZIYQAwHpZIdm6wlrLNJIibE93DizCLAxCiIBolGlvj4pIK4VtkwKJa7PDOYQ2jZvamzUCETO3X0lKt0rRIOIDe+amCQEgAGu1bAAFUm2Lpw/BeUdIsbbI7WOXBdCxZ09EWgILMwi0uUhAxIERRCv0oQLUAAjYmvVAkWpTtkWCC04EPDMRQcC6aaw2Iq0dr7UxivOeRFVSC4s2WoQ9iwvcpj61lkVm376mKKWNNkopEREOCKIVOSGlVYxKEBIRAHCuzbuh2gWvAJidazR7a2z7bHUiZeMJKbJR45rgg/M+igUBiJZLLPNp0R/4ThT5IrB2ihz6hWHSklpjtXKDvs60U7KoJN9a7xXz+XQ++8rXv14X5g9+9k/s3S5++k/+GdR8/bXr//Z3vrT1+JlIDZ995knSGBnlCz8b565pfF2U09O3vvjS7Lh84v1PfuPbdw9n9W/99uc7KgZVzpoyIJ3ZiQ+P/eH93Rtv3VG+ebB3N0ITpW7j3Mard0+b05Ovf+HXPvzux5MYG7+ICvhP/9hH/5d/9sLdI1/VdZoksVHzvFSEHARFfFPH2sZxx3OoXNPpDrrWGoveO6M4SzIERVqTkXpeo9GslVgqmhAh2SDpYvTej4edy0V/C5r5uHHjIjFjBydTuvfi8VmJe88/0n9K1/zG8PGUYtlYNDXMYn0WqgcmuvXsxyL9yOnlz8Y/fG8A6ql/9Y9uvLy3/mvf2KsWVJSLbifupcnJnKtFaSMTJRArFA8mwtkipP1YKdEBkTC2yvmQRFRWDBZiK1Zh5ZSgBub+MOMQvA8gohD6qU6sdk5664kWbpwUtasLbyPKugihiTKbJMm9vREqhcrM3eC1qtlaM7zdrPVhvMixCrlXYKS/Y6SGB87+t798+v/5V8dbpnrmKv7wZ9cfe2axc9mAqseHb6dbR7pqBBHquaqncPJ3r8QHe3f5v/mJ//2P/r/+w/MXf/Kf/cLf6Y2vX6CLunPhq1988U/9x3/oq//sy+8/c+HDly797vW7ew8aSNJSoZoXAs7nDlBVs1k+2s856fUG/dRYHRHr6eExuKof9xrY+F/+5j84rhypcO6xx21TYjTopH3e3r79ylfWtjYW1Wh2tDvcippq8egHPvTlz/1ipxsnNTgxO+v9pN/HZGiLOQa3CbBhcRJW7/CCg/W1wsu0OmXvlYJuL713/fbe6fSDH/3BR55/7MWvfWP/YL8MvThKLlw4Px+NSWGU6Oks//I3Xoyi+Orjl8/srGnNwUPS7dQVL2a1UtqIPhyP48ksi3tlMXbsPUvpmzNnLuazk9PRSZzQ2rBbZImX+u7uXlM2WbIeGkBUwk4AmrJKsmj/5HjYvcJBksRSJwpJuHJx57c///s//IEPzusq6zoUa5Kk2xnWtZ+c7hK6EOqk07Mxxp1Ob3B+cvwmagKUbn8wdmPX1HGURDaanj6oXaBO9GC02zMdxT7NespGTVPjYko6eF/YyBCqQXbh1v3vXj6/Pp1Mowi1NsW0uvXqC5f6h8nwypsvfM9X9ltfeeOZR6697yOfuv1LvzKpJ3Wef+sbXz3/5OOurOMElEKEDvj61tu36qbMhSSIjWzlmkVe1/W4F6X9XndRN+96/qlQl/lkbiJ9ZnPt7uRBmvhPf/b9hyfh7OVHMlceHIwmo4UYd3x4utbrQl1vne8f7h7URXX/rdHW+cejpOfKethPmhqqvHI+BCddiLc2r80XYevSlb3R4fzwSNT
JYONiVdVPPPrMuFwgNrobcdPDTt/XzfbOFe9OWXA6Hj36rotHD+7t3XxjbauD3QGgIHBAhyq+9sSzt69f16S9b1zj7t+9x47TfmbQaKU5cL+buUjKybjM824nKYq5o04/64729sazxVpkE+3757am8+n903s7og7vn4Z4dK5/Nb97eHB0SjVvnevaxHa2NgjipvCTo+P1na3acxb3zm9fu390Y21te3RyGqfJ+uamyavKL55+7xM3v/pC2ovU1qbpbU3rEtnt3br/3NWNM1sDC/VhKjVOqjprr4I4MgwhzUzRlMIqkJTBkwWP7MBIcGgc2lnwdrrAxmwT6a2NJIxmECpmWZS5ONMjCot87O4aRZZU2rFJREohoGg0lWu0MSJcNjWpSGsN0AgTiTABWV2ijEcLX7uCAwskcaQECbQR3Y2tinExbmovLrBS2LNRrbwibRUa9MZY5yqF4J0AczeLuyZSVnEITgIr6zUyBSUQa9VNI2PsvHR5Xo8KP1k0QTDqqrVe0lRFcOV4OpvN6yDk5rmdaG0QmQEhjkuR0DRBBCvvpotckI4nIwGwGmOrDUBmdD9K2XtAaZoGoPXIeINoNGgW1zhUAMKxUbWwMsjAWtmKnaJ28ctZQwjBMyplmawCJgUWCAjRizXGGmwYgaUWQghWU03k2YewnGKiAkQ1a0KJIgAeYdwsVNwx3SGwdU4QAxKiFo2YaKtIM0JoRd6+URAAQKQBVolRDCGQN5EiIqUg1QqkAfAAENkkiDI2dk6kqUpXqqphQUWBDC3mk8j0Ip3Ui0VTew8AxaILzkQ8qcaRNeCE5wVQZSKdl7WkHdag49hY8NWIuQZwsYmbZoohiLgmLPKmUhYJA8QcRcr50jUFkaLYneYjY+LesIelb2oXoEMcyrwM2kRxGhFIGs1Lmk/qeVmUi9wVM2uwG6cP5wVLsQuscqIJgB56fVqaxEsFCq6EKW2IDwBAq0mBpUuofbqslEcPBUkrktCm0cASQbQCExYAQdXyHFwyIcLWSrQsGVuGPLTYouUnyyfZQ56Dq8XPFezBFfDCFfh6R0+zkrvgO+KT9hRalbQDqdZ6t0z0XpKSFnCEdu9lpa5fQguUh/YvgVb/Arjka7j043Erk0FY1QwvpS4r0MYt9uBWXtM673B1QbWNXdh+MbVmrZVoZ6lSWjntlrssS5nYKmQJHw70QxseQtvXTG1WBqGCNklpeUjbgyb4jtIMlzo0BnpHVcStGKg10OFKd9YiQVjmp+IK6SyDhvihrqs9Lis+J6uhFZAlEUMly33h5WAtZWmrHWrNb/AwuYgfroXDw59d/labErXale9HReNRXs3CdHSiyJzsTZ1CV6lOPBzPju/sHuu4K4CgNs4Mt8cyncyLBuq37+5tbvTv7B2sn9/eWdtUvj6/2Zuf5qO66Xayo8Y7o4fb/UFGzrumDNtr8TOPXHn15mHpOC/rV1960H36gk8E1pI0E4g1YCgWM603B73h6dH9QMdlxCrmrjmHbtrMR1qUNWwTQk2Dzvob374bKvWuZx9bP//U17/IZl4VeZHaru7HwOntu/uXL1x46uozL77wDTZpaPJuL3EYKZZ6VtU+XN45byxPDmZ1ZShWTz516fDBfO/22+vJIzo2lZ+AgJjFhTOb07wCwwimqWAhs06UhiQr/SLN+obccG3gal2v9epqdm7t4u7BW2nScwQ261jbn849oto+vzY5nsRRlHb7r9+6/4WvvHIychoNIHgOpAgVaauC8yYyPhBadIQQpaPdu7df/jWb0mKKnXTY7Y7qce7K0bWn0kVe+aJ0BTeVobpT56MIUyWcdXRj9enxybyohms783HeO9NbVKfT2Wzj7NZw0L/z+v3NtL97uIiTLku+0R9YMXUtWlkXXNHkaSyb3bVFEzY2k/l8qrTuxMYDNUWjVAI+qRvfiWxkofG5Qp3ZdBYSVyeNS5HQiM7zMnPlcK27e3CyudHbHPRmdiI6Asb54cHaTpp0N48XpSYrGJI0xqDu3j6kCDZ3BlUoQrCijNVxktJJXuV+wUaJyqR2RLaqWEgDovNWAjekz9nuoBfPazqzc/aN1/YXCxUn0dGk7KSakEOMQmFzfSBFU1ZVkp4F1m/cPjRxuHJurVgc+2q5ehBCAERtdWBufBDPijCydlnUTtoLsw+ti6oJoW7qwGCMsVoDcxBGCHWAmlk8l40nTb5xIKRJKa3aO5Ii1V6ikdVJHGuFwhJ88D444SChqGsMohCIUBMBoU47kdakVBAhBkVKaXJNEJDA3Ob6AIjWWiEJgFLae/a+7apnIeAQhMV5F4IPAhSghqCUccG3bwMiokFpHZHSut0mEEFhCURolQ0cuM07W8JsWoqhAAiJQOBh+wMLBEaBFSFCZkEJSiuR0D5QZZmN18oohRAVtY1tClcknRCWSw2tNTpw21DQNr4Bs1IKcJmy124SIXGruCQi0IjMQTyHUAc0lgiVMiFwYAbh4L0EDoiNeEPt0751EmPgIEACEELreyYitZRxArQ3WR8ktNxLQGuDhCAYODjvnPftGgwRSetvY5TAgbmlMy3y8qQYiJAcePZ+Ga/dLsWIOO9EgJkVktHKEBFiQGyccz60XuRWAQYgHIJWFDQKYBOkccGzb5rGEAUG0opdiBIbGtGaEBQAGGtYWJNpn7TeL68ClcTCFMdZUc1JCQFmyRkMWWRVSmJFxkWRJDpWkqT60mZ2ehAYuidHHBO88cqb+3enz33k0zbrXH3++f/8v/2L/+v/8T//6Z/7UxfWB5OTKukQMXX6aV1P1zZTOt1fnByMRm7v5qip5OD+YrL30pOPbpxd73zkk0//k1/95mLEbl4cN7cn853QqS89eWG6f2fv+NiM8qguuhGdWX90dFR0h/Grb+ylRp+9OPyv/vgP/OIXb3/59dt1GRgyDpAvXNaNJEAv6+RlWOSFiVTa6zondV2HpgjMNrJN4yJqiEOTN4OsU9aePBulYnaJ84+t4/NPTN//5z+x6NWTG98aVP7kNTCPsbl45sKFx+ztpoO5vnDV9NcieKaEe9KcqME54Ci4RMcheXcDap3hjPNvSwJw9+7xjZOD4/CzP/4Tf/f/+kUVRYu6qXywcdTRiRMpplWWxoKu2+sFpaLUVK7GgL0kEiRpapMQKxTUTRUMiDWmacAarYGiLKobz+yYUWmpmtKCe/bKWdTxq6/fdqEpvCuEGIMR6Vvb6fWqxtcuFLlXVhOIjlGzzKsyivHsdvpgrypdQI2gUESSOFMBd6fR3ivywl03wN0PvXf++FX3ic9clHm+diVbjA49++IE3Gun5z5wcfCu+uIfwI594ZXf/NfXGE78aPLqF84+8qk//9/8mRGFv/wL//0L/+jv/frf/Mdn17bObqmbD3IJTZKRiaMosrnGMeDZDUtNUc2lt9Y1MWkNMvGxkmuPPPFLv/71/bvTrN/56Gffo6Q4vHN3+6IZl74JamvnwniyPx+PytMiyrpOx73HnvrUz/6xL/7yPz97JjsY85lHL/cuni19F04bIr56fmNvdBpOlw1o3rmkG4NIFGlpSI
K9vXuY5+69Tzy9FpkHu3dYbDHHwUbSTeM0TVxVMuGrr70eRdFTTzweddON7X6Rl2xIGDqdfqRUWdVpR0eGbBqP9g7XOhcnk2BiZa317CfzI1/kWSfWGmf5FLlGVHXBiqKZW5zdvFAsRoupA0bjcZgk7372sX5nfXp8opXNF7N6yk9funL45sFWNztNaoc03BqWk9nx3m7pGmtsmqY2MtwUQYhrP1m8qaBM454mKvO8bCpN1qkqHfZ6bnhwMM5s98JOcrh3MEjT2WIy6G+HxlXzsavnpGG2aKbj+U//1M/4qAaaJxmXxSzKkjjuvpXvvetTj9Wa+s9e/ehTZ1773XtSHn/lN//P1HhtgkuT27ePxkfHfjwPErYubd7evwsNUPBnt9aOJsXkdNZUDqxKo65Fr0EhQhLjjVvf1U6MVRWrl773tqmaqMKjO4eD4XbcyJf+zVdeffWtbG1Np3D52oXhuc2teFNoLcwOtro0Pjl47yc/XtZNNS/vnR7117seVKe/PhtNs2SjMLN5eVwsRr1ep2Oj2XyiArKfjme7DehOaqbj0Tyfnjl3CTxoVx0c7z729NmXv3Hz1ptf2dh55MKFs6PZfdfYKq8V6s7G9mxeGpM9+p53z2b7e289ONk/vnD5WuPzbhx7L2VZc1WnA2TH0lQkIe6kvX4yLTs2zrq9+WR0f+NMN9Lu7ZNjHafNoV27tFN0pt/87gsnhw+2rZnPwnd3D/7Ax7rrW90MaDabGzSdpNNN+3m+V9WT/dO3PczHRycE3fGp63TX2AcTeHpyZ7a4u7l+yeWF6jBgyCIqH+x+9JPP/MGf+9M3X7nxD//uP00z3rt1q70KTqfz3qDTsYkHV9Yl1CEABvSEtppXoH3BJ49dXidN81nwQSzRZDSrFrlF1Ab7KZyeTlRInIMQnO1YHUvaRRUCBAgeIEhENq+nWnwURYRpapXSzXgxB5EkslE3GheOhZom+OCM0QpRGep1406M4uvdO+OiZNcwWYMkBEwq9GNjNHEQbaD2Ki8bUjqKDRKAr1xgcI5YKZU6wlogitNIaaut0hZQn85PGSEYyAViGxOacj6qy9y1kc8KXIAgXjNpFBSoyrKdi5KihrxT3BrGvWOtgCpIlOZID7TZ6He01Ys8b6o6eO+CUAjBOSKJoogwEBOKpCbW6LVSgKRQhyBa6ZobRVprq0xk7YAA2S2s1mBt4VnHmrwr6oadeBEdJag9B6dBGRSvKAA17BsJwoE1NCBGqYAcQLQxpEwvG0TBVaUrXCXMURJrUBwEURmtKu+VVggBSBTqvJwqh8KsAYSAFAVmz0QAVROiKNGgMDKVhMoHQ5pZfOmsiZCodEFRJEEvZhU0wTM35DJrQNUOKpOaAOA8euesEZcgGfDeFTUnJnXlKYepjWLhwsnE89yHkoMzyIJlEFSgSRlfla5u6rJi1FqRKGnKQuqmbCBXqiOqdtMo0V5YKvSuRh+wknI0Z2IS9KjEB8jrFTQBaHXjy6RhfEh1HubNtOuFDx1FiAAEq0quZSTMCiK1XidpGchDb5RIq1JfGr1aU5UIt225sOwve4cr4UpzgwLQinSYUZb+rKUECpeRONCWc7VMqYUrS9DRgsbl+SvLXQAAoe9rc4OVQ2slZVl5laCNBlpGIrcvsNzCE/bLICdElhbkiGrtUcvfIwRqPQQrcRAsM6jbHhlaqnlEAJGX9rlWhsWttojf0cJzu+mtL+H7QoLat3VaHnlQ+HC20GqxpM0kIkRc1ogtcVSrFIJlJklbx0YrHVbLZh4qrxBpGQzFAAK81PcsVUgiCAyE1M5k8PsMbtiOjODDXKZ3WNPDqRbI0oYBKCiolkFRrdoK2zNBlrImFpHWi/bOiQXAraisRXUiS4+awKpxaGWta3+01YXx0ju4+m8V3KVwMluUc7e+tT49POxe3oq3Bq+/8WA4WHvk8WuD9X7WUS9/596VC/0yn9WnVdKP4qFZ306M3mTjDyYjzoueMCn1/ucuDjZ6IZgv/P4Ln/nMpxant+ezcj7Pe2k8a/KLFzcjYyeLenrv6O03DgD4/R9+Pl7Pcn9MlJf5jc0za3eOb6wNtqbj+xEWcba2sf5IOT+xHIcmPzg66vR70pjTQ/fmzdOL6+tapJ4dPP38YHMzu/XWza996x5wApoDuOu3F0XYytLO/sGx4UXSjZsqcMV5IcGHg5PpmfUEGNNhNmtg7whCtPb8B89ZXTpXdpROsk6xP50XU4K01x1MFh5USKJuVY9AlG/EKaWIjWMNJWbeSZiUucn6CDqO+/dO62efuCDF0Tyvg3MxRZD2/uWvfOuFtw6qio2NkR1AaBV/IMBNEEHnAIAF/Xvef20tzf/sj27CwdeuXXlqllMV62sf3jnZHzXNrG4gNIlJ1rPeBbLbCJuj0eTBwX7VzJv5zHnS1g4obihK1mPWHk2UpH0U7CTST6PTwyMELB0D4tl+H/Lae29QIEwG/RSpKp0E4LqpbSK9dTuZHgGQQu53Nmx20Y1PJovDOJEk0YrJ+3o2rxi3L1y89Pb9t0A3SUcTV8Ll1lYq4gyEK1cu5gLFrEg6mcFQFo0xaufs9unpLEgTXL21xibV5zY1eGWMFR3VZRUaIC86mNR0Ttgjlt3EeowbRmiqmbAmUGG2f/Rq3ZwK8M1b+SJXznmPgHGn24+Dm/t8kXZsN9X7B7M6BN8opTudTlIV5cHuvN8Zpn0L8AYsLyhRAByYOdTex9aICDNoRAXEEES4Rb3eNyF4z6K0ZhEkcs4ZRT449l6BMso49m1MnVEqtoYEvPeR0UiakKxWkdEA4MUro10IzMH5hjkoQBRKTAwgIfjI6GVxGDNqXQcHXlqiobUx2lJ75yVGgBC8c540KWEECgxlVRhtA0jtnDALkPMhBFYKBQXJxTZBUpo0CHJgx0u2HzhoTSgYQvA++NCQNoioyRCiAlSAQdizU0JBWFYcPIgQICGyBBD2woAgfumkFdJBPAqSEAAYYxBAawVL6k+BUTAs1xcQuX3oEWkywhI4ECkA9ty0AiJc3WNbVSchKW0CE3vfXltt5BAzt4E8zA7ajDqQEBwTeFaAKEB1AAUMzAIVoWJhTbpdMQoSAjOQqgMjgIgDQmEwSgMAQ5um5Jz3AQQErNaklID44EMIEgQJvbAPy3jsOgQOgkigCDgEZq20NQYAfHDON8oYJAzMJMzOUZtZJYFolVSIyCAKMU0SApAWaXsGXwRm13jRqIiqqk7jpK69CGoiZpHglTFKKWSuAwcfHroG66KOtFnMCkQKDMwUx4O6qJumJgomUZUyRxVWqlkzcPv4dDi4WFt6/0c+8tZ3Xnz1+qtZujZpZmvb8a2XXiY9vrm/9+r1o+cfpc4gIsPoZDadzseTzbXm7psv3dq9+8i7Lr768t6xA2+ZM3XreNTtnXv2uU8+/6a+/9qL3YHZPTyYj97uxIO1zfP19HS40dRl2OoPqyr/zhuTXpZpOy0Kzjp88xt3enH6V/+zP/O7X33hb/+fv1aWIUo6yFCVlVUUfNGNo8Cste7Eke7hfDyvyyLqRDZKx3lZOQaPi
mOqpFe4jhTb3epTn06vPmPe/ZHYnCPfuZXjs+bdf3QCOX7oNOAigYHj043L0u92FyAKet4HVE9bSw664L2JZDH6orKl7pwleK/ngd48xU39I39143/+zI3fuf7bKuo577wPymgdWd+EKIlV0k17NrNQFvDxj37ozdduQtQ5vPv2o4+d3Z81ZzN9tH9KlbeW077RhPuj+YeefmI+npw/e/HbL90MQHEWl3XZeO98uPTU9t1JcevmA0V4PKq6vaTTTX1dV94vigabk9gYAS7qRhvsRCYvG26qLDNB8YNJXTHayNYBSClNXFeVCFbaIxIGNXXxgxdqfsH908/f/MHn9A/96FZ/s7f9xKAT14vJmG2emnz47KDh2Qc+sgNl+LH86t/7a1/6/X/65hX8yJTi3/pb//kPP/fo2/fu3Zvx1pWzx/WcY+xEVEmQeW0S2knx/le/9eyTVwrAybHPznR6nc5ieny8f++7L3zp1mQKaZxofef26WOPPGcE41jXOimn2eWn3z39/ePtzf54NNW2iXqDg0N36fmP+3/+LxDCzvbAGt2UlQRY61uN4eDoeDxtPK0WzxbNvJyaOIojoxL1xo23DvcPfvhTn3z22SsnJ3NnKN9ce+zZqHBNFUI1mww2+z7wpZ2LOtLnrl06PhqV87IuatvvOsLj+TwmLQGhkp5Oszg9DdXbd9945NGrb3zvzcH6NhMbpbygEDTMwSulukdHUwa5fGX7u9df7Kcb4BoiWdtYUxIOTo4jbX2Za1JSBw1WMweqtna6L998tX/x0vGdPSSMpPZ1EyVxknYm49Mki7JODxDzfKIjA9oEz1VVRHES2ZiZAGNBXYr0+muuqtBwr9cnkFj1q7psqirpxDpWPtRxRx+P3MHoepTa0Uk5PZpqA/Pp/MG91y6fr03n9PVvLc4+857X32iCSg4Xrirx8ecfuXf9bpUXV5686rXNxzMQOTopqsKdHk873c7xvBIVxVFMAE0IJLHRtQ9h/+gkiokBEdXpwcQpCN59+ENPDdd6a/2438lms+KR5y7cnZ7sXDwfMPzu57/2X3/qRzfXu/N789945Uvvff+1CunFr3+zN9gMrAUbENXJ1kRQvBtPDkj79Y3t2Ka5d2nfoF2fcz04u6FRHT844EFiutEw2a6mp82CocYmj48PQxPg5u7drWvvOb63Oz86Wt/uZf0simJldUxR0zTH9w+zjjl35eqVZ9/1W7/12ztrvaxKtTU21ijUT/vHx0cMfvvyzsnB/lq/F6GfL45QQ9wbLio/945M2t/c+tDjTx3ufjfd6n/s0z+AtnP61tuDoXrmcufN3VvP9ocXOkY0qcAqM/PigASbhp3LKWk8l5aSjTM7RSnMi3w0JSzj7vaDgxNw0/PZxuVHr3ztNz6/KJtb92Yv3sjPPv6Jf/evffDn//Kfrf2ivQpIqboOJyeTTj/W2jWhkUhRTKP5PAvaaokHTKY+nUhdx5qAWSZ5XpVFhAYrmFXi2DYOlFbG6DiOTCTOs1EqUqp0VQjBBUdEgbXpRAF0HeoEgUAxMwDl86qTZY2EZjHrWpLgoSxMkhqt86Lyrm5KzAtvDGZJrJHY10pZIhRBUlC7RpSK0qhxPolMNyJUEJpaiwKkwAG8pyDaRlYp5wIqqprGaIpj0yWllErShJ0bDIdHTakB+92kJqQmKEQRbkrHAqB1QNConHd54wMgCCsiFevADEo3gBXoGYNfVL0kjpNet6t98LNqwVXV1laUvkwURVZHCCR1RBTqwtik1TR7NHHSJ22UjoRQIyIHrQ2zgKJkvYMi5O3CeVFo4hQBtWgNcQhsRKE2Zc3K+dJXpBUbCizC4poStVpUpbIZRCGKCCkBpwP7BphQubrx3kU6QYyUClVTCYuQAtKeg7Bi74lQITESk7akGLgoqkp7q7MstRZ0qEsXqtKVGQozFgGhEnKltaRIkCDtxLZj61ALl4mSqq4iyjiEWgqqG9NPxIhSDngKONcYGl/NpZ75adWEICpSqMkYgIaLumkC+rqpy6oqSyeouQ5NE+pSghOfk9VqO7UuL6lD1FQ6YJZEsxln3Wx9e/14dMog1hilrRb1znx+1SxGCuGho+khSKIlMYCVT2sp52hFQEt1CooI0Tt/s5qYw1JRsqpvX0ZPAwKLIONDSvN96UgoSw/W0qvVCkm+XxO01DhBm0AkIojLSqslcFqyjna7ZOX5evjpd/Q0rRNs1eaF73zzkpa9QxSk3eSVWYyX4qm2FGypS1oCJ0RYdaK1ahoBbGfD9FB6tIyYXu1+G4TasiJmlGVis6ysVkvFDK+0QvKwgw7oIa1rmR6HlUhsGc8Kq+8CweWALWcVS2seLqOFcNl01gY1Ca70ZqtIoxUdE6FlQJDwUtjTCsaAHsqQcMl0VoKvlbxr9Y/LAW4HQ5Y5UksKuMRqvJKKLbkQBgGA9nvb5XhYZmy0SqyWSLXJRtBmNgks46t5pSAiWUX2rkRO34eKusOtfp+jC+c9K+dknvPu/v6LN3b/6E9/eKMrk8mBn49/4IMXEYtg1/aL6f7xadFoW85+6OPvEZV97dV7yfp6EkUP9h5c4trVp6aOHlnf+MLnfuvRa2eef8+zp8cns+PpeOLyYhZrHHR6Tz37yPU3jqaz/OtvPjg/0x/61EVTl+XNWT6RHsaxKOx2gne+KI7KG8Zqr0RbS7EJGibH09feGj/1yPuqiXKTyTRUJ2XuxtyTnlnMtwd9n7q0o+u5Lkcn06ZR/fjs2no+r/cnh1cv7cheJU4eHOZELJU7d+bM2mBTo2YVV8APHhwMs83uWve1N2721jaKRZmkCkHSLGVueknvpDwhRWsbF09mfjE/saFGLdkwu3frXl3WmztDDkySPXruqWbuuapA04M78+Cyb12//q037npvtAIJPgRWERkN1hgOzMxR14oKT+7I3/grn/3oB86+8MUvnNx+c2bVWj8NebMY7zpDgySC7lZ37dzuLL3+RvP177xxvPd1CXqwnl24vH3+8tZ0dlAXvpyLc3BaHpy/dJZsN0sG9fjg8N5Rf6ieeurCq6/spevD3LnGyUlRdAHTOF3vbN4/udntWx/Aqt7dvdO8MbHxwEoBGZsO1o1bJKezepBudQ1Oiwdrg43p6SLieHO4c/feQhnT7/YUFMj3sk6XIT8dz7rxoJjVSD2KzSg/eXQr2zprD94+jTgOzYKbksEUi0Un0YLkZvNIsRJyIInJNISqKAPa3aMTTZ3UBoLQsf3Twkhgz5h2beDmaDrppaRITaYT1MpS7auFodiDYTKIiYCdzfIkS9GDDn738N6Vc+eKWRAdx53O8WjWXgXeB0AqykpCYBZC0qS00syBWVzwvKQ17L3z3gtIbIzRGEIIEoIIewABa4wCpbSunFOigDAyigRQJNKaAFjYWCOETXCwJMQcOCAKBI6IrCIFYKwGEABDhAKotAFU/DBth9AoRaSssm1inFEaQFhYkyitPaJrGmI0xjqWumlEWj7tSStEEhEOoFG1KXKtbc07J8JtvTopNAKC6LxzPhCS1lYAVBtot7qFIyIvrWRB4bIt
1HMQ4FauiUgM4plD8JqoDbQjJEJEJK00oihFrUFLZPnkA0RsWzZZSIAIUSAgK0JA4RCWsYDt45AUs0elCYRFCAkgtHJcjUCE3vvAAiTMbaA1c2CANmKS0yhTWjECM/vAKyszcxD2HoQJCZbFDK2xDonIKNP2SnofAocA0jQuBK+Ujmy0EpVCYBAJiMjehxAEpKprH1iQ2ixCYCZud9y1otyWITW+jbrCQAIBImMQgIGVJmQMLIE9ESltFAmwECECCKG12gNrY2rnI4ssWNYOEMuqYmZttNamKMsgiIS1C01RrZZgoCmcUehRlI3qeh5CIEQOC0++hEI6zSxxbpF7V6SddHutF8eu8Ic3Dt+6Ob3345/69Nd++5v/6n/8Nz/+kz80u3/w2kv3fvyHf/Sn/uTPwLQ0WsWJmc5nEJok5aP9u//mX35ulsPk9d39afNgIrVzislH5re/df/9n4w/9ak//T99+StnttLHz52Bss6rqeNmVJQAJopUbM1gc3P/pJkczGPlFnmuIgDg46z+3Rd+4wd/6AcOFh/4+7/0AqoAAq4JKotPx4vElp1OdPXRi0Fgb/eWhCKOkeuGnSROupG1yimQuB5f21l89g9tPP3pK8OnCGLH9S03Y5NOU1irj9fi7JzA7cBvxp2DqDldzPedBxU2zcYnUG8pkAADboB8UDZnPY87aeFnqdQkPQFfwGTr2jaXryama3tYAeIaDTbXyIXRafnJz3zq2rn3Xrt29n/4f//FJIrefvONan78l/+zv5hJeOXNl3/hc7+/c+4RqIPein1dTedNbPDxS/2S5dzlR5++9szu/snxohxXtQ6hF2ur9HiiF3ldFhLF1B8OrFEiClUUSg4sw25iCJRvNoYREhTzYqsbbWwPbu/PqmC7SZZGVdM4G8enc5dp1R/Etw/GqLANkTeAilVM0ZtvhZND+tzv7b/vUfyhT9bXnkjOPL4mneDBNt1eUaqiPkJX2l73p/+Lpz/4cf63/+Y71Zx/9n1X873rf/SHrlI0fP0b100UVKLOrPdC6a7fOHnsye1sEBeV1jVBdTQ9neJsMGrU5O3Dgwd7aWqvPf3k3ZM8HnR02qvzxvhyunuImywBTk/rM5evaLuog57eP8p6M7YmmLU/8NlPf/5X/9X5C+cx0liH4GteFJ5cOauBdVUsV5Ijiyw8Hk8b9Hf3br37mef/xM/+wfmsHh2MjCUi7atJN9Zpv3d/915R1x1bD4bZ5vn1+Wyxd+8+gtGo036SpsmkzgPwbDaLdBJFnVleNWERd8599Xc/3+2d2d65UDfe5XUhWhEkadT4IEq7ms5unpnXZS301CPPXb/x3Q8//ewMEHUSgmxt9ZqmHo1OgpjB+oWoOK6nx42vt7cG12/cudrdWN/eSuKkyssk1lkW1aFK+kkn6XlXNl7iOI6j7qIolQVA8jVEuktWpvMRaXG1RGTTNF4sjrvJoPEhcLQoF0A2UtHh6Z6NYiBKTe+1V77R3RiE2qEoAEytufnaK//R3/xkusXDURrp3nY3DK5enC54f28ewJ9/dOPmy7uHe9P9k4lydZqYxZ3D0Pj1XtcjoliT9EAQyijrJd1IjeYPxDujyVoVxenB/qntx+97/7PlfM4qub/vxvdO59PxwXG99ei62MGDw8VEx3/97/z87VffpPmhm09/4mc+fG/voLs2ZFBZv9/b2ZydHHs2J0cnZJv++tDnhTLaRhGKWC1NXfkA7EKwZna6n+g4lEpFNoo7oSgaKeNudjV98vh494lnHrl3Z+znd85funB7etKJuo2wouCLqZgeSsiSIL6+f//w2rOPfegHPuycnx5O0pR6/RSD1NWcROqymo4mSTJgjxwcQFjf2s7Ho9PJSZr14zNr3lXT00nwHaV1HCXGEp+98Nr33sgz9ZXv3Hzk8sdGJ6HfJ/YVK5pPq153rduz+Xy+vnZFHZ/WBYNXNrau3u1vdcejau79+XM7i93ZePdgrsSlEJ3pvPXg4A9fvXB0b297ff0v/bX/5P/4X/8WwAgAlGA5rVAxClbUxAOawMIDR71UsZ2Xx1uXbGeQ3T2catMpZlNWSdQHxaaqFTsczxYUTCfTWaRMrJOUFvNcRJdNBUo1ofY+2HhgExsIVSdVwbmSJ7OybKDwgeoqjrIueVcWcaY1qbyoPfuiXKicnIP5tEbgwD7SsQNFhFonCpFUAEIEr4VAGEipKPK+yUs2ZHEZZUjOe1czosmbXNKYg5pW86oKkdVYO2p8mqAJjS+asS9r10Sp9QEVqjQ1dVWD58gaJ0JaMYJGzY1XClVARUZr0ggIigC9ExSuAwCCcc6J58gM19Y70borKy++KHNflUJcOW8NBPY1cxJ3hCWKLImXgMRBiyEWY2xT5cDOCds0jYed4WNPRMlwNFpMb9xcjEdeNcisxGmlUAkxktYJAWptdIw2KYFQq1BW5BoXnA0BfF4ufK/bSdKEJVVkm6YkQK0a8FDUvvYB2ROZNjEyhCDBazRGW02ktBYCZueYgT0ppTWFqhGjKNRlUykUo1TlmjiJ81FBJWHjB9kwUBkbrTLLSikhbTuoODUgdcNNQZHYyHh0Tkok433Z5JNemhmT+CZ4dnlwECprU9LWe+dAMUDV+OAjxKSbSpVL42AxC6HhhCIFSubFtBx37WDv5MFatl7P8poWg8Ggxnx9rUblTg+dFYuREreKaVkKeRBaXtHyHVyiAllm4rT/jqt6exBcpiavCtNX2GlpHJKHTrKHWOBhcvRDv1Y7jcdV+dey2v0dcRHy0mKklnIngIcQa8l75CHhaW1q7Wv7Uv1EiIxLX5Igw+obVg1kbT29LDN/Vvqi1X4sDXfwMN1nmb8sbRx1q6LCpdIfBZmXSUIoQC0KW22nYJsZBAiMgIwIbX+wMDAC0kNrGLaAZBUghAEAeOW6kzYhBEEY2+41XA1Uq7ZhAGyDfh7ysmW2KgHAyvTXSoRWmA5JHoqKVrCpBUbtYCxNDowtbFtSGWBmbDOJBETwoXsQ3xmftlnuITt6eI68E16FK8kQtLMmgTYmajX3WUWkLylfe5pIWCV5iwijrHRFsszjltXwtcPQrsnDypUHy8+942n8flQ07GaPPXbp1ks3J/ncjQ/iKFalPH3t2UbSBgtUACG8+vqbqkNJt3fuXO/S+TMG6Oh47+bxeC2Jnnvk8dnp3mA9vXA+a2o/OWzmp8eD4c562r23d3Tz4EUb4JHz64lVnT6OTmdzmAcVuptx2tX3T8f371VvfONB14SjI4rGHPcSOphvnB2AeLFuMMx233rbZlEvTbZ6a4t589bLe+NRpyO+qeYyOTaNevLyo7dv3Yuz7ofe9Umr8oU+2DmXvfytcVHR8UmOCW1l3WvnH8korooiNZxENCrrtZ1Nzrkqquno7Q+8/ykw0qEok+27b9+sF8dpJ/JNE0Vxr7M2nUwojjnQyei4btj7xkEeAJLUxqkN4mMj6z2rld/eHM7HmM8brvaTyI6Oj2upmSJJB7/zhc/ZOCElpBhJBQ9GkW84EIlAorHP/ur5/t/7ax+ey41vfvGb/fPnkvV
nqtn+vQd34sQkg0EqG2/f7/yz37jz6v3rJzMYDrKL57Z3Ll8khvWtNEt9WSwmx6c2GdokKasm68RZnM3HI8tczKdZ1okhWpycuqawCjk0rl6wi9JOjwLn+YSYQ8UAwVfH5zeHFiKMbRSp0dEsMbpplKAdrm9xk+fzMkljY6MozmLV8VxursXTxahrfFlxmY/npH0otjfOQkN5yCPNmmvtnMvrcqo1WlTmYP9Ii0nS3uFsbJRXKmhNSkE+myVrcQjomoZUQojsiqzTc/UBu9qx5tIaFdgr33ggthEkHcOhjthHqambMNk/Rth0FVodttfXvARWUJTjqsx7WfTup880ZWE2tUIoi3FnsHTmI6IXVlpxYB88EDKHJnhCYPYtetVaiwgSAalImcREWmkHTWBQRM57pSg2VpgJyRCBImWU1UoTiQ9KkQADKUNESqFIW3aGiHESO+/axsLYaB88AihtmFkR+dAa3Ja5OSzMLFZlKCLsBaTdWi8sSIrAGAsMDVdGa2Fhx0SiIsOBFZFCQiSi5bKJZwdAIohM0lpskZg9CQEzIoFAbO3St8zAwm24UggQhFsA5YMDkQDIQQhbFSaCakPo2s6x1swlXoJWGoARyCoN4pHIe0eIbTqUAmRpccmqUAEJkUSC0cYHDwKESgCYvYAQKR+8IgzBL9k8IiC2umgBYEQiBcgeIICgQAhBK8VB2ifH960fCBFJG30HwOADe41ECFprBhFgIkKlWRBQE3Jg732DCIEZQFBR6ztrTWSkNa6IfuO8D8FJcD445xlBKSOonKtJBICVQgYDIIRUu6YtUiOtCJS12mhFgCJEhETKe2ZmbZQCJBBjTWDPPgCAiYyGQBUiEyCxiKtq5wMABBEgsVpIK9dwUzWu8c77h7mGmfU2zLNYT+vGGuUobtizjirTK82iCIWN8qiADZOFnMuYhymEZnHzte8YBS/8/uch4I984rlysnftyXe9710funnr5t6br3Wj1KgkypLpeF4eHnSjmTu6gVBunkum83L7fL9Qvihd41xk0bJaX9+6duXqlSuPTMd3LBGQU9YK+MikZV1eOL+Rj6a+xqIIppMaXYVqLoagCRnj977ygs2PntzcvLZmD4tQSyCDXsooUwRNU1aTkzvzxh+fThSqzEQ9ocSFiBZDqIfG72zBe3+g++QPX9n4gcseyLkKYE1HA7359Xo0xfQwTp/RUSowNSoJ/IaOe72z1wQOm73j8vSXOuuPe1jz8JyyZ9BSBbXqfaCCY9JDz/vB3yOoLeiXfvf1UGtiKKciqCKTdqPeyel+HNmj/Zu/8WufP3/xQiOqWnjta8oPu4svPvXuZz7wwad/8EMbX/g3v/8bX79jzuxonWrM5qdFvx9NDufF7WLDDSK2vpzZGg13fvJnf3zv8OTG7ZMkmrhUefGIajEptGqUFaPVoJN4DCejWaq0d2gBdgjPcvFYFl+72rn+9mwj8pEOo0VRmWq7q6say5yHSSQK68oFAFBQs/eOTaImQVCi37/OX74xS83pu54wH/n4+gc/cb63kWR96/y0rkbCjHL46POGT8NbN/ze4XVLcGNvpCL42I+fb76ZfPvl+f698VYWbye6WkwcTufzzhv359ceDZfT/ObX3xiN8ZGr564+d+nGrTtNNR/vTZ/ZGUQhjB8c0qY63hv1HrmUSZgdnaKCvh3sXBjefOn6wd4Xn/zYB2tTXnvuafzcr+/vnWxt2RAcoz6+fz/rpsH5JDFJL4a37gPA6cmoKcv9B+PHn7v4J3/uj9SnwTtRkTGSgYZFEw527ye9tSzSg87w2qW1b337JQ++qBtjoji2WZrNZ7OqqgpXxEnc0b3a6CSOk8Q0oZFQb29tP/vkM4cHB+fPbVRF5Wss0GvL0ARtlK+aeu43zwzXBunt/ftBIQplnT5gJKAOD46TTpcII6W8qEU14qZ0dVMt8o217vhMms8PtzfP3Hj73mOPXepTtH94Jxt0ARD9uHGFA8mwy8FZnSaRrRFRyPtCmLUhpWyc6OnRcVMyaJhNZ4ERtE/STGrJqypN+yiChFvnthZzV+YzFkZ0x/d3X73+5pz0537j3vueOZvE6sHxbg5DE/eY8f6d2/feqtBXnazb7e989frrO2taPPXW1o/3jqtalNYnxxOV1HGiL2xsv7331jgsjMKybAzRpAyJLoxzicoubW5Mu/rXf+1LH//4Z7t1Hzm8cuvFWAaPPvPIje++2OtsLxYniZTim8l83FvfqmbloL955bnHb986Gk/KLOrHtre3eLsTpRSC0cYkdj7N44Rsr5el/Xy/2dwaBF/Y7Y5S9tbte6mK08K50gOR7XW6qZ7OTkVXGvwr3/jGu3/gk+ubmyfjJuum5BCBy3J66drZOy+/df36mx/95Cff+u4bg60dLX7QjchYJ+KrSiuxqbbZmlGxNUmcxCphhaC03z15c3N9M8r6+cxtbm+ISWanPfDV7s2TTipREm3vDE98/rHP/Nin//CfffnWq5g/yPcPzpxf21hfc56CItsfsE7H+e70uGpOoLfZAaB8MXv7+g1fN5fPDXNX/PNf/dWLTz5x++Z9XxdJin/jL/+l//K/+q9MdZd58aEfeg/85h0AKIrGNywKhKqop0xExlQ2VlURvHMqXTPD7OCkqus0FJUrK0GPUSSMjp0iSlOrHMYI3V5UO1+zN5HWjpWSELw2FNvYMyOBtTpAcBA8sUo05AIhkDZCwfumccEHhliBUo0PsdGLRYWk4izO85lACBx8U4lWESCQWG0QlRNfh2BAkUDwtbCvhA6reWzBMFVNhWRAqK4LjC169J4r5xlUWVTOBV83TeOCroaD1NV1lmRVVdVVGVDXRBLYN14piowWgIadMWhindmsqJpI241BL4mVKyuliEV57xlCXXpufJV7TTAtcnQSRXGUpnZoABzWZagr8iW7ktEFrQxgVRbKWG3J1Q4bJB081YqU9947ZyKdZmm6fg6iLauc3g9u4powiTQF8HnwQdga7QM4z4wAhKSVAS2kTWwg8q4pgq8NMbAT8UqcYnZ1owjjOE4jwwF17TFfiLB48Z4RkEVrpbXShCAcasegSJOqG6+JaucbB3Gi595FhoyOlAJxlVWKa05AB4IoiZQWMoqMck2lhY2hwI2vm0jZgJXpUtQxSVeLngMHbkLVOO+MdRJrMkhVMYfgtIEiXwQtQBFqU1czgyYxBpE1ivGVY1XmKiiAsuQKwXtRJkDVzVKGpnZNLxsEUHE2nI0X3ThyiSmKgJ4LVz8kRSJt81QbqkMCTK23CKQNzFzqRB5igKX25vtkOu+ogx6SgaXQZeWxWulHZIlCWk2JIEsbJYnL5NTlB5ZUY0mtsBXdLKUrD8O1ZRnygCvpD642CZdmL5bQmsEEEIHa5dVleg4HWPncaCmueqijaV1TuCIPbSNZaOU3rfXuHewg3x/p0y4tByRSy0xmZKEWXSBhK1RqaVr7Fk3t/shDjc0ydwiknU+0IUG0hGGyol6rzZWHUqgl5GszjZZjsAyUQmqBFwHJMgypzf7GZb3REom1/08AS662HKjW0C
WrsjJe4cTWgriCae8c+4ceuNX4t6fXUnMFy+KylSVsyYdWB/ShUQ5a5rfcCUTmNqODlhHo7SeX7jV5+P24PBTtEMr3n4zLc0aW8G6FAL8PFR3endQTqDtbT//Mpy5+78v/4H/5vNLrnfXO+MFeEui9733k+O6N3tagFJ8SphrWN3fyWfygu3E62Zfy+Ic/9fz9e2E8O+530plKUtup528N+jYAfuSR994/qe/c2L9xc1oXsyceXV8bDkHw4MFJ8Lw26Jy5tFEtFnkON++PRWXPXHh8WlTTB7c3ty7FfZWfHI7L416SIlKSbX/rWzcne9Uwewp7ph9gIfn6ucH9/ZPrr1yfNBnn9NjW+mJUNmAg1hvdpN9TdaBpGfb3x0VePnrh4tXLZ7/wW1+BptwaxDsb2SEsTAIppX6SF26yMIYbG3cj3ZEoSYEVez/NZwyUL8o4jSVgHHUb9HVVJ53IOFXMIbL9t154W8SqPs7neZB+MCpJqW7mnY3u/MClm8Nf/OXfjSPLbRYyg1ZoTGSimLHBxHYi/6nnhp9+92P//F986Vd+8Rs/+GNb+7snw7XNMJsZqNfPRuK5KAZ/6b979cU3OEh/Z2fnqSd7jzyyEZPnOleRXTuTjvZ3bRKdvXy+KHFWjNG4pppDfWRVaBpMuumiCtNFdfWRywfjeVOUfaW21jqJ5SzRZZ4nqZ6WBQh5V/m6WM92iskU6sUE4crVp0/274Pm2fQ47nei2K6vDUaL8ujBYa97SUcgoYZGbw7XxzMxaUS2Gs0r9rlKTqcnC/QZh2L77ODKlfOj/dtnTeYs1KFc68ZF4zqpBtGGOI6jQbdT5WMJZAhNItOTvDvszIqprmotIQgmSdI0AIriNANrRSoMdRWa8TxYRSAdWyeBw1p2jlTPFXmiqBMnZdNQkvqyvrzRO9g/yrgqQrOYzHu93nC937uw3V4FZd1opQQxBFZEgbmu6uBCGicKxYOAgA+h8Y59sKSs0iRthBsapYGFkRQpz16TapoqBEYAo5TRmoRZSWQVoEJARRi8MzZRRhCRSNXOaSK0ur23EREicquyEdatAkeZuqnEMyBYG6GIUZoIQ2gD88Q7LwJKY3CgiJIkmeelJh3IZ3GCAt6HxBqi1u9KntkDI5DRioS9DyxBQJQAChMqEVDUNm9ZRFjU1XIRQBtp6T6hBAYAg8TCzKxIIUrgoJVSSpOm2otwgDbmGaCtDxCWAOygRgICBSiEqhVEggL2jEvXNKLSreXa6EiErVYSQmhtgixBBLxTSgmDVrrlXMKMgERKAJ13BKDJaiIIQZF41yCBAiBNqDS2DXfBo9GoiH1g9ss6PARjzBL2rJZVCFETCQC7GoRd8BxYQNqIIkWIwsJNuwsQoE1HEkDv2fngONTOB+8BiDkISAih8Z5FtMaqCW0DHSEpRZqMUiqOImutVbR8CCCwiAJSZNAoYVbYRhkJIoW6AQ4QgkIgYBKS1tTqRQA8c+Cm1eYGzxIYhJXWvMoqunf7wWM7j6ImDMEmWBVVhCwBPOrQYAw08MHVIlUloE+PphbQy/z04Hhtfdg/t7P1yJliOlrLkq9884t33zz9S3/uP3nlft1bkwuZfvVbr97ffeNTH3/vlcvpP/rXX1Ek89P8J3/2/7m5vfk//a1fPpyfMHIC2ur6dPdLcWwuX754aOff/ub3er2k39Hnty/s1ce1mKAv9gYFp+7Ke869+spLUFVPv/vKrZt7McHjlweno9FsUd2/9fpOTLOpCwJJQmLgdDY/00l6iZruj7WxG8puZpZc6IfZelL8yGfXn//YzuBKvHCjznOPequn3hu9HagLsBnDBOCON/taRxBOmtkkHp4I3Clmk2RwBiETALW9E6mztbtVT673NjsMGMKAuEuGLGQejpHmNuEcxl149PXvzrpntqoFUBJvnB1uDDYfv3bh80eLNNGDtXVwD6LGzCqezJtOR/7O3/7ZD376uabhJBo8tdV/6gNP//n/OvvO9178L/7iPyihv769ldoOUrrVHW5eufLy7o1uJ4v7nT/9h/74F7/yW02iZofHjcDRYXPxyYse+NxZOHgwIt0gOYqtAZ92sySK5gej82v42XdFP/aZd8n07q3ruz/+oc6gizs7G5PppPANOLz+2uKNB/X3Hsikih8sfM5EmYlTffCgTLsQalQay0a8xBnE97/hPvfVB0/+o8P3XKOPffrCu3/o0jCyIJyu9+vi6MIPZmffh5Px/OwzZ2/enKwNehsI229Nua4njenY+Cd+6hkw+6Drf/EvTnyytpMN7o2OY1K9QT9ZJ7tm8nvpwYPReBoW4zLL0qOjUaffT9eiOC7ITUxSHdw/dXMjwMpGx0fHt9/47vb5Ry6cXX/2Pe//+pe//dgFWnh/Mi76g0wiOzo+nc5KSm17FVy5ciEs/LkNufrc1fFJozgeHc17Wxt57Q9297ONnStXHh3PJ3W9QGiOD2ZKuXk+j5GKyTzbOSPBIzKiD1Vjk4gkJywFxDvCgMejI9+4c+c2X/nua7FWmqL1zX7N1DT5bLpQKsIGtra2bNI8OLq30e8OzyQ3vds9Pk511FsbXLhy9uR0jFRnqa4djKupK+skjYzt134hQU4PJ0PzoIHOeHDutCk7ma2qKlKGEZEjRGKmYpH3e526dqigLgoRDiBpnChS7EqQGsk68lZHTV5ZQUM0bab5Yp51sjObZ46Od8XUrqg6vZ4AnN7eX4ymr798t7+2/o9/8XsvXl584od+aHJ058SE7d381s2bo3LCjSKlO4Pqcn/wh/7IT375138p+GQymYjBvf2jyHTGi8XGZrcJi9cPvmbi2Nc+XR96htjaiPHoaP8Tn3jPpeH23Tu3zz/5eF8N/vWv/PIPPP/Iv/NnfnrKp4fjw7/1K7/6P/yl/2L9kY92MhFb3bv+lneer+w89r73j++Ndq8/SIYD7PTdfDaZHpJy0Fjf1KJdzZj0U0Xag5q5EOzQc5LF0TSfo4VrzzzZ6W584wu/fOXyM4yhYpCiSXr98eEh5/V4NiomexuXLs+mRTLIJAAzz0fHxTDt9taHwzNf/9q3zmxt+KaJU8rrhfdpbLTRlKYpkAKFnd7W5PjEZNZrrbkKEs5degJVmIxLpbsqOwM2in1CPH38g+ciLu7febE3VA/ePL539+Ctl36fYjsavy6nVd018YbFyMwmTTY4d//BwbSS17539/zVTMfFzTfeeum1780OT3/qxz506cL2zddufejpSyywiONbk5mN41dfvPnf/bm/9P/9q386GWrnlnOE3nrPOz4ZTRUDOMHCJ5uZSnQseYXlxpmz3SQuxhU3SCKZVQDe5+IYa+Y0tsMMVcOaa/Dee6yKAKID6Lh90AeJLAigLxsLWprKsQdDXgdRoBXGkaU0rspKJ8qCbcoGhHqdLIoiFyRKktEkN1Gc6jRwaCSIBMchSZIkSoC9EVIABsn5gODJqEXhI2NApPIO0ZYuBPZaqaqRqSuZCQl9CI0PHkUslHlTe/BjrwStpoiiRvG4KihOGCG2BgnTJGLmGCnVyigVGkdZlHV7kUpI63hNk
9LeU1M3gbhYVJWvFQGIFHWTUdNMi3o6UdYO1vrdwVAbLa5ufODQcN2INCbxgR2LU5oVe1DQOOe5BNHChjHJC0enx6oHkeltDIcHb6NvGl97Y4hIO2FhBSAOpRWGN2WpdNIEpzGyOtYUAjeeg1ZYOEfMQlFdBVIGpU7jKDJGKHLBBeeagCDo6kYpq5QmMkajBGeQWGtD0oBiYZDKMze1Dw3bLAnYWmB0COTrQKzjRKexUZbrUJHERhEysOfaN6KCtkWT5lHCpqN1rJSpkkgvKmmcanwcYXd6moTa+6DLxikTNgY9dmRi7UKgBqOIskgJhl43DZ1sNFs4J7PTgiHkTSCFHurAIetkTdEopVTkku2hRXWpsz1+UEoVk5diXpd5/s5E+WGuUDujltZp9RAStSSAVgTi+zrIHoKCdvbfZgQ9dCQ9/OjKOsYrBrT8JVl+nmWZMATtLH9lrVptzhL04Pd98WrLAVb1Ww9fY4lw5X2Th33quJTMPMxmkqU/DFEEGBgEhJG0av9UteQBl1Y0lDa+e2VYWzIOXCpnSOQdtVILQlaWtpX56qH/DR62hfESFbWMCZYZ2iIsKNiKldTKqfUOSuKH0KNtHFxW4wgKIciylW5lsIJWWiNLwxsI0Pcnji91XCvtDwIRkFqeAMvAplWkUsu2lihIVkecA7SHAR5+bTuqvPpOkbaTjAHa8uX2KJDAclq31P20TEpIYFl+t2xvWx6bFaGSpViLlycTLTPX27OkPaotFlwFTj08b79f3tQKix6ipCUqGgzPiN14/U4FO/W9bx2CzoLgxz72g2/eup517Msvf2+ry88999yD3aPFaFzPZ6dV6HR23v3chRBdPDo6euPmy4Ne+uzZy6+//nbaO5Pno/6gny9KbaN5Neuk5n3vuyR5cIXbPbn96qs3r21tk076fZOl2UFoDvdP63HCOgpOf+vL12syxldf+MJL565eunxth0NzdLB3sDfuDqO6Wa+qcuTrNIr3R0cznkB8bmayK5d3bl3fw7Rz1/ntwdbldDDavz0b5WcubV48m71xZzxeFLOqtvboZHa4eb6nIZs25e2746KCM+fWG3DTYjpYi9Le8GhUVl6YVTGZr3VtVZfsZr10M7NRQA1kFnlpjZ3P6srR9KQazXySNpHq9roiWJRFSDrqkSfPP7i/F2vnmrLfTReVevPNE60MgAhDYCBFAQkaWevarTX3V/7iU889U/ZkVi+662ud5Gz8oUef3n/9bXaqFivmzL/+3O4/++r1wq2tbaePXd565NJgXk/W0kojF1VtdX8+KrvdtdA0RRFY9GLaCGFi1enpRBOFTkjjNIlSbwdOuuuDzcbm0+lUoZiYaldUwQOorfM78/Fcxdb2srijuG6YFAY9nlHNPjJx3E3qetrrbikzjNK4LmeLYtFgffHK+cWNsS85jWk+h4ZlK864rienE25cFmVCJkjw6Cc+lF76W53DB3v96Iwrinw2Y/aoXRTLdDYb9LuzsqGCrGUXmL1CrTpRD8P6ueFZL3suhXJUMGkU3Uv70/mBVTaLEmuTMldaJ9V0L4pSH3wTyhjNaHocpZ3pKDemo5SySRqYkBBsqCA5OqlPq8PlpUkYQHzdkCKldGic814EyqZEgBA4MhYEffDMHGltjPXeg2e19HMFQebATkKgsNRTEglgCMFao1C39xWlNIJoHWljVrl0YJWqfSBtAQRCiK0NInVwiEoAiYgAhAMiKE2eGZFImyZ4JcQhBN+IhNq5wAHiKNGklXJeTKSFOSbFwZMiiABFSLXMoQU3mrGtIGPPTgRJoQgbExMp5iAADBSW5jRWBG0IESIQEiKCIoWoSHnvAMF7FpEosqqtZBRQuFKDIlhrEYg5tLY+EUIUi1Zrg4DM0kooH66NrP4LhLoNfsa22QCBl1QogIBnBgntjV24QSACZAAfRCnTenOXXioBpQwqEg4iYJRWZIDBh8DAIfByKUAECWjZsYbehZo9syhtjNEhBAZxIbB37XqDD6FxPghoxMBt4wiLMCEt7XqkEDUgCXuFCnXbzIVBmBAYgRADMwvHhhQprSiNbWwsIkbW4rKnExlAKdKAqAERUCkQxcwhBEQS8W0oOwfQSkeKmEMjgQP7ZZY4MkBZNwjkGbRapmWvHhKQpDaoyDnDqBeLKYPWJNraOoTg2Whe60dHx01A0EoWgR8s5t3Mr53taCVv7N69NRntbPXv7C4Oef38ux4/+9TzR8NZBfPCxOvPXDn3xEY1Pvi93/wedzvv+omnO53NyDz2pW+/+PzHnvrJn/7or/7Df3Hp7OaN2287qmSb1q5enB3fu3b2TKdrN4eD6zdvIChX1q+89NUzG924Q+X8AKb3XJWf+BmGQAw3b58YgOPD2eh4ZuLksU2LWo+qSow6nwxSlm1NLgTtqhT8RjXpdvinfy45/8MX8LJ25QT6cdJQwacCO1pf83UiTUQmCzKF6HHqngV4jLi2ySLAIcO2Hrwnh3MJVA5uG7VGcCkoowdnG4+kM8EdxNhXpyFGDZWHksEAnP0nP//K3/7f2WO2PlzXcXZuM37pOy9Hqv6Ff/l//okf+3cPXj0419+4++ZulNpHrj3+wne/srGhqkkBKuEogCyYHHTSpz68/dtf+m9+59c+99//9S+c4KbS3ZPx3W+9+ntbW71apJgfj49/49rZu7v7ez/2wTOPPvVEU0W3H+wKGogv/vrBuFiEzsAeHs6Gg06kMHbzH/lk+TN/dv3KEyUsvtncz595DyQ7OUcEUbKG3U1j3OnxBbX+w17tf2P85hdK290ou52//7nptNCdoQyHzaisF2xcFbHXqotNFeoQvXrfvHWM//ibD7p/c/fpLfzo89vv//iVrTM6XVtQP1RVZ7wX1ntpqGdFBz/z15++NX/j939XEug/86NPv33zxc1hcuXbfv+ER1kyXD/r6fTuoRvo/O3XxhTSCxeS7x5O39x9YC7JY0/tFHKcZheyHt56/baujenqWVECBemJdeHwcNcOBmfM9vs/85lvfPPl0dH+xrn1xcypNK0BOlnc18pHy9e0bie7+vyFfN4UVkfrmRSS2nVvMkq4O1jnUHe6cbJ20avo9o17FS/OX75KKvb5wnR06f3p3gEpJoBOt+saL8xFsaCq7HomZTq9nve+rMPVJy5XTvbuTzmyVmst6J1bWz+Tz9zx5OTshacF9cH9g7GrB1nfB12TGs1nmxtnquNxt9OfLQ5m8wWlWLmmY1LRRHG0dfnK3snteycH73vPMxfO6b/3z37x3//Df/x0dmyyKHg2Oup1h0UxVzota1dVtTZGEUY2qqrGKjOdTBMij3AwejBYW0NSAXTl3Hz0oC4KBVHd+NP5PATyrnJNcXow8772snj/xx6d5aMvf+21M2cvP/WJj2+/9927v7/b27b5yenx5HB4Ye3TP/SH/tE/+oe99LzydPvGG73E6FR2Hxyt94f9YUcrE/UGC5eT+LVht/Sws7Hz4GBfG8w6xqnm8qNnz1+69JUvfu3SxbNxCn/iz/3swd7B53/r3379hVcvXXr6x59+8g9+5g+f
O3fl5tHRvTf3rvTUtacfnZ+MJ4fTl175dhr3z1x5Vk3yjDGLiER1O1up3bh164Vur1/XuHXmchBfsWExWT9RVnk3C01A9FI0+fzozKWrtmuidFCVTTmddvvr2+c+5qvfrfn21z7/mx//mT+RrW0zcj6fxRbOnt+ufQ1J/PTHfuC1G9eH22e+9DtfevezT759e+/yhcvDrY3j/b0o6TaV19qU+TTOOslgR+pgeXqydxjEr/c2re2rbF3AVNNyfrTIulK705PTvTe/9VKn33n3e564ejl67fP/+u389md++P0HR6Moyuq86W7vbF56rGnM7MatoMLZHRP73XJC33j1Reh3p3WWXnnPL/36l5uy+I/+wk9/6V982eHwtXt7xtikZ27uj371V7/07/ypn9q++lh7FSgylMBwkJKWwWanosoFj41EEKwNW6kJ86o4bpCjyGqpy6qqImtSpZPI9LtxPZ2VrhTAypOKMvSAqEUn03lpBImoWBQmytgihzzJkpgVWwNRYhILZc2ClSKKIggggeI4Yq+1AqXIKMwXiyzRIQh40EBkokSJ0QhIC4aI0WKitDTceOA6ADOAtsDkXVOXXhldeB9E4li5EGrX1EI2thpQoUnjmCEXF5znsvIx2k6wgiGvnCLFTWMUZXEUJABzN7FZEqXGFGUVZakoUdpGUSJglMIgaMiKtiE0abdjI3JNFVndqWcxhDAvytmimI7DfDyzUbffsVkfog4lQ+5oQamahXdNKKZUzzHMLbCEIIxZHFFiwdiqLqvdt9PBGMUc7Z666kSkAQhlzaAItXbeI4iNIquVb/NKvCMk5qZ2AYJXgkpbpbR3DpzzwTNqQfZeHY1mWWQaluAb59toTkNoEEzjBXWMitMoMsaSTXyoOt3+bDZ1DaNwUeZaKS4qMBgrrbQFIFRolF5q35VKTIeVoDIcJATvhYwFp46HG5HSjUkkn59qZXwDdcNN3ZnPcDodG6RiWglAkBAlKGdtkiq0riFQYPOynkqwyhyMatZRUAlmqSqCJSRLLgiQyT3fOR1HYhO0JSeaEA1kG1041MGHQT/TiJQN4M2jFSdaKn5W1VMC0Gba/N+7w2Sp1mjjgb5PZrQyKq1Cq2E1JZfliugqKEhw9aFV2NHy35kFkGjlM1sCApDvF9osG9FW6chLWdJKXtO+U69cRUuatQp8/j7A1NrUVkE/bT9xIEQAJEWtNg1WJe0r5vIOJVlinv97HhM9FKu0O4kEbS1c24y89HW1W79UR7U1Ye2BQXgnS0lktS8PUU6Q5dC0djxejciq5gwFsJ1ptEyM2pEQQFx+bMnLHh6ChwgIV2ofRCSidoNburMyaLWyIlmhrnY4cKUJWqmZ2tFcbsPSDgfQHkuRtmU7tH8pq9+Xh7KxVmvWnghLRrUsTuNlIri886eMIryUawEuI8OlFXtJe/CX2/BQarb6n3eOwDKn+yG/W6Ki16/f//Annv/E1SHE6q25zk/yp961dTq+fzI+eOb5pyfHg2irv3syH40OyfOla5dUpDHEcdTcny9Oi9nFzTXfLEajveGQR81JNtTAvboqDWauqoOb2TgRxu5W9/KZRwubzovxYrQgtZUm2XpicNgd3x8/8tTFECCJ0u+8eG9zc+tk72C2/9b+zQPx7tyjZwKlRw8Otncei3ayjc3u/t1D2+lQgSdVc9rU0wdvZ0kn+JMHk2JOLrpA1FXn+tuefIJ09cLmG7t3ioYf7I2zTPc3ojPrWX5aCZvRyaLMj/sb2UeeuFpN9ptiFGn35OPrxyO3OK0Xi/Fwfdg0pfhAhr14FqdjS4TK8vG4qmuzf1IO+5hFmE9m5y7YS5e2To7dye6Jn9azoiiKvLt++Vd+9cveCUNAi6hUFMWkSBNnpvgjH+//iZ+4FOlbfP0ounLxp35m68bbpSvmR4d7g17or2//5gv2f/v7N08O1db2E721rBf7buLm9cG0LDbozKBLO5vrhdmezIoyn9azvGxKEbA2CcFlMVXeqTSxSeQ8RyR1VR0fndooNcaNRjWCrau6FomzLrKzdt0or7CuXCWyWJQnly/tVDWTEUyiWEcUGSON1HUu2bB/ZbG4t7EVldXi9OjAUJVXpXfUVbGN+yRTDRLZNKjizEaSl57LSUSxstH+3qFazzJjx8fzpgxO6ijRvTVBl1dlgT01ne9Tup7EWafbr73jubq8+RSZa3U1WuTHWQ9jfdrUOUu8oCHqwfbamUU+caDJKI5k+/zW6PiAQrm5lQQpJ5OxjYI4yB01HA3OXjx4sEfZoJNszWYyur+3A1F7FQQQQhIE5wM71wQO3osLNrAmslYXzilEH0QEGueJnIiw9wYYV/QEAXzwmrQIa6ONNcKoFBEoQhJpWrmN0VopDUtuziDCwXFwrZaEERQhAoFSIGi18q5RRC54q1XjPQgCShO8IGrghr3zLjjPiES0qLyHxuoACJ69BDZKZ1nKwoDoQkAAVFBXDREpCkFC26JmtG7XEIwy1MonBRvnRQQ4sHCQQEZrrRShNrZ9UJh25UA4iqwII3HwgRBa+uOD10jGWhZu44mk9VwKrFZDFAhxEK0JFYXgWTjICokTwtI+Roi0zBXnNnkaGASIAgcUUWSCCLIopRAwcGARZqdBE7W3PFak2HsUQEIG0CZqNUCkNAKzhOAdrW7RRFoZ2yKf0FaSsrDzgQMhNM41nlvfWYu3PLBwaDj4xnNrKQ7MLJpIGR3HkULxgZcPTkAkCcEJgtIKlQ6BfQClwGgdRSaLbKS01pqItFIiDMxaG2YIwD6wVaSQQgiEBNya/LDx7Rsdk0JfB0QIIXhmBFSaOLAPrATIGM/cBPbMddO0EK29CqI6nN67+8STHziZzAHT4BrCgGi8lzSOQhO44dIxKWgUN3UzKysRqgjWh8Na2Rt39ytpTg9m86I4Km995UtPp1c/XM+m0fbmW29999pOZ/fN669951VOk93Z/Y8+339wevv6yze4U/6Xf+Vbsshthpff9fSv/tKXfzy+Uk6P3veD77t+rv/lr71s1MZiXs+OTweDTi+Oi/G0Pq3H9SztYjexJ8d1AAoa5iMZpFqX7kw3iiJnoAKGsx02VqceYsC1iHfOhkevwMYl1XuyZ55OYNv4bk+gB8lZAGaYBE8G+gQZ6jjQBoWAYY5cMYihU7YZA9ZiSIaWzpCrDeTa6MXkLdVBrcnQ2Xy83x+6QB5gxjEgcA1VBmseOn/jP/7H335pmOqrVbX5o/+P/6B3rn/5QvN7X/733tq992+/+t3C73TW+utPbslrLx/ePW2cvPf5S8f7e5eeex/oBMAjWm1sAKegVBQ+/VPv++xPfeR//O//dne99yM//dmbr9zoDaO6mj75rsezLEZ4t4COgBDY8ynROoEFePre8Xe+9eo8sptrncHO5pCnb7z7Kf4L/+sfC/BCA2+aLtinMt/UlYBNz9SwwdBh2MJ1JjgWGG38sPnwuxqYOazCi6/AnVtusJP/5M9d+uBPf/g3/6/f+cYr4V/+3mg0j2unCTUC+AYDZUWNLxzzC6/M659/8coOnt3ERy/DJz9
x+fIzKu1OYL3nQv3g9qv/zn+6lZ6P/u7P3/q91+NLl+xbt0ZXn+hc/x39O1/Y//iH1Lvfsz3+3uwDH3vql3/+64uc06zesMhe5kWRF5NhoqeHu4tiz9oOQ2+zlxzsvolYDjaiy49euv36/vj67RvzcO097/vkH/zRf/m//ZNrc6FOxmF+8/5Jb9j/4Hsen4H/5S++AQAKm9Hx/XxS5U3dVxe1snEyAKsXzEoooBtNZybt5VXuq/rk5DQb+Olozyjob61NRrPEqEE3jVPKeiky1lUexwkpEmRBzvpZOS/Heyd5XY3z/OKVS2U1d8CPXDsPlWK16Az1ztlr33nhhUtnN7Nu2jRu60zvYFSXda0C3b176BoMXoqmIW17na6q56T15GCftera/oWdtb2T8f39W8MhXr5wNbhaNJSucYVPMlG+EA5GR0GcNhqEqqZBEgjKe+p3hvPJQfBNN+symEWV11WlCbIk+f+T9d/Rkl33fSf6++10UuVbN3bfjkCjG2gABAkmMYkUKUqkqGTJVrJlezSWPc6WRiPNG8thnP0ka8aWbOvJsyQrmZIpyZJIMQeRBECQyGgAncPtvrly1Ul779/v/XGquqH3utdCY/W6feqcfVLtz/4GnSTWUr1VH4/vlLNiZe1oJv3u7d3BqK9q5k7/1sqp7luTN/+lv/H3Xnnh+V/76MfoYGf5YKR93mjp053GF774ByfPrr77XR+88fKzw1vXfVHeeW04y91Ks3b23Llnnn55ZaPTaaw++sADT3z+C5NpEQTZ6tGVw53xeGC3Di586EPfu9fP3vzedxTpeHv7wPIgMq3v/o6POMkzXT759CtBjF976nPn3vzecw8cEdNBZ2PpzvXDpN44uXnu03/0+w+94b1h004G4zS3eV4GQQDJrLNytNVsDQfT6WSowlAFYRQE/d3dfOrQpt3uujDqYO8wTBSGil3RPyzb7a6sMyD2e/l9b3z3ytlzT338E/07F+9/qEmc2CiS0rMUKEUxGvmSolL0trbe/e7He73tRx99aOfGznQ0btQa3pbjwazRabSWm9kkm/Z3ptOx87NaoxGbANggOiWU86XA9Oy5Y5cvPXX79mUF9sixzWE62b6589Ab3tpZ6bz56BuG29uyVliTlWCYweogneLBjQPTFde2Xjt78tif/PHX+sNCF/5ko7vRaS8/cv/Nw+0Xn3+x3083Ws0Pve2BW4eTAad6rfXF528G//UzJ99+snoXrK+0+r3e+uZSNhlHoZkJwEbMuhQeo0T7STrqzXgCICBH50sC1j4TJhBGq0lvEkWyuZw4V5YFS+ml8L4sJZsoqdeCRhwv+2I6tTMHeRKSCWtlLq1wKTswWivhy9KWpdIYJqbwrigyhUGtEYMmV5AG9lkqUUmQ0kMjrklRKCUYhSVFToyyjAXYKulVG601oxQsywk6pLTMx0UplM5mTgA75pw8CpRCFN6SEELIUIc5lY6wtrTUTVYmg11JBVIplNCBJAbJMg5MFAWBNkYbKSQLYGZpDKEWMiq5nM1SYWfg3dJ6bXWlEbea5OykP0ERTA4Phxk5xQg+T7MyzbLxAPCOR6njWhCFzfZSXEtqtXqhdJnrbEz92ViWvpEkwxnpyDOX+WiW7Q4k3mKGaUplYT2W1mVCaZAaQCBTaEJAAQKAPHnvyTKCEFqKqmAlYARAGSjlPM/SoqDCSx+FhQ6D3mwsqqZmpUvhlAoDHYOHulQCUaInIk/sfWmtE0qjUBIFIARBQCBAKUYCEkWahWFcj0OpDHgSyAToPQGjEJi7QgAHRkdJJgMUaNmWmfA+iC1wPrVcOARyEzPueyp7tkBAVEJYZwZDl1shtR+nZRT4UPvpjKJIoYr2ptM4EchOWauFjkN9mFtHLDN2U0dSkiwgHCdYChllWeEE1LrNaW8UxaL0wT2gA/fUF3MEcw/uzJNzgAEq1TzMIcyctsxFQRXzuIc5ELFKrb5rk4K74UhwF73APGFGVIod+jNABQBRLkQldzVMdx1FFQcRC5CFc4yD1dfmynS2UAUt4kHn4Z+IPM9DZkBCMY+zqfKPkcUCVFTiKa70O+BpjtSQge/2Bd/lDrxQ18wzqaukCASBc0GQmDOYinlVYiyac5UF78A5FKmMebQAYLDYxuL3nFvBItyJsTpoRgaBxISLmGioBmpeT19Jhyr50kI1VMGpOcSbq5ywKojmed75HBotQqGwShSab6LS58wlOnOY9zoLWRUDVdVIM8zzyxejvzjIasoHDCAqex/OzY9zKyQDkGdGvguEqlFYZF1VaG/e6lYFdCzkb1UV9WKQiHCRczRXU70eFYWSXvrSVzbPP/zU15/V2WzlyJJ1HEXJkY37BDbe9OCb2svBSxcv3t4dnVheXttYv3zpSlDHteWVjhBSwlKz4bnm0wKFGx3uLW/e7108G02tmHQaEaOWgqJmWECuUZ060la57mtNeXbz8pYlKPKi02pMR8PBJD9x6ui5hx9ws8mR2qYtyyQOZ+PZ1muHKvQnTh598YWnNo+dqnVkWE9cqlfNsWFxOy/6QDLGoFavDXtZP6Vt5VZaDsQkiNvTYTYtbbelRyVnI5cWquyV4145HI+jcNxtNaAk8sH2nUEscTzLtAqLARfjvHd40Grq4WgYhaHzLitGKLWWyJYHB6PSukTKeqfebHfKLJtMUh1F+8P86edu1aOk1SY0QrJu1o/s7uYvvXBTKi21AKWYpIC4zP2pY833PDL57m+hJLweR0uuOHL7tWGvf7X7wLkgcqUOZ4e9vdfwF375hgqW3veO49aPC+4pVRoDtUaYNGvT0uW7s0l/tHxfVwrRXW7sFn0PNBmndpqFUTgaTzxAaIwhmk7SsA4xCMXQm06mw0MQEpRo1JfSbLhxdJlK6GchS2+nmRRaKzh++ngYQJqNNNrIRERQFDOjdWgEQBLozuamyYoeeZHNbBiFehXH/bFk47kwMsz90Ptet7Ns2QaxZmYVYmscDvb7j91/397uLggO4mj7TpFqbni/1IySOHQk6o2NMGoLICGckWp1dRNEYt2kWddBvFLisNWJnKPJCIhyz2Jrb3tz8+Te/kESC+eGaTpByEfTg0Qtz7JRmtrZ6EDrKG7Uerf3iyKyk0Ilre07QySVjmazaM53p7McAT05FOh8dXcDMXnvGSHGUAFmzmmpPZEjYmAlhVGSmABACaWEKp0VRgBCVZagQDADEuU+DYwRAhRKgXNzqSMqnFNCSCmc9+BZCA60BiEFIqOUKDwRImqtnbNSKKkUolKalZQMkFufOZ+XJYH3zlfx+kQkBLOTWikEkEJqpSUKBKw0MwjA3kslAVEIlBgEpmpYYwYmRwBIbEvvAmmICBBQgGRE1FoqLaRWgZCSmGme8M9KKectIkpgRqqUR1IIKQWiIPIChBTIwNazQCQUnr1UiogteS3QEwMQE4v5pKryt0nvvZRKCFEVmFGFzJkRhZRGIIK3AoQQCMTM3lNFaXy1cFLavGoiAwayBVW9Y0IgCgbQQaiVAhQCkV0ptCJi663WptoYMBdl6axjJK109QonREdUHb
MJeAxN4zV0OUMHipFKYehUEppUlZ55hZacJqmugcA3v2CGK9RwRUyrOzzhISaUUm1EHovPPO59ZrIhAAYa1mzzFYWRFBnPfOuh0NMRzYv7uRBelaxsO09BKUZnujeGJ6Cbay5nL76N59VG++8+2v//wnP3f+yuNHDvTuffmRuctrH/nMpzbOXwM0vYZ9830H/uHrZ73RkuO5s+Pt6XqtphtaH9x7c837DW406763Z/TCiSGo4F/81FvOPvHZ6bnJlcsOwW1e3Y4ng3f90K3f8zM/JkARuJ/9tz/7qU+vR0AXlcqm+fPHT/zED99+5wN3Dq49WVshwpRYCbkSFMBSALacTKNmy5alIlA6YqkZDMnrQLXWhhuXTuHHP/bZy88NprCiatEkVVkKjfryTfcefsXLjg42NjYun14dXto8tb08P2/zwtXMYycuf9+3/5OvP3fmyKsexN9//9c+8YQ35TAr20vNfMQ33nT3F7/4xde89XDh5aHPf7VM+PDN+5gLV5o3vOYdD5u1M+tn05HLfYGawHkPXkTKIjUKCH0UB0YrAwKozWTS1Wu/9F9uePC7duf9z4atqbOJAgAGHdYZQlRo1cX3/eKR//nrV85faU5y3NZ70ouNbt77Vz/4Y9/9Y9/9P9u/9/H3/8O3v+l7v+/ue7/5tW/7R//6vVE4oVrn1IkNyoqXvfbYmYH6w9/+7TC7/I/+4ddH9Pof+/e/9N7/9TPvesdb43u6D/3vv/nff/CVT3zkTxTm2J3vPnziW199/4XTyY//2//293//J4P1y0nuD918oy/F09iBr8dBUbgrpy+Ita1eZ2XvyurqVsShs/3VZzb+zROff/cT93/fu7+tvdTWmEC9DcgMGx42ARoeXoHmKPh6FGQ+HbEesel56GodG5OU/dPsHLi2Xy+bPLhrrz7w6u7+l+1vH3Cq50mpe9/SVeHBr35w/QN/e3m0PRzUABIucuMkQI9MGkUVJVpvu63owdcduu3GGx957OJnH74YRIEtYMR85MBdh267KXBPfeJzz47G6ULYc6Z7lTTITGGqtfaKO/ONzaxIp9nBPZ1RVjx/eQyIriwbqLemk4V2eNsty1G9NhzkIeChA3Pnz2/O7T/Yihpr566s99vD1E4yP0iaD3+N0dLh5TnlKQjieH7fI8fPxaiz9e29+w9tQO4E3vjgrfSl40/nm1UVpSa6tp599Ym1V90SuNLmXjHQNCm0AhQKgqAQtnnKHigw09EUFezfvfvpv0+mOeTJMAhxUvr1/pW1zektR7ZqTbp0+elve/CO3Ytbu+ZX1i8MH7z1zq88dtzFMinK8VSIsd0u5zRP0lwzPfKZL3YXmku33bb7hhsuPntq88I1sXrt6vDgLXu4zK5d3S7HaVokWrgZtxPPU+EYyuW5Tj5OolY7isO5xdbq6qDW7HkK0twt93SEEgZqWvKLJ86A0OkXrtVOHrdpZi36zem2g2Fqk2FxZO/CIBvqkNrN2Ci1eWngVZAVxXpTX9zePHrTDbv3dTdHdOzQjVvrIw24emkrimT16rXYHC1Lb7fLYHF+e7vf6jaarbizUivWizIrDFNLt0x7mbMkMHGvu6hCpcJF5rIspN1dmJvfszrYiGIoJ8nq+gAXwrnFuWlmgRygtObrX//8l+9+7T1xZM6cevHicydqGjfX1m+586ZLq88t7Dk67mdE0Xyz5YqpslNEW7J3jOkoBVRpy4dxXGTOBPWFzsLRY3dMEj5+6jSa7v4bb0+d6/W6q1e2dx1tjZ2/fK3fQjUdjOZ2BXPtsOWXH/3CPxy98Qbtp88+/uUiL7b748Hq9vpwc1+nvrrRt4mszLUfePmx4eSUs4MLZ0/Xa+1IyQtPfHGp01UlO6Qk8+HcwmB9vQ4RAKOm3vxiFMdZUTZqDUVBkqTr19ZH08jrOcYrs2WDejcKuulgjej8A69sruyNQFxgUHcDrTgd+3On7OULAdswTwtiLaV3vvDACpABXWUeF0FQgoAELMLsxTMQUjVrBkFCZslSlyV2fevsIyfONuOQWU0zi9bFIQWatCFrfZK7aSmOpSp2srZEEGQGR4XlNHOb41wjPHl6qx4+ZxS26/F8o66Qg1i3Ekcb07lu2QziZqPZaDb7021LjhGycloPona9XWa5mBDYCecmMIoUl1yvxyWb3HJglDbigZMsKwVZkLTZnkzXRsnqcKRU1VQrjp0tysBoAtWIa6DAhEEQaWQhwFDHiD51hfdOISKCMVT1gdQakXMWGMKAgjAwQay0trYkoEArCK2VAFGVzhlRRoVhZJiFHYdBWPpSkEEDCYWAnlkBaKVa7bZ17D2HcayQsqwgZZz19Sh2Lo9V4ZybTsacF0ZhkU+T0uZFXotC7yVjRjJxEMTKN5SNrS3H40iJEk+eawFGaOuhVkEM4pVWw2E6yaXhfFqUJeik9EMHzJR5aNRMLQ40S7MZO9LDSaLZ9+px6ctQgThGButEKYjDoGF0TfFyqJ2OVwepURFnSrIIhbKEwZqAoRcFRikg32qHjqHWqhei4w4pqRWuRJQ6w6IGnOQ282CBNYXNcN+eHptxCbm1hSd0qEg5QB9QgIJEVBTGeXGWi6IM1GySfD1ceebcYUAQVCjM1T8zckUmqkc8qUhHVfhS0QOYJdRUtqOq2f26/GaHzshMZoTX82MQqitkRjtmBq+dSfwsDRqBqufs6hGu0pzMJDXIOyqlWXhPpWefactnFIwFSKrgZwSsonxwFslM18U/MutPYy8ASKhYeEaqZMdC9pJUavamMxwhlUAFZzIfoOoIIFIlIRJgrOxsqCo3WLWfPCMmuFM4tkPhqpvKTGwlFemoDFRIOyY4UgJEM5xWGbYqcDXb9VmNGqMAI4IA79SdcRVTC1XK+OzngKVaeK0+acaq2owEZhFOMy/c7MPBmcZ/R6g1E3ztWA939m6Gg77BCsGzdrXZK8I3ePQqYESz2AjYkRLtKKRmx6f6pGcZRyLC142NFdJjqeK24LpQCmd6JdyptJspyaqSp/8XFRUppaMkiiQIa/3+lai7srDnlsEkrcXzYWuQ56PuXFSLDggwmkxhs91Z2JgMrUDkefXCVl6Om3ONNE9rzYaYshaSiHKZD8LI+8mB3XtuP3rws1/99OE9e+JyeuzA/DRye3a1Tz56qSbTO25a7jW7GW7e+rI7h6N+v7+9vNAaX5qaBVzc23EK6p1ob3M3gVxZu5LYQeam037A3keUgwHrcLC1DeLHU4yYfVEePHhDP006dYuSas5uvmnf/LI5guHKTXsm1/pz8+3htfONRqO5uLC9Ne0s7Olv9K2VrEzarTaiiEdgUSFiQAzQbjbGW+PBMCMlaVE4kUiMjaUWKVskZe4arXbDRKW33rqlTkNK7EXTkbdESoFUk8+08J/52kXVPbC+Sr//4UeScmEKc5CMULJaQxByEvYi6MzTz8ILV0ZnL3rfGE2cNcIHQu0bo/ou+qvP/v0F1xFn69oXo81xin/08XPPvDBPwS5WneHq9MrGZFAIUS2Gx/u5BtcAlZ4Ywk/87Rr+1ejYkvzQK1YO3bD
cXtwuJutRW/FRvX8JP/fkcCPvv/zlve1tOL/Z//0PP/Ly++65/7alZqsYDTfCek3ITrang8EoCuv1Wr3RbAkW/cm0WavlpVJUQ2pGMU6n22kxKdJC67DZWR5sjwZZPQqct2k+7XfanSiOR/2SO+HWYH0wSebmms2AoHDf/63f838//tQYR5YnQW0FZEsbTouEvbO29OKZKY5aSYmxMUk+4bJMivHc3LywnW4P5tvLnE1LisQJBzLYvBBSGhIFKLbg1FnOR74sl1bm+xv9RqM3GI2tt9N01GjGKWMURWvXtmZDggBpDSJFWbAXQtRaefbIaIwhQEF2ZQngCUkAXFmyFI5ZI7oSw5ndqUq/Zq2UdQ4QozCimRISvbXsxXrWBEZrIgrDWJFhdt55TaiMLp0LwtALlNaxr1K6NYCIF0UGARWpasWDkZiBgESYpRL6kCIdhFGVri8CleWq9BaRAJQCJcDWltUd07MXKEkFgQ5AMCDtuHDeOedZKSIkMlUgm/eOnTUmJCDrrHcWQEhVA4AiAiflzrAgIKCIvDARaNLMM7Gq9/66D1lQSCkA9OKFWcSKVjjzK6MAK0XiOTABKR2YUJHSWrNnUkopTUQs7LyrOh2ZhVAEwHvWpL3zCE7EMXPpLTMb0uBFaQ1AKAwi7BwzO7FaB1oZBAAS6ywiVbWcBMTiKmYkIkRKgNmLVloTCGpC9M5VytZqicYxMICiyvwLznsUISIHfqYCFhYAFvbCLMSWjSZg79kLsyKFIoCsFTGLs1aRElHeewQgAmsdVhW2wkopo8VZp4iMUsTeOlHakNJRZR0Fsc465wHAs1ekFOlKO+aFAUlEFJEX8N67HVXRnl1zdx65fxGXV8dXz1x4IYnKG2+Y3yhtuCInnnzu6lMnl5vm/R/6gLVlauGpZz8xtLc88qXHNta9ipuFLTEO3/ezH21TKxlzMrWNmmo2hKCcbvov/c1Dx3bvufnVb379m1/2R7/wn0NytXzE2+ddGO6u1ZIzl/1gesgN3vEjN33rT78qhZMO8hz6B/aEm5f6C3PNaZm5nH2kfut/PZFg/Vu//9YQU59emoym0ZwlimOZf/J/Pf7Jj135qT/+oWzsP/MPX1m87c5PfuThwaDcs7Rf9/ZtbiW7b3zdqauXElWs8pk3vO1NF9fW73rjrf/oh7770snTf/kn/+eeN99zz2vu/vuPfPHFFy+87Yff8ezTj8nW8NEvPvrO1//A/oUDv/Yf/+P83k5a+qQEw8o7Ky79sR95+794z7+sma5oV9RzZWDt2jUCRlr9n7/+L4IwiDr1ECGZpkGoFCBpnCZjp4IwUN6R1iZwJKP+Hl2+5127v+VH7/KdJwEuYJg4jkywOB6Mo/oe7+uSlaahCLLb3qBe/4T+i48Uu5aX9t6658wTj//rn/mx19//pv7zLx7tHTq89+Dc0eVVPR6YpVu/6S37j5Uf/egjsfhg7PfUV6zbPH756bCpfu7n/tvB1gN/8j/+4MA9MM7lz7/4D+dOnP3dX/y53ftA9xqXX7z2yp+4n6Dsb68de9V9v/Zbv7lQD1d291I7XVsfIXlvMXFFY6lBEaK1wvDimYsKjZ2WWptmLWba+6cfOfWBjzzRqfulujt672Kvm912ZHLs3k5t360Geh4KkpFgqZoj0AMAyYtt8aVEATVqnJRidO0A3P+qFaAWAFnOiFNUVEy2eTo1LrnrQXPfNx+8fHattdK5fC7/9AfXz6/J2XOYFRrIGDLsIbflc89dObbrhu942zf/4fv/mw5DpRmM+cM//p1dTXPv0dr83Hxd4/lzl66O0m4rWlhoVVfBXCuszYcvnusHeUihth4UUycIUWHYCHq1MArrNxxYLgqsNXvL7Zrzomvyum+5+dGvn9Ms+26+4dZG+MG//VKrWz9y+/K9R2556LOPDJNNR3L2ynjz/6Pqv8Nly676XHiMMedcoXLtHE5OnbPUrSyQWkgiS2ST8QdcTDAXB+CCrwGDTbBlY4wNxmDABhuBhEEICQUktdRJ6pz7pH3OPmfnXblqpTnnGN8fq+q0rh49eqTdW7tW1apateY7399vvPQ0oBfN1SB8/oXNOAIJ9KXzl8+szz21sdMrBADCyACHf/dY/2TbtJqGCRUhkhIAQ2GRZQ6IiKKICptHIbGoJ1+J//bJ/TiIIrIgogmFcL8zyEYvn1iKb2p13/9dZ/PW0UlePXPHntfDH/7e9zz0l3/3wjNpAb7boSzz25f24ka8sjSX5dBcWFVe71+4wuPOwlI9Pbr4xS+9NLnwispkMvDeuIW15mDn4HB3c5CNnKfeKN2wO+dOLMWVoDsc97KsIFNZWz1+/Nadi69WgyKsmrn5ajThbDISgJOmkk2GQxgBq4Od4fq5lSAIwhO1fr8/sOkdp1fmF2pGTOHQVJtbe9fTgT2+trCwsrazeTXm/NTxY+nWS7pau7KxvXqmHVXjQW+U9pJ+Z1x1UGlV8/HIDnmSjIfJRJt4dzCYay3WalFcCfqH+5VGNR1PQDkEmPRGo+5wb/tqpV6zA5bMLVSr559//MzNtyKFMCmScSI2XT+2lE2GnYORBL45vyiuMPVsY+NKf7+ji7qKG7qCsY77nU6ejRDZO6jUwoWVRes4G2eucM5Ks9JGjK5c3Bxk7v63v2Vg2We5zp13lBauO0mqjTBuVPLReO1U69KLTx87ujreTZpVOH3LkecfO+zs9tNkvFA3D9x/57MvvVwL+ebFaP5cc+3k0VyuR1UpJs7GzJjW5tUkz9oLy83K6vNPXRr2D/dSaxpwsHPdu8Edd97R7441VYh9MhzYwosvFtqVLM9WGm267fSfP30VAETU9euHTRncelv12Kl61AiKNJS00DFVsVqb1/3J9sHIjwoXaCqcFSKPpJVi551HjzcaM8gYxQDsWVhAzcZbTwWBMgeBpEgYilx61jqxtvChAZtzPuJS7PXOIQGpskZk2txaLgRJowAyS8FAhOPCM3M/tdc6Y+9BB4rUgSt8GFI1UKHSc/ML1cbcwvxiJWhE7fYgH48LXlleixvNahB09q/2ugee81a9FsWhpfpgVGTjTjoZ6sgUbmJ7g9x6DPTEclrYMBQi0Fon44zFa02OpRapZjOu1CLP4C1rQs+S5AmDKI2hjn3kEbw2SpEiJK2D8WRCCAKkA+PFE1AUV5wqknEShwq8VkFsha0V4jxnx8670SiwCMDaKNCxiNco6J31GARGKa0VASkhIlTNSi3PvQ4lrsRsx6p7OJm4WlUZY8aZyzMXONGgJbWg0Iq1IAROW3aFLtiLs0lmkdAJiFeT3PeH1gSC7BlAK6hFQTtUjh0DjXK7O/F9RpVYrTlEt9RuVOOqA7UcxpP+gKFw4l3hUucrccQRiEOJIdQkietuOA41p2EQxHEr6CeZiSparJAxpOOgikRIFBpls0maZy6IG4stFHCd3QhUrRqyt3M+SCPtNSCiNqrb76FJILBRPYiqsWUsLEzypB4FhjUUMBlnRVHYwru0kGCGikCJMMhU7kDisoNnygPK5TnB1MvBEgKU3vaXSScyszamJk/pfM
Br98OAwDBVSvBGFZKw8I15ajjDC6ViLgAg6gZJUqBmSAJLpqWmUEgIqawpRSx3vadsiWGanBMELrHWTARCLDHOlMgIIAAJwFRFgRtsY8aSAMoWZhEu42kzOWf2SwAIUG6cl5hqlgkDRCpzWF/mCL02Qo5miKMsJxIpu6OmdA4QAUt6BzPJh2aOTwk8pgdSnkOYSVTlCLYbZKfs5CaFZcM04Q0haFpjPYN30x/ADf4yZVtTKCQzeDYL4eENvDR9fsKzMqvXzvxUNSv/K80KneC1/xR4DSfN0o0lZZsRxxvPZCadlZxpKjZNEdW0kWp22QUQYBGCGy+pvHZYX9ZK8RoqSjOO5xZN3JQsabQWVLTirGXfI6Pq7Xo6HirlbCj1xZPZuOh3J/3x6MjRW5L+dkRu0B9rT+yi1twqY9Hp79XrDR1Ec3NNz+balQtHlppHF+dfPn/1lgfPfO6hzX7KXlGMkcpd0xfvffAtDz38lJ8cnjq28vRnH6/WqRhmJo49hlFAcTO63N0xJpwMB4e9vbXT84kU1dgkyfigszs3f7IdR/3kMKya+dXGlQubi/XFl7deGvtIiUsmgze8+c7PPfrCYGdw0/L83uWdoptzo3b07DEW0nNzUk96nWEvOzh39ub+cHjQK4JKJQhRqXx+vrnbGSN4P86DQOJ2gw6S8XCkjG7W61cuXT2zvmCUKIHQkBKEQg+zrDUfhE0ag7qyO/C5+IJFk8294uDxK52zi+sLunf1wnmTzR2v895OzuXHSgRtCtowwW/9xRN3mmLOBKfi3h0nIYpJ9YeVBoxzMFG1GOQAnEwyxeJyfWWDr1zZz/JREPUC7kVaURBnRcZcVCumZmCcJ/ceO329Z310/Jnr6U/8QXcw2ZxrZ289M3f/LdWz51ZPrna+qjqsUdqoDG+5+8zhAD72JfunH38osPecPNZShFk+HmXZeMJR1IiiShBHKqQiR3HiLAeIJgqzbAjibTYRdEEgYewO9ztZ6vrjPASHYJdWFvKkkCIJq2h09fDiXhSEkYZWsxE2eaD71Ng7suR2djJEs7R0hDm7en0jKwrnfKWiiTQzpm4Sa8nzTAHXVRABD2zh3STLu3NzlXQ8zsZjMUWRHjYbte6go4IwMzpoNpCj0USKMRoT9SfJJGcVtyr1Zn8wHCe9qIBGuz77mAl7DwJalXv5AsyhVqTQmMBZn2SF1soDZkVhCyfMhGCiyLM3gbHAYn3hWcSHQUhEhgIA0YoUkFYBAGRZzl4AQJEyJjQaQUTYgYCwA9CFzUmT94V17AoL4nUQak3CDJ6QyipqICKiaVzOe2Yv1vpyXiahUSAKWRisLfLCAQAKkiYAss4TakWmLJZmD4BaQECJQYVILGiLcvoYGxUpZYQF0RfiGST3FrwwA3tmZqWIFAmI5xJUgVJknUMQJcTsFQIJsUhhvbWOQZxzwE7raQcWO1fONfMMGvQMuKNWSpEKQ1LGQPmcRYRZK2XZk2jHrBCV1t7mnoFAFdaysAh48uKdZw8gCstsczkllBBIoQIix4XzDFhOOih3KAiRjEZmcGJL35hBxHsEUgpK40lRSIQoJMJAqJURYes8lRs5wuV00SnpF4+IAuw8A6F3XHYAIoDSKABl0TjRDXcZEFErbb0IitLKKK2UApDcOhFk8SIg4IkAUZCwxG3lHYFCDKNAo4l1WKIpjZSLy51F5iiIAoWKiMUDAgs771k4y62wL88CABxO9h5/5pFGffF1x84dXnuWcNKoN7UOJ+PuTS2do6s09ZWtLmguvOnuXbswSPIx5FklHdpqFHWv5bpi0gbqENdWG2//mlv/7v987CvufsBdR7uTnjxz4uH/+cGL7ezOZnTXnbe0j2K4vL97bTvp+uN6dNPrq9/8Cz/UOLXSgacRMoFxDNBcnTN6mKSQZlKLK0i631f/8T8++tGPfP7nf/TOGo4Wzx6Jmgu2Cx/6o2c+9r/ynd7q59/ziYW1+Z/8p7/WWjn51e9rPPXpz/Yn4539yfmrF973D7/jzpsOwhZVK/Ha+trFjZ2/fvhz2eEQ+3l16H7jx35mpT5/y/HTeJrf9zXfcOXpV9Kid/ucuvMkfDHwL1zGvC1WCSkxoXcTaVfjdH/3l//lP51br2eJLfLCCkJmUUEcB3GjQsR2klnH2ihFmGQWC64GEWiYa1XFEfhhbPa/89vnT6z7r/yuBTCvJNm2ggq4WKyWFEIK2E6CoG5N5G3fG2DSX/PN93z4g1uDzuFLX9wYH/aiVvsjH/7Ug+9803f9o2/724/+6faLhy9c3X5l8/C7/9mPnzvdSuj0zUvHvuE9r/+2B79z9+DlYDXuHKZ/+zePV/GlH2mMbjr5uv7lzZ/84a//41//vZfOP/Qff/Nff/zzf//i+Vf/8Pf+9Nvf+5aF1eWPfvxvXv/6s3feOvf0Iw8XpDc63RNnV81c9dIrl0iFK6uLzLy1te1ttji/uLJ4/JWNC0IAXqM0c6nsJLQ/SZ/8qKBW9Ur8te+p/vTPvxkyQEEfxU7XDCiB8z4fa6jnNmQAE2lbDLNCgqUjnurMORIhOQbPUOX6SlgHGQ8NZWD3WktJ62gY1/jcnXcixI9//OC//9HGteuu16sCSlCPL+25P//oE9//XefuvWnx0t7gsJc2GhxH8dj6sYvcMB93vMY0S4aTwjdaU6uIyCe97q1Hm2lRnN/vU2DE8nxNdfsHJ1aOtOIAuIh0NdDtnavjMXWSlKury+315dvuOlpvN7e3N7/40nNz63F1fnl+/URqjD69Ph/PQws2Pzdp9ZI4rl642iGyr3vzre1KuHlxc7jfH433NUJQ3ro7JzlevQovnZf1FWnVXWAQ0YNAALk4KBiiGCRJXQamFl8dxP/iDy7WGwtF0hPxQigihihJvQhf606+5yubFzY/HcJxqZ8O5XxA4/3Lj996kzm2NP/Lv/7564P5u++9I7R53Kyu3nVT8/Z7Vtbnu5uDuMiUlyzJnEhzsY1Bba+7vZuMc6GC/LjTjTyLtcQuMjrQumB90M96vUHUaudQbJzfUK543b2nPfvL10aY6GQ4Jh3Um7XFMyc2r+w0CIeHh9W5Wrtdw9TNL4Wfu9aTnPOeG5hxFOtqq1Vbbl+ZJNe2+rUwvvDFS6vrc89c2fk/f/WRr37LLXY8WZhvz821akdP//6ffvRNd94By9X/9KEP/cD7vqEOE/Z6eX5l0u0fXTxu0Fkrlaqx41FzqYYYdHc7BadB3WCM8621yShzOYNYcen8anTT3OmHPv6RI+snlxeXavX5hbWVZ//iSWd9Z2/73MnTF158/tZbbhX2k8k4TexhZ+/4beu93nA8SUHSsOIFsBg54SgH0EEIRRFGFRP73d3dM+ceWGwdq+V9VFnS6SWD8XB3FMZNRgw5lWR87dImeR+q3Dh7cPVanhX1MNi68OSku604ef09N2++eH60dz2UYrVR+/offPeF514ajQ+6B73jN51qLC89//Szy6fWJp1rPsu2N7fcenzq9uM2wYMnnm1RPWjU2UE6Zg0uHQya7eW9zqFCbTlfXmz3+9YLx+F0lbzV2
TuysHhz295zV9XozHNLOMaI2WAQcTwSywnoECwVzltrlVaIirFMYgugAgT2rEpBQUQBWPZEqlyHyHSVXX5tC5fb2EiFdUopjeycoGCglQdhL4TknAdmVIpFcFrDQd6zABACorip9DwVEBwLEuWFQwRmYeAkzVDgemeEdFUpZZSuV0IN0ojilXYjjuJG3Oh3O4Nk4NitzFmjJpnd608SLtJqNa5VqEgmhUePaCI7yQsSnguiWkxhGPalEAhSx9Zxo2rqyho/qVVq1QgDozygy9MojButJgAVDNNdGmuJkDw3a1UTBM4TgCjQQRzpWsV5xkpSHB4Ydjk7CpQKA0zYFwVSAR44SzURxuKV8c5nkgt7AHYeEJWIZiYiU9hMM7H31qEm9J6isEqour1hoHUcYhCEnFsU0Cb0So+zTJSyWS/SZAuLzsaKdESIKivcwLooDEmYCu+c84Ch8Zg6E4BhHwamGVfYFJGDaoSCwM6GWRorlRW2ooNaFHWHiQKvmRvN6qhIw0AvNWqZcMJuYikZB1miNJokd0icePHDMbkgJFttmPml6hCsiuNICWpnkZkzXeRRtREsLI12912eBtpVTLHQpow9FBKHEEbxpHCuQO10njrxijmtV9tUYL87QVYCqkyDRSYknG6ekVIASvx0SCyWE8bKZb0wkBAKKsRp6KqkHwQyjTtNzR8pB6eVfwBh1kE5Iws4FYFKTlJuPiKUQvoUisiUTpVOf2nBlwQHZiklEZ467HIDEExteQBU00TU1Em5YZ7MkADibIJMWYqEVCanyt1oAaLZr09jUCU6mUKIUtURBFFfVqZU/ghlhh5kikKQaHbjPQ1ElYNrpkrMjX+JlMXPJbUikSkTAi4NKXmNLt2QcARujEabSVXlP2VgnB0piJqBsPJhBJDIT4uBZOpJ4Wt5wdK/Kn8Vp9qRINJUz8Ibk97KTeEZdfuyMNksMzbtfJppQ6V9NKsUKh+cBcrR1CXaueEi4YynCZQNR9MDLc/XjdeNQejLKqlLLCY3yDpP05FYokVUN85jGbubkky4kS18DRXVaxVArkaVg964FjTSRIpxr6KwHnB9obV5aXcy6Hs2+Sghz/VINWv1ceeSKoreqJdDILo67kHcXNCQaM3Xu351fSkV8VIsrdZ2rl4IIu1N81OPPK2CasW47qA73Oqsz9XeeP+Rxx/+bD6kg92DZL+XpI6R96/35ldOnzp9y97FV4aDiUHD7L22reV2WK0eqy/5/hCUO3H81rnqmeF4MtrdszS8urm3sNhAn9dCWT6z0Nvan+jw2Vc3O/tjGPssHd55/4mr53e3D3fXz92cpJkt7FwtcGTazcW94XUycQJJvdJoLDfSTpLnk0rErbi6v78nSnPMQSVaXV5AsON0srZ6VIVO2CrDw7QvzEUhaeJVRUmBywv19eXmpat9QUAFiJQ5fu7y4JWdp5tzrbbKhntpJzsESFArAIUC3nMQiwANJ0Qr6uve1G7X9k/d35pMWPl4+XTdpuu/+ZkXbOEVW2BQWpvSL+SiUQfgkeOUwgCpYMgrdaOUpIP82LH5I2eWj2H1hQsH+xOv5o67YLjL/KFX8H8/MoY4X23Zm07VvvMNtUYzSfJ+HGRf9xXHq7L26POvVMLb1xaqeTpKE3Y5t+crTmFtLjzc2lUs1XaU5nlmbUvX8zyz+URrBJTBcGwy3euNbarABPu7B+tHlmxunGcQ6nb6Qc1rI0Gkdg6ura6Z4ah/fY/vPFO78NSn5pdWTpyoDDv9IMAgUkXB1blQiDr9caipEups1LdFgYFKsiwXSwTzS8ujSRxXF1nsYe9gqd2ITBVrdP3K1kJ73Vo8sto2Okq715FcLYovXduZWzrrld7u9AGp3qgPe2PA2fYBIHsGASQyRAKS57kQaa01adSktXXOWWdJkQCbQGtSpFQQx0QIwmlReO8DPd01M8oIMAESEpcjxJC1wYAirQzMGt0cO0JSWk2/G4BACL0jEaXQaFAKSGkMlACiQgUC7EuDRgAIyYtjFMceRAqbKQ2oFE/zYQiADAyoHAB6McTTTYmytch5UEiOyGhnCxbWRguImrKV6bWCCLM8LzcYQh2CB0QVKKMDw94X3hqtC1eI96UwygxKGUD05cw478u0s1aKERBQESEJGioQGUARaq0RhLQiYzQqdt5oDeU/BWLvfSmAIrIIIDnvBZzz3jk//eIUBiTvnPcOiYw2CGKQEMhPL7RgvWdh77zzTAoVEQAXPvciRoeKlACr6Xc2MRAzM7KwZ2ZSptyrIEWGAsteRDyXl2ApaYtB5bwjUuWwPAFmZsQyuCdeWBERIBJqo0lAhHPrgEUrQiLvvGPvha33pHSkFQEoIuchzawXRkSjA/bOsSMRo8kWThBBmBQREQAlhTWBQUIH4pxXSKRQAYCw9blzhdLIAIV1wiLCpDBQ09Gwh+x3OoNTR+59/TuOnuo+dv7i5eevdPb24I133rVycuW5jac3DkZbmcPISZo15uaOnDqSX8eTJ1d+/z//yX1vXNahAoCrV3qnF6t3zC+t7OM/efCbji2v04n64PK1g82Xb65ljXB87NRiY7mQKDXNtF4bvOHBha/9pXenwB52BnDewECBMPQqEP/1bz0fwhJAUq0EhXVBSFGjgt6+/HTy337P3nbulmf+7fPd4VNDCnqHsZook42XF1pveuCW+77qKwCqp89U3vi2k1s7w839rcwXVy+9fNPZc3fffaY/6itoPfy3/2vjwnNLP/Ij0Sj+B9/2f//2b/3r7/3x7/+1X/ntjqX/+ccfDqgFxbXO+OCf/D8//T0/9QvLr3tzNF/91z/5PabmFHM+dKql2otzsCJesQx8lkuzVYmawSRJQEtubaUS5lleb1aEnUJQSqEoUpwmuR10K3b4jV+l73qz/4rvaKQjLkbXLR6G7aMjiKImJ5t73O3Vz8WeDiRN2BxFXQEAC9hcb6+0rvcG40kyzDC5/4E7etvykY9++OELl7Sq1+q1H/yNH/qff/N3v/JLP/PpD/1VvsdeBr/0Lz5w6eDpCuGRxupCnK2fPvkf/+gvf6zZuPj83s/+83/7lQ/efvbcnb/4T/7Nxz/zN2fffPI933E0M/ncieazn3nis3/y51TljZe9NkXhRo1aJRvnuj3/xq/5gde//is//zf/Zf/wytJSrdeVSZJtS88ryiaZViaMdJY6ZmCMvVbNaiuZ5B/9hH35pQ//q5/9hZXTbWO7Al2Ra7bo6AgdmbC1wrwzGfZNq+XhTAHzDIwwBrAMnsi4Ig7QJ6MD6WUh6nQwri1WZULkkOopqOTt3zG3tGr/069tXriGRCGAKI9728P/8oHfqxqaExCjIWcfYGb97uGkHavRxaTTzydGx3Esalpl6lA1G612pMgNF9tRZEARtSA8sXpOCEysgePMR6Nu9pb73z7oj9723q8bWb64eXDLvUfDuh8U7trFjz/whmPHbz61cblz9PRSP1/+xCc+eurMwlq1sj9OO4fd9eWaNnpnlC6fOHvHXINHB8dWW6de2f7dj7wQGmMUthaiOIieO5R94JVBWq9K
ux46mwcUKQEvareTh9Xm7kA/8tnxk1uppRZAZkL0TN6j817EB0aRCa1jgcDyMMSN9opCHFAygtyOhi7PBvfe07r+UP7CU09+7TveSHF12MvDuEK5VKrRwsLcvk+vP3c5z/WxE2cqlWU3ySaFhiQ7v3WtYov7T60XosbJJVSwMlf11raqNWo0Ka4dXWxvvHph17mXwC8fPTm/tKrzzBZsKnowLrr9Tj6WxObNWmvhyFx32Nvb2e8kw9G40FRtNxeUCiaTVNd8r3cwHEy29g7uPLFy+wOnru7sHHqbdCfviptzNQ8+3bu6kQSjnUvXG2+6w6zddOyO28/de+/w8uMi9JFPffgHf+h7r77aJQlsPtjdHFHhg5X63l6hxzaIPYCqVsLUTyg260dOHmxvKFSjbhZEtbe/47uGve1L51+6svOx07eeW11bPXf23Mc3tieZXl29adTjfJJv7Vw7cXwuCE2RFHEY1hv1q5vXjq0eNbqFPAgD8LYYDcZJtz847M4fad3z5ls7W91nnn5VAj0aL6a5VcqwsA4jEwZJr7v56sbzz75w8903LRxZvv7SVq/XbS+vZD1HaLY3O/NrbcbYOR5m+/e8+a5Jf3T54jYDhbFbaTb6hzu9A4Cwcv7yfii+uz/pknSza6vH15sUHlupb+0c1OfmxFRfur6/3o4aAV25dsViEodR6uTytgdG79LROC8/BVU3PD6nX/cGnD9eiDFpobRopZVzeZHBZAQUNhw6bcAWTEoLkGcRL8BsSAsqAFFG06wb1zpPpJins5UU0nT0z3RdC8yMCkJNioSRnPXlghxZkBkAFAAQiUwjP9NRSkRAgILeOZiuUwUEvWeajb0WECRwjqdLKhIS9oWzqkjziXcSGHVxa7sax85JnltllGdQ1w61Qm+5vCMMwpCEEZiU9gBxhJGBowuNdiMCYmNkZbHGzo0nmXdcqQD6tKJiDTlYB6gDHSgFIXrgJIprNinYeq2o2mwUuUUAUqSVVorEQbUxF9dqhSaDFNfaiUtzBAusazUnVKiRHWbeee8FyInHYpJbJEFwbmxUpLX23mV5wYjsHUJCYL1NwyBgNLn3hNozgfg4wNxSoMCyRHEEDKRDQF0PKpbZGWT0jBSBDcAT2tAEESsvgKTYuzzzFAReXCronBjGOKhkBR4MUxEAoyOlE+uU1ta5UTISkFGSukKccGYdEnKRKxEA6OfWMhdIFEaTcTYqfKS4YLY+U9qEpLVSVYWBd+l4rKuB5SJADkNIkywfT/pFv9KIw1oINLGSmGocxdCswmIc5mM/GRc7hyMygRdhB9WKyp01GpQ48ROwgSjVajWW5uaGvQFnBeB084xQAUhZMDPlGGWyi6b7hbME2Yw83BgURjhdoQvMVvI4oxpTiWPqG5WLf5zOdH9NRppWE4EwYNkhAFhWGpchIkQQYQI1E2RopraUDtF0ONt07AnOfg2Rp3CkPCYqSYFMbRycmSpTu2ha1cNQbk7PTCGaOkzT7pspmpEbT2z2b4By8hi+FoSDcm0ChDCdwlZGwwClBMcIVFqHpSFTvqyOEYimBz3ViwTLKotZyQ8Q4pRyTMHalGGVjw8CyFCKNYAowFw+KCLwDJ3h9PJSwreZPSVMZd80vFYBPjWBbug4U10KpjJRWab9mmAEJSic8qTy2cnsf8LUAQMQpGnaDKcmW3mFhFJGYpn5VF/mCuHMXpq+yaYwbnoOhMu0pAjLdIAflyzwtfeaFwFEUagE1A3aOHPIpqjIOpt0O+yj1uIaWAkwrtZj5fZ39jYkWFlaPTZuLmZj2+ulcazRp4H2uWzVWzXShfFqYX6+czi5srGxtNI2lbnl5UZggslgf/9wS1k/3p3E9bXls/MVkd4guenWxbpZHHYOd9Kxzajfc5bqtaVjYX44xGy705ugq2eTQXez3grTTE4ePfORT3/oyMlVjKtRsHTlYreKcVRtRyQbVy/VF5YDXcvHedrp1nXVY54n2bi4ViTpJDP7xejs688Nrhw6l7CbjJKRUrh/jeutVubGpPK8Pwi081l29ORqU1H/cDtRwzgIkn5fh1Vs+rAaIepKGMFkpJT31hajSYpF9dhiUDVa6aw3sp4BpBYbB5h7WwV+0+3H9w+Hk4kwo0IgZu+9TfNxZzjOJpyL0uzJuSzTOhAPIAUX4r1F4rGvJkyLrdqYKgeFu3557F8unnr62tU90WCRPZISQlu4cvSduNR7B8TjwoorQCTPIclZ6/DaYTZ4YksBWpZBkTUqVA1UiEGgogRtgZCOsy8+O9rdGPybn7h3PaaGGXAw+ep3N3UU/tkXNpcXF24+Fgvr+flWrUqjzO1cuR4HSkWMmGlTjYKKDut5RoV1GbtA6U53Uqs1LTZrR9YuXDnoWK/hxNrcajEYFOIxbCpyQWTWVo/VF48dOb6YTTpbGztpwfHR5f7BUDXD4XCY51aLN4DD3qS90F5ZrGbjcSNuTiaToQeLepzZ3f7g5PqKL0y10tzdvx7WiqWFSiUIxkVeiNx8893Doe0eDg/2dlsBthca1UqiGJZbS1Dg0I9OHVm/tnERPAwHSVydoiLvvQ4MM4iTEvOGJijHF3jnrXeAwuxZWJMxxkRhBAImCBjRO8+eUZQSH2gdBbGIF0EWQkJS2tpCAIwJRDyRUkoJe5DSLRUiEhQiEiiHhRECBMYoQ6TIeQk0kVZlFZpzXiMiKRQhAUZgEVIKvGcvnrxzzjtXXrVJlVBCIZHzjoBKmCIgzjokxSIEunBMyIiiiARBxCvSirTzDhnKkukiL6znKDAeLRKEYaiIGJhIFEvZB0RKeZ5epxQq572IzQvvRQCFiAJSzqMgA/vS6AkUkjbCPjBaKUPKoCYCogAVofeSW2dBnPeAII61Cpid0YoFsyKfRu/AB8popWHaR0eklFFGxCGCZy8syhgWsd5656cqvAMPXim0znkW8GgRtFJG6WmMDhGQShNIkSnroAHAyxQrembH4p0HYe+diFdEPG3/AxYuL/XO56AIAANtCEkTESqlEJi9h1AbkZLfMaAQCJKSaQ0dG62td7ktypCxIoWAVJaUE4pAGGtrrffWWXGKTQkqPYhlFiFDhWODICieLQMKcJK6onAgYIwCBBZxdrqHdvz08qXdzae/+NfXLo7e9Y6j18dbvQOv6nDq3jsr83e6E+de2Xhib/fSsTOV/s7+eCTPX37lrfd81bvf8M33PfD23/zNXxgVSRyqxXZDEv/eB96f7D9dm+jP//EngxG3FOSjK1/z3W9snWinysYnw73u4TDI4+NHdRQlcMGDAMQKUIMV6DEUBxeCF1+ISYNHmaTWOwRyNhm36vHJsyceferw45/ZObYSWB+pRpgND9Zr/v/59fe96Xu+5vDlxx//4I8899i2t3Ptu+773JN797/t/sXFxsc/9YVPfm7xoNsLKUNWZ+ejt91+7Ku/8etX6s1GBd75E9++UTm6+rXvb2jZFw4UvvGBd//F7/9V3tn9jV/8v6Ngfr9/EDSgcBYAWsuxCcPDw6EJAECyrIhMgECjSRZFkQnROY/sowA1WFukzmNkKLU
eiNvzoSqK5QW47e7wgbdXxjsXVWXBj02RUhAo0ha5Z/p+82WO2iCR03qgmjWEpgct0KDKwvJS7YntLeszdMFnP//8u95x+yc//NBO9sTSWvP5g+EP/9JbuPiQ2ul+19u+7dxN5/70Lx4ec/QNX/+O+dVzK+2l//k/fudKN/nW7/6B2+590yc/+udv+4FvSjrj+77ljceH7rA4f+T1d63edMtf/9of/9bP/lpFyZlzdzaOzb/87JfCeIJ5jg61D0+cfOO3fv+PX7vwyiAddjvDeruZFBQqSbsdFq7UKlrrIi/COI6icP+gHwJm/ZErOLfwXDd9/7f90FJE3/DO29799e9vzLWqtWVcUirWCFZhDpU6QIukooox6Bi4CW7XuA2TpEnHKUQdqswVQb0FVGFOuKhg85glxTy0kJx7c+sf/0TwgQ9c2rjuLNSROVBmlPsz682TJ5e2dnrHbzt+KQvvvPWOVx/9zOHObpUUiy4Ed7tZbqdh5PHEgoHh/iAAaFWCdJxUqtHKXJ296jPdcfeDjebxYVapt8Oztxz3lNtK/fzTTzYWaoO0/8iHPnRt83JDyej64DA/f+XJl92VqxjG4eHovne+DY+O//4zD492O+BstV7dPX/tr1/dfdMbbikcX989AInnGmaYuiSRMFAJ0wubcmGbTs21g4DmIi+q6pzoOM7G+WRi0kI6A7s/Umx1qH2RZCxeaSoHnZhAsWCW2cRnH394/BXvW9O9jeEXt4tO2Otk49SFCwsWeGcft68mMuk50aeOr1dr8WinL728EphknHR29tl56xWmdvdgw4TxXEMfX1cfebTTGdvFvT47tjq0HroZeWaMXVCJc8Y8yWpRxYTVJ565Mr+b1Kq15Vp1+cRqUqTgTYCKldeGatWWEtjtHFgg3ZxvH1HNIG7XA6X06VPHL2xs1esNnIyaubu6cfjc5Q3v8yL0t915WtykO85tUTz/4vU33nfsx77/u7c3v5R0bbF58OjHP3eirQ+7k9Xa2ace3fVF1rm+u3J0aX5u8dKr1z734gt//+kXf/F7vjtEGXVTQ4F1A+f8Vbbsdc0EWZJl6TCp2LjWvuetX7++d+cLTz+2eiS6en3/3D33LC8vQljdePHC2trctc1nydc7u73F+bsnaTES0qqZjYrcJEJWUCkEgOTorcuPPvzi0pGWyXudK9e39vff/e3fev7ljcbSuvI23x9n4/7GCxfT4XDY2fPWvvzqpStXtxZMUKvNLa4dH+/u5Wl64vS6d0UtUEsL1VZVNyMKII/iRkG1g+1uVKv2uyOljPI8F1QuPb/74Fu+5q/+7pM26B92+6bwDzxwZmnlzPaBi9tLalJgKM565VR9ab3RMJev75owtnk26vfDYHpH9I57187eHCyeshKkIk2FMaoaeya0Ary9Pbi8kRqpITkh8prZgSHx7JU2BMjCDAyoEcV7RgStlfeMikRQmPlGtweVKZNyeTMdCSTAoKaBCiSZzZQqby8Uvra2kmmggxkJGIkFvEyX8F6YGVhAE2E5cELAew8CznulkLlcNGEBEoQ6IynQQYiOnRfRhA5IFBMwIhWuKLf52XtCGgvJhHd6eyh7yByZIFBkNFSrUbMWAWZnT6ysLi2awIi3JtDsvBdUCo0W5HHFYAGOUBMV2nj2CIzpOFUoUVzRbNG6ALQvPQQyIhAbrU2gFCZ5kGUmsyTirZ94YLZWWJOqEJCVzBMhALJVJOhSWwwLnwdakCNEZYIYTI0pMNWFSl0GfRe4lLRD7+yEXZEikZDRIDqKUragTagib63WEofkRQl7AbZ5bkg0hgA6MGHfJWnB3cz5wittCDnPMq88KGUIc8tsxRjKQGXeK2IfEAsbRQQqtz73hYrMyPksd8gcKxXqsMiZvSXRHj2IhEElQLCTPFYyGfcwosSmiQhqsdaN+6mfsHhrqmGuR4W3FhyQVVUw2sYBpnnOOThLLjEKtfPpiItWrSlxBb1o5nzcC8hV5qvOzlCR0iyeUAA0yLTRBksPRpRMx9HjDMKgCFLpowjyLHtEOI1cycxZASgnXqHgjfe20DQFVJrwgDLrVYapdSMAUsroMM2FTUEK8zTH9FoKCWdz66ddmdOYHEwBq5SxL+Eb0g9hWVREM2HoBq2aztvCWVxqauaUH94ymFYemswgSilUTZN4BNPe5LIqSAAYqDSnbiAdmUKQ12AVzH4KUupe5U56iZBmRwKzDtByag3gtD2o5CY47ceeHupsYFyZp5gZOmWOiwgAhIVu2Egyxc44VZUQSJAREHmWfuDXwl80BUglFoNSmJxWBU153yyZxrNzV/5/eXbGVYmUpg1B0z8iN3Jns2ekkKavLkv5bijfZXij1Gk6PW3WI3VDWCoBmYAweJmCKZoaRdPQo2dBmMYBhcpg2pehoqhSQcfAFnmSpIUYWw0Kb4q5xfkwXHQYqUAo6FVrab0e9A96W3vnPUx0eMR6FA+D/gEzz9Vjtm6QpJzni40mT6ztjhvzLbNICyfrb737tkf/4rH5aqWaJdc2+s2aufnsOR1GuZ6kwOfOnv3kB5/aP+jrcHB2/WjLNBtBXBQTyEfXN15Zb62dXTsh7cXx4XC+Oh8D1peCNOusn207xnOV9YPD0X4HgHhtaXnzwlU7HKtYbe0MEouh+FpVbr91td4Obr/31Pgg2+n3jp45qjOc7F9XRKiClbWjnf44GQ6jWhxEFTAQNhoIutMbxZVqnhWHnX6e5uh8sxWtVOLrW4PzW91qg9pG+dyKwlZrwWZp4jKluVqDW1u1nc7q331+AyhUmhhZa62MS9JBENe9Im3iYpIiIjvrrTdKvEtZgFE9ve2HD3XXVyT52DbkOpBwkBZX992EQdCSnoJQUCjCRFwuHb0gMhujM+tLmzWoVkJTsSmToYggXG4ZhKQ3yceZZwCw1ZCqlWhnd7zdj/7BLz3273/2q95zdxPlelYklVD2dtPnLm596aUIhO8+Ozk37yo1ddt9tz7z7MuHh6PuMJ1fPX15Z2iCBls/6g9ZY7Nidjv+pQvntdLiL/ksF4X201fFJQSemXSoUDiqaZQrYWiW51tnTh6fO3LHHffcRfMoQWF9xyFHMTXrTZfZIXB/b7i6WosaujfuhFVxh8NsXEGhxaVF02hxHHtrPapKvWXHw2Fnkluv8olNYZTySquqfIHWRJXocL/barckQM6Kqq5mrijED0c2x+DI2vIUFbFHUQKotLLWlbVfJbR1zjH7IDACEpAiQBCviDTqSMcefVFe0NAIUGB0YDR78V6EZ5U4ImUISkAJs6DDMhBFSpEpP85EEYhlcaiUxtK4RhSJTKCICNEDePYg4IW98wCotEaAMIy8MIpYKLRS5ZVHa8XTUm0FpbZDmgU8y3QwJyGR0oYK8QrJlVlklsI6HSggdN4SEily4gHK2jvFgs5DoBQhluwZUIk4BjQ6LDceCu9CDMoOuJLaI6JSyrMHBK3IsYvCaOp9ingBY0xgQqPD6YQEhPLazyhIaJ0rk1fTK5ewtZl3zrMDEEIFJZtCBSAIZfk1Ou+AnfW2rAP0hQUk5y0IKi
Lxvjy7flr05nP2gEwYWSm7Nnm2OaO0MoJivSu/SLyzFth5R0oJgmfnbOFLmChslEYURZq5VDlRKw0AiMqAQgCDyguLJ2axzgMCkfLOK4VESsqYHoAXHmc5SQ6z+Qm6tJPYlRIskS7veoWZmfPCoiJnmIBEvCajjfEg7AtmdChMkGSZeJ+muTBrrdgREiqlb3yfaKPufO89f/Ynn24erzzbPdxJrQttX4cffuyx7/+/3nH48ufqRe+OtZaKCzUf7Oxcv/PcyQubn4WgWGwcWzhSaWdUpNm4n0NE/+H3fnOeOc7cKhbHFd19x4noyGnB6+O+Nc1gcOl88+yR8NSc1WMA8JAoKAAsg1bgUhg0YPX3fuXZPJ8PaqkT1ISNVjQaphQE48Jd2NwmVlELRzYxXvHO6NsfbP70H/3YpOhMiseqyxfuqVwL9y9/8gn38F+8MICVrStXoCgWlxd0NDjerG7tHi6trG8e7l09HL/5dW/e29sDpf/odz/M48nRszcFC3WXuisvv3p4OT55bnlv+9rg2t5+9gqgqjQCZCPOW0GXe9FKyFprVYDAkmUFgPT6/ThSIB6FA1VI7gMAIB3pKDQ0meSQEDifB9FTTwzXT1aPvWHFV1fjapRe79k0ry7UbcdJoknC4Xa2dAdkFlAbgYYF5cFPrj7f2dtcW250x2qSZdzrja88881fc8sXLiRPvnDp7NrS7/7Lf1qXpNKunDtxJPeT4697/Vd9xw/9wDd87U/+2Jnx/mZvczhnzt1WX/npb/nO6Jb1H/rhf7xeW+Z88vm/+OBP/uA3f/4Lz//MN3z7+9/zlh/+qe/9b3/w8V/55f/wyotb3/Gnn7j73sCJuv3usz/43d+48Vz+qT/49U9+5qP3veH0/q4CwTgM89x67+cWGy734rhwjCImDGq1CJmd9zrSrJC5RhLuS/67H3v+dz7ynLX5g1+FH/jVb8G4C7DjcZtxQGAID52fMKxCcFZjrvIXxzu2VsNixKoFlSNHcr9qgoYtXgYIGRt5n+KoyvbAGnXuqxa+s3PwX/9zf3sYxHG1EkXeue5gfHW/c/bE2kotftNXvu3c2ulf+csPpZ1hc6VSjbHwOBpz19ryU9CMK+35pc39V40JPPu4VeVM3Ngvn7zlthP3zLePIgXh/HzcNs9euNZu+ZWFyd61zXBt7snP/93uxQvJoHe43WlVw3GEzcXa9t71hfXl17/1nlE+7O1uv+Vt77j79fYN77zjiY/+beegt9W3r17dslpVx6J5csfaXLWudWzS3sQrKtD0+5N0gr0BjyNUgWbB0fY4jGicuO4wbbfqxjoGTjOvEb0JrfXsvAmDorBKK6VY6eCZnvutD8JqHA73uddxFzaLnuXWku1298Y9Yay8tN37J//uj08dW/3mt71xvmBlXGttqXX0SFypL60vjjcmJgyy3S3nfM3g5UvXO4OcnXSGbnlx/vLWxYW5anu5miUMtehwknlb7O3tG63Pnji+3q6fPnly4+KVly5f6OT9Zr1dN+3tvWvNegDOZyKt+fCmM+0Tc61XDg5PHZlba83BuBCUTqfHWTa/trK0OP/gg+/8rd/585WbVuum0miZRZKt5y7PtcK4WTt384nD3sazTx6evWn12Prct7/vXaeOHensXudocOuJpU4/MUGtubC2f3W/0oOrVy8OJ+PO9d7WYXL2nvnuYLfIEvYSavL5pF47bZOu1j6MdZqlzrOtYqVZf8Pbv5KRbZZbknQ8dsPxeDRYmp87cfxEf9itNuq1xbpMrHVuqbKYjsfsi8Kxs4XNs3yS7F33H/37xxU3DzcPc+sRzUOf/HwjrKUTVpPUugKWwsH+Fc/QXms89fxLLbPQ7Y3iZs0FAWVDlw0WFuc7gw764PCw8Ma4kB9+5GmdiSyuahWeOX2LTTtF0ms0KxiY9ZNnj7eOvXTx5VMn10Y+9bnqTUYvvXxJA544e8s46fpxhmG90qxUq9Wd8ahSb509ea6zv1OtxdVatH1tt/wUnD5VO3pWgZBkhpQOCUSjzcasbZbZ7qEnbKMoD5kXIdIMXokAUxQZJWC9VYEREvCOA/bWsjCBB1A8W1Z5Py1zxdeqRcB7z1iaEAzTjl/QGksxWwQUlEKBlKkbBESFaELvfZLlSIoAtEIuR8uIKIWEJMCklCsclhMqyo1zYUUIgMJiSypBJOzFl4tGEWYCUIiKyHmPAlQGNtjnqUVSVsB5IFADx+ytNgSDgqAfa7y412tXN4NA1yq63apFRoWa241KrWp8kQhzvV4J4wgoSPOCIBIVCqAHck6lw5FRYYg0HPWyItcg1WokYn06VlpBkceVitaQTBKB3LsCmb1NXeGtFKiUOFCiAw2p69s8t95aXwQBht6Z0Gj0SqBSX8hNKM7U7NDlDpQnj85Tjp4BvYB4r4QCAGQfGR3NteJWpRqhsB4Ox+PRkLPECyc2CU3MIkRxFGtSOJEkFyfOk8IwiBKRKIzDWIm1RZEbDKwWrZRCLookc04DiGdSkiQFEmphBPbsAX1FG/FemJ3zBeO+9QrBaKoOaX6xllg/sZADJkkuyCZGZ31kUJHJPfkgmHBfEQWaK80AE+87Ex0YwxCyVhBOirxai5FBvLd5kaGLqSICeZrzdO+sXEcTeF9uQQL7qQ5TWj03cmDTBoEpvyyxBZU2UTll7EZKCqfv2hlhgelSf9brM5WMyqlmMygzTY3JVL8pj2wa2MIv4wEw9U1EZpU1s2BXGUSaaiYgX3YM0wxcSU9mI7ZQEGakYwqNmHnmucgM835Zoc0U0vCXly6DILOUDe5lCA9ei1shAwJKCXOnCsw0ogWzcWblkyeRKfaSadE0lo07Mq2JApmSn/JMKJiZUSXAmp0eYAEWgVlGbcaqb6CvGczjsu2aSlELgVg8CCGWXUMlb5kJYThjea89ORThaRzuRnNR+Ujl2ePZgb0WKZueeyzFsRtHX1ZIEcwKnMqkI8n09E+tNQSS6XktL5koPJ1XhwJYvpnYIxAKMnNZ1D3znACEiRAZpgVaMwFsVsE+Q0UeqLG42Nvv8eE1kmJ95ebrF65EDdRYN8I2U0bV2ivLnZ3L6XAPPNxyy/0HA+sGXZBsbCd1orlG/bA/MVqPJxPMxiKFTdKVpZXUpbfffeTapSsHX5qs86SXHYSm3fU6lPqdd97/Nx/7q0pcOz0Xf+lj/7tIx6Ni9ODb7tx8brfVMvlwsN/vrp9aYxVKtGITydzY5dmJ5eO7+9cH/b5TLmq1u9f785VKoCPHvL3bSwsJY1pcbkuoqFnr7gxeffxLJ04s9wdqfwCNZmhCffG5p95452q7ZShHwHr/oFNr1pSuKJsHUVyt14oiLTwQ4eL8MluXuHHiMgSMw2CUuNzmYRwd9FyRu/ocjkapEAAcIkKWFePOMKhGAv72k/XxYP7xl/uFJTQmS23VsVEqGQyiOCyKHCUV7wFRm5KuYqiNFRzb7Pjp+q0L/dfdunh0sfLEk5e38/pjz7nPvZoFVpVDoZwtgkCzMAikmSNNGrQXIdSmEozHKVjLpOq1SljRSvj67mF76Ujohj4fM
fiwVplrNff3Dw+Hqa5GzjlxzR//hc/+9Nef+akfvy/b2VprecV7y3Ot/e5olPjufv/zk0m9Vs3+bGN/6G0GAtrB8+WbR4EBiDxAu3Uyc5tFwZoyhR5JFGoFRGGNFIBXJoxsXiTjgkB3u8X2td2nnr3O7AH+LIygNde45fYTx9cqS6vL7er8c8/udruD20+13vjGO1z6kr7SW1xrN1X104/u7e/33v7GtZAyZlPYojtM5pZvy4EPD3ZrCwvHjq1ceeGqNlGoJgB4sD+uL7SjqDrq57m3STaeXz7e6XXTzLHzKnfXr26XTyQvXGa9VhQEhlAE0Vn27EkRIBKRUjoIpmJfoEJjyh4lJ+JDozE0eZZprYzR7NmYAJV3BXjr2TMSCnoBKKfak1IKZwyYlEIU8QwOgZFMoAJBDyQyJdSlpAgkgKStK7yA9yDCCgVRKULhsgtZCbMHVKW5hKQIDJH3Ul6NgVkRKaVZPIJxzgu4OAyJtCFT2JyFRanMgRcPAoEhEGc9syAp5URUGCAqrZT3AiKOXW49lD0AMg1NawicB+ccCytSQGwUadIsqiTWkdZqCrCUcxYRAq2U0lBmbJE8M4v3jI7LUDcxAImgUoCCyJ7FiyVUnh2ClJMdrC1oqmACotgiB+bc5tpoIvIsWqmyANuyU0RaaS73Mow2BnhWQTfbhxBU03yooLAwo3jv2bNzDsQjErF37K1j5yx7b50nRQWJ0aTRK0WKFCIpDASFUJi9iGP2COg8O5YbOT+tUZgFMMuLUlYurCu/lAlRWJRWWhsEZHbOexR24oHL+2HNnJd3GtZ5rdGUOr9DA7oeaHG2yLPcFeM8Z8/sfGCULvdmsQzuTW81DnoHdv9g7lR0IIPey2llae342Xp1C3YuHTzx6H9vRKlZgN1ef3Q4nq9Vbz85X4ehqVUOtr908aXHqtWCwfWSzBeT6sqyPWSXQizFwtz4q77lbY1qGp2ZK7rb/b1BUOXm/SuTSt9o5SAlmJSarYIUAS3s5L472p777OdAB+Eo6zsQAtUbTIAh1DSZ2Fo7cpK3WtU3vO7mJz773Pu/4eiP/so357VBMexEBk1L63p897edixa39z982N1Ma/Vjpr40Tu3+xmZcNY5UeuVqrzNJEvvypa1sYkmsyIRsdnD+FQBG8qTjZw/y46fXimKcFVltvgbCk1HmBAzFkgWW7bGVc9f2n0aUIDDskYDyZGwAjQlDnU16+2tLdm0NIoIr+3BlGwxUiQwEQeK8GuPzz/Idt41ueWcth0NQhnDYu1qs1sZJd9Lvwc5Qpdt+/jQUY6i0BwrqHoyCyYufeP78BrvVxl7CNRjfdbr4R99536//6uNXu7rXGQfMr27uqjkd1OjZS9f+f9/2rV986frv/+4f3nPr2T/4P3/+Xd/y3guT5MFTqz/wM9/XVTt///LVeOX2yeYL//S7f0yCucC7g43JD/7EDz/y0otX+jVeOPm2r3n7z/3Iz73zK88srqo/+JOHl2+9+yqYvz/cu+sN93zrcu0Lj30iH2eTcaGJ0ok12kxGuUL+uq97n80bo2Trbz/60UrVTLJCUJqVcDLJWFGWswlCpHkd66gePnUZf/mnP/kv/tXbGTa5YqNqbKEAEFWpAij025Be5a7VCBAIzAMsnc3hmOcGgiMSSC+pSLB+WgMKAwMmyr31m49B0fitPzhIszBNITbBuD90YXTtsEhG+69sfewhtCfXKxkFh90ROQWiYh3WG3WADgBEmjpbO4EKMfNZb7i4cnJhZT134Yk3fGNE6tLTn7l2/cr7vu+fD3k031Lp9uGVq5dMNvzihx7dvvpiqxkvVE28GnWuX84Oo9VTywtLyze//oFqM3r2qecq1bkj9z6QO944mNz5hjfPN9JXX7r2yl62dPqk370+Gg5NxfT6g8CoYiGP69XhcOw8R5Vqb5A7duNhj4DW5yq73UOHbq0RjtkWrrBKTRiArQJVCUNdw8I6rUW8iLPa6FGH/uLPD2IRHbTYWecDDipDnNvbG0qeZtKPdBrW4+c3rqgs++H3vVMpFc/PZZNuklnvOLXFOJ3EgXIEgaJsNEiSnEkPtfm2b3z38KPqmRdeeLaX3nXzaRU3DzcPqyqoVasqkF7aC8gcHh4urcyrGBYW6xVQjUr1yl7eXlyPtD/sDTv9USWu7HX75y9f+6q125qNeGQ5MrB/cNhqB0++8oIOVs+/8MQ733bH3a8/M9jrr68v7m3tc2KF4Mr+0NPh8qmjOEiiuF2vVxdqpjFXOdwSijjRxXMvv1Krhgv1ynCYuqsdUuS5qBkf13xkIBuNwQQqMGsrra2dQ1GdENPxoL9UXULviyJPRuM8t7VmPQpDcQWCqy/E1zb3T51ZPNzbOugcrB9frhrD2V4ctSADgFDEBmEUhQHnPXZurkIPPfbcA7fcNz8/l+W7laXacnVhYyeP2urksRO96xvPP/6Mf3V0x/13PvnI0+lEL7frJqo0llsNUoDFsDcIAn1l80qeDc+duH3r8uXzV594x7vvGbODDF760vmdawdnj1TuuuXs6sqaIsk9bl27Ir1+b2dr7sj8A3eefO6JTTtiO0o7k1xVttrzrcW5OM3zJFBZYZdXV7UUk9FQKy3OI0igpuuCsFVLs35cizCsoyZ0iYVM1YAt9Xvu8lVLQVWFoBwGDti7Sqg4d1GkrXeGWYGALUQhoS4YnJQAiIWZPRBNizNKRaBcMlsRjQgKvS83QRWD9wxKkWcmIG1Uub/EnomImTWhVmi910qMAUKTFt4xe56KR4gg7B070lo8G00A4JkJQZHywrMV+nQlXS7plUIHwAzCYszUlS7XqB5IK+W9K+0OLrMvirzzpNCzeM9E4Jz0NgcAI+981SgFXmuMIh1rpRTUY5xrVE4fmW+26vWmajXiCJEC6vdHBUhcaStVZEk6Opiw89qELOBzR0RaxSQYhsbZTAc6zLW1OQp60CKZAJNYVdItEOeFvWfHIAo5Eq8Lp4J6HTUGJmQSMgGGjRCwmKTppEDWgt4YMTr03jM7S1QlQ0BxNdb1qmq1oBKLk0BXapWqrkTZZJCP80Q0ZxyFUb1Zo6BRSfMsm0yGA+CMQqrqOI6qNR1IOkkYBtYTsPWsCJ1n58hoFZpQEEHRyOZRYBTwxNrc5p50oBV6zcBpVqTee89k9NCqTHsVkZBGReBk6PKGiTUIudTb1AtJoOYrjVC5nFOfq0BHiwvGJhPFbjDsE7Rr80u60mCfZT5rtCKbukmWNWpxoE3uptsGOI1vqBt2R1lJUxa2M8JrYgZOb4Fn6+yyjLnsucHZfSXObpRntdYySzTBrJhnygmQBVRJCwBeW9TPskulb4IwHUQ/6zgCmuKLaRBJBDxPw55I0+zarKyZy5SUvEZnBLFMhc2knRl0YfmynufyNZkmt27cNs+A1vSFw7J6DErqggCAinAmWpXUBma1nlOWIzfg2JSulPVP5Ms+qOnfltmks2k51MxYuhHA+rI41hTEQdnhXXY6wZTvlfykLIhmACAqe32mp72UbUCIb2xfswBhGURE8eX9eXmYJcV7rQ66
NBhv1BKVr9zMhiqfv9xAhSI3/B2Z8h+k0lG68UJMQ3MzJAUo4Ge6Uvkmmk63YxYsgRZKySnZe0Asy4241Nu8qFIzQxQRdYPKESIB0mtlS/8fVORSoFq9sajsZPdwaz+5UJAP3JiDwFW8azaxO+gmqZAaR5FC1ezlPHSqXW2Ndy4rJYFRSuFCOxKDJorzvDjoHuQ5tJrLw1G2d22/SaEUYafoYaUetxfoYH+U5p/6/JcyqtbnGoLF9qVe2G7NzQf5oJK4UDfDnN3CkcW9cbLUbizPV23ar9RUQrJ7eHFUjJQwo0l2DyOKk9Q5DtKEk8SOk/7SXG25tXr5xSuNWmWp2daruaoaQO1yZ8CRqJMnb7t84dLS0Tmb5GHVxJEeHuzWa/VmzQiCTQZBECbeA1JepEl/lGRJvTV/fOnc5qXngooyqUQExMWx06c2Lp9XVUWsc2e9eO84bteduDxNjZFTK8HhARGGXgXdCSQWqrExEabjIs/HJlTluCQR9NYRK+WF8+Jb39P+3d++Pem9kiUppMN3rgfUrP5guPKff3f/A793pVZtOe8DTUFAufWARoViuahGPo6DycFwcaGyfEze901vePSLB594OImlOkzySqVFNhyPu1x4T4p1q2bO1ZtbVMXN7Y04BAENUPs3H93/X89+4fu+5q7dq1dSGw4mg1Fv5HxgGyZeW3t1Y2Bq9bBWS30aRmGghItURIiMLRbf8s4fufbiH6sMohgldaPxJAiM+NxZqbXqWmvvVUBVHcSQT8KI4tgwYJbllVroLTubT0b+C4+++nlxPmNUhpgX5tz2VfnCY5993U3zDxxdqYbhb//ZxU6mbzuxMNcItzZ36msQRMFyG7t7LwVaA2VZll+6POkP/NycD+txd5QF9RbW5y5cvlQzzUo1pjiYW1waXDhk5xsLQTqA/f5o+inwlhCz3Bc5aa0QyTpHSgug0pqFGYBIIZEmZXRZ3lZuaZFSBAhhGBKB0qSVNloRs5cCQYB94R0hKaUVEgoYUgJeQARVecku+4xFFACyMIAowsKzAIoDR6yAkIhFnGfnrWcmMuXFNdAalQIslDB4BhEiZHYAqgy5ehZSqpwdxk6cd0YbVMyCAalAh6QIGAKtvKB4BmEnAsJipwasY0aFgVJIJCzMJTEBBrbeIiKLWOeQEEgDiKZpbhsJIxMqRPEsZWMRgVJEMruilkwbAEE8e01AUN7MCYBoZRhEMysdeHaAYL1jIhHws1tAhUQgiOKZPZejLokISGubZ15EnDcapxPdlGL2ChARWcq9F9BEmpQX8MIozOw9e0Rk9iDi2Jdt1ZEJPPjcWUXgrDBYB2CdZwAg9B5FkSB6FgNUjv6UciAalrsUwCjOe2+LwIRKkRdG1M45cYhUuhflXhJ6EdTEzNaxRiIiQGKW8vtD2AuKLQpSSptInDNas3jPzOxKqzhnF4RRaIxiYAKx1gJqoz3ZKAzjIADvQ2MYyVpXFNMbo+df2lg50q6tV29/zwMvffyVdHisOn/qSFT4wcOD/k5cx9vuaG18+lK33491VKDx2lcgm+x1OdWO6Vqnd/b4UiF+rzuujdVaq9nIul/xvlvl7ur1XtqaFI3o2NKt90H7ENbna9Dz4ARGFjxAkUNKkAoU4ovQrf/Uj1253l9ePGKSfY+gitwaRazYKB1oqNZqN99/ZHtj55knL331e2/+0X//rsnWC141qCBxwoUtCtRkT7917jfeevp//PoXP/Tp87u8F80tLa2ubGzuZNaQIgiC6lKQZolTOdrcFxMQMSpQmpJJ1mgtvPeN39Tpbly7ciiBGqcpAYzTrFINFxeq/d5QQ7Z9+AIiu9xmjhFJaVAkzqmsW0TVa3/+F+9aPnVoKrnBw0jb3Sf8b39g+PFHoJtXl5YbRxZi3y0uXch6F705Eycjb6UZtm1njzMwSdOu3VfdujLc24LGPABbpJGBaiitK5f7tqInApkf/fw/PPvV33q8XWm9dO0QWkfm56NBki2s1P/v3/zAj//4r/3dh37nd/75z71y7ZVf+MAvP/wxHr185fze5rvfdeelZ//uW776f68tzcPOwbe96+a33nLmD//mD3a2J69eeulTD/311779214XvqEdLvW2Lym3/dv/9SdvO3FqPln/f3/iF9/1XW852OkEMY7Yf/rJ85sXD5yIENi0iGtRNaqlE7dy6nTQfN3JtfWHPvMXHrQXRFSkYJjkURgR0TCdeAfZJI/rEoHqbNi/7813/uWVX/qZ72zULsLuF8nkEhWgeyocuqKIagYqS8Wh84tHDNYndhmoDhw7yEQvqvpAwyHbuMhD4602sbgiZ/2Grzt3vcsf+qtuf6Ln5xpz9bC9UDu6uLx1bbjT6R/kCXAyHhdNo8moAiRsRKmbhg4uXdrnvAjC2KngtnNnX/9V79m41G+efLB28k7Xe2anf4mCYYS74wSTw92XPvMppmFtOV4+oq5usChvQn3nbWcP99Ltzf3K0XvmFo5k3Hrx4SfAD2yaRpjX5+fTpNoI6+ef+ng2SsPC9l55ntMECVAqTR2ygpxl0s+NNoZUFFURq9qo5tnTmxcvVRsKKds7PDgcuPX12nIjTCawvFq0V9W1od7bKeaD2omTyxcubFhGL+CdqDDIXJ4FajIegHCAwDmuwfimaOdbv+fIA19xXPxWa6k17KW//x9eeejJ3ftf925lTm5fuNbpDR24g15y5PhKo+k3Ng+r7YZu1jO7W5urvLDb/YtPPvKN739Pp7MvbIfsF1Ctz7Vt4V537/2PPv9I4CkZJlcv7N1661ocGijcSOzQXT915sRh98BgOLF85MxZn9GV7qs3nVqokO93upUwdtabcM4stP7uw5+/7c7wba8/G0Zzw0EhEO3ujMNKe+iHV3b3/+zvHrr/7pNr586cuu3cg+960+OPP55ncnhl88KFjb29bfekmTt25u3vfvO1Fy/cfPfrPvmRT718cauPnsTtbXd2VipBqyVCzstBryCsTfo7Oabra0fzoojCMAyrXkQCbZOMC2WTDNFWGub8C08urx4JqtXxGEPdHOzu5+mlpRO3TRLUBoJ6LZ1MDBVF2okCUUHy/MWX3vrWbxz55HCvd64WHw77nZ2sUm1NlPrQJz/hPd9527Hd/cFk4obdCRdcP9KIQoUFM8DW9cvzc+16rZLx+LbXven6hz4ecPXC41uPfuHZH/vO91/ffTo+NZ/lyacee/JH//E/gCzvd/PdrWv3nTu2M8zHll99eYutvPXt952/eGm0uf/485eqrcrr77mrcNYNpDvCerMdh+BQqo1Ko17Z3dqu1qYBtOcuJisjc+rIsq4pU0XvhJWSQiZdePbx8fOvcmfEktvQTuq+H5liqR4tt0JDKRqqGRVXgW0xSPnKhh366p5XB4WQCkCVW12MgFoRz9bfhCiABMgAWumy+FahBoWEgECAoJUWZi8eFSlSAKQQECUMDZEEJnAeeDRhQRYmJFJKmJFmY6zE21nsQgBdub9Vzume4iJh76bCBYsiJEXlRptjISz7h5mZRZgFfOGIEAHZWYPTvsTS9yDSEmg
yKgKvibyzBQiSGUwKa4EUqN3hs1fHghwbXpqv10JllMzV9fETzVbDE8YgxvmJVlrA5DnmSRGoalyP46imQ5emInkulClTjUIsikwbw94VycS6wuYZokFSznvQGCnNolQQhpUKKQoDFQXag3hbOO75ZEBIUTxv0GSRzbK+dYWwRXSVsBIFkdYRK0YFjGBJAaGp1lQYgPeBEsGkKMBbNJUK6AC0iRpRtV7TLOnI+5yb1XYlqtRUAEg+zyVNrHMWCJAZIKAQrKeAgH3unUbwLEarUKNiXxYpIgb9ybgSa8s4yYs40swyHoyVDReXG4W1AkGVADNEg8boChqxGWhlHBChzXLSbQxioUICn1tP9YrkASKyTUOlGFwxyQG0UsZaZ1Sog2l743T9jiCIzA4RlCr1FuCZDDNd3pdkhGaF1jNhZCr6TEOSUwlkFkXDG5YOTJkJlgkFRIQpkpmBG75xPGVvcklICIVuZI2+7PGkHD8Ppegy1YVYZnS2/AnjrG57indmT/jLKoiEy4G5ALPObJhV5/D0t2AKWqYZtJnSM6WrNO0OAil7pVHKzFrZRzR7QiKvWVc3kIq8dnClX4NIWNYZzTqUyoHFX/5CYdnJQyA3OPS0IUjKIXUlZoNZCK18alw+DZ7VO5VVT9OM2Y2qI0GZvghc/jUp4wcAgPgaxJme4xuu0PT1KJ+iQMmxYdawhAQgpbM0ffo0o0gk5dDm8jenQ9W9zLQtBGHPsxexhInTNiIAdlw+fGlxoSAzlhRRAbL1ZXpRkxKYDsRDRR649DaZ8Magtlmt9VzFFdId8GK7feQm+uKT1+pRY3W1lUzyKB0Ok1FYqSWjjia3dKR+eMj1uJUWo8EgveW2r+xub5tY7e5er1UiMa5aj5ZX5wi1p8AWuN5cOnbTEUiCP/3w5+bqbTsad4f9YpJ1x67QDpXqpb3I5OJFJt5Ldv3i7vLcudjUbWojVeVhf2z7mgaD7u6p5s1BI+ofXFicm8uKrNs5UMgFVC5u7R8/vb60cmLnYNfm7mA/m8/wzAN3pLvpxfOvepS5o/PWgc7zpbmF/YNhfb7V7+wvqzCqNJI0cWyQQtQK2Dfa1UF3JGjbixUTV8ajpIBsf793qr7U7e6LAmOqIYabhzsi6vrWQRBFDEo8aKWLPCtcKqBEjKZgYbHdqq84uzG3ODfsZdu7w6cvduxgOJFATC00UOQ5Gs2OUUAbjaAEKYPiwXc0B/1Nm1mP1TiqkrHDIkXY/kf/YOmeFfM3z8pff/aqSFUHoTGYJ+O7boV/9GO3L1R9NvAGqt4nd73llJbK/adWP//0I6Mim19sOytJPwfHyKBRIOvubTxGvtDVhXtf/77zL/914YuANHh96fLkVz90aVnbXhomw8P3vuV98/Mnfv+DHxinrAyBK7JiWDHa5WNPgMSCXthWoxGnL1+99piOa63YOIWn7jgbVhu1+nw2yorc1yrV7uEhmWB4sK/DKtsxmoIIGYrJ0IqI0WC00qht7qgSBAoWdP7ed9buvnn+uee34vno7194qf/iK/tFdNgNLeQnluTEsVXnbZLko0kemaBboDamSJJenqejInPST20030JbTA5GQbUVmzZ50UbtdQ6y3DGp7ijNxqmJpwOS8ywHAGY2YeCZtdYlRC8dEK00ICCQMYHRxjlLIEojCnvvWViYy5CyEkRSpdIZREaTsHe+AChLhbCcFcuklUICUkSoygFdpGZXFS4vQQrBAwiI9QwEWG4Si2cGAPLsfM7GhAzgmIlUYAIPhSIKjAGWvHAFMwMIgzEQRlgSLkJiBudLzE0gWJZGK9LWZiRE5fxTYVBojPHMTtgE2gSGAcQ6z+AZtFIiEhhjnVdqNviACEQUqfILVGtTtn97dIjkRQBEKUVC0/FtIqTUtKKPVM4enbBnACZU5QSCgAIgyd10dAJNCT15ZCpv74TFlY1CRKQUqdJjLwonnilUSgdGB4Qk7JSiMtLsnUUBIiRAhUCKxHkBdOK9c1PgzszMDGIwYPEiQoTOeWZx3mpjAmOsL51kLPdnFClC1EpppQVFgD2IAFrvC+e894gk3msQ0kpEUGsvYr231gGXhpRz7MtxsEZjYAwCIAN7seAK7xgQERgFvC3EIwOgUHmrSiTeF2g1KesLLxxoLQhkdEWH4JQyEVpvtPGepfwAW2dnXUV33XqrWg02trfPP7vRap5pz30FdlIaZw8++J3ZZPtLTz32wqtbrbUFKiovXDkYppxUjLG2UZGjN9f393jl3Lm1Y83Rw5cpm693Mentf+cPvqlxLJaVW47e2wgnW//hn/3ho8/YSMO5e6O7b15eOFJpLkNQlXgefQi1Jqiqr6uTT3zKf/Cv06W4GO4clkxQozRrUZpaZy0pPOgnu597MYqoHui/f+jFN/+Ju+2tC5pqVK9A0rMFE2mIAlZO6sl3/uJd3/Cjp37xp/7yU68Otnd3WrX5qFnLikIFXoNTJMtHFiAvhn3PAM16azQYVcLYOf3kkx/LiiSqqrl2Lc0s5Jlph9bjfvcAnC3yzEQGAJQhQukPJ2hgrhJWahH63s//0LHb7qUcxhYscJ5xuvb64Ff+x+L3PRL83C9uPXW+4EmjFlT//nGp/+7+9/7GcaDx9rWJs4TK7+5ZY+C2O+v5db33Sj6u5NWtpDbfs7kdXQs+8VBaqPlOf+fDf/R9973RFFe/uHN1/9yZxcc28pyhvrjoI/jZf/Qz3/DO724FS91O6iVwCWxc3FTj0YVHLis/UCqsza1sbh9kndHqUvOpFy/+nz/401P3vPXC7jBRy6+7/7a3VvtPPfzZRz712dWTcfcgf/SR8+a+xje/d2W9HtSw8v6vOHf0zNEvfuJvwCibUBQHqZcio+Zc9ebTjbUTyx/4dz+3fnKObNGcq+TpxAQaCdPcRnHgHSwuLI0nIxuyWHBFoQAGqf3EF/Yf+roPnG3rd66O3/3d7166WcX1C7kZgxYH8Wi8ppe+OvVNZROjFPE+SUeS6yoYsCMcSaXeKNg6mxlHhDoXEd756m+NG+2FX/39bLewi4FKO+OFMEqTwfXrw4SU964R6DAMTh8/8vz+ta3uRPvp8mCpfWzS6dUWl1dvvikqBmlnuLLYaB2tfuqRj992oq5Znn/2yS+sfuQt733fq0+/2htcnWvJ1uXJuJ9iWHTHWWVuuTccM+iTt9/7tm//gc7edTMeLB9rjlR/9Hz3sU8+9M73fVM/Tc3R26O53uWLf0nix2nqkQIVurxvjFHVShAGUT2u1RvpSCxa5T2DdLvDWrNVb8PJaPnJZ3coii2YgyRdWsL/8GtvB9hsLK7/6D/+zPMXJxt7aOpxQ0edw35RyDDNvQeVe0QR5TPBWEmSbT379LdzcDU2Pj1MlU5wRd37704/9cjmX/7tB/cvXz958g6jdDqZzMdGsqI/TsZp0dnY8awjo9L+xLN+9PFnktGoIH11f3DpYPMeimJxw2Hv8ZeerMb11nzzmjv0lSCX2uSg7zKOG7Xd7u7aCs21FyNN/fPXNl+4sNHt333bG9om3bp8Yf3Y0WsXd+6794HPPv+ZzW
cufuc3/+znP/XB1psrg9Fwbm0FmnHu3VMvX/r0I19cmlt+0+vesLbWfuWJl86eO/kXH/wzjbXd7lZvd7fp1Zve8cBnHnr1Q3/1qSeef+K+E3d/4eGn9w42l9fn9/c6ieO4UQHDyBgATcZj760b50jGis/beRDHk2FSj+tplpCIDsJsMgjDaDAuPv2px9fXb6vOL6pY3XI3THr7uffFMI/Hw8bcvCuyQX/S7/ekSDUmlgdXd3cffPBdRxcqB6msrqyliSTJkEIKgu4jf/nfJcsODsbbQbCyEC21msSytdO/5eZTgR1feu7VUzfdtNezgY5GaVZrLpzfvlI5euLUQm37/EPf/V3v/NITX/D7iW5EtshWq/H1FzdrtZquVFZWjlEYr5w46X1RZe5d2y6GvNyoLT7QTD77Ur87ePzxLwaAd9xz+8LS0mDQiReqtsgBghFAXKkWxfS74L/8+fMnjq3ccyw+fmJ1ZamiwxBIdQ6zl57uf+Lv+1uTuMgy5ZKjS/Tt7z53803NxblAsIdanDaKlECOEFABebc4//zWxx7pf+4KdlJtHYl4EWDmcqu7XJL48ksNEBCYvZTjJQjVVBgoLQ2+UUVS3k2BolKOKDyjEig34xVo1MDomVmASCMIi0MdgHCgyXsHgGq62mRQSOVyvvQsgFDYletB5lIMwdJYRmDhsotFhJUiwaniVJbeIqFWikWstWFsAvIKeJQULKAYnM2FBRQCovM8yoS9pAYOh2NhBoI4gualzs3H+jevrWjkdrMSKL8wNwcKTWSFsyw9FLBOGTAReR1WnLMWxWsidpqcjTDgrCDDAlLkjtGEYQQsBknHsQkCAq+YlUiojQi6dBhAIQJKhYRRVFnwQQSTnoMxCTqUXLKcLSkdkKq1QqVBczDuJD5NFCKqODJQTBIC0lHMOmQxPssVuFhFGDWUCViH7FwpcEdRWPNgrRNtRo4rJqyLNmDZJlnB5IRJClswKBStyARKk1KpLbQhQyZCrQODSCigyvtJL8I+myRGG/Q5sm9XGt5xXc+hDtgXxEU7Wsu8tgUwF85JYGrVqAlKB8yxiB87N3RCRgIMoyhW6PLitVwVCkCpq3pSwOJA3MzgKMeECeIUJfBUZik5ztTKECxbcsoCrakFUrbG8EwlmXGestJm2tUzTXmVs/5KO+VGUxGUOhPNfJwZ6iwNE5pmmnCGkPysXZmBy81jAPAgIsJf9mgIAsJlr9DMfynFcyhLumSmAM2SWjNoNLWjbpCq6SbwtI273NOeBfWmKwWYlmtL+aBTyiUz8lSKLvSapySzVxdKQkYeb9g+U+4CM8hTZvxk1qA0pUdTgjOFYTfkpbIUelbsM+38Lk8FvdbrVD4ukUzzhDQFhlMuVp4bnpIyfO29UwbvsGRbpeRYNpGWeiTcyM3dAEvT4BhOMRNOJ5dNGRzM2Doz+NeSezI7iFJZKk8bASrx4GdJN++FiIBZBL033otHdAIKsUzkkhYk1MRlsY0X/v+gonQ4qtf1kWNrk84G51yLGh6dhyKu6bRIKYzZuzDQzqWdww6w4UknKCQwtSJN4jAg42vVADhtthYmk2LYhVazGtTql168XvO56wyD6pFiIhPXX2rUxE7iEGuOG+3QMT69e7B639GLh/3kcO/szYtvvPnsY1/aHy9VOYd0r58NR81ImwqdaBzZuXI1CKGiK/NzdRU1dw82G7HKnT13bt47vzK3MBh04ooeJ/LkM6/ceW6td21y/XpiYl+51q/UApNmFy/vP//y1s13n601l1++0qmElaOnboZeB0wWVgLrsl4vyxMIY1Ha2XQyGQ0hUgtH1govRliHdevcZJgcTkYggU6zyKh4vtnpdTwjQYS+oauLUevUtZ2tJ790ZeP8hUmaoxkd0fb+W9WDb6yllv740/0LG0mtqrRRRUHKWLbiGAPQyoTLC+otb7l50nsc6/ELL9rOE51qS9eX3P1vOTYcbt/5Rn7wH976Ly4e+S+/s//HH9v71jcv/eCPvrUWPhfVugGoZy4eBFHz3JuPEewr1mdO3t3fn2RBHMM4m0yAYW4htl6NhhOAwlsLJHmRXNi8HIRN7w/ZFtV6VKF6FPreaIIKCjZve+v3PPL8ZwFAg1IBM7GzVsApDaiVDnVoTD6aKN5/7vn/9jP//pe++NDnxge9nkhv3y3X1keDeH/3OtkiUePU7rVbwbnjGGkz6HEnw34/NSYkCoQLX2TCFrUyKAGy6yU/9xN33vfG/dR17jrbmAwLf3uFIumPq3/818Unnx1d7sbLR2vZYSeVfOdwf2Vt3TkJjRlnRX1lbpROOp2knwV3nFhQNBgcdsKKrlV0OploY8ZJkTiXJDxJbTayN91zBOBxAFBaF9YLEoMQkXfOmEAbLQCEKtQhiwcBTYDgA6VkinPRe/bCQRiKMAIpHYpnx+KFkdBobUGiKPbeswgL6zDQWs+oMLK3Woea1BTJT3vR0Hlx7IAUKgVIlj0yC5AmxQpZoPC5Uqqceg4Es8sROfbklHe+PxxbgDgKEbUwihXP7J1zLlPGiIAxuqwFICLPzOwDFYqwQirndhGWPAuM0Z4FoKQ7XDie7npoo0mTciAOlQIBJ86LlHq2MUYRoQgBahMh5uAci/eey4FoSGhUYNkBg7OOweP0i4qIFBL6ciuQGUG01l4EURGhFyavmAutNAKwdwxla6bXpUGpCIFN6VIHodYBSDlPlzx75oJFvPcIIKAEgD2XZycrCkH03hGRMRpRjDEAoEhRecuoCDyKRqOD8lA1ADArVKSACJmBsIRWJMCI6FkUEbBy7JjBuswiKcQoCkKlEBR4561FED+71FeCsHx/iCpJGBMSIHjvoZzjVuS2yIxWwqLI0PQ5sHdOCPPEGq3iaszsQ1SKlIkj62k6IZOE/fQuxXvxDLaYfiWkh74dmZOuml/mt933pvXlB7d3t/a3DtLNnX6fVhun7rv/lv7k8qf/7qnlWgUFQlIZD1aP1nWITnmsBoO8OH3XsfPPZC9f2VpuaUPLS2sPXO61/u2v/JtHPnepO0QVtFHgC5tSZAdZ7mJD6CFAAsULbW0CHOXD/UGwtlifb/mDLkdRZJ2A99Y6BeALjisagLPMqcAMR1m6kw6SOtlRPumF7bYNc6ooFZl80NesZJLmVvTKxV/5H7d9z+eTX/lXGy9uF2gTUqaG8cFOP6xEBQ5HQxdVyHve6+8ZVGG1pgwo5/KDSToWJXmW+jzN6s0AlNehIVaWJawaQB4NU5tNHnzvVw/6+vorz1kbHVk5/Pr/62QOh1kSoNS0YxJtaULq4Ozbwg9++MjH/svev/v9ztjNoV760OcmD7/9idsXq89tZK019/M//6bDdOOpzw6f+GT29EuFQ33L0eiNb4kfeM/Zpx4d/Lv/fP6J89hcZcjsi594mF+o/85vf+bI6qIZpvNReMjNb/7mr/vTP/rDs8vLk+3PfvJDYrP+XKT+8Hf+wI/Tdky3vuHcYw8/NJ4Uk21st0LGCtUDl8SfefTh27/tPUfUgpmc+p1f/Ymnn3pCKWrUOZ2kd9x+63d97/e98YE7f
vlf/PbHPpGef3HjzPGzC0ePvPDos54FQRsdOaNsHrii2dkZjPvDm45Vg8AN8yIZZd57Fh9XokBrmxQMqt4MBn0XGlOrhaNOP6qElXpUNdFwHza6+r9e8f/xiS96Nfjeb6h/z3ecXLutkThjGreLXeREFDDUCsd7Sh1QkBPFyK0X//J6tfrqqW866zDJ82HUXjCkB1e3cJieba7XbNQIalFo+5Pk6Y3dyTBFRUUmk4l3NZ06UHv++Qu7zVq9XY/KT8FN9z24c/mguXri7L2vyw+eOP/Ms2Gl2Tpj3/ved7qd3e2121YPrVo4d3HzQn9w/eZ7Voa9rcnhMKRg/cyR03fcfm1j0+euouKz5+4Zj8SN84NnH3vlhZe+8Z+88+H9ZxiaAelac/7iixuvPv54Ohy3FytHl47sHGaVwFSqqntwcLi9r8IKp9W61wHVUnuAQRQGkPQK9jAYJP3dw7m4Vvjg2qEvrNYTee7R5+Zlv1s9//1vgP+8Yzd7uYr0KM0NEoN4YNIMgsysBCoY5oX8p994VzL8hIlg4toVs8CyV+R9b/ZufwvPrcZ/8qGHBy+PFlbu6Ax6o34WNRMTBV54//phcyV++/037RwOt/f7g97EhHx9d5JOrA7wwtXLp5bnUl8sqmh7t/OlF6+0l6pGhRtb29l+b20+eOc730agNcve9W0Unw6HjDAYDz/3zMNvOHV8/djxucVwZe7UsLe5vAxf963v++Lnnv2xH/+eV579EtWrn3/8iWcuXI1iHVC00ppfqDUrId51z3E/ps998rGF9XqrnlZr+h3f/v5Xnn35n/3y7+/1LWq1Peh99qFXsqyYb1ZOLNQQi3/8Q9944thibotskgVaaaMtsyVs1EJgGvSGYeYE1UGvY0hNxuNmuwlSEPBh99rasfWwskxx0072r776yuveeq57yK++cvD0Jy6dOHNmpdZYWJpTDdrvjBtzlacefTGDxvrS0b3t3VFRRGE8HmYFwCCZHO5e6e4PM6DF5SVbFAuLaztbO/W4fvzY/N5Oj7IBEu3uXacA4jDqdQcnj53VxUCS3ROnl/bOu6e/+LRyNB8ZFajKYvXYTaeWji4mmbe9SZTZhx765Inb7/GVOBmPlparrzzx1PxKs1LU7j5zQoX84qvXrc1l0s2GTmPI9ZgC7QDTQSpQtOZq5afgmRcnr7y0/SlTBOplQ1KfX7SFTXKX9pnRFM5VssFbX9f8nu+/6bZbR2AyTvYFhgpDpohEW5eqypwIVhbcAyfNPe+96b5PdX7nT7Z2hlXrEHUgmgHJAykARPQEzAzMwqy1ZueJFJVhMaXY2bLZtiicCQwqlTshQsm9CCMKoTgvbJ33TKwRUbyvVAJ2rImEQVWNsDgHAQkoSrIi0IYEHPsgwLKEpfAFivIgRBoQhFEpZO8BSYAYUSkS9uxBEZXbgeUSUcqJtAKEYLQyBIHIkcXaV77pthdefWXzML96fUiB8s6zsGJwLEorQGHweQFAWH6/Dyc8mvBeZ//RZ3YbFbUyZ1aa6tjqfBypdlW1GovDkQ28Fao7NsYrZDRh7F0GokBQh1VVQ05SazNhHzS1jiMAgx4UKVCgBcEX1YpWWoCimMlIKgUcHHa1Co0OiJwDMVHFGG2zvvXeZTbJ83qjXavFOsuZx5NU3CSfDAdl7633EEYV1DpaqFfihh1IkRUBoDIxAKqAXABVrSDNlXCtFueAeZJZpZvVShjU5nSM6QGA9IY+G9uwtGmYMudDYzLrI8IiT43Sw0kG5Rw5gXSchYHWRg97wyDUzWoly/MgDMIQUJvQEOjQhIYlyHwSVWtYJFVFzrNVIqIIPBi2mU0njlxslEZEFShAk6QpMVhXzCiORxAhAfEALOCZ3bQ5CGdWSrlan9IOhHL41NRbkWnoabbsR5mKdDwdMMWzSqFytBmgCIGaEpey6nMaTiOcwRtCKtWTGfWY4Q+cVQqUPGHa00MgwMAAwjIVm7gsX0AGLuNZs0MFhHJerghN/2gJu24glhtc5cZT/rLWHqRpX/P0B9Nq1Bu5K/iy6WAw05OmebgZmZqlxqSEaVjmohBLNITTV3I64E2mCbTSLZJZgQ/MmpFeOzk4E7fgtQ7qGwE8vgF8Zn9lJkYJEADfeAWoLCqCG5mxckha2RAEhMI84zk4VaOm2Tq5kdmbtZtP+eL0DxOVp1t4WsBP5TvEC75WUcXiBcpaDF+OhNPOlVk+hYLOcfnmQCFhmGpESN65UFEwpd0kggpIfHn45XNTpZ+mFGilhAVI9LRVaoaKEISLyaDjA1I2c3OtoNmOpeA8GbeXQghoMthbmJ/f7x0QkVFRPh5WKIjrcTbaBvZpDs1mIyn6jcWKVsoyJqOx0dlcTZQrDvavH7vv2Ne9/94vfvohp2lU2L71k6w4tbj44ssvE+CVVw9Gvezms6u3HF882L5q+xOC06auJ+PkyMnWlc2t1cpiZKJKtaKVyi1c3dhtLzZPn759cHhNoUWfJoNOkbb9sJgY6qaTO+594HDrkh3kk8EQE7vp/e13HTm4fnD5OWshskO71R28/Oq1y1f3FT7ajsO1E2vtubherS+2Kq36aqCC+lyY50OtDjzaAPL5+VY6kbg+zzZNXFIJ50DrcX/MGOYJgJn3E2zMn6gQffbhR166+oVBt9euxhFK3efNkO87oW87YsMI3vrgA5vd7QuXrwKzy+37v+H//eBH/rXRJlRBkQJyoCD63Mee/8oHef0EXXgp2tponl4NUPee+tROqnStXVTPXI9N5599312VRvDPf+KWYOla/3wPu5W8v7QSNKsLEieTKy++ulxbv3Q5ft2pk1eGXNjcKEidTTKTFWyFolgHhGlvBHbg3Uaad1mcMIjScdw67E8UEArGQfWn/tV3KJB6u0VEBskXYJ2DGIs8FzAGiZTCSFfiik3zj/zhf73z9NH3fP27/vrTV6pe16Nmf3d7QdvQ7o37h//1V99x8nazfgR8Yk00n49w86Xhn/2PR/70kUMLgQ6MF5uleaCMkJkEctf99f8/Vf8dL0l61ffj55zneSp1vDlNntmZ3dm8q9WuVjkhCRAiW8gIkWwDNhgMxjYG4wRfwBbYYJOTQSIJJCQklOOuVptzmJzn5ns7d1U94ZzfH9V34ffPzLzu1O3uerqrq+pzPp/3p764A/1QqyfA/dgoTa4Wdn7ufcuvPxM/t5k/deZUCmmk09r0ARO3KSqG/Xw8LLM0UkVRSyLAaOvc9WakiyGmKhrk4ySN0kamxoMuCLIZ9BvnT+0evUlNjgKkOFao0AcGbSKtFJEiDYiKFBEpAEVKKQIWIlWWwTpbxVuJNIpwCAwcPBGSd2UgZB9Qqh9VXiLWJqpEcU3kfBBhIi1BAgoKaIUiErxjEWGu3EaOJQgToGevUDOxCCrSkYqBjBJBYRQIPlhbgnAcRQhUlLkLbJkJUcDFsXgnQwnMkGUxB451pFSklVjniFETVRr/JCBNNMGoAdrSCgoh2iJMdHdmQrRFYSItyldAAO8tELEEZtZaI5EggkJm8UEUegI0xjiPImJ9kBC0ViiVH4uCn6jl
hEiKrAvGJEoZz6UAo4iSSYLW2UBKGVKkwVYsyuCN1iKSRhGRJqg0GojiCJCMMkTEHCozbAghBAsCwTuoGuCIECBw8M4J0svf9BzEkNJkhFmJQiFb5iLMISQ6UkQAwoEjqroLGAgISURCYABgDoiIVFUSCbAY0ixivSCxik1gseyDBGYWHxgACaMoQkFNREgcGAhLdiLCBEF8dTZBF5zzeW5LRKMxS3Q1LHEihXOASpESoNyGNElESARQKWJllLblSJNGMj4ERQpIWeteNvHum7ljae5Q80jc31qdiw5trXY0zt588sSwc7nfXzi/89JjX/lkc0X1t4NJzL60tbG2e+9bbl1ZZjvg3UF3LKPd1S6i8V3JgJenp07c+6af+vH/9IlnepQ7NHUrKDYQiGMwSRSniUIMlhUoQOoMAD2w5mYsJim2euMgnIBqNDIIZS8v4lg3W4n3QWuqrHxosGvlqee23vaOw7h9htevqphMVqMIgivihgrlKGq0CrIi9vZ3tj9wzyv/9k+uvv/P19ZHWYKNmkEV7GCtp9N4vtXEoA7dfse5M89fv7apgkl8KWGUmTQxOkjpbG9nbYgZTU01FLUa7XqZewABGwyZa5e3vv8H//1nP/jHmESj3SfBjCwMGGpaUpFaKIdivU6jAr2i9Xf8+/2veJv6T//+0iMXy4WZucfOqxeuQ2cY86r/8ve+GLO4YRy8LRmNxmdeGH7pazs/Njzxmx948fqW1hnlbnRkoflHf3tmvQPL6fLb7jn27Yca//ZXv3DvW7+pv31xLikE+p/6xIv/6d///B/93t8cu/nYqHSuc/Y//MS7b7wPHz9x/rNf2n7kVFGbWlzbystRGUo+dW39x3/iv/7Mv/mhB848ONztqlQzYz7yve3h0RXprcmlq5v/7df+3daFsNBu/N4f/M6ffujDUVMnqtUtQ6c7vunoLbe98s1HTt7ziT/7le1rz7UXGtvb/dy6EMQkibBEUVyvEYYUXMTjnoLgmXrdobVl1kwGm1sqjVEZMKnUVBxrH+K/+fzowYcf+qs/++fZSuJ2C65pbsxgsUnUI+UDDBRsglsb581nTzeeemz0Hnf1xtcLJGV+/TJyZHagphe++Jjt7KpBOSjmIxY1KNxCM53TBtZGweCwCLmVod359V/6X//yJ3+80cyqo2D2xJ33veeekdDWmVULc8s33Lu4/+Zhf/fk7bOPPnvqxL2vClqfOnX6zLMbndUro/mk1dZHbrgxZtWei9tz08wJk37FvXdnczc+dn7odnr7jiya2ez5Zy+Ebm7UzvXnv7xwy33d/iWfX77trmOXru6MxjYCyfOSPSGZWl2Gw9H3/It/8XP/9Vve/KZ/ljRWNneHcczOFAxsVFqqRsIb77r/wBdfuHR0pbY0bedSuzA9lU752062r131v/+JbjyV5AMubKBKl/egFFBsnA9BkXelGp/SRU9lWTHoC4UojpGmmftx6o7fq3/QwP/8P88+8LXuzTcdXFrcNyx6u50+6+jQLQd7g/50vX7kxNEHv/y0LcoLF9asZSehKKA/7Ha3R3PTtZumWhunzizPTdca+sXnz+00Gne/4k4Z9p67cKYZJTv9Ioqi6XZDZSCuXGQxhjz7blksT7Uvnr/YuTZseCyur926f+GhL3312efPzBxe3B2OU1WvJXG7GWdpGiX65puWVs+d2djKb3rlkk5489zG8sGFLzz16C/9zw822jMUAyqM06x0aNJEGrX5gyv9K2fedOfNV3YGzgfwUpalic301FQR5YgOWer1ejn2KouYSxu0ThKlUlfIqbNnH3zs4Xe/730mbo1sefa5Z5rN7KHPne32hqCUcxqLJhDb/uh6b+PcZv9w2WwnN7A2drxTb2Uw0vVavbvR2xzlgmV7ps6Y5Y5BZNAbbV3ZKItx1JgpKZ2KlVFJMjO/cf36zMxSzWQ9la5dutBIG4l3l547zbnsm5/aXtteWJ557uraoXvvrS3PXt8cRmlraq5++fkLoNVgc6d57KaLF58/sDC7/+iB6YMHi43VwWgQ6fjukzcaHl26cj2dxsXlKT8aqNRkU3Pra5fSVOeyd5NcUDAijIPSCctWt0tm4oQGDEmmj9649C3vPbxwUpfRrhCmrTllppE1e+2d12k7YATIHjRGmEX4lrdki9N3/PIfrW50oCyDeImNirRWzgcCK8ERjctqtC0YkRKpql5RoZCqhv+xjn0IbD17IEOklfMigsgc2GultDZkQy0hCrZecws1mqpDv2uHAcaBWAhCgCiSdpxq4nx4YF9809Gs1Upa7faw3wuQnDq38cS5YW+sRgUSQAC07AWEGRgVcdUSxXs3u0IgikihquggioBAolgfPrJEevSq1xzKTu2sbY8loFKgjSIU7zkwhuBMoqtLIJGgqQrfk6BywN0C8m25tmMfu3Q90v7YUvMNt81M1RsQuD/urW30a9pkaeRZJYlJIqPEARCSNimBiUrnTZbFjTqwaBBENER+nCcmVXHiIZg40xBxYuywn+VYFnk+LsAPQyiRtFIaVRac1UEUY17IznZvVLL4AAzWlSySRClWZa5GtaenC6UVW0BVkB87G0eRBKeEityzUTqgVjCypTBrUDZwEiX1NGJnY6M4SKSwnUWjwBQAtFYanWMLaLQ6efTWnfVrRTHojIqItQJg7wKBChinsXgIrtREWps4NbnnJDWCFNgrpaO4VfrCGO2cI8RU16wXH8rRsK8p0VHmAgbkmelmnLUJWEWGvaRZDBMhIIAEYQH2kwAXTFqiRCbZLkBkEfUyoqiSHIQrT8Yk/jP5OU/8MhNBRarWeQUU9lJmiKrSFAhpgkgAqNx1lcxRTWnlZY1m0q0GhHvUZKAJOudlFhCCCPIEhjTxGvHExMR7+bVKRyGQAEhcCTl7j1NpMiww8bBU4PlJYA4mgKA97hdShUjeM+VUYhhPlBre2w5fFrgmuhJM8mKwZyoCQBD6hy0rVDMiV4Yfqo7BiVdr8nsySefBxIg4aY3fW/lqOSYQaBGZdJDJy6N6wElP28QWxhUmViby38TqBIzI/HKaTUCAUeGe5QcmMUKYgKX2FvhlwQ15D2xUGYsQhPccWpMYHwrsEber2FwI1bqLEAQCR64UCRgCqAAQJjlAJGQABZV+Pnnu1JjMUKIIgTVRJQwGgADMBBwkVDY1jcJeEwoCI2jauzuu/vrch/4plLzbs1Pt2vbq5d3BcN9SLRTcG/Ta822RmgQEo0O5G5Ev/Ozh/UeuX14ddPM0g5m51gsvXFqYmR8Kq0aoWS5K0UnUmoudU5df2tjqhRvuvmXffOtrn/7Si9fz+tKK84xlGRW+yMv1nSIyRrM/dKDJgOwhH2fNmfm0HtcTTTzmSDXmF9bPbc9PL8zOrKxvdZVxHPo7u2vkB0VelCVbF0jX17e2661MZzxzYP/W5Y31a5u59aR5YaV1zxtvf/zLD59YuPnzn3v46G3Z0sHDcwdu/IWf/83eIASgwCDgAYRARSZKDC4sNA4eWnr1PTfdeLAZYS94HA9zlWV53/rRYGcwHlu3trE7dt5ktcW
ovb27CWZ48fkLBzLdqjWypozL4XRTLS3W3NjuW24Kw9krxc033/Nzf/70U+dHWcQgeOK2b1zdvtLdegk4Bq/b04vebf2r92bvfE3eNqMP/61Zqr/qu3/53rNf+MVhXmLtkNu183O2c/7a3MySObiv3l4v0KUwM76irjwVQbx4+30HhuXl3mDUuRxf2Tn80799qquStWuX1VR8cP/x7vY1O9zKmshiAWworPOoqDUaj4w2ABA8R1EUhFWatur1/va61izAoiCOzbg7nmrU3vb2133k4x/P6hkoBaS7u4PSh3YW+7FKk8TJqBxbE+1Xrh4rJd3LMWz8+L+893t//luFn+SwG0EZSk9pvSzKei3C0jz+Ofi+n/jMqiRpzeTjvG40g5legD/5d4cb9bPjfAwwFQJS7JOIxt2hAvBaW07PnOtd2sbnLsjy0n5tYtJcDmxZlFkrq7fa5aAc9oetmabROs2mldILS9Pd3npWM8VolDv14b96dr0bD7uDhX3tl06fBYB//2PfV7maVaQJKU0iAmLmIGKMARalVWwMMzvrQwjWOsfOmAqCLYhUfechATMrUqDI+8A+hBCqg5OUAoDY6MpxXX3zaEWKJnxEDSjMIoEIK/8LKcVQ2RpRxKPSCiB40coAVXg3Lq2rKrSccwgQaSNIvWGvKEqlTaw1gAQAQgjMrFSSpkkc1eM0eM8spbNaG630xFktwiEQESL5EAKL854rSygDIQkgA1tbVtMCIlSkK1ep1kaA95J0CAhaKUQipQzpCunnvXXBcwh7ADnw3gswixgTEQBzMDoiUiGIBwEEo41wYAHng0Y1QQMCs3fVPIQUiTADRDqqTq8cvA8ekYwxwbMyutovDhYgEGGej4iQOSilJ4E1Dt4HARQBYyJCVU0AqjNH5X5HpAmnIDBpUqhCCMxBa12ddxQqqZKCiFVb5oRlMPG1k/W2cH7sHLCvJQlVnwNCDuJDQKWUUorIkArMPohj75irj5FGQgRmsaXNi7wsrReJYyIyRikBYFLBOx8EQELwxpgsjiMdGaOrc2QQLxyEmRkDMyKW1jrPwvyRj3wWAIbu9PYGXHjp4enpsDg/R6oNHJeFHw4Hnf7q1154tFe8uNG5vLvRbSZpTPum5w40D5Lglc7OZqOVju149eKlV91/8iuf7+y8KEtCz51b2y6MgTRRUpRsrVOKmAEYlFbO+5cdykQEgiCoFbIEQEDGODIuCCARherCzBjtfGAR9sGVPkrVdLPWUuWrj8Fdt9fm6ts33rPP9svN9d7KLXPRis39wNQziiMgKzLUEqKkvX42fPB3Nv/w7yAf1hrt2sy8vrrdB5CWzlrLs92d1e3tHk1BU5QxLQ2xBt0fXn3Xu6ZvvRVvvO343/7l+T/4g82ZpRXL6H1IUjMaFT6oNM32N2caS8d+9F/f8MpXPsDZlivHUkqiCnYljUkbCiYwD6UeGE2DTvz8Tz31kS/q7mrWSoyjwC5oxqDRIClkReIEy4EVcMqosUdCqDWj3b79+jce2N7YONsVQ/Av3nXw8mOXxsnynz2yPYWYGN4aOQ36vte+mm1r2Otuda++ct/FX/rDr6vNvdg5fXV+7nXf+u4vnL66UksapR+S4vp0HQJpCVBydzhyAu3prLMzLBnmFg4+8OBXfuzn3s/iv+dt36Fl9/v/9U9oFQ7etq+30VnatzJbX/mhH/qxxEyffnHwib/69edf+tQg75VFmaQminWzlbmCIQiw3t1ufM8/+7m//tC/s8WWdSFrpSv7Z4b9EMo81Xj2ynaaJAqBUeJIQeBUF5laffvtrbe97RVLh2vNO+eYv0T5uhsOca6BW+W5x+2Jt7/+138OP/P3a7mc/rM/u2vh5oY9c86b/X/74c4f/c3q1m7dmDYbLPPRTDueq7s5DMHj5bXRbsHWxKWXLEpec2JGwhga0f/5i2cA4KUvPsy1Wam36mnWSpPNS5e0r33qUx+45cTc7vW1zc6aEVg8cmzj+pWHP/vZEsobbrl545oc399s1zugsp2+3tgcvPuHfySZ3r81lulpt5Rdfv9//G/pdBbZKHi1dnXjm7/vh/o7mxdeemR+caE3DC4fAUN/3IEQXNEvXC7ErX1H//qvP/7a21+1/8abrWXnczceF6MhE0eIG5sX7r6F3vKGG5/40qeTBUUad3K/tKjbAYqi/rXT8LcP9fshvv3kXJbOfuxTT5dFKC0oQK0xgL/9hqn3vsG+9ubxwpFsamZ+OEZtCKMpkQKoo6KNSNyVZ/e/76e6B5YP3HB8adTdtYAF6qnpdvBhc71ba8UsoTHX/vtPPj4uci9+UPhQ4uJMNh4V9UZGwSUaGol50z2vfubCtY08x7zcv9RsaX/8tkOH9x/vd8rmvsZzX/7qzvXr9bm2iurDPuzb1xwVI8jDm7/+jS+evlIKbnZ2tlavC/Ktd9/e3R5764t8ePny1fn5uZWD7UZSu36tt3x8LuYiBeZa7d/8ykfTWlMbHHUGIkCKQgA00djr244d+fb7j2Q0jpIswVh8oAjiZhzH8bDbV0jTU0vbO4Pnz1x4/Wvu7GxezrJGXKtPL8z50l147pRuTt9w643Xzl366gOfS7WfWZgHO/Xi6XOH7zx6dZC/+q63nP7qX0WJmCxNp+di1rvXerudcWM61pq8kGh97uK5h55/8Zve+GoFGMXNxfrMS6fO+2Evq/GBQ9NfeWG1Xl966537hoNBlkZRGpGKRGhnaydOSOnEj8e5LQZrq4nsNFOKfLxrQc+06422jhIVNYDDhTPnamqwvTHQS0fnEjPbjhzAzNIi9HZCKRLVg65PN+yZ5560jFmjbsexJxDTKIbjxnRWn25913//EwA4cvBgCMziRVARKTRRmhChuCJCt7yY/fS/+5b9c1uqbtPIe4SaSgrnlQRvrVG5TqyAsKg4qnEI4Lq+3Fi/BB/+GF5cMzyAo21YmY/mG7EOGMemPx50S3hhdfil84PNATKJQggctFY+cEWJDM6nKQEEBYA+eAcBEDSaKCryAokUcqQohmImdm9+1dz+JUrUoNnktN7s9un0izvz+5azrPGVx9cvrBZGqZv2qze9qX70Rj0e9qdnFjAQkQZxG9vlxevuS1+6vtXHeky33DTfrAelcGun6I/40tWiM2b2hCrq565f2LyQ4KQo0QWlAxBgrW4OHWzNToVv/Lpbzl3Kf/NPnwaWWhJZJyVbZwVRsXhSxM4JUvABKpotSGCOjEoSUqiC9R4UadQEDROOL9XvuXH/bDt1hVpf3ygcDUs91Z6fqtW0+BDYxBFozUEkSmrNJrONFFLwrnSEKtKI7AQTqtWVNiwkZd+Ohsy+Pxo4a4vAUI5T9khR4V3OKkI7GI7FmDilSCfgAhlywQdxSZwEK4oiFSUzM7M6qvliWJY+KMwds7WaC1cUo4DkBErWWgo3KBwHlYUoSmfma0YHWygugh0MewNg6OVujFj4YEGSSOeCjVo0n9ShdFc2t0eIYwiAKlMoIQBJrVkzTEYkqUegVZIaRURJJqRJw7jIjYZ2KyvZmSwZlrvD4agQV2I/Eo5Yu4
IUEBEU6sDdEnSS6BY+QYggJVVqVSxCEqrdGYJDEkmCqTGONiiMyRxRqNU3WBpqquJQYWNqSM1gJaIRklEhkiAED0HjXGKBxi7ZljDMFN9Z0hinOBCBNrCBDFIAjpFGladp+KRDFL05IjchQUIEqylJyfvpW184YxWhtCiO1m6lwFLgCIIawDh+B85Kqqy6qqvfcuAJJjLmovyDFIiAEAtNYUWZG2WdpMU+tDXXtttVLkhDNrtVZkjCY1rlySJJ4DISAprbGdNqfTdR+Cw9BoNkgZQI2qoRGNsYSADcUuGVWTOvJkUBU+jIsCgDt5IuDzRrbQNhoUIbSaKQETiLCLrCJoH0JdO1Lka++jC94rEgXgHEcBidDIcqVVYkxkAcW1871xycJ1CC5EJoxAbn9E11lFpx6WzWeKMw+Pm6t84I5s+UTDGiz3xwBxUri7Dt/68J9cfPKLPaxnqKHq6OuxV6mmLB2X8Y1LMQdjDYzq0nvfaiYaNASuXRzE8p/+ox964tVHL/Q37jyml482N7fKvRCW37Rw4ewb7YYszrbPvLEeTbtx+6pZUiOq5zq5w5lY1CgeE8s0MSYaNII4ngQEZRpdibT/2tbZb5792sdfWTqcmgZsr+/nOrMgDDRinp9p9Df7Mw27PGO//N///E0Ljbe97c4/+uQrYvDGI03r/VI390xXB+6J1wf9aGtOiglPXCwiI6JGkQBAgkjBRdLEDFFYkwgCGhqMJkM/SNPG4sKsr0faYiNLfVWBojTHajJSmSwtdOqyHhalzpQEn1mdJun+oKdJcWSACCQhuOGo97b3vfXVFzYH/fGhG9qD/VE18f1B2moknW6z7EzAu0YK+zEMfOs//sbav83nDt8QdjZr2wakxvIS1ltxZ28cnGQNHWPcH0yOzOmi7158qZhtHDqz7dO8kc3VxbioBlVAePt3v7ndORvWqssvXzyU5f3Mzsnw+x9sff3JzY3haFTUk1EAgiy1w2H5ru/6gbKm4cbZG463P/3JU1v9tf/8q//m2S997a3vfev+fuhYaejW9nj/73z4H37rAzc//9yzX/ryc0888ryXowEUAtfFJLE6b7ZNnqwcWxlubx+cnetf2Nb9N77t21rf8yO3jXd3/f7q//XPH63ac81Ga1LoQUlhVEsxMYnyRSGiyrKoK2cbnSSJE4n/7x9+belg/hM/fd/3//x9dy9fPPamA5XMaNOoMDaTKvMXxv0XMGHbXY3cb7VHf/1nj9+3+ZaP/soXx6OCGbU2NrPgXLuDf/sn77vzbQS2N70K5mbbV0dX2PGp504151ZWlK4HW/X+2tzSLcqaXr9MTCLNxb/69/81+7EJ4uvBE1/8zNz84uoNN3fn73z60YduuOkgt/Zfe/jXOw3stmaT9qEbH3jPKGTlKPkrP//3HvvUH1mdX9m5lCb2yhtv3PaOB5bb7c988l82WwudxeXa+SCyN9hbmFnKrSnDKEu7ygNjZFKKFABZlYplFLEm01pvbl719XbWzBVSrLmoRiEKcBiV+3UoM9ti4FbLsLjSe62TheUDu7t7o/EkNzrJG5NRr5u2j836q2VZMjpPihFEX9ztTXGfqJTRiUd/+pVnjt92c5rFi1f25ha7v/0bn/iB2//awu2LeWe/1GOVQl61Xj5zeXZ5Zci4Pp4km7Ye1mvD3QbPvOuuWx7+8jfmluny1v59d974yv52u7EsevhTP35jqrcWj9NH5g9/9uvmn/zmS63uUYOwMR7NN9LubJrOpuOJm2/Ok6a1y1eWDqw2WulwMNEWHXNvONFJ89htd2z+v19sZwno6EoXA4Y0QU3RuSgCIjGCMlYbSqwKVUyIrzz3fEXw5ntuXZppjGkzaSfz97SfffzV2cWZTEdJ1MX1PiBqQQr1D33/O4ebW3fctfr5h587vLrcaDWNSWfnBUxqTZRRtdBtqFazN+qNCwbvQHg0HM4td5yrV1cW52bmA/vXXj03qxqnX7m0evPsV77yVSn9/KHjTz/xzYW5jl49fvTYwuqRQ3/wiU/UDLXHN/bKg+vb3/P+dzz9+Dcuwvm66h0/egMiFrVcXts9ODtz/NjyAPzy4da4ztaeP+cGdSPi8sx8E/UdN5+4vH6FS8ltFgk4VZDEd733nV995Hyr2+ltbOSqCv3BpA62NVetOyGy+Y6SZHGls72zPb0Krq6tZTZnEGN1DEGEFanLa5dfv3BBkLuzeZqo0aSyBqOPiEAKQxQkvj6BgSgCwqQUTAchkUEASWljAUWnxS/8/e8L4ZIMJkYtG6kUr3Pdq8brCmrS6sTdjfeXd5/77WeII3usfSTiaTGi8uFaBBVFJNGmBQRRMDKANxy1KBTjA/j9SWlTrTLjJuKD4hiQ0NWRJQT2MYBSRACTcb+gfmIajMhhqK1CQqV05NjMk8XOyn6xU0+cskldeTM9+5leGaQjh+nZ3LTbISCIRATCwgKKSKb3OoiIECMrItIQfCCiqXQVRCQGF0EjAlEIAacZBEJEYRZh0EYbbV2oWABiFCSApDcsIu8Jc/RSFn4wqoLHq+v7deU6WWqSrFeUk5oVAk8HOUppQlLIgSWyUkjAMQoioUKORIpAUe3Zhfrs9v76cIwEy3vZXJqnfnzHiUONhmo2W73B0DYbkdVwMuRNFo9QjQ900mrY90ENC6klalSjcnwkX2w2sCjGJOxaoe8KX4sErZM0zZsNC+081Qqu7r9umjH3MTrPCWtU0EzzxBSVrzju9MdV7ZUyiWmIyuYWl1OdN7P22miYsSNQVhtAo0DNNLsj3lfM2lJk8aNJrxe1hkk5mpTjUNdI5JwD9oSQNnJfliTRCUIMFjvbu5Nup6WybKaZXo9HppxlnB6MwTWkj1x3hKEII6pr0rko0cXJwBEpSARZjNVKUFTW0o0a4SvfHH3pmS80f/krZijBWcJWgCjEzgc9LedFBpiKgIiUwqjqodRDrgZic2U7VqWRQ0DDgIJKrn1+U24RwNQ1BtfrNzzt41zvFE0LRwJIwiDXFWjTHtIUv85TttE0vgERpOtpElxDDAEDsEytZkAAfG0jdr2kJH+JRLrOI7p+mQJdm5jJteNDEABCQsQpwHoqUJtyoVgAFJJCAYwCCKIApp+OABECMGjSERiBEZFAJOA1qDWCBBYWxYA+GpJWK20bbut4cClJYNJIQpomJDHGotUxMUqoOUpQoFMDAroqeDAYOifouRa1P6yj1yWLAVUVDmqnOnZYuItXw7isVuZNI7PDSblW2DOvwNpFVRbGsxUUhUxKGKKLUSsSACDxAggRNHEEBBJmANZKTf8vaWuzNCtcASLBh+BZIQiwUqiNIhUTKjtpzHObQqx9GFRJWVultCYFXoKvVrqUZiARQbxWkRow08FMdCp6pRXedE9u076QQiVWS1U7VYuy6EIUC62l+M679QuvVDvblrRut7L/rVU0N5f2t/aLOugSE229J94PJss5+FgRpDTe2+8N95rthd5OYcAuzcwzzQz7vXFdn7jt1s4OsfBk0J9dmLvl1gf+7E8eYaq/76+85bkvvyxrw/mo9t1oYbmlctw
8tX7zTbc8+sjX5w8fFaFJyfWI77v7LRd21++9efGrj3/jgTtvnuzt5G27tNC+cvaK6DjanwwH4/3B6MZjq8P90S233fHa6c3HXrv0/Kvn1q+uH+tKMwv/6e++Z7kxOXm0MZr0LAznlrfPPnH+4mSpbK18+G/cTvDcuJEu39PtRzPZnly6EGp3EFU9dyDXrbrbSnTa+c0/OvuNN1xKzW7ecIX+6X97WX+E2KmyBKAIyii9oDTUipg9gwMRdtLqtIra3XTDg689+/nKmbve/NNXTn9qa+OcyhszK4e2Lp8ajzeefubz97/lrc8/9bVmS2OeHrvpbc898pUwLoUgb5NzoTO/EGpXl+PZuUO7w8kv/K0Pfs/7Pj2+MoBqYelQ8pb7G//z82fyo4fnV2565cXTCtgVNWhlEiV+2mgJmpTKFIASIF8HrUmuFfwQkUhpBFAgwhEUkhZSsa7YkPaeEYkhkgZFgBRMYpSi//jpJ2pvhBKrIQnVHSe7Jw7pI6vNpZWDJsnrzaG1UTKaRAoe0lYisc4bqbLUH4x1GrI8bTk/GZYxKq11BfVMo8mu2r5yZW9v5567jx4+Sade3zp/YScKMBOS9i5ca3eKaKIw2RdQIMo2WzrJvQcUIx7FIgiX431tmsYC+2gptZQEBGw0d8b88On5u07Nzc+Uh+6+cX+dev2WeAzVwLS7dahgePnt37t69YVXv/3Nc8bd818/s3769EiTjKZRkUZNiiRaa6ZZOiIgkrFWkVLKArD3Hgg58vQ2wjnPIhzZOc8gcWqrFAzOC7PWhhk4MJISgBC9up6IJ0nGAMxTjH8gmNKuGVElNpVrdDwR4cjRKB0iV+zSNCXRGjHVWqESwQjiQgAEZlRKT3k9UWJdO9QqRs5VRgrqUCRJOt1fa2sya3f3tknEQIaKkGjaOo3T+iUhAcaIWuksTRJrQ4hFUQADB07SxFW1NSZNcwGZik6AATUBkPMBEa1Joy8FIMTIEkVECRpSlrRSwBwhhtSmwuBCKOs6CooLRIoAiZRJmz4KR+9cUMYyc2AwiqJIYkzg6INDSrTWMYYokiQWErAmBaIp2DezRkRiiBAFhKN3gBI5FHXtnGeW0vuqcnVVBeC68i56Mhq1YuYoQtaCAopgJGrSGokQrU2UQh9FdxKFTEAICMKoCKJUrgYljSwlUqOyaGZ5rhMWqIOKEmUawWmdKj0qClL59mCQmyS3zD7q1Ij4vcG4PxyNiqL2XilIbGLzdHE2TQkCep1YABjHyAwsUpcOGYiqCFEEkFEhWpMaZVA4S02TKEQOIUZGVjiMMi6DG7uy9oFJkMY1O+Zx6asYjbYKrp2hWUmaleldgjNv+DeeA4T+m98zk7Wr+959+8U37H/+O6/3LkI7m6u4jgqsImtz0uQDp4YUAYpnZgBOrQLAEGIjS2LgxdXOEC+5EjU1RqO0nDSU7Z44srC5vXXo+HxdDBsNWD40f7ko9bxLmpYAWApiY7VCBVEFk9vheL+RUKJsrzeeP3CIGVOHb3zzdKRk6Yb21v42j4IXTpUna+Do0j/+d//m6Uce3vvmSzeY/ZuXZ249vvrQn33ly59/bbaTZClCBdFkr2z71zfKXqCxyirAkiMbZhaDMNuyrVwLACOSwlDFSe1rz0BY1ewZrAZARQmSRiCcm58djau93SHGEHzkugbBte1ed6adpe0jh1fXrp4nZKNVMam77SzE4EpRyoi2k0lIrfn8Z588evsPjjev8B5C2l09iDccu/eRhx7a3BkpqGZmyRW+kyek8nGtvvI0f1+nuzDfO3MWH/3yYH0tYZbomQxNxj7T8s775//9P3nnpNx68cWrvXW3V+PrV8dlFUOENNdgKqUyCTbL/J1vnr/zgeRtrmkWFo/d8Ja5f3Pun//nV5OmNU1ViVTjyXve947JcGfr8hVf9P6ff/+R0eZeZ3V+r0xffPXKZ37u//7o//xj8ObQXBaHu3/1Q99/tZj03Gvf+ORTNsmSLNu8tJE3yRrtCx5Pqtzqje2J3x6kZmjj6O/8jYP3/c2bAK7wmKhK/i+4/d9+bP3FU9vdxdVmq1NO9vNWczTZ96EKwXY7zWYzz5rtUb/vazxw9MiXv/L6xee/+Q/+yX2zB8nedFdRjHJ7vmteLU/9BuVlcuhHAtxgwpVYnlGt3Scee/6jn3tmaWXp8PH0lVd2OKjoxeZp7eq3f9sd1Np87dknplfBxdOntq9ePXrLjXsgqtPd2hytv3L66umnb3pHsnzbrcNhrMth2waFMtzdNSakrZn7P/Cjc8tLYlOW/Lvu/tZi58yLj/3F+TfOHTi+ZGdSO9tIZledz0fD6sLrV3fXLkzi/i1vv3dna4zbcu7Fs7PdhpFmXQqg4ejytLU4k0QQQMlTE6IHQKutNTmIlGVltSn9uJHklY/AIW0lStskabEuq0mhjDLoG3mzLCrDXavTwd6I0DkXoodaJqUbA4XZ2ZlJf7Qwv5LlyWTslOdDyx0X8eLakJlDInWAta3+zTccPb87fPPd33Lmqa85iF/4xH/L02RusSuoFtv5h//xH7z2/D8a9/e0THCoN3duePy1Xnc1PXCos/XaaK1XWaDhiBut+PTzr+cz7c1hubsz+tFbHzBPbn74r73zF37O7zz9O/Pzh1x/N8HBX/uhu1I6+c9//1x/gDPtRqs99/kvPPW+9z+Yxurx18/OH5pXaNNeYzadFQjFaBjrweaVXWX1Y4+/rJX2UUijSCSgZpaA0HhSalSISJYEEJFiBB+DH4yR8k1rnzo9vHOVZ01WS3Zm26crC5c3tti75ZUFDbq3M7nlpiMQ46VTVy6dORcDTFy85eY7JjtXN3d328300lbfIJEPS4szsweWW242ijl2/MCFC5sX1/oHD87l7cb6Ri/RVhkYjorWSuPAzYcKiFln/urOa/XVi4vLi2/7zre99Jr7xuMvHF1WCzOt/noPjA3t7LELaw/0B0ePrRokxCPRuYOrjd318dLKsk2wgogy2bp4djiRtLGQzd1w8i03tub01cuXf/DH3veR//AfVKPz1OtndoblRKgfqj/5+leOH7v1C3/x5ZOrh9HMbG4MTKJnVnKDaQU8GMS66DcbxNcrBEJUuaCNBWSlNDMGF8q9rctXXzl+8kD/iauqrUjTcHcoGLXSZeVIaYxThK6QwmmbIEzZjoSKiAODILIymUpb1b1vvSn653Jqu/43nAFs7oINZD0wxBi1Td58/5Gnnlv5yqOXKBLh9J+PY2SjFTAHAG0gsjhfTavWRhsk1oojRObAIkligCH4aKxGEdKao0h0ITAwKEJNJIAxRmQsnBNEUjpGDi5qDYJYQ/tnfvCXP/bpX7syOi9BNJlG0hxO+lmSeOdiiHgdF0yKOEaYsl9gitcVFFBELAzAIKBQxciIAhGA2SQ6CjJHIILIgshBNOnrUQQAIwJqrUKMUjsgRBEk8i7WZam1KoqJUaiQnJdLawMFSAZ95fPUHl06dEjry+trw9FYmFGTAHIUjjB1SHkfpl33EBkFUYCAFBBHNilyqJ0LeSfZmwwH4yLVyf65qytLrR
u03HbbkW8+dcZHqinkKc40s9X5+b3BQGtTlsa5IjE410mVzinqsoiJnek2F/T+7vbuxbLoi4u33rp6/IYj/c2Ny5d72Mhjy4Y4nllaLMelFyeiNive69dVVRUx1JVXgDesLLWbncRif1wmNK3mx6yVZe0kulprSqTmAFzUppmCtnvrewHBRT0sSucmHIOXIBEBNQcvIJ7IKD2qXJZaS6qo3eHjxwPBaDTJ0zb8r28iMh1c4XX087U4hgQEr7niCRUSuSoMR2Wic6OVDxEYBFATCWNT6UAt76Jz05QACfwUAW2NQhBhFAQAFCJA1AaRQSEIK6mgdlIXbBqUNBKTcyQfFYsCRCZEwAgASFqm8B4GBECeRkUi131tMu0dTQPNKScbroOCRISuo6un3Z/pX2u6OOPp75I4/WLA9Mb/urlMrsORrrOLRACnHFYBAOTpi8G1D34tTSJERMDrlGqYitUIp3/oFD4EAAoIJSoQItZISEwIU2VbFIkoiGKQFYEwsIvgo2KvWFSM7YbVGFMdFhoqUa5NVasVGzkZFY0GspZ9aS2mWk3GbqGrQk3BSQiqPyqSdr61FV+7GHYGUntiEBnV29uys7GXWo3G7vW1r/NmSgzgvJUk46JWjKiDErmGkCchAFT62kjxL//7KMJrq1ya5mo+BK00RHR10EorkmZCtYvCHIJAFHD1Uqv6wIP65hMyOxObictzeuU19+mvFaeuJFFyVNYoaXSYuSDBGJgRYwWZkeUmVjEe61RJ2zNBXUQEneRGGQYnDDmBCnUky8srxXvuTS58I1wd18288b9FRYSxu9CY78xvXlrnqJFSgEiKMpukti0g1WgUPCHlgjIYjJpdu7ZxGdxooZvtbV+ZVOXc3Pz+/rAaBOPq93/w/ucfe+ZrX3z6vnfeff7xS4dm2rvPvX7l5auzDZPa7LFvPPm+b/3Axa3LN59oJpS++OL5/d1zYdB7/bX1JMju1b2G1eB5f3eStlsG/aHF9vpesnrLbZtX9rccvfL0qavnTnvw3/rWmxebKz/43oMpbA9H5xW4vd3SNLKtrerq1viu9x1dquaj9YPLL+SZ37tc3nPPseWl9Hy/cAFPHl7Y2Fu79Oq5YRsXbrzxS4+YX/qvr6R2ybAMxhV7UNSMntjFzBogRAJmFpQQQhBRiSYt9WjM0ZtkdPXKQ1k3Vvtqe+/SoCg1ZUH0aDRKTKKkvvDa4/vzCzMry6PeaH72UOnc3KFjXI1BcVVXpeOw29PgRPF+74IG+aPf+Fc/ft/syo3z5Ygr6i2vzoKLd918i27OP/fUC1rzNI5w9YSUCQ4RlXNiEgMEyGBSjQAxRkQKQSxQiNOOA5JC4ODKkCeSaUuEmCMZVVSx4tpNCmBsrbTHk5LZJElaB06beONS+7bbO0eW8ziu5poL+eqxQToa7ly84YYbz144k6nQzGyjNTfojfNms5m2xqNamwboUsd9VInS0jwws7Uz2t6qQXfKqCVvHjxxcPmme46dOr9++cre/qQm7M53Gt3Gqy+ccUMPDAiEMRrLodwXGYHOVN5A3Y7RSQwACIZi9BLZSQy+1JwYbSe136zzf/6re189om67q3ztlavn1vaSpPz7f/ttN9+bLx9N+oOL0dp8dWXv0vlve9viTas3fu4r65/90qnd6fgmxhCiCBNIaowPMcYYRBJrtVKBg9akrKp88D6IoFAgpZg9ME1fAwLHadolIEhIgEopkumENtbBJ9qGGKyygDAlx4QYjdYALPHayyKLECGgChB9qDCyJo0AqU2tSoRIKyLm4Jwg1s4zYOAAzGg0C9TOIYCmKXEpsNI+xMTmhAqwRhQOjn3MrC3LUsQZA9Ezs2hlrFFilLEGhVBQKYMgMTiOzBKJ1PSFPUlSBFRKAYJzYbrR81VwPpIgS/TgUASINJJOrTATQKI1hzhlthEho0QMSKK0EhAfwAJIZKu1VgqB0WjVtKI1R68UEaAmpRFCcIJCRAIIoBA4RiZEZQwLWG0QQSt0zmtjQGIIIQTnOU7KcjypBaRyPkRxzk3/mZhQKGXSBGCJstSSMYAskRFEKeV9NFYDg9Iwl1lXR0WkkAAIUQBVHWprVJJYo0kTJXkzchzWtfPRcTSESZIqEOd9ZE6MCrGKEYR9YI/M5cT76Kq6HFelAw8aWq0syYwIlCCAoLSCKFw7ACzqWAuAAqWUCLuqaiV5w9osyzVqjsFoBQSRxbOMQxxOwrjyjnlURmZBjRHI+VDWAZViUEF4aqmbfuu2DE7GyuimUji0k0Kf+2b1rT/xwFN/svvYly7b0makvdQ11+zFkG62LKJqt5Pd3f3IKDEixGkV+NptBChXuXe//WT7QKz3Bgu4+uRDr+aSdJsqoTptxNzGsTPjrfFwWB266UB7WbF3E46+LlNARRZIYgxVMYYYy4K9L5vN1mRrfwb5gTfd8oKuzwwmpU8DQPfIXLJy4/d95wcOzS1ub168+KU/vake3fyWmcMz3T/8b3/x8Bef7g2ioFZaMm28tU+80RuhHgFNBEqRCBIwtlvaV3H1QLdl06XlucGgCAIHF2a21tdEaHF5fnOvj6Qub/T7/SIERo1BZG+nFwVqjp79jYeP9AZD56sYY9ZMi7oaT+o6DhG90tpzcNFRQI0KkINg02SVAIMU/fF9t930md//j5evDv77H37mxYd/nc0XfuyDqytL78uo1+s/89zjz6wPzPZYKUuPPlu00vwHPnDw8uvbWwOzNKNqUmsXRhQRCMchOAwvv/Di8iHVPUAPfvBth89mX/yx37JJWxvSihKBZ7/45ANvaYDD81f7k7E79G135isGquq1V9a0Vh4IQYpCGNNLl+wrr57+7Gf+Hy6v/M0P/99JG3fObP3iT/30D3/gweFO9g//xs9O1vfvPnnsxHJreXXpV37td+YtLDZmBs7Xo0FuSUVm8KEOjSTJQJYz3LfjucP5/OKJ+/5Kdzx+TcMe8by4yaEFd/NC+drZYMi50eagN+YEAsW0071p9e7zrz+rrSqcU8iTkgVhuyfz+dLnP37m8MLVo99yvBg87nef23v1lc4CJLc3Kkj9hAgy1Wofuvfm/MUNKovL+0OwkwqYEcT5EGoX47/4tx/71X//7oMnjk6vgkkvUpkMr1amkR67q71+ebPRTcY7vbB5aUOGWaN5dLX7/CNfLQbDnfULb3rXW/p9c+Tk7aqRb+5MVpaz2o062eH7vu8nJxuvXX79qa0Lry0t3khlmejsyI1HqvUam23eG0z2wuNPX/zQD/3SYjP75pf+m8ltWYaiCMCxGI9Qpa4Wm1jQ3sWQZGn0LIqmZhlFtmVbucn7vsfk807uQyhDpQVFRJOxmbZWAcq0ytFq5CEQgorMMURXjr2v87yxduWNQdVCk843DoTALCHLaa7jgCpXggAVLiwtLvRcevXK+tyRIzu7PUZgjJOiVppclMkk/ON/9pn/+A++NfYu723PfejnPobUuXBu9+rGfjGZ+N1JA9Pf/+Vf+3f/7SMuj7uDwXAcpKE/9/jn3nT7XLX+1Y1Hy2M3LuDMgbKXFnUCu1vvvg8O3
/zW//yJS0Vs3H/77UcvzvRG/fWr64cWlrYGoTvTOHVl47Z2o9FpDnq97b1e4UN/GP7iLx7ViiIDe0GtIpCAEolaTUv8ohQhkgQuQ5lnqdKmDr72sbe5eeCtb737zvaZi6O11y/vrO+MirqZ6vHI581Gs9VM8mRzY3Piq6RrZ/PFan17Y+d0KmSVdFrt40mmtQVQtajaxVDF/b3h8vzK2vZeK2tO2G+evXpwvuvGw7vefHdZhsHe3qkL51rz8/OtubqWlg0p5k8/9uqgyA7MHXD1pD+oc61Hpau1KkbxE596+Me+7X7dIh+xqCOPC8eq2ZpFdvnCShMbG1u7MysrgfDY/M29cX3h7PrNhw+8+LUXGoWXJAwmFWo9Q6YQ/M0/+8oPv+/upfaBJG2n2kaOaJUCjlKURT0ROn/h4m03HFpYuPaQHDwriYisjCJNrg5aI0b//DefWHzvg3fe97ZHn31CQTG3OJM0m7sbPZ3EoiyZAYEFRfia75oAgBSAxMgAaI0iQ7YhS4dwbefrTbl84qbj48sbjVl2/RfSuUVjF8FzVdcQA+Ha+967+PjzZwebSkAJMyNFYas1Cnp2noGDkFKkVe3q6D2EGEOYPpEFN71vEWtwUjitSSsdADhEjgzXqyLT1kQMEVCmizIQFBQiCYiD0c6v/NeflcBZmitD5aTqVzHLmjONxc39S9NGQIiBY4h8zSkF000NAQCzhGmkICwQAYiU1jGyMUmSpM6XSCIhIpHSNIXM8BQ+w6KNRoAQGQCRUGulVVLVxRRDJNMzDBc9i0xdMEoxgw6k0fQn8sK5ixC9QpxpNOZbigyOinHlY2RwXpQmUACkoo9akybtQyQAEhaUUE5yK28+sjgzl1flqArYHxVFOe4Nq4thRL6ESopxVZHnpnjkwBwn9cEDy32pQ9svH+yKrwmVMgQajLaa6dBMunj/KumklSjN47rsu+hzsXUBa6NR15JmFSplSCe5LcJkc1SPBhOTZXOdmRZhxpSxjItobV6OimpQJ83MpzojRQIagFG2tnaKwpdcUarJmHIyGldQlJNQliGESXQTH7TOUy2AmHJMrfKgVGoZoGEMNSxqq/L2wqEDf5kHXSsV/SVmRwSBrrVlEIRRJLIgMMUSBtsTiyZ6xx5Y0EUXIxilroF7mKZwoDAdDbAgkogQCwtrrfI8t4q0RuEYAwfnk1yHIM4zM4nHYtdLabClYwKSsqQCSoQFp3ftEIGQ/1JwL9fSGYRr87lpJjP9q1xT0k9jHhSYasDgOquIGYEAmIEERBAZRPCa+J5FaBon4TUg9nWq9BSILdcQ4Ne+ZnAtroJr+c//6jOJAIhCgWtCuWvsJAa5pk5kERJAJgAUIZTpQb5RoJATYqV8ooPioDwgaCIhEBPLdqKMxFQ7Q6iI203dyGKSSbvF1oDSAAIBTQzY1KBNSLpKJ16jQmEkJWLGziU6+fqzw1GZcWRgASQC3CmAOSJUSkAAxyXEKIm16JijAAIrCDzN4lAElaZrzUEAwGnYC8DCEgHo2mAFhLQ2WmuUzNatvGynYm058dQb02CiA1hN0u34d72zdcPtDEnQTNaVR46o+QX63U/WZ64aHw2Sa82C1sJRCIAdkkCnhStdGUf3/gcapCakQBuDIRJMFMRwrXBijVbOjbKW3HlrdeMFj4NmK4f/LSqaDOs00+yKTpNSa9DO1JO9UI+irxqtxmgwAvYzM/OkE1dsjAf9NGnONZqQmkYTxsNRANBGZlJzyz33PP/6+dAMdrDVnU8txdkZ3W7PXr2cBOSyHGWaPvTdD2xtnjo4n4Z6J2nP333XyuX1jcQ7yrKjNx7d3N407VaWqavra6lWKzPZ2vm11y4VO8P1wXDyxuWtt7114W/+4l2HZ0BtXwX2g/Pf2ImDZsNORLTCcW/SmFnY2UnPv15fOvVK2+Q3nmxk83PNzcloa39/h7ttJc2yhnOdhfqO2eXx/qGf/Dsvv7KdZ/awVqjII4lY5R2TRNGRDE87pEJAKCiglQakULskyYgkyajfW2/MLx6+/U2XX3r2Td/y/ade/DS7XdevEBkRjYlF0Z9tLzYyg+Px2tXTTpKqcK08SCjzVBMIc7SpiRDQhWa+emWnEwZXZ+bT2KDZI/ix3/+uX/8fFx5++GUDQMRJbuo6oEkCK7C5tkaqUjBCjNFHMjoyExKRJJaYHSEorQTY1d4YpbSOdWRmRcTKx7rstMfzXXnf2x545dTk2ZevFoVTScZltZDSPbfNLc4HKYd1iYuLc6OipEivn399IQsXL7zqaxclRDFFUeokUUqVVaWUdl5CHW0yV1cRtfYmo6ZuSljbHmbNWdNo7W1FrXyzld5y27HcWkeUNvLZhe7hhc43Hn1ehCdF4GnNto4CXiEr79lHa1IfAFDETwTAmFwAjLHCPoZx0rCBySczf35m/5OnX21mSfBJGvRP/8rj7711/r/+p//P1dev5sdmE1+CmUzqK4cOqp//cOOeuxa+9xd3phmN99ForTQhTSWFrJQSxAigNBmdlGVR1G5SFFoZjhE1RWGjrAIK7I3SgZljUKRFOLAQmcwYJGDvEAlFUmsRUSIDIkBEiYFZQJxzNE0Bg4BSdayBFBCSMRBEKzOtW3vvfUQAZg5EELh2tQvMSaIBBJWehhoA5NkbY4J4IjXtWhtFWuuIFGIIkdM8tyZBEpIIAkYrrcw0w9JaT9/AqtrVdQWIVmulFJByzitSAFC5WkRCZJNoY0ysXfC1RqUNaTV9w6LEKKW1cIwxBI4xhBC81jq6YCVkSQZEjOS9V0orbUkhsBcRo1ARmUQDKpUaEXGuJmREtNoAWgQhQlKklA0x+ijCbI1FUjEGFFCAwuzY94fDYlIySOXcuKiVQhFAUkmWCIBGSqwFMkZpEMEgSmlmJg2AULnauaAUInAdPbEQRKNsYlS32aq9G4+LwAGBFQFIZB89iCb2kSsXrDZGm4QIRYzVIXoi0qQYoQ71aDjp+UiKQojKoBB54SRJUSghYwEBoi+cGEq7mVVMAOxZGanLsvZOUGkyqbICSpAAwUUffayYvY+uDpPa744m/UldVD5vJKCUCKBHJJzuFCvnKx+nh0Z8fZn/Hd974OnPX3Kc13slcN1OMIv25YfWX355C+pGlgHp6CJrRcoSAQljROgXFRISiGepC5+kpq7ZON9MrdSh3x+8eubZB8ojs0eT+ZA+O6hVY9YHaSYasuzK9v5i86BOmjPtoBNdT4KwqKyJlHHIIpFgYPSKOTVaM6YudJurnbk55eL6K71bbr3z9jsO7rnm8ftuu/nOI+UI9B7vv/r1I/F808X5TuPhP3vsof3+MOjL23XWsp1mNqz8+SLsD/3EmgFjhegii5JON29kaT1x3bZanmlgGXBYdglqF/fXtjtKMdh6zG2bNzPTVHq3Md7enwj4YRFIa9CUajk+u/gD73jXb/3h/zRWu6rOm3lZRk9RglcoWhOSxghKJSAqRG8MlUXlXLRN6izY3/xPv3THSfieH/mFf/n3P/DUTY8Pt3//u//++yLMTCpM03v/Ghz1g8mlb2595Quv
XF4zT74ytEbFONfoTK5e2LvvrhtPb42Gw6iMzq1ZnZt9+Mun55dSuzT7+Nce+cbpaqbV3OvXidJ+WCy04+KCYinGwzAZwKiHW6f68QX+7Mdffuz5RruZqFba2ykOHmgXY5lpZD/+vd/2m7/+K6QAs5hkakbTbd96z7f8yLd/9u/+p4VOp96fPPbYC90PvllCdXxprh4N9obDEClp2EZLuzoQqe5MmuQp+Ni7eOXN77vpWz/47Yfvfluv9zvRdRq6PTqXNLp49C3Zr9x/78sf/NjlrV5q4lLbhsqRWDeWwd6mVbK3s93uzEbnmqlixeO62AF74/233n4QX/v6R1dOdFt3VtGCuXm14tuKckWrJdRY7V2AMhxfmEuqgUmbW72JVSCK0aJROvrklYvV44+Mbjp+bYB27Jbjw+2Zi2dOD/b3Tp7Izj36jM7V/e86ceXy40v22NqrW2svO89VXRTtphruXc0XFnb6/WMri4cand3t/SQz0SgjdOzkHZmVhQMH2o2Fordx5twrt7/tJp3hXW/71r2dray18sM/82Ndbr3x6sM7m7urNx1MsoCoBHVRFs3EAFTW6sL1O82uUopZnK9JgU0txxoJJmHoeNLMZ0UsYWxmnaqaJFmDMQYRN6lYYrPRGPUnzMGmmmyj4hpj7GDT1rbRtEtHju64oKWZNJZN0QepR6MSQZqWdofOCLUMvfHGG6Oq2i28zSyRPnToGALsblwiJVYhBfvxP35udKk8c2r45Nr+g4dX/vGHDu5vnt8dxytb41tu7nYEJ8/962PVxVODriu4kTZuu+0Yur2P/L3/4/nHP5lkO1XcLHe2E5rtdGe0miCWq2b8Mx86vjHp/On/eKSeRG9MXdeU6uMnbr66tqZZ+9oMeuPnXzn95BPPZYuttd7QJBZQQBiQEmuYmb1zdQAGJoHAhtCXvtlqEAaMSFE6DbOwNHf3LSf39zee++blVndmtWXnTxx79cLG+977bqn7L7z4eu3LF186rxGcxQfedGL74n5VT/q90WyzobQebvU5VQdvPtTfG5UDt3Hl6lwj6aBvzebFaBICgdXKUtq097/rrgun13e391JDqF0N5eZk6867b53XnVFlVg/e2Jj4lk4efejj99x17MKFSxsDt1MHxmSjCp3Vm/d2Tg37k1D6gsQ2Z9+4sKXShnad5bzZ6lC71dy4sh8N3XHfXafOXC1d2VCdtMpmj7WP3X7Tn33p+UQjeZgUcX1rb44yIPJIQgIMm1v7flJSog4sd7L8FvaTK+cvXztCJgLRgMLT4qpSrq61VQ1DL71+9jve/6E33U5PP/Lnkvo4mbRaGQOoRA+HQ+KpH4m8d0oDkYrMSl07RjeWog8Y1dLcwd2NQWth0cU3Jas/EGA3ujETBTyprVH+lFIjwL2Dy/bum2c2Ngcg08+ZjVbRS4gBiUKI0wdeEtBE15TZwCColLIWmQOz2CT9G7/wf7zx8guPPfY1pchzVFrLNYIiMDMAEKFSKjIIC0/fmiMjSG6NkE4aytURIhOiBjQkw3ILJKAIkoYgiBRjNFoRqRCZFAIwCytS03M1RCFLzEJClbcxdikL2ri6qiTGaw/dIojCwnqKeSLFICQYA9Si886xmUZnbfMl4JpQEND5gAI8pd0CE0FEEI4BkACDE45ILD2uU82zncZCu2UouuBHk8oFqIV94IAkpJ2XacIXOCISITLI2NdHGzOriwlBXL+y57QmG7AYDXuq052vA8aq7u309kB18qRp81dfOZs3W/Mzs73NnW4rzxraWmszC0RpkgU3SZUorYnYB+kuzzvWDT9KTBdKF4eD3f3heDi2NjUEBrLZhk6wBWgUxENzbecnu/u9SgCp0MiLi/M607XzQ+bohZmnJHHHcbc/SZrNMK7L0k1qTtIME7U33CiDH7sQYsw0IqJJoyVfM8xW3M1Mp9kpesNCTDY7d+7U2Wsph/wlQWeqhieafj9FO6Pg1BFPFAJFD1FS0opAah+0NoG9MgYVBh9YQJlrYy5kBGZAQEIA8iFqrUBUI21piMPhSFt15MTBhPHsuUvKIAURERRCVLGQmgUtRcPQVKZhI4IxHJSHGFCxIE8JQ0IkQlOJGcKUJI1wfYw2DWaut4eua+1xSna41jnCaxHOdE8K08RDeNonmiLI+HrwRAgyjX/w+jxvGrZNIyNCZJ5isq/nRABIf2lBu1bOujZEAxEIAALEgITI0+eO69ikaJBT5ZummkuqhRRW58AXMByBRCCC+S5YzVkKEJGjkIZGG2yus4bV6ABq26R6QuubsttLBgOZ6XJnzuZdbrSkGE2A0I9o7XT8+jPNS3uZY6sJEANOedkCShFM1XIIolFpCByEIc30lCng68giRqvI6AWCB1JEipj52otUjKSUj2E64wierUlM7Zca7nhn/M53qdvv7qQZF6WcPidffqp86TJGNrcupauNkKPzNRFqEau0u/kGeP/9UI/D9pA7LcxbEKtCkQLMQklYGaPgpqOEGc8tRkzBTYBQC1OMGskLeUItyEp5rELlpCiAKKQJbW0P/reoqDvbHPfKyC4UkwjISVLF1Be1Vc3t3UGrkQxHk72rb9xy62FFzeWlzmAw0t1kNOn3q5FKiME+9erGux+895MPPXT78dsvvn7VJrR9ZT88fanL8cL2aZ9Q2kgbKs3FlaOthTnK55vLKzf11vZ3h5O7lhc2dlU19K8+99pwXEFYvPWmNw23S5XaNy7vYLv9+rDeu7CT5+5v/+jd3/cdyULrcoN7Z0+fXTp505l+3W43brlzbm2nv7gyf+aFK2sXLqtGUg8oz+rOLJg2VxzqSeVGWMZqpzdqNbNGTjYLi8uHPvXZ4tzWYsMo0rF2lSjR2qTWRqicZ0KMoBlYWIQUavK+JmatLGKICGmii1FF0YTdetR/XfPe1qnPYb1fV+PWzGKSJOV4VyfUmen2NreUhGFZQVTf+63vWDl53+/+wUdlElvNmd7+EGxy+OCRzc0Nm5kLV0a/89/XPvIr92dm09VlrsvDR5JyuJGbhpMQHEx8lSRKWVv3q+B9xQRaNVNErYxWLkSlAIkkMkvEawdpQQMmRiOSMKiEQs2TYnTjqn/vA+b/+LvfQ6x/9/deefgb502aJVmmjM3acPuJmZZxSzNNz7KwcPDcxUupxkZJy92OqffLQT/JxDZVI88Kpy01DQUwQ7K2vz8xmtNW3u2kvVEPVdrKKLMCTDHSysLipfWd2fbcTrnb3x9wqhcWl6rhcOQGbRO/8723txZN7XBmpnvp/PqVS5unTm3t79WKGUKMMaLS1mSRI7AIRl/VShud2BhFp1lwcb8saoC5A3MyLoejMZCOofHV1yc//rf+OE00N/v/8qeWGyvbr57ab2et+Tzcf98SwA4AxMDW6jSxMQSrDVpVhwAAdQiAZJQZlpNxWfrAaZrwX1oiBRGJRaYICZFwTSMJCDAdrIuwsLD3bI0o0CJgjRFBgRg9u9pFEQRQ1kwFapF5mmGTMYoMUozeq8T64Jz3iBhiDDFUVem9CyFyZGW0sbbVapPSkX1kB4iWEkFkCKR
RAsTIgSWEQABpakU4ciAGImWMmY7QmKeeBVAEgIprIGOcCxK8aNaWFIGauu1C0EaDhFhHcTUK51p7XzZMR1vLkWFqbXLOhygikT0hKlIAmKUNFhDG2pVRiJASqyJHBEACFxwpZRTlViOqui4RkDhydEDEDEobAXHBaW00AEq0Goy2hOCjyxIdOBST8bhwg0lRu9q5GDkEz8qoRGultDYmSxMkSgmbaTKpAxIIQF15QvEucOQ6OBdi5b2rQ5paJIoh1oCsQJB7E0+IIbqqchHYaOQaafpmLEJKNWySp4nNcqOk8s77UhMorYtxiYShdnVRVN5PB2V5birnQ4g2SbQC53wMhBgiB51pD76hoJ0ZBql8KOtyp7/bztqdRrPdTvJURefHNSPp2of9gSOlxpNqb1iOJ0UIglqHwgcIrTw3pFGpVBsdokZnMHiMQBSuC5Lf88H2zYeOfvGhjbklvTjXfvsP3PPMV089/tSVdrs57kEZAjEqoDxPUCf1uKgAFegoPk2Nd8GQYJookqilPZMTe5bR297ffeAHFl556cLyakN3zcLq/D233VIl4dSF54r+2IPq+5C3GFIh8hx8DM4kFgAFnKKEyQrWBKCsrXcG3XjoUOM9W1cvt1pJ5/Bt7/7ZDzXTOXGm3+t36uJTH/mPN1j/s7/y92DdPPzbn/kvX74gqLjd2Kjr2NQT4MTqi72q7xxozYnaGfl2Nzu5On/01pUbT6zEgdveLgbD4vLra22W7lyjLAdZEvcn3G3NhsoNx5OGyUM1NOJvPZgtd/Os1Xrh1Qug0CEOJ/HQ0sozz3zdaIkKjU4i0/LB7s6gSAi8ryZF3WrmIGowGLeyZppYRSjMQXkiDB6WDmb/4J/+0kf/07/62R99z4/9/I2q+QMBLgdwkNoINQKbTnrDd9x58jvuryfu1S888tITl06vwevbpVPqQx/89kde+B+TKgQXPfDJ2w5++3tvePKLX27Nt7dDc7jxRlUFH8AqdFD97C++/5bv7EW3e+iO1QM9t/HE+ctniocfm5y5kEKSRh/6O2OuBFzUzMneus7hhUeeg/ns0NLcxA13Q3nsyD2/92uf/MhH/sHy0RuGPf8f/9VHvvHK6Rf/4M9uOjxvtJbokcW7UIWoSNXeJzMzjW7a39qflPHQ7d9hs8Nbrw5OdmbtyiG0dbkijn10VWrHf/39Bz/yic2kPTu7vHjh4v5tb/mWI93ulx76OHo/NzdD2g7LidHQsJaU2dne/4s/2uA7h4fetIy9gd/aUm0wcK+nxSTmiZqgv5q0BiLbiyfw5//6gSfO4sNPu1xRp5mfu7AHWiWZqnz2bz/6jY/+u5+aXgVra5udmY7pNg/Pdy6duuz2xjKJF9b3B9UIabBxdj0GV8XJidtub7fabzz1zMotadqq22Zup7d34PBqo5OPB+P1K3vto4sB8hvufufmhusPNyxpv7V76rmHx+XkXT/0C9sbV1uLh4qdK648e++33Hb+/BUOinQDBRtpNwLUcZzqpKVngg9G6yjepCnHcqazNB7uV+NSpbqRd2JgjiUhcSx8mGhSjaxVFhWo1EfnvDImd76SGOuqVMST8RC8E0LP2G7PSlUnZHd754i9SSwPJ+1W1snVuHCIen/AVekRxBqFkUFcb3et3WzXZaU1+IipgW4zfeS19QDd41Z/7E++n+BLMys3aFwa9SdJM5WyrEb+gz934/Z+45/96jf+4o1iNDHUPPKpR8889uVnjx7oHr+pbet+rD2YZtKySLkM4varZ6r08MHu/LPbl6ChPMizz7104fLFW2+/5ezF9adffuLKlatJ3h5WsR6USaZCDAgIhFMzs1ZYVpVWRmmjEJVV7CXTdmW5a9JcKUV1+WMf+v6LZ8888eyLrjfRZThx8/LK4fmFleUjhw489fUvOcHdzcnMfKM9M9NtZ73ar6we0s6uHFqY6aTlqDiwOhf2e+e21ouNZtps90O9fvli98TRy1fOntve6WZZO9UFwB333lQMqv217ZW57s3HF9cvbrz1lntfv7j5xvmNfsud3XrtwXe9fTDayNLuk48/1pvsmpoTrUiE6tBIiKN74YmvH7/pwELHLt1+4lOfeeTJc5tbg4q0/djDL5xcmrn71hvf9fb7ugcWr47qjTIZYdNqCkDvef+3fuqrf/zWAwuHm7bdthKNTg8uHj+UBDOowmRvv/bloZU5H4ITYo+TzQtzy7dcOL19YK45vQqYIwkCEirlak9a5c28LkuReufS2iNf+uIPfO/7Ty40Pvann+4NB2liMIr3UQFP60TMQSsVmZkDaRoOB83MJElCEht5Gl29sTF5/KG9Qx+8k5RReabhoJF5Vb8mNBfDqtINiOMQqrzB733Psa8+9nzhKfoYohAJEwmBwmbegLIcOucV4VQbjlMELqAijAKKAUmXpX/lpSe4FqXMNN2ZHqkiARIKTDG5gEoTxKl1SGklwogEIBwlRGKQTNtGo9Pr9X3w3nsOQgokMhECA5CGKb2FrtVPjNYICiICULc956UeDvsc1Q03vydXnTfO/rEyorRm5GtOKQGtFaMACwtL8CKgbI6ImW5YwNFoHa83tgiZp5ghYInELKQRYowxchRtrJ6CUlAcYyyhckUI9WwraadpxzTn5zp7o11M0knlxo4nKrhIwcv0rFkLlp6eOdM/c2mYk7vtSPeGQ7OMwXvZK8otDyZrzedqqd3qzrSu7hW747qwxThIsYu6V960lNTVuNFEYYxMUYC85SgcAzGUwnWgEHTwutnMZuda27v9K0VEMJKYgQ9uf9xuzsx3sk7m0ixvKDUa9bwfZ3ljvrl4dWcjySyDEyZjbR3DeFyhUGppVJY7ewPMm9Gk5aQgNiDKBezV40FkEjKokRQgOdaTMWeJMMr2/oBn27fMzlirixLjyBkO1wsxAiJT3TwiXhdtRUAipChsEIVJg8Jayt44jwQiKBF9pMhKKAIIgxBNZ5XTYEaREhAimtbupnBUYBjs9SCy894menetN5Opd73nvq8982KWNKpxUdV+epbrQmxl2ig7HkzCYCIkkKe6nZGJLAGVA2Ga4jwFIqDg9cBIrtV68Jo4bDpPAwYgAaYpcgtkOmq7/gWgKaKIr03XWEAAeAobutY7ARJmRBAhBEG4vjebrg6mcZDQdZoRy5TggALAKCjTdGl66QqKTFdo+lqlSyBGlCgCgqg1GC0q1gbDoTwem8OVlsw1wTvgJYhMvQHPzIIm8AGaHWg0LFEEEmUxa1qOFFjleYREygGdHqfjkg6GQJaMGTgvFCGwrF1VT7+RPH2KfDAQORIIiAIRFiQUjIIMREjkY1SEqJUIECMzC0CaJD4IAkhkrdBoEhCInhm1wihorYlERKINRhZMbKtlDjT5wTvzb3nrzC33tprzCsKIQrzzrur4wfHvfIqfOhP74xBqFSYV6FS0jWg8u6wN994NL7/Ee0MkBkItDqI4tGkMSkCKuhQFkqg6IhIqDei1gMRaCTtltJDEeiQlTHbtE6/6R1+T03tZkaTA9L9FRcqSAlaKqZEMhj2re1nadkWxcvDQ1t
7L4/72yuxyUs2FiVFgHIcC1XCnNxntzLeWu53OqKRspvHIa691bzz86trFW+6/NZnjra8+NaGk7m8E5977LTdtj/ZvnF0Y7ewlSRVR5cQy3kr1pJlMxqNJQ8Ludl9iUGnjjYuDc2tf398ZbGz1mp3OOGb1yC2Zix/9yHe/8IVPfu73YOMcHFjM293ZNz5/JaGsPUvPPjXZ8NncfOnH+b1vObl6snHumbON2VajLa+9sLZ67AYK9huPnv/hH79btNvYHiwevuU//z/PfOH5c6ev5tRqxXog7BDFmMRHKiuWCIo0EgDEaRoKCOyDno4pQ5RISmE98QgqbWXVqDx0LDd6b7h/qtmdaSzMDPcm5aS0mXXOD4aVtZZZGdShLCen/vQf/cpNf/iHe3t1o57splkqGLc2r8aawdhWattt3N3a7LYCa+wP+zMH/HCjP9iaHOxmR287/M0nXs8pbUW3uqxuv2/uvg98x7//1T9/5ewuV76VphbRx8AsRGiMddHHIKgUkUIWHRVUtQ79N92W/o2fv6fb3enOzPQ3/M//o889v2YlaZrUMnMm/tabFuZnFDoa1bnWcTL2edL2vipHTidSBz8qB7MmzVXLB8myA+3myuULjzZbmGTUbCMHaLZsiJM2qmEx0F4Hrmc6BqDRG24cWEh3d88X/T6jwiQrI3tXJInpZLrbzkTVM4cXW+3GTKbvvGvuhpeTr335Yn+MrlIi7F2tADUpHwIDaCsIbDRpwmIwmVucTZE2t1xRFbEskkRFYrJYsjxzeitpLl283Pv2m823zdXz3Rlfmf7GRPCaAc0aM53dJtYqVAJklUUkQS8AtfMuchRRSic2qbzjUCVJopm9D1bpGFkwGtKC+lpcDtd8kcwMRISilWEWrQ2h8swsMuXD8fTnPiZaT8eCWhkkYOEYAnufZ3kdKkWoNJVVVdXOey8EgSUIo8YqOCakqiDSzSRVYgMH5hgkAKH3EnyondM2IyIkNaUYa22QBacHcQKBnbU6MYnzITBG8agQAAM7ECYmjuJjrEOtlCKFWqNWWmJMrMHIRDjT6k4PGgQESBRhVVZlWadJmue5IjUpJjFGYzGKjMpCZHpISNNRXYxRa2OMNcZoTWVdgaDEEKMYq5mBSCliZvC+djFYI94FmWIeOWpFk6qMHIqqLus6CJTBTY0rWhuNQEZlic1Tq5QOrvY+9nzY7w9NYkSm1CdCIldHx7Gu6sgihNqoACAhWq2U0cJMAiQqT1JiTYJIYCwNi2o4GlW1p1FiU3NgYb6dJMZa7+uicjEEbbUPXqdm2h63eR6qOjjHHMeTCREqhEk5TpPEe581GmmihLEMbrQfQWS2lZIAOw+gZ1ozzbRBqMYVj0MoCleWvj8uRKRyIbNGKx1AtDUAnOSJEgRSaZpYYwXA19FH0ImOKHXtfFn95Tz9yW++mgal5ibv/e4TZeH7dGVzvL+3y92mAfFpI5EYUJAjpyTK6rzZqEuAiDFIWXhrlAQWBZlW4OuoikCDY3c3J/3dlblZbVRmO0srR1TkLHMs+0sLzVrnp87t3HrDytJsa7S5JsYARQInImSN5wlHTNPoi5oA5xorh5L7Z+fummkvrp9/pezTpL9900157XqLK8knf/13vvboN54MvDP+t8+/9sZ639WStZsaa3W5J00j1uLVnfFO7QKIiUGF+k23L7z/O+4frQ27c5221wGToydP7IWJQdu7un5mYx2cVxKOrh5SHA8uz4lOz1y4NClHAFIOY8ekBqu33HZgY6d36movN+F7v+vef/mv/vvs4gwpm5rGS+e2rtb7mdWt2dZo6D3HqqgZEVHKqrTGEClB4QiuoCqEdtvOJa0wKf/gj/742KG3PvD+ZpKzC0rpoxDnRJTYADpU4FSD7v7BpebK9vO/up+q3M40f+1/fnZ35J0nIlVJ/OjvPnHmNXPvyZUnn978xuN1hDxNw4JRimBvIM2OU1wWo/1ct01Tjr2lxc+Pbrylm86YkzL/9afWImpPsrc3ZqtOyf7P/6P/wLc9cPTOW770mx/t94v3fegHP/BdP1Tvbv/Zb3/8zOnNCUExLAO7hbn5oU9uXjjQqEYXLp0zMUGtdaInYxdcmejmoVtXv+f7/7p3yeBM9eu/+n/9h3/+gfm5g1XZDy0rw92qmBCtfftPtE4+uPqR/3CuKtStNy7sX3n+8kv9MJV/lIxYK2Uj4P5gJOBPrDavvr7z7t/+8GZiEdTe5tfy/Piwuk2X0ao+0XkKz0V3ia1evO/gB5ap/IT71Be3sb1SVaXR5FwEAmbFrblP/Mnz126M2rPZwYNHl+f317bqon/H99526dVz1flXNi+ev/zGS8dPHB/0y0brwOKRO8tev7t0PM+yZifLUV54+pW777uL8zJ6OnF8cTwuJkN8/erLyczS3W9/y5XXr+5dPbV6092bp3svfPOV2bbjvYf/7E9+p9UxZTnDTmySFiEG5xDAc8ySxLly6kwixyDoXNFqZf3RdlEMEpVEHznGRCc+xnajM67GSltAjhxRYV1NbN5EVKUru/MHhru7FEMjBcVZMQ5ZprY3t+uJEoyYWQMILKPeCJkUgDJ2vp0xoaBc3KmERBMCgEQoytIH8cyKNIgEkI3danFlripHf/zRk0n2GYF+NXBGR6LgJ7VO2C6AzPKxuw7+j3d+/59/cv1n/8VLcdD4F6dPnTzYemmU33BOKVujdY4LT5PRto+yMruwrNTSysnGHZ3Zp984PRpWdeF7l3c3954pxhUScJJUPphM7eyOslSjxKl9GYDqyhtF3nO3ZfNECYuvylZq5zuNI10bDa8cbPc26y889OlW0v3gD37PoeVDf/Y//qfrqk89+vJw+PTNR5sthLzbufWOgxfPbbpSLR1qHGnbyc5GI6Pu3MzWdj8Dlc/k/9/f+PNbjzZmmzO3rB4u+uP2bYub61d6k0medr/j297XVbK1NxBfJrNzWSPd2i7b7bZZTQZX+wdze+u77x/sDufbSxuXT7vSRwjnzp9ptNKrm72iqvdGPhGsRg4VPHXqYjLX2drbf+TTT+30J5UDlSbiY5KkZ/Ynpx596Wsvnj3UbnQOLO9/5pvHVg9/6/e9j+fh2Ip9O4xG+9sNxZdfuzi/1L7z7Sft4sKp57a5N7j35sWnXn7y/ltu1FauDrZmVmbyTrvRSfdzLiaTa88FiDQt4YIYY6Y2Cq21F7S53dnf+MOP/+n3ffCHf+5v/ZMvf/ZPn33xqTRjANZakQHnWViAIwAgcjT6Pd/1I3uXnt/d3rFKWQ1ecHOrfrYKRw6uL7+ppkQce0ybaJuxHrHfF19SdOzdpD9eaOUzudR9EQRFUyWVKMpXZ288deWJNLfIJEqxMAtzFCIlUXwQDqyJ0kQHF5568snUpDYlXzoCJEXXlEzXTE2iCCEGlGtc4chMQIBUV95YHaOEGFeWj08m5R7v+xBD5Ciiyegkqcpy+qSMiDFGENBWISMKopCgTrPsllvf/sJLD5FWtXdHDi689vrX05xCCDFGZEFS1+xwKMJRBCSKJkLQzWSmqEuCejK+HHxJRuE1z
zmRAmGZelQ0UfRhWlsgQgII3k8bHj5IjMQRtM56TvWrqJRc7G8aHWcU5QktLbWHRXl5vS+ZGReBmQMZ0RLZ7paByPYuVC9t7JxYbq7OqmY32R7zuYtXUrDHVhc05ofmGqE7YaknzJf7pRGTGV5ems0SVVQ8qeoqgESHENPMZBmhNtZYXwFikiXQ394cD4cqNQbz6GOIkzxrGoOtLJsIe+9KTMSmC920Kr3HsLDQbTcVgx2P3Ob2UCgKqlgHICxdcJGqUWk8GVSJRgguRm41UjKLwXsOUhRQl6HRbGfN9ni0HaNTCgTQ+1CpUkx3Y227lfL1DAQQgKZo52syr4CIIBEEGcAJxYAtiB3nmpXLI2YArUSaXeOjH0a4MvbrhRthqq2ZLo+m8RMgCSOLaEUKKQQGJO8jojRajdqF0bjqduc29kajEohdDBAZo0RUjMJVWRlIl227Ho/EcjX22xujdqsRNOadXCcBjfcgYYqqBbjGlZ7ypIUJafqLcr2qI4DC1wIdIWKIcN1RNg2GAK6NqGSqRxO+5oWTKbXpWvJ0zXd2XWh2rVk0dZpdj6quV5giXvObwTUJGgqCRGFA8BJZCOpgBQw4hSEjNhQy5EzJylyaaZyz3BYxDooRECltyDtuNHRqOM8wSVkniOhMCgKRI6AHDpCkqZv4llZHDjZPXYUyBlK1ktDbEgE96GfPvebObzZOrcO4QgWAJAKCSoEAKJApZQSRFGpCEkREIlKkkEAEOHIMIAGRQ1NijtzQoWGglQkorCLuOxw4VTktHCESBWin8I6T1be9J3n3d66QDRotgSJFDFHb4uSN1bfcYV9dl30f1vfi7FEQcCjTj0cC3EjgyAF67qpyklRVjDGoHAMHSQ14U1XSG9GVgX7hIh+5tanjmOsKo9ImqR0pS2BqS1Ds5B9/SD30emO9B1WWic2b5n+Pilw5mTvQdqHo9V2McbCztrKwtDjTuXzl9MJ8d3l+4cKLV3TzyGR/VE68D04YlzoL3jaGu8ON/oXI1fzqam/tysXt0a23vGnvwvim+bvuu1O/+vq51aVFLRsPHPGXLxa76y/NdpOlA0um06nZa5kkJmZz6emXN0YF7db84qWBY3X5yhiikxja3dawFgPlO06MPngvPfQHn7ypC60D9sDc7ETxfMfcMx851IOB+8Qj+xcncztVsBSan3n5B7/79gduvaVLZdpZaIfdm9/x7l//T5976Eo2eGrhwqtXP/fYHrsXfOw2W50Uakd16UurNCNFRowISgGiUoYFmIMAaK2vAeoRIYoLLslbNulArPv9vgSTt5Kra5vBucI5KeTIibeP+l+3SZYa0uiTLPHjveh5ecHferf81A/NPfxHv3ooK+rOfDnyRV0oEZNRkpvt3d6772v93P/5Fl57A1xjdmYJ63Gxtvbvf+ne9pKdP7Ii0NrbenAi89/7w7/64AM3fOd3LS/cMDryf78j2oO/+Cu/e37TQ+2t0oQqRkEAowwZwsgSgNmLrt/xYOsX/vo9q0d8qiatvHPqleJXf+vCG5cbKVlWEr1vpPGum9pHV8zcUnvUD3MLB1uZX7t0YaadWW0ndT12nkJB2nhvhGg82e9ki5Ta2eWDEvdZ8Zj7FtOxLyYlLSy+pW2ag/UzfnI5hEnlBkx4de38bCeJXd+b1LHwEzfU7KPBA4tLwfH84uHUHFQhNhWJwcPzi+98R/rapdGp13fL0uvUhsixqkkpYSGFEKUcCwlbm+6vXT1+fPXHP/wTvUHx+NNPvHbqVCxBaUsSFZks6Awo7o6g4HY32VqrrVFKXXtI1lYjktHKagssIQRrDKBixNrVpFUIPkmyRNngXaKTLEkq51iAY3QMU69ZQikyCIrznpCICQQVGUBJSQEAKWTmKniOMQgDszE6SpDonXeZNSZJgvNERIoAxLk6CI+qQikIjqf6SkJM0xSN8tbFGLSiGFlYCK6tcIWURRujByEWcHUdY4zMsSyN0Wgtc7BGW2Uj+BgjIla1Q8TIIKxYYqy9AIYYIoc0SaYA76IcBWarFXNsNlqJ1QhIEI3WSkCAE2uD8xEoSgxRInMMTIqmdzS1K7NEa5W6KITASMxiDRFEmO6iCBRqUSQKAdhaxSyiVYqaABExigCIjyFBS2x8mJZyOAL3hmPmMCVZFpWPHERhFSIJZFmWJwkHTlIbg0dA711dVR6gXxYKkOt6upNWSiEiKFX62nnPAEmSCAKRNJt5bi0zWKWbWaJRxeiVojzPAFhQ2t1O3sgjc29SkVKl581+r9twIULluax83S/SzBilvHMAAFopo+o6uqLIGrZytfcxAEWttdXjGAfDCpkDigvBIE5KnyVJp5FEH4DNqKDIPBr1Kjddj2EEb5SxNjeJReFGkmiFUwmLr2tFyKGqfI1amSwJPlR1dNHXIdTekVwzoC0tz+2ul2D95dH2iRvmT1+8UubNudnGZFipVGmtwGAxrCjROsszbcqaA7tGq1nWRd5IUICDD5ob3Uh5+V8+9g/LwXhUrBf2yuIJeuVrr20/tzXZRM6OCjduu/nevtvtzs1sbV8s9jdDCsaaQNJsNgM4IHDOuSiaNQRqJW0dZ2bzwxunT3/+Nz/zIz/xo4dn21lrocqVgf0v/vEn/vgPPl0Xuor6ahlf/eILqSURikqu7EwaBhj1bgVuEhMVZ+eSdjcH4KWVhTffdU/KETQq14CYUaDh1Z7Ks8WFA1T1tnuD+YWWKuvdwVBNyq5YxwMutjst1Zhpba73cFKM1p3NbF3zXAK2kz3/whMryx0WngwmjupuTkMXgWBU1CZrkq3K0s/NtHf3+sZAlFgOqyzTzVYzSPSRhnWkklODNmv/7u89v7h48obbvJ2TECpQzBRZapJCMC2hl8LkprcdG4+fG/SFIVy5vFc7rREZhCLs7blzV+3xpb0f/Zlb3vGDKz/zN78EHp2LtQuJVf/l175ynLoLR2V/90zeTDFgu6t+/Kfv+sRvPfUXX7uC+dzq0tFXXzjfnWuwVfu97V/+hz9nswNPPf+IHm+Vw/7nf/9Tn/u9PzRjnp3vVEBD7/zI5c1UcURX1/X4vW97z+9snDPGCFCMLD6yY1/4wzct7uyf++ynn/vHP/2Lb37Hj37nD/z9j/7Whx78jtuoMe/2xhab416fKRy52d958/hr35yMd0ux7cmoYgNaGzdxWTMLQYRjnppGNzUyeud3naySAyBriKVtjlXzBJXHVFKQXdN6Z7JxSSuA9Gh/O8ty/v4fObjf2/30V8o8XVnb3tFI7WajNy76o/rhxy5Or4JWWvbPvj4Z94IvuisLGiuJ1erxG8aD85WPSUNgxJfXdg7s6zDSJ265m9ppXe9Xsv6B992zu7HTWcrQ86g3yK29950PXn3pwlce+srMwoxWVZWlB07elXb3Lp995Jb773jsoSe+78M/debVs9oXjdSPJz0F1cSN8qxtQGtKSdGk7Cc2IUBlTYiurmsF1lADICJCHZ0mS2iCY4gxAmhDHDFEzrNUohoVIxfDQOqyiMAm1I5QMWNVUog6SRMUGZXFypGjve11C0IZRfbe
lQAQnFMqDcSKdABopMbVzuikLKtGMxuNJ0ZplaBqNUJU27v7d99znFvDKJmqtRKvsGYIGm0E64iLyV4S3HvefPz4YjbM5zq+uTPs/8rHL9jvufWvfPu3THaelBBHO7uu4ObhxptWj3z5Gz2s/d/8mQ//8r/552cnE9LG1VKXLsaYaG2t3e9XSrEGZA9VlMwAIQJB8GKNanaTwXA03+qcOLJy9srFTNNcx95yy/KzL60P1+q2btWVO9PrnfnMQzzub13d7i4faHXmMpw02C7MJHfee+cXHn91c9C75eZb77nvxKvPPW/yNkaotnvzM/ltb77n3/yX/7F4YH679peu7AGcHtXFvW87cOHipUOrq7PLx52v9yZVb2M/zyj4sL8/3q+jbMSExbPXmIFEjJOllc6V02vj3XFjsXnbzQcX5mcfm7ycZjrJDBLde88d33zypYsVv/L5Z4tx5T3a1AaOsYwxBKV1YLJpshelf3VrLsAdh49iORpubBy68a5L2zuHj7+9c6efWzn/uU9+deXk4k5fhd6k3vPaEhxovf542X31jXtPHO+0W3l3cWuvF9Zea7UbqTLXW0WCIM5HSpMsy/Nua9DfURY1K65FqrJfVn/wB799+8k7P/SjP/PW973rjz/58Z2rF1F8FFaETKiJgkCz1Vw9dmtdwmRQCJEj44paJ1ZFO3bdp17Y+PbxXsv0QhhV+8+nuGFa2sOCqwSl4uDGkwmk0GwlccTIaK2ta+e9oOZeMdSpdsErJGCOMQIis5jEoEbnvE60RPGejTHK6CDifeTrzm5EvFbu0BoiTyE0ShlBBSCu9tNne6VNiAzBM8eNnfPAyOxFlFaEBAxSlGWn1R2OeiIyFU+woDABMaPMd+dAoxN+8fxjRTGMLMaqJ1/8U/Bl7ZxCDZEjiyJClsRaYGXyxLN3zkUOSpsilMGPBTxpEKLpZ0qoBDBEr1AxEpESBBEHRMKiEGMMRAoEtLWACo2EwJEweAZmFVFALOp66FMNI+BuRqtLmVCy36uK2vkQizpGFhEkxJHj0hc3HukeO8ozs81eT62vlaGG7b213cFWM09m2vlsu3lybkmHK6TS2RwGe9s82/EViW5iUTnvhPRwfzg/t6AbXWNtZOd8wezKshxP3BBjVVUt3ciazYYRV453+wMtEIMUkwgC6LH2HGQvyRrCOnJVFq72buhqx1hW7H1UpCGEKKxH9WynQQ2TZeSFKyaLiSNjMpszzbRAp7Y5M7NLxW5/L29YBnZVcfstt5/vaRj62cVr3Tr6XxEJ4l+idACjiAZUiCnGFrrF6Fetnz+eLeR2pqXn51ogcfHI0m5ZvXK5fOz0zmMXQq8KIqKZ1BRiDUAIKBRjBAJGyDtZy2b9wYgynadUcXju7Pm3zD948shtb7z2SgjeGKWYQSQEH5z4cgRNc3S5Pej3D87MLOdue3cyCdjbZ5fS3HyatlBUrMGhJkQWul4GEozTWEgQmKfAnKk9TK61jYAIRWRaYSORKeVgaj/D/z+XGrBcH6kBIuF1PhL+Jb96mhpNu0YgNEUaCRNdQxxdK20hESHHqAiVEgTWwhSqDKIKsQX1bAZHZ2G2C90WaACUylhoNZQEYIbEgItxPIiVB2XtSCTV2OiQTtgYUBYIIASIGBWS1g4zHXwdqv0jB3F2jjL2uYJenX32Mb+5pbb3Wr6y3keNMYQAiIJIQETIIIZElIYQAQGiaFIMSILCQkppYgStlVLaz9jiXXfSA/fPHT+Qzs9pTZV33oF95Y39T39t+OwZU7hUGZt6f8/B0fc9SPc8ENqt/VKaMWYABtmLHqOVdptOrFQZqa0BXVmjm++y2hQoEQKjiBeABEDVEapQuzPn6tvubmhyPgY3grrQp16nly/o57doKHjDITp5KLO2dgw87tmGLesSQOF45pvfhC89HXbqVABV1L5mH68/HU9/GPdKZBWJx/ujvDnTbS0Ve/vayIEjJzYuXR5Nev29Csudo0eO6qzR358Uu3ugK+EQuK5dPbPUrjF0FxYHhaMZ9+3f9tbNixfGF86u6LSdobH52dPnFcDqjY32amq1H+5cscm8VRoxf+P13Ue+sffoy3vn1yYxoFEUfPDeK4ByPMrYJcOdX/r5A9yq75idyUKUUAauWgczqcfj/mhmPm015qsk/rPfHqh2brO0P7S/9rFzvgyNNNF4Pm+o+A8/7qpRWc1/8YU3FMV2ezWGGIF0DpP+OLA3lkgRKhVdFHYgQEQsSGgYagQOLipjhZ1wFBGlxOo6bcD+rmiVovBg2F9Ybktszy613Cjsb76uWBV7kyyFRnDZqP+zH5q/9770vncvxKzP/jIUpkgO/uJHRoSq1SRgBlcpXx1ql//6l+85eGg7pikXuL+/G11iMtLdam4lnn/p8a3LZZi5+19/7GpNrc7Mch7Ub/y737q0Bf/4l7/ln/7EjX/rI0/uq5x8JGSK0QePEjNNs8gnDqTv+8CBE3fPnjwZbXsw3OY47j75gvm5f/HC7iDVmhQEFG416MiR5r0PzGVaCQI3TDUZuUlZVqBsGLtqcWmhkeZlnztdC27EqmzP2jR1pqVs3XCD/Wpczc62fC2i0gML9zUbS7ujQS3uyLFDW1uXG2JrdhDTPE8jZiFMcmtNks/MZiarZ9KGhObM7IGF1bsvXX7pwMHVYb8ailpdSRdOtJp5fP6FncoFEmpZtCmsD2tDGkSRsEhIVBTLly9f/PPP/cmtt93x1rtu3d3a5AguuugZRQZ7w5lkcteb5oaDc9Uwi2U2M2urUF8rmkZBBYqUTPdf09oxMCImiY0i1hrvvICatjCjgOfoaifMxiijFQCwMDMjKWMSEFDaAsfa1YYosDBIqpIgjgMo0pqUUoYUkQohuhCEmZ13SimJIXivlGIGHzwDpzZJrOEoaDDRlpmFwCRpCEEiI4FNFQIoJGFWgABslNZIVeUJSBnLXNXeAUSBmBjLMTioCSBJUgDiCFECC7sYlCYBIUKIgsDM01ugayNbQmw1Ghy8Upo0NNIshEBTjHVko2yitSJX+eh8EBFmkQjeB1JaKaO0ThVFIRdHymqrEuFaIApEJBCMNU9aeibRJtW2qAultFGGo/jgSl+kNiFFzseAWHhf1hVzRK1c7UN0aWqR0BqFSECoIhhtGlkGwdssVVpVwlGiIszyDGPsEAUfSh+s1kRotBbBKJES64lYILFGAzRs0m40MptEAVdHAYocYijTJBOBxJjK+yCYmEQZytt1XdTeeQQbgXdHw+3dniKttCqdZ4EYQproKFyVFQsL4GR/VNX1pKiMTZKqTvNESKOgq+qi9kVRpEppqyNQak1ENMrEuobojVZJagTIOe5289SkwbmyrtvNrK7LKkZhCSwggTROa2xQA9a6LP1kUteOy7K89lYNAADnz47aZn5mjg/fnA56Gy89Wshkobc1IGLQuDcoUmuokR0+efNguxxMBq70EjkRjiGQAAe2xr39vTMzJ/CTXzz/u7/3ez/5gZ9Yyd/5hce/6Pr7m6cmPtJ7vufnd848ORmsixnmqe7t7bZJtQiq3kBnqQA
GkCJAUfbnDy1aSHlMO+e3oR510/Tzv/3foGzecvBQvV1D0Txw79wIe1//4u+9+swTP/nhH3jmmxc+/cVn2Gi04jCGgApFgGsgbdDXaHNrQlxKMqjCvfcet2m289rpw7ev3HRstkKLIw+1s5kdVsOL25s2xLmZph+7mWZz6OLcfAugaGp3dC6rDMREz863tve9aeeJwSoUMw2VGBps9kCgHPvUqOHQWSTFzD6MIld7k0Ors3lK4/FYE4r3ZLQiUQwI0U1GJ47dM/LjK1fO2zwtWZ/als9+rf/h5aTRGCFWoR6bLAkUKDqIiUZGR899dvjcG4kX1Z1rj6IT5STytEGnEVbnV3Lt914dvnZpPOpXs2nTAzdbWbeV9fv9nQ179JbE9wfjjboaxqIw5648vHLAf/d3r/zho/TMCxfIq/E4IkXLOD63QWkfQgPVCBCpCirRMwtmeabx+tndUeGbHevLID6aPFnfW//81z/Vard9EevaAUGz3QRFRVXvbfZ/8q/c9b63vn+8689vrn/nB3/8y49t8d6TD3zgfa9/4SsqXDn64O240koX6+//Pw90/7T609+vdvoILM2sEWOMGOqiBIUpkRR1YtWBmerH/8FPVuWZyTNf2N/buvWD3UkcA0fUCWgOsIOzCZg7gZcTtW4bYw2Dv/nL82+8fu6ZU32tTQyuKiuFYlNj47WroN7bG17tXTp/rtVu7J670l3phn4VmjFJ2UW9sbaHqnn3m96eJTlr2d0dHm5lm+uX109fOH7Lm6nIIKEsRUXJ3tbmqJycunjhnrffnyd45dylpYM3jobjVx79bKrGk9GJ7tJBaq+aDs01Oy889plmFkG41eyQapZlwRIsJYml1FgfK1e7ZiOLIiFEUspVRZ5mhBkJBHF14MpNsqzNqImIA2ideITC7ROlHKr5pfnBYNeFOJiUg3IoAZNEIcjBhYPrO1usa5tpCCZ6dl58La0krRR65Nlu0h96ZPDOiXBZTpQiDtEaLRxLT0mQPG4/9JFbr1x5ZkkAs4OefIw1Kgay7AkUWFLsPXO/NT9s86WtXXPyhpsuxjCO/O8/cfrPvrp+58zOd//AjUvtY33qPv3V4va7c2V4ecU//NU/393uuyK4koP3aSPNcuMLlyrdSo020lludJLGwuEDTzz1QlV5JJXmWoDduF6ZnfnwX/2Jshruf3ZfPNe1XL44mGvNv/W+e/bXz95260LaSF5/9lzr5OH4IJfjdHN9l3Hnh3/owbWNS0cOdu+/88R42BPf37p4ZfXAfHu+nZEd9IYqVaOti/sXrjifrhzuHjzQIhqMh8NHv3TROTlx+0HTWRzXkzAZ21Qz1U8++9xb7r3/rffds7+z68rK5LEYV5cvXlYawj402q201Tx75arSsLM/JENzWXbTynxjtpV11IPvOPnYk2+Mh8EYBciKYmIiGc1Bh8hI5AvHhkgr3U0e/K4Hw+b23XccP3vqUtaYI2O293qlb9314Nsj7RplMTje3P7/UfWf4bZlV3ku2lpPI8y8cto5V85ZWQgJSYAwORuBwdcY29xzjHE65h5MMDaXbJIR0QKUpZJQKklVpapS5bxr57D2ymHmkXpo7f5YW/c8/F7PXD/mM3sfo7f+fe9LoRptwakjt1zeWLmpPhEGW0W3m+92EengrXedef70N+MULFCYWAbiwvuFiekf+P4PfOZTn9zc3IzqsWDpCl+Ww+dfe/LCxulb777/Xe/7oa2rZ5977JHuYEMbIOdCgLiWVLk78/LLwDJWnpSPWKtaVDGGyjG5C1X5yuOPvvMDCYjN7uZroh6JsAOqVY4rzrplmYdSXLq4Wo0DWSCJigGFMJFgqWSMsYsDOfDBMRGjRKEkShREhExGRRVVjOACqVgAMXuWQkoj9vhEzAQCkQIiAJJQSghRjxqDUV8bHcCTZ62VEtL7ILWsykoKIZVEAUCspGRAIeTs9Fy/tyGVCSFIKSRKZgAJQkf33fOOzz/+6Voiqn4RHBIAgCiyTGtEFkIyKK1YMHIkIj+CzsLNneljO+uvDt1lVk4a4coxg2XkEEAIxUQAGDgAo5KCiYLbYy0JJgYBSkkKJJUSQnBgYCYKCKwEALDSxllbeS8RredYm2FhrXVjwZMpRrGfbmlQUZaFwagsLHmCYG0jUanRVO7sm5oEMZg9NDPbSLOhDRwPRyxjVebF8rC8dnVbGCOUr+Jksj3Rz4Jg3N64GoGqrBdx3Gy2NtY31QRMdFIDdnFfmo2h8tF4Y0vopFNrNZMWeK5p6oUSJBoQiMrUW9s7vd5gTIGSWNWNHmzvLBw8UG9GV1ZWRr0yAJOB0vkIBQWnpJIYyFdE0hgTq5jGVSNNG81mu1PbNzmH+fbWZndUjCuDW6F0nmsm1oqm2hPn1neaabw3+wAAgj26M+ylKgmZQmCBQUVYeV0UM1Vxp6luX0xvPTGXttsENukIYWI3Dv3xNayGt55aOHBk6erfXB2XxHvDkuvpJEKBFAIySkClRCpVUWRJqsss985HiV6ans63ur1eXyKzkEwExChYCeG9s94Pc9rY7rcaYtDbjoxYmop3e+VuSZTjeJ1sF9NO3OxEeVmZWBXgvZDBOSX3xl97mCVgAtqLFnHYg07vdR0AcW99fFPXIwD2NM57VbDr2KW98JDgbyrWcA9FtDeP3VtZe1kmQIbrkOxAElDuoY0Ex8p4QokSkblyRgpnC+PKNuUHUjh0CDoRRBJSDY1JaNSBCawHy2B0iBNoTMg4Udm4ykvoZ6I75tFYMCijodFChLjbC4JZSjJ1AEnOemBAJTnGAwfU1pat8tnPPzp6+qJe2025AqmUDz4Iz8hCIxEIKcLejgEMQjIhs1AgSIBQWjBLCCTA2lIhSjSy5IV0/N9+4eZTN6yJuAjVkAUTj2tKiEg3Z6uZg9H/98+qc1fiMOb33tL+mZ+ZaS1ebM5IVwJqKSGwd8ROCMch2IDGgIbQ88mjr9kTNyX7D3iBTCiENLZ0o5wGA84yT0I/fc4fvzB99ETTWr1yWX3t0a0nXyr7IR2xHJxzH/6C/bH3tWebgaTXEVehFBKznebDn8WPPOq3qnoAJg7kkbQk/sepIiEhkCxL15zqGCl2emPlaHfjmk9qcaIE1MWSmZiZqNxoe2edg4/awdF2VVrWbnJuWkf1/QcP7K6eu+Gmk/2drWsvPdpJp04ePDYIExvLz9zz0EI53mq3TK1JUdMJ9vF0G6L2+tXib/7isS+/ONreLLyM7YiVwT3PNwExQ3dUNGL72//yyNhdbKWqNZ2WmwOTqn5voKsw7uUso0qQyLeO3DATtK8KzsqMWSSmoagikIUFS0whk0YooyZrcZZlyN6RDYBZhVEa+SwEIvYsxZ4dXQISMwOVATwiIyLI//9yAGCWAqvusN2YPHngsIX6eHx5ZXnsHBQZzU8cdMXOzvrw2C133dUWC/r89/2TVjx95sTtetRbl9EAkgn0NeHD/tkGugxZuLJyLqQq3hr3/+PPvf/W2zur564kujnMq3E3D3k5dWQxbkShGmqobrn98J//w+Dy6/33vfOt735/Z2
nf7tGVpaMUNdVA9pd/5+dv+92Hu9tbWceIesz7DjUOz6HRvTffu9jb2W7OVDpeTln6npht3fiVxzZ/+c8uDvppJJDR+sCL+xt33TTRaYp20+wu95f2zR9dmnryhdfq9ejo4ZPZqO/9dkpK+rGJWcdCR0m/t5E0ZiQUW+vnm7WObCzurCxPtmqNhFPT2Vx9qlKadNyUGZRRhEpErQTKbNAvyjDsD6UMtQbMzE37cb8ep9mohxKOTU+s7JwHWQ57RSDP0suKksodnYkn7tm3vJWdvdjfN5F+4D03bI+rZ557YyNTK6sVOfLDcZQoCnDx8u6Zc1+JkQRwa6IxKgqJaIUaVuN7b0G1sINahJFAEHlVyVR8cxVIRLHHHgIgiQJBEBHs9WtDQAYAQgFAYpwVUoH3QRvtnbPOIupYRxIFAQmpUIgQHEMg9lEcheC9JwrkfSaUNFoLwOBBKw0MSrBCXdIeb5qMAkQGFNY6Zy3t2S4AEaWWAtlJqRHQuoKYtVZ+L3yjJQUPDAIQpRQoqrLyFCgwAdiq4OC1EkoZJSSD4D2Ks9DeBq0xMrG1pfPekQPUINgHy+SDtwRAPjAAEwiUgcEyNkxslFJKxTIOkjjYyChAoEAELBlCUUmBMkliIYwSKJQrK+9CVVlAJCajBJF3wQMELbXRak9KUo9aQpqCKC9yIDAKgYNzZaTTBDG3rqxKH6CoSue9C0FIAc4zQpKmDBx8iOIUgkdmUByZyEiNKMhzWVkfPBAnSVL6kEY6Sdh7qvkADFKgUsp78hS+yWjXBrhei7SUCDIEdkyWaZCNWrVaLW1UtgKUzgeJCvc2EGBFkkCC5ODdxk5WVi7RMop1vVEH4t1eVgU/zm2eFa5yING6MB5lIbhiXEpVKi2jxCitTRRR8ASor7+UB0JRYElM7VZDg9RRLKUgx0mk6i0jAYa9IZD1weXZ0IfAiGVpSxucrYQSAkEwS6kcoWfUCMEzcFBS8TdfjOZTU2TF0RlVFbs2C/Mwc3kFpWdTj0tXgocgvIqVhqjKVqR3sdFCxN4FX5HzBUR85GBk5fjqip8wMxtvZJ8Wn/rp/+O3bgu8dvqT6Ezm7DhlNU91V2x3d40xkzOtGCJTeYXcanYyxEL6YCOkzu6r9sLTr3U0un62dm7wPd956Ce+54PN+vF2HN3/5gc5G/37f/3zX3ry+RtubAtjjt5806NPXfSpZA/MQMTkAY0IQmBkSu+lwGYqJxrp/Fxzq9u94cj+Yc/msZXDXjor2lF69cpqIpuL80sQujWV6Zo+2W4bVv2dIUinvNve3t2/VK836zDy65ujjZ3ezOzMsUPzxbh4adSfnYhOHmvtjLNow8/sa4UKcdzXxmgZdqvKg4gjs70zSrSJI2Mr65xPlBJKg0AiGO3Q//kbf/hXH/sfy9fOI+DW6qCR4Fc+vzHP+v4H8mZ91JzQliiajKLE76wWvVHjia8Of/9Pd0s1h2B7mXdEDMFaz6xQyhDpz3z10tkLsdHZDScWfvw73vT5r77WKysSLli5PeZPfGIg89ZUJz/w4Fy/12uTyzwTJm+746Gnz73wqiiC4CStl85GibSu/Lc/93PfeHzt8Sc/XhV+rzM79tm/+Hf/9K8+9MnmuBLKrF3aRWPAA3m3WQ6a9VRpFYGsPMVGA1aHj+77kZ/52bm5qaybzx1detu3fdenP/HHs61Tf/nFJ3/5b371nzzAb36o3tyvQxM3Vs9Ntqbe+WD7C397GaY6G92iN9gBzmcXESzUo4ZEk9SmL6/23vft74rSg92zT/Sf3ZQ5wF19NbktEqqyTIkdQdc0toNtKVExV1U5ppigzO46Ck88VTUXm7GWAqC/0ddGuG82870tZcQn7z62vbKTDfNiXN73rfefe/nJgsJrr144vH8f+ercpcs/9P5v21q9urs5MNqUA89Eq5fPH7/7QLOjjArlqKxP1UXcEbVxGUXn37g8Wl4dbw0Wbj5x4o4HU4xefPhppe3a1Y3b73xP5UfNyZS5QqtFEJ48QrBVqRQzc+UtBxBKMauqHDfrE57ZOU0gGImRbDXuNCeFaFm3d4VgWiBKO/bgJzoNkI3RcOTQy1iKnGpaeKWandqlq+u9fLy1mzfbB5949JFbj93CXtZqsScI7Can0m6/P6zK6dSMRhaUct5rqUysi6rYu0217BkQjbhv3t539yAfQzWQdRO8sMSElnRspM0DsTT1WAniMK52//N/+JYP/dX6j3zfO//P3/jlrOSoNfXyOl7ZWXr109OTgi9t784uLp3P3Jve+aZP/4/fmZydO3j80GBwNlFSiCQIORwVjTRudVpKjBh9GquqyPLxeLKpXC2amJhe39oCYlk3/Tz/o7/+c+mBLE9PNRu1OI2ize7gi88/ceeRo8urK5OTjXpbiDjcdfvS809c7UMxe3z+7IVrjkVjNzdEb7r10F13HNm+eG1ycZpRJFHcXdkUlfQWbp+ZeHZ5sLOTv1BtzLVSQDgwN7exOXz9tdeFWT1xw/Fr166WxWhqqfPWd77p3CtnpubnhA2Tzdp43NepiXXHkTh8/Mi4N7SlTbYzV2UJgkU3Odn8jne++alXz+5ubmn0fjRMlDDSjCx7FxCEMdphaEYyBLXv4KGyve+7vv+DX/34xy+9cmky9lzS9Gw9c3j0lttWzl2ePHagP9r9yB/9z4XpyZXzp/ctzuRDqJf+1PzMXz/27PBEd8bu2H5114HOxQtXn/v81cHw+rnAaB0cKaWtp9LaqxevGPfO//e//Fdf/PqjX3zsSQXYmWqPdoY++MHO1lce/lit0b7h5Ikbbrlts7cZJ9HalSu2yGHPGY0VIYCJjx09fvut73/51c+vrV0msnGcKmyffvXyrSefEnyBCwG1TjF2jq/lmZYuGvYGo5559fnhcMzKyIBMHJRWwROwH43XiYL3IdbaV1UjqSVJ4vZqmS7sP3iYA11dvwxEFIItHRABEzCiBCEFUGAphBC81ykBJAAM3rpcCyx9AAYdyb3DvFICBFgXpmcn46oaDsbBs1AMgk1kCBwKCUBCCBDKe1YCfcB2yzz50uebDV2URVmURulAoLQI3tvCK4kgNARCkELFsj5x97e+ZflS9fa3v+OV58uXX18pqiB1Lc+6SCyVVFIVZb4XwGBCQMnfjAMTBNwjY+/ZrhgkCgQEAQRsbQXMQilHXusgEeJIQiAgRJIhiF7ualq06hF7Ll0ZaYmWO5E6MNXs5lYgVbaKIj50qF0S7Vua6K2vT0w1pztMAfNcsRS7u9zruWABqUgT453vD329tZQV1G42gx2VnPfG493RrotMDL4NgmR95GuX13qjntNYk1LXG7WpWisbDvJ80ExSXYs5eIVRSOKWnqv0rnRlZbOCipnFWSPDqCybkYhmGoMs9MZWmuCdy4s8ilOWIC3ng3xuYkZLdXRx+tYbjgyMsuOSs2HSiOrRlBP+wgUbcAZjHYMBL7jyqS1CNRqO3d4XiyAkCgEgAJhYCQxSBZDS4gTADY34RCrvnpmZXcDW3AQnCau2bCVFFrBtWtNtrrKdDfu1ry+v7lbko725i
/cgkIQQe3kcY0xijAQebnUDcVyLyaPNQy1Ji2G+2r/qWRAjsADy3nlJ0khmAY64dLDVL50TxE6X2G5RPfWgBVfsUZW5L4aDdmFERKQqHWs0KlGxd47A77UY4bqojPcSRQwIBHtEHpQA4jpXCHivdwaCgb6pCPzmZ6+HiQTAHlleAAOARADmPabG3gJTUgZS5BiFlCSFp9E4qwpXSLJV8MQSJDIJ9lqLA5Pw0M3pTYu5iYAtpDGQk6OK64I6U6BSEBJ6W5Bn0B2HJnAUi/aknEe1tc3WxyYy5aC3ejmcu0ZvbEdbG9mtB2u33mwmZ5SSdlgU2kSu0rt9euYNfu1ceXUtzceGAVCE4INjFkZ6F/YSU5JBIAiJ2iijFTqKZKjrHBWPvSodImlLChFAogshNf5dt9YX5ndAlyxE1OgQykAtkBEaW9e9myN4/5v4r8dihPDjP3T0wB1nx+MAIAQqwSilYD9iRiSrIiAlAMEVPM7lK5fEw18Yf9e3RRNzI8CYLewO+Nx5WB1oEsqhPrOh//uHu4szZKLaxvJoZwt3hvWgFInQ6/Pnn+PeaPiWG+Dgoty/GBwBU/OZZ5K//0q+XdWsEh6Cc6RjIeNIR/94VKS0YoJqHEQUSspP3f7Wa2dejuptUxO+P9KGuRztrvTiZmNuTu9uDrfXtkApIwRRMbM4Mxqud9fXaFz2LgwZ2iM188KL/dPLO29c/sKB2srCxNTifK7DxPJzAxW1Q4gef+Xa51946tJ6lQ2QOQqMTIUQ3hP4KiBCarR3Lk5raROaC9SeTPp97BZCFmm9FThqdfu9erMeyIyGA9mU7/jew8f/dOf0RasNVo6dL1AQIJlEM7Myksl7W3lbaQXOOYGAUglByII8KSmIMCAQA6KUKCg4ggAQlDJAnkIABCKUUgMFYAE1vTHYJZPedtstL7+yU5uYyIoiULh2+SIGVlHt/KtP15YGf3nmZ3YvfVQmIhuXzB32E6PNtpG6GeEdDxw/dnT33GXyhNahSmLM2n/3xKVT7/yuuLjzkCmGmytT8zE5Rh0XRYibC6z6O5sbP/qd7zo23Xrvdy1effHjr15cf/Obj0d8YHDmjdtu3Ncf0bFk9APfW3/gLcdWr15t72vsXFqloLd3Ntib3oZUpuHtxBcfWf/qyy++8Go/z7SJAF2VSJiZSw4ea88tRlCV4xLjmYXc5ttnLwdIosaCl2kUlQvN2U4tWd/q2+CIQYvgWI0LDJTH6UgwBQwTU7O59ePxaMhdG7LRaNiZmVF1VXqcWDq+k0G3t1GbXtxYWRaI9Vqz2ZlsTs9b4WLBUTu1oVm4zCQ+aXG+W6xc2tJJowiqMznHfq2WFjPzbRZy0O997emXfvgHbrn16FKl4q89vrm5RheuDYdjZy0F1rGMFLKnamcwhsCl57rGUwein/+5qapcjiBpmNZmdyAj1U7j648ERCGwslZKiQyMBCE4H7Q2zlVVZaVUSscCtfc5UwCSRmlPpIQSgEpFoBQyGKH29lIQ0jqvlZHKOLAoqChL54NEgYoiqbRQxGwri5IphKqshEAir5QIzjvvlNTKGEIhpdJKuWpPZ4YUaI/sg1IyhUB7ckdgEMSEDGwtAHhPITittbVOSQEyRkStI4EQODAjCpFXuZCCMXLeE5EQiAjjbCwQxF57idhojdoQsHPBVS53IWYSUYxSElNW5QRUS2JtDDETO1s6Io6MIWDv2GilBFaOUEjas3oSaa20ktZVwMz0TRupAImoUAXvC1uxD0Zp1hFLQazGQZRFOS7z0hZaGU+BgUCAMkrs/U8hAVlqJYQsbBWIhdQEUDmfRhqAfFkqpaVUxIQoJAviPZ8DMrNzgd0ef4ARhZYqjnSklJBICJFWzDAeFQSQpglISUKglNY6RiGkikwkpVBKjilgZGIZCUSTF42EmShNkiSJi7JkT2Ws8qIkFwSirRwTVy6URbGH8POVd45NwnlWMbM2GgEFCC1klMS1RLeaSatRg4CNNPaBJGCtGUmpnQvjqqJQ9LqDvBzv7g4H40wKbQMH75RSUSQUc5JgPW3IKNLGkLVVmdvSmTS5njB1w+mlmdVLqy7Ghlhcv1JSUUrt0yQhkiGEJI4joTcuXTAAgDgc9ycnOxSkVcILJWL+qQ/+8If+1++EzsSp460E7AvPPfeHv/nT3/dD3//iytljp26fPnpfmFzqdrFex0gJEj5tCD+2MYMpcfv5aze++b4//IuHt7e82sJ6lPQ3uhir+ZnJf/4L/62/tlPH9JaTt+mEf/UXfv6vP/Ll5nSrUI2XVyAo/8Wf/TVZSqkNkXeeOQilFSGSIodBy3Dv8ZlD+9vLy9sXLl0LymdF71ve+eCZZ99YG+ycP32t1m5L4R31zl3p7+TOYVyJ+PVr5eHJqN0w99x2UAwzKhdeef0N46OpTtowoubVqKLN1TWD3NQcKb964Vparx+ZjNlIi9WJu6eb9Shq6c985fLptZIQdCPOykJHdZRKeCbaUyCiicTETPJbf/OTZy9cqdUi55yOtKqnawP+9HPR517ot/U4SeKyUtu9MgS50+NL6zvCpBIWdQzsZTUqAwohwUR7zA2qPN10YFKKITXx0LFDf/2Js4dOLpWnr2ZlOa7INJrP78Q7H7f331i/2/jZablwMFKZ3O3Wvvhnr375KyuVrXOAXr+k4CmN6nH6B7/3B69duPy2++87e/5qcGyiaGr/4jeeO9/bGak0GfRGcV2YSDQa9WG3h546jfrq6m69lXJR+GF3fWf9v/7nXzdm4bnXdxutVpymRX3iwhsb692VSKeHH3r3ufyFn3vf27fOPY0DNDjnR50Lz2a7O1GhoHJSSnjlxa/LpNi48ldvvPraH//eN44ee8tqX+N4yxaPDbe/euf3HBhtXoWlDrNichA3rJ5luDlAkwugoo/xBGqFsCvb1Xf+5IGPPNLvuuDJIQqMVBFCJ75evdlaW9U6rnXmty92p5YO9LqDj/7tX15+5dmDx/YdO9I8eqD9ta+fv/+7fmjj6upoMKAQhqNyZnFeS7W6vjO72LHD0fMvPhEpNTN3UIO74/YjO1vri7ccqRbUp/74QyriycNvqqq0NbO9uf5GJ21++g//66EbD9YjPRh6CgDBB3KxkUElqFBCNMrK6c5cgIqZarWmtYWHoFBWZcXSCR3FcWo5eA4BbeV0S9WLcV7aKkliW5GMNaMLIZDDYmBb9bTC3u7OrjKinbReOH/tZ9/x46NstLO9OtGQs1OT2aBnJDvnjJLomIMzggsXFArnvfAIASoKpIECAKthVX3gvZ3+cBUcZKVkv6UnjanXQNYsSiEkh9STkc5KlWip5hZNHrb333znhctr2JgM5Wh+6fDkdOfi8vpLWfWu973ryS9/7rUz5x596mkYjVeGy6MsGF984F033XdrMjHbuHT2Csr4k5+9dHa7SGpzvUHPV1V+cWU4HIM2zm2TJ1v5OFIR6mLEWktmyiqqJ7C9M9JKjgb86tllP852d0fz081ix37jq5eJ5YGbDszPz7iiGgyKq+tZ0bdKJc+8eOWmw0ta4ri71V6YOXhs
2rKsXPFjP/k29ZHHv3a2v6XFzP54tNtHnp2biC9eXtGp3L22RrZqpI12c+7LT7yQAJdBzS/OPfW1xza3rzxwx71Tk4uXdkZXtsvedi6Zk6m520+dfPbLX07auLoz+t3//dmk077lxuNrF8+xlwLEYOwdMaBgxsEoeO+OHV74pd/4rWY6PbSdKNSWHgovPvnbOzX/6BOvPfT2OzCHq2cv5MMx2fDo0y9OHzpsx72N7u6dD91UZPLgibn1cfiWdz7oILgoefmZiy++vtEdlUKIhenJvVWw14qq8hKk0srYsviLv/yLb3nLm37wh36qvnDkox/7aHecJTWRqjSUY3S+yruvvvCNsiIZqThJjYw7U3NTk0eBi5WrL47KYZKaXnf9S1/7i8KOYy05C9KBjsPO7s7lC2cmaz3AKB9CHMMoHxSuE4bF9qYrq9mz69lWloEKQqDzwVyHxBJ5K6USWikZteoNAG7Gzf6oaysHiP3+VlmVEoERpREcAiIorTwTIDvvtJAIEhEFskBQRoMUe3kHlUTguCzyPY5vCAE1kmehtIniKs9r9bgsqkAEIKSAoshMZIjZlQEZm512nrmjh5cuXrnUisnESgs+fGTf8tUt4MCBQyAgEloZpQA9VdxpHsqM7g/W6tHgq1/6ra2djdLmRJiNh1IIIeVetFtIxUQIwuiEmK3P94iXQgoGQBRCaEBCCcBAFALt3QKxEIDsY4nAngIQ417aiKkgJi2EC7zRDybS2RCRA4UgtJCjnhAyUrJ0QBl+4fnedl67oyyW2mmRF7EURN7UlHO7c4v1ehsVxFkesrEdje2oPy4KL+J0cqKBFMU5y0HlQYUkrajKRsM3Njb6RWWgBkF4C4qhgTrSkE7GRXtqd7O7trw2PTtz8vhCb1SKuqxNt69duDC0mShCYgnAS4iVjivvhXRTrXiiiXmR22ZjtyiYGYxWUgBLEoZ1o5tz2mw0ax1P1gUP4xFXg85Eq/BhVHnwogLzyoXzw5wYVZRcfxYgCWBxXSUfiJQkEHWHjf74ZJPfPK9uOT6xsK9TmDI0Opi0TK3jQDCbtBOzG3fP9X/zo1954YLrlREKBXvGeWDxTfq6VhoQKucQWUQGmeqdOocoqVGtkXR3N5wtA6k0SY00ZelBSC1lURaAwCClQma5ObBxbJBDPyuNDGkSSRsCgGQwsRIlamHKrOqtjtNmE2MlEpPUTFXlXjAxC4UCkf1eIkgi094+Q56E2jMAs0DWKPa6Y8TIICgERCEBEAkB5N48CIiIBF6/2wYGJAbHsVZIXjonRWzLamcjK0YkHQApBI4jmIyr2AjP4F1oppKNnIkhFqQMtCeBHACDEqHMhRVQMNQlEEGzE0Ut0e277QF7yzNzotNxU/McCAOXcc3Bqji9rJ6+rHZ7rXMr6tEzoJXLC2e9DAKHGVUOC1cjC1JIIQjAByAGRikoEDEgCABU6jqp2ICMPc82qm+5Gd75tqixEA2GjY9/6twTL+qtLAURoVJG4v6a+4H3zSfJtqcWZwGNDNrIqMUouOxrWVdidNct+NQbxUosDtw4gtqucOA9CsXMnigTGIRS5BmRMBIqURQcc7RdRZ94blybkG+9LzFB9fvx11/MXjrn17ZNxUogOkyurrnL616bZNh1sTJBeq2YiAFkXtS+8Rqcu8SdOpyci009fe1qdWXLVzYlJCaHzJGSkoig6sxN/KNRkU5rduBbrU7pdjbW1xpnLtTiui1JOFdl4zjVizPJuecurV4aOA7bW3l7euL4bUfHg835Ju4s70YG4obhWlLYxhef2HzmwqUitzq4pSm88bAc9/sXMjx9/rVQLfT76zu7cnnsK6HnmonomN1SrG1ujYY5MAHCHo/ZESHiaOSKnL7x5Nr9b6lqnUQoKgvSjkNWWVcBcb3R3O0V41KMr+0cn+dnz4QENAAQgPXOqIiZALGyFoFi02QhCYrgWGpZWAtjDyh0bLzzQpBzlZLSBwcgmRmFBEGBrUYWWgILkHsoEy59cIIp+M3da088+Xdu7AihWdeV16PeoFbToHwCNAmj/LlPlMNLYqpdLadJ1GZR1lq6GFe+o+szo+95/9R//O/XBECkhPc+TfXa5Z0f/MCvLLXEiTb+2597r4YN1P3Y+N3lrae+sXzDbSeDwEsvf+073joxOve3dner6onyQn7m6qWnvto17Wp1uBapasJk2xfLuVZTlINDS63z5/Mrm9XubjlRbzz1wtXN3WtvLBdZaQKzEp5ciCM4cbh5z737x7aKtahKv29henXr8upWkUADC+2rkLlBHSk20drWlSRqcNEGHwWiyenjkxMzo+F5DW7cv1zaUFUooolApSuHUZOkkf3uDkstqDMYXg5SVllVUoh0c64x06y38py7V3qCRbcczs/VtMSVC6/NLLUhz7cuXVbErpRaiNTAWEsoXbtljh6cfPa18vXV8eMvDL7jTRNltfWd757zNnnxpc3GzFK3u72yNjh7ZXRttQyFqyqsRabeSgaD7ANvm3ngBjr9cj5RmyVuxnV0YZDn128PKAQhJDGDD3u/Ri0lIltbBQq853Tfm+oTmUgRU4CAHARKpZSU2vsqMjWJ0lPlvLeV1cr44AGBORCFSCktpdRGIiHvuVRRau18WZSV98EYLXGvCby3QwlAKRUiE4UgJOZlziCIObCPo1gp5awHgVKgRKGU2OsGV95nZRkCKwFFVWmpCFkqRIF7OSnnQgjBqb2GMhdFzhRAgPN0/VkgEBVGSoMARJRKEUDwPo51KqUGAuYAKBGtc1LKyjMDCUYAieiVVABABEoxElalsy5YClGk9iRoQggASEzifAgcKFAIAYQEwYIoeO89CRBFZatAyphAPBpXvio9eSW0EgkoBkQRSAIBCaOUAPDEiKyEqKepcx5RIkAAligJKDKGEaPYUAiC0Vo7zqsqBBcCwV7sMiilECHWMo2N0ToxxlkXApTsrKsCBmRQIDl4axmAUmOIQUjJwN45Ii+lFEIQA3mf6AgFIjNKmWXWWm+MJqZgjGwqEBACDbKCJG5teWcdO68AUe+xqoSQGCWJMsI7UkLGkZ5o1Gan2wIwqaXeuhhBCpRCACJIkdZSIimVGowkajMdQpkXWVYygQ3AghWKdrszPTsnhNBJBGTLfOQDhG+miqLYHLxj6eL6cjS1b3htaXf7PApPwNZVUuFMZzJOo3yUTUzUs3FV5a6W1koXiqwE5ymQ0eJDf/F309Pz6b706IE0+NHkkbnGfH6+/+WlW2fq9cWta9bQpUbaHKyMlo4dDEJur6+TipbuOv74X1z44sfOxx++NFVP5mV9/dKOrEvMeLwAAQAASURBVOdvv/WuD/7Cr0oDZWZvvuvBRhq9+ugj733/P5uaiL2sdXMiIbIBjYajeicWwA1tuoMgtWLNe+6/k/vqJ27ovPnmg889fvbx588OKz4+l7bq5vWnnmmOu6bWGXbXdUS1QAdn1cVLy2xaYFQiNJv2vKjtjnIW1dWVSx3Ntx0/AeLUo8+8oATccGKpNUNb+ZDQ1tnN7ZvH6eaRxZuuvnFt5ennWk00gie
m4pDZDpn33HIE7PK53WI8LALiTndkjFRCgpRMNBxU2pik1ur1smatORj2qrwExjJ3LOULr2dJHNfTZDAIUVwfD0IsJUiQsjICCG05tgwspcqzkhFBILPQWrBAa/i3/+8fe/y5r7389TeuvLyR3jcfHInAQupxJdeDnJ5sro5GL/3Z+I5j9W99R/v0hf4XH9+5ssKTzWlXqsoGEmgLFydynJUs0+nJO5YW7ri6si6N1spErF999JGHTu5/4o2uEgKEyAeFEkqqCIW1JO99yzuffurxNIqhEcsyzC8+MIJBe9/BrXH4+O/93bWLb8RyELplL2SxGGwMN3/nFz9+/12NmSONQR8vX+o/8zyQrietVOlkc+Df95PfcfsRf9eJ9j0P3r2/fWnrYiHzqzccmS6qpw4+dKgserV9+0eQGOxIx2g9wlDrUYCMYqfACrEouLSce3SRKjd2+tBJGKHMq6p0HGD8zTLy2ZdOT01Obw+6+2++8fhtNz//lQvTS0d3rzVWVi9xjJtvXFofZkVBtXrHaNXbHe1uD5kyKcTa2pUzzz7Tbs30rhZLC+2XvvwNL+H4LccjGZ198XXEUXuuZatSJTwe715ZfWO6VZucaqUNO73UAJzrdXdqtXg8HkWRYvIoWCtwlYuUGJUDof1U1HTeG43g0cS6KrkiMiapXIEBJJhA2G53bDZEChPNVlZ5gzXrnGSY7Cx0/cZEg4ToxpF0YFCAx1Dj6vlnPrSweKy6spNVdOb8pT21TTlyjKKRGvTUpjjvWRdAKeFC8MyIECjscUor8LP7XKMOdiB7G45zrvkygjLwrmzOIgErLYARPFUO6+2pmejCytpnn7t8x523n7u4UvmwtbM2MZM22onnaufquso9aNHfGS/NNVqTE8NLg5MT7t//1H1l/lgV1m759jkuw3e/821PPfGKnHnTr/7hl7quRCdEHKqxFYokQj1NAqEQMOiWcRKBIGft8qbVAPuXFqC051ZXyYtDMxOzzal83FWTtaqyaUGjbehMTQ+7QxOJW990z0f/9uH5w0cWDrwZtp7PvBuOB8164+qlHRkb6vXuuenwG8vP9l318vm1jlGPPP3GwcV2Mt/qD4si7Jy4ZWF1eRsM3XB86erFq194+NMHDs024qTVTLc2lpPaxMxCB6S45W33PvPok1cunhnlV/qu6G52x5ktQNqdwcXLl2fT6KE3H3/+uRVSNLfQGI3d5u7Yeq430ivXhlfeWD08Vx8EB95Oz4af+vf/5MN/9DebW6vI94fe1XEma5P6teeff+4zX6pN6P7utZ/6F/9ifv/C2sXBxsYGAd9y8sDW6vojT702NzP7yus77XaDSmvD9aDKvvmpfj8XZVV5FmzZy+1R/pef+uJqju/6znce+w+/+D//6MP5tbM2H5lUYxQg8HhYhAAulMVogKBQra1eeU1q1e7UokhVeVlmwQefxAa1SH02lcZ33DJx/GTLGLSsdNRhjraWt9hM7G5vcpl7l7z26nh5S6ZpnJUjQiHEHpKLtTZ4XUQLVVUqyQxhx46AUQAG4tKW3ldCChDSO4cAQipARmIAMlpKEApBCS/RAbOWiJEuLfmKmTWQBJQAgBKl2Jt4Bgi8u7Xbbqe73QEDBM8oAFiMhqPgAjGhkBxQQVSv1cajfGZmQiPaPEzU5qWXkrYdAAtUSgYPiDIvbQR095vfk7Zu+tTf/15Ek4rcaDiWjRiJBTETmSgCZiLPDAIFATIjIbnghEBGSUwoFTITk2dnpJEoAYCJKVxXehMzAEhhAECIwIBE1ztFTIgA1lNlbRSDwIiIUJEPgZ0HdiPiyGhtzHaPHnt+9Nxr2yfna/Mtc3guPrQ/jWMXJzpAHoN0ZZHUIh3Hw66orCrKbrB9VxogjJL6DSePhkCOjPd+PBpFoxFmTk8k0sRJJ0VUQojt/q7ymQc1tjZK4o2s4itrE/VEMATnOhp0s24LGwI021M2r66tZ7uFQynTSGgUE/VI68gMRR7COHPGqDwwWVYj+9rr11o7PWZstNsk4p1e6d0wZm5P1KrdIpAsZNRnkTNS6TN3XXejPBqpOAQIwSQRoqln48NY3HUITh1onLxxUk+gbxqURsR13WqaqA4+sDS589Lzxz732isXqsIme9xIH4JWUgqEvVfoQCglUgjEtnBprR6ntdEgeFfV682NLTs3v7jTXQ95RVQNK9dMjXBsFFCkEABd4NJFkdRaaQBgEQRE9QS9ayjVd4FjBQiDYRWXMq7p2Xorz9ygW1pFtYaOm1olGNBDoD1nIPiAgoGRAwhBQiAyCGCFiIgCQSFL3AutwZ6iTzBJBETc68ZJJVhI9MyE4Nh7MiiQmGzZ1J6KsSGYi+HUIUiMKIaUOxfFullzNxxDCaE9HaFDX/DGjiMQEx3WNYhrkNag1RLDHUoBmdRw4EMXJIMyVdqKDk/Ew7za6sbnVlxjx+9b0mlHVjk0pmfSer3/uX5/GKTQ/UrsXHVCKkGSPCFI3rPaBScEMHkvQSlBjEqI639ikgKkAPJWRzIQa+BWQotT1e1349EHABrlQpT+uxuPfuGj/f/1qd76EFmnKtCpdtmcWW3Mak+xG2/IOBKpAC6AAioHSrFU+5bozbfTk68Dii32uUDQIvL5WNcVoQJPIAWqyFlwRBAJ75h8ABZDW/vQ58bnr6mFVq27JU5fErvDWCdpFIm8cIoBPGilFHJaT21ZAEpHGDwJgQiicLBe4OZQXVtnFuwgooAsyZMjKZFZgbDWsTS33XHvE595/v8ZFUWxMSKVUdSM4s31nVdPv3z85pvLQREGXjBmY/zS584+8Y2tbpmClCiMrTb442tNzbfecPAnP/hjg9XX+oPRV75x+smXLxDG44JaUdhft++9nz/4o83B6k6wtU693momkxPxYM1u9rrtA/FwnNks/tgTxae7IUjtgUiQc56AqyooZCTGoJ8+Xd3/1mYjyZ3fbCwmSRrJ+uS4H1eDQRLXJyZRmxBcdvKgT7RmBkYKjFEcQRAIYL1DKQWqIGtSCJuPBQpvWe6VylgASwRkcgICBSelZHJCICIjEgUHoJiElCIgexfA2lYTjx3Uh2dw5/zKCPTpoesszfb7eVWKyMhgLVoQhdu/lEC60FrkjZ6qteJse6t5y2QBjoIJrIcbGwn3wTNQVNpSmiqJhWDoxHFR8Svr/j/+5vP/9gfaNxzKy2wUReaBe287d5VOvzgq8qIRtoyx8yduX1l1N991w+nXH9XtWO4//PTXXvrE//iJldNfPnrgTa+cW/v6S2svvrb21BuZiFSWORG2hURbcWSkpIqcraVqfrH59vfcDFTqSOEA+7tBuejpx56f3z/ZSGuNeIoFtCYmJFiofK0VFZWWShmoOT8Rzy2N2WJWpFHqYCgblG9u+9CssoEtK1fkNVYCpRCxLS37ITBmthxWmDY6zAICafSR0tYJIiNUqpJGWRTejrIBxEZGUVyvmbzgwWC0sbYSaT212I4iImVLmnnyifDZL68emZk5PpXUGmEMoyMnOoTO0Ph97z9MOnrjGztrl0fPvNJ/+cLQVn5uqjWp8k
EgJyJgC9BQ+ETPHpiSmdd7c7a8aaqFQGGhJ5ImAMvUcQ7LqrXnpofP6+x79m88xrwzhnCM45wbkHJAIAQmAAwNBZZwFR8EDKknbaW+PJO+8ZY4JJIiBPxnsh5E7BGmNonEfHnGPgnUckRCJgnJGzKlScobYegIw2jAEiOk8MubZEuXeF3mq1dk/m1189S4ItXByMCuvSURwojxAYGwVm2HGrLdec3J1TPhhulwODzPXz3HgWchkyr3weiupUqbaWbrFGuZUn6fZiKyld2Z4hMGQ/NmPtiJaw0znFhQDY+fMsC6R2VnHpyQvOOXJrveBMyogAGIKzthzFjMHh+V0bW6eMHoIAnRgHaK0VwIF4FEbOWxqNiHOlJANF4L0H5y1eISqgsw4QhOIXVvJLD/YbZSdLlhoz4RxoLATGDOpONiWLvM8UDUzrQjB22BdjqnQHY2trD3+6eeuvZDIYlmevevfdZ56+Z7SdTijZypwDbm0RMlVWtX7e53znf4DIeZCslYonTtOLjoV1lkclkRtlLZAlxqxNdAW1i6G1DGEZ7BDKDOaaxateXP/qU/bC8cZDH9y4ajJ6/Yuqs/unP/+d53fV85979z49LKSYPP5MW5HQhena4H98dKlZnr71RRPCtbd6cPKCXD41OnRovjbRZjgSgQTt9CDXI7CGEJmzjnPmjOOcIxNoPRPCevjBU0/MjTeDsKQLQ8wDMU/onZOKETjwHnbagchYQgInhMw1ZEYOEt9o1NL+wFkDAj05iwwYcw4ZF8SQIxIREpAnjwScByJI80wJRg6Mt0AEiAAEBEQ7JxdkggN4AGCAnHPraedSCUDeewTmHIElzwhIBqLUd61Xv+odh/fs/u//+GsMPKAvsiLkXDIexHHmM8/jmemZfmeFpDWZ9gQArlKvv/JVb67WK/fd/6UYmEIXytooLWepqTXqaT8V3oWluFHHYQHPXWrrBCtV4chwKZjUsoTlhhz21dLFrX1lXq1HncvrvR8/C65IRVHF5d0tPRwYHWJUW1yzCx355KXgK0/0Oahb97Hf+Pn5g5ODjYtPLa7xSwtjYyRLLDh138aeXcmxuy/98QfgkcuTv/VXa0UWSCEIrffOM/px8yN4C4JJ4ChYWAuSaw6ohd5oewTDYaBAMsODMPiZu2Y+9Esz3jxLrlKZOpQMGaUrQWUwM73ya++Z+9CX1v/Hh3/yDTdQ+sJzUaB0MTi2++iZtX7OaiWMr533pfJ2NnLoFRXEAslCJoIg6yf5SDPJkUGejuolsX/26uce2P7K95e3y2/4wpefmhyfiypdyi0wal168N/+4aFGuZ4Zd+CmG3IX9rf68czuKgwunlrptCpTN9xx6OiB0aUPN5Mn//6Pj3/gv/z9xuaRc/fc08WR05lJS4muvvHNd/7cr77zd3/1VxbPbCO5xkQ5T9IkbYHzQD6Oa4Toiz5HULLoDR9DJp21LrGoVX9jpdh6NIwjkxfIlZKRdWFA0Ue+k18O4v/0zlurbkv3L8vCH7t+/037N54+2zUMchVtb+sR6s999fm7XnfrDbfMD9IiqLqoHGdFnud2YmpWa6cNK0y2tbnkXFoKYyUEd2Lv1L6gGiNmycByCRPj5V57ZJnsd4aWAoZWSTE7NS3AZb2tFeiQKZhjPjdTU41eX48KL7nUYKTyHhLvkTBmME5IHnvAanHE0kHr0YVv1sZrM1cfOVidv3h2IIapy/L1gWZ66H2+f2732ORkZaYyGpLpboYVah7ZrQfZ9tbApN2NTQtKNcYa/e5w+bknw2r18tK5XrcHXPRbORS2SIu8yPbPTK1eulzlfDSkU2fbyE7ddO1koy6nZkr1RnNxrd3trW8vtfpuuHvPfshpVAw92HIkXarbwyvbg6LICZCcLXRardZ37BrIQRvjnOHItLMA3jpnjQEEQu7JM86IPGfSW5OnqfUWOWNCMURLnnHwzjjwhc6BA0iWpSbPCkQw5IJQSMGFEJyhsVZrwyUDwjQ3jBhDj4wVee68RwACCAJlnRMklQiIXGYKpQJXgBIB5ySlss4V2kjBHTnG0JMDzrw15Mk6Zp0NUCaZccaRs4BchsoZ64mBN5w57y0Qc94h+kBFHAPrtbPkrHMa4zjW+ShWJsa0KrqVcjLZ2DTYnt+3t9PqzM/VSa3nSRuD2NlgvM4UmidfWDi6f+K6YzN50p2sc6s1056ldul0qzfiC+ujjZZe2Br2R6bQlBsgzxC54ExKiZKh50gwFUUTVeWT9qVuP8/Z+FhjNMpygGplFAVBpSqaYzXwzGR52usYUwSRCksouRMiRoLRoMiHGQsD4iLXOWmrPVjnvNNhGIySUSAVeeCeASIHZASSi0DIkTNCUaETwYT25LVzgGRZrCKpVGc08s4ay0HU0PzwlXe0Xny7LNKBT8O0CL2WDhSz4IvMBqYxHU6JMAinkvzgr3/oR1/54ZJLA3Q8CrLUDjjnYRD1hulQ53/6z4/l6dWN+rQW9U989sQ9T/Z1AowAyCIAv1J8B4iCwAMwT4DAEIX3BgS3jnFPyCEM4eZ90SteMjY5xklDkpFTjXaHlrd1LzOcUiI1GubDHmNbrBRHtVp5ZwoCxUPFdVHk/UIgMvS5dghU5Lochjxg1vkszW1mK5WgO+yN+msDbYaD7at2TS2e3zp4za7xmcr371mtTV4H5av49FXTM/Prl4ovfPg3H3zi4YnD9Xf/9psmx/j3vvj9fMuFCAdvnHzkiRWmwQ1TW4S8w/ubvZtvGWO7XyjPepDh/qO31Rs3DLaHDlQUs/JYGRTpPHHDQpn4xFPdp59u/bff/c9f+uQnChfmlYmLZmr37a976Lvffcsdc8lib0CktTbSjO0eT7oeg9LsTNONBqPBqBgODk9VtU44y7zz6Gh276H9N9+ZdtPb9k4IxVHgzMEjOgHNIxWKx+7/xvPPPnD0ZfO+GHVWbWtrtL6V3PH6G04982zI4AMf/PlNPWgXudCSgpwhnD97CtxYXBUMkQWuvXrm4uojJIxqhiVonHzqQu3gmK6Npqrj2kKvYwPwVdb6yw9P1A/2HnkW/vjvtvwImrX6IMl17h556OLPvOdn9x05FsXfLZIeOQKPQaiSYUHcgWPHbjr2w+891tk+KwJYX9swPp5tHhSUbC5e3h/nM9O1Zy4+//HPXKo3xe4KDFEMUeXWF9YOklHv8adMbupRmLpsc2WjHMtaKSCbzU353/md2fFX75L5hpRDbizKKNij7nqr3/wC/fKv/dFX7v3kiUuLn7nvwde/8hXbDz3la9hp9xnKIjOyPH7izPnVlbX5PfWCnIMJX77GjZ5jRZdlKYiAWudedzRfOp9f0my8GWWJ7g9GoYyk4oiiVAk2NhYEQJZoyJEJrpQoMhMCC6VqVsujNA0kjxlOTI6zqcpb3zaL+mGWS17dZdlU8+XVKN4cqz0/dai5+5sXL26JQlsWRs5zRk6pMJ4WExMhORen9c987OGf/Y2/uPjUPbXxaGcKTj3x1KA90FsdVpHGOON9hjwMq4SMlLTWySjq94e1saiJE1mRKjU9Pr03G+VcJUG53uC
S8lQFgEKIIGKMM0FWGsY1jwqJ2rPEy7JNU/JGBhwJrLPWkYxiBywMgiwZNsolIJ9lKQt0Y6yRZIlinEnfH2zVyo00H5BWQvA8syxEJO9NbrKe12maATEshVVnnQEXBmFeFIKBlOX+MCHHo0qUDnvoqVKuREHUT5PuYITGMubzNENnAfz0fH1w0dbiau6kyXN0empuSqdZXuiDV1996dILVvf27tt7ZjACbwMldJYr4GMlAc4NUueRPOzc7HZOzOivPMVoxy5HQAwBCEMhjQcI+ImVhVcdiza3YXKC6hOqSK0XTAKkLbAWiK9XZ7JgbNzmSqLK08QnQ6Xce99z7fwu+MfPXjjZHlZV1ReDhUtDAvdT73zHyWce+YvPPv/yi9H1B6P7n1744M+8KdH5rgN3tTZX06K7uLFan9xbq0f3/uhypbxrKb+hsmfU47SnIZcud8j6w/unC1dsrPS9dYphEAqdZM+ePS+BaXSf++49L73x+p9+85sfeOzRhcVhr0e16X2vue1V993zRRVmvdZWOswD4z13BPb2229cW+ssb6/H5bjT7oQqKLwPqtXLS+udTqY1MAUBF1FFnDl39ujR+euvvmZ56WJ72K9W1ZStZFkxPlFhTFivW60BR4xLwuikHDPPAISr1MPRqBDcZyNTnqymzn/289++9ugNly9f/sa37mc+3Bpu7Z3du3vfHi/FqefPc8kDZmiQ5qNurRkOLnonS6cubjNfufrIbZeWTu1MwdJquzler9Xjfr8ol8M4iOcnp0pR1O2mkquQRf1uxgWTKBEiFcH0HMiKKpxNWm3wjiGzznoC8DvXJObJK8Gd90Dod1b9REC4k2VieAUGxBkSonVeOxikaSx5pJQEcICF1s6hNhRJcOQtQ8G41s4VLlTKpjlHCAIpOOcCpESb0Q8e6T19Gm84zq89WL7+kAorDrDIcue8RaZU2HTEjCZCLWW02YbvPwoX+5PRRMknifOmFAZFYZ0joJ0ebnKFIcY4YwqlFiYvUm9FSEyh0TmWleLeoSSkwHvjnSVL5DxxZIBCiq3h6Mtf+ZbJXGJ8wMPIwcR4IUvWgmEAEpmzjHgAaMlrxoEwRAwYBt4whiELGi881P/Qb/1vq6OJ/bU/vvauXfs71m4RIhPS+wABHAWe1Z/80cUvfzMf6PlquP+uV9702NkfdlqL2gNy5Fx5Z4USnnHP/ObqhU63F0UxICCgNc4ayxCZYMRkMuw8ubkMjKQUHMhav8Mg8t4xQEBknAnOnDcEnMghcImIYAWi39GbGEPcKQ8UHgiBeQKGQDufjkPiggtltdeej08eYOCyrKvzRKnAeYtorLWIDBC8JyDw1gMDZGgJgLPWcHj+/MJ0rXTz7ka1LNJhygLQntq9TCPY0Wh7Ie2unzPOYUHr2xlZnVtYG7oDNbhpf8P1hq3uoJ2k2jvBVFmpUWaHbvRjpWjnBeQ94zvZOkAGQGCsE4IRorNeICmmyHoGwhrnGSGRdx4ZE4I7JEYcGSfrnnzuiRPiufruSjEqFBejQU4MrEfnnEeGIAkhN8QYMQY7lHGCHbK4RyBGwDhah1iavvu3v1GH9PW3jv/q77+O9S85XPaSsDSWFlYJ723KsOWjfaN0KiS93elOHd3XHK+kp76gDr0YeEmL2uEbgovf3t5f37c8ooI0MXTeaE+KSU9eG6ek0oV2SIKXzq/nJ18wk+PQaNgwDGzugXKpwIeQrQAzIStd/aGPP98VlUHf7K/l195Y2exh0nF5vb5yTn/j+OjFL5n74aNt7rJ/fiTlHnO7kg4qxiGS9SjX25Xf+9v2e97cuP7qa/7gw4+stEsyqS8Vg997X2N8ynIuggY0KKg3PV2yTCjggq6QoT0y4kwwJhSBTk2nP5Cl0BfaWyAE9LQTImSMk3fkrEDgUnhPIBghKsk5B6/B5Vob48EjIBIAARAKzoVUxhpCb4wLw5AAkKFz3lofhyVd5ESeA/fkiSF68jv0JGAABB44lx48eAICqRTnaK1zzglEAEDvlVSATI/SlYtnUo+n1rYOXXezAy44d845C0gw0EWzOYONmatvfhnfWGx3zjOHQghkaD0O+1sPPnrPwrnnKuXIDDIhIOn1zpx6ZvPyRUfZrsmxO2/a++ypE1w4FbJKqPotU5kFAE+5Qw8OddAA5UKHUbKVRSwLeOSLH6cNdr6lWbb59PLahVGw+9B6u/6th9cfXPDGZvUKvPcttbfdJOp0ZvMMPH8Gs05FODx2K0Oh+ZKYv2ocqma2mr3tiDtzPvjYV5xxnAjQA0oB4BjnQgoNHpQKmamo7EM/Hb/73ROypjzh8090nnp07cDBXfNHJsXiySf+9al6DLMHYlF1KipyP0gDI6PlN71p8oVO+N4bpxpzdnsj/frTg3f9/Nv/5Lf/8LtPnf6tP/v71tnF6++eqB0EvcUwDTNtLBRAhUucZAyEHPaHjMin6clznflXvuZXP/KvS4ul+QPpzK5dxiYiIGddJQryJDeecSacCYueOrJ36t5zp1/7hjeduv+TehgcPfza7lB+/1//38nm9sXl7dxWfvtX/uPBG1686+hRsIPt7ZHIh2FQNPoPPvgPT0uXxA3FReTAOS5VKGyRM1SGIiTnIazWx4usl+UDKblSMu0PX/+e315ZfP6ZB88gZ8Y4ygZSVJFyax258DMfP30QXvSuN82pcAnKWpW2PvyJl9hMbrjklz9036g1RqSeXdTbX3khT6svuW3vIF1otQeVajkIglRH6H2eZYhZ1s/Ga2OiNBlK1lnbrFZKickQPKAf5bk1PMtNNLnLpp2ik6dF1mwqdGkjKHkyZjCoVQQKwbw0RFLVm9XZtdbF0GeTTdEbJM4HaV4gFVLGiI1yNBepxVwkzdi1Vs92HroIUW16/xGZue2tdRWXJTc3v/jQ1rlTrFhdfP6CFEGtGlORm5FJ+plgeXN35eLZzTQD29pKE9tohknSM8kgqFQnpmsnjp/NCdJBwRm+6Nbrzx1/TiFXRMMhnDnTmWnK+X0zCwsL8XqXPCOuimxYLdenG3t7rVXQJo7E2PhUb9DXWX9nCjrtltMkIhXHpSgUyDAvRgw4gedCOgLjNTBwjpxzjEGeaM6YkEpITLJECYUICCCYQMmsKQIZBlIxQK+Nd16Gqj/qGe+8d4RoyF8pmXeeQCATWmvvXGG1tq7QRnJWjkNkoBQTgnFk3lqjXRyWU6/JuViVYxkWOnXGBULpQkeBlIFkXBi03lpLaI2RkodxhETMIoIostR7L/hOPwVzVBB4wWRcrqRFZq02eWGdj0oVQI+IjHEZIEXERFIv+6kKUxhP1qheGjbKbVZrWbkZ10s+uZz2EuZhs51rN3b5dHHqtJ8bn+RZfuqpy4bzi8tpe6QWNvJk6LKc2QKdQ3DgUQiupIRIOCDPxY6dkw17aSoxqIV3XLNbJxcobwkTouPLK5faGXWNs0Z6a2+5duYQTPf6ifDlQZLHtZolytMRymi9vfz8hYtjpeAl1x0ulSqV5nSejtC5sfHdG52NUKpqHKWJElFYDFLGFQ85WMM8Khlqm03M1HjAmS
fgUaedKuutBWs9Y2S9azbH0kGW61CbpZ95hd97cACtAecTtqgAEqCR6K1PkI2CKABZ7fYq//h3qz/4/uWFNeQsBMwcFAPjLXMcmBkVRNyk/NR5+MCfnmFegpOm8M4JxjQxB+AJ0TjLkCEggAcipJ0vZtCUSrFUojAOIG3U/E3Xz77xWPnQ7pKUAOS0dqPELnufpnq7VySpLYi51AGqSi3eiU9eORghGkf9vJisqKYq942jUQHkZcCEAGstkPeec8YcMKnEaLMdTJVj4+ywmNq1X4aT6+3uG9/3U7sO3lxvzpw/e+Ev/uQPnjn9/K5y+Wd+6e5rXrH77//pyyuX16++arK6wNYX4ZHvL00ea3RHvbhSGhNBp1uMermtyD2HWK67tejGpHVrVb54spyttbtWG8d0kRSCp4evnf3bv/zycw+c8AP9+7/3V1XhmxPNU8cv3fHGW19x26HdpcIsro5Gmys6G4/jqfLY4NTKsfn64w986c0//SqXb3S2gAUit5ktMkP29T/zU3tuuDMaP0y+4ryISkKP0tGwt7bVWVtde+C+r25sXBpubI43q91NmppqMFGPo8hElZe+7Sff/vL3NqbZ6spCH6E8OzZZHTt66NX3/eCro/alRqmye7a6sLox0IMENuOZcpamOQ7rId97ZHqls9HvyP7y4tF9U2k/LPneu98bv/xdIxGk192khPb/619GrZ70Duem65imn/7UZ6cnHmAuUYIXgSASYchAF5VG9fLmYGKzZ5Hu+/an3/j2Pe1Bp8hhtLkxVomO3fLyWPtSuTu5uwScShPRwWMTlxf1+UWbDQvtdDEyEHEVsn6vsESMcRmKXme4r8ledWN65LbtPhjA0NEsY7wwI+uGt7xlprOOZx564PRTP3rZXTc9+sjoqn1zb3rF2595/sR93/9at7UxNaYK41C5H377Sz/3vp/HfE2KdQUd79ekTElgPujw2tQdd+9aXmp/8VkcOBLIy5XQOVFqlAe9dJTmOMyr5bgxVecy6HVHRWKste1e79iBa6amxpe3W7fccsPZ0xdQ1F5399U/uOdj/+E3hCk5YzchqBBMJTmr7zLQOl/ZWkMzXqpXBQRb7RFwBMHDWvT2t73iK99+UHeTvfWxw7uvOvn0PW/4hfc9+sP7AODcqYulOChX0HnDOBZ54WWc2TxQslSpdYYDIdjB+UPzMzNTszPPHF9sTExFOk1625326kRjV7U2NvDgMxuIShRUW712XCoFUcw5bqcDxbiQsWNREJK3UImChJglp9NWqTrnPSAXUkbOusJmlVI5GRkSmqiggHHJq5WKlIGCMiPSRVEpVXQ6DKRM8ixLs2qjbqwn54tce8/jWk1x4dEBGCQGoAyABxaowOSuyAcMGyh4RZXJsNFgVGpUJEC7vVUbj8QK5yqamJ1eOL0dQMCYLPTAWCfLQVxp2CTzWRHG4WCQTk1NamsHSZ5q44DislzvJJ52SkQRGBJ5xthOPxVjnMgDQ2TcO+/JS8a0g28+vLS9Cr/wKuhsAXDNGPj+YNgFl4J20NoEa3rT0dAMHY9LlVK5309S73TU3ndw/CP//c3vfP+X18nWKo3exmWC5MSzj5x+7vIv/ru3339u5V++9ZnNdvvep+/9zhf+rnP2u7sO37rO/M0HZwOyuxvmTXfd2e6O/f4Hv5IMtF7WE9HeQOBolBYFjE1WtXZXfLUMBqNMKmGlqDUrS2vdB4+f7PQ7Nx275vCNNz7yo6cvri5+7kt/96Lrrrvp6qMbW9tPP/lMGKnp+dl+t728NUoyP1WfrFaqr3j5a79zzz3cu+k9E7unasXj3jGmM4PMKsHDEpcyjyfLTz7Vr4/F7f4wt3bf3ulYyu3OaH523BozGGb1WAYBu/a6/VNTE88fv7hrqnb08P6nHztbrsdW0PJiv5dmP/muu+tPTq9vr+e9rTTzubfVZqPdGaGH4fIGn64fvWp26fhimvkk2Tww/lJXjPo2ObV4TvIr3U9xUOmn2FnqZxk/dOTw7K6ZyJAklCIYKzeTYa71iDSXAEBAjnEACRZdDs45R4SMmEAi7x0gWG8AEIFzhh4Y854xJL/T/nzl2eO8RwKGzHsPBJagyC0QWJ9LxmMlDXnGKMuttxDVS5Uw8JYsUFIUubFMCBXwKApCKU1uQsmRqWFq85H8ykPpky/gG2+JXvSiaqO8FZSs9UYyYMBs7tOisBwMNB59Jnv4KRz0WO4KoRRHaXXGOSfnCIkxhgxRAQoOAn7nN+7kYmG7vVANY2Z0okebbT1K+OWFQWJHi9samDAps5YDIXB0znHweZKS5s44FoSA6DG57bbJqLLhCZgn9J4AvUfuHDLvvQXvicXE6wCM2NjZpy9/6EOf3NxWTGL3TO+jf/2dX/21VzSmGhRY4pzz0LrYWDlat5/56KO9QWygdPutL1ZxtLCyJBlm5L0HIZlggizygI2yxKdpXKoko1RwtNY7Ii6Z944zFIL1uqtFqpkADgBcBAwJPAEgeOTcWscYWu2QiSsASQDtrEAiQust55whs86BEMAFkgdPHqxzjiFHBBkIAu+dR2RhEFoz8ia3XgPyNC0QyXtHREIJIvIMGDDBhLbGWEPIpMDMivNLySIvnhbbYYSxCkPpAwW1UlBrBKTtoLVtHUtGw0CJtNCBCgMlJyuyFNgQcEQ2McWQgq2hVcKRNElSMMH+/wE0AECGRIyAkPOdCnbrgQtOiIxxJSQA/thRxZQUAMg5iJ1eXYSopASXEmXonCDLkdfCqL3h9IgiFTqyxhnOhfOeHDDBiSyXEpBzhCLNGAciQsYAPBE4IjAGSbG46uOpbz66tfQ7n/kv779r39WuSBcS1q3sUd5kNlu98PBXd7/mL2W6m5tlruchb7twIuksVcpOJx7Gpq5925sf+MbnG+ClxxFxAq+dcwwiFRlbeO85CiG8c+AZJkY89ry5+VhtZmrIJQOHlCIiEcYvtMv/9V9andGitk1tkWT5+Y5+fqkPnDPvsyQrcu0te/bpk4G11uLWmhNCWMc5I2QWdlz6hp3fqvz/PtOpwJof1jxjnujpk/oHD+fvfEvZodPkh6Oin2DAQ+M9IrPGS8EZ48iY0Qa9F4KDZ6PU7EQhPREXApjznhiC/THjzHtijHOO3hi304gMTARBb5R4ZxHZzuftrfeKc0aMgWCCMVJSAgrrHFkiT1JKJCHCMnfOmALIERIxz2GHVeThxxxrRKHCQDBJnNm84IgqkMYZow0D8kzuPXzTwtLpmixKIBcuP/K3//IjgQAeEUAEzGkfSsl9Dlne2Tw5Wl1kETIQmGvvyRsnBX/qsfu41Z7bYpiHAa6trYThg+/4hevH58rX7R+Peo9efZDf/9CKDaqB5IuXBvtvHDPce2PAgpAYVzAY2u0ejToac0yd8Ib/X1LRM6e3Xjgxesltt/7V51aevbBhnEBieyr09jeI17w5GZxf6negt1rvL1cOle3e/Vs33gCLGy6aKNd2Rb2NYc0Hw5WlN780uu8ROrcIQgDjiAydc147ZrHigrjIfv515Te8dWrv+KmK6hc9ywt6+c3xy
+9oQpBStYvdqaK1ZtbtqJXGUSJLQlQasubdcGAzptPgre/5vLPJucujgjUO7Nr3nW8e/6P/+d+GgyREXNra7G9FZR9YFrCYS1eyKTiXBtUyC2wYSe7k9FVH45G97a6/tbrGpbiwceG2l+6/eOJir1tIcGm3iKshoSBiSsQBugvHn2qOVY8/8UBgi3hi164Xvfq57z3Ecjck58syZjK2Rb75RDpMdJHqPI/B/rv3H/ip21xvZeXIobEHz8ivPbBlCkAgEMTQK4VpmnljyNls0LN2RI60A0TnSSyfe8q5XhwKiQFIbZmSURm9LYqch8GkqnzhqxeeOZn+9s9mU3t9vTkhq4XLOqgH/+3XX/r+//iMlawcq26r8837HpycftX+Q9PAVBDyZGj7fV+vyfo4knHNsUaae5sOtbKNscAzy3JLnpD7ibGqzt3m5uiH9z0wVWkenJsvK0bM5Q5Wt9tGr0+Mx5h4AllSVZ0MyNU9Ks984U2rUwyTgoRVDLNBx0ZehKVstNFdXSzAeG/7W4MgrCArhquDvDesTpWrjSiQ1a2lxXyUXlrf3HXgSK8/EhLn56cYUa1ZzfWw3V3JsnavXwRByYLaaK9wH2trISp1WhnnQpXCwaA1VqpuDotekoWBSLcTzkUYqIXLG3vnUfEQfcgg2ui0rr/2+jTxra3tWIj9h+a2t7e63a42RWH0FcE01xxEIKQQwliLRIwQFAOkXGfWWCGFMa7IDXivAsUQyYPT2iKEgWIemZAI3nlNwJQKoqCS21TIwDPnrUuzxBorGBNxaJxXIlYiCDil2QiQE6C33ntvnQuFAO+VkoDACKUQxuVBECCDAESu8zRL6uWKMUXuLNtJ9UvkQsZKZUXuvSNyUnFkXDDgDBmRd86TNcY6R0qGkZRZcSXmqoIQrCPhQVvOvOSgVGiNzoZ5qR6QzRmnUjmo1FVQqSyvbj57tn1+fSjYmQ++PLnuWh1NgefWgeel2tnj9M1H/Na6Xt2S55a1quRm1LMGChTWCecNeQYEgCSUF+QEgOQgJYZSAWJROK6E8y5LtVJcg+8MBl/+zg9/4Sfn69VRJc4bE41BpyCFELCN5cRYrKsLzfLm/una0yeeC8rNjV708IPtwagYZd4SeV/ceGRa65WVpdHiswPvfD2KLShk4ezk9NG56UY5LlXVgNJuv1Mda5iR3hx1GMogCAPVzLPUFbZA60HwgBMjFMAFoXeRoBji9kDVatn4nm0rrRtOpH5OgXV+WyqGqJxzwCoODv/3v3z+4cfX1ze5LdBxbzFBXxBj5DEKY448T7VHIvC+oKIgYo6BBfKEhOTBgSdEZAwEEMGVMwx48ojgyaEHRsANNqU/ejD4xfcevv7aPeW0RQTGMWRFfTxeWWxFlaDt9KmFbKM1NI4od8BlgTjK7Zi/8jsZQ+TM2xxze/fNL/viww/5Kys8kWpntCnFYaVWMlrnhQ4jtv/IkT2h0J2BR7G5tjV7+LZqtGemMje4eOp//eHvnmoN43L9l973+sO79p/62lfOnnto4ZlBN2fqutp1B6vJ5fbq81QaD8irfpHUIlEda4CUPlQz8zqCgvPaTdfd1drq5CKNa5zLlAln7SgZbpr1/OufuvfqXTNqvFSga2VU9IZ669TTJ3/7t37jLa2Vi/vGGofMzIUnnqsfHI+qaqXTE6iDevDC6RcwkNrricn5ZhAeuPrAgRuuCmoz/T6aFKMaQpZ3L108++jTBlpXH9nz9OM/2OWHe3bXGtftee7s6NiNN50680JzdiaerOytVb74vz+/sWFvvn7P7qv3nj2zmreH/dWutkagqpVjVXIt34YxDS5rVHC4fSlPaDCweVANCkVD/8IDF7JVhVmJ6cq77oJf+IN4o3eynAI3+l2vg4un03vvLTenm912qzEW5wg59fcfmn32iRYHliZZHIZCyFHmPdJzp9aaLBiNtBAhtzzm5b3j+0tRFcXE5Kx+8u++7sauuermty1cfnosBhX34gD6vUyUWKVZTVPrijwIuZJ80MvAwuRE9dh+eOVPREm6ysoz6JvOCilMGExqCabef+Mv37S5frS8592f/uwn9s/OrRx/FEdTt9xwY0PhRz/2T8ZRmloVsi994VN3331XY29iYZHnCwjLUPiimBbj16cBlzcOfv5fjtoP3X/PY6Our2YUd4dFyshrK5DK9ThNTXW86RwLS2Gv149j6VGhcSurXR4HJ547OWr3s+HKvy49P83T/ZPFy95VZo3ce404QcZhRUC4/fafqp/6cGtdG23HvRMyCIOYkS2+9Jl7DRHn9LafvnNhqzc/WTz63U/tTIEKRKefMAVkvRScI/YGXSmkA5b3W96Sy+3EVCCYOX3iJMOKTr0BW67UbOJtZnKZgYAi75Xi8Vz3xxvjo6RnkbMgCMoxd55xSNPEm0Qwp72KSrXhsENgxyu7hibtbC4wcGEshrrwPq5E9e6gVy0HSOS8DlSY6wyJOEcClupcBkqFyjFG1qsoTrpbAQ8sETJG3jqTc9LlONA5FcUgCivGaGt0pgtASrOUIAAO1jhPUkgfhoFgor0+DIAN+2l3fWWyUcmtrUT1PEsd5flgVK+VIC459KVarFQ0ynIpomrEtzY3UfBKFHVEQcZygTubciIg8gTAdowZHpz3yJExNNZywQUjGaqT63rXxB7IFrM2xGXcWCOfQlFAfVJFY+S0wXS91NiXjQa+sNKxaiRLqeud3/zoN//hA2979U+87hpbB0HHnnv8xHd+eLp8DZx85t7tXnz46OzEVjgWFv/xt/7gHTdcRWy6OXaDSftcWiacqoVR46CbuPfj//sfn/joXyQVfObk8mxNqTDGMozP1PvdPEvzajN0BQB571y/PQgQmDFnT19aXd1sPn1yz8zkbCnoX0oeufehZx5+anpy19REc3FhY23z2SAQQRBMTo0FCN1W64c/egCcN14PW72llOJSnA4SLpgKpdbO5f7+e5+bnRpzEV9cajEuNOFKq8OMHY3yzI50P9+7bzJUYa/VX9oedNqZ4owATp65jJEigq31zr69zdZW+tzDD/qUD1u9n3jtrVuXLvWyLBvRVHPSdNcdp0BAv7PlHPPkbr95n/GbE/XawuVevXrImCur5IsXtjiLsYBmuXJgfKoaxZa0LuxgMDLal+NwdqYhRJhn3nvd7ZtWJ+10hoVzYaQ88Fwb7x1jCIQ77T/eeURvrUNkDBnRlaZ1zoT3O3t69OSIMbbj4gBgjBfGkWeWgSeQnDEiIUUsBBVGlMqWdqzPiEIiskgpJTh5JwPhicBrZJaBKwU8Ldgnv7n51FL6nrv49GQeCJu4rFoNZImBpO118f0n8XtPyt4w5lg5dNX+u9/yE5Lo05/+zOryoiG7k/lGYkCkCAJy3/vC//rN/3zk7tfPcRmCTpFx4iGBSkfU6+Vrq/bppza+/0B6ZiEHH+5k4VFw521eOGQMtFU6293wR484pXLhFBlDlKEogUNiCiCAYuShDuF+MIKR2r6U/v5v/J/FFeYVVwpMDg88vLS99YW3vXbuxjunGvMVYDXAydY6/fov/snl
Fe9E2ChBs1QKhev0+gQghWSIBDtJKlRRef7gte1LF423Vuk8TT34nbio9448kXfGahVKQCLPbJ4gkuTSOoZCAGEglBTMcW/I61wjMiE4OEveCc4YZ8gYEUnG3Y7BzJud5A0BCYYIAOT8TqSHvNXJyGQMvEfuDHEWCCUJEnTkdy75yI0lrlQkOQePHIlcwHDfTBPIFIVdy9JWpgsHiEzwxNleLZJSikY5rEQyKzLFRYhOOTtKk2o5ZkEcNGSY5cORDhV6shK9lFaGV7DWAEAEV/ZjAER+pwqOAyCgkKFzxjtP3AMSIkrOiMiTR2Qy4M4ZxiRHGaDaXxvbxbFRx7TXHSRrvKYC5CPtM505T847ISVxdN4xIQjRGuOJOEMiuKJmAAPwnHESHrn1xdBIE9aj8xuDd//Hv//Vd1z3ppfEo+T5pmy6YuSL9V3zV1PSz0Zned0HMNDLp2X1QBmbIppmwyTreAqbr3nvwU/8n81qWG2PHBOCjPHOO2a5R+d8Tjl5z4SwTjOipW11fik8fG2o6hAo4UZSFHblOfzzf+l0hpOdzCDTHiDkpL2zHitx4BVPRjnjDJGyInPgSHhABPBIgOgZA20JyHPJAAFz7RiCQgKDElMtv/F4901v3lsNvNHDTkctb6UaBCB67xhnROAJJDJAYOgBUZXCNM0ZAmOCcyLyQMQRvPeMcYbABeeerHGeUKjS/muOcdtfunTBaS0kWmRAwJETecYROSNAT3jg4FUb6wsMnAcIpdJpYTSEQgHINCOUVikssOCS52m+cziHnTwI4+R5GMaBbMxO7u4NW0Z0s2JESBI8oSPnjCnarZUQfDbqKxUw740rJApjHBAhAw8YSemyPLaUriwwolplrNfrefKIiOQ5eMlJWzKZC+IQncu7yVtfMf3Gd+41dEZvft90l44ehVIJFlezpC83B0RJwccMSXJaIHHHLFfIGRYm2lx1iTVba/93AO0fP7Hwqje+7s+/cOqpczrLhOK8xO0dN7gXH0nc6lbgYViLHzivpgp97MZ89qC7tA6DDErjiZgWU+FUr71Wmocb4vpsrXdRCMYdeesKYIyLMCgye6Qe//o7pt/ys6nYN7J6xmXKreZ5OgL01bIzIhVGelkS8wdprlHt90LJXNrpra8WPWbd/nf8PxcutvZmhWXIGNWB8NLlhQ/8l9+OpJwaa+S90eWFtIxz4PvWci5YFFdReO0QkPorfV6pqPHrZ190x1/+xn9jWLVgK+NV7+Hi6gp4k/RHca0SSBIhuIKicri82CLgb/ypN3zpsYu/9fO/+K9/9oFk9b7j+eWXvvH3zn7nfG9lJURpXWK0ydrbTPIQ03f8ynv/689MFvhA6NfGp8QhWbr++rGLi93nLpqx2mSvk+jcMmRKkWdc7MSIZDlWcZ6MuOS8KJ598ge1EjWaYyKcWV9dQG9MsmFdGpUqAEPH4+1BwPuzvNqY3jvKcx0x7Gsd2fzWa/bMT6wN/DZSYZVs980//eN33/2229udRcJ8ujnXy2F+Pth7uOw5KwoKIh7VIhm6zvo6chHH5TiKur2BznNf2AN7m/XxXff/4MlerWa0rtUl2CxSVAqrUTiTjhY6RV9JnK7V07RHltcrZQZBZ2PDWhmXy9K5wow4b6ggyIuOjOvJoOsdGCPjsghDSHrdfVdNTc43mrsaa6cX+1ttZ81EsxoEWK7EQbQrL+oBJ8GllLJRg3XWiyTmhR7YBIAhiNz2s77zI+Od3Bqko9y84c2vP7+6yKp4ZG9NW7uwVvCu9rlZOD94/TsOdbeSMB6fObifrGslm+NxvTo+vjVaS21/ZXUwNdncffBK30dzrAnEamOVndyNLoyUKskTQlBMAJCzzlrLEFSgrHdxFBlD4BwYE5UDhiy3trCO0CsmGZO5ThhDcGC0dt6lWWIJMZAMCbxXjEkgZy0CZ8gZMscdKuWtBU9eCCWlJeesZhyjIAxVKKRI89w4zVGO0kJwr1hkTFEpVZADIrPOOuettSoIlJI7C2oESLNEIAdwignHuHNF4VPkKLggzyTn1pI1RsiQOAzSQW84IJRjtQYTebUUTk3UcrIXNltf+Mz9ly+soUGfsvEpc2lev+w2EAzSxIUezl3K739QPPKot4W1KLUm0x9liQNSIgzAa8RRXJXjpVIlYEqCTl1cKi1v9ojhcJR4sJWSeu0dNx8/cfJi1jMWODDg6uJ6sXtm70tvmu4OVkfJqNJ0MhDW5jP7WG6ZiiujblEMWnum7Z5d7JkzyQNZqjUXiAF3u+bK+2fNxYtPjo2NVYQwEmTk+p0e8dJ3Hlj7WgEHr97bHbrbjzSvv3Yqzbr1UrxLhc16ud9NShVpeePe4ye/8djzR/buuem6qyqVoNloWkojzoKwwsuqMOlkVJTRWgscKgIlWCdISXTeZ9XG9DCv/6ffefLJk6JIOBjrwDpvke00ziIh0E5EGRx5JGuAISCSNY4TeqIdRzgwYODJXWHqebjC0CMCRPIeLHNFrsrmna/d/dPvPDDfTBT0WS0wnhWF8yYftDvgiIpMWC2BAo5Cis4o05npFcbmxc03X70zBaHicYT1KO5td3/0+H1z1cb2xpbg1TCo2OGwXIqAGGNScKs4UVEUyXDbwMra+l2vf8341NzsketjNVh45tk/+cjn5EzjDa+8bbNrHn3s3HeSk8mp5fmZhiw3kKWd7cGNB4++sNJfeCGr1cKJgyWMtSuPksJXG+PcaL0xCkvEx7IEzvFavaCsOh4m3Y4Cy1TnwK3jf/TLfzU3U48CjCtqa2tUq8S752cWlpaOVGsPf/6Buiyvd4ehYC85OFsKAIs0F9Rf35rZt3s4iK962e13/uJN5bldZCAII59rLtj0RAVccuqhh7/3hS9KPVq9tKa9eyD0N9xypDI9Nt4oze97UW0uHt+17+53f3Co29/57lde9BPXffi//XkcyVLIWmubPIhtVqRDXZ8MRkneS9ec1b3Ndc2z0kSlyGRvPbMphIwNR7rdSYiPb69n1FbDLTFI27tKm8VQb21BQYfLcq0SjP7zf5g68UzSGahDuyunLi5ee+z6M8tbyxfXJBd5ngeRQsEQg41ueyIsQ6oqY+UTT58r3nREapGlLmS76rN3PHPq/hcur73spa9/Yc1s9EbVUjXfWk62lysYu0AGUWy4GvS2Jsqm8PVB4ZAozawu0qHWtUPjogkMUh8NmQPBU4BAQdmy9KlnnnnFG39rfP+LR+ngnm9+ZXx86p57vtCY/ff1+gRndWIJEREprf2pJ46/ZnY8K9ZRKij9xGg7RVPnWhKtJtxrtv7uPz1658niz/54YWG7VFPVUhRurXVEKFiIRW7C0tjC4qVSqMI4npprrqxsZrmXzCfdrf5gFFfiMGIC5aVFAeFkNKs8FHm6xVjApAKLhYOr33Tkc2+abi/Tb//at8+4hgsnnFejbu6Ay1iYlM73l5kUkLlTjx2/4jDtp4xz58kRGWNChfVqtcgL7TRYjJgEoU4/e3JrutTvDHcduKY5dcPaNjKheZBvbl3aXbsGHRurzafaJ1mbpPLkjM0rMvbGW50
NkmFj8nDhc20MaJ/5AZHWedEbXmDxLOfEAJyx1bjmvQFhVYkhZ7ootE0ZR0DmbWE8IedRuW6Ns84JyVOba8Kg3AycU0JlhlQAXmsAKrTVzpEMG5V9g/5ZiUJFqrBmlCS8rMJSXShrfQqI5Xqz09qKVKS1hUCh82k2LNdqvUHfOSuVdIXnQna62zwS4xMzw27P6gKdqdbDmenaRj8fjkau0IjgPRJcuRdyxgh2blmAHBkiMvBkw1AypEK7xKP04n/+8+L7381y41NN9ZIaFdoiENfOQncbKqVOPYj1YBA0mtWpsa2Fji5mO+KV/+u7n77nF1/u5KVAYYDi2DUHX/rK114+vva7f/2tk2udosiSYTZ325GDVx396y/fu//ZzV95y437G/v5bCOsRzbbyHXx0b/588MT+V1/+/4vf+oLN15TLtevfvyBUz947MThPbtWpev0eWdQIDBy6KxjzCERE9wR7wyKUbKZJknSH1RjNVlrbvSSp86fflF0+MUvua472P7Rj04MrF5c2wwdlsp1gcy5rFQKhu1BspUbR0oI6wg9hEoU2g77g1WTNWfrb7n7rhPPnOgU6cXLq9ZbVQniRkOorJdS3hpgandNxUFUjWv67PmNeqPc7Y1WksR7FrCeTd0Lz50dDbWKogcefCppddp5sb0xPHpg5qVXz/W2Nq+67qoLCycUikErffGtt3YHo8HmoB6VOoP25HjzyvIslWj9eLly4zXXU6ZtZAMZMs6GOs0Sx0NW8jYMyOU5gEG0Hi0P5ESzCQ6s5mcXFhw67z14DxyBEBEdEWPMEXjvCZGAcQAiAAJPngA4F4SAgAKlQUeWkDEPAMgGhZHIOUeJWFFBEAaLq5uVanlybMwMk2QwdJ6IkVTMeXLgvPXO+UCxIs85sCAMRKV+eRj+8xfPvvWO6tUHZKlSypNhmvRDOba9Vrr/YdMacFWR4Is8WT/zzH02Uy7JCACBSykBiLznkk1Uy0KI22+r7DtyIJbGIBJnO0VGAHmlKsuV0p4D1RfdNPn2t4Uf+z+nPv/N1TQJUUhEDoo5XXAlpSpFRXrTDU1WXvakyVedQQDyYJly5HJvwWfAy/uIJomXaKj/+W/+ZWHVORVboDw36PlAq2cv8xN/f2rvt578mz9+R3N2+sTx9T/9008uL/mcJKBOmb7xjpd/+1v/Jjym1jMmuOB5blQQ60JUxK5hJycy7UGLgyIyVmdccCkUIgZKkQNnCBQKITSZcrXCkCkRpakFdNJnwtpaJHOyqZeJ5x5YUThAEZYinaXA0FnLOReCMUeAqGknZogE4MkzABGUQxFnRY88OecEQ0dAzgqhnPeFK8gT3wFse7LWgmfeZ9p7JRQxZnWaO7fU7UyU1IsPHe2a/MTiWic3mXHeIHnRG3oLWS8xYcCqAuYmm5LzSHpUpdNrPeNg/1Q0FquNzS3JQm9BD/NapaKv8PdBCOSMG2sYI0eAgN57FDveN+dswcVOUAkl5/wKsBiZx0AqLhSXIghKgQhmI3soav/RP/yNQ8EZtk//6N6vfff4UvbsxY5W4dCHaaENYjrMkSMw5IAeEJCQIRIBgQcPOxhthB0MXFFkLEfLWZE4EUx/5L7RN+7ZuPXo1K8fPnLysc9PlNzkwesCGZDthlFD2o1zT3/n8GveF7TbdvtCtXGw1W1DY/zoW16e/v3H0ZQCQIcMgXkkTS6UgfeWASFHzgic5Vz0C/ndB7avO9g8ekNkbVvVJqgfrqesnbcy45hgBiwAJnlOgJyLNM8ZIXlyhM5aDJSKgjTPiDwXyDizjsgRcME8IQJD8t4bAiaYNtYxAYy/sAb/78eW3vmG3UUBn//XpsHTAAEAAElEQVROtj2MtEEQSIBSSGcNAyTvdsrnnXF5kiGTgWBZnnHGgKF1jMgzAHKOh+GORL1T9UNOY5LEQaAcpQQIEKrQeAtux8aF6Jx3ILgKQU9PzIyS3miUSFHyghCM1Toul2975ZuffepbaLKyj0ZpKrl05BEtEQMEAYIDrzRmb3jR65QpjY3z73/jE5ZSgY4BAyREDt7l/a0wkkmh9UgLAYTMOxKcFZllwnMhPRECemfz/sgiv+uVb/jW/d9BrUlbqSQ4KDLtnbPayQAJhOdiDNaofa7bOxOTqtZEoe3cFIRk+hV55pxqr/LxKgNpZKms+wU6P1nx/XWzsExZtYS12mDY+b+kou189z/868LGSjtW3ANCbqrh6P3v29cdXB5sQ/+CPHlJ7C7Dy48M916TugCEg04fahUKGhZilXZtBJgthGkvFpysK5B2bFfMe0aE1x3d+xM/GavaM/mop4JZ7UTUCIOYRpsucOPgB3kaCAqc4xiAGBPOrstaNDF+9Htfz3/+dy+n7EBmjBIMkYPzTABnbGZiYm7vvueOn4pC8ZOvv9kOWhEXaZaH1ciaLIplATJgN7/2fR9bCmY+9Y3f+9tf/+vLl1hYkrVqiMI10DYkdshWg6jbG5VLYZE6b1CwvDYeN2rl555bn23s27iwnW17IdG3lpMXnjUwvvvm90zE6vFv/X0YCZBcCNXq5Te+7B3oPulHK+3EF6NQ+KXFF55v8pCT1KmpiaiIORfekXbeW02erJBhGNaLtCACqRCZsOi3WzmyVSTunXVkkWGWDSUaK4izsYXNdOSitU47VjU+5HFtpr3e1+2zL7/16Kce2FDMAlFtajos6LknL/7Uz9+Y9rYOHT2W2nx14YxSUa7TfVfvHfU6g0R7D3G5IsIwLSA3mrEAUYDy5WpFjJXe+pZXPX/hcpLlU7WSzZ1NBqokdDKMWHm6MpUUnuQMyV6spM6SPNPAeW84hEyMl0SaJqbomjwxPAtqDcvK1UppYm5sc3spkzaYCuOxxsLl1uLSVt7qoPAU+DxDs9GdmN01O7kvcT53pJzd3uwznteqUwjDSknAsDszvufciTPTEyXro+WkXYnjbsrGJpvX3Hjjt775zVe/bt+b33jLoaPtj336SZ3kqlzaHPhLL2zPTk8Me4nLyBTDqUpUq4atXrc/aNfiYGruoOID769Eb2qVKgNktANXACml8c5aiwylij16ACisYYwj8VIYc860yawtiHNuMonCWM8ZRGHJWmt1pqKQozTWCMnRCSGkRxBRSMZIAARrtPHkuRCCK50XYSBRyMxRVqSWPHFiiHEUM8Y442EQ9JIBOQrCuJ33lOASUUkZSrRZJgIhoyDJtPde7aTeHBFQYQoOnAtRK9fTtMsF5xLSXIPgQnBbaE9kR5oD4zzsDNJuUfR0kpguEFCRVzwrZP2HTx6//9GnOn0tM/KZUxypgO214vhZeN3KTCUePH3WHr/IvvdEdnnFWSPAGU+2Xkr37pOzM5Vzi3p5oz9Z5W+4c9/kWLXbhi/fc261r/uFkyLz3u4UDDjAgsNXH3o2Iq9JcY5Kccc4qujfHl45sG8mxqA2FiBQMjRJzgjYyNp0MwPPXWE9V+cuL6ZJWIpVO9ERy+Zq5nXX4Oy0SPKqdeStywofRcV8Wa6vtBv74nZSurTdX9vq/ewb5sbK7bTnKAsL4/q+l6eFs0MBzV3jjZ
npqeW1zXOrm7XGmC2yXrIxP7tbqfjyueVBe/j+d46lugLCSKEh7wvOlQodbPOwkps9H//i5o+ejXVuAHWuMxSMAyEXxvqd1LRz3hjnHCHAzv4NAAABHFyB/+9wIz0goMcd+AdciVAjABFHBt4xj40qu/GqeG4CA87yoQ+DwAGWKiod9nTCt4fi4ReKi0sUhOGuKTUyWCSpMy5LC/Ru0L4Sw6TMoCWf+2YlAGYPHZxdTnUrw0FWKKGcc1xgnufonEtdzHD9xHKpElyz7+pDh15em5pZ3Gx94uN/vtrV4f69hRCf/frT2tmxenlubjw9sr+1PdrcSqJAnn2hP1Pp3X79ru1nLvY27N5jE72ss39fvEaaucF0vcpSNWi19b6N5e69wGd5kIFFh4PMJFjjTz23+fWvLt1148G9TZ5nphaVavVgZpIL2Si6Q6nC+aO7R73u2Fi4cbynglqt3rjrTe8eP7Q/qo7PHdrbaw20hWSEIgw7LZusrzz8tU8vXTp5cK5yYP++3/zj9//5B36/T+bMSnf+wPjlxy60WvmeydpcaWFyanzlVKX7fHNEBfl0qnLwmRPLlZDPHdmd2wkhSvG0yejyavuZjK+4+hC49dCVEZnCjNplpGbAWaNcGg5yW8tR8shXezpqDuK9c/rwId1rQXnyRa43AWroVF5Tg1feCd+8T1Nt+kN/8QevvfPm//GXH33ygWel4tYpLkQxzDjot905l+WDZCmdrlSL0mxi8nKzlDG3ubp27unvX+wsTkztGWTbevsscydecqR/480Qj2N/UT1yf9I3CYrBS38pnt3T/MiH+4+fFFzwUrlCAduA3hB2h4MWr2QojOQ7bEZMIIjcjR/78Pc+8dk//e0//LdOMmP9xEpbG8r++SN/VmReKp5nPk2tth5j/sNHH5+dlQev33JuwOUsRjMI0isAkQtJzqwzmU1X6ei0fct73vKRf30mHWbV8crqcocTP7x7/ur9u9cXX0hGhcBg0BoFKFqtdp4OolgGcRCUFHcgw6iya/c9jw1e/q4JPzaCmGtXcNCCCe8nNQYgfe0Affzr7xy28re+9cs2nMJ6NTCyXA2md9XOnL1sSHJVYjq54iqSTEoGjKdFXgrDcsQZisIOMztqlieEEEVhKuUaebHdWU/0mZmrXsw8OksoRWN6Os9SxljunSEb8thnmpEbqzRJQzIYoc+jsAo2kDwUDMkHmUmrYSkQQWt7qzpRttYKwfK8UJEk643pMiEAIAwl09J78NaWS+WsGHLkuijAwbAoZCCkCHRmmVA84IPeOnGmpAIWemM5R2DCUTgsWsb6gLEsGZXqY8BCCONUaxAUc+Fs0draECAZmCAINs+tCc4tE8Z58hCXSiJQyWBoTV4da1oN1dLE2WeeA5a84c2v+MbXvrN/fhJhu8epLKOtYZEWznhgjO100CAgevj/qPrPKMuSq8wf3ntHxLHXp8/K8lVd1VXdXV3t1fItbxHIIeEEYgANDCM8M8MwGuzgmcENCAmEEcgh3/Jq731XdZf36c3195hw+/1wi/e/5ktmrlwrV+bNe+KciGc/z+9BZGIPAOwQAa1mASiDIBahLfiCG9p6rcy70sHAalmDWg2FZLAQhHBluYimR42JpLO6ha7+9LPw5YcfeXg9jlqNk0PY1w+YQo58Px+IcjOsub/48//1hg9/ZGttddfh/Rfbg83HjqvG3BWgP/n6qRpcMir9yXdde3DbNefvf3Lm+vT0C48vvGTBUHnzHTt3v/y2l7ym+raHQyndP/3D5UfORf0RMalqPczzssg0EQwz5y3HsRIBFqMsy0FFAeaalKo3myfOLJ05fbnZqP34j/zk6QtPf+eh53tulKZy++zEcLMbKIJAHDx04/Of/5rzRAQ6MyIQcSgiJXPn9l17aNc1O89dOjnq5tZ5FcboZLbpGjhqVct9N08vNGJ27rtPnBhWW3t27WGvT51bGuRlnKYjS4007OWDMA3jRi1uNi5fXB45H7aS86trR3a39u/ZOVlNH9s0tantB685YEYm63QPHNoXF9WHn3n29rg5XgVKBCJK4nrDACRpqlRUS2IZxFKp4bDMsmITC6WCUMqtbnc0zOJakrAddtetKTb6I2ZHjEwCGDyA9e7fXUQAiA4YAAQKgrE+4q8iY8EDEHvv2RMBgScgPw4tARnPzqNnLvNRncyha7YNR0OpHPgyIDQeC60ZbBQESpFhZpIsKCCRDXM78IFUro+98LpPfne4+zmoJnZ+JlDxvMbWN747WuqQCLiWqOEwO3fmwokXT7KHMFCsxpBpBgBvIVCi2lxo7avf9hogtKZwFpWSkUPvXUkCaBzPsv1A5ttm6ad/bK4S+I9+eqUoEAjBekHSl4Clna/a73lDK66e9zbiwnuFQli2m0CZlwQg8oxrjRqIJqna5z/9D1/+5jkTVtkjOgCGKFYiDAxIlNuOXW4/+vzO6OTcz/zKH9Qqiiqh2Rp4cFCOfvI3f6FYXTG5AQnOGOe9A2xO7NBaEWaXz5+PlYmjYPvkdS+ceTiKY/aWgGIZAEsLhXNWa++ZvPcoY8QoUpOVpL/Qym4/onbuCXfvmR0N9Te/uXjPE53+KNQYIyntfCSVY+uYBSI754xnBCGIBCISs2PvCQCdtt6IsdEIkB0zMyECeiTw3gIwofAegJAQCMh6IwitN+zJMwGJIUBvxKeeei4kTySAETx7z4LIAxOjMV5rNwBYGWwEkipJxOAJ4yv98mJXz9Z4cnujEokic0UpVFqjqAYPnwMAZz2iJWDnATxLKaVSCAgAggSztwaiOGZmXRaGLTELRCFkZgolSCDWkWJTHFqAH/mRl+fFgzapC5Tpvv57fvnOt2X2qbtPPfH42WdOjxatHYJK00oBnBWDYVEIIYhwnDgjAj/mOzGzsWEgCLASJ1KFRaZRkiNa6ZosmTr9ovnCz32xLrOffc9bR5vVqUu5lD3X2yjaV6Zu++FB7WUQdaXZRN2D3jLNM0w1d+6tP/S0QRJGW2aQSjhrSz++aRPDmE4lrCft0rOr5p77/dTs9OSuyWIzuHKWP/qpUz0dOekUgSs5SkJrvLccRbHW2nlHggQRopKBdN4hIgAxSvZeAZFCQKEL4xkAkAGdB2+8QIGAnoU205+4x/zdfR2BpbMVgYEKLSOzBees9wDsjLVKKRUoYww7Zs/Wu5CZBDOQBSAgCQAE3jqhlDMWAZQS3tjlS6cJABAVKmu52mwRwWZ7efxNEoK99d4XpVOTO+54849UG81WU33ur/9m6/JpGbvm/Myb3/K2Z5+/P+sPyOtGWulBbrz0ZRfBWMAglEkQ7b9uv6o3BsvFYLlbn563XV2MemWpnWUcQ5QEjQqjjWEPQoWAYJ0nBCRAhCSUWlvvRb0Z53npbXHizAtKSlMaJchZJyRmo0IFBITOefZo2NeTZZ8txR7KoQ1kQEoJzKeaUEtNL49PXBi+dKfEEJA8gSgNgrbkfWbj45epbUcBhwDD/08qunR6E6gMELwxKGQJ9n2vncqWl4oRmOHkyilM1+z1Bwfb9xbdPlRa4JmCyNemw
I1yNLoVBpDJS8/h8mUhZImATggWaB2As0KIp8+faY+uq0AiE/KcyXgY1BB7+tFPd49W6rXDkVMKhadyIMA4GfqgmrdNWR74nU88AbybTRkp5z077zwzeO5n+v0f/tBNNx/8sz/6/QtnrnzrmdN74/KuoyrdVmHewjjZ5LQ6feRNb/zsam9eW/PeV74d+2UjikUadLqj1rbp7sYgG/ZDkBym/+X3PvIv//jZGTl48rFnUw/TUxOMk8pHRP6jH/vz3TONdju3FodP3ktYGVGlt/FidapaGg1a5zqrBOpnf/Bty6+Vb31H7MOoLGFyNpzdGbzxZRMPvHi5tK40UHgEx6wNSRUGsULhS7u5sei59NoFwuvSSUHWAbtcRoEXiEwoCNGBxNFwVEuy3fONsydOHrppcnJ+Yri+TGErnWz11/of/sBtP/jTf/nyN31wqhF11jeNUucKf80dB5J4Ku9kerM/NZWurw5sqSVtpgk1a43haIAy9pTofGi9RS/YWEFKZ35zdTEIKBZW59mJU2UziWvE/ZHGLJtt1slGMY8GvSsMKi8IvQOnioKQasaoK8ujaqWRNub1sHRI09M7adKsLl44d7FTmN61hw4obwftrUBICxRW6pvDbuZVStUoqKHzvcEFWZ9otiY6i4tRUHNGELJDs7XV3zZ7I2d6dmbn3ELj2PHT9Xq40dNLK53rbzkUNZDJ3nrjrcJP33Bo5vvfqv718/dcWsmUSs9fyBoNm1QCFgwDH0doskEo4yRszkxve+jpx3fPVj1fNZqmlZgZAL0pTZ4Zh9ayqcR1cEAcSAnW6TSqAArWlphtWQpglNIxyiAwuTeFJwIMgYBJhgqVB2/ciJCcc0LISIpEhd4DKQHETjuGwDMiYhQra2yW59ZYJiAS1tpIxaX2YSStw+5o4MZuYJMzmFAEs836qN9FQWEUDIo8L0oEkkGI4K02MgziJBQgRqNCBrDeWYmDkEtjSgvotbVBECJ4RRgoxewKmxm3FIXd6xbKajhIwjBSvSuXt564mD9zssABV3LprPHgnAECMIbueZpsDg1fPb407GoeFjGXgiS1B6bVtG++q/mKV8xtm5j61ncunz7TufmayYVt0We/9fyjL7pOP/AsEQNyIBC1MVEojEW0qtstRF2VUEApC+cwDqVU33j63E3Xlu+8pQllp9SWrQQWuiiSUIFCISQnwrIPqZoKuWsm2hiMbl2gD7z7SEV0DThj62BtqEJG4Rz6Eg5OV7QILvdq3/jEi8qZ/ROViXrx7NL6sYu9e49dmG2Fr79z12zLAumbXj57ReML58xzJ7ZWt3r1KOpudBaPrzIBAqYk7/5u+/ted3AyRe9H7EsPYIzBRPhw6uTKrk/+7X0ztdlLZqnUpWcQJJz3rD1JFISuBGvZe6+Ucs55ZgQYNyyMBSMe12leJcNehX1c/QI845gXy4RckuoWeOJM5/C+6uw0oYqtL713veGGKXlr0PzGi4PP3rcRWnFkT6vIbYGADHleaM3gXZSE41VwZP9ktpGXoyyKVDa0ly6sBAxgLCgRVlNfOg/FsNdv1Cs6DVCSrMYyUtdeu+fpY/d+/n891jFBbSqF1vSp1QEqIAAyaAbl1pV2GIWucM0ksh4s0vnV9e9/95HFfPHYenbsgXB2++xoxrUk5r5EXQ4H3jPkuY2b69VJlY16Vy51nC4W+1tT8wf/63s/9rK9+8te91K73LVrMmRTQWpfuTBZi9VEoqTsrV0pbTGx7+i7f+0DU3sOOyOBJLArsnyr3xfRKIhtb/nyfV859sB37lc9Xc09U7klRdk78/TxX3vo+GUrRBkHp1bazWY1E3RqZcNP573eUsK0fkqYWL36h95y35e+LkkmqZrfPYNpww3doMgWl1/ojM4GtbwUgyhUadgMWvGgPWQtTRlH0NJdhsIHwgEN9+1Jr7SDCycG23ZeqdwKogLeZKZYhKhj6xApefgNjc9+q7e22PvYX3x845ln3/dD73vw2y/ktjAedGHCBBIz+OCrD914zfX/8L/v64d4vB0+8tTSm99yaDQ8398oVzYXVaW1ZQanjz36gQ+88/Abj7qlv6zPr9S3C7Pf772zms41+v3F+kwpN+xyu1tCSxBq1u310fqVrbv/pfrj/+Xt1j3vTIAsKQgIputw67e+9ky18Yr7Hn38LXftbSWoKmrj1FKaJqNuhkEohMqHQyYSKK4sbYg3NmYO7OdgUYYl2EfiONWmDgaTmIwdAokgb774HGysR6c/91DU6XsPIojUtgnZaCxujNoPPXH79bc+ffYphKDTzyYn08XlTaUglaJSTayDfmdQVlmK8NGH9e/9wjO//ndvHugLSjjwFjxImmecMMaBNChscyb/6ufe+Fu/8fXHL2Wri4EPFjaHBVFceKn7I1P+e8WB0ewkCyOkd84KqJRlP4lUJZgGryzYtBb3uqPO1kjFiTEGyqzWrHe3RgGh9LLRrPW6fSWT0uShUEqqYbFR6JEgpYu8lgj03pues5mSEYqIMGNnvLWVNFKhVEHI3sZJPCr6ElUaxEiCkQcmr6rEaC51ThIdQSjJaENCRQkBsiCy2tlR31XiarVeluUoywS6KE7QYiBDx6TzXqACQDs1Od8vLQGFKExZNioxymh1qeNLVAqLwpXGhXHgnK02a4XWNN6TgLTO5HneaLasLrNi+cY7Dp89ff65p07smr52aeXstqnYaGs1756sXFzvlaEqLRG5qxNRL6QAUmS1DaTwJKznorRsACMXhmIdw08/Mfjw67DIGRiIQClmB2QhL+D3PwVz39181ZGJlU4I27a/8Oylx05ubeXDSih/+uf/o/7Z7/++d7wx728EMQ0HvSAJ0mb5gTe/9D/9z98PG5UGtcqgyDVV/HRzcvLSC4vrvY3H9sx+8dPPT8xPr4XqcDrZt4lvzCsVe1PBcG7hiJybFtsXZj/7r8/8w7dXLo3C3qBQgsIoYPbjmT8IAHCSVKUWA3E7z8rczE7VRVItssGxK6vP/fHvv/LIgd/9tV/92Gf+7uT5FZ2PFiaqpeb51tRGu1eaEj1RQFEtQMBhpq1DI1V16pq/+O2/MMRze+YnJyre2Lyrw1HnD35n//Vvbk1sb5hhT9LsDx+f+Zu/e+HEevWFU1tBmkiL9Waysr6czs7ffMOBF549NdgaXLq8Ds70DTfmalUIU6/37d957Lln9xy67svffvIVN++fqsSd/sps9fBXHv52a3r3Nft3XXUV5aNGIKfmmhSKRqNCEGhtS9O1o0EjrES1cNDTK4tZt7tibJYVeYl6vduup0F/YHWZRWHiWGtjBaBAdMjAHgk9MyISCmZ27JBIkKQxKXksFeHYdAvjXvIxH8Y4K4lAIDNYh5bZjkrZ7UtwnfWNPHfsGIAK7QrnDVMrjKUS4DSwZwsAqA1b7TjPyjgyZXjxiiiMa0TOgWXsuUyoSuCVW++N0NhqPcmKEImcNeC99c57FBJJghB+bfU0hlL5w0o4ImDr2VjCAtEgAiF4jLxhhwlIbkzS+9935IEnB8+cdexBGxspIaV0WXbdbLxrr+kXmnzd
O/DkSFtGNtYKWTEjvXJxEE3VkqS5eubkJz7+WS2iQebYo4yIAlmUhn0BLAyhC4P//bHPtq/8XaJwkFldFhjIme17DSRryxuBtZ5FnKQ6y5wzCmQkvdftUvcq9XTYGzSS+uZgsZqEWV6ykKPSTLfqrcbE0sYiChJSsjVKBmEQkfWVcuP7XpMeuSW47iVz8WwVHGDpjt6y44cuj772xeeffmG40fUlpN3MFRodBNrYcboQhLDOeceE4BgYBCAYZxAACJlACOEcEyMjeOsYPINHQu1YqACBgLVzDICegRHYWynJMTsPzChl6MH7sRJNADAmCYO4mrkD59kZZ5zLtEUmAhJCbvZdElEUuTgQEZEFCdxXlF0dIceBtWjGphS4Sk4SJJxzhOOqdnBFjlIEUmkvCAm9F0rIQCBghcRkqK/f3/z+99y2+7V3FYTIzOWQjcnzFUyK29+489DRxl0X7L0PnHryRLubgZzc+8K5k54ByTtmgUyA7FgQWe+jKEJJbI0E6S0DknOevQe2ktRId71D3Ze5xD/++L/dum/+t/70t6H34IWnPrtj37aBqQfcgRhUuBvKwF16kMMRtLbnQ++ASRFYp4RAAEbQJpcyREJtHDAKwlJbQdi26VcftRwM3vKG2cinn/77xy8sggZnjGMJJBUyovUShQApBDBr8Cyk9GzY2yAIpJTGOONcokReuBAUIJMEBHQMwEwkxqx7ax06BiLnFSIBxAIsKVdaS0qQkOOhKTAKREDv2QFCoAJnfSt1h/dLAYPlLry45GyZkpDGuFgREWJIzjjrLUm03lvjlSK2jgWljdbc1Hy/6HljCcA4h0gAuLaxkVISSddM+fLp50h36xNBXuqN9tqjz96z56brTj8wQp+rNEmSyg/8xM8///C93/zSZ6oVFQYiDNgM19/8lttmmnsvLrX/13991DGhDABLGQa2tN55o3UYR2O2m/fsrGPnWQihBAOWpfXel2UZxILZIfK5i6e9AxUI8ACEgODZeY9SEXjvUQglI+jGAIIgEwq8Y2MEQukhDk2tGr54Go52gnTG6NIxSVBq1PdbPbqwJS6X8UrXztX+3Vs3/kQE2ucoGRyjI8+0f87Wpcvz8OJpUlswGZe33YJqO+sSTAmVOdwVQDyvjBVKijRKAorimpLA0voMkJGFBCkRSHhLFwf+Y9/s/PzcdaFbC+NNEIMi0GqyUpkMnn9q41XXbS84lwqosGIwlLVpmN3FK+bTX8eHnxwmSeCpBO+99RjIAAWjMKgGfXvvN751/KkXmpX6lUX5a5+4+JkD1+2tKQGYG8Lw0G03f3TY316ELm4k5Nk2Quc9oGeRlG5+mF+abBZJPSjag6985W/ntu2qraAqRlJ3fuGn3/G175567uJzsjqZulG/q20pdxy5ubd0Mds4VoxM0R9Es7OTEzNbyxfBW4qSlqwb5I21YdywDqG9lkVS3nTQ/8nP7lrc6qdpVNsz/9DjixvLhVf1Z58fKCaUpF3RrGe33rb/kYcut/t+OHQEiEqQ18BMggRhWVrnPXkIdfay627cWLx05RhikaEfKvLkJ+Z3JltXHr9m+8Ff+cXf/p3f/pUW8O7dzde//ODio09P74i2Ol0AWtg5u9nDrY31zfU2TlaMwdK41c4gilhqZIHNZpW8NQ4ZeNtUxThTNGS/ry5cGXCpJ7bV4oSIHCvhPGSDoXUQhA0VRyqI166sVqo1Va0yAfkaiVq7055tNWuS2heeX1m5OD03u2OynlTmhe2B49pEajhq7Ty6dOrBweqFpDF3YM/eYmspDWu2GBkL2dYam5JQBYGQhaQwXGhdK6AxKi+DLze2Nllya7aylukM4/0Hbz79zNltczMvue0lF8+cb02q22/ZXuZH/vXLz6y3ByBVd3VDCJzcuW3/4Z3sSqlgZb0PRFeWzx25YV/k7drG1aYDZ9k6p8scCVFQIONYVEIVIxIB27L0YIModc4bZOfZA5JQKkyyIneWnTfaFaGI81ILiQi+KPoMEKrAe6udBc8SFRuPSAysjXfOa2MK45RS6AyzL7VxnoFQSRnJgJ0jRVKg9740RinJnqUQzagWy7DMSwlEzHkxGo4GiUqkCsuyrKSxosB4Z40rilLbcljmaZBEKm23l5VULAR5kRcFoms1kG2vyJcB+gd3ZtMzONmEcjAyRS8I1MRus20muHbCX14qv3HfcCtD7z2DYGAW0NV497FNrw2R8B4TEgwojZtOig/8yIEb9noo9Fe/cvK+J3uOg6dOX+rkupdJtpFARnaCGBALXSIAO0FGhOSvaen/8uG9WdnjwgaYC6miMI2bE6nKhsMLSVUAgZRBWolkEFvHlVBaZ4nQMoQIMVHEwzsPVn/6HfVWa2QtF04EMgwlkEQZhcZ4XRhyFS3Tn/3PT46ymgL/G3/1UK0aPHp8cWskANz2Or/jppmo38FaYPrr1+3Mnc4uXBqZgWtvbqHzkoRhjwClhXMXypNn5c2teiL7KjWhBOfyMGktbVb+8v/eb1y82V7yupCCQOKY0zQ22jtrgXwgpTbOe8fsEXH8EQBhbDJmABjvrQEYEBgJmR0jXiU8AhCzRJpoNjfz/rNX3O4z3VfXJuKalpA7WzBhf6g++Y2Vf/rOaq+NtUrYrhX7r5l1W1l/WLBHhcCS5vcswD2PA0B/q7tvWwNd9OKFjXqzcunykkgb1TRulz7Pc8EewMWRCpQsjVVKzVyz9+wzJ77yt1+hibBSCZMw2LVn+rGTS85p8hjHINE3Qm9Go5LBFqa0bAykQbS6lb3w3IVX3DRBp/2zF6zCcPqCCsKz87sqO6da/coCSONksrFx5vSFR+dm5kWEs4fnp7j1rf/z5JRvIupqkrTSqFYJCIpWza2d30pqzawD26+77ro3fK+Y2FOZ3FUWUPTZlqPWjBSRd7bXvnL2wS9/8viTT++cqGJZe9nk1MHbbv3on3+GpsPRUrm22GvONyu1YLU3ajYrC9ub62t5Jx8mkfBWr3Z7N14/641eWs+V3/bkkw/Wq8HCzvSbj373288+t3vHniRRc1M0teuGrc21+VrQ7rRZDLvd3FgHwnlpyBKFaN2gP+hEouE2+xJqM5P1n/3LV1f3fLXn14my1lSUudKKiZzjr33qYj3YV6VaNRZ3f/KbOxYWJoKQlB+CN9ppZ6daQQXLR7/w4NHrpj7/ZOfSutu2pZ559FS1yRZHR49eA1H4wKOPfeSPf3Gm2vT1WqU+TdGl9vIg4JYMadTdcC6Hlcof//LSsRNNFkgSR7YgEliZ/ZOPrX/qs58+dG3ylre8sdra8/XPf+HipRdufOnRh/71yrLph0lUnYt0mY9yx9Y6m7HjEEHnZXOyNsqKJIw22nFz4tYil8LVsBnEjYGGNa44xb1RZ1WkNTHEez7qv/jV6kA2nGi//13XrrTho399bzw301T1HXumdsrGK249+Ozxx0ssvdOdIVRrVUSenpzOy15vkCW1QAQM3gjV+Oo3utO/9PkP/sFtpctINZ1PBU16WzPaR8IzrxXt54k6v/2Heyi95m//8IHPfXdpmCethWkDzfb
qmvVydQUAIAoVMoAEZhkn9WZrYWXjtAoUSGp3cylkrg2C8M7Vm1Gn0z935sUdN94ckCyzLAlgMOpaZ21ZCCkz046xisQ6H6Rpo95ojbKtpFqPqnWdO2IYZUP23jg/GuaqKmOVOmgTs85KJWUQBAAkhJAiAI8gpXNDFQgBJERllHcZVSDR+TJJqkVeAkOcBM5bJmrUJ4bZCBjZS2+K0WAziGNX5nGSGu2iOCgcFHlRbbg4TUabGwZKEjS1MJ8V/X6vbcECun63j5xQoDzRcNSLkOKg4gufj0pvs1wPk2ZjYWEPs1hb3JqaaJRmNNOM6nVVZGU1AGNlipjYstmAah3bmVgbgEZHDOSs8q6C/JJbIQjg3Nro3CYkrJ4/KTs3i9lmliMMcqhKKDOYmsS+CZ9erZtN/NJT/VxHhbp/Ry2otmqTWjtngq76jY996dc+9o2//ZM/2lsptlVCDKXNlr731buv3f5rX3th7c//9Mt3vP4maZO1zuZA98PCp4q/9PV7ijKotbuVNf3B//6me770pS9/aenI5LXDJR2EtdJBv92RcfC2d83sOmgeO6X+5Rsrq33ypQsRA0LNjh11i1znBSLVkiAEbFSCvD/MMrNz28RC3fWy8qETZ77zoZ99yaF9N+/a9cTjz6aCkOTS2c2TFzeARKRQChEFsixNFIh6VUZp/NxDX9577fzqZnH+ZMcPemmpP/TOuf/423ep6cetvlBeschgFE1P04+8Z+5jXx4++0xGUqaBTEDYuDLKyyePnY4Fd7prb37dm77zwLca1RStnwjQbLRtoeuz0+tJEDXSAtzU9p3bOqOiU5YbW5U4RXfVW5eNsulanYp8sNVLZcoMyFaFODE91e9lGTsb8LAcDk3mXdFq1Jc2V2tRJRAYc7qwEHc6eZGXBMDMzjMCIxEwj4ukEa4WraNn782YA0MIDODH2ytA7xGBwDOzR+ax7QTQKyGcZ2be6oxCRc6xBwAEiYyA1rNxdpgXoZQCQDA5giBUzrLzPk2kNSUKQEFphL1RzkI20oqFotEKjPd5aWQcAwHozDMDovPAHh1bkkoIJQJBLISsNHZcY+RxyQUJBALvGUiOXyeARBTI0ucaDVZp8KqjldPntwpQSaQIUJJEZeanXHvrnIitYGbv2HoLQihEJLDeDK2204Ln81X1qz//pxfaSkPogWWgwlokBHmhjbZJJbSOJfLKekezC1Xsy0IJTmuTYa1WtNe87hikenOmOtlcv/KcIJIkyqKXZ4MgDYZFEYaxMW7Y3oylS4KYCZR0zptROahUkiwbefbgyYPUfb5xWr7/9ZMve0unshMxNQiMUpAAFeu9M/Chm44Me+Xiid750/qL31g5frnIMpEiCZQ+wNJAJYicZ+etE94yKoml0czMBN56QET0zjERITIje+/IgyHase26UIjllVO6zBGRENmzBwTv0Y8ndzxOs9A4oE8oJPO419wzIQOhoKsVHt4xCc/IQFBoyI2HIaPw3ngGkOTF1bABBMwykHrkLHolx75vJiBSEkmQksQMQADgLUdRIgWUowEaHUrVCJXMRtuD/C2vOXLtbdcW/QKjmhKBCCJmz6FTldKn3Uo1O7Qn3HHd3tufDh955OyZ5XOL0nkdCvLDvARkwZY8C3JhLEOypfeoscTMs8pzVqGy1jjviSBUwpB1pkSlVkf8zWOXqh/52FtvrJaDfOe+eUHAvk2VwDihyjKtNascw+Xi9LlRblqovCJw3iEJQQqIJUpFigUTgfdOSfLeIeJmEfzztxa3toa3HNx/Yt11nCeFXLo4SovCD0daEbJzriwYvDMmSiIktM6MtnpT0/NZkcecTyeg9bBWT0clW4vgGQURsOOxb5689wAEBI5LJBIwlv3AeS8DiUClsXEU5i5HhLH3xluHQGR4QpnX7Ct+4ucbEzvSx+7b+Kev8r2PEbhUCQL2zhohaQxzYGSvNZF0xkkh0jQc6mFhh0rF1mXGlHj1jmSBbX/p4urT972w0T1/5XSqZOl1GAXDbPAvf/NnO6+/fse+o7GAE2efGmaDcxcW3/yG99zz9S8CO3YGCDc2+hxVumyfvnR5ZG1MUsaxKAtCYGPYeQSQQnrJBH6s10lBYyQTAzurAQUgZKPcO5aKsn4JgEjgrPMMjG7s1nIMCOAsIkI2slqDtahEhW3hjQEPMgGnoV53MqxcvOwOtwIPjMpqXbY34Mz54Myqaishklqms/9HKhpaA4CCwQGQ9xjaPbfUeTJbuRL2LVVrw0NHcDg5qmwDoaF7AZrbOQ1VtmaATWXWWBVaz9Xtemo2H/UoqTbaWemznJDBGASpS/zo37+gr3R+5qdunJ0cCsHYAg5nvvX0qdNnOq96182VltC9K+jqf/EX57742CNveOebYznzW396d6VR0S737DwQqhAQrWNBQiF88VP/HBTD2EZZt8xYz+/Y9ot/dewPf/2m6w5UN8zkT3zoS+3+VBArb4thbohiACnR99p49K4PH95/8PEnv3Hi2CfrcWCN6K8Nw+aV9uqzyyc+2s7un5xP7zgsPvyRM/efXJmcnnaMpbPrZ57X+TKRLgq9/fqb14YTe2+46/rrNh645xNBnHZ7g+vuODC355z1RV7Q5rqppcRquLA93LlXVuoC1eb2SrB94vBDj2+ceXZduzAvTFSDba0a5sXNhyadkA89urjeNYLJMXjnGdCzF0IRcyVRlTT/5Kc/edM12554qvsb/+XwrhsDn1+uzWzLe1bRZtG598ff/5pb9v3DRCveNN0v/PNfv+fGa8r+2SQWvc5wuLnV7dpyNFBRjVRt9XJbc+hENS/Dsr8yMzvBoB1bRpmPBiiixaX2qD+yvsizrNCyEqtIxYIzHyhHuULZqk70R3lvaKbnts3OV0ajrVCxiivtcksm+tprdp16/lSRmc6gPT23Y3Zmh9XZcNgjZIUCfK7CcG3xirNyJplkEpeXnji0d19rptXpahW0Bp2eikmSu3JxMQgrRWlL3c7LzZfcdHj9Eq9sbTQn0uPHF7vLplGdeNntR//hL/73dKuBMkomaiJ0tbTy6lfceOL08rOnlr/57VPf+4a9qR9s21lr95aSSmvpcr8onKjUJxvx7Nz04tmTlepVV5G31htHKOIk0bmOkzSJImv9KBsR0jgnbE3JACBAIERKaavJO3LGlUCAabWCKK13igSy8M4459hrBicEBQHGcQTMiKo70KNRbqwFZGawHthqEChJEnkSMpSBGJcuoCAAAHQebFmy96GUSSgSSWi9DKPBYFB654wgJaNQeoYwUKMskyoUiIS2kgYyqNtM9/vdSrXG3osg0lkexSHJfqVe6tL2zKBZ7U1PQEys+06ArNUnAiUj6E801C0H7Mpq/9K53r0vKuuZ0JFnoaR3xjtmQo/ISFqC1vme3dV3v2bXLddXqsI8+lT7G4/bYydZkWZBniOJAsAzGwbvWXgHUpFnhCjSRe+2G6b/5H8cWdg+jFVq+/0kro3aJaG3gts9KDKZ1NN8ZExus15hWRIqIYQktraopLHnXMRhWoft0zjTKhwXniN2pKIgUq
50JViB5SgUIs/DT37pdK8T28JLdt96+JIHoaRk5+PQ9pXt5tF3njyz0S6/53U7j167e1tzxyNP3Hu63Q88EqFxHhE8oGVLDH/y8cd/d/723dskKcx7JQRNXR76xj29U6dzKyHwOBg4DIRzjr1HRERyjpE5CMLxSM2PA/wCcNyiNy7YhPH2Z9y3CczMAglBjLnWSIDk2Avihcl0kG8a5546s3Jgp1iYpPnpMKR+pp3nymPHii/f183aFDCXWf7suStJq3JxcavMjDNQlMXEtsn5nfPjVfCyt11brLU3LmylFcTIVifZgkUKo0CQ17ooKpGUoPSooLxUaXxheR1YRrVmJrGjuSD59PHVrDCF8zOT8dy0siOTetEdlhKwOZWORr50ENUS9MXTxztvmK3eeUfzfPdyUcjOVnTHnTt3zMY4wNmF6wyLbn8d7NpCY2K0uAEkummw9kxn/d4Te6q1xOVJ7gLA1fbanhvn9x+54VU/9KFac09Q2RdXmxZCQsFSVieF6yz11pce/OZ3vvnN+5978lQSyVBCJaqWw0aC4eXF9Qfv/2xfQH+rSFuVknBiqlKud6YaUd/49krGhoHEVl8TKQ6iux8/t2ua3v2+9x2+7dDnfvLJAweaCy1VWZh47NTiyVOXZluNvbNHh1uVirrxwJ5X5H7r2LHPsDlvizwKExc5gfnE/LbeicVKLU4bqkEw7KsbX3KoGRzcOHVMxbHc4TwMYtHqQBTja4crn4+4dvnSVrWlth3de0Zv7Lh519J9z+ZDXU0CGSZnlza/8aXlecPbD0iTF7v3TpHw/U7fSTUM8mW62N1sv+X7j+45JEw/660EWxf01LYsmKwTVUznsmyK6cY1D/xr/qnv+CiqFzwoitIaCmKVJJE2wVJ76spj/J1HnqkmzwfSbm2E192yfWbf4TNn7o1Syc7rkS0LU6nEE9OtXrdANCS9NRY85OWo2Yi/8aVP3HXLfypNuiPYLtkGSeqwNL6MW5US8ihcePpEFgRTiVz78B+8Yf8Nuw3QXe+dOH1q5V8/9ZgMWqvt2t99+VTuaGTKSjUuDaMAU0KnU1jWAolimpusddp5oCK7OX3l3JKCa7RYCqDinIFyU/GEH2kCR2FAaVoYY1UioPNDH9r9wQ8d+Ic/vPuBJzrPLu/IDbzq1Tcfe+Y0AIzxAdrzzMxMJKPSloXDfDgKlVKeVQBESCgMKTMweuROnTy+88bbCNTstm1bK5dFUaZJqz1YqyaJyKvWeWBO4qoQEtE3amm/6JVd20jqAoJClTMTkxubi/ML85cunpZ4MVGIgcxcEUVxoMIiz/OyTLFJHguby0AlIl7bWK1VJlQYWiectcw+y0s2hsErisdOiZEt82IQRdXSaOWNUBQqmYSNwXAkhRiNesBYqwZ62M2LshrFg75pzM3EtWnXS6K0V0vSQadgUzoOa0llaMu0UndIKhQVkXgAO3JZt7PevqTi+uyOPYNhR/IEQb28sDTojmqB2L0w0b1StiL9e/9pv+PLN92297tfW/6Df+stlZGKyVtrtbv1iHrX94hEFa1a2u3z2kb6he/07348e90R2Zqz1ZqIA5boQXFUn1jtjloNJYUQqGO0I102RDzo5oI4jUPJqiyyX/iF//y6G/f+3kfen3dG1nTKgb71wK4GNG/+paqYh6/cf+IF59OJqfZwtVKBmf07n7v/9PILy8Ps2ANvPPTgpY1dRw+0O4NDE8JD6RhENGvXrNTZvt35/j3q2hl19/H4M98+jUoJAm+cN54ZSuus01GIc5Xq9PTs2trq3M6KREgCIVQ8QY1hUZ5ZWVGSfvk//fCzzz5//zMnu2UWKClU6LTOMk2ASkml5PRUc7IRnru4PMrw/PktWfbf9Sr6zT97/8RU2/GjONoQeRTzAotpXXa5eyEu+/sbNS8gK5zTHnolsUIhVeDz3ORD2DG546UH9jy7sl6JY7O4dO2dt8ZpMwJ97tnjcxOpIFpfWmxny9CcT+u1vTt3lPnV48HC7EwgBDMUzg0LY0YDneWVRtUajSB1ZjbbvfVhT7vCWbe82TcmZ2ZJkJcmUGEiAynBArKn8Yn9aniagJAAxgf4McCanR83vFgi8uODHyCD9+wAkEAACgeM7Mc1LAjgPDt2hoGdCwOllAAGJbA02hs7ArbSNqJQCFGWuZSC2ZfGdUd5RclGJdnsDUBi2kz7IzN0JoyldzYkVThTlgWpII7qRTkqtPbeCUnIbIwzbCTFSbWyvLL6z3/3+f/wo3uCunYewAIJIELnLThNoooYACGCdLoMU3/T0bT+zZW8HRjPSkXeOpcXHFZIRmEQSGVBSAtgrEehmF1ps60N01mbW3px63P/+LEXnu8UKnFMKqQ4qh3dc1sS2Keff3jVjIQOwfr+YCgVGAcUWFY+hvCNt7/3iw/8S551yCFxmG/ZNFQEdSry2ZkpCus33nzL08deiGxRTcIyyycnwpkIIAiHA10WsjIz2bfeF1Z4iiuxDolYHpiv/tQ7Jg4e3KxslyKtsNzmIQFnmAiUBdSSbD0OmtP1Qzfkt9wy+Z0Hlo89u1mN6rXKxGa/OHO5b3IzGDnDMNDWeig0RkIa7wyAsVYI8NaPtzuIdJXkTBwKqggzGK4rSSYnksjOMYCQwnsHyP9uSWNARATPQMjjoSwRCoHWe+88EQkiGCuW7Nl76xiAAiHGGUdCQEFE5PzVpo+ju6oGghcvrrdzG7ByiEQUKVlaZuskQaIEhaHOyzANo1qYSAiaURiEhTYTId542647bpw4cPRarSqUzAhL3pfoSwQiEXnnQCSysZfRVSvF7bv23njX9aee7vm/+u5TZ8sgSiarrSgMTVHWkooAc+iG3SJRzoavveN1/+FXPwiy5tlLUEIobYrSWrQahOh0s5JNJRQbQ/3lhx577kn+hz94E8Kod/6puertrNGZts4H5Wavun3n1pXhMiRaIQEysjcWGIRU1hkSEhAZvfOekIJABoK00UMDSZA8+FxnYU9l2WIB6DKtSxc4qNeSQT6ymZEyCKOo0BkiGG2UlGSpljaSqBoHdHNz7Q1vn5xf2PHwg5cfOaWOXQYhw9JY6/yYRM08Rl/DuD2T2Tnm8bvKDFIQOFaBStOJbdO1U5ePBSgFSkYmIcHaw9vp537xDph4XNbE/t380sPw7NN+qEkjGWuFwAADUOjMGNvMJJEIy9ImcThqt6+YolatFZ4GzjJ7pRQgSOUDgWtLF3dfu+/c0glRSTpnu0Fgk6pIQ9FZurBSLP7LF7/687/8weLC4Njjj7zzztcpqQw4LmylknS7g9/41Z/rd0a2yGuBIObBIAukcsYEKsjMSLG3xkiS2hVAJAQS0TDL52amrIPN9Q0phHNWBaFzlsYpPkGIqL0VQoC3UgghpClKR4KdF+wlBkKWRc7oLWFAnKPwrJAsS5v3OvDEE8W2mbgyJbLBoNOBdpEsDoN2TpVmcvCGw80EPv/w5f9PKnIek0gKwaPMKCXnpqQxcfdyec0+u22a9Mpw9521ngTZgLIb8IyW23dmmRkuL07VJ5bObE3u3WV9b+pac/fXr
nvgvuU9t777p3753556dIQOnAePqFTgTPoP92x98oG7b9rFr3vN/pe+/aatdfG3D54tdfUlb3vwlusXomb6re9eXOu02n3z7RcekYxxKD1oQnaWQSIAggG2AKEIlCj6o9J7ITCtRlmf251w2N/x2h+/gvkgg03IovmJSRH4bK2jmdNays54T6Wsy/TgsxeWTp99+KY7bl09c05VZX9l0L586ot/8raCPhPPd3Le2H97+fd/dnTf6x6PZAWlYO+87VgziuIoSiYH/dBD5cLpp8r+aUFkNcg47Nm83euqwKFsJvWmlQZD9mSWl3W04kgmlxbNd9eO3Xe829MYS9yx0Ni9S7my6wtti1LG8LqXb3/6xMbpc0OiAIkEgXGGgMIwtN4OjWOKL2wkUdr4o39a/cF0z45Gp+HW6vOt1qEJZy51Nv7qDW/6wEAtfOLPHvjiQ6fVn2SvvpZvevW+UFUYbZCaMEmN8bYs5ueaGAXrGxvWm6mFaWdy8GGnvZFUJx27oj968sULu7Y3du2YYIPtgTi7Udar4bZmZTgoaxNp3JwwPqzG1Z17b3vkuWcn64FUbpRthih8xp2y89RzfTIuisTO6b2Yzm10MrQ+DKMgFWxMqYvcbLSqKYiy2UoKUK3tu1SlXhrRnNjGcrI/KAtXkCks+7WVdhxXh8WQRHjq/DmZ56OBsVq3alNneGXHtrmL506ubm384Pu+ZzAY6tKDdXnPpZX4Pe95BX3m/udOLp9b7r32jrnVpU2lRs1peWVT79i559ANR/ub5y6cvcwGW/PVq8n8QAGTY+8dJHGipGAEa+14LBEoRcTaOgaw1kipGIiBnTWVpALMJMl5KK0RQmbaCPTG5AIJvWfvkigSgtizZ6+dKa0x3jJyHMeIlI1yQFRKEaBnoHFzBKK1Bpz3JSNiECjrIA4TcFqyA2+cZ8des9fs41o1ThJJPte6P+wGUQzAWZYJFYAgz750uXYaLYG1oHu1SFSSolLX1i32NE821I65ZCIqdd6NVWytJZubzNZrkQ8kyq1t++SNN9W+fSwTJMFbj4CeASUDewsCSaGLhX/5na0f+w83LaS+2Op95978d/55lUUCkhgMjwlvcNU4DATgGch7x0AxFuZ//sTB9/2H+So9h8O+drG1ohhabxBJ9/ICRMiAa2u+l8kAMR8ZoQCByQsGH0qFjklKQPvml7cWZomxnQ3KsNrwIgaSLLwkZO/Ze61ppV37038+gaoioXDeEREBMTsinxk/7OJ/+sP7gQLw6p4XL89Vlk1hl4YFsfLeGuMYQRCNJ5+ZpufP+HseDaNbDpIAlJMXy4mvPVk8/kRv5JKkylnpgEeEwoEd12aqQFpjlQomJqc2t9aN0whwtQyE4d9RRDjOoAECACIw4L/H0UiwZyAaX4FTqbx+9+TJTm/tUrtH8psPbrVXbZSEKpDPXBicX1leXy99KSRYZuc0aye//cgpox1Y752PQti3d2bYWb8avZlK9r/8wM5u5jPx4NcfmEpb+/Ze/zefeIIxNd6KOBlZI0xZDZRB2+/3zw2z3bVmJIOh1QZ4WyvOuxkPIJWKjF9I00vdCzff+ZL+khshnD53edt0SlKsbw7jhFQUbWy56w6GP/7WKS1n/urup18//bJIJT7XgzP9lQ0ztTMFG000tvU2jve6vaVjyxvPbDSwsX/v5GS1ouLKkdfeMbv3uursPudrJccoQhnWwXO2cXm4/mJ79fSjX/72yslzTz5xKZytlHE4sX2uvzns9NxCpVmfvWZ2kh751n0cEUUQWZEZPbGzcbE97A4hTmI0LvN2cqqyPHSi7lfzQRKiCHF2Yf++fbd96av3TG2LZ+aCIDRzKR1aEPtu3fXEd88/+eKLd7z6R7yqnui+INUGTZn923eeObahM43aZmWxqcT27TtsZnvdfr2V3v6OH/jP/+WTn7nlk69+6dx/+/P3RMXl3E2CqHfcgR9+7//VK3sqaWViwRk0i0bO8c7NwVlUKq14b22Zm/m55orlapW+/uApNdU4t9nedu3US95wx4PfuT+NwslWwrw5O7f70smg3tzT2HVXxY0uPfLg3leSrCC1qlKJp/5p85f+Z87J3ED3rTNxHCCBkgQEMiZJWK1XPHCjIdeXV0upL2QP7b9l/p7jOkrkqDNgxjiNLFCvl9vSBRICib1+T8mokkZRQpsblz74Y/8hrQa79u3bXSve/M6X7djhksoV31jPoLd6Vl/qBjJd/YNPvktXNkZwiSHYc4M8cMPka959Z+8K/tQHHtkYTlIUNqqRHWZ2UE4sJFt2mOtebkZJEpeuWB8MBKmgorTKDh7ZoX3uKSjZI4NSkbVS1CVQ35Vbw8UrEE7A5AG2ayg7nh7/kV9M33am/n0f2uqO4surZ8erIM+tlJhU0kTFTueFGQWBBBShCHTWba/3pxZqr33DS59/6Kkry1u1ZrIxyPRwECZhacs4rQivjSnrlRoyM4oiH1VqsZRyOMwCCWyNIikpslr3hptChf1uoVDkWdZszggU3tuy1GFcsd4E6AEgkikBgaCRMcxSe3Zee9CMmKYNPdqQUhpn4jgutEGiQAoQyuh8cmJivb060ZqWDka5KbwNUFkk7yGIq6PuAIV3TFmeVyrVKEiljFHIJI4b080y3/Jeh3HsPQ2zwmgbhjIK4u7WShzGoVLDol9txqmLhx1ddrYUutGg77QG9mWuA5DtgfHM++f0rt2Xwro2+syth8yH/NRHPt21LDFUjBymOMyLMgc2I0LYu2B++geqLxy3T52zr2pAiFzmPqiAg+gz312K65OF7TsLLIQg1Np3uv04CaNYlVu9KEx8btub7oFzq3/88Ueu279rWyNA1nGjkEH5nh98w9rK2ZffeP2v/MbHhquLhw/s+Oq3Hwyc3riyXE+T6TT4hT/8y8lrr3vd21/9ha/+czax+NIbiZR3o8Gwnbs8SGrbFQ8r5em33HKL9tfc8+TysD0ShEkcGG2dsRQIYF7t9nq22DM9E0diZa3b3ehDHKDka/cvnD175ezqxh/+/adbcfL6177ymRdPPvbsqWY9YIA4DQMla9WqcXx5uX1h0XgIR+vZqOj96S/t+6mfaTIdLzd7jMhmez6oK1vRPmUmP+jawfDNbz3yD/efOHW2DYrjkGqRmmgl/UHBMe7eM/n4sw/XkyAhKFe7OIKNjo1CHnXdQmXuyrBcWu2apa35/bteOLGIKK45dEPZv+oqAhAOaLPdjyM9GmWtNBWCFtdX1UBmvRysSdOgGoWrvUG/OwyRqpUYiXpb7Wq9CswQogf2AEwwZlRJIby/2nbl2COgY0bE8SGfmRGFYT8eldAYyzL2Ho2ttojOWUEEDN5655kRjfNKkPOM7CWJIFBSgXNgAR37wnrJJgxDqchr7XJjLfbZly4nUmC9B5NIQs8BqkDEw9Fw0BuqQKEFB9ZZwx7d+NDK5LyThEa7fGRY1B5+PHvJndUbDnAYWkYLzN6jJylJWSBCC2CRElbSw2jXPtw5mxVlY5RZFUkCitNkbeSH7Vosa4Uvw7qiMADg0vsgFaZfGJge5Ad/
+b//7YVLp8tAZaUh8NVKMhpsPfzc10l4YghVKER1NNpwznlGInKOC8dptXnf09/N8r72xjOQQwzjpSEuXPeedx45dPrJBzYGmyGlk9XWcu+y7nvb23rpbcEP/sD05MLEiQevXFqkJy8VSxcyEpGgWA+9d8UHfvD6t77Kb59eUTUDjZ25rQpX9Z4lCes9ai8oBCRkC+xkyttvwPcf2u7dAjot2Bnt1i6DHijClnGwttQWYXXxEpxe1F9/eHO5h5RErI0gYKG89dY5QkSPJAQxLl65AOwcgAqk8+7/T8UmBEZkYPr3qg8AIEDmsXgEgMgAhCQUja8ka+24vxhIIINnZucQ8SqE342vyqtrIM26SZJAQ51XfpRb7YV3Fq2bqwe1VCSBGw36lgdOgdCoN03hGERkmQw7W6NtE9OHj2yL4xCFKnWBngBRcOC5ARQwFMAlWA/eCZ8zGVFPDt+17Y9u2X3x6a2P//m/nL7Uq9e2VXcsHN61vyhH632b7r7hzttfrZeu9DJXbSCj184658ctYFo7SS4NVTUMR71RECWDZPKs0ccuT03la9v2H+guXYjCMpibE1LH9Ub//Mo//suJLslACustAQgiAiaAQEXMXlsDjN4DCtDaiRCVJG09xcmotC+e37QsIgUsUJJgZ3whA9ki1THOOqfVmOvEQB4iJdNaLYkmINv4pV89MHOja82Km+6ae/uZhR/+madWuzUEIckToXN+/FMAgCis80SEOG5F4DHQCgGBsT/oSiWDIELrrLUCiYRUwkynIxmuRK1m2bGpH8xHpo429x4FMsG4UE7KwBZ2LFYDMzEqJdm5OBR6OJqbmMydyvOs8DkrZOdHWRmE0ZmltR1Hb1vYtn9z+cLUbEMqpWIqhlprE/js8x//vakg7DjdXjn+K7/2ASDHFhnF5sZQBcaVXYWchCFr0A7SsCKIB7rrvBUycAxlYQQ5KQSwDwOllBqMBlIRjftuCBhRa+M9aOsAkQGdA0ISiOMeQRqH5TwDsrF2uev2OVAJlIMRylSp2OrRmCBni+DCRuPB1WxyR/Cyl4T5KLu46J49oy605dFD2xb2TYnIB6H4f1xFE616VjoHDoSynje6edddf+BQzenjrV39yTsnl85utuaV3jTkksFZvWtHOwxqtdmFYUGU1pGmWCTSjkiNjt6qMX3xzS+Zfe7JFWABAqUk6wovEEAamzxxFu8/tcx/eT4ADCm0JJ8/A4+dWhVChRjKQAdkAkbPnp3z1oEkoJBEwFaTEkzIDGakKRStuenB6ma2aZjDQe5RYhKlZSZTGagUs2GXOUBqzV1zfdZd9cPzLKHhV5/5zi/b7koY0sqlYYCIJMIE7fr6y946lRVXCiN9OQ3loBGPfuJ7mx/9+prNg2ozEIJApTbc2Zh9ZWd1aXKa2xceYj+QQRKEYZFl9zx4fv51oRR949sBShGJrN/32up+q52lOSZ3P7rcz4Tl5q7tYrYZxso2W7C2jp1hWU1UWhWhcjubotdS6z2HRGPHrgEYak0E9YmqSKMyM92t3jPPD0Z/a37sLeEtd0guV4NGjXPX7F489qmfvv0dv3v6m0/u3za3fd+1L3n7wrB/kZRCwVUEqVR3Q7c7g1SJmWarYy5urW809l3HVkRqtlEPhjajUAy3Cp3LUQeH66sVqhVBZf3yyotq0GzuDNBlGWdmEKYikfmpJ+8mTZc760dv2Dmd1PqDrBpCNytyKyZr6c5dLSfilZGpVITrDQRwGHpVSTtbPSCKQ62xKH0J1sig5QxaT2jE1rCrgolE1khsrW9enttZ72zlgXAX213r68GwWF4bVCZpaWXQz/qvOToXiCxOg2ooQGegkEJVDUJvyonG5Du/75UX/vTfnnhmNZHRDUdm0kQhuIP7Z6/Zu/DYg1/PTX92dhuhNdnValgpBSlljQlUSOC9KUuj+4NhqELjtIhCAPLeI8k0rggUYSAIFZL1AM6zQJCKEECSdIzWGxWHAAggvQMkaT076/OitNZ476RSSqokiMsySwIFqCggtlxq7dgRgEMEgSSIGYWQAskCAftQBs4ZbbWxHpiHeWGFrFUTlcRcFuzAOS7BAhpw3hiLQgghhAzUuP0CuJKEifKBGqZJvrredTm1QlK651RGgalUK9YQklAVldQCoyie2DscwqnLS+jBsRv3d1njACBQASmytpxq8HvffeD73rgzCUfK+PseP/dPX8klJMZZ8uXYPOWZhRRhGGpdAAoGRhSMJIB2TGYf/NFW2jptO11dBNqq0pM2DB4FOABZjoq1LfOVR8sXLridk/Hrrp/btt0DjsgjYcgopEJtMg880yhjxdZzUGkY67wdaRuiJVMOLVtwtNjJf+tvHgsosl4TOy/GGoxDZs8IKMggAKITCGy6fKlrPIDAgMlaZ5EEg3MMyETgGbB06uOfObl1+XB7gEPbubB89sTJ87t27jx625GVS6d9H8NKxp6cFIzgLXjP3jOjAKGQBBIxAMrxHd6PTUXI4+cZXN2xIDACAQCSRwKw1nsR+Jhg/0w8KrqrvQxVlOf87Knhi2dzCzIAYQ0ggEIplHPeGOtUELDjYlgKKRGQCCamm61GbeXS0vj3bLthokCX1RpTO3a879aXDrp6Npw6ecE88ugLnitTB+84f+75qupa5wiFKU0QuEazWknF4tlLvh6dXR5MRdKyAo2CaWnYecP7j64M7YEbr5+Y2bv+0a+OLi2nUgbM5GFztTitQ1fikVuntKDX3f6SsrdvGGy/465X/tvHPsKFOXX69PKFDsAF0+u1msHbf/AHpl56/c7DB0DmgDLPo7DeKHuZHplaEEG3f+L4iWMP3L92/tKZY4+0dohWBTaWTclYm6sVadizMusUgt2Nd92w3oW7Xzwec18rn1RELRW9dt5sNN74vjd++7vPbZktEfPsQjS/k5xTr3rvHZv95UvnL9Ti8LYjr9s3f93ufbf86V/9TEyi0PrSYnZlbUSKOm5tw6rRhr7/ge/M7Z13ZoB2pZ6a3sW86irDvp+st0Yj2NgkUU3zvl1aXXn9y9+12ptOJpq2v+fF89t/6BX3vP6l8y975/u//Hd/e2r9VGUwa5Lu0Haycuis29zY+vCHb7ppctvvPf0geibwZekHvjyr3I6FVm0+WioKAyFg+sTjL1Ck2PHJp88ms5XmxMu2aNdjL6z/+lteeX3w9G/8XNhf6czt1xwUfiife1JqXQ+q1ualR2lBgXSFNaOB8w7jOExtvL7R2Vx2lVQqjL7w1585ct3+173xLY8/8iQERiLZnIWCIs/zLJudnW/Va9nq5XotMbn13ksRWjVJ6fTjT3aOy/iz9z5edZ2Ds/ymt+1818+9ffn81/PsuR/7pf22clmBkUAanAMbAyKMrizpyyNhvYLSSmsCDOOaCiutVlDdWluKkrhRiw0GCoKsPTBupYmdN/zAqwyBhwSx6aTBnMiVGGwRX5DY6ffyZGojjIV1xEGj9EOhbOsI7d9tjp30zz4xuHo8iJMgkFLyqNcLA9nvboSVivNcZAOB5sC110zsmD93aTVOpbcFRXEc+Pb
yxfmdu432MogHnYyQpYAwChhMkiQO4iIvpZIyIK8gFMohG6OjQIz00GGgMPC2jMMgjoNSe4Myswa8E1BK4aUA4zKkKAyUIERvq9W6UOCMz/2IHQQiJiHBOwTprdfecEQBBcMSomCi1y0CiVJWwPhABEhaCnDWNOtNIuwNs+bEBCWNRDph/WB5lb1BRqNp1B+GkYwbibEOlfLGlH4UBcloMBISjHaOsVpPs/765vIlEYooEAZEo6pKk1rLnmw9zN9yFLzWURx4bO04DO/fX//usxv3nnSllcKTsbo+RU7zsECnvRMmpPYrXz392EPZoF9UEqVE7lFpXPjcd89CoMnb3HKcYCuORsatdYa+8NNTjUajtr7WdsbM7ZztDMq/+dcHf/W/3jAzLTeOLzva3BitVnZsv3D20ktvfsXP/+AvHn3XO05+98s37dr5r1/96syE2NjqzC0s9Ebh5tPn9cbmpI7+6q8fu+N3X5E0O0ZAa6YCYctTz7ruy394+7F7Lx5Q/XMJtCle3RzlzhvrAilazRisA/BrmbNbg+sOLczGkfbeBNTuFFeurFijgzDqDfTWsHPyi3fXk2hmplZqMz3T6PRGzkHpvNGFJ+sxLAvOR+t/85GbfvRXmvnJ72AeRcEUVOoaJHEPpUiUcj4flVrVK1urW8NhXmZ6aiLJ+kUCAkFGcW3Httbxkxcurm+MOv2FvXPXTYeVst+aaGSFnd8xe/zR4xNTU7uuPbz02DONxiypfCvJl194Lq5dbUBjB2Eab212kjBKK/FgMNTa5tZDELbigPNCkiNrIxBGUkxivjXpOdo5t2t1a9X4cnVrQxvrkZzzoZABBePUGBJepZIgIoD3THT1DIhIV+cjRMwOkZBhXIbtmSUBCenRj11/RIBCGj8WBNB5L0gwgCAZSFE65xmNB+tdVSkZhKmUzNTpZd56wz6JI/KoFAUqHmaFs7rb75XaRElknfeePVvrLSJIQWObAyEAsDEGUu9RXloRv/Lf7v3lHz/82tfOelyTiUPSihkwJpTgPdghghcicRampsP/+kuv/cX/eUK2Aw0eATzCg2e6N5+dV1CLG7kHtoEH1Ehu0NfLi/0rV9JLK4Ot3lCDynSZZUWogt4oJ2/ZeqkkooiDKI6n+oONMFCevTFm1C9kEPZyLkZLSE4wJVHoIbCeWI5mJrZuOrpta2NObzUthSq5klQjn5vpGbrrrubhWyths3bNgZe0z3Xn7+1ujS4OShp5ZYy79ciOVx7uL0z1g2QQt/ZYXwuERxwiOQAENgAevUd2gNKzRQpIgCIAEAwSqSBvtjdnAUNw5JzZD5OmgJfIpumZ7f9w7K8+u9QrqlqG+fjfLFE49gCEaKwNw9A6g0hCitJavIobBkDwwAAeaVyqJoCZEYF5DMPCfwc+kqCxV5sBlJLAgAqlJOfBO+s9W8+CUArByNb5f994wbW7WkFER4/UL4/ye59avbxpkjQUjicruGOS9u+M0OGOXS30JnWyn+k0Sdc2RllpgzhMAq6Ks8cfPluvVbbvODi57zqftFyQMqUkQ+dyQINAKCSjRxkwKQhajEYE7uAbDv7Oq1966cW1P/u9v7+8fG6rfTEO4tr84efveejbX/va1uqlqckJ40CT994TgVSKicJA5vkoVErJ1qFr9hy/dMHlRV4O/+tv/+0H37n/Xfv2ZcvP2bhsLsxBLYrmqt0V//QjQ3ItJ622OowjFuzYA5FgKo1hYSKlPCMhW2c9I7KPkzAvNOfZZJwkwtlSyygtXBkIYAreetf3/uM//Z+kXskHRZgkggIhUUis15obi5vJLIzKzvbDh+I9Sd5djlN9/a3yJ95U+4N/HWTQcEDOekQaH3zYe776To9lEAAk8B5RKCIPYEy5sbmkpPQCnXUkJDJGApM4NCZPHOlRALoeh+0kUJB59ohE3nvnHAKoIABkD9o764CcR5vzZDOyHofdbpkL572Uij2zsx4do+MoWurgO37k5/7+93+H0m7R02m9tdnpB5FQjN/68t1Rs+Vym5u8lCOh0FnvPYMDb/MYZWkYkYb9Ig5rlWodwKtYm6FGBAC0zpEgVOSst54FoKCwPxyyZqkEEXnvPbBnRhYkCFF678IwstY4bz2jUJEIVFl6KWRpgrMr+k6bCjGSEXln2FrvYORg2K3e91xw7JJoD+I//ExvZVXevG/2xNnuycVYyvjItftZlRQmpXH/j1QEHpSgLNNyrKv65Gd+97m//+N37GvlSbxcBkX9minfyRQEqtXccwCLY73BSi/ZPlV9yY150FtZ3WrMLITx7MbZsxxJFS6/9RVz//jp8OzljGRIzoFn5wDIC4kaBBARBo4g8w4Fe5ABCxSKSbBED5aBEMgTUBg56511BE4IYY0HB0qhDKUIaNjulhkHUQgeyaNUJCNkENYZh947Ieo7eiuVtPamYuvLxdZTlXodCNB20yqlzbjodrRmoZpG28MKzHCr3S85SwNtck0ULv/cr9CaLj73NccWLbASgfSD0aUvBtb2FkuShmSgpNQ6iyJ1cqvcsK1aaVUkPPqy0N6RN61P3+eeudRJ0q2JehjHcq5V2bE9jRN7/tzK6pprzdSnmlF7fUupwJpyz4E0mkjveWx9lKE3XgUiQtTWgofOxlpSqYT1pMzb6yOZvVh+fODmtt057y9EeRlQETVqh1/TOP7Nr00Z93xncH7RqcaRierUiw9+uzEjszxTmiGsJqpCZZBZp9L6noPbKvVJ60oo7CjrQ2DTEHc05eE5GUZ6ZFiyObTQSJvppYuXnn7hys6ZiW2TMbMII58Lt9Tb8JA0J/Z0B4jOxWGcb1yRDM3JvVEgu0M5KvrGlZRgXva9FSX7KIZS67hWvXLlMmkjglCpsFlpuWFeTZOidIoxFK7IVwaDFRQWuOTSSip3bp92RnXaW1Gz1R1011YGKKRwcPHklbRSmd15TdZbyXQ/pYZgWXrLHpt19b4fuPmLn3vs0WcvzM1PYGE6lzqVifWViyeQ4ubEVJA0ScBwePV4UGZaRYEkUZa5FITgnXVxFHh2aRAFQgBiEAohFDMQkNVGSXJM3pqizKWIEQCBrNMIKJCUVJ6dca40ZpA7IGLnlcBAigBlro0rtAWvmKzzKMAZq0SQJgmPS9AJpRLMBMzeOue94/HacIheSBKei7J0iGmSJpEE78pR5h3npQ4Ja9VWPuwKIWQg2922klG1mkYSnTYCsdfeCILlgV4Jk9HMbDARFarGFEQ2b1xYSgpNu/bOV+sBei1ApdHMlfPDex66n31IYIiQx5QcQkLORvne+ehH377/TW/f78vNK+eG371v9Z/u7vTz1IFVaBwCCsJx0QsgM7N1DlkSeQcBidAWn/j1O6Q/VXZHOq8aq8CxcRZZCEL2wOi9wKGL73l2sLEWXzjjtzdr+/cFQbikTYHeEwXOQ5IIIUmRY8hkGJcZOGeEUuxNXlhm7Qj6Pb7/0fzZU8gIxMYzexTjcDQAjoebgGOWgmUC78dmeHYA6IGIGBEZga966dkzeBGTfO7kxn0vnG3V6pOtyVTFSV2dOPXc5UuLb3
/v9z/98H2nj58RCrz37MGPazi9y4Z98F4QGuuQBNL4/juuXuWx04jZw1UnLiIDEkYeZ2M/O0lHrk/37GolHo4vZo9cQc68QVShtMYxGwcWkIMApcDSGhQkxuk1ZiQkRO85UHjgur0iJPJXHwn56JLEcG77tW64vrq47qCi3OCOu/Y98O2nw6n9P/hj//e+73z60W/8WhIionDGOcvL61shjGo1V5urnDm5kdlUqCSSsHO+GsTLO+aCuksePXb6tdtvf+mb3/383V8iX4SShqNhUAnZ48py//AgyTeHt+w7tG/vXXrU/Pbnn/mXP75ndgqrwURravfbf/DHZhYWatOzkaq6Ufvkdx7sdk9v27VgC7W5sfnCg49cPn2pVamdfvFcW5s3vvnlse4c2N34md/6wJWzp/7otz6198iebr7mye2cCkLkiNJ8Y1kUMon9+oZO65WtTs4FzzQrPhRPnHhi4fYdM4fnsuWL89erA4f4iQeW14aPHzwwv7B36tC1h/rn0+HIl/lobfFCo65qNeE09Ucu4IxlOjPZ4kJPy5Vd03Ty3IvNZm37zNTK+XBG3vrOH/3Q5UtP/5/f/+99WVXQn5/eHtUOh41bf//DH3n+3Oov/NpffOD1r3v6u4+cOfm1X/9vv9GqZcsbmxAEk61Gr20M2qASBrb81uc+G2iH7MNQ6by0zlaqUVbiuTPdhi927ahjbWFC4mg9DxLvESr1kGjbu77vA5eXGctgu2zsnxQ7blK2VQ7NiAgUN3RQTxqum/eiiBiVFDI3zmgvFAWRCCIaZCNUghgdyUFmdx+5Vs1OPn6y+7O//vt//r8+stFeSWOIkmA0yOMoqaTVYVbONKdH+aDMtQOIkxDYFaN2pYoI0AyicpBcyNwf//OFh89U33Pny95yF994+3zpI00JQCChevJU+yt/ff9TjxYe55vB/sZ0c3Nlud3P7jx69PTl05vdyySDOBWEvt8fRZJqaW0uOfuON/p3/7eX9/VpYROUSsGmIlvQtDFTyksvFCQqmgIUpTHG6SDAGpa5iBVg76d+Yur+R7ZI8fLYTSGJkQVwXvSUiidbldI6wzYbda4/evQlb/ietfVV8O3OeVdfbG8uD5BQ99cDucMJRcRRnBCRdeWwGHmjAamRTlg0bJm9Ze8cobG+yAaVOKqlUVKpDrtDFsDIvWEnUEpKGQahK6wCbzhPwkjnGaFXgOg4z4dCKaFS7zNnbBrXdJlJCdqaUhcsFRMFUTUKouWly600VciMXgbSoe/nQ09UbbRGwz4zadaZLWoyhULbLMtdvjC3fbPbMXleqdYmpqd63U1SgSeMorDMPDFQEFRbSklb5FRmZMlVpyuqEXuAbnujUqmIDHKtuwMbKNFIhrv2hOmMLnzd56pQsjLlfu93j77x+5/ZGKEX8vmLduUST9WEAIiToCiLEiHf6N13upy/KSzQyrrs98Sf/+t5g6nRTioKIukc93oZKREBRM14NMxH2cjqsjAcBjYYlpVW+Pd//TeLt+xvVCpHbn3FM3efPnQ4Wti2e+P8CWPDk9/6uMEL110/vfAIho1GZZhID/vmpoRXYYBBh7sdvXY5n8IS66m3pYWA0jnTz8HB7uu2zT5zEQbtyalZS0mpLfAQnM2yMs+LZiWGUm/k7cdGw4WJxsRsY2W9N1GPR1lpPfe6A0IRRVJrLktnjA2joNvPiUgbPdoopESPqIc56fLlu/0Pv9cMzzxOUKdgwmhvO30SMqkZb1ZYr1tw6Xx1eLE/XF/fO9G4eKqXD0yaSJkwgz57dr00I2lKVYn7eTJz/ZFwefngfM1iJW3VF7sbo87K1I6a4Dycacg0WTxzIYwqMwvbLi8ujp8FcaRKnYfVyAL3TZlrHg0KIansDuREfaISFuz7Wx0MQ6AIJK1utIMoBU3tQVaaItMlSSGYA6VsyWkoC208jr21TAgIYD1LMdaDWAhh2ZEf1zl4YiBEN0b4AXtgAeS8w/EzGkAK4bwPBSohSCABCiJnvQolIAH7KAzHMKS8NGNiUkWqUpJlBEE8ng4SZra03oL1QoSOPTF74LzIlZRKCGcNIvqrkhYIIRXSaDRigbFUgyz533/z7MXT069/bXNhLwdRz4cSqelFBYCRvSdg9mCFHRYt6cvVzSyvQhgaY8IAR5Wpj39t6dptbvcud+0OlbQoTIKt5d7yqj92Up5vOxsPsiDygsG6KBCE3mmjnU4qkTPcbMRFpteWz6gArTPOOqQx0xCbtUavvchjQwh7Ik5Cr5LR/uqVc6e/tPvQ9M0LNzz+9KmHHrsiywLyctee/A3vOBBvsyB3Wo21feGbZ+ZsRf/jp844rF1/zYEff+/tC3P32nJUmZoqbY1QADlEjWwAJYL1wDCuifcMIBmEt4RE7Ik9AIcAFUQBJAG1kCUwBwkx9qP68Ec/dGDbTO3/fPTcpb5HjBxzkQ8QEbxDIaWU1jkpJLMDe7VDTwgEFiAYGZnQW4+I4K1AwvFeCT0AENI4w+acQ0Rmts4rKZGQiAQpZktSGV0KiThWBB0D4DgaCQBveudNENK3/+nrt9x4MJR7/uW7Z0faSc8Kg/07GnNhf6Ima2kfAj09kWIkZ2ZjXyqKwjiNiqHLylIFUTm0rv38+uP3OeeiWpUwwiggGUAcUzQJ6bwRiazNoaxgIK0PGIS3YEozv2vnb/zhb545dvpT//apzlB3eouCywC0ChiM1ForSaVzgmj8pzvjAoFSwNrWInBRCVWRl9t2zKUCNDKYwan7n77zXXfmPiBXRjOtiYXJ733rpd/+5EalWUcVCA/GmSQMC+ucM0pcVWslESFYY8uyJIBIRTazs4p+5Tc/tO+L//zrf/Q1bSwAeA+DvHvPU1/fe3C3B1hb3ErjeNTLDXJQm0yq2/cdmt/YHMS1KgcN5NQPOw7YQ+/d72h9/u72cx0npcAATV4ygvegBDGjHbuBxoax8Yacx1Z7BmIACJQyVweq4BwXzq93/cbqUExrWZvKNtwo99p6Iehq4pXBGBOSJCk9Oyml1RYFBUqm9RSQyON6uzc9sRD5uudhOSoVohC4Y/v8xnpv8amv/P0L37j22utcOPPUE89sbKw3pipJEnT7QxHIYV46JoGi1EZ6IhLeMxKjRFaECiz4tJYgYVF2JSnwnp33jgUxAFtrkBA8eOMAtBLSaOf0GK0NzJ6kUFKwd8ZaqVCI8UEMCJGQBIkiH7EnLzRK+M6z+fd8z9xEbeTJCwleG0XQuQT3PRl9/n6zkXnDXI7if3pUP345WetWm3Hjluuv0c5YwyFKEP+vq8izD0VcUG61kShyQ1DKH/qpf/3aR984tb3KwRkm2Tk5Ug5mJimamdhYaaycXlp7bOM1B8mDaM0cIN1jNYzmdxnhLW/GODy6p7G+UYCU2lJRFmGcInljLLCRUlnnBAlGFlIigzUerAdE6yWDUpKc866wYRoyEgYVBGan2ZOKwkDCaJg5w9M75n/y13759/7HrzVSSFRajNr9bCBIaucQdDDRGvryzje+8/hyt9gcJlENWMVBUDhGz+AdMZOQ+dCA7r7qpbEdX
kkj9KXTwzwHaygPG/l//7ndX7n3PIhqKENA4b11PLJFJlGRRK9Nng8lhQRieZ3+8d86L9nFhw+Hpuj39ejg3u0Pn8SHTmUBUFyha3e2Bv1+JDLdL8zApqGvNsJmza8vXZycaLZmZxbPL6dhMJHo269pHb84WNt0AKLMjArIeyz7Gl1upA5DAnCl98cu2N/57ft/6afmbnhlxNBFlZpg28KNh8zoc4O17ne+/MDP/9yHJ9LZO29Nn3vyHyv11mTVbLQzKxMZiCgGnp7UpQyj1A5dPhwUWUleZoBF1l3fOr57575aFIbVeHP5bI0qd9y0/fippaXO1tzc3m3zU6tLL1Snqjt2z15Zak/ON4zpgZciCGvTrdWNrkrm0xAtDiqR9R6QPCWGBDpjNza3vCvianM0zJMoiBqxYrW6tjHTmHdBLY0TFYEMty6ebl88txw300RK74QB3rf96Mby8+FEcmmjJ0Cmaa1d9lrzOx+8e3HPwSNTe3b0T2+s9TbiuW1RXF+6dF6FCaG57ZYDK+dWv/Hd85//3FO337Jrflo15mpBHE9snweILl66MFmPGs2p8SqoVGPr2Gity9wKJQnDIMxKDQhKyUgq53UiRBwFRvu89BaRhDBGI2AcxEgCkQg8CQKQxpis0KUuvLWSJDCP5yFxECOByUtmHydxHEoCUVobhmPHD4wnYwzcL4YSKyxAG0vgIhXGQgpmsIYIclNmhWYPSSWZn6gAMoKFKBrmZWGtsKrTayOwIE+OK2mlNGyd7/R6hbUbUUJWi/aFA9fD1GwRKDMpqFOIK8sT37lv9ZuPXilK2DV7vir8wlQ8vW0yhzNf/ObpLAvBeRJjJYMCSWOXdz3077ht9+27mhceO3Fuw3/mW0uXLuvC1pUQzo+cd94zoQAED06SiMPQWu0sewYgMs5eO+Pmd/eFaHsdC5uUWhfliKQEZEfS+TIrXXdIj75gVtewGDir6BPffC6O59/2qqkw2XLOIbB3PlTkvScIyiLpZ2GRscRCBRbYMkpb+NXN4Ytn1Ge+0QNOwecM7BHB+7Fteex5BxgTpNExIyOgBxrLQjiecACPZ57AzFfLOIQ7ubi858233Dmx4/LS+aEd5cq+sHjl2oWd9XhquDoYdUxaSYoyR0FCAAASAntvjCm1IcQxhAGAGcYFZ4LBAzAjjdGfgMI79AwBKhXCz/7orpe/cTqtluC8L2X1yfV7T5Un8tIZNIVF8CTQS2LHFCmp0JeqKDQSOOMAUEjB6KzX83PTSDLPcnb6auSgpnurm1G7Uou2ZwRQTfpxPtjw2pXKuED3X/uGl77wRL2/3GnFQX+YA/hVvfZj772p273wAz/+/d/4t+f+7atPnVkepSFeWS6O3lp7/pFz+245fPCaXZnu6gBLVdy8/8jzj98XGO+Vl2m03hl94fPnoSwO3hy86W03XDm/esPhIx//4rc7m2fi2sT2/dcILxxTe6V75eLJoMhWT616z12zvLR4ubQdsF1A2c8H17/8BhelkzumGXHu4PV//KkvTdT9bT/w2le+/tqd3z597vT6y9/xmnJ148wjj49IPPT00lq326jFJLwKdSTDQCYXu/kUlhGePXjTnstVvc6DwbG2qCTJJK2218usjOjyZDy3a9uBc8+dWt9s16Joa8N44fbsqd5x48yn717MtvSuhgtVceLui0Pyt7/tsLRz1+67ZaL2ss6SEDJtRGWzWctH8dMPHXvNG17/m//tI61aczKK3/jWo41JOvC6a+Vt0Tde/P3K7nS20bp8cWultzkYFOXIElNdRY9851tJFNeSkJT0xiryvjADhuaBmR0NI6u2PznZmomz587koVcT/PJXX7d5+Uhn5ZRyIQlRhPaD//M6u+M560AFAqUQYuLLjywNfMWyAwyzzCrJSBCFYRCHuszLYV5tVKuVeOVKl1m+4k1v2rlr75f/9kvVWVNsLr/jbe/57Fc/Pepc9APXrFVGue+NOoPNbGZba8fuHWdOXQwRiyIritLzoFavl7keFEUs5QjCwQAffXp57exid/3U6fPP/+ofvrtUWZjU7v7ko3/xxxe2NW4qt3Q8FSvX21xaI0fzjalnjj0vquK6HYdPXTwzM9Xa7A3iiXRG0tqJJ3/7N49MXXfWinUvVLlZmMW15QcunzsGb/31l7vJFFxmBxxEGToQARCMgIRyfcYhezZFNpukxci58OooWSmcqEZgypFzk/XKcGjCyK5v9l/x5rdO7tx//OSZiIukiY2F1tH4wPrdTxrv1peWZxc2KG2ywrRaRZB51kXkmbn5K5cWu5uXOJCVoJINR3EYiyBCYIpTQeSBOv1CYCRIaGPGnS9eSaXYWKPBonBFniODNToKA2OsUqH3XptCkPfaOCWJHHApBQXCKSWMdtLk9SQJFlpWF6DBWLRFYRgRhWcxdFYEkTesRNBogi9zW4zCkMArTVYGYAZFa7JWqdbKUotQxYrYuaRSkypqt9etzSiMJMaFHQz6nWZ9jv2oNIUk6PV7AXAShsurutfzexqIvjQlRNWKw7gQoE1/ZrJxeB6+dcIpCLnE8+e4tterwGtGAvAeiqJ89gXYbssPvDtqVcIvfKX/1SdiEgrKEkLlgMB5LbjsluOudaMNBSQRvu+Vdz1/9uTq2kazUjGFON7sl8MLtZkvfPh/fOiD//F3r9+z7X1vfmUa63QmffHx8yDwjXceeOv7fvB3f+cTL7vrJR//2D87saOd7OOtS3vidGJaqUaUy+Cxr375C/ee/I0/+8fa1DT2t4KJje/9ue+ZuuHiZz7/xPb5uaW2X11CciUIGOUWQQSKhpkZDuniqOOdy4c5KXSW2bowIOucs74sHaGRgULAsjRIyM4zeApCRy6NeIHstx94b//sF6KaFCxYD3U/82YoqgFrDYS2wKL0RRusrfmhX7+wEgaQ6+Fy1+4uqSyi7QsTXnvdG83V0p4s7/v2g4cJX/e+V5/upF2dXjj7zKGDu48/93xp9kjfzDOsTV1z9tTi6/Zdq+KrqyAf5hIpTGQ/H6XVRqCUqqCUCAInasns1ERZ2mpa88DW8rDXFZHqj3p5uxhpmw+zIJKNWjTInCIMAlkPoo1i5IsSxwXkgp3ziMQMCDweIwkUQrD3bLwjAAeMiAgkiPhqHI2ct4jE7ImJkCQROxZKRZEkFIhSKRUIWfEe0RnrWTKHtsjzAFjEcrpRsUhbvZF3tjQUR5EtS2dNEice0GjvnUUEEOTBI3uPnplx/Chm9EQWOBCMCosyd5a2ivrf3N35xuMrP/nOPa98WTWua0ysiDyTYo9EwNJJqVRQKTm+6frp+491MyesdUiCh8XlAV9ZdNMXaw8GuQUfxsC2aSHu9EdDFzZnbbUSbF7JjLEMJJXQzqCSQRRb8r1BDiyyLK8FkWFfaB3FIaEIEVzZ9jBmxnnnGZwVVDarjTtfdvM9//bCQhNnZ3cvXnhRsQZkqeCV17WkHHk546CJJKBajyaG7/iBA5UmfvmbeNOe2VZw2mZrhpwIGyAC7z1QwujYZWIs3HEJWIIHARLGDa3es9UklSPJXjBIIgaw
wOC5QmQBhefShw2RyNe+a/c1B/f/5h9/7YnzMMgCqULn7VXSOSMCkgiEROdACAJnpPAOEL1XKEh6UFQaa8F5QHAOkcYZ/rFNm0DymK+OIITw3nvDLKQtC2YmSQBBKGPmkpCtc+LqTgkAINk2nc7OvuUD9dVTl+fFxm3T8YnN0VbmL670pyfD6oH08Hw9qRROQliVNoJOroXwsQjAW5+q+kJTxKm0oaCb0GdgRm64wv0NKUo/3PTDUf9K34oaCeUpBqqhiJPWQmX6GkdTUWV+1L8EJe3fJj70Q28y1epXvvjwI0+cWVtph2nEjIEC4zhEElJaa6222qGUaKx33hbFcDTM02rUHbXzkP7u0yff/8Hvi0XTFv2onnmD1nqbdL/3V49+89kv33NmUKnXRx5K58l6YueMISGRgD2W1gaBDKRSiRLAIpAzNXr7S/ay6L3je177qX+85/iSqVcSEcX5YLS1uhSqMFDx1GQrK8t9R/edvjT4yO/8xZ//zm9r066lwcqFzctPvnBk4aAjbXLFUEztqf6fP3njm370waGPyYMKAud8EEiv7Tg8CFcntsAMY8ORs+zBK0kAbI1GIjYMwCQ8ICz13JVLetuhoZfa86gU0HboGYnQWjd2KQZB4BzboiQScSj+f1z9Z5ytd1n2D5/nr11t1Zk1ffeSvbOTnV5JSAIJoVdBEERFBRH/N+gDFkQR1FsU8VaKNypIB+m9hgTS+07Zyc7uffrMmlWv9mvn82J2/D8+82I+82LWZ+aa+VzrWtexjuP7ZUJ6AtBKRg0d5tt37jz69MF6GHCsCMQsLyzIQSadZV7bQWqf0U87LKpRoK0ujB2rNlbbHRnAlgt2HXjiEKZlo1m1WiORJ+9KSwxK5+IkRohK6xQTZTEsnPdE68QR7z14b60TnDPGjbZBqKI4yrOMPEnBhQrSPANPXHHrLaP1/iUrCh0qbg0DAOusFMqQ5eghVLOr6q7b+y+7RYnAapdZD/mgeuB49OMH7Hr91DmXa5uvqZXCbpkYnZkc5Yyco6RRYyJg4n8a0NKiIGuscxwROXCg0rF0WHnfhw9+9MNXR7AUjlEw2TAdbQc9q6B1zXQwfh4+cXC1faa+eSTtr1D3BOKamnmhFA1TlGLGf/ifb51746cePkocVbVetcCcA64CsNobx5gAjwjoNTEZCOHJSe9EXJsqyjXvUs4AFIB3CtSgkEm1btJVS2EQTVq/Wm3FRZny6tivvfGVJhz55Pv+oNna3Ov2vWU8oDAMSwh2XvriM08cOnH/Z5wa2b6t2l4RWa5d5uNq3Q+xHDrHZRQksRFpf/DqN1/S659lIVhdiFadY5i3XftoNn9qoHgiUJW5AW7Al4DEktg5AhDkDTDiiqwvhYhOrLrCyROFpwEWUHn8lDlwTI80VBXyGy6f6Qw74y0eJ4GqsDJLIyfiSEgetia39gZ2uJCFcWhLK9Du3IGtVu3efSudNQ2CgXeMo5TSO5NqraRADFxqAaf2HdOf/or+o/GNE5M9nnDFIz8avvuvX/rWP/iMNeLS5/zan7/vb8r7/8v37n/d235V0RIsrpm6d0HKKRoMUhWMzR5vh7UqUVmtVYcOSuNKR+fvucA5tdQdoNUgeVkMWOm2bqqdnWs/+NgTYycq0w1M81WGcrReybonK7Vac3I86w9R8AsuuaSitujeYVd2Kg3Z6aeMj4w2zivyXuH7YVV6y4wx1WoEXrda9c7aMFIto8Plrp+ZiHu9k1m20B9mU5t3pBoEsLCCSYDzc0fTtaVEYndpoaCQvG7OtKobt4rkqajS6qzkaa9XDLtZv49cjU42TeHztCg7+UUXbF9s6/2Pz9/56KlXvfLSQnsHLjfWFZ3RkTAIY/PsJUEFQqel81YIwRiXShrvgIFgwjlXkuectPM2deQ8oWCMGas9eWONEAIArbEMGeOSgFmde/AoOTjiSlal8gCRkkiEAEElXhv0o0gJyfqdgRCcoQzDitFFHIRFoT24ZtIAQg+uXq8TuXVhqzVmmOZKMOOtZUJKGapAMq6tJoZeoCUvA+HAmTwj8lEYSREUZdnrZ5lKscz73vzy3v3DzuxvvUTs2LvH6zbp+PQSfOobJ+7Y9/Rg4L0NOMDKQma89zwnWrPOIHKOknMChuuTcEMOiDEpW83GkRX2839/4vTaoGeV15JDMDMxttJeEoyMXTe+efIeAKIkvuraq3959y/Ie+8BPJLXW3c2BkzpPNBasNJZI1BUPFgP3pYDJJm7iW/e1f/aD7MyiwVzDnF+2fzvz54YpMnN128NeN9B5tBiDsMe5Lp2bDZ/ZqHs5+nNF6qd04HTJhv6dkfet1/86JF0kIUeDSPvEQnwHHbpXMGSAdC55df6B3AA8LAuNuHrU+l1FQsCrvd9vEPGxfyZ+Sy3utvL86LI9Ctf+1qbFtPj+t577962Z2NTxycPn+VBUGb9Mi89rqPri0BKYy1niIw58utFWyQPCOTXDa8CgaHHivRTG8PpKlxy8cTuCzIVzYUVZYYli8Smzf7XXjp520Orh0/bYcokR0RMIlWLktXVbjooPbD12b73TgjpiNC7semRLTs3Gl14q8mc04RneXd8y4jP2kWZaYvMjjTq4zaGrdummFD/8dE/qu+a8B7GmiN2OBAqMN66SDqnKm70B5+5I3WVf/7Ul/cdXNt/xw8ffeoXgxQpL4LTc3I8Hp8cbti14+g90YFnDoBMWJCv9nLmbCWuDIcZimDfQ8t/+fJ3vOdf35/nS6oyWos3hqP15eOrK8ePjIzX4kp04U0zZjBwyXhvyBzLzxw5+9zXXtaZ646tRqNjQafXH7DKxovPv2yy8dXPfsuG1ate/9zOIssJNlx6cbwRHrp/qTN/yvUMj6H0rtWqhRwyKp/zis22129VJ84ePDm2Wc5sxrK3UK+rjefvHc4f90XkTZoPVzZt22WyRKhNPN7245/+y8y02HtF4/hCd0Ai1eqhh89WKs251e4NL3vla65/x8HTs3loPEQhk92u8EE0Nk5/9od/vGnj+IkzUEsu3HPR3u98+6FI1Rnx5sSWW179xx/79x+zVuMb3/9igVgOinrEPKXoFDnDEIzxQRAEoppUq/1Bmg6HUiAXMsbIlgB1+dp37kwq9Te+65nLbrls9+WTI1dVD596emQ0/qO3/KdnYVakHGWr0t5wnV0rgAHIwFvHisVarxyKSBB3ljhHcsYFYWABnSXJgyzLs54GzSanN6+ujjznOb9zwd6tbp6eOvXIh/76H//q4x//0xsv/+Bvv5qDzPJhUZA3BeNy2M4mxqaSpJr2ekIIIQ1yKaUw2jKA0ljuIR6pb75g49kjs5rvuuMp8Z2rHrSDQW3D5GC1iPiu8fEJaJxZTRfjhE/O1BdOd7smv+TKyw8efmZ+ZdUCLXaLGig9O3/N5XTT+6fOu2pWtri2MTMzkEyF0zN736jh3r9jY5s9lI6hHJsxhquNfSjOIs2CmPCSvOAklJTQS4HLSAlz7logZJ7bWiirCWuvdZCppBJt3LFrwwWXDTtnIuhJcLNHunmabd0+ctPzL7vzzof
TYS+JmFHCOFsaKxjlReG9We0PPWNhEMsgVkEE4JUQg0EaVUMC0kY7X3I1XhammjCphCtJ8sAwiWC4sAxpvShNgEFYZwjO9QMZREm1qwdhWANL3cFCq1nP85KAlFC6cIEKqdCd8mzpMiEVabDWxlGUpt04TJIo9uSTSi3vZ0C+niTza4tx3Kw0R/xgmA/yMs854mC1kw5XLt1447DfZ/WKLsooEt6DCKMQKwEPB72lJKkx4EAi73YBTSMJdJ4qhMmxWloE++dmK7Fvr8LIBMST64Q353OroXjFC3f95OARznmvZHMDfkWVK5Y7cIGC/hLMLtbXssrnHu394PhwWBZZkZQ+jm067nNbQseBwVgpKQME5gCp0JqTDEP+swfuetmtt55YOGmsqzXimb3b7r797OnTnbf/5v9++OmFkx19ppdcsmf7XgimNu3afdGG7373a9/69jfGN9e/85PvipGpi897QTS29fR3fv/jn3//sP0VYfs1MXPLVef93YeevP7Gd95yy4s/+Ce3RtCNm/Er3zTWPf7ofY+cacmNYxftOHvseKoNAgyKUpdWKb4+aV7Lc4vYa/cbtSoy5AyzwgIgIHApwIPzTkimtZGBQqJhWnLBjNG/9we3/uvf/PSF11/Qmspkfgg4RKpSEDChnNWWY2G8CGOzkE9vu9JF2/r0g6QSv/4lL7/m2qvHJra94JUv27G15B5XetmXPvbbjz9x8j3/9oPnvPaGz3//sYMZXbJn5o//5O1//s73vvW33rKY5o/edWhyqjp98bbxPVeuzB9YnTu9fhbs2bGlmkQcrEOqj48KlZw6fPbE8aWoxvfu2jQ5ObZ8ZlBYj8zmecFbtW4+WDjaLXSJXNSrVYbWAwQxgtatRrJxcuL+wye0lGQsAjKGyMADekfgSTAmuNDWOvDIiAPjiN57zvg6rASQEQdOgMgBGRNScM4ApGCMMxRKKiGFSiqVkWqFW6hXk3pVpbk7s7g6KPtFai2j3JJAlsQVyURvMDCMNJBQiguhtVWBDKXMjbPGcmQcGRAxZM5bzjl6BCT03pPnIdu6aXJxcaU9KHPiiLWza9XP/7D0aXjxJXZic2ojI6oj1oGgUqjAkzcWkkn1lj94zu1v+a+hpiRMuGKe0DEwni8PfBeFJh9FksgNslVjPee6f/YZ54FzLgmMcYAQhpIIzdAJJx0CMVethd4RQxFFKDhzWnsGw/4yIueCcVBhVC+NZchjFn36X7/TgM17J0b23/XYqcNHdJGSdkGQPffmi0VNaB+AZ9IH3kvv+6pGt758k8bVa/e0lNvvIFla7Y+WDMIKeO49cMYFOqTSQ4ksIyqJMbDSewYOhAw8Q+89MQ88AEQLBoGIAvKCfA6A6CME5rDEMN94MX3gA7v+8wsLX789L/pI4JGInPMAgkutdYRK8YAYhdhuNWUU+mY9bi+VnWFR6EgJVYrAeusZAAFDIsJ1iA+Btt7xdQc7gSdYf18WOVuPk8D7ohggOCUkA+eMF+xcn2KwmIU0CD3lvSIk/caXXlby+r98/UdnO+ZHjy8+cjL86WOr1105vW1a7pEyYIKYH1BG6LlzlVpsdAECCAovGMY1xBHRmmZQ80wJmxItNYqB0N1y7Rjonsn7ut9zq08vzP1QRqNzJ9cSrMyeLvpr2ejmsdb26Rde2rp6x3l3P3Dy0Jny0GwqVdjrFSEHJAAPUnDGOTIstVZS5MaElcBqo1cs1lvR5M5/eM8XWrnYcuWidCedCLhUjp11rfYnv37lb7/ykYMLgoX1JGkyMMZaWQnOzR2cAyQGJDlTQML5lmKV4dk3vPwGpw9UR8b/8H/d8u4P/KSjXdo3HsRVl1z58MP3+xCLQdGaHB+p1YRY+ODfvyEYSpay3hxtHtswPjZSLp9AW7hhyBWkoDdO095N7OGzxlqBknkAQB8qZhwgsnVCKHnPGGeMkQdk6EsruPDeFUYnccSVcJ4xFI6JuYE6ejq6dMChcAtnykefsLmLNXLrPCIoZADeGhNGFecNORfEgdXeGZrcvvPiy245eOTRk2cOcq6USiZGpw4ffwqAeWcXZk955+JYeWCDYUFSvfpX/ujQ/vsOH3n01Imz5Ki90t98wbQIemk6u3HbrrOHD6RFmVSkLguwCIw/54bfD8PGo4/ctrz4NIJFQCWD3DrGvHMeGApE771zXnDutEdG3nnOmfcgeISUeU/OOe8cEARCkCclBSABAwLQpRaMuOTeAefCs5kf3Dd/6Z5mayRz2q32wl88mNy2n51u51GInAM6UioEKStxWBEwM1oBsNoGTLOAobPwP6IiXTolGUckAgfeO5Dcasfve2Tlj96//31vv2CKLZgiD6vBSq9dG63YxlBeEOzefUGhrWROd05X3EJvwY+GjwdjFw9M4ASpsf63f/bWv/iT2z7ztSVmwIMzzodhxRhNLJZhxZuMCWGdkkHd6jYw4ZikYHNrZGNn6XGnUSrk3iaVRvO86xZOPzqzbbrXZ52uVDIKI3RltjhctWQ4pRKymFc0JqE0SpEHIyzNHdkvIEM5TFdZ48rfWV48EISOPJM8NBVBnK2trijIRlX4ml+9aDGfa7hGoEJXpJ2VNvMmrsTB1t1ve+9jZdbCSCPD0haBFI6IIRBHWi8UgCkNkcUA5dD4k061TbCzWffOr3Y8Ah9t8mbgtB6O1tTGjY0z88shic0bIihDzpLZhfLUQjbslbURUQ/SesJHx1R9lI8PXRLWe6l48lC3O5SrawYcMkRgBEIKzvO0MLwT+vjOfSz6Uv9979oYBZkp07DS33vt1HhVr2RxQ7gyv/vPPvDCv/mXs2//l4ffcuP0Jbunmy3RzQcYMb9aIGoJSKZE7qMoLPqWM+YIa5GYH2QjY7WF1WFvpU/E8rxwmjgP2l033+njjrGRsILFYKQufD5UjYp1ljkP2glLA7MYM2zUalwwKYYcTTpcdV54F1frjbJnu0u9Sl3JQGQljCSbwvrelaX51uRYHFcBbbfMjSIie3Kpv3vL5tGk2RvMSpZOTI0ClZV6ZX52eOpM56pbL5g9/tjq8qn3/M5L/dBNzUy3O4eH5fLa/HBqbNqWZRhVkcNIK736io06yx95fPU7333K3rxr8xZuzi4LtHGt1lteTcJzWGujLVjvrImjJAySvMgjFQSCiDyRLwotOSASouCMEVltnCMrRaDC0BkHwARnxjmjdVGWUrAgiCUAKccZuNJxQBEI7/2wn1pLnsFgmEolS+vBeYZsmA0Fo6ygrCykkgGIwpRSCsGBc2FK10tzzhhXIi2Ncd4xrFQjgb6fp4IjI5HmRVqW3nvFmUUUKiw15fmg1CVxDSji2PfT7tLyoi/04eP4zCMmdfFPfnHqsUeWl7ucvGQMkCyht4ScSQ5I3gsORMDQOURCtm60FYwTsJzwbFacfOqUMMJCjRNxdFxRu9d2pL1b9zbSOq7Zk29NjEzNTMRxWBYpEiNyTOCd+9fW2tckfNlAaTSVGhUnrljprTFM+8mv/Xjww9uzohdITto6YJ4cM4595EvPfPmHhy7Y3Ny0oWbII9Gp+fSZky
eGuTOWB5LYSn15izOmWF0zR86kB+d0phXzFsH5c9Q8omc50gCEgPQsUnqdMk0IiEwwcH69B090rnWE6w1YIgAgJDpw5OA73/quT3zuE3EUbG7V9j14TzksKNPA5fJyZ7DWZxBt3XrF4YN3A9fOeiEFIHog8h4EWx/Yelj/8xICAsP1KhNRvnG7/MNf33Lt1bV8dZD19GiFeOqt1oKHw0xzh3u3Kt0Jtjbi1a6rVMZOnF1d6qVnF5bKgownDt55y1EwrgAZF25m8/iGjTNIVgkAEB6C9bOgPxgI7jnwYVnGlWaml5546PH8WGutk6KaL2Fp4bGgiWhK0tZYZ0ExR37/4YW3vOF5s8fmn3z6zL995C+ixs4Xv/Tmpc6s1mfkWG36vNEnjxyNZ0fiQW2l5+NM1+tJt93pad1MwkFubMGJ4aCwPj3+nx98NzNy2/bzwtpobni6fObGm88bHMxOnFkY2b7psX2Pd2wmZ+wgH+y+5pKHzj6NfSYmdotNtZ98//apTVu3x5Pf+eH3a1snp8RF9XgbVDK7urS2nE9s3DB/qn88zYRwYZxs3TMSV4Net2xMjqkLRofHnu74lSuft3mgFsmJWFTIN5cXUmGCWmXn6Fjr1BPf7S51cpNcctElIJr3PPBgNXbMDjbvaZ5/9Y3FITWzefTbt32rNYEPP3ly5eRXzr/yQoeVQdect2PDSMD7xfJ3Pvu3l998RVuXQVi/7sZ3nzxz7LZf3C0ELwdZo9qacsWfvePFaytnbrhp9yVXX+rWjmkznNlQsYqlyykZm5UmqVRGW6NFvxitNUqbqzAwTijDgRXO9586Pr+23Ck6zW/+6Onfe+feQ0cenDux0ownzixU6s0oDLVz/jde0wDeERpsrqiiVTx6+Kks74pCGvLekgkCKQUvC89l6JG3RqvVph+dGj15YunWN79p1/Ybn7jzl7MnHjs9u4JgJrZW/u4f/vdff+SfXv36W77zrTtkEBZpbo2pJOqlv/qS229/zLogCJSIRW5KIXia556s4oIQLHkl8MRs2/g4qleHQ5ZUNzgYVoMmNeacyw8tPlX60nvKO17yuCjyHKC9sJBwIJ9t2T42HJRb2Npvv3HTpqmnxnYoOaaGgy7DGVltZm2H6Hy8YfMr/zLTK4CO+VUyQ+4zHo2W6Vko5jAYQxPYrIAqysLse9C2Rlrd/jn3U1IJhadQsbxMR2q1LPdLK+1tz7ns8IGnEt0vex3TTwel66RlwGGklmzaseWpp04P1jrJxlFjPTLmmEnqEdmwtK420sr7a2l3uTU+HsXS5jYJYsmAkBxHzsIrLnrxM4cfyPsLYYVZctYUQaBKXTBiTAillNXWe+e9AQDywgMfZr0orjDixMxIa4MDyyMBzjmrgZgIQ+TK5IYDIw+FKSIVkmBJtYLEhnma1Ea8F458afNEqHp9dHFpIZAqqdQH3UHez5iETnfQXhsqTEKZ5/0cwS/1DkdxPUrq4K32XiQVMLIejehyLanH/d6yL7HSaJX9NQ2sXxTW54MCRlrgDWDRYRy8UYgVVWlt3ikBDitO2rDb79PX7BxtNfJSe4qg1pz6P/+yQEFUluWSFqWTjEsOww21/GPvTaa3gLfND/3f2a8+AloD8Nh4CsOo9NTpawQ4cOrwpRftOnBkZXZxeOLo3PZdlz1ydNX21ctuufyZMye+8pWvjbzlVb/yojf82yc+cfLItos2Pe/x+RO3/+yHV151/b7D/Mk7vvfXf/u7O5/7Apt/T9ljisl86INQfvkT73r1e77xk+9//0U37X3pLaOlLXNwb/6z33zJUfPOv/3R4UXULOaxb9ZFlqVpqSMEImetsd4xhkGQDAtdmjIIIimD0mgAIgfaGQ+CnHfeeWYFA6GYUEpL9acfu21DXX3y88vvfPnYb70qTFlBbFha6HSGETDDa5i08kysDcRd3zX37n8UgyqWZropH9t357U3TH/wzz7ylR/8UzHoNbdVv/idnwRr8PJXPefQ3MHLbrycnjjZOXX6/3zoH8enJ+7a9+jo2MzBw4ebm3Zcd2P1qfv3NUdEMTjXML3y6kuVELVE9NtdJkRZFGrLpM04yLDXE6boK690rxPHvhZEy93V1bWulIwL2RqpxohbN28xDk6fWSPnGlWZ50OpeITgOEOCIjfWgRAEggNw8ODBA0dniQiQc08EjFtHjDm/7rdaV6NxyQQXKJDxMFBCyDAKgzAKuOIoWo1kZmKslsRkfWusPsjK+vj0Ure9tLKUDXqmyNLBMM1WJlutC3dOHj57dqAtRwGITKgwiI3Ph1mJgMZYlJwx5qyDdR8XMgICxhlnQSWaGduqUFiz0B8WnpAzcWrZffbbZ36bxq+rFLVpVg7ngyTiatINyGnsLLVFK91zyZ4//OPnfuAf9xXoUYPRthJy5DhMh0Q+DASC06UpnVOBstpZWxgNjDNrDREBoHMOUKym9tUv/r0nD/xitXMArOPIvCfyZB1Vak2d50DgvfPIuPDGFCjVyNjoYNB/yQ3X46D+2je94mvf/4+if0ai8YGYHitbG0Ao74EQUkTgHDg4AsFwJYHTEWxqjDVnTy6lA+uZEOstK2DMA4ICQEABLkDnPWkOGqUihuAs4wiCI3jAwpNDdEACKED0nHly1luNIiKPHkII7fS2+u++efLA0X0PHrQeBDAiB5wxJAKGUkpOfsMm/psv3nrNNdsbI0xiaXO1urR2/50nf3rvwrHVMDeRBWk8FdZyhsCQIXgPQkii9e0SMVz/MwIyBABO4Mivv0YlcggkBMdnB2g7d8305/qMx/XROnNDky20puHDv/+C5UL+9Sd/utItD/bMQ/v2b5yAV1w7es3lm7ZuTEZGK4DWO7C55rqwWRrUx1hodfekC+rIEwvSM2AggU1C0LJRyRp7iKmITwQoEeNEKEDfNCt+5fjIiXb36Kk8TdvHTsZBr1w4dM306PZ6PfH56Uxp6x1DbSwgECITAgE5JyEFQ7TOMgLncTjsH2oXq/PDS6p0q44DSjMcIAUKMmdTXj/5mW9t+be/Ov31e+3QhDJKSlJhwIZ5yYVkjAisDEUSCvBUi6OLz9/wkss3oF8GTWXBr7py70VbHrznaDnaqjlWf9mL3/TwI3dpk4ci8FY/9djjI824MdqCIH/HX//VX//vj/dF97HDp15y1XbyrLQaQ09BmARJlHgpUSEZ1GOtETK8v9pnQpLzyAgQcJ3IQA4BEZmKFGOIiqlQAjFbGMaYNaVQ2NPB9+4vKrWo1uCnjjTuf3RodEjMIznOkQHTWpeYSRkBIBCz1ntPCPbMqadazVHue0mMJkXtymOnD5Y680QEhN7WqwkgCyXzWhe6+PFPP+FzQ1CG1SBPi2xoLtl943R9z9e/9k9cBsZZAigMEJFQAoGefPRrw26RDwaywh1RqrOa4EJyhsJ558gj5wAopBCBMtoK72UgyrJEAu1K5MCACcYtecYlEeMCnTXWe+cdnHMiCWQ8zVJBDJAdXKn/y5d7N18/UVX08H533+NlJ2cqVJa8MXb9FUbh3Ug12DVdaTZ4WoKMayoMTVGUz
64NzkVFSkpvPXIEBCE4cUL0npF18t6HTr/96UO/+eYdF58vLtjLVhcpKwYba8yWKWMxZmT6rWZjMyU7R+tHwYbp3EEIKxCFZXdJJuJPP7jniSMLTzyVcQBGZDPHhdIGtXaSmLdoiKBI0TsesEgUrnjCSAUiETDlTeZgOR1mN91ww5lP3dNemNt9xSXDfNPC2ZO9xbM8CnTJs057VOahZN2l2fNf+HsnHvqMy1e8N2D9cOU4+EzF8c7n33j44A+kSNJhXyqpNVoot83IF13ffM1Lb5hQWG8tAaWP3X/qedfsZODCOF457T728YXPPnY0XRuLYsFYiegIUHJJ1ntdAiITwpBTSoJHCJg1+UhdNipu2zRgMbTaT0xUxlrRkbNrY2NVWY8LbY7PDtCqtWVTD8aOnpxv58vaeS6DKGHbttSDyNVrvN1edr5Zr4ej48mGvdteXVTmjwz+8V9u73Q8nMP3WSLPuddsKAPWs/Uv3X7q1ut233jzSFA13mScD3/tV678P/9+4FdedP67Xh/WGnN/+oG3fPKGL/77D+c/fNmt7syBqIY6G46NbdVlVFgo8zXGvDd5qb0QEYLorOk0w0ZNhGR5Q8ioYoGlvTIfFrUA26k7fmJ5Yak305L1AKankulWAh466aCaVAJVkSKqCra21kdv42pkdWFzBlBpjET1iWA1J0tZUA2iOGklk+maWeutBjErsuWnnnpmbeV0Uq9t3Ljp7OGF6Wrc764Me2cqFe7JNEZbYHi1GlUSGJt0M9u294e0eaJSZp20jMNaP4zl/Mrx6endgZCDckilkVIWqUkkXHbxVKdTLiwM73v06Lbt12fdPrgCOURRnFSiczNMawMlpKwCkS4zDoTrs3lvjbOlLRkPJLIoiIC8sZa840IYZ5BACsmFQPCGnHdOCB4qSd4bbTx4Yrwscm8p8IGxrtTaOBcmIRfCaMMYMiY8gnVaBco4r8KQcU6MHIK3xLXvF2lV8jBU1hpPFgBQyiSUnAMicsmE4N6CdQ6ZFwIG2dARB5JF4fJiGPhs42jcbE48cfDR//r+3YWJybPv3tb95h0PGEMMBJJEhpwbgP82jjJH7lxkQrRO0UEG5B0yxiFgngOVYeyaI1w72et6u2bDiIPwwKFWD9Lcpj1DDgkACRAZeqrXKpw8WLu+4FpXzpekPvCZg7/7ourIiANunLAec6ddVopOOvL5767cc28GhjOwjDkmmAMPRNYxILbYwZVuF57qG0MoSFt7DnEIzpT+5w+vPPAUdwhZQdqhRw5kObh1GtCzzGg6d5Tn0iL47/AIgQGQd250ZDzNh3mRExDSepkI1zdo6xRqT4Bg//2LnwgFB6BhUVbUWCQxU+61v/s799722HQrnz958vixh4o84+d+LDHOnDdMcPDeecsAPa4LWz2RR+TobV3hRdPy7W+5eHqyOzx+vN2N9z1jOl2bWl8LFAASYeFx2GsXhjoZDkrKfZpmNi+dMZ6QuMRQYLVScY4xVP1uNj5Rv2jvJuco7xsRqiCJK+G5qCiqhv1uicDJtJGVmpEKktXlcuFUtzEufAStaqJAttdWufWldopD3k0XZe8nP3zQDbJuBrJezdPuku284rduOb304JGnH3v0iUObd7R8+3iFjz//msuWnznV766O1xJGiIEYZiYrcyFgw1Ry5fapW27c88yTJ7detGPbldfd+bVvH3/80WOP3YtDTHMvxurVjZEeK0an66YvZ7FXnYqxbhd7x/NlceGlrXrTDNPH+yunRrPNey65IRymcTOqbIz43OxFl+/4v//5uee/5hWHH/7FqRPLF1w4OndypdWoXHXrxfcdu8/lxUDbXedNSR3FzUrVjiQ4uXL8YKMBFej2l3UlboS+XhvZ0gord33jS1mvP95U8dSYZvXectJ0Y3Nn5yanI1heLMuTh+cOdapRt9cZa+04cUbFVItHJ7ZtF7OLw2E7UWX99OP7vvDVLyVJZffOC86cPl2tiphwtLd26SU1RcWp40fT/pnu2unRWmS5UJI5C0xIre3aSifrD8fGmomKmbZNx3ZWy/Ofw171njGS3S1m69vV9De/s/bLnx574+9smwlb//a528ZG425ZGA8EGHprV8GZEFnsaRBCc98dxwbDsaDpC2tlnBSZE1wpWb/+1tetLLRPHdonQii1+vU3ve01v/qaf/7Qvz76sztf+Csvfsv73vEn73i9cJ6x4oEff2dDOJYPSsHCIEDFhQw5imhqrFqtbT58YF/azxlw573OiyiMlFKeoFqLqnE0NHbHnt3dTqfjV8LQyBob6CUeO2uMkoJyYAEXMrBQTG1qFiW2O5nOh2Ei3Up3pGTX7ymuebHyIdlc29UBrLiwuT8bnA3UpRRZZH0OHXCrPBljsOayDvLQUwOw7iyyBIlJVmn6tLdyp/7qf3GTlnSuVATGuliF6XDABOPApGTVeh0w9JTlxbBSD+YXlhcXB2sliAimW5XnPe85Dz9+Yv9TRy5OWh4Jw0ibvF6r1muTS5155m2gJLDY5M4zIk9ap6U2nAspA+v8gRO3aTdManIdo+sJHTohAyVknmfIjDMGGU/zYSS4kBIZCQgCUtbkTg84rzEVWOOqtUQPBsC4sVSWRcCBIROh9AAIoihTwTk4ChDBeu1zYFIFgIIlzZFKNnDOMR4yOQhDMt6UPL/iphdkXAALwLskCTxlSlUEjw1pAhuEFYd+bWWhUuNJvWZsEQbhwnK3yDwY7YX0UnZsqeIpdJ2i66rj1nGJvrK2yPc9eqrBuS4dea9C8dgBfdNlIblifi35r1+2CUdys4bMgAMOCA62s/y2z26a2Ljie+XiyfRDvwHv+zX4+Ffh649m3ZwLGakAmKAklMefOWGBavXalt1Tg+Xuyf5CkATddkp3L8lAbhhPbv/F3YGYrCd792x/zve/97m2MdbU185IOrHw3jds3D7+FUtnyqELdZW0Z2LFZEtbLrTf/4/n/cUffPmHn/jYy699D5EzRe6Jonq9xrwbDM7fsvX04mIuiuZkExh60NZ4JlAXZbUacfRAEAeKPJVae+8kF4BOrSNAEBlJBuQ8WO2cIwBz6Z5g0/SW2+6wGHq1rVA18Musc9yPNqLVQ+brvwi+/2inta21kqZPH53dvmPDxi1brrls4+HH7zty8nRzsjXeaLzs+svueuihlTNrm8Y2Hz322L1PPKk4Def0eL0Scn7+pu22dKl2Z48eu/nmvZddudkVS5inqz2ojk2snwUbJpvDvpGA1UqlyEsqytFGfNkl22SQPH3klFBVD+kl117A056M1O13nyzKYRgpUzip9bbJyb07ty+u9RgFnf4gCoHQRkoQIDIeBKiVzMuChVDk1jsmOEPJjfMM0TqHjAGgdw6BCIAxxhGRIXkKleIoQhHFzWYUyEDFSgVAaqxWaY3Vt25subIMpGg2alww7WGD9ydOJ+h4Uamkw34QhO12Z3HQy12xffN0uzOcXVyN4tBoX5SF0SUSAQMlGAIQec4Z0TohZf09L48EOrMnzpwtTea8X6cpatJgWV+Gdz/e3bIh4ZCquiXJ7NCf
eXzu2Knj5116NaiyRydecNPE57+sTp0xQqgoQEDPGUPx7OodPHJCT947xrgDkIopJdCTR++sBU/EXJKwE6fusPlKyNETyUA4QKcxrFVRhsJ6xn1prdXOeSAzVBh1Vpc8soOL3b07Z/7hc3+/7/77jCsJoCiGV144JluJA87JA8vJMyDhLSMM0tT1NI83XgpuyfmlQPQVY+QcgEKU5IUDg054Q8cOtVc65abtcUO0ZdWwOGbcM+YZEKAk5IQE3oElBEDICFNkyNV6TZsxj0AcEBoN8ZxL4yeOdksNHsBxEOjREznCwlYlv3hk8OuvHzfBivOAYKApkym37dLpl7+p/tDPVn/88+X9p9WKCYhFzhKQZZwBGY4MEY1zDDl4vz5DQyBnPOPAET0QePQeEFEid86vnwVzJ+ainBHRnr1bFo5nq908kF6na6NR640vvPrBxw7mg2zm4l0n5ufufKR7+6NP7N7ZvGBXZe+OkQ0bR2VdauqHCZrefAhtwbnr9VVjxPoFHtahHBIR6YxJTbJqaANx6RyoQFGBksdOj7Jka/2SsYnLU5MuZnOPseUzs/TUcG0t766dJzRhk7fqA+dWOkOrtVBcW8+EJEfOG+CMiJCBZFTaQinoEO637Ks/OvPu5yV+1DowPE0VRa5c4mz1N94O11+V3/GL/NBq53Sn6sOYUzgsDEMeBAK9d9a1kmSjkHT08Zv++DeWV0+71EJSqrDx+29/2b6//Eba7YpAf/ZLH60kSVoYlQSjU6OrT3Xe/LtvueXmy4/f89jg+KkLt+/9xvfvDEZvyAZJEHgvtfVFvuyO7D+1ssZtDgytCvV/fOzDw474X+96d26NdYYLtt6jQYbrdjpvXKhUXIvmlpcVkZAgZQTOg7eCS0I5tyr+6RtlCgJKQBc75jhz4L33SAiM83U0esArg27HOeMdScWyYfeeu75Xq4WFRw4sK0nrwpEHICGlM856YuizrIhCXq8Fg85KFEfW2KxvkWEYqu9/9x9Xl1djZQ4/8RAjX6nEBNzlmbeWM8zWVhhRFHqGaBHrlQaR90DOWsYYAxCSa+skh0BKcKR1KRRygQhApKUQ3nrrPBfMk/PeIHDrPIAjACn4eiOJAZNKcO61K8Mo2j9nD/wgb8XRUpdrJ1hMgJ4xEp4DMWeJkZmOYaQiOmmGLDJF4QnAOVvm/yMqImDIQShujXEewIPgUBonwtAxcapr/+Yzp/bOpB/4vU0bRkbbK2vTUyrwRTielChc15SlDaqbGAtstubyuZDlspXoii21qTaKd//J5je8+QRnIePr6xwJBonHyAomWYTcGCNUpCJlsj7L0tIJsNHVz71FJzMP3/nPEoZBfuC1b/yVe77/5eWjzyAfjLcmBxT5Wtz3JUTpphqlq6sTjbFAxmYt464Ik4Bib533hliJdY6+gjmLuA+p7JT9gx/9yEuvf45qJalJT6DNwXe7uvmpb9vP/9f8cy9h114ezhUbP3PHKgXVRpMNsoG2BoEjee1zgeC5EkGk9ZDLIAgrukytdyrC1ogar7KxBteF9N6TcNbYVk3WK8LmllOwutzZvmXrWpbe+fjZ0tkwYDu3VXfuHVdZqouiMT4uuKtUZbM1FcgKyKjTFjrTI1V4zc07PvOtI1pLxhiiM6SFCGLpjBtg0Agg+vinD23YcNPMzJmolbMwu/UFN/7Dx47OzfYCa7wmBfXJRm12dv73/vQH7/r1K37jDS/45W1PfOgDP+uupW/6rVc/96p6XqxaXfSz3vT4bhfItHC2TAZdzYFxxUaqIrNOeKmLrDYZBm290vVlzzbqGDR8YdZyXQGsq3C8Up8JgkpeeiWxWQ9ya2r1uLuaeillAI0K9RdOr8wuEWIUVhvRmDcxgJ6enHT5yV6nffbM4UpcdWmerjmyPgmw3Vlz1kfBxMpqvjo3aDSD1Lr5uV4cNHbu2fWpT31pQtkLLrvqmRMp5adbYyMOiOly8ezTMmj2+mmtEYf1ikiIArruyon9+3HfgeUf//ChG6/fHkQ6H3ZVA5e75+4PhODWOcYFR7TWgiMgn2cDQAQUIZPMnTORFWVBQHEgkbFhOuRCcsEcWa2188Qls8YZEuCMdtpbzzknBiLiBOS89QykUgaIe5OmWRCEHCHPCh4FlsMwL521UqkwCHkUcSIuuFKq9KiLvCgL68lZEhzJQd5Lc4K4FinuyTrJueWcAWlU3uGwoELncTA3iqsb44lPfvuJxw+fBRPQugHWc6ECQMfQOnJE3j1Ld0Zkjiw7R+dB+n9jFALGkAeeOHk7XtXv/fNfff41Wxh0Pv2FX/7HV49kZWydR+t3jU8RxyeffBJp3RJG6yweJqUBL6Tw3jNCROQeTC7uuP1sA1qveGEzDFYpCKQ33SEM9NRPfr76wH0ZaoGoPbnCApPCr3eIiRiCM2Q54x6JwBvP19fpRITgEXMHZQa0Lk8FQu8AwcGz67H1+g7AujwD8FljxrOG1fUHAqLggjH+bHiEhERAiOfY0+A9EnjtDJWWMWecsdqkbNNEfVDYH3z1a77ji6rwnmxhojgp86H3HpE5YxDBOc8ZxBIVc93SaZDkGAIwBsBICnCefvDTg0La+UWaXyvXCpkXRgNDpzknWH9eFSAFGg+BQO+9J5IMw2qYRLKaBDPTI3luF1e7S+1sZvvEzKZRZ0AxTEabLFTDbpqmz/YpQp4WPgySQb9Y7q4Rg4ji2WOLG2cmNbEiN8O5dlCV5LUHJyMpBYaVMIrl6ePLeenjyclXvul10fQWD72V7lrrss3La8cOP3O2LK1fS6+4sH7l5S/Yd6ZDwx4TbDQKUuuNYE6ir+h3fvDtfPV0b225uWuSQnb29NyT9z+aO+mZ9IIaW5ITKx2j/URDXnD5heapw0vL8xo8SJzrDLwY2boxWThxcqJ14eatu371FX9/6M7HsM5FZUQ0emPZ0uH991xzw9bG+Nr4eKUSSYLizMlsYoPaf+DQxNaxs8P5ma0T3dVjWPedjOUeVNSMCLlLu6tPbb/iJXnvwt7JcjCUI63Jn3z5K1PjtYXe2on7T978/GtWnjmc2sWTxZGbXrphy6DsDzJRH0vGkmo7HJucXFvsLi0v8l43N3m7XWTlSLFq9t/5pDBUStYmfeOLXvXEU3dWOc7OHndM+dW1LC8uu/H8/+fNv/2Dr/8oTbOyNw+IZeqKbhG3YmMoz0pTFEnRiW3/7z79wuiyJd9Y41gTWhVny7NHz9SiKfKbtsxsuffuL1UqU46gWk+KNNuxbdoMDsej0guOUjntwlChcAA+iqWMpSAJXHmUt956a71a/Zt3//GQ0te95bW3fem2/srx+f0HL9ixIe6apSfnW7XRcrgsXfmLL37j6isuG2lOD8tuURiGXJG+/YffEZ5FAvLUguCcM0tUrdSNNa60AQ8qQUX3ciUwKtpvfttr/+wj/yxRauvHW635M3M6K+IoqjdH+2mfAQ3TXrM+jZxkQ104aW68ojWjV0898NTN5wt35m6/rU5je4d93pgx6bH7I+jY/O5g9Dxf5NnSySCUsnqZ8xm5QV6MRrwlxBTKAbI
lk7atySOs/fjR3tAmlQYOltefAIExjkKCDK0ts6J0xk+NNDZvaa6uZJLX58+ciZNwelPYXeieWhzOnnjgudee/5u/8aL//NL3rhTXySjMHcVJXA6GHb3gbG6BC8lkFHMe5YM+eC9jFfA4HWaMEaIt064EtETkLCcehnFeDJCJEixydOCZ4uRBOJQ8cGCZ8GVp89IJIbmKOAA5F0n0ZQbeewNRkjAkcJY041hRwhV5x+gcVMgATenDCtfWeMRABaU2QsqkMmGLcrDWHnRWA+4H7d7Kysqll720PRhEAgXaMIjl6NQgzUyeW9K66PEKebRRre5sARbB2tLlSS0JJG+vdg8dnU0ze0bDbb8sdk7i+ZfHLCLGkNeqvLbhs9+9w4C0ThN3vYLd8chg9+ZWc6T2yyeGh8+iExlZ7REMByIKvf+Vi8XGKXrsmdybRmKc5MPRcXr//wNvG1T+4VPD7zw+tJlSQuYOstIqHrjSnplf0dpKHvzaK9/wy18+0NXp4qmlJBRYDj7f//bO7ZugGh7LV1/7hhcf/cFDB+f2f+4L743P/BX2TgdARp5nTcL5QDLySaMsD7R2rH36m7/+9D0Pe3MoCaomZFkvYbXtt7z6N+/7h8+fWFxa7ayRIN/RI9VGu7vinU+qUc4IGXYHWaMWWO85Y5mxSkrnUZOrRXGhSykYIiIKk+VCcgSMFL3tdduLsvqfPz148xsvgo1PggViUK9BVC1b2/218eb/eOSZWn0kTAYTloKWOLm6/PgX9r/qlgvZmfyB+44F1H/5qy6968GgMrVhcuP29JnHX3pRNLTw5EJxZq3fz/O0Wbtux2437DTdYPe21urc/orYGI4k45defPfPfnquYdrNypQV6MNYjW6oR1V1/MiJQTrspJmhmi5FdbzmI5+QWFycCwK/aWKEuOCMzZ2cL4GtdHqo7faZsdMCuRJDnQFHcH5qdPzC83ecnV3s9vtZWfqAWTLkDCG1+0PBQTChCyuE0I6kFA6YM1YIhohBKCMmR+q17Tu2brtod8h5d7GbG5/E0Wi1IiUfaUSMN2rNGhcoCTqdQveHU6NNIbGX9jvtXq1eE0liXT5/epZ5hw4qkWIMnbPAZTWOCmusBy6494AA66YkxpCQkfdcMAQsSre8tuK89UC1ejjMSvBQWNMu3ZOz9lNfar/u+pnzL07UoLbv3uNrveH49svqGy6MJ7prs0eyzqlto7S2FmgXOJeD99ZZBiAk08ZZ4ogEDB2gMaW1TIRKEITVOIyCYZoWwxQZAseF+WPPue6Ke+9ZVioIVFKUfUKQXAFFIDJtPBELIqGEsMbrsowlJ+QP7nvs3nsebEQulGi1dxxbTbZjkyfqMTblfLB+M75O0CUozxxZ8Hanwa3M08LC2WqNWIiABMiBRdYxD03rm4888vQH//qHK307OV5971tfJ+Boo0XnXdjiNQ3KgBXkOBAi84SW+R7KDBW60jNQ6KxA6b333hEPRUzn7641q4OeYwRklSgoZ5pJFlmNLKBXvHwDxgMIJl0+wThD5jwD4lkc2Vt+e+slN8z//Idnvn37wuKgmeVhaVVu1+27gEjnSKCEDJngjABAAjLGEJkn8g48OPKg3bOlIlAqSqKwLLLl2YW8j3qotm/buf/M0910dhTFtTtGOh3z+l97Hobq89+67Uf3HH70sLn3wNmJ6MzubfUbL5+6/uoNYQzOmnJ5ICKmuPBzbRRC1seBMwJvzQBNJqKCQQ6slKppi6eYKZioCiKMdxcuKswIRDPR7t1u24nNl19Hy0dGf3FvvG/WH+eD1bgkSMjHlRrTag2yUhtGyDkSADlfeo8A3vjSM0Lmffy9R+CWny9f8opYV/ooLZSB03GR90QNZi6Adz2HP/xj98hjHhnce6jb5qpj0SlZGqeYYlneGBU3XLwVpib4cC5bWAhrde/7l115USP4WtYnx+n0qTNJIohcMSw7i6vTzZHvfeFLP//SZzvLWVSt8Lr4xnc/+oNvvndz5bnDzsJoy22+Yoob8bH/+9iZtTCIuSMqncW8OxG3TL/nA4UIiBwZeO8ZoODcOc8lIw/DQfnG337Llbsu/tsPf8DkeSAl51g6xxwx4GgiZjyRBzSIzpJjnDNEZx3n3HvKij5jSkrGOJpCM8HRMxUzDBj0Ss+DqbGJ+ZU59OAdeQ8IVGSZZMKYUsgEDUtzG0YsCEQ2yIkhR9leXETkIDgSOSAzTBv1ZkrkSoeBHAzKSiNwBoy2njAJKt6bshiCA8EZISIwBt47Rgatds65UETOE3oIZTBMh+QIADjnHDFQqixKAu+8B2SMK/IlAThvkBEyDIgMeePDClaXu0NjrWMWpMpTW6kFxoIUijvYPDY1PRr1Ov0aqkTSoLvGa3UiLRX7H1ERYxyQO2sB0HvPkAPjQYgesHRGBIIR378S/uFHzcuu2n3P0QObx/SVu8VLXjrZGJXxqGY89sqYeh1GRZjY4sRT3XlDkapv2VmUA6WL7VPJ2TmvgIAcL3U13tzLRni0Cq5joQySRpaiIpAVgVYSsFjCPbd//Y4HHvitp//TQTF/7OmPf+5rTx48rnynIc576In9NRJxHEcIf//uv71k24bXvf7V01tf/qUnzQve9P65B+5aXH5QF4uME6IkTwsnHkeZeF2htc6H3nn5C156fdI4pVBTvzRDE1U4etNKxpna/KN7h3ryvB/+y51n2dCNT/pB35earAliobUPgpBs4R05762mgHNEXhbW6SyO6aqLZ6LQe+t6g1JrlySqKEWpqXCs0ysVyEBgdaJehH52aX60XhmfHNm1vcHdWsgyr3xtdMYQDYtydGKX12UnpziKvBdjrbHumcc3tNyureqpI45z4JxZQ857KjMAJNZGHh2ar33+Lvjz390MwsoYJs/beNM1256ebe97cuXCLdVkm/3o37/hbW/7eG+g/+zf7v+zf74LrK8Gyejotqdns/M3951dUVGlEcfp4GyvfdoyLWtjrXqj3T2VFWlaem0tYyqpJWlmvLGVOCg05bkddCwLzDB1cQVrYy3ikWCiFgSD9kkutfHlIC2dZZwJKUxRmG6vbxlElVpnjSmIZCthFbnWmfWmt9xeROk3bN9mBzhI2wMDWTlMRqoVHrGgVadk99aZxcXTldGWCIqNE1tozdWwvPKCDZEUs0tr5zXY/Nm1enNUsmRhdZZYp1Ybz41ORlu6iNzaIIjl5Vdu7gzssZNtUP7WW7bn/XRtoVeZOKeGHaaFFFz7UgrugbReJ44BA5RMWNClK5nDXBfE0HnyzjltHJcECOSMdU4bQwSeeeezsoyCgCsFaEttUCATrN0bWOuklLoojXNDBohcFwV575wNOZiOBQLGuQMY5DkgxkIURSGUkCgHpTEeiKGUQjAmEHJjALz0gjzpPAPOc1t6EqmBCqiW6Feb85fuXanwpd5s96lDy3kRMbBAGohxKZgw4Kx367pTgPW1K0Pn13nOgM8inddHVgAMURJJpnsvfm79I3/3kvpozNxhbrtvefVIZzn67A9KQCmUWGuvbdg0E0qeGwd+PXEhZKhLOzu31G
4P0ON/13mElIzFDxx3M7MTl8yM513XG2Qn2uobP34mWyu1ZgxLAA8MiGDH5o1HTpwCIkAB59iF5MACAHm3Dg8iWA9+AJG59aDq3EGci8bXXRjwbI3o3Of/d4gGz7aKwBMyRu3eqvcO18FG5x4Cz1oYgCEiY558aVyoGEce1yZ/87W//6+f/ccd2yeX1vrMw8z0NjIw/8TTMpHAGJccvWPAOGPgIWQ0FeHWplpI86Ndk7p19SsRQEfDw/N0/6IBEGAI0CGVDLwU5y4tjAlABw5LQ0rJdbGwCoRkGEQyDiV6d+rU0mq3KE0ZV6Id28aqtYSQpBSuNOgwjiNTpOvHdPTwyoaJ2rC/ApJFzfH2mpd2apDPauMBBBWeJ0C2ZMxEgfDWeEe69LkpZvZMXfSca6KpXdUNTWqo9pFjh568P4goRv7cK7bXR6rf/cL+7Zs23bv/qVOrWhRiZXVQrQY2y53WGzfVdu4efeKH3z392MHtW6e2XLpr4dABknOXX3/dt777k76hPXum+zp77FT79ZdeXt1MD+1/UnIYnQwoTFVrNDRyca0/GCwr4seO94E1vvidf9yz8UVj8UivV86fPe3yodH5VVdekHbS43lxYN99z3/FVeddMvXUibNXbb1UThYrcyQiuOySix89+FSeDWb7yzu2bGhMjAtcCBuq25+X0RZbLy/aeV1/bm55/lBrszy54JJq3ct05qJtxgaTfk8/oEEuXTVY7GkwtG2mNTtM51aO+ySsjKqF0z3RqmzesPWhbzydOx0oaQdu7sTjc0cel1IktXj7hZtW55acd9rTQwfy58ubLr/u/P0P/+JY8QUOKCRjiCLgU5OTRLiYpn/we6941YsCteHeMjpjlvLKyLilDFqlmqzcf6q87w8f3jYzhvE450EiKSsKY+jAsaXLXwQF5RhwLhQH2PicmcpPhsSENjrvWzBsw7ZRzsXf/unb3//3H/zV33vdw/v3feIDn6kFycrSohgJ/uYDHzhx95Mf+ehfXXD1ecsrZb/jp8675rm/9bbgjs/86HtfZIjkMdc6cMJ43ukPW+Pjc0vz2tkwlIqrPMukVK60oEFKnht94vTpT3/h0xJ9lcugkiRR3GaSJ94RDNM0K0qAgEF1ZdEklVTB0Q+9/8rmhhJKe8ktYYWnbOdkP98qykuFiDIt9cRk0JxnxSkazjpW8gS8NiZ7LB+acHSTLwXwjmN9lIWEHC3QYOrfPn76Fw/XOyYwWc6efUVUOljr5YJBGEQiCbzWp+dXT97x4w2TU9I4tAGhiGrhKIqidOmyv+ehgy9+eQ0srLWXxzduAfRRWM1LKkstuHLOFUUexqFnRJwREnDe1wUi8w5FEIaVSpEOjbXMceONY8Ms60eq4ohVavU0HTqj4ygJkFlXWGe4jCUXzjMuQg/GAkkuBv3O+Ph4b9gBbxmXyMFoK5C3V9txEnGBDKUnw6WKVeLJMc4AEZA7Z52zPKqoSg2cc912qYdc+kuueL5xTnEwOmMMB8PSA0OCIAg4EseKd9qi9xSB44zyKIyMdlEwstieW1nOh2nJhRz68N9vz0Llt92z9u7fvXT75o2DTvOtv/u5bqrCKNw6kZyaX9i1dXMtSR4+tja3kh+fczyQnOXGAQowDgSXpfcX7Jm645tz9/eAAr8nMBOVoJkWlTq0qsOPvAfechje82l99JTzTMRJgMi0N85DWdqSu69+/RsbN0yDD41xPgrikaBQ7OEnju4/ePr882Z+fPvC4TPs7be8Zt+Pv/j8a6L+YKWV1PTafFzdaXmoC6dkglhCvgLxE3tuysrubUTjvno1j6/tpPU/+bu/nBmfPjW7rB3fMDZzzcW3fP/n3918/sZee7a3uhbLMK5EUSjzfGiNc+grAfPeKikjGXNkjLEiN8iYFFSphADQT60Po4984RnBq0k9/o/v3/OhDUmapq7nTamWz2o9hOtem/9V+7y//s9jQYU5552hvZs20Mye1cLsuPp8VM2nHzzxL//09Q4JEuJf/vWT776Rv+KyYmL7xlf96XwahNrxOx45/ODjhwnp2ovPe/XzfvXUoX2yUdWnjp3Y//jGzePnoqLMJ1ESxCo1Ou3btGtPLfS8Y/WgftU1F4KF1LmoGukiL4lX6k2pbZaXutBj0zOsUiudaMShMxlntr3aKQbF1NRYo1L1adDvWaVqGyZam7btCrisjTeeevS2tXa7XgkHhQZkWjujdVkyY4gYU0IxRA5Qi6PxxsiO7Zs3bZrevHWKA54ooD8oJidGGiN1sjaOIwJRahuATMKqkl4I1qxWWOj4oi9ZiaHDFmW5KmpDa3W3PXBcIeONaqS1U0GQxNVemjlCb5yUKDgj5z2tv/eP6AgACXGYlkoxILa+bdJpEQTKOL9WhgdsWH+i+vD+1Q0bvJMB1Mttz93Q14eHCz0meW2GXXN949CCEXJirTtnrfHkGQAi8+SNNoIzxjhKEARX33jjgQNPDbo9IYN0zYswBJYjciGkLspT80uhUsbYzA/JOiE4K8pLzz9PQ/jMydMOAqMtEoTIZMQ5kBe2Uq/21xxHFJJvnq6cWV4J48GOvRtVEgAFRApRoXdIKYqcKX36yOqFF7zN6hBxNA9rcVN7wTwxBAaWPFY9tR7Z1/7Tv/xyVqhuTtdvv3rrzAvf+PYvyVr58f/zv86/qGDlEjEOwAkAKIegBDPUZQ554m0FMFCSAUoQ5Km0YC3znpy3rPTVG296wet//bX/+c1P3v+L+wWLkmq1UR3svmp3qWY1NRAiX1qpPCMGvkJEIMrx88I3/N70pVeY23+0+sATsNgf6Zik8AIZOGuQkCGCYJ7IOfLe4TqTiIAx7r1HBhz5+j98/SxY6fT6mO256IL8mG+1xrtHTty/72E9sLJS33r+5m2KPve5L1m+KkV07XN3Pfr07FrpdYELhs8/o+85dHTXPbPXXzxxxeUbtm7ZIHVWuCGBrTS47bSZQOuAqwhVw+cFYFs4BzjHXM5cqruprCnbn+eVKvCW92DLFIEzOaY3BtNvwW0vfKDyqbPtH3cT3hqOydyascnJx8/0Sm2UUlxw651G5wDAE6BfZzRpozs4+vnv9y++aaOLTnnWp0ha71wGTrPU+HjgRupQZcWmsfDGy3dnUP37z+4/UxIHycOIY5lmS7uufYHlvtDDcrgYxud1lk80kspvvO45f/+xRxBRhCLPS525Sj2MwyCAoNfriqqECeWkH/SH++468/TJXS986z4pfSui19+y5+EDp+58vGoECWYRmfP8r/7+o7Wg6hQBeu8IwCMiR45I3nnyHjljEvulPrXc+/M/ed2nP//pxfnTOtXWklDcAxkyZWGU4uuCMUQg64Fx74kzwTgzxpRZ5nxWq1UBvJBSFxoAOcruUrdZH924cdfc0ilnHDLiCFmRSc6ZUHHcRJMiujQf7NqzpdvupZkmhlIJAMwHOZcADExhCSGMgrTIPCAgeoKgooxx3nnGuHG21AMgEJwhE97b9dsWzmSgwnU0qmBSYqjBeO9Ko9m67Qyw0DoKAyLnjBaCIZFb30MgL00RBrEz3hojB
SdjFJegTZ5pC54xYohRHBKB114qNlWPd09VG1WuoKYCyZkbGQmNHzJgI636/4iKEDlyIWSotZVSGVN6T1JKpw3jytvSec0w3H9a227Hje4+cXLuR3et3P3EkX/44K0ybIPSJE9RthZXAmLZ8kL4lx8wZ3N94WVzG7YpaWrv+6d/+u03/k215cv+kHkxsvua6176x7d96q+a+Fh7SU9v3F4Web+9WqlVMQx1Uch6Y0TCD7/5oUvOD7qmcs8DT77vve+nzY1f/cO//Ztf/7Adrr34/e+/7bufHqHKgUdn+7Plq9726n129Mz3/vNNr3rbK176wS9+44tP//wz3Ba9bEiK14k2TjQXhwuFOX3zrVdWx/tFd2jJSeBWVoarPeFQhgWlPc/dhu0bHzymrrzi+bfdf6BdWqk4k4GSynpLzBfWMVELk8A6IwXLh2npsu2bk/O2NEIsfKZlENSayXBoOOempDCQTOLYaHXQ7Ud12nvlhpVOJ0warVCS6eW9lfN2bwUmlRhVI+cVpck6uhGPDvPHXJmhQV92VnomS5dqU/ELb9l2cPaot8xbD6C888aVkkmGg9IYoOZXv3zvG1/wkl0XmnzYD01+yfbJn9997PCpxhXXbBtks1detGtqKjh+sqz4wHHO0I1uGstd9LO77n/VtdeNTI0MB2WpHZDt9brAkqCiBr4va3FzpCqBK6BIqXDQj1Jrc7O2mCoZza2aSpJdubdZ6Cw0gffSaZPnIk5GRSCGeZsForPSJy0M2VCzfpYacCVCWfhYSQOscIMo8DZNB2knCNlMtTHMltNeyVjSy4SKK1G1Ouhq72Cs1ULw5TDvrgxW2v1Xv+Sa2YOzVWt3Tldt0esbC7KiWB0x6A8GBmCkGoEfWKNKDIEUY9Uo0Rzzm2/ZeeTIyh0Pnlpde+Ylt25vNKOkqdbPAimFB3LOSiXzIjfraDHBrXPGZcggDEJHRM4VZZlbJ6OgNDYblogASNVqQp6scxyAIefIEDjjPC/LQV5ackEcGmAevbUOANLCCMG4AsWEBwAuDLAyd4ILRsTIBVJYY0tggjMg3s3yVBsETCpRKMNer9PNCo6gbZlbEzAMJAjA0jgFEPBBM9IXbUuiSlqNV1zZZ5KFYZRqLsg4yRkKQCq1Zow5R4ytR0Lkgdi6CAwBiP03tAfAIzDBpde+pvrvf+9Vr/71vdVwhfwqx5KYrjeyay8S3/ll2SlFoX0nzTZ5Wpe7AwAgrPd7s6yYO7vojGeMrw+9vCfrys3bNq/0Ov/67/uakZQqLopcyLrpSSyNQO289wDAGHk/2twG/CxZC0jeAxGBB2JEfv3nECASARLBuYiI8FyHCAEBCImtB1/rx4Xn1GcEDJkn/yyq6NnACYEIjDXrDyY8V0Za/2YkXNeooSMAkIIhQkEWsfzX7//fK198i2LF8sNPBpHcsfvi+TOnW2Px2mAA3jtChusress5RgFXilgIo4yXSKc6vjDME3Ly65Ay6ciTZeiIiCMxjsjQn1tvA2NI5EXAGPMEIBSTAhlDY/SatumwTEvnONswVt28dYKjzYbDsBJ7B9V6td/rBpEKgnPELhxSbWdlmA2HpbGmKXjsaXjjyy74/MfvH21O12uJEirVaVKJGHPlmg5RFtZ0h7DxigviSzZ1h2XuVobzg+NPPuSzxSNPtZuNys4t4YkDJ259/hadLk/s3Pur7/r1j/3xB0frdcZKizA0lA3Lg0+cnErU1IYN3WF+z/f3xY3khpfedO9tdwdV1V3qzfty73MveP0LtgfjwxPtYzJRNWYLY4DptMN8zoFJE0vD5VIozp44VFftWjpy+fY/0U/f3tVmemxs376zx/c/fd3zXnzZrRdvvezC2YWnGmN879jYLx68/daXXbVhZgO5YvH0SSKqNaYKa43VipeeZFGyCES+PL94Kr3u/MY3/+Pf+6YoF9Lde0fM+eOTF0y3FztJZbxRCRaK07nJRpt1JlFXxYGi3VkphnponXP9Iddh1s2PHXx6sFxAgWVprNaoJPO+8KVLveNgvQgqQWYg77Y/8Kd/8Ouv/8u3vel37v7Rv8WyzhVzhnrdYRLGwIUIku/d9fjunVs3x8PmmIgnqzlkHCjZ1pzPS9nHwuAylDPjo2XmnDFmaAPgq5kDJYOGyVkO4ADcEw8vqHgiDpgXCeeR5KpM9chEo7G19rVPf/28iy/qLKajrUZrRD25r3vltsv/5s8//rxrLoqmJy6++fljM2/41Ic+t/fyixgNp8Z3CkLGmYhUb6j7/aJVryvBlvs9A856r53JRRlUImvc6Oj4wJqRserq0mKzUhn0/LaRmeX55Syz1rJ6o54W/TzXgnzMZCVMrPbNcHD5eYsvf82koqeImj6o1XbvIjqbQ47RFu9HWNFlinO1BYhk2Nb9lNW2RcoNTh2BdmgzQ1FfBVU0Q8asc2V2Up88NXny9M5vfXMBRyaYyIwBwc/dHuQlSgAH5B0NU42AmkLdgb5Iq4IxqQb9QVn2NYEpdV64YuAev//gS2+6pKCUvFWctE4FZ6b0tWa9LFMPVqn60uLS2GiDkJiKyqKdREESRmWWkgPnPEcehVFvOO+JVSv1dJjrLOXcOkpqzdFuu5/UEigLJRV4xhnz3hude8S8KEPvo6DinGWShSJIO2050lCRslnZaI6CMw40U9HS6nykKpWkxqQyBpwumGIejGAckTtCWxbjm6dLq+b3Le7cuKvMOlKypFbp9YbDwUApxZnLC1QBq0SNbneJSWw0N+b5gDtry/bq6izGWXO8GsY7fv7UySz3gntA9Fo+ccL84EfZrt3iU1//3DBnjdFaPy3n2v3te/b+9m//4V0/v9O7xdUj99WrYbs9FGL96ZcxJOcdIX/Hl89++HXsmk18KPsjEVQTYAGgkqU3oOGKy+CTf8J+7f1ufpWHyIDRME3jShwEIsuK7kovS/MoiKMkjCpJOhxGQckQnPMLayk/dpb1Bpsr+YsvvvIX3/3KzW+92KzMSlYCHHXGBUnCqDB9I2LJfSbCOBipDg7tP/nwkbPD1Y9+a3amOj3sUzWsTG+Y7KX5/qN3bts6vXnLhnklo6QRKtZpd0NJcTLa7Q4YeuNNt9e36CIBRVlY7xhHxtGSK4wViFOTI51ueei4lgFVPHzqP1b/7BXbG63FzKaB4t2yRqzfPfDMm97xwue88tXX3/qXb371zbe84Lkzraljz6zJTa2Pf/QLs4uH9HAgAqpVoqVV/coXb9469eRTD3rlDqUr4ZJhQewUehHIbqrvOXDiyLv+P7c856o9G3YGQurl5S0Xbn22VdRTYypmqih07p0vcPPGnSEPputR2e+LOOwP9dFDs6q0rZHpuF6dmWoePrW8tLicWhc0auPjzfGmePqJ05Lx7vJykrQ82WZlElUITEgFtWpYbzU3TM8UJt99wYUrc0tT1gxKL2Qw6Gft1eUi62d5bsiV1gMwweTYeGPz9IZGpVkJGnoIhdFhGDXrjbgWemfqtbrgPM8yKQKykKUlEoWBNM5IkEkSM+qanFkDOi0qSeA0VRpRv3BZWkgpIyHyLGcM
641ESpkOC21ysOfUqEQeAazxiFwoxRgXQUCuzEvLkEWhLI3V2hOgdnD/mbU6cxfK4sKL6pnszy7u27ZnS5FJYxygvuG6Lf/33x5Yy8zYWDLaiNu9AXqvvUMApSQ5QkAS/A9+/3evv/Kmn97xva9+/qtBxC65Yu/xU2dA2EFv6BxuOe/8PdfeuO+2L/fXMmPXdenQrLDf/40bEY/f9Wh2fCnvdHQ6JJOXU5MqDLzzdmltJR7hxoiijz1dYzJCNmxtmCQuvA8AQuYFIwPcW/Dd5c6ho8NLro4hAGuChx5Z3L4Jz7+wwmXTamHyCOPxxVO99/3ph7McHEcmeKUa3LPvviVnq67ye+/65996w2VvfctVBCXwTCruHLcFdJZ1QSNH9utTB0/f+sKbR2ZAhGvIgUs0g4HVtcI2CdacLQaDlaNHDrQXOoxQcHJ2SMMeFshLLkUJbIih4WCQKaLIgSBwJKZEPd91dW3Xhe7qnx363k/aD59yXVPXhpBLZEjAnCf0xCVx4OsNc2stoUdg6+13ZPy/o6IDRxdChou9dM8FW1cWuo3JWuEpqDouRJ72sjZceOn1i51eKx/ahf4lU/W7nzkbyMAaMhQYEz5+pDxy7Oy3fjK7Z1P9puecd/UVm1oTNh+sonSMDbmUyFJfGpDEeIJALhsA47YEgcp3liEIUbUdpAAluAEwJOQQJLmdZuF5u27Ai08euf/p/rKpDbhYnD/tBKpK6Aj1OvrYWQfEuSD0nsAb65xHLn7yGG382yf+6AObC9HVcSkj4HUo+14msDIHyz04a8Sw331Bze/ctGvXRnn8YBFUg4B7xmxN5JEYChqGyhU+g0DJqjQwuPGSzZ/Ae5d7Q+K8WgnCOA6Z6BqXTCZ//vH3fOQvPnzm6HwtZGOy9m//8Q3nBtW4wnkwu1J+9PP7lFTIBFJhnWfIBARHjqw0WjmEiopzDS/kbJ3Bwhg67x1YcKA43H33T37zbb86XFhMAr6aAQukCFSelkoJYKSNXodIcCEEF+co5gDeAeecnBOCGevIGMZYrVbX1hWO9lx+1eLx08dPHXLotbGIxDgLw5g8IFel08Zo50mpaHlpmKWpd86TB2QMKUyUIy+E9A4BCYFbbdanB85YZ0OyzlpdadQalSRUYb/bk4pZ46zxHkDBOR6TdZ68I/JFnjLGlJJKyWL92k/cA3IuBOdCce89IoLz3nkiioJQClE4S+RJQBJVrDXAyXI0peecR4IrYMjRF0Xk3K4J3oyGSogk4VEVGWdZNqhypcKw+P8boDkLjBBEKHgAFKBdBWQguJcqjhNTtJ3OHdhmPejydnfxbICCyeDeJ4t3//GPPvPZtzLWQd6BpDpctZ5f/uUH7GPz9w0ys38xa9ZdJZl47ZarN1//rs6JbwOeZBJdZixVks3XjZXLI+H04aeX9t78l0/d8XkejnGx2m/PxmEMpO+676F08eyacxP1bY/f9qCL9Iee+s1wKRAz284cPsUoWZ07ibIG9fNysfG//ur9z73u+nu//eHrnvvrrZ074I5gSsnfetvz59JTv7zv0axX2G7vtk++tVJ9uNC9oIJmmDHmi0yXpuRSmvj4O/64/sDvHP3CF7920e4tP7jjQQksrkrOudVmkOZRWMvSDieMwlBJYYp8OCiQipEanL8lFqIEaxBYqZnxEIcqy7JOp19pVAIpnSnqCQayoHJhsLA4UmmOT6i85BNTu2VUt44zmQx6q0rwgPuFhcdHxkWlGidxrb3UzUrn0NXrle3Kv/HVO773k1P9DiFbv8/kyDkyA55hHMmwcvsB2HbJZMK1LVZvuXXH5+54+OEz/sZUlZ0l4dLXvHTP3//T3ZVEobQceBy6i593wwM/vS3raRWb3Gipgjxz9YmtvRLCaiTsQAYqiGqri30lo0bcsKXW5WBiInac+VJASt2BnZ3rb+EMzWrpNA+rlXhisb2QZ12lrODeFt6gHZueWV480U1T7U3AgkhAJHiedtKy26hWrbar7f5IY4SMDZSMmmFqaKQVMuESdESmTFeaMxsCaaJKmdm8MjkqRuuLZ09FrWpzZNuhp45etHPP3LFHBrn23LbXVgdlr56MV+LqytJKDNXC2Fq12Wg2uu35dnf+gt2T7cXhmYXeL+85edOVW9ZZ+wCw1h9475USvZUhE7IsSyaEISRrvYdQKbBYlIW2FsCBx0zrdYWXNpYLrp0n8lKKOIzKQhNBqY3JM2Ot05YYGOsGWcoBdaGF5N5BST5UnAnurT8X3nDWGfaDIEAEKwXnzGudJKEMIg5lHIXem6Iour1+luXGEZIvdAHMV0IRCM6FDFjRiMy2DUt7toHRK0xpom5Yh5W5UueeMSBPuN64RQJE7xxyJPJw7iIJ3vlndWDrvRYA9EhInosSzxsVf/XnN1z1XFsRx8E4R4px5jwQDlqtQT2B5b5hgXBWHz56dD3KWSf+rH/V73RzrdfLOOuTY+AgGY0360uLsy7LV7IMoc8Zq9aFg9xh6ddBQIjkSTA4cvIZsIQMwdOz4RCdG42tf6JzzSI8FxAhwDqt+hzGGvHcdIyQ8BxsiK1bmteJ3f8NLVr//mejJgREIn8uY0JiyBCBGDFA8kQMCch7HwoZMxgJ/eLRx9tznbGpif4wv+eOX1ScqZjO2Iw6M6uHpXTAlUBvPDFiDIQQ3dxGSiSyrAWgnbcO10GLBMTR8/VfggFb18MwYIKtAxWQAQALFDsXHZG3utSahnmpDQIxGfHNm5o7NrRkKK1FAEhTjVSWWU5I3uqwEq8f7rEjpUB9/uWjGle85MFowKQOW8tv/sNLv/O1E3EpRuKqyfqeNA+xlig7tDzgG3ZMbblkxw8feWBm68adm+SZ/Q/f9Oqdd9x+6Pwtlw7mG3fc9mhdqsYVstMpW4uL+xd+MTk9wotSYezaqfW+CjgSVU6cWLTaRQGPm/Hx9urit79BXkM9eukrLqSqXiueYqHsZumAp62NtWF7aThIi4GOakHAKzKq5uCo7InQXnyhUm5w+OB/3okHlZmOKrUgmjo6t/CCl/6aB/HQXfff+NypWvXC7z36wyXojV7cRKXDmnIlW1nth/WNOvchuO7sQVmtVWvJWrccESZQgRKonH/wwceCRlhv8JHpWpuxwXKHG2eLs4PFvLZxdGpmx9RUcN9jj+nSdbIVUZNbp3D+TM59kmdQLpZZV2RDCCSvCMEkWeA69QhkvOn0O8wyiMNqFLSSsL86/5V/+p2fKGzGI0b7QKrC2fZax1ZsIFVdwPyxzl99ZPb/fuXmqjnizOoQXBS4u79zZrCYbKgLGfCs5xujlKWFdlStRkVa3neo+67qNkvLwjscqn0/oW99RxgB2vugEtRY7eK91/74jp/NrQw27dphhsHLXv7Ct/9/3vaet71nYen0b/3BGxoje4tOVtkSVk5OnD50aO7scePKiihrcfaiW3d991txey01mWaAXPCRyWaz1Tx5/EykBKATgiNj6FkcyiLvWtRZJ5DWb5yZPnp4npncWhPGbKQ5cvDYM0JwAlZq7zXZATKbnT/Tft/f7vEjHtxOgxcYSBg
wTk/a4U+TatO6igx9PsyZrDOTuHQNyDmqI6WDVQDOuBCyMqnTIRNGo6TO2D//xdmvPWiD8ElU9aI/6JelChR7dnXggIVxZF2RaZNZHQSBCORcN1XVcOP2mbkzsyqSjUqkl1cVg+nzJhdXw6XBYHDaTE5upkGhuXPk47AiAxyknTCKpFXOFmONhi5KrXMZG6WUMS6FjDOmdcmC0FvdzlaY9IGseMNSO7zy8hsOPLP//POv73Zn47AAWyjFmZBZUSIjZ40AASJIkqq1ZSBrWudZPmAhWG8pLzgyyaTiPDeF84wjr1RbrjDpsCcYgGOxEkpJ8AFDACaYks4Y8E75qFppDTqrOi9ltTYYZKbUjAWNkVpeDD0K45jTJs2yMKAyWypTlyRcVcLcD8erU3mZaQq2TY92T6wQoPMmjEKTwUNn2gfTp3ssxIS7UKD1wFknhW//+KfHnrw3jFjpfZ6mnLOAk7bMGmAcJfcl0QDkJ35hvvwH482xZQvQ8eA59HoGOIsjlhd246T/u98Xv/U3Gr0kdFEUutIgh1o1tJaCQBijkamyKIb9Xq83kMBqIyOri7nL7UXXXvqhT37+3yu1t/7Ke5bOLtcN2rgfJQOvjTMZ4xUmRr2TyKTtIuVx3Npw6fWj/hdrpx47tO3qi5uNyazXac+d3jiT9AdpsxmcOXSwJEMOOAQjjbHC2rgSh1ERSFjrryVxrVKtMPILs2fRObAOvEdgVjuSorvWZ4QYc6U8urTCov/13uNf/NeLpXuy38l5gGEY22FGi/dMj4j9P/lwUYS9ePJzn/3OE/sO96zP0qLQZeFynyFleX+xt5uVmxt+zy149ChdP+1/vICpN1wRomhE0lvfXl76xg+/7Xv0jte/YjSAs6dOrZ8FC4sLeZafQdGYbMo4xGp1ZNPWYpAaZtvtfHVuSUnaedHeGjRPHD9Znxj1lIzU+fzc4iWXXaMEd/3e2trA2NKT2LFjW6DqWBlhQvXTcveFewcrw+W52VDy0TqutQebN4/rXPt+0ZocGxmd7A565NN+e/7kyTPtQS8dFgRMiahebVaSSmu8WWtU+v1UCOSEzqHWxAD6vZTI1mqJQCxNLkNeaTRNxw+HutPN0yxXkWolEyfO9obDIkuLKGA7ztt210NPRlHovFUyjBKZFqVnnAW8Vh1h2hw7c4JBAnDOS70ul0EmiXyel5w5IXgjqfU6bY+gYknkCm3arqRacLpwnSfOtiYwGdHaLY1NV6JK6Fmqxu0LX77lm99fc57KvOSADvy5hIgQiWTAucBt42U5fHB82ozUYgJ5yzWXDdqnG3H4kt+8dXEwvOl5r/jFbY+bYY7guEBvAJxrVtY2bXq4OdHZsB2sj4ZtK2STISWjhnMXBOHyWdHumNMn4Ymn/aMHSmDBNedv4gjkiUrNpUYqiBWe9Yzu7Lv9vtlZL2RV5yztiJNnx46dWRzfoMEPLXLVGPn57ff+6Ac/73UZkwFDwQT/zi/u+J79EXE+GHivRv/t68+kA3rz266vJtZkgyLVB4/jV7/Tueexw66o2p7+yg9/8LnP//EYHgMzWO6Wh09U5haDr36jszYUMmaPPnb/wcOPZqlWHPQw81KFM7v+z2eOXLV7cO0Nu0daiLEGZr2XiAJJkXMIIUIFIIGod/VLZraeV/u7Txw7eCawulo4qdFpzwLOjbXAQBtDhAyIM4GAQCQ5kXcewOpzZuRqazpN8ycPdY4eXaopNdqMhlbsvfD8lbmlPDdORsfOHi3LYu+GVmDtdVdsPjvsP73U98i8odIwJbkOakvGrR0p7j/8UOMr5sLtlZuv3XbtDZsTDgZL08uJDYIqEXY4F1QQUCS4hIh7Qq6EJu2ERrQQlMy3oZiTpcNyWFobX4gv/aORypfTr96R5RTJephmmoPMCkdMeiYYAXjn/bn2PUfgEixZVI3/+mX32Ozpd74ZNl3TIOhaDdoAE+AcrKzA1rH61lZpljo6PCt02QjCXqF1wcjmG6/c0dx5bW8tNaBI1nTHCBVr39myKxlvwGoXaiNJPhwKgTMj9Z0XnPfjux/onlp53rW3jDAaiTHvRKFh+WCx0+9bNIYbrIlBWRoiQPKAhIiEaWpVCN6gs4SIyIABOe89eCDknHEGRBAKHjh+/MH9dcWHwxQ006AZM5Vq1RmHiAYMIQkpnQdYv8khMM4KJdfRY4qHSdhIbZsJNtYaOTE3bx0eP3MoCVh/uR2P1mv1JMtKZDJOKjofWFtamyMgZ9w7V2Rd70hK6ZyzxnogKbi1DlDIQFrrmAwBSqNBSskQGWfOgRSKE2dAZZkBWV1Y4AyQOENAEkoAo6LIPHlEZpwljypU5L3kPFCiyEoAH6iQyDHGrfUevFICgBsizoXzljMSXCqlvHcILk1z76ySHBmHdZ2idzWmrz2/cdWlkfMmjGuMYRCpIGBZn3xprSsCGf2PqIhJEUdV57i2Qas6vab7YTSCXJdFZ5j2q8mIwUE+7FuVD40Jm9XBWl8KIX1039Hihbf+6wf+5JZLrt4y4I2HH17+l499+5JbPp6bxytJpg2NT0+ePtb/xw++9RVv+9u1C/hD3/hkNtSRMjt2V3R6/QNf/f6oFW99zwfzsT1M39Ifnplbmn3r+z7x7W9+c/H0z97xzr898Ms7v/av33jFH/zuM/d/1aRn06WuisYXzy65/oWbp7c9feq4huGjT9+76fiOi29989Za9IX/+mYtOL6/e2L7Nv3Pb71hvHZ6dHMt+42r9z/p7/vJ6Tq7E/2SXhmMT25yknMoiIbRiOj2h8O5s1Kz3U3fZjOrvXC41pOhMp7FgWCuFF6T7keBFIR5v6cl2jLj0m/bWJ+YkFHgOLixsWrecxZCx7VlzAFVm/XNm6aybs85t2PbltysaM22bTtPMSZrIyONa1sTVxa9QWgHg/ahND85sXnrYrFYiVkgwrRYXJ0/zkRlfOqCan0nGTB69qoLmmcOLzxxqNfPxHonjoE3JuXknGgzB1/+/B2vfMl7N7dyWc+2XIRB1P/+z+6Ymhg+76bRi2Y2bZ603tiBdjyyf/GOdw773a89tG/5yNFjR5PdlUSXaXMkqieVuTRjSjTrsdSDLO0T9xzAFS5l/SQOirI3M96QomeGVA3V4mq2/4AZdAaXXzIWQjnslTo3eRlq60W42atBNlwTKjZ6Nk9Xi9TURhqVasKRdm3bemb2bDWQEkxW6tHWpBIhQBGFMRhrnZ0ab7bXVorhAHU2NTG2/fzRfnfx+G0HdRk6Ge/eOfPYvgcnt257zs1v/Pjf/MW1L4p1f4FT2V7ue2tblaS9ukTNyKMv3ZqzjKtAJCLStY1bedZevvmmLY88Or//WOe/vnb4Na+69FxU1O4iY4yhEAK041KW1gtPjMATGefJ6sJbYhBHFW+dIEJkiKi9UUqSQ21LjjgsCm+ss1ZKWRjTqFbLLNd5WRY5CnTWSyUBKQiVMdZrO9AmVAEQZrn2zsZRLITI0oFjTHCGjJHzg7SvrdHGKcU4enSWM5BSpr0eGS0CTh4LC4Hym5pr508OZ8ZWAAvJhecQRqAC6PVzbc
V6/8ZZT9wiMiGUtTl4AmAA5+o45zIX/G9wEXhCYtw53DTC/vp9V1x0TYlqlVxDsQoKRWRZAKrimuOQBCCAC2Te2sx599++tPWXWsgEZ0WesnWVAa6PxoE8kOF5moL358IP8v20j+CcP2cDO1eiBVxbXWYA/lxn6BxdiOj/Z3n23/MxOre7996tl4cQkNh/z8zwXHqEAACCCY/gnV1vFSEyAs/OZU+EiN57xhgyTs7DOnQTgAFWq6NJEi4vzjPOGaAtjYjD888/f+Hs2YA4G2mQd6Uty3zwkle98HkXdxEXf/q9p+45oE+vCefE+qajtK7QjDFWkB1mwBA4A3Tkn429PAPGADlKgUqh4MgZeiLJORISIq0XtAhzbfPCaU3kOTHOCGXIRkaSmVY9UCKMYgRBBJk2WZ4bw+MkWndMnGsVleP778tGQ9ncnvTMAJ0TGAhhRjeVE+dh56jJy1wQl6qq/r9UvWe8rFlZ5n3fKz2x4q6dT059zukcaaDJoYmtiBgGxYRhcIzDjBhG31HnNSuKgmAWUVREguSmm26aDnQ+3Sfns3Oq9MSV3w/7NO/4be/au361quq3nvWsa/2v60qIM1nJrLIm6snV4XPzE5nb+mYi597+uvmLDz8yVesjB2ceuPfklIlKo8+fdoduvm6wsfmqWzvHv7JoTdEvdQh+Po3QEl+Z2fnp3szEysr6s6cWZ+ZDT9nLv/2lXzvxzAK/WKqyO9nY2lgutFFOqg3TQutCiJpRIGqqK0/c+qXlBmHLW2wcRpNTjWQu+uaZB6d6O2+d3b9mN6dv3p/OzAWuuXffsNVpFkvi9ttfd7F4PN2lqO3zlNk44LwZT3eI2rp4cWN9uX/rtx9CgYOtpfWljbn21DUH9wvvn3zy2enZhDK6VprlpWzvHD+we5qWVq94Vzf0uDi+fCwNoBrm1bpPp1WvFWyNpYRIbfHI2toUitS5IUTz0JIgCEQMjIRZbpEiCdCgtRKtL9HTRhBq50PkSKySzlnbagaM2Sggeb9SFZKw8X3v/OQf/cYtt730QO7x8oXy2Kn+/Fwj2xqMlZLOT+3a2UC6eGmTM0IJWVwKP/CrZ77nu2a7LXvxROeX/tclRee7vdb66rBWfuJw55rbbv36A+e3Ni9t9fo//nM/9+//+vne9Gy22d+zY6I9Ee7b3cu79IN/+hvEDp654G54+V0vOrRTbSx//Qvru+Z7e2Z3jbZOkIByIIRQo8koywlQQBIwyynjjA37GbYCh/aWa4+cP3Wuw+JzJy4JKqzUnhCLdGl1MY4iBMrRSGXjkGVbo+9/6/zr7gnqRp+QeUdudv4Aw0nqQJkJ2tjlYIICc2B50jGYOARitnQFrNN3ddaY7o1zQmhOEANROWF85arh7nufLanvuaAsqkIZH6UBDUN2dRKA9gppGNFEqrrVAHAEvJ+eCgmBc2cvCMCSSkb0XJpeGQ0W1/L1EpxPyvXqc9989H0/fi0PrXBaxGJtfZFTZmpNuFCmbogmtYYSo6vx1OzOMpPgDAICYXE6IfOxhCqKWuCBhixNpza2tqzWx098KYkjhhTQl2UZxw1KiHE+CIRHS9BrLZNIDAdLYRglSWKdFWnAQsy3RkApUgIejPbeq8ADa7QGo1E3bHhtnbOlrJF6bY2TXhBEAqqu1i9eTMMwToEFsbKWB5G3FlBo5RkLpbZIqNdVtxPJalRsXqRBryxRq+i6W9/SH1nnRq2p7ty+4aNnNyg4SllZqTCJ+5XeuLwMRFBCs5FxHhmlo5W1k1sjD7rb7SEvdZGl3EfE9CbE+lhVBpzxglLt2LEt9rrfXP9vb4LX3gLNJsQCSgdSO8Vd1gdfws6Ud5kZaoPWMiRM8KKsSBhSTpyzypiqKm0YgHecE1nVRTGMG/HJY8eczuvBeIGv/+z/Of/nv/QHcPnCbW/YA/pZSGo+0akHMoq7VmrqGXJeK69oFLTZta8zH/6jm/7Hnz0NeGs20i9+7bc/d+ypvdcdfubRp1rClc7Oz8+oIte1tgridlSoHLxrNzveKKMtIaw3NRM3mnmRi0iM+gPnPSPEOpWNi7o2nENIqCLss8+Q//EbF3/zXdie8rTN1xfKchFIlkX7nmGF+l+//PAJvafsjwljlZbpdOooWCTj0gKz3/OyHa94IwLtSwaHD+if7IX/8suDKogJdeCtVho9BklgrS+IXFTFdbce/I/Pf/nqLMBgbSvXsiDEI2LCo8KqjbE+c3kpJvqmw7v643VdZIp1Z+f2+1jUdRU2mi959T3WGldVjVanLI2xRIPdMTcnSx5OTBqsw4lWKbccN40J6o0Zrq2HQq2vrzXioBHF0gldl2kaMKTtgBDtW/14Ra8yCp2pbq/b5RRDipwyb0sRJTMz3cFG4S0lCIP1EVIdR6HTmoCviryUNQtIqDmAJ4wGoaidycoir6QHUkpXlfmt1+85e+5KketIkMlu24G2DvNs7IPw21/7Vv3VT19ZGRDPKGHbBzcE0WhFiJdaCYZAedwIJmYPnV+6Ug3HaBxxvkaynqnK24lUXN4yly4P2t2Nl71m575rJnliGi3/+m8/8PmHnyzHqLX2zrJABIHXygIiEKJqMzPRFbhRjc92Ujx6TXLwwC3ra9940c3Ndi96w91TClqXVx6+da9/bjZe2ZRKE2WUsublrzmSzm4BG6epQ1tNiNIH4D33RHEeWJVNT/GZOXHL7emuXdVTzy+Oi7CWLREkLKgpM+BHQNFZBVZdPLX10NdHVXnA2shUrD8K1rempVJ/+KdPVlVa1XpyfvfzJ591lSWCW6MBPDq0Dqzl2jhnlJWOK/7+v/6GDsKff8+deWE+9W8nP/KXT9NkdzluCc5pEI0N/OjPf+TIXNxk8SBjxy+O88IYIyqpuXOMQVVV3nlCkXFKqBhX8LWvV1/7WsY/8sD//NHbX/emWd7KPHfO1AiUU+HAgHcibBtPScrmbqzf/m32I393kVgnVVx7zB1zXhcV0d4gR2+s1Y4hQUo9eC6INcQbTV84NtjMSh/HZJru29NbOHn++YVx3Og+fna13sp0bSGCK5vF5mr/mpnptBvNTHffGvO1Tz5a1bjpFGVQKqUL472llHHWso585Xh13/FTjY8ef+Xh1l237Lrh2okk5TbLKQMXlMgpC5wy0mukvOGVRn+BJIkHFLQP+hI1K7YeYZE5Yiq7HCfm0G5wqmRB5JWNnA+oEQLGQ+VD4YAYAgjWeUQEb73RNmBIucih99AlfOpX1g7OD9/1OrjjZsKpixPI16HehM3++gEOE7sh3TWfD59uMCYYr6xRleYUMWoxapMwNGGKQSrLVQiLkDd3zrATW7ysvLGkUhCIicTM7YI9z3xx66tfvvdNdx988sSz3zz+7Hf80H+/9ORXpLdjVdSVUdKCd94iCxhcvRH1jFNrjRBcK+MBnHOMckRPgBBEQDDGemNZiEIELBJ1kY/q6vf/5MPPPfbQ333so4iBM9jt9ByhdTnePnMFREaoc5YQZIQDOmOttVqWmTMOiZdZEVOiiVZ1DTzec83elf7AVFqIYHZ6Zmll2VljrCaUI
3jHyHVHbrh04bRXinEmUNSVZIIjeMIcZYIAGGOUKhGAcubAgfOCoiE+DiOtpFIWGZFSeUAhmOcEHTjrPLvqVTDWUMqMdUEYMSG8NUYp6xwiCMEZ51pJqbR3jnPOONPKUMKVMoQ4LoT3YLThFGupPAAl9Coh4Ly3hnhzeCo42CPU1p1eCzxXtfXKW6NCJCyOpEYRN/6TVOSc16UMkqnaOUs2w4a2Zo04wcAjcOe7nPiKFsZa6wnVwJPUGeODKJfx8aH4L792yZenw+6Edo16sOOJD//py+88fOz4lx2qYb45MdWc9INH//kncxJ6hNtf/8rnTiGt+q+7vviv7/jge376d9bZZjB50XXOJDhSlxZed0dr/9S3/d7fHHv/+z8IufjZP/6ULrY+89R9N9xwYOSq2cONyYPx8TOfVZlPWmGrk9hSPvnFz5R68j7FX3PPj125tLqyNl4Yb46zYeyfTDM+Ndl4yV3pbYdb0l/xgnA7/af/WD94rI5seXh3eNdLoomed1ZPTO068KL0oX/fDJJhmHBKKGpvrSZeKyeRMGet85RQT1Encb17X+eaXSkPEYy2NWrDpbK1qxSxtfUBCyd7c3EbQPtmu5WZvjJS5qzd9jPdTqVIGu0YDRQo7MUdFYJoNkaybMwe8iRynhNFmywKoykbzqa9VI6zqN3UeuMd33toz4Mnv/rN6soKZYIiRW+IB6sxcxYHQ3jPT/31n//BO6dmN1iSf8+33fz+v1786Mce+cBfF3/zJ7/+2fuXeBK02umB665p7z7wD3/8h2fPjiEKWS/BgKVhUykFnhESBYSPRmNmq7yQUBGAME2jpNEZ54tTs5NVOe5NNAtmFeo9yfzZsxvnLpSyWp3ZwScmIxrUhoet1tRyf9nrjBHuKz8eDwDSKLLWm3E9nmlPrW4MPQmR47iQhZIzkzvHY9VrdhrN7nhrBb2uZFnpAirdaLT6Sm9sqNGqXN8MO3M3NcutxYunTHnp6L6bHn3+kfuPP3bX3S/Nx1u1NeuDtdmpmVZztiydUp4nMLkjPv38ZWEShJgCb09OOidFWNx6x460l547vvjcc2e3Z0GYhAhUK5U2YgQyzDJtbdRICaA1hhMKgIxQyjlYB86H4Xb6lnMeuUPrfMC4d14rJTinglHGgoAOVZkrZay1FJthUlXSepfGiZSWIlprKSVBIIj31KH0mnHQuppIGsa6WAQAoJQGtM7YgLFGEmhvvbWUIeWUk0ZWlMqqWuuAuBbbfMmhYue0tsaU1rKwKUJPAeoRPP4c8S4gRDEkHj3jhHraanbWN2tAcO4FXchvxxP57Vxn2E5mQ4Kax1D/yf/72umJZwj3SXPWeeHBoTcetIfa+hopJAI4AvE+CsOiqqy13r8A5CAieKO1d/6qx8v5q60iBI6dfNrUGhG925aGPChNKSKQq7wT4gvqzLamRbYv/gAvEEXfUn2+9dC2Wcy9oPjAC9wReu+u2tEAgCBa73bO79job+Z5ji/AR+T/QpEAPCHbYdmwXWZPEAEx4OlUb++oWKSMaeMAMEyS1kT7wtKyqbREISWYYVblm9/79iNvf4v12fPFaOX1r6YzU/ZLj5nFTS8NMk7SiCJxhXLSutpApondztDz4AE4QUAQlIQRY5RwSpx33nnvMau082iscx603s4KB/RkG6oi3jFOmk0hBN3aysvaNVqQJnEjCbnzcRDWUo1LGQcQvbBJftd/ffsX/vbZBz/76KGXNueu4x7qelSILgY7yOyL+frlyspWZ7KDgdZQUWrTFleFW9nKP/OxT/X2Bbfcuf/Rex+NWh29rvtr/OyJByOY4Cl1pThzoVzIL+xq4rNf+wghxmo5GqooEuioMibc1RzXtesXpeKloWKqd+Tu/afZ5kZbigCzOncOGgeFKbQsfSUwkEGatCcmJsZbl2paRg2x4+g8jGtCp9eGkoBjqFb1eCDX8qfXjeg+uz714tcfuPDEs3uuOSAmwyOH75y8snhj67p77/uzUGiasP560egFNM6KzUHA4trh5750/lV3X4MhHLpu9+BM3uQRi+O9188gKVgPJq/tWMfrqFwtFoXFqNObmtnnsuWlrbp9TVT3yyjlecZOP02Gq60yB9jM33inPHKDCSO/cZmde67+6kN+YYlYEiVBoxuGhbMiFtIYcERqU9YmZIQA3+rrtB0RZ8AaHlAq2Gg8Eqno9TqVNnrr4M/9T0mJ1C6qSyWrcHq2ZWkpApWGYljkdaWdt4wzJKKu2F/8U+Ofvtgdb64mkW2k1ypfDpVttjtT01M12H/+/Kd+6bd+5uLlpx6476G/+P0/G671C1vfct2uYqQ/+If//uM/euBTn/iEN4O5venmVtVf3zLrixdXionDByfbnDRiJV0aUWW9M7a/1VeFCjAYZiVPmJPGE8epZxTSZmNpY63bbpqxyfuj+d3dJGiO1i7Y2gSMau1RyYhB6Ed335W89O6ZozcUZFoR2KHgOgE3+GqCsoazQBxR9SYNhjbowAjQWRJ6IIEJJlnDogjVxQsKZ6aPTFNTAKyAL9SIXXxs8td+80pB5n1UWG+3HaxGOybwWyjh2laWVWWUtPfu2VUMNzdWthj6sNMMWNRtdavhVq4KSjhjZK02y6NydQATTTE5v/PMsDqzWdxwaNZkG7qqe40JxFCqwhA3M3vN2sULThdxRLXDuq49MaEIVK21NU6Olaw4T2nQcqYgSJrNZlmMuxOThFEA0LUKOEekxhvvvTM2TtoOlNXEutpKb400miRBmOUlA1GbcZQm3oLc9st4HsdcloNa6yBOcqWpRwK40V/fPburcjmhTJbSKtVKEgt6nMu0PxBxhxCWxE1vtfe0qCoEh1SgNXUxChNd2wrQ85g5E2vklY6Mz5zRi6eOrV5ZYogBp4hIoogHwlvHGXLOtPatyW42LorhOBCR8iZsdFa3tOyrHZF53w+/DLJNpPb0ha2TS+MTS3q9MM0mLw0ZuuT/3E/+/DFzzaR64xF73TUwPc18TZqNoJD8zGVaW9VsBFLXdWUIxYmJ5tRE8/LCRlVrFgrKqTbKGBtQbDRCWctyqNoTsRDeRsgCGA7G73zfu4g1b116/S9+z6vF+ERLJlZnNcs5Bw2VqgwPW1oamVdg+6+4Z/ozN93wjYfXPvTvT22t2oWL52y+ixnWnk7rjf7ypY0wIJa4Xre3vrIeNkJrrJQoWJzr0mgtpc/qIoyFtoEI2x4oUlKNN3funljbXKXODcd12CSNXu/Lzxe/Oz0jRytW6VYv6bQ4+IpwZdPhX370z25406935sK6rlWui+Ggqk0QxYln04z+zXuPjuFzK2QqTlOaXNxzR/3L39f95X8aMS6iiDmrkDBApChYRB8+cf6Ga47m2dWij/ndU6OyUkO8ePniRLPhRex0f1yT9mQz1EUjtgYYpdDb07JIZC7DsNPvD8bjyjqRssDYcnV5w/lw9+7dQTovserNzLGkXrm0Wq2NenNTYnZ2uF5vjjLEitIQCbYbnY2sJAIpRVk54mi3N9FqxLPthtIFAg2E17Xa3FyvlOJBFEeCUje/o7cxypB4
UlMO3ngZRXHCw63BCJwtjavK2jmotLu0tLEx7g9GJSDvdFuIbpxl7baYn5hYtCMgdjQcgffoYabb2hhlX3nw8x6AEe4tWOcIQec8oDHOITjw3jmqpL24kX/ft/3Iwf6lT/3j34OrHCKnTCnTH9jhCCKRjIfCLeTDorr9zvr6O6IqL0gld+50l05rQbhz29A1EIJOe4LgkQw3hwsLZ/f0BrRc/e43T3cmr9CGuXB6iXBz+tg3jXOz84fWllfnmrWSycqWjhJmbRG1Chp4LwskgcxKKDIx0Q7ipFaVVoISDCOmXA58tOfaoDOJSyU/uawur/hWL+bWaFdyFjjFvvrFS5/49zNXNmZFOPMX/3T/wetvDNszvX03Xz5dSjkcDobjUb02GFtwwMGDJR4NIGEMHHGEOCepoMaCk8CDHR/86PFzGxNPP/Z4f2NMzFTgHKc+bkZVpbyIlpbVuYtj0FDXYKwQwoeBZ8xaY4x2nHN0QBxhnBZlNc5KJIlz8VC63/vHtQdPrL73x29uT5cs8A4keItkGziv0VNnQxD6zjd1Z/YNdV+Ggnvir1xezQamynlWQVYD441h32ZanL1crNesLAn1AoRgVyth4ZFnz7IgiBtpnm8Ug6oVhNM7pvuDzbXxcP+e+aePX86V7Ux21kZVv5DQak51Wm958bXffObicDgCGqYh05ZqA4BYFrXnxBOgYSuz/rPP2o8/dVpQd3h3+sojky+7Y9f8ZD+KSmQWhGUU0RomC2fXibzA0o61xmDNGug91YjAmC8b9Xq2cqHuxh2qRbXSv6brpnrJc0ujqdbEhvaFJ8hYVdXOASFgnffeAxfOWovUEerZ9MkMfvOT484n1ZE9KRhZKjosbCxIrxP2L8DGM4sDm4wqA8SF7WCrNhfXV71wFkayXBCpoBw9yYkYSlr8yM/e8cDPnECGUTPNK5Nr+eWHHjAenlw+3Tu66zPfeNxI1ZqavPfz/8x0xliw98h1Z5970taGACIh3l09wXXWEYZaae89oEePBMCDJQgUiPWOABKgwMEjKqVkVWtVZ9IN++oLDzzQaKYEPGdEqRKcA0+ctdtbBOfBASCiMwbQE4rGWrAyCrnSZmMwkkoDJQeue8VrX3LXB/7oDxptUavKeH9l6Qp4b51ljFNCrTWMuKXlS3VdIKXeuu0UQgTvrInCSEplvPVWOQ9cBISgMs4555wXIqAES2uM0QGKMAqt89uxSlpbcN45X9fKOweAhBFGCCI4Z8FY4tF7CMOgllrWtTEWKdluWwp4gI446rX2hFJKOSXEaGWMBSCCCQ+WIuRZ5cAzwJlu9G13X8P4yHGqjRCMBhFHwoizWioPPgpDyul/koo8OMpZb3afXB0U5UnEcV7VAW87RxAbk/N3rZ2/j4kN5z0LhJMOjTWOemMI43G7pUtwQWCNj2jtBe/t331+dUyDgFFcXSha3TB2HrVCp6hyxeIq2+j95S+//9veMvuqt/3EoZuvob3mv/zc/54/NFPLqtOKfvInvuMd3/u9abXifGN9of+pf/l/7nrxS9/y0z914oFHNA/GdoJzKqB2PnfKYO2rviZyyBI37qvn7ZYzDLVt8u7OQzvmerut3xz2Bx6VJyWocbUePvi1+ld+X++Ya8sxu/8h+PO/27rptub87NRgjF96aLMVilLXFL0zGpyzSnmvoigI4khJZbUKI2wk9pajOyeagc2qmEcs6bjQekeBZBZMEAU7W43LF0f9lcvNJNixUyyvXwYR7Nm3f2OQj4tqK5aMpmB8p4lIKHFDO14bD1eTYCaePJA296yt5K2pKKJmc3mTsYoacGasage0Pb0zePmr+fQ++eF/OFaMlXds290jyxEHxow5++izg42f2b13n9849qpDL/2T/O8owRDCX3jfn3HjGmmkKmed/h+/+n98f8i1aM9OBAKK4bjVC3UtizyzGFJGja1HecGE5WiABMh5kefUBVZ6owxYpJyCqzvd+OCR2cWLG2dW5VpubgpxtuFVvlWSkhQ+y2QcdKhIi6IM4zSXm0qOdsxNcx0pwzAQQNC4XKtiMFpoJnNE27y/hUi0zQLRbMbtrM41jTu9BmPR5XNrPpwe9WGK8Uiby5dO3XP3D/79x7+yZ1fP6mVVZ0Esdsx0kUCRjzaHphXxJvorZ84jYtqIKIsZxVGVaSe0VJSb626eHm6tnT+7vj0Lmp2md8BJ7K0t6grBBoJKqyghnDJrvaxlEEXegtKSU2KUMdZdlTWUQUDKCDJqjTTaespra6TRVVFQJjxnCK5QyhjFGStVZbRzBJM0ccYhIKXonGqlYavdcNZtrmYWQGhDPVJCCKVBI9KVqmtZammMEQH3CEHAwHFDeV3V87F/yaHOzonMycI7FM2UkBp8ZQpYXW7/x0Olc57gtqsVEZASX9QZF1QrC9tsD/ynAB8ARASLHhAt0e9842QUPRo3S1olwkWKcucdgPTeEsac9ZQDo2CsJdaVpffgvfMEiSfUW+MBKBNFIZ0xBIEQYq9WlRGlrZcZEgTj/Xae8/ZlydNtX/G2HQxhu6jsW/qN/78koG8pQbgNIXl4ofge/bbeRAggIdsk0v8ddO28p4xFUWLtKqD321+p94D+hW40sh18dBVk2h4IImUsiIPB8Epd5d5hM0mKug7CIBWxMt4SyGvNXU59/wd/6OXv/enroTg+WKmlDqWls13zyhvFc2fqUcWz0lCCxqECMqyh1lAp5/3VYG0CYMETINI4XWoAcBa8A+cAAd3V8Wx/Z4QgIdujo55zygPabjeQc29cVdtalUobWdZgm2kaT3e6o6Ic5KW1bnNzvP1pnPjG8722aB498syTlzo7duzYi4PxeJhbBzlrilLnw42x7TECuSV2nEvv0Fp76VJx/Yums9XFY988kRVhuz1lVpTznTtu3H/+mfX+WuEr2e01MRHL62NqsRWidD6ejAejujvbacdiYi6eMLVKjK/53W9/7dxN3S8/8SXNZDqFyc6EFTagYYuFlXUhj7vB7nzpdBZv+FoTS7VNmW82CAsI02qqGRFGL/JIdndPzHUbg+ev7N45c4eXa0/8ZSe8/uyptTg6wESlKtdMev/lu992ZenRKhTZer+/mDnqyiW1MZpa24z6zw2ffPSR625t7zscRQRS0iuWNpOAeKLDAOIAZ3kEvjy7uiiwnkgcqmGneXBiqr6y+Pj00X35alZeIf0FSTPSEDi9q3rVaw3vwuRuENC4B7o/NRYXnxb/8LHjn3hwFSqgvMNrlgaRpVQ7EsUJJZgPyqiRtDtJ3s9brag/3HQ1kU5qpZXSYRAHQTpY1hRdHMrYIWdGV/3BVtbsxhx5fy3zWlMEJZ3TKkkbiLEuglRMewOA1iib52rvVFxuWtpIHOJffvRv0xhGo0JZGbbVYDW/tLHqLZk/3OYTm6985fQDj51TxPN26+uPPb57IqqtXDt/xoejCwsLFslgmNMg2L17YlDJe177xk9//AuEK0IRvK9qTSmJRXOqMblw6UIdCpvLZjOyVm1srTqlDUGrsNmOK99P2uZX3veawwcuRL2B18vaCkcPErg
W9STHBhhPhSBsp+kvyMGVdPcBzVtOMfBaD5a4ymDyMIEd5fqZ9fFG1I1EoAgDFfAouOEPf+vp4fKeuE2zYVmMTBpwoEQZX+a1N1cV07vvedOJEyeeP3l+WA6mkyQMeKudsigNKI6ymqNoNgOHZKW/DtyDtUfmOkrLBPWR6eZDX7v35bf8z6WNlTAMeRxWVQHUcUdM3he0YgELYmZGetRfZ4ynHRKkoqgkWMW5AYC6zhiBqpbtTttCzThx1joHPAq0ksYYLhh6mwZxXdUWFfXcKB2HcasxKVXtDQNHQxQOPAHEwFey8i4Io6aUY2PQKNWa6Ig4NkWGVCRxw3rBsYMMrM1EFBCO7V57uFVQYAjOWTMYrDNEwUKCXluMgtCogjKfZQOPnljORSNI20hGWhUxN8PRerm2YaXZrpuk3kdJmqbN4WCzLiqWcGNNWWSqqBl4grox2etO9FbXxrnYeN11R9pxmEmXjes9uybbrcauXnniyqAmZLPUExPxyoZTZfDchfj8RcO/pI/MyDv24P6ddGuEnz2WdxrxViWd0XEcaufG49rUJuIcgCEDo6R3IKJAa4tovHO1tkrrU2eLNBBRM9RGW+aDIPz8fQ8/dN99N1xz8Jd+/AenmoXhZ028HHWGlHguKPGhqjQNOqh8J1XfcffM217/djT7P/qx9ftOLD16crQ+mJR1vbkm73rFWwo1oHq4Ps5iAiHxo5H0Dj3SdrfFgkArraTTdckYjMZlGDcmJ+eNyVrtLlpHYMhjtEiW++qZJ9cPTkG5UVlvI47W5sl0BM0hmb3yR7//Yz/2i7/tx8gIJRwtqLXVStcigcHFJ5fil/LOvjtY7W3YjKaLn/ntXQ9e/OwXn6700NKAMcZlrT2g9XDl1MXjTzz7yle/4t8fuwgAy5fPjYaVdc5U9Y037Z7t9YwqvvL5h5PGbK+TPvjsuU4czc22gXj0WgjiveNBVGxupU1++01HTzz1wEYhOeVbeT43G+yb3TXOZFOwVsjHa6vgO14yry0hJkqoMpWsyvVsLC1JyFS7O1lXqiqLRFDiG7NzvXI8koX06HPmC21aaZQkqdQyUMKojFHdnmgC+nI06g/zqPY6lKqugzik6IbDgfQmK/ON4aiqFKe8VL4qnVKFVkpqq2sDQJAgI+DRW0uqug54mBe1cyA4Mwy8trgdRuI1Id55JASd9976bDj69Bc+9c7vfNMtL3vREw887GollfUOKGPSeKltYStGeXHcrPVXNvvRrbc0pncEzaYvVcloDBSsB6M1IZQLppXxFJWvBqP+9YfDUHlC+rXOSK5nJllV5VVeKA2ri6f3HGgfvf3IJ7+0/IV7JTGkm8SH9rXN6EwAGIa7jDfa5ox6AE3BaqMpAQuKcKtUgU41E2VltbpMf+/9X3nfj9+5cy5s9drLo+GDXz712c+eWFiLFaGMDL/x4CNjqfffdPOL3njHlaUTaycvU7CATiuJFFUtozAAQggToAT42lplrXfeeOMRqLfOafKpf/tKZD2hRNm61pUHYAG8+tVvfu65Z5aq0qG3ToUBlVpRSrKy9LamlMZhqJXVxnZ67TRNosrNdieWB5ml2qqirJv3Plo/+fC/fPiPfmjvdcBD6dyYggcMrFEEDHLmQQTJzKHbugStd9JDvf8le7wl1HFrvZUOgVF0g7XRiaeHjxzLHnxCro2ScU6C4OomOR9JHuLK4uJSTCcmeoN+Zt3z195w0Lt6sLpy6eLK2iA/MN3Rpd3McxFmvW7ryN75aw/s+Nx9x4+dXBppU1krKSWMCArOGu+cqkvwyCiPRYIOzlxyp85c+ZsvL12307/19p133NyenPRgSke8ocb7itnTpCDINMXKWe1dSZUgjcgUQg7EqvSdNt0v9Lvecu1U7AaqfEVz3wNfX3jidGUtYSyikTDeO2M8MhbQJEysVHVdE4aVt9YIaVuG8a0zBfc+IiQQYjhW9ysvAKyqbBjtSPi4P664jXuJFl25iYgjARmNE29X44RqXQH052b3G+3C0FEHaciXNvoHj1z/0NNn3nDn4Xte+6rP/uUfPX7fkyiN8ZlDTSCMSbfB20O55q0j22fTsN2LTJCgsZbTF0IVCGrlOEVPtmNHCeNXs8mtroKAiVgkIenMJ8PN9SSOOeVZVtbKEALgPRPMOmetBaAEiXPWgA04d9aj9yhI2mkMx3l3amLx4hVK8JljD6IZzM3Nzsz3Tp07TRGV1ZwTJbWzFrxzzgngsiwpZTzgjbCdlxmAq6UEA5x7bfTV/Gzw4L0xdrs+mhCP3uWF4gELQhEIUde1VToOhEdwDBCAAKGUSqvCMAIkjEAcBkZbUysgBMAbYylBa4xWhnGujAbCrDZaK0RHCISBsMZun4tsBzyB1+id8RCGxBGQRTUlSDeoMivTeDJJG1VRIWPGI4CnnIED9Ghq9Z+kIsaAcltXo157Yn2FaKOSJHWeMUZdpTdXz3a7Uyvr5xAVIQ5EZCCY23XD1tqC1qNS1sBoqxPJbGgIkxG58ca9V555eDQEosnE1IF0pp2tnHEe66EEHx4/NaC8Ebv+Vz7z7OMXnn77q1562817nr31+mo86M0eOb145dBkI+xMXj51qdObCqQrLh376uazu26+g8/d+sPf+/0Ll6986XN/042bymvrjSwqFlokZciJjao6q+f23RQ3m1uLK7/4gUc/8Gt3kOwhzMZe2CRGDKPaeHAxA2OctEzOTU8vX3Fff9bVDxcscoxSTWsRIENnjeEMtbacEErQlDk1Cl117f6pqanGrrlEMArNgDdbo61SKdluNURsJoS4srJW14PpiYQg73bmLBv1puaVUkSrABRS24h4IDrtNCjHyzwyGxunR+PVpBlBTVzhpa9CYAkNIkpMO4CwyrIFWbi4eyidmF88dwy53n2kePeP3/qxv3qkv2a8F0QQsJYw4pljEX/lW7733+/9x92Bfugbl6OAFFYHjBVZTpyf29XbXNs69exxVcvd8/zI3tk7X3sLs+u61kVmvIE4bayMxlJmgjKKuG92x9baAHhgrdnV27G8tGC8me61hpubPI3idqsoysjptA25hCKrzj1fj5fdzj1NTypA2mklU+29Cxub3dmp9dUtF8TdZjw/PV9teZJGM/Mzo9Epp3OGOkTbbbVkVhrnpJGjqkpggtsAXVGXo5gH2eqlR5870br+7mP3PXmwyT73pS/d+eLbFhZWnnvsyYNHbroy1kGnEYRYlQV1TtV13J4hTKLgg/U6bjVbne7q4pqIG0maeO8bUzuvXLkwzFevv3Xn1vomrHgAiDirlUFK8rLS2jgAxhhhzHtPOavKKopDZWQYR4yHVVWBd5WWDEkchlEY1VJJY6SUxijnXV7JOAiQ0HpUaw9RzANKPSFBRHEbSxGcEEqRMIS6qMKAEYIeyMZmYYyptQ4CgcgIgTQNLAAQrGVW1xIZgndlngFSzjzlvqptJasdu6PDBwUVRDsIEqzJ2NVSZ275PP/wJ/KFDe6YQe8JEoJotAX0FH2n1Vxd3wSgV81Y34r8IeCtA0DikTq4dr596zVEwRqEIYZC1Y
ZXISW3n1JXYqLnQxIooiZaX2vPSFV4uxHkvk4dHFRA0dfnQvSdefe0NP3LDw1KV5bmT3cPDw63rptdbakcRBDJRlhL0zLEo7eSnp6PdpLWkA8sXLlKi1GyDsERGTMTtWaV4SiMazcejZn7IJXilAgFvhbIsTpcbp/LOMdMkDjlybOoyompWz6XI89apyo3LUQXdlcFqdLi3Y3WwYDvdfpSgc7OFhX6jihCgu7KMnk+rUqIxQWedHKxoSs+Cmw3Hw9G0N4ipIKDpbB5WVjPkXmne662kae4UjWQbBbHOSG4dcxqbJJdEsIV278bB9ObeMM+7rbxrmlqkNOlkO7Npe7AUCcuExADAqCc+zlss67jGgCXDw21T1N1uJ2/Fezf2mpLHrOV1bTTrxozU84PN4fFzx8GrOKK0lYn4nB2GweD0fHowrHatMsEyyRJjlIypM81of7Mo9oLmg+XznfaxcVHaiLjGREnMI6SeHR6+WVBBKMEAksuA6AGrsgGKHjznvFYmz1pOK+OsbjSXvNvJnHUYUDVK62YyrybzaZa0KBdGK2S0v9BljEZCFLOaSuG8K4syy1pJkmhbScYJoUhEO2nZoCi4hahe7qpja8lgOaJyLMTcuzqEQJS1pKBggwvF1M3HJnhFeXB2Csgn+5PSibKhWhloYFrB5RvWWR68x7sOahcopXEkgwdv/OF0VtdN8JZxJiOW5vHyUndpodtKM5lIykSayDSOnPZJLONcVnOVDpIoixCJ0fre86fWlnvXb25v3NqxTs4mVQhOEFqV8Oxr43/0E5//f//ls+94ANo9pGjBlHdRYtUU6hl0Inj03uxzT5eFJu1OJ5JxXVXTWTkaTgkBay0XLMuyhcVur9Pqdto8ElES6doAYwBUMMY4qbltJwJ98MqSiFAZcc698cGZQBkDCqImDKWMZ3UdCQTegGziiBtlshbhkiEgI1A2VsYCnafEjYsZRR+sF0KuLi/XSnuvhIi6mZrrcjqdRgxAW6/8ctau5+qpF19SJRHIHVpjDBL05s3K7UAg3O1ZwpBkMpFkqZ+tLPTavXYwwAgFpFxGVdG40Jw+vmB8pJSVKSeEBxOM8ze29wXlTbVJAgTwURS1s7ZyVjk3q9VSr2sbVWqlrA0B06hduEJKwkRCgdbWAiEK0VpNCXMuSEkFl7pWHolvrIxi5XRgIc9Sq81Ro4L3nIqicujenIJpYbaGxfPPXb5xc+NtcWKG+3Z/eP50p/YHf/3/9dE/+MVfO9iYXXt5n0fq+Bn65d//+g/+je9rxbT0tTVkqiYLi7kn1vpSF8Y2SgPKKDZNgdSWqslk8sQ7Ln7lC0998L2PJIO8MYoF01pukVxcemm6c3Uz66XBGs/Fhcfe+bmf+eJ0NNzrkeMr5z73i5d/9B+uvv2Rtae/PV899rabn3lx/069utw52qtkR3aiuEeY1Q2NI7B8vXe6mR5e3dz+6he+cW69/YM/8M7lWL28OXtxZ+Rd+KNPv9zO65Nn+7PGyL2ba/e8VUmohqHyfDwcJ2Fw+v53VA7KWoNhvfaa1V3J2yzq1rN9mUVxt/PcCwdH++2Nb11v8/D2E0kn9bRjVk8vfPHTf9Q/fer4ydX24BhNzkV08NJT37y9M/3ej77fVfWZi/dceOLclTfsV37vpXR9dnT7jRe3n59Xgvr0+/70d/3WH35uYeHEb372ejFpCKPGoZSsqq3nIAWxtUoYVUWZJIIFGyd57eqiHC+eXJxMTFNrAhCndLI3Fzj9S3/3Q//5Fz7NgOqRw+BlSrXHqtYceFloVVfOWM9JHMvZrBkdjoSgrlIueCcZoei88eDrxuRJ+8/9wIf+2y/+l7Pnjg3Hcwj04Gj+gQ889G/+2Q9tvnzjkQfvydjC5dcv/eX/+Sf3a5VEWbubT4qmLlWSxpPp3Fvb7mZREmlkd7YmujZSUJ4wBESPAFSiYDk3lkYDXhSXHzzl/pf/8gP5yfVWd0VDAGgkEmtJYF0P/P4P/Lnsk7/6zeePRJqYxsYUjQu1MlkSpUlUzmaA6GyD4IxRWoUkFd1u1Chbz5wOfmnQqaK6rBpbjJW2i/1WFOFoOA9KhABOG13MLp5dvPfRh158/TYATKdzDGCMpQQJEehrKjhPY9bUZVUNt7evfOONJJ/kzAHl26NyFvEz3/22vUu3W0aZ7euERlG87oMnjHEmy3La7nS9c0jQWijmI5lKw6TniaQJRZrGiGCRM2O0b2wgWhBfNbWqCkolEgGBBMBgoC61DQBBGz3jNMYQEE2SR9bYWpcQ0sBYYyZxK++HFXBQNYWQjEcxAHHWJXGXEFNXc05wPt9N8wVXNeg4RhEXsa8aXWtCgFIWUVY1My8ZZcQjNkZxkVOmALxgIo7leDxkhBljrde99kJZa1vNgMoojnTT6HIMsqnqOWftlYXTtp6W9SjPU9qnSpVxJI+fkpL6k2vzraNZ0u49/81vRxHNuh0UIhus+LGZDG9NRgUNw7Onz+5tHZXjGcZhNB3yOO/1Ra0NBE2bWSx8+3Tb6pPXL11ptbKlJZm1U7p3RMBPpmqvCpWjgZCAqCp7/frRATuIJXUaCCUaQmPhs69PjnVxgYaegD/x7pXNUXj1mwfKBwgYPESSaxO2itnRrDSNkxHjCCBZQHAhOGe9Q0oIZYRQlMhsVb/v3at+usUIkNoCI84FUiENBrgPTaEOHedwbH29mU8oQeMcAbDGc0EgBALBGqcxVEVJhJiWuilNl2aOpYeV+pG/838ej+M4S9cXbh409a2r26qYrV5Y3h4dHQxfft+jkxPrS/vFKKK0qmxtAiVBMoLInAmS8qgd1aUCQmlMlFIQPCGUMhIAA8BSxu87sRBcZWn15i44PIwYk4IQXxLLXr+0FyWJbsxo/My0qdaPn1xdHwCKmY6t6z320F+LpDkcT+P+wyxQNy8WWylS0Ga+2OtTmYxncwhw9vSqCqqp6zvN8P6103a/iaynrF5aEO9+fE2NZ3v7E+tiB7FIPEtJHKCzPpCMWqOoYGAsYRBlXClTKx8wu7NVyZhpb8eHYwDfVMY7t7La7nREFkyWZ7DUOSyLw3F1bG1hqZctLvWd1m0GqI0UUaMNBu2siSOBzksaWnmEEEXtzq2bd5y2YJxGny/lncTjXC30F/YqjYQ5ZQCRckEI8V4b1VSlSluxMyEEAAgeAgkAIYTgnQfCOIEQ7gJeQ3AhABIKCOARCCI1PoD3/s2YDEFC7nJdCWGEgAsWnfM+EEIcIMDd4sNwlwjincdgEcB7kJwbqwgF5wNBFiB87ZufZ6gI4ZxkgSBAYIxguOt6piHcpXgQwYm2hiChSI0Pxt61K/nggzXWGIUAiBI8Uk4c2HJaZXH+4T/5l97ytiee+tZL11598gufffJwd7dsagAgnHprGSfeBx+CMWp399J2otOs9cCDF/cPZq99+9JkenB0cCcQg8TD3coD9MGTEDwS4oMbjyfI7zYNEGvcXdWEUrTOEcKCB0JA1QWlmKZ5mgVG/Gyy184ylvDXXn3lhReepQSCJUDQGvumGhb83WAgEtJU4fLVW0ct1U3D
157e/OgeOdnOSVx5ElEi7n49i8SJU2e37uxElKL2nPJ0ZbGZVWpeLA1WX3jq22dPL49FcuLUiTu39hbOrjRBFBPTXeCPvPeJjZde6C2vv/+Bxdn1Yrw9aR3rVvV49/JGd+14a7B6/9vuv/nG5mK7Y5tJpw9zDRxEP195avvGiXvWJpXqRmppIMrR/tzD/GBHJNHS+lsmB5chlivH7js4mlGlaTUab271s+zJz37lIw8+1itMcWPv5rerE2+50F5pg2v6q9HiMf4DHydPP7X/C58a3pyBD4gMaMRkIgmpnR8uHJ8HORZppApDgTFT0TBy0BiPhKJHScAHPeGB7F8eX7s0D3opUPaNb76YRNQHT3ncE3JohqsL58rm4EBNP/Q3f4yb3jO//Sutdn/z+vUzg9b3fsf7PvO5X+DCQQBKCDCOiM4oSZngrLAWnPfo23nCWXTxxPEr198oag0eGAqkpK4rZ3QIQAIhhGhlGEPOuDYawFNKKWMQAgZw1kMAqy2PmBBCCGYZxeCtD5QEYAxIsE05KgrvFGOiv9r523/zr7711PpP/uufAHN4/4qQvZiA5SitQyCOEhKQhGBCAIpEa80ZrC2yt9/XHpVHHnhCUfLgkASPziN3KKyOYzJYbd/7wPHliO8dTG7fPmhKk7bFZKYaE6zzBQELQREwxlqgzjlCwXnrbYgY6UnRzcXD9x471YuEnyXcucooE7onTyoq2zTTsxHYoW+KfHEpStJY8KZo0FZ51NXVnHpXFBNGgUvGKNXlNOYs6/Ytgp/tkMhQ4RmQ0XSnASqXlr9+5eBLl4Y1yADKOeudp5wRYOGPjRRvnorqspAJQ3AeVJwNVHPovaEYdDm1BuK0BR7reS0Tgj40CoKzHomhVIH54jOf+ot/ZdnO34ii4S/9u3f9/Z+49NorR0REPBHWOe89EsYotXUJjDBGvfXBA48TBNBam3lNJU+6bVUF63xwoZrXEAAcemDgvA3q7/3N+9564nbSgla07nzdTnMD6FxD7PVWzoj2b3kk+vmvT2ngjFOZRO1O72hr0zlXWYhlLGKHAQeDNuH8wko7QUMitF7PjopZUxkqz5597Oj200meHxzNKZOtTjZtLAjSH/Saoij08MSxM7apgvWMBIPKmWK4tdNoWigXxbScKlXpft5eCZRV+pGLJ5N25/mXbjVav3DzWiRaSch6rKWbAMxHjOSRnZR7S/1BIGidJ1wVRwfBHErpi9GNuNOPeD45LEgkwDLngneWMGqUohQkS2aWtgZnyvJQVwWPiGcwVmOWRJ3kfDW5TQh3DVhZVVXVyhc94SwCFs3zXmvj2n7KYoJEN42pG10HxpLgQiKBO61MTdGrauKsVk2lqsYqs7i43Ovzne2DNI54kqwvL21s3wYATiLqpNIhybLAZsNih/koENBNZUniA231en0Xd/qrVlBGoxMri1dvXGn1T7R5XBU1EDqtVMy5E7RxGANDhcxz5KHVyrHmZWnno6nWSHhqbav0TX8tzbAjpO0uLCCo6dZhHGd5q/+VLz+zeXt7vxovDzrf/57vcEZFUQLA4rSrbYECPDQyJa1FZtRVJhZlp6O0Hu2WnBMgfD6dAvI/DqAxa4LSxhOw4CdNubq4ICk7mhXGaQE8YICAcRwzLhAppYRRNMaNZ/NAod3K0iQmhJJcIiI4F0kB3i4ttIw2zodeO1NGISgGVoqI04Q5ypvZYj594N6FxX6VLyIKFaACaxA9grVeE6YhlKoIe7fJsy/r16+P7z/DL94fIchypqwWRa0pS6qpn8/jLz5rv/qyd0Qw9JxT4x1B4j0YD9Z6CyYYYIQkUq702g8/fL7dabcyEUcyeAROqsrFEWcEA8MsjeM4Spko52UW8TzLDocTLyJI7NKgO5sWo/FMMlo1hjIWSW4tXN0JP/Zvb33w8c5f+P6lC2cpN9sOGgzMlU0wrpzD+IAnXFoZT6bVjNSTyaRxAX1gAWQkTp1Y7fV7WRYTpIwxpBSBUEYZ54iEUYYkpLHIk9hbZ5AAo54QBM8k6aXdoqzBYyoi5wNlpJelgrN6ZvI4ydJoBgWhlBDifUCK3U406HV0Wc4m48iF/mJXZIl3xGMgFAmypnDz4cF9Z8Oj399aGiSHO8XGRvPSC5vPPPf69c2ZF8I3mlBPCIYABAm5S3ADzxkRgnVaSXfQSiI56OeCRlLGrV5eV1W72+JRPJ+XDvTSIK8KLwXzGGY1xIw3jdqbzpwlri7baZTlUd0YIKxSGggio5VxhDDr/WBh8WhS7B+NGaFAuHUEkZTOGNsMWp1Bt6OVMc4JQRhn1jIghPWjujIyEMaRBa9dsOCtAwSoi8b/cTHn6Ki8cOqMpOSt9z4wvnPQXll91wce2Xj2+ecv374PUy7z8/cvXbl2Z3HAqlpfvjI6uunX71vLo4pkiS0nmzffCNTFMQMgnDLdFKnkk3HhQtVbXuQhXTmffyBHymbzkRJt3j2WzI4OylI452d7s+0rd3JBWu3+/o1ZebOSnWx3v7mzO3vw4r1/8N9u2mj7zLvfn66vvuXD5y997YtTrdoLKTCsah+4fOCeR29s77K4/8zVsh0VH/ro2dGnL93ePvipf/cbjz9w4pH7H5rCpdnw8NKru2nLLy5kDz584ukvPbfWP3Pyve893C13Dspz73lkcn1nb7LTWVxuxUyXppw2UuaqEh6dR9/MZy8+9ezNzdDUtrFgCPnajvar8i9+6K2vPXO51z52z/3vvLJ1+yN/5gNXnr/+5c//Tr5yYv38xabMRiVx/NTlN0xRJh/7cx8PrLz14h+5tHnk+x659fTRr3/q662FzoP3Lh1M5pdfmewdKudJu5NpX556bEC5k4QVu0e95Xan1b31xu58Njp3cu32Te7DkABjXNajChoQnB6Mdt/7no99+atPj480yfvf84F3/tuf/o/dbCmPZVPZyWjqrAHEqlAElA8OCWGCMkKNRcZ9XSjOMG2JRx48P9qdffnTn1vpLU/GZT2rBITvfsvxv/4D7x5fO7p44oyqo//jX/6nr73wnPJ4bP34ZFrPZ41WJoSQZ0nayry33U7raDSWUR4cizlXjXFah0D2J7NOJ1ZlE4DbWnfbd37uV963dPGsgEUPXYQUoCZQaigtUxSkhcOjo82awrR0aBsEUI2FgEkUGR8mZe2dN0ZlSSYYs873+0tlNUHmmEWjTBonzmGjvWos5cR6GE0KRkBEGDFsKosWhITv+s7Hb20evCkbEBoAgrGqqY02EEDKdGVxKajhj/3oD3zqFz6/cempj/75759uHwbeTGeat/JJxZbWlvVodLAzai2niNCUmnMI6DwiIMo4mc9mlDNGhbG1t4Qho1R4UxNi4zgdTYetpC+jhdF03zHLOEceE85twEAIl9IZR4B4p9O0rYxyTR1x4YynMVIRW9FQSvO0XYx3tTceKUtYnkblvFJ100paqil9sCIJ1iv0JIpTKaUxFTFuPppErVXmPYBLEqlqp5raO6+05ixmjJvAZw3rdpfq+YGzblpMo3YWMWqUMXNvvQVQQjLrAwkuThOlTMZllvIQQCS
ibsaj0V4IK2l3wJlEhODrumlkJ1rLcw9haTGZzMvVE2dq72ujO8fX8jbd29t1Tr2xsT21Jk5j8Jq007fe+9bXn316NCsaQ6/f3FkfdCs/Ymmn21lz9USmLR1sK09MU9xzIhP75tZQW+ciTiIBOUMffDvlpdLBe86Z9SFQttuwQw+Ewzc+t8d8wEAjCgiojCcAgWAacUopRMRBcC5YZYXgEMC7QBgG7+HunqWWt8h+UeqGuUMb2dCEUixK0gNMINhS7UNxJD//dP6//9zWWC8imwpGjDaMEUIJEoIuUA7OB2QMCVrriWAiTQytyrktSnsL1fpy+tvffvpjP/iRa/t7KyfXrm9vWUvOPrD+zM1v1NoQAKN9cIAkIKHrS0vjojCq6uTZvNacEmTCBRdcIJQoY2LkXEZRIkmjWsDvvLJ15c6VN6VkMLN5CQyKck4DL7xoLy8/dP7czWsb1a3D8SRQ3mCo292Fw50NnraNJIaGeeEwYIrSsWxc7WaRAI8BJHDljG2cPX5s2VvGjvaaoyO3MzyWuPsf71BRdnujAlx3imxwYljWsa+SzMc0qgLXpiKNBWv7rXY759bp0WRSlK6xTBKPpeG80sZSinEcRVGPkFDXtpOIrBt1uyfGpbp1bT9vJ4v9WPIwm1QCoFKOixi8uPTyZjMvV9utsmwIowvL7brSJuhUMAJIiUCuj68nH3x7//ZLt+7s7cQseJZyJpuqpgQpgIyzRx/50xzZV5/6JGE2WEsI8d67EIJzAEFwwZH64LyzwSOlNATrvUeEgIEEQAYUAhK0/i7PA0II4L3xXlAumNTOeeeQkBC8955xBCAQAhIaABkSggTAp1FMghMyj+N4MptZZ3utiHvjHRFR3On1j46GWtd3s5OMUcaktYZz6bz31gUHPGYMqdfeEq+NRXAhWE4ZAPgQKDqtG+eJsT6L48qp3/z8p1W6qoxvyslkNDbBdxd789GsqMqmVpQCITRLuHNmur/xzHjYWT25Z3zCo9XTZ69/6VIgxnntlAkQ7ibzvAsAxNpACCjrKRAC3lmHBAIgAQQAQOKdBXCIhAvmXDOfa0oJRdLUymhDCEUIlHCK4GlwzjkXKCV3iSYQIASkjAbAqlA3R7cmXbk2WP3c1+786PkL6KogHGD0prduUt7euB630oXB0sat2/VUz/YOW70lubZwMKseeNfb3nj1uXb3WF/5wepqlOfDw+mJt5waTStHQnew9PTrlz/6zodGV3cXz7S2xzezXkRzbC0du1WfGHS/Z/Gh4vY3fh2G8/XTi8X2QXHzWrF/cPxYsr2/3Rksi/Fk5/bNlWWpTN0aUC06e/OCyPXasaDbmooqzEHZ1vp9LZMurWfP/t6Xrj//WgC7cmGweM8ySutrX86c9okQxUMP8R9S6f/1SaUUdSFY5wPBLPMf+1P3dwZ7Ipojpp4JFiVe7zfTPZ73iGCuHkMINJbNtALamtfdo6IIVBDwmqIPNk1T6mBUjBmSO1dv65n92P/8N/6v//SJt7zngxqh3Btlg+5v//6vP/mNL13b2pAMbGMwIAFGEHjQJ7s4qtzMgPeUEKI9LSez3/7857TzCCwEqxqNiFIyIYWzHgAhBEYJY4xT4ZxFgICeErTGBILWOASgnDNGCRIEaryjhGqnrQuIEDwSgB//8PsI+s889a0n3vvE5M71n/7spw+K5vRClxFeTuZJi3NJIID1PqBHxBCsMYYSigBAApL6/uPk+h28vNUwQiNnH31keamfN1rsbB4RC3FLnL73+OpCMt09uHqwhRYEF0a7fjuRHOOY7Q6rg7nR2vOUeWsQfLfD22lr0JVn13q8NrEISawjtFnGaKBWW4JhdLArFk45iKwjISRRmqAUPoBVPkkyW9amHopIKmeZqBliKruU0ajX09Whq3aTjCg29cQWczOZqdrnOLjnpz698eLVvdIjpTo4TRAJI3dT1YTg/3AqigUzYI1zxeSg2z3pPAVvAVwApExAIK3u6ts/9Cc+87u/4/Ump5QKoQtVT8Ycybe+/dzjPxI3iWjLXpRPf+rvXfyH/+q1p16vUpnZANY4ggGIBQJlWSVxFJBQJCKJXNkIBmJhUdVWuSgwEsdxOdv1zjAKQAgQoASsVksr0GqbqtKjurZ2XpQFBOGNFXkyn89E8EuLp8DtKW8lI2qmd+Z3grUUgScZTdJj/bg3iPuD1nQy73ajYjpZ6LcGq/3DndF07tc7vTcuP99JCXM0WJqIhBi90us3TQGTQiJwEk8P9xB5r7uOoY5olLfao/0RpZ0sXyG0yDpLEKtOTldT0RXVcGcicvEd74TD0XjsF3/nM6+dPHHPYPm+YKfBKaUqzVwx3c7zntYlZTibHAVApY+oJzo0thgOusskDsobXbnaGMpEnHSr8nAyP0xjy9iasYGLjkCwruQ8Sf0UOYFAGFtAmmVZOJrcTGSuZ7g/2uhlXYnh8NYtacnBfiFPLLS7vTSTB+V+HNsTy739zVcAjFUlWADC61qLNDU03nrj+kMr9wY9TLIOR7M1upGPhOz1mENjQxz3kgQhHJJgOKckahHC9NwA8FZGGzVbTECK9GBstOTDqc3zNemFJMwCiZP02MmLzXgfYqwqrauSEmKMjiMWU6FJUE1d127rYM+xxcOjI8Pgvocu7Fz7stNU1bSuDUN5dWP0ygsvVjq65+I9y+Lk0cGW9k2ny2dl4S0Ezr1nutHI+Xyk7LwCg7Ojrxy/+N0ea52KTm99e/d2f+VEVbs3A2jWccbAAaeCAXdcDQQ/HN1ZyPrdpDuvai5S21jCWJJEs1k5rxpEX1cKCWlnaSaEdzpJYilYmmVau1lRO6uREXReRIiEMkpkngvCKRMZiTI2HnRunz9Pk84OJ5MAGh04q5wy6JsQmkBMo8zWdnHnBn3uRXj2DTIeezX0zlRJUht917QbnGrGE3HlJn/+xdLriIMjGKzz3oVAA6cMkSQJy7t5t5WsLw3OnTmdpzxLo/G07rZlsJ4hlWlsEmeMYoyncSI4TmeFMgYQq7IxxtWNCRiM8YyJk2eOuZs7xnnCaFnUiIESwgkxtfyDb7gnn7tz36n2Bx/u9AeNVo1X2WgMo0Zc3cHiqPFgS5ga78AhDRDHbO3Y8sr6SiQiQkicCIYEAQNiCCGOIgCfxlEs43E5c87XdcU5Q+KR8KIqMUAkhZGUUe/R5mk6npbahRBQGZW0ZF1VAXycJEkcF2VhnAmecsBiOkXwC4udOJXOh1ikk2HVNIWISNrOdw9un1mjP/iRkysnK9T24QeO7+/PT6/Xf/jkePOgqSoeIBBAAPTeIwGPQAhyIbM8imPZyhJKaZSkjMciSjuDnm1U1uvIWBBGAjrKY9UgArCAzpOgKhdML88sojZNwkiwNSecxMSV2M9SzwjRXmvPOW988Aa1ByajVp4zTlXTzMpydWVZq8qrcDSfJYIzjlEiOOd1URGCkoe0zfK847SqjU8pJYI3yhjjBKGEvqmhCRYPx5UD/cD9948OR6c5Pvn1L55aX7t436N3rsHqwrmXXrhSFCqRYENYa59cbK0fy5de3nglO4
Yi7i90L3Jv5s2eCbSdtZ0t0oBD5ZuqrmQz1851WL4y4AxmR9VgsJD5nXqOszlWOsiQN3VTHxT+sOY13nv/6vWbE1WgAvrqq+O29wdH496pqPXQ2sFUL507sX/pMkZ+eDgrjGeLycr9D2Yn71m997Env/ENO3n5xmsbS1FEu6RsyPPXq29devaxxy888dHvf+2Zl975/g8+9fVvblwejabLn/qtr799ezIZ1qa11uxvbl+/tH7h7Nve9S7dGF0U+dIpITOtQiAGhTGhfPKpl+OIWmN45J1nR9Pw5LPXVhLei9ONN67e3J7H7f4n/vF/qubi7P0P3v8976Depp3WN77y5C//h19cP94/c/7c2Z546amX9KR57cWbb3/3dx+l6/e9+93v/J6Hv/QL/+rYQrbX2j3/4IPLx4+32sGom/0zHdmXkKTt+Ozi4MJS+8xkzq7d2Wpgp9m58oVP/bqxsLB08vXnLrdTurzc6S/En/v9L6/mAz05fH1j887exccefcfe3r6Uca2G3gatPRNUcGKU9d4TQrQxnEBgvt3NeSCxiOaVHu0Op7NJ3hKHe9PRpHrHW0//yA+//zu/6x1es8Fa+nu/8jv/+F/85u7IdLrSc17WxlPMO5mf1p5CAN7UKkkFBZLIpDVYXl1ZfvXZbzEmHrzvxCuvXV87t9KEJhq0Y5Lt3Hn1J//DDyxcqBnEBpYAuiRIAo1Fx2HEobKwK2Bko9nrVze6Wb8M3lhSlDqSwjknpHDeUUJlmvtAtPeIMG9G1mnJY8lpImIXMGZsHrQUJNzFgVpXVXZlteeUguCOrR1vL7R/+/efdM2bB1OgpKkqKaWqai6YDcqZUAOUQbZ6x5WznVQmEvXSEm1LnLuYi3r76mS8rZsKnDONpgFaC23L42lVE6TGBCRAOHdNrZ1Wpuq0O7oqdfCcUaUaRJJEqfeuUhMeEYoRIghKYslLZZqm4lEeRZmuKobSWYDAKAMbXJLFk+m0280RmFbKOxOAGWukiOqy8sCCcc5pTTmgd9pI0fbITaUCYF1N0TvJhUWM80TR0lbeex8wOGM5j6QU2tS60UCSVFCOaFkgPPakxUSqdCM4ZO02FSIRaj4ZU8ob1cQiiaVE6xPJq7qq1DxO42MnToOjjcUAMo8zp6eMUK0bCjJ4deL4qUE/dqacTlREW84Q5v2x02tO56++ev3eRx7c3d+J05PV2HzlSy+cWOytsO6xY7y/mm1duaNHdq5vryytnb7w0DMvvLS4tN5qd601MK/6cpyvJ7PazbVZ6uatJJJpymkQbwwPSmUCeBc4I8F4CEFpYFRQDgSsFNS74AEBg7EuEYwC0c6iQO5BJpFzjnMWRQQgGOOpIATRKDOq7QtvlOsJm1X2TBdkC3yjHIPSqFhAO+Zffqbzv/zH0sEio412zgaPgVDOjHMcqXXeucA58cE7YxEJobC3vxcQpeAs4oez2mzPOr3uq69c5Vk0MqZx6Jy5szn0OhCBTWMhIJCAFGXEjNKLKye++2MfHY/07qVr1668XocpBh1hMLVmyACZQ5jMZ//TR99x7wP3Pv/US3/mb/3wP/u1bwAAj1uDUwuy1ymH1WxULrb766v9re0raSe+5/7z3lkEMxsPdXPIWO2NQS+yTg/KqZQiT/PDWnXijqSdpjicTYdNU5fKKyrZRPlQx8JnTMV9c3EBcz6nBF+8emjJGd5kw6ZZ6stOq5v3k6b2lSf14YGxDXLiYEYdosdeK2agbx/VIm8zoSutvC0GabawkNbNLG0lZRNMHablQbcXMRcW2wmhkJCgq/L0etdYh44gI+Cxk3SCsa4qQZJURqvL6/uzbeeD072qDsOdUSuhF0/Rhx4gj589fjTBP3hy65svmabhyCPkzHlXFM0rr3/VW4M8eOORoPfhroaEBHzwzjtCCKPogMCbFSqEMnK3uxa8t9Yp7zildwFqd300zjsAb8CgBwyeUQYEvXeIeLeAkhBqvCOUIuMYAhK8W4ZtrTKVEYKlLJoXFZdERBIDHA4Pq6rWSnsASimEYL2hlDTacMJ5xCWButEObPBe60YIwQjlLGmaChAACaHEOEso44IA9xBsuXv7N3/2X/npvNVhSd5CTCjPKHcUpkgIUgBAba1kpNaVQ6L2bh3u7La6/boaG1MH0BBsgMAYDQ5dCPRuhyQAYiAcEawPSBgBCOgBIITg0YUQIIQAHAmitx7BeWctBkKJtZoxARACgEfi7kJN7tK2ABARMQCSu9R0DCQEKBu3N8Rf/dWvv+3h9qOPRzSdeLRvGimquqnKJX58NBsuL60N3eTUqeOvvHRVtpOGLQQGSd5KFxLtFNO0JZZvFvX+/vjWzvS+nfHq6QcfP/+QkpO4t65VcfPWdPra7rkLnaNbt169/PI9Fx+SC73+iVPDaRHlC3N1ZWn1eLTT9KU7uLPpu8W4bqbN9M9+/4/83z/5L79Tu1Y/4mketS5IS0abV/cvbW7fuk4Ctjoti9Err+7LphIsirPire88cePwspm1KY950nIsqQyQ4O6/J7l4ZvrShtGICZfGmTxrfewD77768r+/7/EFoAEdoDVQj2BqUSB4Sj06rRRWzsVE9j/z5D7yhDpdlLUH0eFJO8+6vYXd0RVJSL4MSR8+/6Vf+2f/9u+87ZF3Zg1+7/u/NxWBCX5z+06WSGcMJxysQ6UGffOnfqj38e9/58d/6DdY6HhGrIPhuBYMnfOMS0q9D8S7ABAAoapqwYVMUxIcDaTSdVnPQwiUUUo4o9SHQCEQSr1zgB48CoaEuG4vLadzQZEjAec8pUaS33/+ycfPn/zAB976nife8vM/+2u3NvatNRtVs9bmJ9ZTSry1JQTuvUZCwXlCET0QiiziSjmg4fgSfe9DvWExi5jMGF9JyGrmIfInFhe8QscZYlMN68OjubUMKAKhhGISJ712EhPbkdHCvOmLYlqZiNBz6/3zZ5eTiKtqLoP2DNC7TAgEl6cySWRVNcY0wc6Zq7AquC5YxPIso5wHqzgapGCtp1xCUMT7mEWd5ZO0sxwMMFY5labxui/GCpNx0Rw487lXbhUh2Zzeubw5Ak0wWGuD84FQQhAhIN798P97KmoahSR4qwXhBEophWoqwOCJl5QGH/aGsPjAj/2Dd/zln/2n7ytGe828JEjjlOqp+tbT5btO1fec7Ddpy+r9tcXpJ/71Q3/1f3vmuSsVw9ghEQx88N66lbWTXoX9iUba4GSGSHRlubRSJmDLcjLzgiHY4K1DwqlAIADYa/XyZHX7yqtpb9kDIyKrCx6oy1vQhDpmALU/vFUkIjYoQ3CqMVqHXr+dt9uPvfe7hJ/vXnpyqRuq8mipk1Mauu3cVXZ/Z5RJcm6tQykunjj+xvXXAm2vHT89HhcWfFVUqq6khE4/c9Z7DwHo3tF2LL0AWqKWPE7jzkuv3KZ++iM/8l1H4ymFCnTFEt9td25dvTVYEV1D+h3/D3/0eybTfDTc5JLl7VTGbDbb1HXFSEvyVPESCRw7cc+1axvoVb/bzbuLwcxrVcr0BELkXXDGBvSN1aWeM5J0cukBR7NpP+fBu
1qNmZ9BA4TW0gvlZ565ONJp7JmPTnXvqd1sOK8JOiFIlLTbC8dEHCo9iSIVc3ft0hdY8DLl7V4ym4zr0gdOy8LubM+bwudiUB7sB2OiAVnqtIOGuNcCE4hXVs8RfNNM8owvLhzbmxIhe1k7Gx8dDWeHnU6/mTZJv3P8dK82piznEOrDva2FbptY0uxNX7++sbu17Sh2Ov1ja8vWNkyI0e6cMaAyMWA86FbWqZRUdXHxwunZ6KYNzerpi2CToYL149357PoDjyxkLalms3FTri9lN29eP0FWiAzWOGcthaTXOl1qo1yBAoyaFJPdN174JMU4yhbmUxZI7LyP4+TuFHBKZMKjiNeVsrrqJpG1uttZDB6c1hElSRRrEpRuZpOirOsQoDEOgEvBM5m3Y9ZpDzrdVnDm4OBoOqssorM22MClJCyOoxRIrEAOZ6qoDGl2Ly6+/NhbIW6HANyoubM2eGZNjZR503hbH+1P79zmX3zKf+XFZjRFaxna8GQxi5L20iKAB0rkcIb7R+TVq/ravp1rFoJnNCBFBIziqN1pSaSDTn7+3PrF+045pWQkvMMojwklSRJTAt4YymRjTK1NJiPngnU2ACJFtEQK5kLwPkDwrVaGhHggXFULg05d1sGVUcSdc01jKXpKKPGqrt1L1+ev3tDK6ywSnCaTEjUYCIAYC068USLioFyWxstr/aWlxayTewPIiHdYWysEjdNEUOa9zZO0Uqo2xoPnhBLGsrylq5rIqLbW1goCTOdNFHHjyNFcWe2ss0kSI0MeCSllXVfKGA8BA6RRxDm12hhA64Nr3LTQwYf9o826apgktGb7k7q27tjxzurxXNXDiGQebKvXvOtDSfd4Zzif/94f6cAookMPQO5qGiSORZTJKIqkZN4Fo/10XFkDWZeFad1JorqpjdJxJHt5uzI2kYlsU+5tVamza/3aVc55DJWkTEjB4rg0ulRaQtQWjMbMc8eJ0N738tQHYK1WY00SJ05pZCLpSa+Nb3zVGCkiKmjT2GamCGhGkJLg0EWSFUXtlOJU9JK0UXWaCE98nqbavBlKTuJ066Au6+bo4IW1bGHr+e0XX7tx/K88fOHE+aVD3L38tCD1w287+cIzr1iLJBNPfuub2Hvn4mJvMikJSBmlppzUZUO5cKp23kzNSEoKLHHBJd1O44wdzUo1L6pON+9CmBvMTp0/pvVwf3f+xFsuXPvqKzjVo515lmRPvOXk089tzID2W/Tk2tLGH95oLz167MJDR8/fOpwFg6h9EJL5ujmcjH/9l//r+dOnnnvu5Xe+5S2XXhvLFD7ynkf+4PPPDto5eLa3f7T1xo1P37lNsX31av19f/Iv6Hpy/doLu6MXto5uxZ32l7/4R9/z3Y+ePLMKshnVO2magakom0cCCRST+dF4uPH0Fz43nYUPf/zxz/zmi7PSK+1iTurG/dpnX/a16uek1zP3dhYXVs+evv9dEMKg277z0pOf/8Wnnr/66hPff15N/W/+zpc+9L53C8ZRsA/9ie++/76Hb27evLlbfvsrG09/5cbq2ezdH3hQLD3w+S99/X0fXnzsnujGjVsBB/uH4fx3fEDvRztVPKps3jt7cGdye2Pj+z/yyLe/Ut/aE8s9ec/FFuhZ2o5bq+LeOH/5jesXzy7Z6fStD937u7e3p2aunaKMJin3CEgoUhtHMpLRdFbUyhJKXc4XVhZUWWcxOTw8mA+HpYGTC9EnfvGfnjt9fnFxIRB48ulv/+zf/vmvvbghWJT2I4eQJXQ+U0CRSfTOSSqqompMhSR3emaMK6qtva3baSyE4MPRZGWh+8i9504eW/6933zpaLJ7b1ufuV9WxNeBCOii6SERwBTC1c2X/u1iFnXPngNofftbR7c3IWp5zqh1IU4ixlhw4HUIQIx1gnrOaKXqRNJGzbv9bLW/cunVa2mWzMqiVIVqFBASC1mVKkl5xJiZNZGFtZWFcxfOv7KxUTrC//hh5JyVnGMAITgjKGikG1sR8cT7f+DmG1uT/aMHz99f7M0LHneSNOe2T+3Rjcnh9k5rtSeiPF9ZMZIE4jkjEaMUaCwS6y3nSStqjyabedIK3qapVI0XnDNsW+8YcZTTGkIUdVVV2KYM6BqtKPJECgu+MbULDbjgAVzAKMkoEqV1p9MJVhHBJItNXRttKAaGgJ4RQhxzSZJab5HSNO9UdU0QO+2O8845pyoFEVKeT4uxBNTOq6YSJApAIKC1mhDiAlIESfzs6Mh5K6QkjMZx20LgLHjARhkC0BoM7joeGCfauMIYp7z3TqsmllwXjdMhWuqWuhlOi1iQfrttbDqdlzzOAcK1q6+cPPfgdLbtJkMPpL/Wu3NnNxUkT7vPfutbldVvf+c9j554Ymlwuyv05Vdfa9QE5TxKmXP+2NLqbFps7e2eOHl6Ni3rwvZ7x5Dsaw+VNos9ejQtj6+1rYZAkXh37kRnWTWTqb59UNWBWO/imHFjvEcEAhCMvfvPTpxHG6DWOomix+8/f3njWmNAWx1zHkIABM6YtVor572LI2Yx+n++Yn/3GeJZkqnqnQ+kjz+ou7nJ+9y7xU/8ws6nrxVABGGlrhuEwIXwJgTngjXKWQLAGeOclIWWsaCIzsN99zx4e+vavKgN+E4vW1nM62lz59bt9ZPrRjWLg8VGld57z2xZ1ZxRrQNjiA5EsINEvPO975nO2izuffSHPvDC00/9we//Z2udN4FTzlOutWWCLLTY2+9fGax2H3rXg8+++srdKTh3zxmf5uOi7HQX01RZW9Zbl/Nq2F9b2tk9VC5YZ5xr9veOkjQLNojIz4f7YCrw4ajYJhhR4BRUwtl0biWjs1JZS+5s7tdNSDn1i+zB1aVsUKV93szIzoHQ0OezOUkxStGr0hSWil6vs3BrZ4cQHzE7bYYi6w56K7ePRnmeJO14roNFjLLEziy3rtjb7i+2FjpJ6dKDa9dFLqN2EphJ1lKlg9JNsKi03x/NI+B5ms3nVV3PtVFVXalAKaW3d7cmxc6FB0+vnT3ln7m+vpb/4EdWT50rFvq1xPQeGS+fF9f/0bfnRZcSpmtDaYgIqeZ7IQQZUeMxeAgEbAiAgIjBe++94AIArHUIgSIGFwCAcEIRrXWBACXogodAEEIAi54SDEgpUkIJQYLeeQxIABGJ9c6HgHd1dy4Ypc5Ya4I1BoOzpvYAnDP04AMUBh2QNJEenAueMsIJAaQQEDzGccRpoDyZV1WSsDiRVmlE7Ebtulac8HbcKqqZd55SSgn6u5Yn67RCiiGLY3SV6EoRiVnjG8PKgxlzOhBkjEG4mxrDSjnnwfm5n88F53W1p7UlIiAEJJRy8D4gUvD27o0MCUHEEN50a999ndw9vUEgznvAQCkFxAAhgEPCnPPeBy6BcX7Xe8QI8S7cpaoRgnAXI0kQ7r7agAREwCA5Y4jasnHT+vf/+Wv/fP09Z9po4U3xrD3ohGo+yNJmPMuWFk4+/MjedPTEd33gYHf76hvXDw+3ckkHKaMYjDJ3rlx3TVnOSu58VM/VrGTttp3ywfqZ7a2X1o6183EaHOU8euhC
67//3//ogz/2l1KOWUfc2twgnA/nVba85Irt1qDV72R3xmx98ez1yzeeeNd3Djd3B9prW21ubR1ubfBc3Nk40jO3dvLUnWt7e9P5cDxps3Dq7OK977pYibC01jFVFSSBoBkktpEi6qcY1jqHL9XKIeEA1umbewe/9jtffvvb14yyPPKhMbpW5mDIEH0/ssCZiALVAZpQ0lefZ7/7FatYzsBEEeOMNHVDnDn0KqC2gNOjcUQlDc0nf/oT9Q8ePPf1p7oZBcCklczGdT1TjCEDJixdEM37v3vpr/7Pp2fbB4sdub+JjjlPAxI0xgZPCAlAEQERgAvmXZBchACqLmLKyqZBSggFABSCWmO1cSEAFYxgoBQYC4kIRDcEoJ3QlsX93VmSCI5mEhpn5J2dZjq7av3lz3z5mwmyWtVFpRpD9qZM1UFw78FRwSXlDKw2KnhHSMKoAAIi4c5pEuzZBXrfcb57oE4ud08NMvBzGZGAUHqjLDorppXd3i4ncwdU8JhzSZECo9BiZO1Yjyf0cGe/cRSVGbSTODF1M4mQNFXDkLTameSEySSKOQAQxtK4BeCmezfBAYdgy8ZySpASL0KyWrIuGQCNQjndytcXTlx44GCsFR/Yykuqqnpq43z/6Mqnv/75J1+5USuczOWsPERCgrXBmxAcIRTv3qMdEIIEGfj/kYCmvet12xhcU9gQlFXa2RAweAiNLiNmOIZf/A//05/6S3/z3sff/dXf+mSex04ZsJahf+PS9dXFJ2jUKPDzaZmC7izf+tn/z71/5seef+06FTICglbXPpBAAVjyQ3/xr7ryxebgi0+89dxzL44+/9RlWyvkEdKADAHZm0hqYwgIIuLJTH37j65++N0ngfnRvE6YiLit1URXrppa4nlt5X/+1M1pnVLqKCBS0VvMv++DH2Z634a9w8PbVpV6TlG77kInbeXL/ZNf+PxXWzntr7WaUM5nk7muKlvL0K50EYhptI154JwGtEfjYTtL8zydz20kmczyeVXpYX1ycaGehfnRaPPKxnPHv/bYd7wHeFb7GW0Tgs3CyVhAgR2yN77Wb3dkRJkgjKCzVdOMY15Lkdsalaq5bBFi6rJAf1ipWd4ijDfzieIklpxSoJPJlHNKAcCHfms1KDoeTlo9lqe8sWMbpiLOsJaASAJBTmxQo+GtNEpqfdQbrKY82z2Y8rgvabazOV48dkF0BwfbV1JhmDWm9v3Fjq3nqiyVVUV50G2vlXWdSXa0f3Dfmfu5tCvrC37fgi7PnTjeKEKqA2uRE5wUw8X+uvNU6bnRpYUkk9FCp7+ztSmFzHLeSbMAzAvfa0XBNTTGxUELbTU7qrxjVCkR/OFYjY+arTuHnMPJU2sL3eRwe89hKGaVUTYW7UqLSs/e9fb3ev1qnHb9DBzI7Vtb073tOIljFiiv24t8vjnr9NpOOzWvT66emuzs2HnDZSuwhBOroRyOtyIfVk4uALJqVlXFvjJAZDdmUZS8SUBL08QDzIpRFmdRLFgkxrMCKPE+aAMB6HxWN4314BCJt8gEjxMO1nZ7+dJSi6JLomS4d3A0mgVCQcj+4hIELOeaRllZ2Kt39jdvXt64fnM+ntR10ZHN+R8e+Gk8mQ6TzjEB3qhG6xlh1ARXluVs1Fy5Zp96Bl95g+wfcIcA1Gutdy184VvzQRb6eeQBbhz5o5kvGqI8J8wzAUkaccZWjq0OBr08TvM0Weh1Us7SOApS8kRGjAOQYlZxQSlip5uXWlEMDEknS0bTWQBHkeZJnCfgA4ymc6s1IaC0EhETkmAVWknSSmVVlspYSqlgxEPw3gGhPvgQgqcRE/HcABoy144wzygYpVnglIm0nUSM5K3W8fPHZocFqwwFSNOUALEERCwwBGN0CL42DWcUARbybjkvpYhCCHmaHI4mFDAwoqxplEYkEAJlwVqHhDZVFUfMKzs8mnnAgMARrTKU02DvFsESF6CcF9Y5CD7iwka4urpQFXpe2DTCd73tvKq3s/a6KsXocBq1E3C3B21932n+aaKNp9674AMSoJxyztNWwiNOARmhSRIJIREI5Ww4mdbGdU+td7KsbhQhTFI5LsoyuDzuQ7AxI+AUsT6JMtqhpTJT5Wbe9tp5RBswxBPMhdAhGO1q5ShhPkBpLWFsMpkTQHDegFd1HQnZKNcoW1tCKQsA4D1njHHKEpm1EldWwREbnNYNeC0RIKAq60De3AW9bnzjZlN5b+v6H/zLvyM62dOXbn3tKy/3OptPvOXtz77w/Gg+bvX86GhOmMwI+/zvf/ml57/+wz/68f6x5GB3E9Osqcve0kJZNfO6YAyLWZXE0gU5G6koddZrU1ZKOcKiqtBUcOvlbDpbXpYmFJPZhiNua29npbu81o2aWXVuNbs+pbPhaLPZWh/QP/r5f1WNNk4/8Z3v/PB3XvuW2b6+n2RpaoMibG824/ubp9fZ9PDV+x46aV1X7Y3eevHCJz/3nCAxkGCNz1v55t7ey7/8n1+58ciP/5N/eFZMHl5Y+8T/+VOd09WP/v0P8lDouWZZ+ulf/+W3P/7+ODbl7PDU6QvFZLq1dSnppG9cnh2M0tdfvc0YQURVa8kYk9RaFHEk+vixv/iO6zeqka8XpW8F+x/+yT/r993h7s6xLHvwwpmvffGlH//7fz1bO3bqEfneH3zfM5/7/O/+0u9MafuJ7/lgYosnu0J6aq/MNq8+twyu2Ay/+bXn7n90zdLJe9/9A6SWC4tLcx+WWwlD1AvnkpUfPCOOPvkzPz2a4Lsf6//zn/kXv/Rf/8MrV64+/I5zz/3R/mhft6MwOdp/4/XLYGrwBD1o58B5Lnhd1YyibizoIo0YS7gUwpXV1u1t29SqVnkGf/fvffTdjzz24P1viVsdqIvDNzb+t3/+n770wkalmqif+wAawDaBiBBHdHmtv3s054Iq6/qDDhR3TeBUo8+S2FndasX7O3M1N9YXX396cmL94+fXVivc/Kd//12jo28naw8DDtAnFIh1QBjZu3rrn/31o34P2PIRy1q712V3cW08n4L11oKQwmjnvDWNf+Shx3fH22CnEEyv1/LGWYvSmvnu3onl9sG8phiM1SLmqrLgSRzLWltt8ZF7jj80SPNe/h9/+3OdQZ5wXxv3x6YiShCNtVXVJHlMkQBje4V+1+LZPNx43wffA5anvc50pqC2dLKze+eG3p4mot9ZOj4j1OdL2jVq3vQijJLEVJW7azJAp1yDgcYytd6EEJATFqEqfVHN2mnuTVBBUUaVq6JEegfBB+89gUBJAIJAuJBUW4s+eB+ABhuC9WCNQeA2BAY+ToS3Ps/SaVVACM75SCTOO/ROK42EGq1DxJ2x2ti81dkbby6tdFbbK7duvSxY4hEok847yokPDQEWRbKpKkOtDSYEQoTgMivrGnVNkHIR+1B5H6SMAuXMx8aoqjhqd1ass5yyBiGOxHIrefqbT927dspnkdaESzBWh8AjIQIl6xfPv7F7I7gobSW7m7sOQpqK42cevr1xPeklF4+v3Ny8+exzT7f6eytnz7ZPr89e+1Yik/FRk8mkqas
2D1aVzHZJVPdW07pszw78ep/xfTmeVJNh2U/Tem7SKO728hCADSd5GnX5PCHhoMHtcaOMBSTGewYQM+oxeOcFZ4Qwr4LxMCqb129vRExY50rrADwlMCnrQSvx1iN6gmisJ943Cm9PbSuPpyr5la/ir3yVIcTOBAoNoQseGscqUyqkxDpwjScQfHAB0WuvKcY0gHU8EgGC88Ej3RtuSykBCUfRGHXj2k6axp1BdzSaHB7u9Lq95f7K1t6tulIhoODCUUsoFkUtaRRzNj4avfcHP/TGjhv6sHHrBhcckQVEKtikMJKxh08tnerxz/3et/7W//Gxz116MknzN6eAqVYenT+3/Mq3Xp/vj/MkJqZh4A+2tojnEdCyVq4quzmPYlvNZmip9Jy4EEojREwl9d4aVWlnkdZRJDMaNWPlTeO12z7Sx/PBQppMpgfZiRPX7kyqas354nBW9KKut9YahbYGXs6PtgYtNi00KtMlfTuzN4avy7Tdj1rdNp+VbmtsCoCMRtJh3o1WlhYk6pia0BKayumoKEzd6/d11XCCYGEyrAgkwaqlbMnVt8p61uLYWl3eGRbKMmeiBx757u6CZyJ77/c8kEfN4w9MeNJQlgMmQdAz5/mf+pMn//3PHbrQCgaRIkVst9vlvLK2DgDBB+c9oRiCDx4hoHPOWEMJFSLC4L3WQAkXzN4FOXmoG8U5CQCBgQe0zgpKCEVGECkLDiijSNBaGwAQgSCjnAGCFIIiQrBMMuLABzBKC8mREO9DCI5zRgglljgH2jVGW87ZH2NfIY6SJEoY8+NpudKPIxYmjVVogKB1lngIDmpdheB9cNSBQ8+lQAyUUkGps8E2FQIQgsxSq1zM83bcu7X5qlPaO8cllxGnjFurnHMELVAfR7RqaiFowNBUJkAghEII1hpA9OAIAhJKCPXeIULwwTmPBEIIFDAAEEJCCAGAAAAES4BhYJQGigwpE4wgUVpZ5wAR7pJ0gw8+4B+LAdZ7DB7BAyEQbKOcdEEp/Maz8//6M8/8g//1fHflTQmZRklZlEfjac6YKY9eHBfnHv+u0dFocf3kcPsOumTrxm1F+bnzp3Qx3987fOht90zLYtCBF7/x7Y//jScO5q5S9tuXXv3ghx/ZeelaeTjF4He3NqNs8b4H7lk+dero1SvCu7IqWmli6kZN/Eq3213MNrfHh85+/M/+eb/9+nh4+PrLr+/9zh8tnV5kSWIbsXzs5CPvf5wROz8quksrr37mqbnjO+Nya77zyNuOT3e3stYcraYgaSiAQyQIJ1ZG5P3fc/Jbly9P52Q2rkJGp3P3xc/eON9KW9NR1A8q+P7yIO33rr42On8m0NQRO59ubyRp3snOffbzu14ulKoOXlsfYuDzYo4UXV0/es/9ebr0zReeoYnn3o92hj//058gjeostmsTmllDAZO8Bd5ppfOO/uHvWzvz7mQ8qjtkeSFfkr6qgDKOlBLb1JQJLiKtlDIOEQnhhPAkyebzsRC0KgtKCOXEOuedv/sjM84ZAXAqlW6pGz/xlhPnTxxXk0lVHJiqsconT5xeW+0fX2nfubn7zVf2X70xGRWVEzHxbL9QO8NaRKSuw85RU89FngKPEcAAIhUCvUV3F29ovXGMxiQwBNVu0+98tHvpcm0UVLOqlVJXNTYE4kA45Fw2DosKOBWBsSRJkowz8GkcdZJYSCEBz68u2qCqeeMCNaVKhESkSL2MZSqACQZcAHAH3nlNA3XWC+aM1YRT6ojX1jTzKB/0LrwnrDzEfRCCm2rbmImNpbQhyhY2X934jZ//j7cONw8Op8W0LuqggCIE7RxFNEYF9D44ACQASO42mIGQESIT9H90FaWJNB45cgVivN8kUjNJvHPoEQh4YpmbkO3P/O6//HSSZv3+oNYzpM6BFe04zOpnXrjyyCNN1mMtMbDWjocHiRz+u//9/J/9O5tHIyAcGMVA/PDgFjH2D375n/y1D63e84h+7N32z//AE//+E/UnfuGaMVZkqbfBOU8oZUiNUZwRoCHOYq3dbNqwVLcGkXJFQEe9muzC7nDlqQ3/ytZ8/yBllFIKRPtHHjrjaX3rymelV5ax9mI7P7nSlpJRtnEwSir8zLNfXkpy0cp3m9nicqrHdl4X3XbSzeh0VgQkC/18Oq9WVxen40Mg3AdijHYuNDUoxCb0Hn30O+qt/W9+86nAG5ryzz75ZcLs+//kB7e394sxQnBLJ9eOdq9pH049dGI6bapx2Vnsa6f2t7ZCKGhCBI+zThe4qEstZHZwNJpOpv1Otn3rciK6wUIQqq4PQKzErY7RlfbBaMuInNWeyaSudW2OLNRIbCZ6jHFtSu1nyk1FxLKkDwBRhgz15OhmhD6NWmm2wIShLNezg5jarBPXU8viduNTbeacSaM4x3Y1V5PhVIj24sLKYdmk9bDNFE+TtdWTe4fDmMeCU+frvJstHTs5GRcs4uh7uppLEgk3m+3fyUXUWV/zxKMOh2V5bLAYGtOLs+CNzKIszWYHV9NcLi4vnr1X7hxMnQnDUTGe2Zt7R9dvmZ5kayuZqky3v3jjZrm5M2kP0m5WXHr9ejrovvDS7fH4VYizEWPL6yJNaVNVrJsev/8cIfTg+vhwRLbeuLUcQYfHdTOPYhRxG0g3jb2uxrPpzDjTbS95D4Tz/urZ+XCyuz28OwWjyTxNIsYTIDRwvjecTudNnEXG2dl0ThkHCsZ4a2wSx0RGSEiWJSuDDgEDNuzsHBRFY63PBws1xkez5tr+QTmabO+NDsfj8dHhdHxIwXnv7ra6WqtndXu0O11Y4cXkkDurfaAU5rO6bMx8Xty5o775svijb1vXYGPV3dpLxrDx7NYYbkzQeW0DeiRgPSPQyUkS8cXF9vLCwuraaqvfIZ4445dX+1maRpJRxMOjsWB0ZWkwn5cAIISY13VlQqOc4KTfas2nZbfXi9NoPi2axthgnbPa+aJUSKh21hjXNE5IGVt/6sQal4KJ0Xg0dwRIINY75x3jzHqvVZCOeucFV4+eY4890j22JKuZ2bpTXx+za3fU8dVVSqP5vIqkkFKEANZ4SiBOYim4NtYDUEas84LJqlEmBB5JE4Ju7KQxIRDAkCaZBdcoMytrgoFz3liNlAbjaqOc89Y57ZwQ3PoQKHofsqw1mo2m06nHEMvYAOm1u7Hgdj4bjQtnrNWuqa5n8bJMNeiR1YzHcV3Vxf5R1VTRXUP4XQADDTwiaRLLOIqiiHMqhKCIcRRHMiIEueSJs1LSppzZhmZJQihOi5l30M5acRpVlY0joY3VygSnCIU0lqUpHaGBsZhm1MGoKLBB5wIScOgCYqGbg9msl7byWLTz1mwycaZZWR40VgWJ2tg0ji36NEuLSWUh1MZqo2HqTKNjBOKBUMriXHGsa2ONq6vy7hS8eukGoE376eadurW8+PLzT0eyWl+SVV18+guf5VkeeXv58tZCt8uilCD8m3/8L375t//7c0+9tvBgjgsh8lZweajquJXRQksqwUvOaBzHwdwIeq+/0p8cOJ8Il/EhMxWVcf/0zEIz3hX9dHxt/8i49v
3nyCzfK6vZeLJ08sz3PfKe3/25n3NWryyJanb753/yJ/7Fbz0ULTx2beerpoZxWaHDuVFpO3398Oht73/niRZefua1/kL267/77CGJJ0HgvO60IhHCwnInGiT7h+WNV1/5qX/0v37vdz58jC79yI/+WElmT37lqY/+yfd86g//IKTM6L293W8urK0s9Ncn7oAnemld9paW5pg99NYLN6+8eOye1XG12zQ6ESgFjRgLxvd7Ioyh014/++B9jk52ji6b6k5+/0Mf+aG/SiaTwthvNTc0rlzZDVMtByou5tq6etBmbvTi7/zmF1Rwx1bX9q9vr7znOGeDqzdm1cHK7UvZmcdO5+n79yfiSM13x/vVUNWj4fZL39i4/ML9q/FHf+itjZkeX0v/4Cv/bazH5x44fvXZl8wUPvi99+weTLdvbjNG19aTWzdng25caa1qJyUwioIRrQxo75qqKZtG6wQgtvDB973to9/7voUTq/3TxzuddDZVT//Wl3/5Zz7xrTe2yzimqUxzzmJWzBrOMUo5ocgkbmxu16WNMoHIkIZYMNWYsm7OnFqtZpYH4m2Qkh0NJzwRpsw+9fsvd8P813/1rzn/X2HVA0yJn1LiArOAHIFuXZf7VX8/nLh5rSSRjUC4ULOIoqO6MSwBsMAZJRF024HR7I0rG+12LEM4mk2Xu9m5U2e++LXXeSIbG5QPQBnjXOSiqpS3SmtKkmTraDLZuvmWh06vnOjSlI83h+LNDkfwHpyzIUCSJt6GqqoYZw+cPk7g8MqLL6+s98ZjfbS5nUV8tUfv7F0+uH379NkHapTTQGtDp0eTdi9t5YluFNKIEtqo2lqfZXlRFhbieaNpMLGg1Fmrg/cujlLrEMBFae4C+ICMsbuyadUoIYWqrfdackGRWnCEAPWGMeYgKF0LJoKHxthWnlfV1Fk9b4IJhgaUPGqaJnjLeaibeZpmIomUtUzEnHIg2O4sEtsMj65712hP4rRVVlMZRUkcN0poYwI0lBMgQcrI+RBF3arRPE4IkdY01lZSxowxb7xzGGQi4+XA+42uZUSC18YEp2vsRFevv/HIuz42nhSNDxx4kkXD7X3J0dQWhD5+4kLjPYkylKMsZ9c2L0XjJk/yZlJZTx948C1XLm3U5aSe7v/27750bHEtbkVnj5+9dfWlrLXcXszSJDncGs+G+sT506qplbLEBeZELkjveHtUllWluYhs4yhlsYyNMzxObCc0EzOjxANRwQHDiGIeU6VN4yEED94DABIiAy0LXwQnKSz0kqpUHGGhxSMBjAtlXFXpQKgQ3FhDWGh0FRwAQSZYrbWPQDDnnPHeE/QUgXFaGQPokBKnQtqOO1ES5a2d/R2tDACCD1xy7+xkNEbwXMr11fVbd27Hiay0VvsH7TTudVrW2Nv7284EQkkIYLQGB86HViS6Uo4Oi+LSKwf/3Y4aMpurrB51etlk6tTosBslK4ut9UH63gePjfZHRYi+/dTXVpcW7nvo/N0pUHUlDsv9g1vzrdvoaOGUKjWP2byuOQfvdXCYpalMyEKvW+AEKZw9e/7q5ddI4MiieaM6eb8qtQXOQuqJTVuterpXV5UQKe+mU4u3tkcpk+MX3d5hOrGW0UmrF1PqJBMIUNe1mo5XVlpa1UuDHC2OJjPvKMNM1X5ia22a0Uw7z4taLy32YuriBBJOb964neVtT7xIeABjDTkazgetBHzjfVBA+svnJ1tv3Nm9wqhPGURxoq0ddGH53GkWL4l2NB8fVJMCwujYcZPkDVL0TgIIDD6Affe7j//8L90YziPCmXPW+xBFMo14UxsXguMOHQCiD3f7PkgA8MFzKpkgXmueUmOsDUAoC9r0MrlIGHo7qUxtnGGEcwFIlLGRYJwQAAQkSCkF6oIjiCGAsTaSjIJlhAGAh4CEOGPjWGpVEwBAChTCXYcAtUVRBwzWeQAMPkSCMk4ZurqeI0VvbFXWJJEPnHugbIor168prRmDNJNlXSASxikBSLIEkKWdaDaZhuCN1UC9taACmVVzwhn42d5kO4AJJGBAxkQkkihKi3JolAkQCCPTqmREeu+0tYQy6yylDIJHDISg954S4gMGH8CHu/4gejeWRvHN2u83u5zAefvgmbdO9HDvcM9bw5BHUaJUFdCDBwjg7+bU3jza3bV5YfABAlJCQ3BIiQ/BO2ffBHEN/vArs0ff0jzxrjcJaNvbo16eiH539UyPNX6+ARsvX1LVER9kAeqprXWczjTd2bqzvLKYxnJn66jXaU9BtzqtV1++3FtcTjjee+H+ozuFiPKwpmdHo1k9D7LjmuKVbz7dIlk77S20F1sDvPL6pZP3n79+fXPjxuaZ+y7eqW5feuW5y1/4MhKX9XpsoUfTKM7z/tmF/tqprZ3piVOLSRRffvXq8np39/WdIGJkYePW0cUHIsCCpz1gifPWmRrQ6csOa2gAAQAASURBVAB1WWXtqNvxgcjvevD4s3tbo5HzbcY6q5XWzUy1VtLhnmqOik4vHW4cBBrSQZvi8b0N+fXn3RevijKYPCPzAhijQIFG3BrLGNnd3+2d6eWJtKaqXSFJiBc61PqDgxGX3FvgMiY8MnUjBH7ovemf+cuDX3rqS+cWf/ilL/BnL806i6vcE8j4dLhHgQYbiMQ4ThGIg2C9AWdCZZMkKqvSA6FIjfZ3Y4neBYrgtYki7CX4jgeOPXh+/cEHTw73jyZNHfVEPa1l1qKMD1ayWJBz6dqZR84f7I9eeOba55+7M5mWtWVMIKGISFqZDMFTYMQTpMx5a7SOoiRYZ5z3YEUU6caAByaYp36x6/L78zv7OhNIGHHeG2WztNMoWxRqNDMyySNGkBEqKUObxbQdo4xCkgnGRCsejI72QBKrnUGqG0PBUkkC8RZ8q9f3ItZNRbxnjIB33gePwVvjgxY84QDeBBWYWFi2DJAntjY33ji4fOnZrNv92tdeurKzee21l3U55ZlUjXeBABLrtPMWAjBGEX3wHpEgkgAAARGAIgWPHgKLxf9wKooYscGZ2n7HRz5y8r53/Oy/+cdMzYN1zgMhVFlDvKckUMBpOQ3eLy8tT4pR3VQUkm4rdxZc4+rhXpblvNOqamqr5sS6e+B8/JWXPEPvrHbOEkJ8AOJh43D8wIPxrTdujXaPvuuh7NWHO59/YU5qSSm12omIhhCiNEWg2hur1NdfPHz/O+4h8bw2ta7rNBYybV3diP7Tpw7GjsuYDTIZxXRlIV1Zbns3LWfjJBbUqqqoQbP9ibpR68cfeniQh/X1U1sb02o4fW13t7vIE7mQEDqcKZeIkACP02JSzLRmjA3H8zzLeSqq2tTOJ91ezhITRFzl2y+/8fJz35rsTjvH20GQvZH+r//l95zm7/nee4/m4yaAQd9Yd+fGtf1dw5P2+rGHX3vjhV6nnQpwgXAOwQkCEJxPkmiibRwv9QZWzW/fd99jjPRCK7tz68lOu89TaX2HUD+b1qYBDMZYtKHWqg5sItM4WOLnRalllCQC6cyoKG8Fz5Gz1lIaaV/OZgRIL1mc1zbP2HxyZIo5ksLHPkmzdr4KB
D0vm8b0+hemo+tVM+ZywJNs8dSq2RnJOG7qQulqf3iIGGXpUpS3oSk4T4p5xehCEmeMsRAfGDvPUrd3UEREHNw5Wjl+KoCa7N8ZJG1JcPncBc+S3c0DoufH722V1WE9M8HUGQ8Ly+LsSmpD/OIboy1NtsZ+PB9xQoxXVeWmE/We91/Ms7JUVT2Sk/0R5VzGYdBvpWnwWDfOTA8Oo5z3BmvLZ+7bORznrbip77i+CvXUBUOcV3acRVJGrdqZWjUqCEe8qvid7blwvFb07hTEMS/L2iPq4M2s8NqKOAYE6wGFcEAkJ841x9YXkQTCSKuVzeZFMZ+OhzMPAmU2NzDX4Y3rh9vDYn97T5dGjYbzqgL0wRkM1oNz3oGHAGiD/4lffPbssYtSFIYaaq0NBIKrdXx4NK+1bhqYltgYIN4iDVWjA9xdb8FYAEqdD5wRTrHTSU6sLZxYXVpc6g8WFuM0tgruwk0Zw3aSGu2Rg/eBAgrEpi4AQsywlbGycrNZTQlFJtC7biezzlVFVdVaGxNCUI0mHuIoNdZXk6mDYF0IwXnvZBL1+l0XSKNdWVROWyI5BgCCjFLjnbI6j+H73r/+D378ieNn0igBFnQxnv3GH1x/8vnATDwcQZZHvU5nPqkabWMueCSaWgMioxScj6MIAGtlAGlADIQEgtY74w2jzBqj6zkyZrUhhDHGVaWAIGOs1qZROnh0xiNF51xZVYwwguRoOqrKyrvAOJNcGFOPR5Nh8ISAdTaNZBTh6eXjGJSvFWGMuNI1RVMf3trYm+swnALhbyaoueBZJ4mjiBMqpUAClBDnglLKOeeCi22SJJIAVo3O0tR6bElRGNfv5LaZUUjzJCmLqqyNZBEn0jgtBO/mrVr7pvYBIBAmSFxb0N4POq3F5WRr/yDL+1LEAoHxcDjeR0c4ZcGDMUFI2Upa03ld21BVhdcmjWUvz2rjGuMYInJBCDUUt8cz5zw4pM4b9SYBjQMbTgrUfCFfffYrrz79xa9S4i9/+dtnHjn943/vR3/h3/0/XGJ8/NQr374ZqWI43y8qi5xdeOvS6bevbOzd5nE8PhxSWptmP5imv7SSINGTel75yHF7UN/c3/QSfRYtdiNfaU95oJwJGasmuEn34dXtvf8vVf/9rll63nWi95NXfPO7c1XtCl3VVR3VQa1WK0uWLWOwzQA2wUOw8RhsGBjmcA5zhjADA9cxmWE82IwHnC1ZtiyjYFmyQquj1LG6u3LV3rVzeOPKTz4/VPuai/U3rPt61nOv7/fz2SKtIUW0vXqhtzg/3tiot26itt/bnNZWPvDQQu9MuP32Vz/+/T/xqT/7U7/4T/+Hfpsb0xBG42Ebt+LvPPdC+0z3cGNn6yYEPGosYhETzC6uJUtdtHX7reWTJ84utM/2urIx1y9vXLm6dbB9830f/8DDT/6oE9HaubXu6fTE6Qe/8Nt/+OE/8WNHh+XeVsVk2V5K9+/mW68f8PvoD3zik8+/+FLa48GgK0sopgoxt35hcXVt+fk3N4K0nE2OOi129fXv/uTf+Fvi5BO/9nvP2dn1xx+/+Jf/1l+0rVM49m8/+9LXfvNFk03vOzvY3tkYtvET71nanAYPPvLM/s3PfOJTH//a5z47uzs9fenidy7ffO67r/zir/7EwXjaatNhK6I972p7caV3YaGzdLaXz+anHzhTzsdCdBdX2OHx4fHO0Vsv7GgUSxBZVZlSRwkvR5NyQpNEcMaY1aaqlGsImJCg+9aX7jt7/iMfeercuYcWTq0nUVTeuFlVhy1199f+6ef+86++dFxoHOB0occomddKW++kpZh6D3EiRIRVVp1ZXLl9uKeVFRGfz3IMlmCSpolqvFTuoYuX7uzd0rruLqSq8f04QZOtn/npJ0h8pTy4lfhFIw+V3AjSS4BSbYEa9fK3rncW1/ePqy52adA6GmUIWySYNp7HgTRaMMYppdi+fPmlYUe859FTdW3Pnzr94nevnrv00Ds3b06tajlmnFFSUU6kt4KTXNbvf/SR9z789B+++NyVK7fT5WSeWa7U4krqE1hqiatbcK+eIOK4KitGMULIWVZV5eT46JXd6ytBXGayrq3Oc6yzW9+5s9BOhpeeNqdWgAs5rag2YRLbunbIY4GNQpwhQIQRggkTgcAA1mrrjHNIiNADq5pSiJhQZonDTBitEh7ISjPGGWbOWeQ4Rhphao0EZBlG0jSgnbGMiw4gIIzKSiGr5vk44JyEkdSa0QAsFlQoI0sjA8EY9ZRawoXHMQZSVlPwnhGMQTd1EwWhp7EHT2hglNTEg6MWgBGOHaLI1KauG82FTOJIOstEYJ112jPKTTMFR01jTDGJly6iIOVxN59uA3ZRGspGasGmZXH5zRfuf+gBVEmlGlM7QYUQuJwfR6qd8PbR1m5/pR+kabqYbI/r2fFRfLq3sHp6b3djNC0xAAaNdX7p9CrCweH+4VvV9ZYIBMTbdwrCTKU0A7x75aq2JOmkkUitY9nBKI3CFOMw8jFnBEGlVMRQGLdnh/uB5EmA1hfouFCZBMSJ0lY7RwkOMcKcZIVGCDltKaXOgbTeOu8qLZUFhnutZJTPeBADwpgSQLioFGDknLcerPUEEVkr5wAhUAphgpW0jOEg5AY8o/ienU0IYrSdmYJZWzcSIwwA1nlsHWHYaWONVcZtbG9678ACR9ggVynJKbHWeuQsWGPsvaYSxaAMIEa94BcfOHn7oL787KuNs0EahJ1wPJqUTfOe+9c//Nij8/3JA+f7f/ilbzBsHzp/PzU7PU4uP/ete2dBvwP5aHY0mhSTURSG6XC1EMpKlcQRIogxsLVppEZelIU3Ll0YLt3aGCM6SFg6z2bEMyc5sx5c04v7k1HeVCYwLEJRZbjywaQw1aJI29FkglQVpFFAsAooberGo1JrZaocCBwd1lwwSVDdyNL72CGGGXZWaelpQANPiYtWB62AhhhWTi3KfNJfWrGemKbMizkgqPPSGneyL5b6yeHurJ/0dkY3kcltKaMkRoKoupQetReW1x+4eDCa1VUWCREgHAtY6BUEKu8FRhgI8sghzgbL6UpfTAvvLKhaUe8bV0ktEfWEMwJU1hJh76zzzgEBggnyoJSmnBBkKXAPAMhZbymDgEE/CCLiuoGaSnfY6AY8AccZds43UiJMEWARBISBdxYQeGsww9Z5KZXjFhCxSnpMvSfaeAeoqubOI0I48mAaRTEQhhGhxEKjVCB4GPF2mDTaaGukNEHEtPPTsnnj2uvIAwEQnCOMGyVV3TjnEUI0iFiQtuM405oFkaobGoZN3QB423jrKsy5wUQ3eaMaBJhS7Lw1VhaFdAgxETBGy0ZRQhHCzgNC2DoLGIxTFGPGGRMRRdhqVStprbPevnt3JRgB8s6he84B5ME5QOAAru29iTwY68GBB1PWOQLQWmOKPQBGyDrvARBCAN6Bwx57hCjjCGNwFiPvrBWBcMYjoB7YvEz/xf/xxr849f57U3B2/cT8+Hjr+q636P4TJyq5d+LEoJOevnn5LVvOytFeOFw8depk25ckig+yu3q/Xjx/anR0
06X+0pMnk/YM09CgOOStne1xVhzEadLuJjSMGu9Y40+d6+6+/CrESdTqGiBzpxcunuqcOCtCjbLvfPEzb1w4scrqppzbKGaChsW81EaK/uLyfct5NR8stoazzqmsev7Za5pFuMWOS/fMoGvqQwOWMewtc7qx3oqEg0Atj957oftrn9nLifzE+y7+whe+Uzj68uXNc4t4OBBJL9VZwVkQ9eNmpIuZTtJFNU9/8zc2vnvka+cps95ZkJoHQklNPOacK+umVfbCO88nkfhrf+6nf+8PPrc/O6iLTFdGCIwZro1xzmAtnZGNbm4e65//3N6M8mxv6WvPvqKESIkIkZtLFYYdgbQyFjA1RouAeQ+yrmXTeGedta122EhNHShjjLGUUiEwMipk/NJ9KxfvXx5Qo2V9a2MjDIMg5BG3OBVBJzYGV42VKFaNZLhq99Cf/tGHsShffHV/J48tUGm8c25tsS3COhuXncWYc+yY0Fp7RDAn3BnvDfJOEAwEgIB1CIxstxwhqJjVUjKDGCFiPJ05z7IKl2UNOGJxghFQjBiBYbfdSXhIiXRYO+o952unq8mxmsycs5EQnHqEMKE4jKgPA+M9YzQkuEa+qWqjXJ1VggorRF1WYZtFg5MLD31AtIaU4WJ29Jmf+w9f+txXxrquGnU0KhDDhDjlCaqdajRlDMA7rYEgSqi21rt7AkQPcK/0SQAhzgOSxIqnK4tL16+8/f+siqaTcnm5de708Ed+9OmFS0/v3/2+r/3mbzhrHaEEI6udRd5Z6wkFBNrQ0xd+YnEJ/8Yv//3Tw66dj/e39tGjqfVuPK+pRs45QlBxvJNQZy2X1oDV1nvKEcJYIhysnk56DfNqOi5DIv/SD6819vbzlxuMAkKo88gY45xXUnLBmeCq11l++HHvdoviuKF6PMO/8YU7V/e4IWwxwYNuqGrVX4oj4fPpfiDs+kq0fuk0oXrv1mT7oMiPCkfFd9668vSTD2/u3FlfCbSC7Z0yz/Sbb+xSbyjHNoWsKQ3A4mpXO4sQi+PeJM86Qlje7SWLe9d3SbkfRdHuzTewI2dPnVIn/WA1ITc35tNymje/8Wu/ffY+ev7hMxv7c2PA2DjunNL+aHtnupdtLqw+3CJ2fnzj/kvnNm+9EcanmqY0lRG9njE0TvpJQO9mRxh1retGvUtLdlwWm8aOtTRNozFGQsRgqfNVVe4D6MZO2rgX8QiBETwBC8aoXtQOMZlnJQ+C2eYoiePaSYZwPd2vFXWaWg+dfufoaG7nRWtAt3eu29pQCkHc3ty6gW3NaMAxz45mvKUvnG4jbEtJlFfCicGgl3RXjCFLwwVnK4cpoV0cLe/vbKQcNdpoRI3gjEXr/RVjnCb2vU8/Xc3KKs8SY40NVpbPNpPbZXHMABfSdGJBQw62yrOaCrTcoRiF3z0cFbppxSzPs53d0li7dGLhzs3rYNHa4gKrHBGpS8XewcaF0/eLsLe7M5PGdtPW/HCUQ9PpLHovO90ex1K0ElVahCoKzhnvHDY2iBJx985OgNLh2v25DlvtwLDNdyOp2mAEnCLGMI1a2nmN8Hw2R86ncRRFsTH14qCNvPUYAk7A1HU2yue5Nnxm8ZWrd48OD8dFXhallJ5Yhzw4bYASfy/s6hECQOARBu+81q4G9vYmihMUtG0UoKoxqiznWaEMyBpmU9jaloJSI413jmBiAVkPCAFmQCkkQix042G/c/7syuLSguBh0u0YQ8Mk0Ngi5IOA9fuJVhqAhKHwzjOOnbPWgmq0sWY01d5awZgQQbfbaupiPJqGYTjLy6JpkjjAlgCLokQcHU3rpomS2IPNs1prG4VhXdeC8FYSLS/1qjKYjufa+KZW3nqnLYBnCE6G6Ec+caIjdp2GPNOuqfxcr6XZD3zg9FvfmYBO7+zuvP3OjfmsCgJ+6f5znSSljGMEhDGMnZZGKVtXDWKYCzIvJaYUKAbrS5UnSYwF9w4h5KM4UrqhBDlARkniHCXcYq+cIZyGnCqppdQAHmNkpLbeKdMghMB53Wjnoday2+t0hr3pwTgdDFGQalRwpiwutG3m8+PGoEySG5tOqRAhm0RCBCIKQsHZvdQm54xRQggiGDvwjDAeBQDYWgDtq0bXtTHOYu89xsOlpb29GRcMc+wwKZRxTUEwCE8Yi+qmJIAJRpzRpaVOZcx4PK/LIpvPvUcS1UppDbYjkn67L8u6UtIaibxvGjvOjp1DlLEw4pZ7wkAqaTQgDyIW00rVVYYIseC99g5QwCgJ3vV9PP7I+vW3NspGnTwZ2eIq4OPHH1mpDyAyxTt/8Pskz9tB07Zhp5anT7b/9N/5a1/5xlfWT/XOPxxdvfIqGaYsDNvtME471XxkLMyrsa/chZUHeHjfvLh18+5GPqva3TS3yswn3DgsGMHcO6nrppzWrAKahfVeOR2rj3zsA9u7LypdRXh66amLL44OFwcrN7dnobAvf/GLq70Lb71ylVFA1rTjQBpTjQsekF5EN25sPvLo/V/+yuvOwclu3B+2orIeHU8PjkcPPry+sbeHqhFPRdoP19dXTp0591u/fmV1eVnF6fadt1dOB7d2Li8NH/i+D398983RzA5F0mM4v/788y+99Nxj93V7Q3zzjddOLEXTbC4ZT3rhdHZAEBXYHl+/vdgdDlaQLYoIL//Ez/x/796Z337+9wgy1Ju9zYM+BueuXH3rajM/CAgc7mYH2t26ckuO5oPFzpJgb7z4jTiU//zv/oNzi6dmu9Nv3X3REhaE4KrtNeH1yOy8NV8/101a3RyVAq2O99j1V7ZuXpm+55lzi6cfzfINbqL7zz0Ut8e9Xm8+xydXzxIkFhej4VKyfeOdW69dff7FN6SrHjn3yKDVf+bjHzi9fs5O5mW+10rDF7/8jXcsubu1+9Y7O7NmfjzNmrk7cXp4Ig3vv7B+5ebuG7cPQgKV9YJiHpJ7oiteaZ/NP/35X3zkI3/WIu6dxZgFYeCsj6NIG9fI6trttxH1smlaQRwhJ/e3/sbPfPjDf+1RgP0ofgyjzJfjsNvCuLR2Sl3K86O3vn1LExJGvqzqyayJUuGBqEZTi+fzIk0iHMJ8XomYChpUFSozdncz39i4Bph8/isvWtfEQniwzllBqXHeeCM4DAddhNDvf/lLma4vXlj48IeffOuVN/sCu4P9BZyJP7JhGmOR1sZ5mUsuKMIYMHPKL184G2HWeKm5yqsxN/V4AstrpwqSFIoZqdpxYuuqqSWj/ODgsL8UpEmrrmYijAPG5vmEMQTQIGdb/UFRZMo7xulwYWU8PmKCAkbIKKQbhHAYiLKp0m47CMOqnjXNJGr1BA3K+RwzGgeRd6CdxtgBwY0uvVdCcIsAU4EIIQi4SGRdTfOJECIOYgCXJp1ZdtgftKT1Huk4isFrKbWSSitFiA3SYV1lSAjKQFuLMaEkoDyoi5mSjfOOCOqsdtpihLzxjdKqrjlG3SjKi4xHQeC6qhgrM4IkAV/IWgeCUoIERp/81A/H8WJVVE2WYw8IEKWhQlZ0ku7CcK8puq3lxiOP02zilpZPNcoej3bmYb62tlB
W2X2Xzl+9sXl3a9Tt66Xl8x96/5/8zB989ezJDuB4UuZLC1F7QJI0Qlre3DyS3qPGp0nnkfcsTI73GcPzqhQBSaOeGh1ZY5VWrVZQ5fVKJ/AE8CFqRs47HzASMeSs08Y6YyiGxnrOsDOWEhIQ5DHygB3GBuPVhUXBoDbeUADvlLVRQotGRoIRjCupMXEsoE1jAQB5uPcN7IA2tXXGBgkvK4MJ8tgjZ2tjG22wx+ABsOecIHBWWYwwY1SEwjvjPAQhN8qCw5hTETBbyzyvCcOEE0BUGcspIQQ8pTPpXrqylQS8H3PRElUpN69sD4dJl4sf+sj7KLGXL2+/deO6g/DDH3jod37zd77n6cc/eenhAbwLd9985ybH4B0aLHSTMHTEagQO+0LXiehzRqTJgqitwCNv0k5aNtNOv1VN5pzjeDFZXF6ps6LKioVWvy5qQnFRVUL0FpbTysHQQahAgdTA8iwvamGIDUJumkxKvTfPCBDKfF1UgpIwihEzaXehqrWRhrhScF/mUxElCETAORPC6qZSam/rIO53+5fu37m9Q4wm9RxT34mRAV41RdFQRoOmmvZTURjCaIvEQbcXNLOciWBPkd29sbMKW4sxdQZImIuQOsKNcwRb7yX21iPvveq3UUB97X2cCoKRVRZ7pI333mPsCAZjHMYABGNMEEKIoMbItaVVUlXKGGtFKaVVhjGcxny5l7aEr2WTScMnzaSyxrtGWYRJoyzmyIDXZUMxRviej8l6bwEQAEhpCcXeOYw8QQ4QUEKTsOUAvEcWfMA5coZgXNU1eKCE3gMdUc4YeOcsAqiqOohYHARNURNCpbKEYYLBgvXeeeu4YASYKhFNorNn1qr5webda85oQYkHAMAEO/ASOeyco5S/2xGhYK002oRhYrw31nXbw5CyyfSQBdw4rbRWsvEeeXDGAWClnfXOeu8AHLpnpPXYOk8IQchZ6wE58M57BN4TBEZp9C5Z0zvnjfEEYUyQv/c4650DhBBG3nkEyHmPEHhrkEUIeeMswdg6DQgBwlpZQlDdpJ/9tRfeXZi2k4i7Unpfqdt37qycWgqC4uBoSjsobdInVp7qv+fJ1Vb42m9/dlxkH/jgA8dTvDWWImqNj3ZG22NTeURZ0O6H6VJv7YEVcXZlET07/d2q2Tixtj7df+PFd14NXB0OFFjNIVaZ7i2H+7dvqHqOKmVqhsUwIRWnur8QhImoJ9NstJ/tbcdxIOflQVY7Sc7dt/qDH3/k25e3KRYH+3bzyu7SCYNDrJQiOEaYE5BGWecEIPR9n7yPqe7dq8c71++8b1W876nF+XTyOy8268vx2TtVj7nGuTNq4a1vbUzr+rF4/Vvf3Hh2wx9o1TgtvFPKMYIJwao0GCNrLOHMOxclga7155/9nCbKeesdwgQ8RrJpGKWYeO8MJkgrujVO3OXw6fc++jM/+Z8OJoTEXREugCvn061OGEXt1GQlQlgpCdZYcICBC4YAAThrHUMIgcXWdqPQyqpL+PpS68NPPxnyJh1QV5bZ8bQuo6gViyCtZtP5dNymnrIkr7zK8jQIw1A083yisiceHiSJ/drrhdpCQRiosnngXHftPn54JyuKynvOI4YpRYT4e2ZC77SsEFBCqEeeUoKE8NZy5hFAVcqy0SKJjSHSIECi0w8Gaacw2mnDMBOYJjwh2AJYVUsggGmgEdJWYN7mQRREnGFsvU1aURAxKXUUJLYYGVU5TAjjZVVjSh1FpapEFBkfdlYfqG302ld+//c//1uvvn55PJEGqGZUl40ThAtslPWAnfGYUozBeYcJgnuvOyIWWQ+IEOq8RxhZ65jgBDGtSbp88sf/+k9+40u/9f+sikSbNS573/ueQFVZl8dPf/Jjv/8bn8EYiMfeAyIUe++xV0pRBIDQyy//m3ZIWp3WzkH1+Kn1iw8FhIyoiPPjWliDKeIMV6MK1wQDiwSWDRDCCAYLDhBceWPzmYX2wgLaOCiHHbG6Ov+zn+q8deNwLoMoZoyF89kcPEQRJcQ3Vb69q/7gGwcXH1h87uXNr79yMDtUSgkciKSDw4iIxD50aXH1vu5br96MY//Qe046iZwVIuq0u4ygDlejo0xv7U3ffvUqUarV4Vo1a8uRRuRgdy4dNJkqMicC4pzDRiwNW1Ve+nk2mlUwCe5/8MKjq+/5hd/9OQYmOtE7dfrM6PC4M4hmRXVwMD5/3wN72zswN6P92T/7+7/y9/7unx2cW8qqOmm1ur1HtnY3WnZS1jH4/nh++2ie+Zsqn5UPPzIsCpN2+tb7OO1VhbH5EYN4tLMry+1kPYoZx0Fy9+hGHC6ECKwhjVJBvOop0s1cO9MfLBxPRvECBa4oZyEPi8lhp92aj0bMkWpOL5x+T+kK6yfDXgCotiYLeUypN15pX8ymmgtupaa0EwehIcBipDNdSeNtThjByKpqapraMrK4ekYWU84opk43dDIvwqAGZ4wtwR93W4JoyDHZPZgzGsaiE3S7mCfjSYM6i5zOt29/m+M73WWLgyFCkLAwifnB9oEPyMLK2aKcBO3BfF4EVK707MOXhluH88O9sTM6q8zy2uKF8/23vvHttB8R5hfPrpUNK6RaXlq/dWMvTCICHAOnpheSuixc2DXON2VeUcysBaNLZ6qApa04zea6MRaa6OzpT5JgSSNPi3rj5uX96Z13pyAKOOPIu047LeuGWai1aaehdyAizgM+Pq4QsQihMiuno2ae17tHs6P96c7R9HCWz/LSNQqBQwAYAQbvncfYOa29dw68BwDvnfMUA0PQigISkc9989ow6S8sI+tyaYgtYTKDsoGjMdzawrsjOiuMl94iRAXGAIIxxlC7E/Xa6fJC79SJ5XbaTuPAOxQGQRQkTkNAuQkdQo4zCtogB5QhVTdKaW89pUhWjTXOOUsY7ybxZD5XWmpTAzKdVlQ3hjM2jIOlXqcoK6N1lFBjgqopHSbeIYw8eEcJ4YKVtXTOcsxI0rLa143y3te1dOAAYendpNR3Xts4k7RpFZAUEdF+47W7e/N4lMl2eyFdOHFt4+XDcZFPKx4wdHPr4tn1fk/oWnkLDjwllGCsjdVSIhQQTr11HDFlDeOCEObBMkLuHV0iaM/s1BmrlWnqMopihABj562RGlkPxvsw5MaaxmhOCRcCI2yR9QQFjGNB41Y4HU8ocZffubrxyOr99+mDw1s8DSaH29Z5ht3etntnA1mP4oikKWM8xIhxLpIoiOMQACFC0pg7awlllFLrwDsfisAi5ymxFs1KRYA4qUo3TnmoZjodpJWUWlsmOCNOaoNVabwilNWVKWvjMcxLaaz22illkKCNrMFpYywjvFaaYDxsL9Yq6ydBVtQQIBFwKZs0CZigjOGmlnlZOO2NNYgSbxnyGBBwiqRWVV79ETgSOkkSmXyYipbOv/21r/JBd3Mn9xJdWu9Pb89bpFtrPFbN3/h//dT+6zcnO/aH/sx/88u///md29Zqh6t8b7y5ung2jhMOUVHprMi7cbIz2lm7cHL/aI4EoQyPNyecMZXv3v/w2Spv7NFcGt8RUSPN3rWq3CTVdG
pKd/X55x5+z8msTe/O0HqvN/zY97z5/CutoHN6dXjtxtF/+qf/M6LJmVPtOqtMqahA01HpEGJ1ypLgW29fHzmUYyS4qEaZq/KTi11D25fODD/+zMf/7b/41Usr5xaWg53Lry1Q3xVw/v4TN6fhzi0YH81vbB7ev34ON0kS21ZbnH7wzI03v/HVb3zpAx9/sN71SuN0hS6tdHqD7jvv7A77naGFSjYn2qEzEbNsvde9M9q9urm5sPB+3lr+5A8u1w698LtfloejO9W8HE+yfLp2Jt66vTNcaZGQJ13hqT/YH9XaTyc5MfrJB+9X46Lt/WxSa+RpTKbHGQUXpaLL++NbdrvcTYfHRuxqny8P4tluNr6594fu5Xpuk4Uepbgp6g88dfGjjz0eeouQvfzc89tKbV+/fG75ws/+3X80fHAdE5B3bhfbm5/+ez/7ra++SpMgqwrrbaX8Q0898PKdqcMuDqOFM/FwbQW6/ac/9YNv3v03cSwW2nFdKgwYO9/r8gBEdjwPB2svfeW5xx84eTAtRnMdMBoFYZlX8yyPRUAxtgCJCFwbJUFo9PE//pd/5b1//HStrzA2o1FHo1h1+xSfJaoEHDLenUyPJg0rwqixtRPUSGPAamUZBqDqJ//0j3ztm9+cjAsEvt2Jqqapcn14lKlG1tpY5BHBHvFGWVsrgkEIHjCaT8vcqEE33d3ZLyb54vrQBORzn/v9YUh9XSzy5kf/wkejfvo7z38aAChnZV4EYdiKW+PR1IPv9vpMJCSIMQ/ms3F/fSmN+e6tO+c++H6IBTieV6oTMOeQ9wFHuiqLfr8dCV9lYwemLLRPIoSwlrXXyiuv6rmRjQPOOatkwzi1oL0jti69s5iy0hRhq5s5TRiFMOR1GxmndBnwkHBhvTJGWYQ5OIIoiruVmfEgappGVTUQRkXiEfIYeBhjwBigqgqEseCxUQaooRQDpt4j3xjOGBORaoqQUyp6SmnGWJPPKNSIEqsNskCxMGCCKHJKOauMVhp7wYOA8mlhOpzrOjOsCsP0nlkKjMbIMyIoF8iY0dbk1NnH945H2NuAUM6D6Wxvmk+Gp08zxI9nYxFEYX+QF/PeYGU8OjZecSZagZxlB3XMvXXT8bzTXRgunrQmP9i7naT8Yx/7UBrCtbffHKwOVxbbt2+8IzhFFp86fTqJ+O3rG8qV/QQFIoyi6Nata6YzRIicPn9mtL83nUytsr1WfHQ8I4T0GG6Yrx1yCFOMgFFVNpxwxxRB3jpcgQYM3oM2TjvrAFll9w4Pu/10b2s/EqF3HgEISnzIrUeMYuGwQ8gZYAgHISsrFXKuCDEGMWJ+/Ac/8Nrbl1/ZnlpJEBAFDiPAFFltMcbeA0LeGscYc94B8toYqy3FWEtVlyoMha6bTGoAzxghmHqKZWOtQrXUnCPkkQZ/KNXcBvo4ixMRYBMw/MSFtWceeTgfH25tbS+mCVLhrCHLw+Uf/6s/nsTx/s6uIu9Wb2QlEaXGOWPdaHrQ6TNOklLpsp4CAHfEOo3BNQqtLy2QKDaAZ7Mxxqic53yBeK94SKxPtXdRpxUI4ndrrWQck/VuZ3M8tg2qHbkz9oxHzlmjTKM0wiaT05hGHmFlAIADDpqaLLXFUqeTEjHe2bIKIWCtuM2TpBW36yJ3VdPMC+9siKmO5XDYJwThJr175apX0lsrwa90B01dey3L7DClywutdhTiIpvZQnFbjg/H6Yn7wVniGxYFWBMsWL/LOS6d0YRydO+CiqUH6z3OZqXRIWeRRs7oWikNDqI41NZ6UBhhRrDVxnsAQN44B4AFnF8bnl9b/d2v/aF3jCFCOdfOjDKVQh30Y++I8Ga9n0S01kajXlA2alq6mdIOB8567y2yxjtDMcXYA0bWewIEgGDstbbWG+ss5wFFmBCCAIH3mDDOGGcCI8jzghDMCKnrZuxGrThKYua8IWFgrFNSU8wwQUx453xdl9Z5QnDAgjBOEHCCcbsfr60uLD9wdn93s7AlIRg8QuAxgJRKG2+MNdYDxiIKRRQxgqw0UkolG4qJJlXIWpFICcNZ1VBCaBgp2VjnEAJntbP2ngcJIUwIwRgwwtQDIkRrjxF2zjrnwQFC4DxgjwAhZ6xHQDDGCAB558BZiwkB9C7H2nuPEAKEMELgvffWevDeIYSk0oxizIludBCFjjip0HffePde0Lj6zIOnJ9NSFXZ/cxehsbfx3t0DT32UpmlycjZBcjxmrL8yTNJeUBNBdbFEJOu6K9e2LpyjVNrTD3WqsvDdNcvZrePjCx/8byeHdy9eWvylf/kr1QyHAQlG2draCg3Mwd19Wq3M9+7gGFbWFiFaXT+zUs9u3PeeS4e37ijXLJ1dMEVrdrAjIrGwfn82y53JRIo/8PHzZePHR2j72l71UBuZmJNAycYBQbSNCPekstI57ftL7L3vW0gpfe71vZ/6U0+du2i3x71/v7G1O25tvHrjT3z83NXrBy+9fmuld+qFGzu/+29exxjzJOwztrk/aQVcN7nPy/PL57bsdFpZrV0YkVkpqcIU0a3N3aqqWt1UMDRvGop4GIZaK++0KRTG2Ct3uCd1xufTu3v7tNKEBnpjdOfSQw+dbpHp8W6TZcQiabTwoLTlCTPaIcaUVN553cg20ydW2GOXFjudxelxib06NUgHaWZsEUqLwcbCcS95cWdhsTv2Jeae1tPyeC9dutTvDPLRYeOCdqc9nkzTuP3ex4OD0c2jzXxiCXJ2b2u/9/GVKk/Hhwqsj1uBd8Yax3lslMbegnfGG8DIaQMIBTS2BAmOhfAYe+VZ03itDEbMEs/jSAhmAMrGAEKMALENamzcDsuisk3u8sx6yTEHqcN2FDjqKOVRYKwztaWIEGcREdrVgjJgzFub181MyiBsDU49IJLll6/d/q3f/cXNuwdVI6vGOc4ba8Eq6xwVlBCivdfGBCEHjLXUhGJrPXJgkb03FPdudMgDAsI5CcOAkYCH8cXFZTqb/1cFtPmsWXlo8eH3PrW9N7meP7+1M2kvncoPDp1UTktMsNUaW8AYIUKJt1rODvOGBykTwQgB7fYP9u8urIheL9YWtPfWuTAl588E8LoGhDjHBsBajxzyRrvcRLQjVbG81gEMe0fbTjXPXEq+8qa3xlg7DwOPMcxmeZxykTDr4n/6qy8AsR6hBAdI4eXl1pnVYd3MTz8w7LbFtTfuHL95DN5TIUB3OA7KyjcGOR8GnajnCVLq/vdd2nj7ZrcXceGaY5cXJu2GaRpmRd3vxE2tHWBr6d1bMz2XHPnefUuCo7eubV+/eXvy2N31+xcoJxqx0krf797cnw27ncXlRcTYmYsXZrMmL9zR8ejf/+cv/q3/4UdZEBbFTMRNEsPauRPHU5gfbBOiTp24OD7Y7bTOHh8cLa5cxJwK1iIuCgK8PVJBu4s9BWKLKreIEnpidWlx0I03t65oM2dBgGmBfYOZKadz5aXAwfb2xtlLHUsMuGmlJlvXboU0xDYcnnhsZvE8nya0Mz08DNPERmxytB33+qPjQ1XbtbXTSqJ+f7ku/fbxYWchpiyRzkQhP7m8t
rm/4QjS0hEaItoVrXVbvVPOC1VtKsVEKPrDxVaQHB1WnPJTp9ef/eazh/nhifVL/db6aH/KscBAFwfDIpMyb06ePJfP96Wc17nmSGrts/3prJGJotsjw1Daa7txM4+CwDq1soAnJRoFzJuw0dnD91+Yb287qwaD5ZTp85fOX7+9PZsXnRafHVHj2aB/Zj7du32456xrMTE6KOM2WV1e1srZZkZoQAlInc+yylQQhstnTj72xpvby2c7s/yoNRwUkzS79S6lhTLKAo7Bz8pGa8sZaWSjnWol0XQ6xYQ20iiwyrPj4+baW9cPJtnhRE4nU6UbcBbAYnePaGgBDHgHBBiBIEQhgzQKW1Ew6HW7g3bAAmT94Xi8uTfdOZRff82cXMGBiMpClRk5msGoEduHapaTWmPwhjGUhCwKebcd9rudXjftDzoxF+120u50EFBEmNMWAasbm4SiqRvnaZQwaZxpDEHYEY8xbpTxGFIWK5kDQd1uazqra9kQ5AhAJ21bbw92DjgnIuLGwMF4RgA63Wg0nR2NJ1xQ4zwiJIkjQmlRlc57B4hRwZhTdY0QWGuM1hjdAy9SjOHY83/9xaNXd/mwa89eWpseqjffjL67MUWsfOzSpcnk+vbRRBvb6kXOoePRXDY3lpcXT64scmdFFMVJ0kgZJjzGghIsjWWYhYGghBhjGQ2llUIESRLnRTadTcu6pIwCQMACb71DzhlrrGEBUM4FIVoZ5x1n1FoXMgHOOrBra4vZLA+pwAa6cVIU9e298kvPbh0d+qXFMNWB9yujUb110P3sF3aOszBgdtAKl5daylKHSCAE5ywKQ8qINh4AiYBzyjEhUjYsikIWWuwrqxxyBGMAZMEr4xtwnNG6aObz2hEAZ4ed9ODwoN/qddthHOGyJpt7RTNqpALtm5iEDvu0xY7GszQIQs7ypkacNcrUxwdKV1EYUsSMlErVvXbEkc3zppSSUhInHGlvrfPYhxSDBR6FRutSafBYvHsUwNwUf/2f/flf+de/A4unf+7nfv0XP/2V8fjWC899+f4Ffr57Jt0tTRR/9VtvfOuNy0MRXH3ztcmbf5C7aXe79fT3PVDCuFTEWzU/3G9qxoIWAjPOGgH+8tsv7O9ud+IImmq21URhIuksxLuD4WqyMKykOJqbb33pm9mdrMuSbisszSSb7lrT7XSCb97YaoenVnuLb0oktdnWYx6zkxeWd/ft8bhsxyzPVV01DiMDeO4sGQ78/nHCVSckE1Utx4JxFHaaM2eXsoPD7OCln/nrf/7Zl9+cTdXxYXX57ZvJYuuXf/233vfnfvL8Bx957nPvpIOFzb3ZQqu7fm59c6cczW4fHt598hOPnr7/zHOX324Nuosnosc+eMl/8U1dRsfzeZaVgMU0b4Zxb57Xb7x102m/urZ4tPv27HpZzqL5vELTWdxpdVfPTPMs6iaTeeNpWkMwOp6kq4Ok19q+NT595r77nlwx1J18bPWln/+Vk10hKuk7/ceeOnP5a68/854LV29eq2ngw2R90H7okbO//NvfUKR193Y+bUAwcJBjivX1maqa/iDO7ky++G+/0gfDw2Zw8nyrt/TUR/+bj3/wk7de/fb1r33py7/8mUlhSRTOFY7Xuka5slLrDy3/1M/82D/6Rz9XYDQYhGWpj2u4v9OHQfdv/q//m8nrbifutZLaV65RSrtsOknjeCnwTz6xGiXzs6fT8XSGKmOwmc9zTFCV15HgQSScQ1JqgkwEjdK7H/mhB2o6chBikKPdXdt/L6IPxPh+zrU2DkMrs7Z/9sFmLl3WDBbbs9G4kk0g+EIvTBkqxoedOAwYBgIKfDpoTX2hnFHeIgIU43vCxl4nrsrCW+uNcd5QZALOuSDGGRrh+fGEeL3omwfPX/gzf+FvL7U7J06y0swAPg0AgHwYBdY6JVUSBWE3tApI4YrRBEcpcxgb70TgCK8rrUEM+kFMqGtq7PSJE6cOjrYocA22kHo4vG882UUU1co0yiz0er5u8qxs6gZ7pAmj7cF4Z6PDGaYeCKOaWGcooeAsJTSIk1lZpPHASES8ZAyDB08IRxgKTzDzxlbNNGj1u62uqouAc4JpVtXal5xha2QUDghidT0SYVDXWRS0QxFXliLkp9kojeIwTD1YFtA8z6b5pN1btk1FNBBCjde2Ocao20qHTXOsmpr5mDFW17N2u1PKAnPiDI27nRoXot1xBsft5Wx6TAVtQNdKxuGy986ZptNl+eQu8YQRpjCuVS4YS8MeQZRSYY3yBIxFxpFc1YCFMyqNmQuSoB3N8lLLphW1wCpEXbedekcmBzsWje+Uh2fuO3ft6taBHt138RwCv3N7p2q096HHvpH2+DCXVUU4vXD+gbpQqoHJaEyJSKIWTSKGPYt6Wpq426SRqyzbPq6KRmFOekkQUpE3/mBWAuIIYWW8d8578M4RTrxxedl02kmLM8owYG4scIZJQI9mNXZACaWcgHch53XdRIw0tSYYtygT1H/Pxwff++FLG2P7z//di6NaNMoDQth4Asg7uGdb9x60sRgh5xyAowgwOIIgikAg4zCuFAgKAiyRKsYsZObSwwvtFtveO76zX2zPwdNgMtUBAigUxrC+vPzQ6uJ0a/PmzVvDfue+x0++8cbWMw+9BzGPWRIunijHu9n4XXojiyMaCFNJWTaIJZPKOHXIie+m7VCISmfD9mJW1mESc4GOjq8vrr4nTteu3Hz5yXOPODxZaA0P88wzXc6nGntX5wjbpaXubFLWWcMNTrud7f2jTtqWRJ1aP7Wxd2CzPOXAG9blrdLISkvrgRpCqBYMm+IIqzCMPE1jkcRCkO29A24VJ1Y5E7eCRsrGWaqln+1gWTRNEbeprfy8yIK0XarGe+cZ6i+tIkSxrnWumJFC8Fyp0Wh+6ly7UhJAE8fAARcm7Rqg0hOwAMQbghF4jaizQPMKIUx4EDhEFjuDu3ubiGCDXEWa9U4yzyaqxhjAe0cw9t4TQgjBb9++/cc/+sFnxnuvv31HYQuOzyR4cJXWuZT9duic086lASGMtBcSpatxYd+8PdfOe4IIQsgTTzAmCJz3How1gJA3iAA4B/dsa8ZY6zXFFCFsvXVGeYwaVHhvOaPaOHDeeF81yjnPKWWYMEoBOSCMIN5UubWmbGpjDecCMA0DgSyO4yBNwrqYvv76C1cZRIlADFVZaZ29R7ZW2jjjEUYIA+UsiJMkGmiZY+60tZRRArhpKm0l9dBUFWMkZKG0moB3GPGAGeOVdWHAEXfGWgRISUWQQ+A5IUEQgCdSlsZ7jwFj7LyDe4wVQIA8IsjdQ4yBJwR7bwFhhJAHwIDuUVAAIQBECHH23aYPJl4wgjnz4JQywH3cSYxu7k1BVcv9u2PrmCW0vbiUTWZH144IBmZJd2mIaHthZeh1VDNy6+qdM/Ts0sULeTwdvbl1vDcpnen0SIRELeeW9/DwZNRpz7fjqpqHXF174eowCvKA15ysner6sh5X8vhgpy72RpX60Ac/UM7IH371CwnKn3jfkq1zbPV8Og+iuFEIo3jr5p3asYWT6y0XlfNj
GppH37dETXdnuxv3bSG3TDUTLGBcVFJT5LWW1ilMwuw4S7rxn/vrHxSffWX36PbS2kLEyIlh0m53/tuf+TOq3hNdvPbA48Pe6tEvfdlM/XFWzcdZwNAql7zMUlv/9//jH/vEH//+f/i//cJnvnjtvWdXwqX25f29Sa455Z1+C1Ooa2UBARBjHQLjnXdgsEeAADMAj8dzNS6bRnvvQUuplV1sdy6c7H/pq5saPCZ0aTg4Ojw8deZsqWU+nkipvcOYsTqv4qAeUnqhIxaW4qov8lIH0CCKZZ1xogKRtpdaWT710lQTjVyDCBhLkriryjkPGCZytL+t6xXBQlOSlPL3PzDcvF7bnIw12xupO9emaRT1FoJyXOVzyVOKEDGeMB5bbRmnFLB3CAA7T3SDnXOE0DAJU20KoJNaeYRtY+JuJ02787rxKJDKVYp2AxFQL7Brspx6CAn2zTRtc+u89Zo514zGPggi1HGOaaWSbge0pIyhKC5mUwBjjIwCHrQW53b599+cfeXZL2+PM+IoeFQ6BIJbcJRi8J4gCv6eTx5jShAVznkisNUKY4QJvqdQ9N4jhJEDTAihmDNGEI+SdtKJh7FaXmn/V6siQZlWrCpTBfjG1c03X347H811pZkg0oADjxl21mGMrDPeO4+QA2qM0s14o5x/86Xgex8KrHQawCNSlZ4wqmaZRc5oWiuEqQMM2hjORdINm8g89/bWxfNRilEYIm1QK4gurkfPXy0r6pWynPiAuXPnO01jufMiCiyEUmkn3SCNk0SsnuiYqqKUz3bz8a2dXhB1e71Or9dbiQXuzrMmiGFp7bSaHF9558ZoMsYiVXWzPOwur3XbQ7qzdUSRcMgOBtHe/tHCsHO0N0eE15nRBD301HqnG2zf3H76o0/Hb26tri37yUhwEiVsUuOlpRM3Nkcx640nBa5w2qpkUXzwQw9fDth3Xm/e2Jz9/Z/97P/09/9ewHe9m1JXy7xxeY7Lwphoc2/eXzvRWu6lSObzeQuHFodV0SDveeDDVHiEUEw87/gCKeN1kx3ofcMYdXExl0aPjdbGeBENtaOU0YCjYq46Q3O4eRuIT6OIBgwbkiyEAlwUDzkSdSG0twC6lXQIigkdiUTd2Dg4ceJ988wcj3ZJKLR0xWw8zef9bvd4NJUV7SW9dsvNjo8dMtc3XulxnCQ9j6reIMiyYvegOkYEo+6knt7c+O680El6Fut4ljU4iACF3qJG1p1O0oArZY1wTULb6/D5UbZ3NKUikvFwbGRsy1MrK3Go1+mKN9V0No1YF1rdQm7cuFF4Rt/79NPj8XdLjbFU2MGbl183puq3eVHMEbcE8UJWDkjMUAG2NYyacjJviiTnnfSc98g7SYivi0MJ0ntRjI/fKF5rD87e2d1O+p2t46M3336d83fBXRjwdDRvdVssiOomn8/m0mrVFK7SBoxXdvcof+fqzXmlD/an+XRaa20bda/kCdgBcg4bwtBqPzi1Ojy1Ojh1ckVwHojAGKWapqnL7a39oprc2Z9nRZPnxjpSavaVF8okRCEj1jNvRSadBeydoJj2YxIIWOhF7VYchFF/0A+4YIT2B62Q8SAQBFNKWBAJBKAag7gNQwSAEEJpwGstERClTD6aO+8RQQjD/miOwAPyylVaaY7xcHFpPD7c3tu31hmtFzq9aZZ57atSMUKnvipLTYEGIlDO1WWNHcIeDXvDaT7rtEljpLLaFAYTTCmN08haV1WNN8Zi7By6u08+e3wcCeh8a8SkVQqyRknvvrz5rNGWBBhjgjUxFjzGlZJ39vcHwx4lQDBoq6VRYRAIQZz1mINgzBkXJQkFNC9lXUlVycPZNI64sV4ECSZGI2S98w4QYSIU3HqPsXdgtcEIAZAwicF68ACYckzLSmMqrDN1Xvo4OBxPDw6LX/3t7S+19dOPDuMoCBF/82p2NPKHewJ7QYlGgEQgIhpYT6I0CqOQIuzv9eUZUQg7jyiCVjdECFujHUJByFRRMhpQhK2z3TSJeei0xd5DGFdGYU+PDvIoaClp90fFQi8SPOikGrxHtDl3YphNmiw306yiQJpaKaUZ5wRjrbX3LuDCIiqNxgkNAdZW+lk29g4YosijRltsTShoFHAX4ao0RV5jSqMw0lon4bvVm6Pjg8vfgTCMm0xefeGtTz75cZw8vUArXJa3phuCRrfvbCS0Yky+fevaybXB9/3A9711+3XbzoCZbreN5tLWth5XqBzInB9laHG1y0D6fHr+RJodZA+cWn99NrWUzIv8zdcPVtZbp0T86svX5cYRPvSLoi+bcjSZUNL028H+/m7dGT75x/9k8frLdw82RJcs82Wv7SSEPE4VLbrttsXVTjUKwqTXimaVqpTZPWoSFHTbvtMJIq0vnO4vDOjrN472Dm0v6PjpbO/G60kSsEU+U507e+Mubm3e3jr/0ZeNV+XhHk3x1p3s0N2NoisiScWcfvk/fe6jP/DktSubH/3Bp29dO6wr/s6rc8/Eez949ve/sz+/oZMojBdaLrcoQAXSgImsMpMeBZ0o2x3PZ7PF5ZZ0+rVr15mHXhxnM7x26tI7b782z8t0IPK6mM/g1Vfu/p2f/elvvXHn259/48TaeieYv/cjD705Tl+6eQOn4TM/+ue/z07/6c/+3Cg/eOxssn+wR0E7TYRjnTZLQjqelNo6MIYzompJArqwln74PefWHz01uPD+lQtns53dX/j//e23vvZCa9hvLy51lvF0XhQzWTW2qosP/fAz396Y/k//8XertLvSisdHM5eK/+X/+Pnf+o+/+MJ/+ZZtTBCycmqOVOGaKqB27VT8Q3/iQ7/xC39w4exqxNHXPv/izWnelNm/+Wf/77/5z34hDpi3rtduYcy0dpyBcxCHIg7K//Vf/6TRG8pjwlYqGPDh0xiGQi/5moLgjHMC/Ju/+wem9vW8pjysq5ph0U2EMq4sAHH67ZfeCkIKiBBMK1nLoq4yBdiLkKWdsMxlNssBgGHiwEktAZNuO4qTcNBPXn/lRtwKBLimKn/qp977wx94MGid6j7w4Pbuzp2Du8Nh7908RaMJJs7TMA4ZsQr0aDweH89OnlrjAUaWHtzd5nEP07Su7fJyFxNkvC+yMubk4GhD6qbT62NCZeVn05FzXoDAGEuoq6a2qrIgozBW2heq8rhaWF5upkd1XsTtABiRRRaHy8goUxVNNQ8xafIMyyYddJVDHoiyjhHKWGC9b5rGW1/nmUSOYoZpaMBghgKGOCXYcV3PNObWaUYQ8k7bxjdAkmEtGyESIMx52dTKAvAgVNWx7wyFYLapHVgRcou5c3KuJ0KIFl+ez2dCBNaoss4pBUYQxiympFEWaKStHx0fRYEQ8YA4Z2QkpaOgJrt7YQTjg2ky7BkcaW8aOb90+vzG3iEzTlU5YR4ASS0dEOcUC4Sg3uraOIswYUFIGQ/DBElodImMKxrlmipOglgMlSKLnbZuKldV0hIhwjBC1vn71s9WxqjCHMHIeidaQtq51U0znzPB+8OWbqRW9eDE2nwuq72dbtrCZd0NqBCksMAFVU2zOOxqwJN
Cc0xqZTBB4MF6oBhBQHKjOh1BcDKeNp2QSQdGWWJsgHGrFTKCptO8306oRecvnA4EPzyejcvq+HD+1AceCgKe3x098/BS/Fcu/svfunv9UBNMlQFAQAUDY94tywBgjBEhlFKPGtmYooJeC3UootTLEK124ENPLlHtyuPp6dMrvUFvsHbSKmQcfP3rr+K4PcrM4eHxE0/c/+CFc0f7lZ5P0h7+3h9++rWXr9y5sY2aaWgmQbo2WDqxsTG1UwP6XXqjsTDbndAwMBoc0AYQxaHDVmlJwIeUqLqopxkuJm220I3CBJUopPc9eEbbejreBGadJyHCgtdlWdRNnWdZ2XAt0eR45AgGmZ0Mo0EkDosiReVah46swR4FLBzLwhFQyqla+wAunllcSshodKjkJIw4R0QghgL2zPd+jzYL453rxfyIMe+mk5SHKcNH114WkSiPp8r5KG71OiLohLP5jFCBrMWcBQx77drtKJ9XG7evZ7WFYAGHmFGKLGHaeKUImcaBYcQRQP7e4s57gsB6d3RUzXKkGl3rKU0TEQ2WVk7u7x8wgHOLJ3/tn/zb/+4f/eW37xxwIIQT5xyAxxgzGmgP//tnfv1U3OrG/LjUslEIecRwpf3htGaMAiDvfZIwTAkGFIYJaUrOuVGAnPfeUYIxpoiAMRYjRClGHgOABSAMIYe8RZiAcWC8DEVICXVGO4+ch4AxgnAQEu+MNuaehr4sG8aplxUiwhtwRmtVY2wJQYgwwAghaLQJRYgRZFnFOJ3nBRUUnFWyEmFojLHaOPAEUcyRUpIRwijztaqbkTFWGdXqRB55JzUmXpvGAwaPtPQB9qqplVaEciMt4yJox8h5BF4IzChziBDKmrLU2hhlMDEIYUwpAALviUfeA8bYU+zAOQcUYe+9Q+AcAocQBgAPHjwCQiige8U0APAYURpyxgnFxBkllbTOp61IG1VmNUPv5qzrvDoYV2ErhTRaW1nL583S6aQuZvm4unJts9OrH31o9Wu/89WFurh4/+psvzhWV7LxzvJCZ3wnQtRtH46YMh/6E9+jG/ndb3160lsincUHH39q+/lN1hbnL57+/Ne+I/snv+eBT332l3/9qccunltZfOfWXSTab1y+2eH9H/6BD3ta797ZX1htY+2iMKDELS21x7iuD7NqdFh228wa5GkxK+bHhyvL5v6H+50BlaVGqHDWytojAqrMEShVqihMSRDMR8Xh7vX3PTF47bv707l6/InHW+WnfXM34EtR1D1/nx3vXt249vonnzp7+XdeZx5o5Ml09Of//HsfeOA+R8nKifDw8PIP/8CTX3/28l/9kWeeevw9X3vjjf/7M189Mlqq0nlPCWpqTRgFQMYo7xFjlDKstAaECaOycarx1iMEFjxmHL1x+cWHLpxII8yD0NY2K3ILOF1YOdU9MQjQf/nK542xDgEOKMSBSESccKvyPBubShHGic1XWnZxUUwO9o2zYdyicSsKrQdfyiqKe64xvpn6mpu6sLrWskyiQVPKvMSD9up9y9OtmWwlwTvb8+2D+PwaVbJ0AA5swEPjkANkPWDCjdIYcwS4qguHGBaMEa6VYdx3O9R6B94InpQlwSwajzLlqPbOA1scDlb7th1UupiHlFBCGGIgXNJyFoOuJSGaSaVMrY6mUsJ8fry4fLq/ekohabXC2DmEkYj3jud7lfz8N16aFtYaBASPpnPCGTjMAwrOS6mjSCCEvPeYYkqpt7XVVnu70O0V5UzVEhBCHhAGYy3c47xy5jyKwsjboNdf6/U7N9565Quc/FerItk0+7eP/sE/+Nc4buVNMd+fiOAe/8h657xzmGCEAHnsrfXgAAhGiBCsrbGY3ZlGQSutq5FVBW8FYY83pXQceMefWE/lxBlvpJVKW4e0dlBJ2VlthYtYNRV1NI16gLvPv3FdtKJBi3LKTiymRVZhwpJeH5S58s4BYiINcNxm2MimbvK5Y+Apx4zTxx6/FEfdvHIIHKYEvAGGOCFH25u6OOwO4eSptdG4GQ5bRAbH46mZuTPLLVXB/v7RMIm651YOjka9wCHiRRwSxpZWWkDpg088XMtGtIUVxIQxt86U1M2bXI8HJMEu1GWzd7y3eiI1zns1Xzu1/ObGRgunO9ujf/KzP//X/tKnFqJG2rmvTZZP43bfq6VejJeWVq5cfmGpHa6eXS7KHCtiG8KA6qrZnk87wxgBN3LKMQXkPNSTbBTE/TgeeMg1WFUWsql42ra1UqZs8tzzsLMgOp2FSuaE8/ZCZ3dntr3z1rmlAQugySQTS4K1x9WccFPWY0I5QbaTJNpZJ+VgIRGpmh4chWEHRwOnYVarpHsiKw1rZFVrI7MkjAX249losNi7sXNXsCQiaVZV3rpWP0iimOHT/eFKU0+jOLCegrONNs5QqOVk7+47b7509tK57cPdM0tCliMqGMKkF3VhskvQoSvx0on7D2q9cXdz+dzwYIr2dibG8HzaDAcLg9Rt3Lq1eqJDYqYcWA11U2FW5aV1tU7aESFmpjNwuUDcqzoWmANvB0LKRmuwlXfSjsaHrYXWoNNVjXKuJrwedNvtSM02blUHM9dO3g2aSskDXldSznNOSJoIJomNUuNhcnz00nOv3Ll7fDgeIwqqbMArAATgMRAG9mRfnF7pPfHkfetrJxlxiNmDg6PD493Nw+xgVs+yuqhk3ZjaOOOw94AwdgYwBYd0rUEa8N55AM4wIWhhGJ1a7nLCkyQMQi4ED7gAzDgPKKFpGnEqkPMIMcY5Y0zwKCvyRsmERc66UHDCRC1VWUlP8HiaOW0wIoCBM9woCAPmvVO14YJbB/uHh1YbcFoEAfJ4f28fMEvDsNWKGyWV1VYbwVg2z8Mows6DQxQLTDCnTDaGINxqdzymZVEan0PdEOYoY3VVS2kwAeLBO1lVlli82G5VxJe2woSGAdEUi0BUjUKA4igIWtFw2OdUcArGmHwyWTt9SilstNVKA/I0EE0jwSHZaIoBEyoEmMaKgLbCxDQOWaOki4MUUyiqWSACB1BLCc7rRkVJcHJ1aTrP5llprQ7TgCKiGu2tM9YC2LiVCEpG04m3VlVkN0e/szuxHoy1GCFnkcAMsMUU9fvpsN8jgciU7bbbASMR5VIZjzALqDRGWccpSmKOAJT0LKAY4UHMvQVwqE9JhBHDrjS1cl4IFgRibzLupYmWUoTcOD+fVBTVjGAHejGJ5azRtemE0aSolfEI+yRmcRhgzq1g1rt+J55mhcXQikNVN3fu7lKCvcTW2cEwKapGiCCMQkLRweG0qQ1GXNbNu5af8N3SAZnX+9ePHnn4bHmA3vjmb1zeOpRMn25Hlx65OCnrvYODvC5tOX7zxd0o4pdfnyFCTr93+UDjK2/nyRBVqhGutrkTpM9oxE9c6p6wd6++WueN0mipvdqRi+T4CAcmkJ5ZfPu5d/LrN9EImrtZhNtYEKOrNGXdlnjq/MrkmNo82Hn92mA8jdJm/WJn5/XR+VOX5ri+ujXjFTq7PhQ0Gk3mBNFQoKqGpmh2ah0QWBukr92YN634yvYhRVrwKID8/m754a
cXQaRrp9/36c//TlXoTjvRVZNqdfeLX/jAk08dXju0S7SgOETN1n/45Q9/z2P6vu4T7zkn53WQitPrpw9ujV2h9bissrr2evONTY75weHBueWk3e+Otne8Al3W0+Pjuzd2H7ywrgFKVdeaKwUHm5PFQe94lk3m5WSaB1E0nrlsrvptemp9MJ+Uv/ZL/16L+z/56CPf/NU/CIjvt+hqOxjJYpI1bzz34knuL7STSOebh9sHFs5fXJgfFXNp57U0TjHisLdRT+SlUrUpMm2dOXnmvRcunPn5f/FLSQsiaoklT33qmY1bo6Tb3doa95aXz39kZVJ5HgUzzC49efL++3sv/t4Xbt8Ynxt2Lz796Ne//dyVG9dcXWnplpYXCSJKGw8MCYwpPP/cKyeXOsPV4I/9qY9Mf+Ebjy8Mzzz9aLO9b4vaIUwY8R48+LqpnSHdfoc5pw7vnHvqCR9KZFOQbaPjTkKnb/+fPHrGr/2gNA2xtMoPX3nxO3lxliPhgRTKNaUOA9GJmLGuUEYjNGgH4GA47F2+utk0GhPHOa+Mn88qraR3OolFVsycUYwjxsRif2VjZ/f1N+4AZc4BC6hqyPHhuJUWUt2883oW9U+0l/r5dHZvCsI4AiCUhkHAR4fb82KaxMmw2+/FcRCH2XzW60QOOddrtdtRvNT3qgGHiDVa5lrrYpZhjzgXgBhGnFHUSuLxfO4RKouGMOgtLquqiEOWkNTmmTKYUupE6DR22EZJp6pmzmvBuZOaB0iBYiGfHR0HcRJ0F60B30xl1TDOwFtKKQ+5rGtCSZHNMSVREmtZGykpZ5RR611d1ZFohXEKDlnrrXy38FTLOuSYc+78PWs1VrU0zoIxAJ54oqxGGADAIqalbscdZQsWBYBMo0y7N8iOc2/m1sh0YdFbS5lHYPLZAROBUk2SJAIL3W8dj/caaravX0/a3bTfC1m4PToGipxUyGPvod9Z2J/MEI063WR7dysOMSeBaupWmmCEEIbZbNRrrVDtTNUM0sglgoqoUf5g5zgiaHJ8jL0K06Uz5068+cIrUYezGH71V375T/7Jn1g9sXZ0dEAx7Q7bYdQuRsVsXlSNAmWRJ8ZaFvCl1WVVsFIeEywFobWBOBFhFCCE+nGcldPGWUCOYGSMJx55YxHyhMBsnrVjlmUSWUusG3TjxmppXGjtUkS+9/sf76XtF767d+7hhz/1iUc//R8/M72ymVf6T/2l/0/a2x60+2DfWOjkf+svPPZP/q/na3BHU9MoHBLHPGAMUltGCYCl3kMjG+v/+d/9y0899r6v/fbP77z1zsNPDPfqObX1IDqa7TQLISS0xpzPS4Roiol979OPxxw7cEbf1yijxschoLSbmIAHCVrrxgcTGcWJL2s7z1WYdbrdYBDhKr83BRvbY6IhVwUmIo2jOcop4E4cgSdZbREYqyvkEWq02xvHrehw5zVPKKJCYoZ9cHBnL4xEvyeOdo5290aGiUnZIIQiFje1GiwMka4B8NH8MIjIaPf2XNpIJMfjmaXhVM4J5cQxApx4AGtG40Mla+RRP6UXT544nBxHS7zdg3FWDlaWDyZHvladMAx9dbKVNMVY1cQ0c8oXqnll6izptKM4rcoctJW1wmnMMVy7tTGdTgspjRdnLi3UdWWZ59QbpTkyvR4lxMA9gDIhyHnsnLcGsN/anksQDgNgY5p8Z+8WAOHIO2339rb/2E/+kFSSWnwvgAaAEMbaGqy1nMOosoOltJcmujZTWylASrvSec/EwbxOGW6MHsQhJhCHQV6pre2pNsJ7QBicuafTwAxRSgGc45RpbYFAkoh8VlptMfbIAccYUe4wDjn3GMq6JphaB5ixKEi1nBPkm0ZpYzDBHmmKibaNsrWzxFiJnMceU8oQIESZRyQMw3ar06gqywvOqfegAJJut5Wkk8m4kNLfE4M4hwmlQnRb/aLOjFVYEMGpdSbgQgMorRcWTh4cbBNMCaXSW+ut955RRAmxiAnRlrOKMTwcLqRp4ml4eHDU77XybFLXubZKKYUJYEKd8/eQDoAAPCBMCIYoiBbay1v7t5S3mGEAcA6c9wgQAHjnMUWEUUQ5RpwSgpB1zjZVZZ3xFiutrAePkbH23hREqQgQ01qb+eGNgw2pAHCAEfcGqkJ3RHZ4+dsfeGzt6NrGqYuLRy/fePuNq9PR9NEHzy2stTKpstI+9sT7X317S0hjj+p6ViO6efnOm74YR30cxPEP/uCfLjqrmxtHy0lEZ/tv7lxZe+CRtqYce2/rhz5+/827h2AAPOKa2mI22pv5vqOUDU/1rCWm9vWkIMSE7XCFLWBcWZNrOwh7JwKqsvGOiGLKiFXGNKTVP28dqfNxb2ERmaqaH7S7ASK0aer3P7GydOJUjft1UVuB+gtV5zQ0waLN60iaZYF++E89/Km//PFyUozyar6z2+6KRx7q/uw//N4nHgzHh99+6Bz91f/wY7/82Zd+66sbBaDGak4AwFoHBBBgRBAigJy1BCPZVB6BcwYhxCgx3mEMVtYxBeLsdJpxx4I47i4PqJlevb35oQcfG/Z6I6erWgHoprG2IX02tFCkTBnqGEjwxntbVTLphFEciv65vXHeuLln0WC1RznPD8e9TreyYfvEWTbf29u9gxAEcS9YXUqi+EONf+vOd4ym2OPxXh2f7VOB9udZU5I6oEEYe8BWaw+eUCZrBd4Dwsh7ghHCwMNQG03AJgHCPWoMBCxRQJJkkNXoaFqfW05Pneh0+Yw6CzwNAiJr6RHiomOw8wBxK0HgqdKBw2BdWUguqNMyG+/yiMQBM0q/dW12eWf++uZkJr22YRSnusiUMtYBJbTVTRCGIs85x2EQMILzrGwygxhupTEXJJvUp/sX98ztI7kFyBECHoEQ1HtgjFonWLL60Pv/xGPv+9juzt3n/vA/Dzu9W7ev/Feroiigzvrd3X3CDhAFhpG32iOoyjrgArA3RoP3CANG2DtKCLbGWqUJIgi577689QXS+eiT0eJQyUZVo7n3OI6ik+sr1s8duN5Cq9TVud6SlEo25cPn2kQ3XIvhauvWHfmFZydv3B1lWUhCQhyYUh8dzDDlo1nJM1uNs4DxVow5cs7L5fXBoNcOBOKMtntdQOLESmdcQpLQ5YVwf/tGFJultti6s408MXoWpsFqgvoh8njqPdexSwPc6gqd8pBTjI0F1+0ttkJ2687BmQunW53BvNRppw21Qd488OTF2zuHYb8tKPPTMvR0NiuSpJ+VE6tKT0wad7K8LGb1wonke7/3ie/+4atHRb3/ztVf/qXmp3/q+yitjMvDTqc7WNi4U+Vq2pu1hkF3e/tme305oBiFPAionWVW163OgCApixy4D1qDygDhYUxOcdq2qiLIE0aR9YRy7yxTCEGSMcVRq8qUciTs9MtiOpo0UdBhzh5t3lhZ61CPylKRiPRXTgio797cHizGTV3Pi1L7abfdz8t8f7rHEFisgFFZa0u8Jk2UdrLZvK6VNPlK95yVE1tPpQw4iwgWjNLG114QHoW2tojSrNG6dp6opBV6Z5MgcMBExFqdfq3Z8
VElbdWHmCEpQnKwdeCQYQqwwZO7h++UbDpXR02U7zSH2+X+9jwbYdOgj33ovU5tHW3eXTuzGgXMYZOKaMAXW8utRtF81pSVxd70usJrjVk4m88AmiTtYNsIWhdNtbp69q23r1tLEKbjiaprq5AUobVqFBHBqoPhQpgu9O9NgbOOCGoayUXgFbqxd3D38OjNt69u3N21Suq6QRh5Z6EBABAEktA//Z7VRy+eWltrddN4NhrvHM+fe/6Fo2k2nsuydNLgRvkGsPfeA3EWewzgLQJvrcMAVjljnEMEvAewYUTPrndXVxe6adIKA+QoZZRxQQhFgBEwTgkhOA4ixgLkTRKHnPOyLErjBCMBS8Ej01hCPSHWaIUxnhVlGIYkpdl0HrAoSXloXMhp0ZjR8Tyw0EqFbhRCJIhC8J5QHCWxUQ6INdbldeWdRxZ7r+KYxjGjSEUhFjzcPzzsBkFMiUOuUr7TSjS4/d3R6HBsrNFaE0CtFE/G00ZKhDAgBM7+zF/4/mdfu77x9VeZA60MACisAWEHlIrg3PmzC512nZUEQHAKDqqi0tISQjyGWioRYB6IqqkJAOPUItyJotmsVNpNp5M0SsCavAAtFXaeeOq0KxqprGu3ouGgXZdlgGCp3daN0shRjP+oGG889u1uZzqezKbKOVcWJccEIaaaBlGMMQZwlBMLXjmdxMFwueusaUfdtMWx98662tRRGN7LLsWECUaUkk1lA0Epwt5YTCBkwgMggpNENIVSdckICO6boj6c5dO8nmVlOwqU99YaCj7itBMH6yvL40klDfDQBRFrVEM8RQFpDzpIOVWrJIhUJYtp5bV3GiowAQUa0ljw3CqOCLKe8aAq5CTPjdXgAHlijW1FgbY6ilOG3/0wSlNAWEUdH4kg7SzdHu23CeHezm/cGbTP72fbC50gMmJSG4aQR+47L7/pWuXD3//4cTludfuzjZvtgRNDOr7b1PlhY7pRFfR7ixlMZ3Mfds90ffv9j7Sff+3ZxU4UtRJ9eBjPcb11cN/yirbxwsLCxt3y7vFhr7dgm2DnGB7+6AeXm+zFF55fHPLOWlDayWp79VZz54H1tb1bR2VVTqoKI9oKuVdaADTKTow5d2J56yi7c1ST0vfaoXY+U/ZEyop8fjyiC/eF3SX6A3/sg1/8wrODXjLNjk7cN7jw2P1Xbl196OkTrx+OCn20durM2IwnZPLY+5/ZPdh7+RvXLj5035uvvtLu9IBTl6Qf+TOf+vK/+lehdlj4TirQpDg6LAD0Mx/6yPXXXsu2Mho5RNXCeu/61eMb70w4JgvdwXLcHx+Y2KMmkz6ChcFi1GbamuPDiQC6c2W/RNlX9l77i//wp//jv/7tYWZ6naJHmr3G0eHpleVYvP5WvT/mQctLkRo3bCdvbWYxdpX181wyQb0FHNNWN5aS/Mb//p++9iu//s6LL923mDblPDuu44W1YHm9NyCnHjv/kfMnZH7QWV9++/Luay9ciRdOVka/ffnqrY3jJAw+9YmPBIPF7fE8BU0HidOQS51Xs4UwefSBc/PJyFe1KWU/DB5/+MQ3PvP5Tzz9oX/4L39vnooRv7Wy1Gm0MeBlpZTSXLCm1rWUBpU/8AMP+7iv5CZzM4NPWkXB5v/nj/+6GPz63/zC90lLAoqZKLVLGAv6rSULrpKbitSdZPg9H/sYkmr1wpn//Au/Wld1nmXST6KI21JHaeAlzcsySYWhDlBjvI4FZ8xzhgfd5bcuv+0cYZ4gQaR1tbIiiL79wuYz59iTH30k9VU2PSamMzmY3JuCIMDdwXA8zkbTMQlYV3QYIVjJYj7XuiGYLC3193amrTQ+t75ya1qGQLEyg3bn+LjAPDh58uz+7n7YTS1YhCEUyXQ6iSOx1OqPdu9gbXx23GSTMOkpaz2QsNVlPLDeOgVNNScIkHGIYAsaOVsVhVENb/WR066e1WAh7jktA84YZwZJcFZVMw9QNo4xypB3SiJrLFCPuHO+sWZxcS2bHAqGAJgzJiQCqK2b+t7/DkZYLWUUJbUrEFaEcHCMIIY9Addo0xCPnQNd50iEPE6MA8K4Q+Ltd66cWBhGaSBrZbUVghuNy+mxtrNe/6wxzqhKm8JjM5vNKOenTi7UzvYHLWYcaBcFIXJQ1BojNstL5zS4fFq7KOaRYF6piIfU06rJPQsbXVdqksbdujac0rpxDFPATaXrqNM5E61u39382rOvDZd6R9szwSlmMJnz3/itr/z3P/bDBfjZ0SyOQhK0Mba9IQ/DZH54DE5n4zEO0l4nlLyd16pp6uO8NI27W8zvW1oYdBMq7Nbx2DnHCMUIABlGMCDwAMbhw8N5f325E4eNlpiyVhiZYp7GvDwqVUWe+eQfyw1aJ8dPf+jho+uvmaKusioBevHiOi2OuifvZ3mxf/Ogh4//5U8/6SJz7drB9b3466/uV9KA9wHnBCFCUcTJyZODxQU6CEYvffGfL/VIer7r5vPU5u975tzL37714Mol26Bvf/udH/o7f0VToi0tjg8oZtX8oBrvrZxYlkXV7Q9Jtz1YPX3tbnHi/rX6cDzT1fqZC6rCEWfF7jaEw/apfmX/KE+hUciEYNw4jzBFqgHsjJSeUE6Y0SZvZMi4bSrktG5qjJH3hlAz11pJr423FuxNVRZ1rkA6ab1ttVMkQWlkpCbeZnXGGQ5jhpwrSp/nlfMsILzDE8xY410Yc12Vzjvw9MTaemmbYV9s7N4uqjqsKqSuT8dV2l1uC5PpupW2u648uP1Kt0V4GHRbMojdZKSJ4DLPvYg50bEImlLaRt4+mGyNxqVsWq04m6v+LEuqbk0x60XaySg0gkuMiPf4HhEHvAUE4K016sY7R1WNjfcIwEpltXHWOQOCYQBcaocRR+AAI/CIEoQRot7bSq0+dGrl1GLfkfVuoNVVj7Cd5iLi4E0QE4NcQ22UJCShg4VIaq9KWplQWw8OIYIJBWstgEHGY4SQx9gBRT7iWAQYp2FZSe0sRoAIMUoT7omnQEgaxQhhb501rmkqAoA9EoSXukEIGBYB4QQ7QmypDGcBBrDGWfBKa+agFQsGNmIIEOv3T08n48PjkYh4L2ktdleQ9POjOVBkjLPWAUbG2LKee/AWAxBkjQVtPSBV1ZwHqqpCHmmjMEBT1wAIU94Y22JsYW3NWdxZXmuM63UHjIA07uTJE8VsVpVzTJB3QCh292grAAjDvRS9954QDN5JLfNm2u60Z2WhG4kJRtgTBIRg6zxGpNtuB0IQyuZ5oa2yznjvjbPeOoKZt85Yo7xx2v7RL2Q9y3Nrba8TAQAjdH6Ue8SMR8uLXVPVs9u7dKl7MNFH372lmnztxMrHP/Gxt7/5bNjp7u6O5x6Abj36gfcfbFzvrcb5ZNxO2/PJTCplEJ+qvJJF/3yrFfv1j3+snmz0dTnsnd3ZvqWxrcui3NptSjk4ubR383i8uX/25GoraW3t3BWC8cBXVY5cGorAY2k1wlF7cWFx9513drZfX1x9CHVDa+N6WgSUNfPKN5I6cDxUDapMMeiHrYX24OzSdFdP58XJ+08mve7d2rF0
JR6wIZte/fofKDH5n//qJ0a785NnVxI/O7qzrSoDgIdJD6GoGhXnljtlXomE+jKj5fHJiAVerz1wav/4cGt3DoCDQMhGa20AI2V0GHGrjAiCom4ccghhBwAOrHOew87xuAH2sR/9C3iWv/rcsxEni0ttw8AjJABTCxS8CLkHdXOnfvWlvYcuRsjOe52IC4aQNaq2NgRCKjz0wZn2SlUfXePOEEZ5FNVi7r1jPBbxKUL60WjOMGSzw+aQPPr+9fe0+Yff3PkvrxyFrXj7zmz3JF04ky4tt+fTIpuOwzAhhGMg4C2hAM5bhyiKKQ0dIt5pSjACBcZQrDAJfAOASJgmLBlOtsbnTw0euW+hLA+RygLGPWWADWWAcIjC0CJHkNXgwRscRgSoMzoNPbaumJWEEmIgr/2zbxx88YW9aQkac4dRY0yuM4ZsFKerZ9ems6yajwE7zLBASFaNIUB7qx/6/u+/c/t4762XCMwZhv3pLUMa8IAIWOMp5ZQzZzyhYU2Sx97/wb/3v/y0bWB3a/B7v/mPfVkXs+K/WhVZYx2yLETWOYSxtdZrQxgJwwCB19oRQMZ7ZwwmGDnvkEUA4BASAAhx3vrSW2rGou95T3u17wTuUDH88rMHn39lf5QHDEG2O0HIq9og5AlDue29dWC+cTmb69nhWFkrQh6uroQ8ct672aRWOSpr4xwkYNM0XFlbTDioWdE70f/Yp95bTPLx/ijttlZOr492pmVulcag9HY20ZUtxwceebB00F8FfGK4furg2gvgkQvR1Zs3ljsDzNJqVHeGUdCisiwcYoNBhxFYPXvSUSYhAmTAhcbkWCI3K7uKgPFRIBpbeedWl1fyPKco75wIJUlvbO8vLq+fvn99e/cyj7vPfOpDr3ztBX1ndvmtu//4X336ez94+rH3LuaTkSlH4PCDz6xXk/ndq+8kvRSFQroAaxNwOan3D0Y7q91OU2ZKVd4Z52RRNKGIGeZWO08iWUnQFcOWCjQeFWVuKRNxtwUBlbZkoRABH/YGe8c5JUzWYxIkB6MMFBFhe+/WTS/2tJlR72QFRWERjgarZxPWuvH1NwZn2oMFsnd7zJwOBMmKcn44bsVhrxMgaxZ6vd3tzRg3wzbfuHUtipeVGo2njDNOAn68U3KGsSC1qinjBqGqKhiLmIg8ZkeNVSwYLC0wahZOLBEOx8d16fBsXiIEEXMWOUtgPBnN5mZSZPMxHI5qQCTPakz4hfvXb22/HC/2FlZPexd3BgskWlCTo3HucNwVK7ye7u3vXO2mjlKgwoVRUFW4NnT/8DhKtLdQHu/e3rj6fR84AQwd7JooCW/v12K13e+hO9vXN/f3JmUVWHNvCpRUslacUg/w+huv/8JnvyhiphpspCQUA3FxgIYpO7e+/PCDa488eFprMxlPr984+Op3rs4yNS2U8cw4AISsxwRRax1GHmOg2FOEGMGEIgzYWUcYJhTqSjcaaeXCgPR76cmV4dn1ZQSM8QA89hZzzgBRDIhQBph0+i3iQRsTULo0GM6zzAKLku68zjqttmwa5z2ltC7rvJxj7wnn3iALtlbGIgTgdW0IobLRIWdJSimHTi+ucrDGO2eQAxFRTMEqX8xz7T3yXhsdck5ZAFphsEsLrSwrVVOtLHS1cbWGMBFVZefzKmvUQitZ7EWEor29cVVVVSOlbAgGY70xXgJ868W3Xr22E3LhLAJnEcEAhMW8ncbZrNm8feBWUcAIcgZxVpXlVBZJ1G23W86TRAhAuLJKIRNQCgTqSlmLMeez+RFBSFlEEXLgKadgXafbw4yNN26D9UXujVZFUc6ynFLCGO0ksTMoiFsTOyOcamOaWmlps6yAUDDBrDLWWI+8d/aPyIgOrOdCEIyzXCcLKUIEPE6jqJSl8xAEAUKkkWUSJplqCPFRHEWCg/e1qqMg9B7VyngEpjbe2aKpvHNBIIggR/Vx0l4knDnkPcdKAhVUWz+pK7XvQiTiIOikiVSql8QFsUdy3kU9xkkUcExwILCqpXfNMIlyOVtZWgaEtFRQWOvo4VTv52UaBtY65xw4lMbBMGrPjvZ7/TiJiNHvWm+2x+P7gsWNqzv97qAqZ8v9wCm3dVCG64t/5b/7e1/4yR9OpBEYASVKacxcr5PuXdnffvt31y6syjM+aXcOb+0OHuhEJ5k8mCMmMY1tfVgcjUf7qAoC3O1Nrt7M9mSHienhjBbQilLaRU7B8eiIYJm2kjPd1mBluZLo0vqJ43feapwari7m1Yw36frZB77nB3/wd//ZP7ZpFmMoC10dTUlEBAnGRWEajw1QgRi2n3r/+z73+193BOUHs8EgnUn1wFOn+wW9e2Xv5EMn9fzGyfXhIMDHm/m5Z1YOpvNvvXb7r/7NH//9z35m/fTwY+8788ZXrq8/fHZw6slbNzvH+eJx9vrJplw+2QrRCVoukW674gMVpIeTeRy3Q4OJ5ctnHgmc3d6vpoXRzOWVvHJr19Fm49bVT33y6VvXN46OZqvtaGGBGxsfH006awNd7muTy7IRcYDA/u1/8w9/8dPPtjz+6kt3drPs0pn+V7/xEtROJK3NWdFL0FSwQsCpYbJxK9/bVd1IrC+LU2cXb24V28eVC8jNg3LYC5mH1U70hV/7t/sHR1aVmRc372z8yN/48fWPfu/Ntw/uE0FemwrSstr/+qe/NCo8CYbrp8/cfen5rbduLqyc+Is/9pe/9Zvf1CKb2enRdNZCZBjQ1dNryYD+1I/9yDtvvvG1L041Jiurg8O74+9+e+PEsH+4d3jxdH+xtXC4NzIgnXFhSLO6DggXXGBPESK1cz/0E3/Fmdocvh21StdZC8LFLCt+9N/97d6Jj9SWUEAY9P/9736ps3g/14tXrm9rXFldBoLKJv+tz/2KQyZ8MX3Pe59wZb65uX08zbNKtZMAnJ8aeOp9T73y8h8CcsxbLkhV1RgjLc1ofNt6BBi0dtQi53xtbWsx5gFc29w6NW6dv7hYH7DGIh++G7euClVXu6qRRloeUCYCawwXYdoKOMNNo2azhhA+U3Awb4JO0mIi3z3MM8lIqizPDEZJOK8Ko213GHmEuu3EFHl1tMl1wZjzDnMqyiKjou0IUdbKqsJUhKJlqQg4qrOxkkogTjipswoDM944YpT7/1P139++Zgl5J7bzfvM3f09O99xUOXd1V9PdQDc0CBEaEBKSBmONvTTSmiVZlj0ja+QJwtYgWcu2rDAoIxoECNFANwjoVJ2rurpiV7j5nHvyOd/85ndn/3CLsfVHPGvv/ezn+TwWKQ5ERRABFkirKWVllkPoknZnUZt2q3u0/0bSXvU8YpVhCBuAnYNpVUOA87JotyNEEHKmkimlPtRGavEAqmKURQjKpvTbUV2XDgKgNUSW0wBBjAmGPtdGIgVV4wT3lzae8Wg7Hb3WNBM/7mR1s7Q80OkMMAmFE0JBgJSWjBIvjncf2ibOHb57Z7CyRSExurRaMc6V0hAbY6EQpTMaQ4yAZ4xrtPEoc7qpVAGdw1D12wOt3bxMe61WMc/yIgVIN5WEEFRNwwkdrG3bt49G00o7vLG
xkstCIkqdwEQFSHU2+mfn0yKH2rqA+AhxvxXWZcMBBwjVRSNrHYfJ5nODu3fPprcOpUb7F+nFLFvudZY7kcsqYBFA0HLcNMpZoKHDlDRKOwcuba7M0/x4NM+mCwTs1jA+yeXDj18/mcHRfH5toxujamqs8alhrLfaOrz9bmQuIGyTufaC3tn9/RyNvvcHHjFjF5Fo/4BPMlApk8Re7HvTRck8FjCSHU7/4OAPH3+0u3Vpt72+3m+T/Tvv3Z90oyd+tvXk96fHez/y0Z/WNCzyc+RoyIJsMXbawbBj/TbtcAEUsNFobHCp7736BnZ80F++e1on/eWt3cvp/VuU63tvvhX24gcqWFkf5otGad1uebqqkoBijJxz1kkhBGUsiiKkTW95qczzsm6U1dY4COBkVlaVwdyrKg0xIgGfN5oHPIo7W1e3ZscnZVHvX0z6ba9OBQ/j+wepbMqDmYqp7wGythQNWx0JjNMlAg5gWIgmjuJ5LUmYZA2EdOj7wigwHTVh0J2ej1wjPC9YpCUF1hisAVdpHXJGnYJwVhQ4T6XfQavr/SbNGlGlxfzGnVOTBCxuh+1I2nmrF/CQVo2WdQVw1emAJAHOAecQQM5pjR8sZgGrBHj7HV0LYjGAVgMEIHQYI4Kg1voBHtZAAAA0xgLgMCQQAEDBz/zIJ1+79+3Npavp3tl39m4WpRPCYAtiD2xttMd53h62HAC2UVIXSmlo8TxVFuLQx0ZqRIhzziAEnDPGYkachdTjlzeWr6x1p0X61jv7sceyUgPnALQQOKOkwIBRjxCMCJdVY4GBEDvrtNbaWIqJNqZyElLbSOnxsMWjRjfAAYysdpogRBCC1oqmPLnQCNOskLquA0a1UodHxyfnp6ZRlEJtLQSQUGwccMBJVXPOKcYOIsAAhhBoBxgFzjRVDpwhAACttFIOWkJYJepLm1tXL18vi3w6Tj3OF4uJ0rYoReLRdDZqyswhDZ212gAEtbXAOYoRJQQhbB9kmqRTRk0XE600ooR7TCkNIYIAOAuQg8C5xWKOMKSUAAcBhEZrrQ2AGFPKiGe00MZABBGC738hW13VZSWUH5Ol9aWTo8lgpdtqRft3Tgl10XICbPDVl+5ZgP/8n//pL3z213kLTaYT3m7PFmUUepNpPjo8+tof/D7A9oPPPhqj3uHp4cbOTjqbNXnlORoi02rmi6o8UogHIfS6t969iRn0usELz3/kfO+t5V589809CoO1jU2A6bhsakalhcvBcLz3xmwybw/Xrj32SJrWeSEhoCy4ZM3AyN7FRQOp18ynrsmttbKRYX7u99exN4QeL5saqEXLbzuos1TZWk1ne9HOVdWo2cVCwNy5ZV2A2KuvPrQUrCa6RDlEJPTqyWx+UfeGV2W9UE5P6rLb8wLnNacLM57prBgd3tcGEGccQkYIijD3GURQa0sRtNBCZ6xxBGBrnXMWAQiA4w4ev3v+l/67X/iZv/7zAwF+5GM/qOrRfDKrpvlbkzdnWdo46wCwAEDMRpW8cTpZ6sXtFYaQ5R71o3aVeTQc+EGP+B2kFjY7Snzfj+IiKxCN/bYCmkTRKvDaxvkR78j8HCMXIJROBcL68afWv3hjNENwhQe3DzLe5R62EJi4N3wQ43NAAWOMsZAghrgxABHjHAIAOWe1qBF2PKC10p1ux+vvwrhVSvRErxPDBuEsCDThPrQIYS5k6qC11kKlAIGEMqW0tZZQpg1y1mGMbCMgYI2k++PyP71++42DPK8Jw5AiAgkstSQUf+iFDz//5Mdffe2Vg8MXRV074DyKda08yHm/9RtfeZFQwCn4x7/wL3/v079IPZAVY2Wc74dCaYSctdAZ5IwBwLRD8NpLX/4ff7H7sY995P/zD/8B51gbury0cvPOnf+/AprWBAENrdMOYWqtM9qAB/B5wghURhtCiLUGYOSMhRBTn1htrDEAKO1QqtznXhzd2aMfeXJ5sx+8+k7xzbfrUvvOamkVxM5BmM1rjCFE+MtfOWaceIxTQkNMloaRFgY0anxRGAQAxqrRlKKVlWjYCzZ3lzXC/X6MGukwHV2UDGE/CVkQTC4mxWQch92NlXiRnZ4fj4rZBGIyGHQ2VtcxTnIJbt291+MhhqXjZmlneGnp0qs37kW42w4jx5RWVhtaNIhSVGvtIYIA7PeWMQXaIcDQYpI2c62khi0PAAxBYB2cZvuOoKyBloXt7nIj/FnetNqXxtOp56Gdy9fCaApuHJ6dlv/qP77zs7D3+KWuVRpBK1UqhSaAY0lR6rSHi7TELYUICLudXNbd9goqmlraUqe1yzxAy0orI4LQj9s0jtDF4SFwyMqK8QAzShke55OtaOgMEsorSl+aMEqSps5GWdrmRGnrAKQ+r2WllA2iwCqNjE4Y2X/1m2GAoJwe3jhNR55uVFNrhGTSG5TzOciJj1ZrZcXY1gKyJLrx7pFB/u6jmxSk5/M591AIsDaFUzWKg+WNbdPMGKyVwHWtDMSYwPl8Hnv0uY+9cHzjNWLtfDyiBJ3fPUAq4F5gbE4CEreYXBCdmbDVMpimZ3dyAyZp+ejjT7cHyd79c0Ksz+l4DLwoopQZ2ur2+8rpMGHdDgzoAuh6MpoIaYx2YRiFsdeMm0aTbqd7eO9E5Xp0PouSBGmOCHnhYx+/edIEho6qen+0WOSikKfv+6YEO2t5QBAlw37no09e2b203IpCwkCn3YuSTr+dBB6cpeO79+9/+re/Np7Mm8I6xRoIHWCEM+KgshY6hzGEGCKAAp84ayAEBKJGKEIJI8hYE7XC4aAlhUIQI4C80Fta7mOHPUyMApwwhDAlFFOsGgMhCEKOEEbAaW2llFGgapkTDzVVWZSVF/CmFhBYz+ONEEoLTLDne00jEALYWe7TaBCLWldZDbnxQ26h3VxeHU8nRZpb7TDEYRIaDYqysALKWlqlKaMA6DgKlGj68VBUdVbmC9UYBa2mECFjFcIEOUKx7bQ9Lmm73R4vUkjAzm6SZ+Uiy4bDwb29s+l0oY3g0H/5zT3goINQWx3FrNdrC4fSvJnOMgzgZDqtqnxtrf/sE1eOTs6FNR3W8hiJkrCRUhuHAUIMdYOoqmRVFpAwoUwjZKvdJpirqpbGIgx57MWRt5gX0ILltT4yJp8WGLn+UgdBUlcNhq7RGlpkmpJxghGAEAKKgojlGVoe9MusnJydW6MhJkBb5wCACGGEGUEIJ0FMIPP8iFNinXPAIISAdUqqMEQcBxwjn/vScGWMUJp6fGNpoykba4FUuXLQYegAssIRwjDGlZGry0sYYs/3pKoJNAHFlHseZkZqKS0iTuQlrSkhkGAXh6TVXcLOykazgAMHDHAMA0BR4rOV9oqR7nhc1EJiTJQ2QkpOEYQAA8Qp085aZ7J6ur3TN6oxqrLv+6XgyuYGDfy7e/tlOV/bHvaXWNiLxQE9E/VX3nvNBp7FiAQ+VooBEIZx2Fmy8+buO/ffOzkp/tPhJ372A5tPXM5neY1SoQUM4NHeweLwJGkPHv2ex7JprTeTDbV8cN7L5w3G/OrDa+0w5Jv65p2TQdutDzvH02mbsi
pbeK1kGFGQmQJQ0N/okiHE7L0379w+PFvqrr93/F2vR7WRwx4Tsty89pDfie/dHkeBlETeOb270UdPPxK7prm8fu2l1+53CH/7pXs7Q7a9sfWlP7i5e+34I3/2T3dxcO/+ycq1jdn5OFxLXv3OW7Xl7bj19M4H1j/5MLddwC9BHC99708wr//WV3/zkx949Nbejesfvp7z6DtffGeekqeefOj2jaPdxy49/PCTXvDEukdf/+xvSo0lRNrxmiyN550/9SP/3bf/4DelxLTn39+7G7QDB5yEeJYKoqs0y9J54bwoDOzv/8t/O91zNQgffXx7c+faOzffrEs16EZHBzKOiQqwarPZKbx976LNfc+yRpsO1pFRux2vDWGpAepAaHQg5YbLs9s325srT/zwn1pI+pDfrhH7zrf2e3EQBZiF9PbN+5PzMwP7P/mjHx+sXv3V/8ffLd5+OVG1Nu7uN77Zj7zv7N0KLvGP//kX7ryy//x652Mf+cAgcve+8vt//KXXllvLo8Idn6WYonGRtkM9nqbPvrD0iT/z07/xc3+Xck4gpZyVpQQQlrX0KHZKbq5vuGyG66//wS//6p/9e5+op+8ErVgHO8tPPqtIS0Md+Uble3/4+Ztz6Uk9yxZZYwqEbZxQ32PAZ5NpVWXZt77+FV1qgCCNPIPV1WsPfeCJR371tz+f5vMwpIUouMetdhCDrG6sNQhi5mOtgY8IC1hWNAqiuZQCl2PGbt25SWmL9z6YFmWt3Z9U8iUnIOLYURL4/v2LuXUaGtjtR9bVGAPOadROfIXrxQi5uCZIi4oQ4PsEIFwoO9i5enZ86AOHgK7nk9aQxR13fHDOCc2zNC2quLUlIfS8wELnnBVNQT2Yg4YEHcuYNSJKHNACOOUHCXZ+o2ocrIQ0mWbj2Pex1lbpPJ8lYd+jvGrSKlsg2Koy4RpsGgMjBDQs5lPsBUFn2RkIUcg4ta4hmFRlJo3krZ6GEOhKGcU9JmWlhPZ9J4QilDpltQYAYIy5UgogrZ1WpiHWeQRPZuO8cx512sPkqduvfnur22/31x1GVXa3yOaD4fLs9IwHAUEWAjxNy2Zheq3O0x/6+Dyvs1L6Xuwz4CwSxDiFOA21qJnnE2S1Ms6ZtCpwa4A4l1UR+JGxDaYYMn46m1VIO+2wYxRR50EWYuSsKOZBr7N7bWtloy9mxjbNnTv7SUKlrr/69a9srPTqbB4GPGglEHBV20WREWDCMAFIUk6NVhD42umyARtbGxKx+yejk4uFNpYX5aAbK2NrrRaL2vd9QEgjFYGIIUAQORwteJQ4h7txAK2usmZ3ebkYVYCFDz/xaPD2O6v97ujWzV63/dIbd9NGd1c71x9aby7OOahoK9x8Yjcr5PbaIG4n3d507yLvEO1iTAWUjayt4dA66Yqz6ZUV0m0NxLyU52bW1PduzXd3Pmi8we6Tz+3tvdvpdyFWQYCa3EYh7EX+3XQCAtz2euW4PDm+ywLy8Z/6xGQ0T5Z40/AG+sthMjOSBOFb791xaRrR3GmZvz96A0StZS2nea6EDDAm0FhgAQBFVlEMGeOMkEpUs4Woa2EaWRWNNQIBZyFjkHiBb5kramNp0FmLHtrdJDza3z9qRYnldarrpcGw17PHo+zWeeljPBeAU86MrdLU83zP42ndTLLR+trKUj/2GPCS6GQ8ya2hlkYYWmcULsZFWRWCehHABjEkKwBte575dVp5zGNmuW7w8vJlcHGBLMlnzfh80eonjJNgpT2DDgh3cDi7fGUYxBhoYYtcWzhYxgGtKMXAOWstAggA4qzVzjkEb707vXlTEuAZ6KTW1lnMOUDOGos5UkIBZ40xlBIIH4B8AWJgsNlS/NaTV0M5PRbjhSmJaHQldGe5s7QSFCpjFCNI4047TydSG9zGjIbxGCixCEJuqdPGQQesA84BjLCFEGCYGbO2vXbloc3WaHZ2NvdoK53N5qI2hAkppHEQYWUMxEgb6UceV9IYhRGGmBshIEA+5Y1WjQWEedpYC4RzoFHOYxQ6gqFWRmtrAABCNVBrLGuPMghMlRUOgkY7qwFC0DlngTPaIkqo7yMHOWu1W1FRZ2lTCSGNs4EfqqKx0AGAPJ80Tc0YUVpDBJOkV1V2786NSoiyUMZpTOBSv+9ROxqfWSUd0hhBhBHhVFsHnX1gGzgAnXMAQKMNoVhIZZ1DlADnlNbWAgcshIBSSgmS0jiHrLEaPqCAOWctQsA66CBGCAurH6xBOfz+t8HR3iklLKsbdTQ+Ppm32wldiHxcawnSRXUxLkgYXOSoTM//za/8uw88erWqq9m06qytQT9v+XgyP0e8fbQ/Gyxt3r3TGCBmZVTsaT1ZRD5YuTI4vH3jxptniDCDaHu4POxFY1hmJUlvj781+wYydacxCQwhYL4ftjrR+WIS89bG1tL92+OHn/3o7HicTidShDzZ0s25xipZ6SJ3vDib1thcevKZxO+M9/e8HmdNU2Z5ZKp2spXsPrs4PrFVZrXntZ0XIpcfH9x8qzo9byVCZ/k4nXmY+lFkYWPFYno/08YNt5d6HXrn9snp4SL02qA5J5zMZwvuDQ2yHmPPf+TRb71zsT/PBpuXOsHi+HRWEwgcR5hKgJTTHoDbayunoxmA0FoHHLbOEoqBhY7AuOO/+tUv/dW/8fOMgr/+X/2Ff/hPfmE+9//K//Fv/s5/+Px4/prveAOsagzxgDGCcVRVTeJiZ52qKmiNNcBjEYEa1OdWaFQVOOjUEEPUbgrI2SrvLRuXGFV51AyGPZGX+WJWHt5Pti9D3Tzz2OD5R1d+57WT9UvdQc/NR9XaejtqM8B8CzAGmDLmJBbCYOggUshJaCgC/vtDB5g2uiwarQzpDLp+r79o6nbcptphpWWRIycAso2QmBprrJUOY8wQB05gbSAKHHTAUWcExJBgW1qlsC2b6nA83j+YUhgFDAqlykovLXVDghtjb75386UvfUvIBqE/CSJowyOOKQMREdVhOxmG2Hv40e3fMABbBR20xlmCAXDIPagoWee0UwhXGWyyb/3uL3/rc59uMR8ykGW5abL/LFXEGMEICCUA1EZDiIgfBQCiupSU4aTVS9MZgNY5YK1DjDoAtQVSWQYhgM4qQRjmBJ9duH/5u0fLS9H5pGGYJC2khaYYNcYpaRjHEFuCHPMhYxDohkIELCiLHDhXS6ONqSVEGHMSDIc+BbIVs4evr7/6+gFcCuOlXl1XnLdkkRPiOwMsdteeePjmW2/2NDrfv52mzebV1UXRJO1YKtUKmJB1EiWRh+7cvDnoLg2SLUS6w0T6HLz4n/7g8u7GzkrfmIhyj/lhtx0aUczSaUBDhHhTKwCBMDIvcx50SNzV9cSpKs2zpD0gnIm8dNYiXV+79Pjh5PDS9kYnDmbVRdsnjVC9foA5GE+K3/6tL9kfeOxjH97Kp6cX+SKIgkee3UnHuagmNCKYS4tMk89sWcexafIJNtg2tXHNRmcdaSQCZyxUzbRWZbHIgQVCNgS7JPJbyWBeTKIQcu58D89rTTy+1o6y2TEydcfHEUe83Znn0mLkvASyQ
DnjebiTxCdnIy/gcRyXeRCAEEG8mB222glCOM0y7IlWmy2a8XBtd3KxCMMEc8d8j4TDw0UaORknUZhEnse0qLRU2hZHx9+11STyaa93KWr1CWiQKYdeM5+NmnGOkYXAOgj8CHdiX0NsnHIICuXmmSEGd5eSUlbjQnSHSX5QQgwev35pvPdanZd9P9q/tY+99r13xyzABEGMCfNpU2WtVuR5mrLA+Ela1cpoDFxR1UI71vKDpQDk4AoYLmajpN0mFnLHQFbFykwPTzEFXkxW25Go3n8eWK156NWNgI1stdqf+OiH4wBi41rd2AA3vRjvX5zePzgcL7KqElaDYdIvuUKYlUZzRqXWlBAEDGW4LpVzjjHiEYowJAQHIbcOIkyVUtzjYRhaaXELhglnhAjpOA+gA9BBBKyF2FpAEAKYOOSssxZgDDFFhIcEYYIR1dK1WhFx2hlnAairhiJU5CnzOUIUQAeB4x6N48BInZU1NBpoGfuE+8wAWBe1wZYTAg3E0Cqt55O0aqS1xkLnLAwYbirBMOKEtsNA62aRzaRxSZwQhqQShLqIxhiyk9NRLo3vI0Y92ei1jcFskmuhCIAB9Vmbe2F7vkj9iKXTIiQIU1TkTW/YDQK/1+1XdXPz3ZuLxdxYVTS6kmI8n927d2Kkxpx73MOUNlIQQltx5IxxAGCIsIdjn1dKG6lJBZVzBGGLQMD9vGnyRVWnuahUXesg5gg5baGWRlONnFFKG+cgZVYpUeXtdowI9QguiiaJQriK5mUdh63cr7P0gjJMCUEIGggJoUHgtXpLSeRXdSalNBo0xijVBF7g+T7BEDtojJ2UizD0nEU+psoYK0yWVU0plDKU0DTPasuSwOc81I0UVdNIgRCjlFZZ4YCAmEZe6EECLOTcl1IraCmnlVJ9FgUYN1IxxhqhfY8u0lxqoJXyfWYdOpmXZdn4FEPEkYNGaY9zaw2wSDSNqqWhGFOEIJeqFoJGIYcA51n1QAVOO2dR0vbiLlzkCw96Sou+36ZS/dan//7uoF0tsvp83m7FV5YHlSjGRep7bOd6J69qCOXX//Bry6fr3/u/+aRvymYxiRhRjFUoioK46/T0/ObqUzuvH339kSdbo/PO3q0MSv8b3/zmC08/9tjTz6fTi6N773YSsn+2X/Ngia+NTt995pmPZUWqcVLJ+dnZ/KGdrfHFeTM9HLZwAYQfIIf0X/mLn/r7//wzP/6jPzMZf0OWc1kuLq9GyQAojeaH1dn8aG2JTGeycrzQ6Nad2fZypDP42u+8uLrS3bnWOS6PH/neZW9pyR+AR1evRQO8f/NuTDnAzXR+2u0vOxBfWb3y6E//3EvfeSUE8Yu//q+f/LEffmy7/d3fKjt+7wc+1o03klYofvNX/l/d7WWuZ522p+ro4mL2zLPPRq73tW/e/b6P/NgrX/mtiCEGG+cEjqLzyUWZwj7GWOlhz1u5FMtavPbHX1cu2tq6fL310Orl7W/deqPbaoceX4tNuyrhrJpdnEcYtbh99NH19P4CUH1tZ+Xg7sUqa69vhKe1OjmecWsfe3T5x37sY/t7Gd2+fPtAMO5HXegsHPRba2vBOy9/+2K8mBTkynNPXN69lFj94i//o/G3v7C7GiRt/603To/efUUr+FM/+MFvfvkrjw6ejVudtcC9+PufR6ryMeskHUetw7aRankl6sXy+z/+7Jf++B0s2H//i//UYgw10ErVQmIECURpVvJ2JKpiqRX9wW/8s+/7yWuf+ge/mJogGHbF/MgCgv0IiCwIPGTl//w3/t7+/VyEDlFV6QwS5zHgcZxni5Xlts/7k2lmjWERldp4FBpNLs7OjwYdyu3pyb0oQAaQdF56AcccU4ObxnBGgQHaSISwahpOAGWIIAcgLoUL2mR8dLQWP+X5SWut/f4rGbpaaGms066RTRQyLaGxThvJmV0eDM9Hs6bR55OMaDkIPKMR1FYp5SxByCDr0rOjhOIg9An1yyzPJue6adLJtNsbKAutn4goDkIOADWy8TBCPEQIIuLXTQVUwwg3VohSeBRbjSqd8pjLuqyk9n1mlNUaIEeiYAgwhIRG8VAKQzGVsugtrTtq8zQnLPLCqBEaiiaK2mkx9YgzwJSN8IOYOL8xyloDnGPch8B4nFOIMebcD+pCamAoBJRxiAk0TkjBPI8QrKQizA77HqgPS2F7neHW2sMYIKVyXQlR5D4NMI0xaryAA+3ypsLxxiM7fWRKijSOCS6lLjOja0J8UZUE40bUHmLYAmetMQA62fHarlIA6pWl5dOLY4qZMRoa045bGFEB7MbG+sXoRGrpRdRIU89SpOzWcIAR4QErjc7mZWO0tm42S5eSIIhaJ8eHGxtrRYXiJK6nIuFtoapCzla6Q6Mx9CikJaPUKbGx0g1CpkRzMa9O5mUldDf2l5CXc2KQ0w5N56YQBlqntBMYLSbjy5tbpyc1ILS17JVlLfLaStCcTluUzsZng6X2/u39snE0CHgYhSxQwjVNI7BlQewlS0Vuv/vV907P0h/5qR8Ne29fTItcqNPDeTbLe6sxpjzxcUx1EPvPP/89so4RWu5z1m51GlnNL44vra7W+UTMUplx7vtNVd2fHY5nc6XQ7vWHelutKy988mD/nTv3T7vdrjJWOtZeXZml4kq7XVRACXm2mFnultaGYfL+6k2SBFapJR5DAIvZDCrCOKMYM4opxVWVWmsRweP5wlkIpayrurfW80OmKqM0JJwYgBBDjvDecn99fVDMyp3VHubyZD7rBx3AwmK2ABqEhABoWzFj3K63O/N0erEoOW4gRoFPfY5YgMosP5+nQsKqltxjgTPS5qDCYRBD7Bmp87Jc6ncnJcgPc+zX2xsrBpocqeGlq0RZnGmIoJGu3U4gBIeHowaY3Utr1VExLnKt4MnxhRdU7UGHwbrXiYgHtKoxtU4ZSglGGFj4IHv94hfPJ1MqrXuwqIUBRgA5gKxTUlsLHIIOM2KchQhBC511lIOf/dnvBeL24rQ42ju6duVh0dCX37g5jPzHrq2Fa35ae810zrCW8pjxZvvh3UanmCR+n+1et3lZVQsNjZNCQwgQhMZYJw3BBAEtZqmpmpYXPfzIw1/+9t4PfvAH3rvzrbv3JxQBhCFjWGkgjGM+0Upya5WqEWacMg+Hpaid0wRYaIxHA+rxWkoHIULAOWuNepA4KEXuUZYEbaFMI+q6LAACxupGCM59jDEEAGIktcMIUoIZRk451VQVgu2off3yh1fXVr9761u33rsJkOOMCimMsxBCCBFnzAHoEZal6Xwxi0Le6XQ313akFEWZZdUMOOmw9pkvlPAoU1KZWiAIEcHGOMYwANAo5ZyD0FKGnYPOAaUUxhBhSCAilBgAjQUQIggcwNA5a4x94DJB4IB10AJpSwQhI8TY99tnAACEqDJme3dtfjbGAM3r4iTNCY26cZzNK+qTrfXlphB3vzsCkmRZky6yzmCJx73LG1s+EOOXTgxshgMf2AqEqzzqP7txBWf511/7ikriVxdnUeJjyrgfeTzaWt7+1tf/AIc0CHtezKQSUa9VSVel
uY890UwJCj3EmUOjuyfr3d0iL8qsbncG73zr25uPfYRFiViMAYWYomDIGWGmOVvMz1kcEEYghh4G1opqNrfBAvubNPGN9QGg1o0m5yesd7XdaYvFSFRNHC/li1RIvbzW11mxsr6VLso8d/nsoqqdDZJ4ZY0peDqarD38sM4XVZ3XVRW1wN/+P33/Wy/f+NxXj9V4/pd/9qPvvnP/1VsXU40h59zjVklR5T7Bed1YB4EFhGBlDACoaiRh9M67r/ztv/XX/9u/+Xf/7T//JY78n/hzP/+jH/tpMIm/8eKLHnVSGav1WmB+5Kngkx8c5OORbJpGGOITgAlBWJRjxxlwTjlHmYdIErXW8sUCO4AJstCztk7P7kTtDnKq1e9PpmmRqd/+N78HgN1d74MatQ02uYBtUDZ5k3hhi0btjqosBAgaZ7RFkD5wZSDCACIIIAZAmgYCjCHm2AkLTOOkkgSzpirydD7o+jwKrIFaVAYRRDyllQOIexxirIRSQDnMjAMeBUrXokgpZ02tx5MZDvzBMPzgU9t3z6p7JwsesCjk/X6rZfXewXk+Ec5pQolRhgIgGuUA4pQA6JjO/5v/7c9s9tewAl9/857PoZZEGWm0taYGAEKHrBAIA2eMstJITTzKaUMRoQiWRSmrBlr3n1lFiJF2t3NlY+P8YrqoFtPJgmPqHGAUE4oGS6vcC0cXJwhAypgy1tTai5jf9ZC1QRimixwaARGoarW2PaTYrlI0G+dV0RjgpCEII+xDJZTTTgKzshwPQoopWt9dmmXlwd6oHYegEpjQh1eXRuOmLEG/Nzw5uH9/f5plL//wpz6ZVyab5VKo+3dv/egnvm8xvyfqpp4VNVMnF+8ZdR5TDmNmAKsAQ61rBOiFlhpBUVbl+KLTWeVRfPf4LNflxu7O6HD/6vK1EPtVJR2UYW+paRSFjlAfiHNoyjAOS+kuTi6W+/0snwa+FaqUquKUUuBpACOPN6IQja3S2cXFLUrxwdHecJhA6YRGNPGefPby0d59gtzFuPrMH75bV+6DH1j2A6VsfT49O90/fSh4fBD70ArnxOnJpD4CSYhrNY3itmo0wbxaXAhXJJ1l0ABpqloIpWQniYWYOWCMVUJPGdFrybCoCkw8HLU7Gy05qWaTucoW7U4rz3LLY6xbprpgcdJbG6B6dn50m4NFvxfJhqaTedjvLq1fGZ8crCIfUz/yzPnF8XgymVYmVzrBcexAoypVWEJwWp18z/c8dvDOd6iC9XRiOLFOCtFE7T6FNNfaKQTkgrv29Pg+5U4ZUac187lFWmpXGDQ6nvJwCdTTKkt9yvTcgIjl1bTd6RCHZCqpZLOLCfP4M088fev2r0dxBBqpTCOA01WTzusg4QQimEEvwId7h5vrfZTYpUG7T5YuRnkjKkLERT5XjQjxxfTiWENWS3mlN/S7y+2kmwm0rKrWRuf8bLHRomlaBdH7WGshJQs5cJZSr9SVVW40FsDa81FTVjUGTmltZBAyAp1AFFAOu8gyRhdZAxwIolAKXdYCYdBqe6Hnhx6GDiJCEMFWK6UdYQxBrB0KPJ8zap21SnBKgLMEIOSAczaMPIKwlgoA5LTlnEZRSAkmiBIMMYFx0CpzIa3NYGaNjWMOkEWINVXlE4opsgYVZUERsQ5KpYB12qK60lpZgJBUFmPkMWq1jqK4aurA57UQUDhhnecRiJGQhjPuc9KOQ20MREgJ3ep0G6VCPxZ1VYoqoomuTVWllKC2x6MgundyrpzBElNKndKtJO4teVVZBCEMI8/z8HK3yynVUlsEu9344mxWpqkzZm3YuXJpaanXH2UVjwLOST3LCYZSKS8JIKHWAAeJbAQEjlBmtVNGQ+BCxotGBAiP5lPrBQjjQiptrFN6vsi5h4LQAxiIRmrgrHIoV9Bqo3QYh5wxYTUQVgmdL0pGMAQMaMQg8TF96Pqu7/nGOQBUUwsN4M6l6xyDqig6rY4s02FvcPvo9NnoCgGOQhwwD0MILEAEIwxbcYQxxhgDrQijylhZ1E4Y4FzVqCSIagecsg64Sja9qNUYYpx2EDCMKY0sAo0RQFlCuY9pGEUKOeesAUIpoKWyVkmtEcQaWWkah6hjqEbWWaCdc5wKSlWlKykYxnXdGKmNNVZbAoAzBgUsiQInkbVWG6SqGrn370bTWaHOMuyq5VbiiD25OCG46yXBxu5lLU7LtCzGKcewSJuSjDTAtZwNW0nVLIh122vJ3Un++lfvzfPfeOKpAPu6341PRgrmpEir29O3H9u6/Bu/9K/ajCRJbZvw6na333/o5K03L44P7p+cbHd6XDW88da6/u4P/NDtL7wUNfK1tz6zs70JNROLxknZW046m96Hv/+J//B7v0s6bYawUuaV77z5sz/xZ0pFN7Y33n35aKlFH77cchFdubbdfXxRj0qS1l1J9o7TohItDxxNF9KwZLc7XB/UB/fPcvnc+pO032vqwmLTwR6MK0yydHY22HmsE4k0DeoLu7HzoYu5XerRr3z6d380/OS0Pv7Jv/hD337prWe/Z+vlb7/x9s27w2GwudrJyOTw3skz3/PC+b2jH/nQ7md+9bOqWjz8gY//8eddfTIZDnh7ZTjPK+75gCJhZdM0s5meF/lTVzcvXd65d3f05W+98YWX3yZMdgKwOkjShavmiyc2O2U5vrhzPugHV5Y6O0vB6WyRl817N+8+/diHj+/ORhenkyr76Ad2u167v7X77gGo/HW/s9NbiVcGQYu56d6hVsVLf3AnzcTa9aefeOjJq09feuvzv/u5//BvA0Aef2Q9z2bHN8dU4mxWUoAuXn75b/zIs+XE3Hpvf160XaaNjztXVj1H0mwWQ5NP1CwX15aWb715Uizc5asPn793sLy1fH6WWqWKoqaUUEqCiBkorWqCZOm//Hu/nDqdqQXjSDfvMHZCwhioWlTSC4Zv/vFXbt06GG52T9NMWeVHxKdMV3V6OqMs0DWoGoM9f9iLZdNcnC5mZ3NM6Xk93uuG/dV2zDv337shtApbHEGmpHGN8TDrtCIILOLJ7YPz2Kcdn2Lk2gmXCNy4vbi+0jx+dRlSi53LCvUnpQPr+xxAVIqSEAiAw4hIpdNCMN2AcqK0cRQtdVeP7+4tTkZxJ2r1VrJsjgipm6LbaVdVDaSzylaqUVL3kuHp4nRj9eFpOatcCwZBSX2MqKg0EApC4SyMeSxUjQ0AgFjnKAbc80VTI6jP83sb4WONbrSYJb01ShgkTjc1oqhosoj6AGFdzACBXuCns1mnG3kkNMZAX1oldT3TyISBx5m/WIwxIpVQSdjnhFe1cE56Xmi0tEp5PJynF0FnhTOGHazqLGJUCOGQH0VtZx0n1nFZFTOhq5gCBPDk7EALEfuJEY2VMm4lzllg8k47pARpoOZy/r3P/3SfkXfe+LKsS8OsUIpAZK2WwiReV7sGQZznOafOOucQzbXs8L5RubN5XXECMafxOD2LPF/WFmHVFCb3RV1JYHHQCWfz483Lu4tJGWLXbsfv7r+7urO8ubVy8t5JPAiXd5YNgMCRrfX1OhWMcYeagIfACQC
qjZW+ahxyUMhawyZiXeA4Nopp++QjG3ePzg/nzahsGIW7uytlWlkET88ng5gGPrUIl2WDsNOixqYCVklIm7IhmKytdWmCjMkBkqOLsyc+/vw0n0hjde2yyi2KsCoAQDBKhjffejfAycc++eS//3//m0yFd96+vbXWKxZVvxcn1oaPrSNtZNXEgxbCSgowSxuJ2dojjxWzY9pqz+4f28bpSVHM73XaXVVJCwmSMp8skqDfaq1gRNNmJuegWNSxDw0lWtg6ta2kg+woQHKxmNTpvNfiQezNZ1ma1Q9UELY8B60r8sOjEYPQGddUTcI5oUBrY0UZeAhaPEywlQ75jKwuTSXQvuf34812+857hxBB5kmfwzax5cWJzEQUxo0tPvTkTqMdsoiG/K4+ESKYF1WrFXlOz8psmjcEI59SDQCD1kE8mhQBokhxUdQU+wh7QovSIKZZC7UpUUo3IfKhNsLBzsom9tzO04/fuPuOYDStMlAIhFwcUugRY+0rr927mJXUi8uZGI0z7HvUo1aol9757tMvPHp1KwSqQEAjDDDABnBoHXDCuUaj+vxe/dJLjTG+BdY4ABGEDlqjHMDQQWs0RMA5iCDCAACALAYImyC0EM6Bde1BuHn9Wn2vGp1Mr26HvnG8ntisU+UUVN5ZetYeesahwICiFFl1vrn0lFLVjbNqZbBSLqrFJCWEQguqstFCBQz6Ps6Li51rL6Q1iq9stJ56ob4/aw/70axomjrwqGmUx2nUCxGGclGpwgCLhbUAaIYpdE5JiTGBGEmrAcDEI1pb4oAUBjpjtLIYBp5vjJONUlLVTUYYe5DN8ZkHAJSygQAijAlGECKCMAfIUoAQrGRjU3Ux++qd26xpSoqddkBr7YxragmcE0Z7jHNK/cA33FoIrJKz0Xg6ObNGKaUY8611FrgGWGMswRg6iCB0FmhrIEIAYi8ImrqSQgAECKXOQgQRAAhCBBGihAVBILXUSkIClGqUkg4i44y1BiMCIQDAQQSM1Rgi66wzD1jZAADQ60WzRYkdirxQyubRJ548PB5t7Owc3r21vb2BhCwmJ9tbCSyXAOWs3d3cvEr8RDp3Y/94Z7mHUefsePShjz5fW7RX2keeun5x88bJ22+6WhGf1LmVjQw9tnXlEkXlRXpv6/JW3dQGBgKYD33kh7/x4m/7PmkNOqGHj+6fxYU1UNa1wIBNMXj8+edxx1PVrEjHp3dff/bjP76o83Ja5eOUUs1bASBBtLpbjvKsKKLe4NLzD0ckPnjrXqB0Nr+78fRHeO9KnlWoieLxnXl2LjREcR+aJGwtIXb+zrt3tRi1uTePorzwfCBP94+AdBTryek9rFJpRIQ8p2G5WMTdQZbmkSo/+qErS/2VX/nNF4vbb3x8rfOnn3/ql/7T3ZtjESSBaNyiaIilGIL3XToEoMUQu0ZIqG2UkM9+5g9/+3c/PzQOEOK0lwQEg4QzggjmGDZN/qhf/uxzS7sfjMcLtnenWuRaEwABcdhqnVnoU5IA6jvMNYgXTcCShCJMGNRaI1X0VzrZfJEuFo2wxrQxMYja++fVrWl9ntWKk/Oqfph57Ra3yCFAnWgwRM46bYE1ykJEEYcAQQQBsMAC4CxGzFrodAM14A6CZhLpDiQ0nV/4CDDVVrK0RjijoIHEsxYgZXVdTX2aQAyausKcSGFEkRonCUQiV1UpwzDRznZj+shWhzpHVJYrPctledqkheGMowiUaVM1jnOGrItCzyEolAw9srt+uR8ODs8uZrNJ3RQAWfvAHnUAOGOsgw7CB3JlEGPonDPOYFAl3lCUVTuAqaFxvwUO7v//rCItbTbPXp99l3DW6y0BB7KsdsZSTpzTR8f7reF61F2SdY6Upl6AOI4Tjpyr0grirrOqKvIwwu2YBsBL0zOP4TiAmKLaaA0scJhQwCmjDiMErl3Z6MXo9Gz21s3jcZqutDrzSR75bHuzPez6K92IhK1hdyloezff3cvPxJuv3v++Fx6b1LNwc+Olb33z/sa6kzkgpaomBzcdEMHW5ScP9m5RjmTWDIJes1g0RpyOz9Y3t6mzmATAlseH58PhBoFWlblSqpLCSdRtd1UNAYUtHqazmrWC4fpqnk2bSa2te+zh3fH+aHo6th3SW+05Hq501g4P3xOmwbJwuoxDPwmGVZ0S5jEanZ2Mmrppt+KAtRyzq5t4fHGj26bn0+qPvnErV8X3PLPT7keikBtXdmmLX5wdIyWUM34wyHwEwmUkrEbGjxx2oKwq5GOlG2tIaewityHt5gunBe10h1F352x0n2FfawAdMzVwwB7ev9lMFwFDUS8yCFnCF7rqhoN1f2WSl+p8TzTzPB0DjjxOilx5fqIJPT88MEJI4BGTjCeTSgVXHnl2fHofY3gxr4E1/aXB8Z39wPeJR9742ks+0oUqgzYLkmA+yaKII1BBh3wPGaMn06nQEJqyzIs0rbQNgEDa2tn5SAOOfa/bHTZ2anBJODdaccC8OJYOa8hTqdJcNJoMVrtK38izg+XNxDPRxcXCQRglPFsUlFlodbEQUtKysBfjnKd6Z6c96O0YWZyeHI3PbzurYVNPjhstQVZNnnvuyXIGSOgPg74VmTIwL0FRuF53cHzn7fZ2/CcFNJROF2Ho2/eBeVhq4BACCAiKELIaARaEno082TCCIEDSKOBcyycUoygJrHYWoflstrI2BA5HvlcprYwr0spBjDGLoyiOQueAM4ZTrLSz1FNK+gFhjDhttDaU+QA4BpADyA89CEnAWRj4wIKyqo2zGDsv9BFwiDqMgZLWD5lzCCPsINBKUYJ63S4wthDyYrRAEHNGDMZeyD1KpZI+pYWSnsfzPEcYWwcRItY2nocRdJRghjGw2vcYY4Ra4qwNPK6tpcwTZYExXl1ZrhtVKw0IpYxUjc2KOgqjWsjzURmHvoPUGW21SvxQAwskQBpyQoyxoe9LpeuiiQJOKSKE+AlDEGCGe702ZYwTFELCONbGzvPSNDLyg6LOwzDgjFJOgDPY8QfhIEyZEI0XhMwPAo9M56Mo4XWjAcBSy1IbBj2LMYbOKA0cgBgNB/3RZCKk9Di1ztVKp1WFjVkaDIb9ZLHIPYaxz/jOumnqw737DmDi8c2VgVJVlk+m6dnOyrDSgnqhMxBBGPg+cMoAubW6tsgbVZuQBxBCwrlsXFU37SQ22kpieOg3dSOsHZ1PYOABCDiASssaiEoXMYkCQgPuOQydNoxQR7DUVjV5oxWh1CNMK4MxJNxrhZ3x/EIp2221KiEyIcOI15WBFgQeExbkYm4Q8hk3xhRN7lFOCKLUj8JAOCG0ZFpJ4aCtoAVeyP/kYsSUsGWuT8+qy1e3Gyx9v2UBKUdz0+TQNa1hwJlT0s3LRdTqLC9v7GysVvJtXxuvE11pdy4psCin+6/N1nY3bh3MFoUGmFHmyVKP6qZ97TKt5cr24Cv/8cWPPv7s737p01u7oVQgiNae+8iHv/p7v5EV0mJysj9Jut0iG/E+F7CIvOjey2888sKzYTep6/HDzz7pf/YPywrKWnJOPEb7CdJp3l8LWcwbUY5G1a
OP7ebSer3W+qNrSNW+xx6N/aNbMzUr3vjK3WI9mCyRg+ndpSe7Vy9fbi8NQOBTlvHQHOzfxlz4sJocl0KlexPxlc/v331n8q/+/SsbWxfi4s6HP/zcL/0/f/3pH/9TUcgvPXflN3/915tp9eyPfuILv3fn1muHz33oqfvvvnf02knfpn/0md/YfLTNt5LffOWz3/eX/3Jw8trvf/ZzuUMtj0YY+cRKbbXBVvtXn3i2LKeTs9Hl7fWxGpVAK8zvXNQXeb3T8ruXV25Nj4rT07idXF7vBQyfHB0DJBZVkUn/9hjdPhiFvbbxllauffjw9r3pMexd2iUrQ+O1wphVo5OLgzund/YLC3qb1x578smnnn/29N7t3/vFvzXdf/eZx3Y46tuLs/WeF0N6xgpvKSqF5YzcvXlQFLrfY37gLe0Mtj/45L/5jS9lVb7WZhCYdkx7CcecNs5oq0wiZyofz2sKcaMkoRhhCAmiyBFkKlP98M/82CI9Bi2CaUrBGWz+dUXuxuCJ7P5ZWbTSb9Bf+K+/ncfXRVIXWWWgiiM+GA5gLVf6y7kN9s7OzxanAEkaESudtY4HvNOPJ5P61o3D1fW+F6BiliMPQAeyeYUJoQRr49K0BE5sXl299nAHOIkMqlMttTYOGxJdZPT+eUE6x8ONfn/Qf6ACSslimmJGCUSqcYWuKeFhGIU+GQSdvTt3wiTBxM+aZv3KSlanlrNRPvMJxET7vucQBoRiRCQkmEJOPYEgiNojaWrgp8auJl0Y+rBpCKGE+A4KbUFZ5ZB4AOG6yb0whthDHlOVYoxvLj1hLLIOeFFbKWvKClLstPMTjDXVzoqyCZOW9bhUut3vAashIgRBIWroQOwHZTYHiGtKPNrBxBMgoCw8Onuz1+oph8q8pJQ2dWM46PWWRVk5pwFwQehhjJ2w2KOUcKuFbhoMGo9S6VxTVBAgA0gQcFnWwA98vy1k7jGgZVmWuR94VZFDQAmCr77ytcnJ3euXL9e2iT0vSrzZmFoDFLTAQq0FoQRjXIkipHgpbitdQiy8IM4rCR0H1iZxmyIAGU4X06QdnY5PEAK9ztKsrDgjWTbS1gJhTg8XO9eX5rOUQPn8MzvjuvQow9Kcnx6s7gzqOrOCr7S3ga9n0ylEBipEoS9kyTxMYIdiCpHu9QKGpcYunZW5pnUNxrl4b+9sbdi6sr3aCbyD0QwIKR0SilRCVRTe3D8MeCgqjaHp9wJ/d3A2mhzefn1lee3y9pVbN8/+0S/9xzD256WpZPNz/7u/ssGBA+nf+jt/dXjVA/n021/6Tqe3jQat+WJW16OyVtNMUOrVlg9araK8P1vkg17AkVuM5jz2Lm6/hqpc0j0WOByEUeATPqCA+iGXzjfCGX9r9+r2YnyRN+ZiUm74y2Gn1xu2lWU8DjtBR9SZytUiPcxrIypDGBIWNIUsmsUDFRSpYCwmGPqJFfmMMtqKBul8GmFk61kMdYyCRjU+d2trS+NJXpLucOPZ3vYlF5J8eu/RT1w727/bD93s7IjZHOswCfzJdJS0/eVOfDY5WVpeAg4enLjtQWt3pQ8YA8ZmVWEooQTXpZzn9c76MFpaW0wnmQAYQWEAsJUnXBxyRSDCyBpXiwYRp2SpKIz6XSUbr5e8dnBGbUgtwRp4Ee9ttw7unzlBF3N1ljZBv315Z6tp6uQK6W70n35s68Ybe994Z+80k+uNrxtrtJFCIp9AhJwTShuH0GSs/8k/v3fjfqiQg8BShDBERmuHIQQOAEAJ1VYC4CBwAEKrLGMcQrG5FAA1Pz0dDYe7jvQruOj2oMoOgFOddlKwSDrZGLC0semoi4KWM8YqpxsT9VkrxlFhedzud9cffbo/zcYYuvn57GL/wmmrFLg4Le7cON25fDWMYkdQ0GJV5lPunCVVXrUSvxUzDKqdta1zNJtD6SvTGFtKpawjlDGASmEIJQAgUTWEEOCctUAqaazVDkCAlcUBpdoBSGlMW8pIpbW2BjoHAYTQQogcgBASCAGwQMoGYUwwoAQq3SghFo021hDG27FXVY0wCmKMEESYWOe0UqIuCcGexwyAjbUYI4I9zkOMoVLKAaeNsc4ZAyAAjGCpDCMYE9xtt/MiN1oRDAlGwBhrrIOIc2oN1MpYaJWUECGMHtRtEITQOgOBI4g4+GDkDYMHg53I0Qcc8vedIjAaLaIwrCsxy5tBOypmeXl2cZaW/U6Sn08Sgk+O91Z3V9qDENLw+CJd8VeXh8uh74XJcnp8T2u8s/t4EF6WZTW999299NxMx6HVeiksdT7cupQuChYF0/kpIg3lVCI9no3Wr654Lnr7ndcZAbrMK6NA1xusDRkn9+7t7ezuGs1u3noFw4sqK7PFAkKgVHp68BqHgAdRuLQ+uzhHxjs5zqLuEuv0cVijiKdjOi5rGC1Zigerg/N3vyPtG0G3S5hP4y5RFvkEABwOvWwxQyi+/PCzETZnRwdf/uMvWRZvYI/a+vLVZU5NkVUyryyqnX+EZeNFPqCEEQgJykUzWLX/h7/2UQXF3nf2THbyp671y/R4kqWEcxCyzc7S5OYtALB1xikAkXUaOusURLLRxOjA8wywden+8S/9k89/+fO3vvMOj4ip5ADBduL+zqe2u9FhVs5pZ9BdjueCVLVF1mDfRD2fME81NkhiP2orFyrIIU2ssUUhjFLcutPT46qxHLXSXDESAy/feKxdkIN7++dNLRUEt+bip1c2IJrlVUUpcWnqexHAGgAEMccYGQew+1+H96zVCkBqDFDKWueixMNm2pxl2aJRjVje2LQV2Lu9rx0eLLUR0RpIq01TFpRRQJGDmnJqQSOywkLKYs/pB0xRyD0/SWJn64iLQQs/dZVCBN67d/HyyyMaJOeqkFnaZmhlaRMEiaxEQCEE5vRsceXJaz/+0/9l2KC/9wv/faorgqFxQAlllCEEA+AAsA4hYyzUAFLoIATIIQ5n8/L5Kx99+/abTmuCsf0Thun7VhHG0AGgtKyFqJoSGaiNcw5iDLTSuswAPAh4qAzsJK1wdXP7uSf7azuuJG9//Ui5IlnaPz94xdQFpPCf/S//+sd/7hNW+iurvcsfvDwI+Vuv3GoPl7qd+NWX385zJYR5+bvH0DYRgo89urk96LaTeD4rnbYQs3vH8/6g1/LhnTvvbT/y2MF8oefqi1+4gUqzs8taQXX50vCPv/A7H/++j6yt4GoO33rtAPDB+XgmcRQAl8TecCmaFyViQc9449MD5vd9ZkLfG0Relo0R5heTHLLWvdPzh7tL0B9SkKumQJgyVkFnAQDpLE3H46YETVe88vLNxx99HiGTyZozflEs/F4XiBQAUU0VhNzgynA7Hc+YF7UCEnd8o+tue3h4Pgmj/kOPXF/MZv1hc+P20VdePb67N/tzf+YDS4NE1AujBMN6PDrtDZc8j2fFubWb3MdOpul45idBr91WxhGLSlURhocry6aAWKt2O7IGlqUJwn4Q4cVs1B0sAWOK+bwC2mcUUVNLWacLZdC8TKGzPvARYkpa0LhBe9044MdJpeYOIsZ9AoVGJkRhEvWyEroqj
JIAYbWU8OM759ChpN/d8owp6lookxsbUGNIU6oFLCGk01kl64L71DlFKV7d6gdRmU3nRZ0jlqQT20gBoa5ziaEzQt4Zv/HMzpa1ppSwKArcEOS3o3YwmeQIuTwVWqIXXnhO1Pct1LZu0ryWtTDSsYD0Yp9wzhDyaGARRhzWqGx3u6ejIx4M/LB3+fHH0I1qOs6jiKt5fvmhtQCvpdPxxvalcdE0srYBcbVf5aUXRHF79VkGBGAPVGCUCYOAYZRnaRz5ccKMc+mDU40gz6OMcMyILBudqyjgBOKmwQiA1U6HMeYA8INAaNNtt7WzFtCzcSaMDXzOeMgD6nseJRRi4pTh1At8z0E7X+SUUcoI9XhZFNoa0QiPU8/zKSZRHFptjFbWOtFIDBFlmBGitFFGcsoBBJgg1VhjtDGO+R5jTjaCEmKgxgoygo0GGKEHeEYAoFVuXqaEobq2DDPrXJrmShuMYBhzp63RFgHbasfOuLpsOOPOubKqgQOMUy/kUipjbC2EdRYgEgVJJSfaOT/yrDW9tse41xDoNCGMIWM9iggmTS0RhO0wpARVdS2NQhwTirXUYeD7YQgcBMr6hDhjQj9opEaYcmaEVoQiBYV0ChoAJaCMNEoorQM/wB7xiM+Mn5blvKoJ8WSlp4uCYIJQOBqde16joRl2enGEpDYYwlzkGmhTOyHMKJsDBKBByBntHPNJEvmTcVOnOaJ0e2v58qX+F7/6qtVyenqwur68tr7CuZfEvO95ZTbH2MRR7FHIvaiqRZrW1lnPY9ZZCrEVgkHsxTHFtJYNsTAkNIhgWjQJ4URD6vk8odM8xdj3OAIYa0hzbShEzgKoIXDUOmc0opBywgBEhGOPkCzPtVoYiyjiWuumVta4LFNKGaAhMgYCNOz2sqbCCDBCSCsxVmsNHUTSaOhAJsr1dkwwNkYRhIx9/2LUStid904ojxYGfeOt/Q89fOnsNBsMBvOLUXo+FUavLMXPPLH2xlvfvXr58q2jaej6+8cF4gzCvGlyYlkSBK6GQdBfWX/09O7Xdi9tnh/s94PgtKj9mKxsb9bzSY7qRx9fC1tEoEXrWq9ZoO++ff/3X6wu7zxSVbK/NLw7TUUmHn/yoaVB+OrnX+n58ealnbe+ffv6I5u8YMPg0uPXnn5j7yDuE+mDN/dnu485S8Tx5Bx32+mJNmFndXftYHHBE6ywZgmfNBWU0r9MEzT81A9e3zudAG63h978brZ8ba1K5838fDjQpxfHZ+Mx31lbjOGX/+AkO79gEl1bH9Ja/aVP/ejf+5//2uZD7VvF6NFnHn7u2U/sfefF97795R/7009+48WDjc7jP/3n/uztuy93VuzStfPRxVklyzlA2AJM/fNbR0f3R8uwMF5YU99HbHR/0VvzfcY6S2vd7k7eoNPpyfM/+5NtePqVt96rESUUAGtQN7EJHmP0K7/35WUIsE+qSjiDVJq+8L3PjN85fmT4RFZWZLc9ruEHPvTDJoLLw0ej5a0AJV7cN05UN9/ef+WbUWvQWn68s75x/UMfGrY6b3zj8y995tdWPHv98k4vCg/u3TNFvrUc9ZfCeZrV0zlKIhfEN6bzw8PRBz+2u78/og0RZxfbK8vDlfW3374FJGgxwjU4vD9f+fBTn/qhH/uH/+ifRoQcj3Laio1F1mlgnVSKUCuNLkt1+emPM/o1gita/mG5/wd+C/iDoQGr5MryEDw5uNb90Pej339t1mhNMSaIAchPzher7d5g4yHQiB94/NJ0PgVGvfat15LEj7f8xXieL6owwFWtMURGmUprKhGwCnJsnSYYe5wXlcQQF1Ux7FkK9fL60vi4WUzNLFOa4pOCPzxYO7+4iNojx4P3a5jO+mEglPYinyLiGrG83PcgOz25Nz3OjTJrfa5sbQnxQmYMYZDNCwl9zKOgyetmUSBCtFNNUYRhCIBOm9zrdqr5vNMfBBbrvOQQImcQdK0ormprbUMgDHhnWp35UYQgqZUSQrGoL+vKqIwyYEUDAx8hrJ0MeeIl/cXkgiAICeI+19YiaYASkHAESNkUhBBHkRYqM3PPCyH3dGMKUbUT7ORFrfBSq22BZV7LKqSahnEfIpguZmEQSGnjVqh0boz2GM/q3EMImIZznC1m7bCHLGiMoQQqKRCJkZVFnkNl6jLlpBN74bRIIdK9pZaq8fTknU6PXNp++uYbdzkjxsAM5dAgRrymnnBOhZTOakdAEnaktkJaZBEBEWHE40bmddNUjhFEvTrPh8OWlrol/aoUd/befuF7PjU/VnWVYYTLuphlpXWtRZafjeery6SrXTleXLl2pZQo6a1gpyD05tNJUTTM86y2Pg+KUhJOKQ9b8eDk4iLgbGWjo60p83IYhZNpEyx1F2V2Mily5+ZKf+8zjw42lvdOzm/fPwuw6wx64/HC9zDw6LWN4fnpqKmqJPJsLV783NevXNm98uHnfu2zf/TOvXEYtbTQZZrNhSgI7K8N//Cb937mp3/olW987rFnPnacj/7m//D3//Hf/KtLycF0fIdxr5JKl9V5URBgsfNDf+i1nNMiGrTOZ5PIY3GnFcahqJHvc1mf56JIkgg0wpTHIBN331msbW8IqB7/0JPjcR22e0VRO0fYgJIOmd9fpHkT9sM4bF0cX4SMha3w4CDfWFp+oILhcr9Om91LK2HHv3/zVNaFVDVH4OGHn/Tw5Rvf/rquhRf6tWpOxwtpdG+p3X5oJ1rfOMirJx/9wbdeep0Fm3l2QlDQ5JNUFkES+FTYKjs7nFLKRwf3alFFHmMGGQWEUsSjxOt4yaASVXct7Cjdow0GVavjAxzkTbN0qTsfnckanJZV0m1DS2Z56oRCngsjXAspcimsQu0wjpLE9zwU3r97wEO4KMrxpISYn53OvSBmEde2DgO4fn2F+v7oZLKynHgUf+1rb4Nmp91eJmnTamNgBYKAQlmL5vzCfPYzt771baes54B21iGMjUWUBRA456xzBoD3vRNtDIaIMoohgByt7baTIXbuChAtmUmKeL8bTw6O37px87nwieXV6O7BDWkynuxe2758OroPYNVuE49hHJxcergfLg+Rtz1Z2LXVh8JKTGeHPsge3qK6zKqLk5Yn94+L9V0SOwLSVCC7Olh19q2w02r3+wjqrY0OxSafTqmVa91ANep8MkUMOUgi7uVZSQgywAEAICPG2kYKizBh2ClIwAOUWlVLFHoeR9DnAQt60oB5OhWNsFozzKQxwBmIKaXUGQMQxBRrrUTTOGsthABACDBQpnESGMAINgAZYyjCCCOrlFC1VFAaDSG2FlGGKMGUUCWlx5lSD0ZWnANIG6mVBhAB6BCC82xulLbWAOiAtgghCwwEDj1AIiGndeOc5J4HAK7rxgEDIMIQamsdgM46AJzDGEAHMXjQRTPaEoIeqODS1UvFNPe64bUPX/vGH30rOwObl3YnB+fzi/Lalc2jo9Ph0kaxaEyDWMjSs6MAY1cVve3d8dnxbHR+550bzzwDXn/p29tXHnt2a2s+enO4RLVmzEcfuH7pxq1zBk0c+kXRtKKwsu7gbPbEcz++
tPshXc2P33mxe3nl9PAQOCwb63nePBdr154RMMReZ2dtON2/W1fi9GzaXg1Ozi+Akc899zRIsKVJFyWhx+rDPUNle3uV8VBjj3utsRwZbpXXMciIakpCF4dwejqySmpAp6NzgHwNIqkFByjqtJv8vLOzWe2flOkcYjbsenuHWRT7SacTdfu6yuxUI+JZ4KzTUjU4DgpjnDaM2SAJe1urVpqHnhx22+7Fd86+c1oWwi+V8j2/EAZA5KC1FkAIIYIWaKEsRqiuS6BBkrTyvHr95e9waJ1QwFkhjWrEq985+sTHkwgr5Rb9vhZNcHSI5nPtcuiHibHYaFdWjQMV9z3bTJyRmBJGGQ46927c/9I37nzyUz8bQUCGdL5oiFvM8jljbqXtE6PfHReFwq+/dfHJ718CNG2kiGmMqC+ls8A5AyhBzjkA3p9FhuDB8o3DEHHGKXLcI0BLXVVWOyHQW6+fHi8Ox7V0ceuHV64yeZznI4Cgbuqet66dg8hh4qnKRK1OWTtIuhQDB6uEGqWdFBJD4HHS7rSUJlHMwpjHNPzuXX08nv7Tz/z6TrLmhHr38N7//b/5H7VBFLgOhWo+/5f/7B847awHTGbrurFGIwgBxdY6ay3CCADHGCGEYowgYThkaLD9f/nbf+3f/+IvOGLyvCAEZ2nzn1lFzhklNWHYWY0hhhhg4BDGCBMPIyEtdMJa9PN/6VN3Xnn37sxsLT9MhztPPfX493yMfPY//M43Pv9W7AezPNVa/JN//Y83L10OrJZF8+bL92VeOqfv7ufWCmssIRATEFPw1PO7vE6XOxR67Yu5uD8ph8yLOzE2wCr1ocevvOOM0tnu1VUg8N4be8ezUXdpuXrjdO/21Crvq6/ufR/cmtyfHR5N1na7tqy6gz5taoagNRBaCKyxtqnqE6GRZZgAOlhtnx3f7w53FRBhSD/2vU8cH16c3J8MWshIlenK8zAAwAKCfJcEOLtZ3Ns77Q92sdeCuOoNAqlQzBItMxpRghrieel4BpwCwC4NO9MiT/qdfJ5TRRD2I0ybsoiiltMOR7qpm7PxfDpv/sW/e/EjTy09+cwKmwvDLVZOFLLM63bkneztbV5KsnltFMQUjWaprJXvMyelg8aRBjrvbDz2lpajdsfE0SyT1oIgiI/OTwGAotDYb2MUApFi6re6wSJdJCQQujS4joIeckSTjkVQAHPn1t31tc20LNu9EHhoMW6iVruWot3qQ1REPgl5b16m25sbg25vnC7Wdrp33r6FcPjIcx8KeCCluDjZ0660CDTq1Gt589kkYMCnBBk7H02LMjMGUI+3k2he1Q5ApH1iqbCuF/Q9zz8djTvtDaOcMq4uawH0fJGXBVpMKgi434Jno/thQq0uZCmAchA6a0mjMdAAWDHo9Qn2suzIb1lRni11uqAayQqGq3Gv39JNskjT2cKNbplbt+5NLi7+7A8sbWxu5KknWeh1OivLnbP7b2OKu9srx5PiT5r5IQ0owQhzoqWRSvc77ahlIUdFmTd5pupaZdrD+KHVJQAMhqxphIMwTsKqFNJpC1AjVZZLBSwmACMCrGQs8DhLEh9BZLSjGPt+IJQWSmtjEMJWu6Y2QjV1I6Mg8AlLIh4HUVk1ShoMTOBxgmFum1bUjjyvqEoEUej5lFNtjdWOIlY3NecUIuCAwwQVecYZJQyHYViWNaaUQaSUVKLEkGmlDYKdTjKb5QwTrRVwQGlQ1cooYw1AGNq8CD3OPQYh1MpSSq0FCBMIkbW6zpumNsooA93x6CIOeamEqBqMcNvnqawRhu0wdhDleVlVAiBAA7o6WJnOJpWQAEMKadyKnHNTOSMA11ISRCnGQFtCsedzyogFgHGkjAHWYUx86jtjq6KuAcQUd9rt0cU45B5EDgEXeB6CkAF4dnoaMK6kc8DFvo8g9LgPHZRaGev8MOAxQyE/3TvvdbtbnZU8a27dOUxCP8umPg9nJNXa+D6XtbDAlFK3WgmELvBJFATYoe1LG2+/9V3see2WBwBElKSNLGdjaEDZ+KHHvQhDA6izrVaCIJynmTYaGKM1SOuFlFUYeq3IN9ZhjzSNAE4T6zzGAy9wyjpjnLPQwUar0jatKKYIWGOgs9RjaZ6nyvo+gxQQTCkN6izFzA8hKaoSKBOEHqOoqgVBtM0DrURT1oTgbhJBC61DwEoNeMgCB7QyxqOUM6qVfP9IwD5C3vlFLRmI4/DsohGNmU7HSlqeBJg6Q+3Ne/frprRGb/cH0IuPTkeqaTSQhHCtKljKlWV/Zau9sbKGz1rdliYtm9ZH21fijU1Wz09m41lrM1xZCy/euffsQ5fzg1E2l2sBe/zRDdy0ynxSjCdDB0qLZneOpu8JYOjalc1X37ixsuU998L1s5dPEjh79undN97a6w1ZiarDs/OXv/YlaZU3aPdiUkg3X5jvvnERbvtRKyiqmceQQzKrGhIAZHFW54ppioEQIunj6eimETqbzxbzuTbOyvjT/+T4+L3GFbTv4Ss7ba+UoaFcul/7O//0qUtbKzvR4w9frr/1mV6+6Mhq+u7RY8OtbUmSDTL+7q3vfu5blHU6Hgl7vTppP/tTP/TaH3/D4NuPfuDqu18r5xUaHaaFx9qxLwWonN2+vDEbTRQAHoO//OlfgfNF4MdNpYl0gVbd4fDGe+8srS3VRTMtm+2VqL0xYBAOV9cOF+bJD//A3dfu3r9zW4T4h3/8J7/94ndbS0vhsEtxQFSZ3z64/e5LvlFRHITLOxVav/TIswHWd/7w02//0X/80NOXA8ZGJwtXFLbMFffujctytgg81vc5XRtOFpXxvN6llTcOzn0DY8RANrsUufPDs1CZIKRrS4EqDG8tTyTSx6dZ0/gR29qKZqkOPCINcApYa0Wpgpi0kt43vvgff/ITr2vwIsDjcKuF4587G3l1ecmjBiRdCvOf/Ss/8Pt/9Ze8kAc8bBq8vLKcLmbZ5Oxz/+FXaNsPfAQg4NzzgZmcFkHoNUIZaPJFZbSh1A5W2+gW18ZaUO9sb+VZNhsVAfcwhBQhDtBSy1+kzcVsRinvtX1EeMPR3ZPxpZPxI1c2i3rumpX3y8iV8gIWB9xJ8yDQN5uMqnzeiiIchPOiHk2yVqvn+8HF+QxYiGVGkSeUKQQAkGgliTPWuTAMqqIosswPsMCFR31VNwARzyPYWYKQAqBSoFKukTVnqC6PGA+d1oiwgPnWQs9vcwTKxQwzHrSTLCui0LX7/fl0cXeRXd+4IqpbxipKCcFESQOsFarykE+pD5Hxfb9xSqraQiCFRNZwjhwQ1tZKIS9MsrKGmALrtDFegKnvSyUApR5ntaoJcHVTBUEroi0HUFEXWpHAaxsrpahqVcGwSxjLq5QCGAYRBtRhlE/PKqjDJOy02/sHtzZ3nhy9e2d5a3hRZK1ePJ3MptO0220ZiZSQjW6M0844KTQwRlsBGIPGhn6rk/RKMdFaSaWlNJT6aS6qWjKMgbFONRFDQad7sPcGQnp8Me+2e17o9zjxIkQ1Xkjx3NrK7CI9Oz25sjZ9/PGtmjPumLZQC2mgpSSQRhKEIu5
ZLUPqNUWW+C1scZ4WnBHepqvDQeCxw4t0qdu9T+C8EHePMwPvbreD7ZVuNc9HOkVGLPf85X5XG9wY44Vx4NGz43mvHzf14vW33/3Ma28fnS8o96UWfkgtMthHULCLOfr0v/pitad/6IVn/KT/7tmEBOvfuHX6eJLjpl691IU02D9fbG9tzyeniHqNNkU+i2NWLM667eUrO5fu3d0L/LbV4nDv3TjsUJg0C5Xee1NcfL07WF5aenxx8PLK1sNycs5rjKCLPHJxvF83F63WwGdg/YWH3vnuG8ToOMLISdHoXjsu38fWAWs1Y4Qg3O+3/evb85OzptTOAk5Ut9fZubqmyiLLJLDG6abb63V6rGjm3G722j0h6Qee+eDszr23vvjdXoAUhd1hi1AsdFMsFoRGSDrRCOsMJW7YbZ+NFnmpfYdqU80WZWdpKR/nzCf9llfN5uvX13tLS+PxIi1GMVUI+gpx1m6busFaGURRgL3QAI3zNA09z7PCA8H9g/PJtBK1RNjxBFGOR6fjfNYsbayuXxrYbNpL4qubvTRVi7ru9Gm/Re+e5K++dXS0t//IDrl+rb02pIz5eVZ985Xzr34nO7wAjWpDgqC1GDupFICQYIIhNNY5ax1wBGJjNYDIQgggANDmVdrtrQ5iirKogW2lHPHYwf7o7sECB0u1SA5vjBKaXNpZpUydj16/e3R8fesqDYKz7LzN1jDCWJJeFMQdO4zBU9evxP0rp+fTw/vjTr+3OLn1zc999qgRX/7ii48/9kjc7QRtj3rrPxH8RKohDmMNFre/+e2AoEGvo0iZpkUceVtLu7eOjmWl2kGw3hoA7WoAjsapMBpAQyA0CCtpCEAaGOsAJkhLbbSyGC/SeWAl5Tz0mFYKQoIgJgAaax102lhnHcIIQGysdhBoAIw1HmMEIefcg0oYlK6RGiNgrFHWOmupg+1WSysJEGx1+nk6F0IpZYAzDjhrnHPwwYeoMRoCB4CTwjgHIIJGG4yRtQ5AgChC9gFJBkltrNOEMgicEBWEmHnEGKeUhAhBiBxwD7bOjLEQOoQQBAAiQDnFf2IVdVoRVC69mOxXM6Z1i7mt9UExWexe2r5352aRNWtbnY2VpdPDmTB2d3eVEVxmp9/4o1e7rYg6Omh3rTZXdoYqn9x949vEa9b625WhRZbt3TpJfNbaaDPP78ddYGzWuI/+1A9Fwco3v/btzY5SxdjrdT3qz0bzpfU1LVS6KBd1/sxHXrjx8ivjm+8+8uy1+4fj4doK9sT65pqTdjZa8AYYYWSlJYHFIgVKzbCFkLJuH69zv20dRQRpzmjYT/Iyn86y3WeeufHqDQLSZBAfHk4IpdhVZT7Kp4fY2uHO1k98/AfLHB/s37z2xJUbb9/EeLh/Zk5uv/7E7vpahwcA7F67PJkfEIeIn0CfQ1ubxdTlohV5xaJazE6eetx78snrv/2Nky9+Z15NTj3CZ2XDGIHOWKMQxgAC52BjLFSGc444qpXAFDGgnbF51vicaowNDV45KPHX1EeF7W87Fua6X+ZNMAeQhbHSgtJEKg2kFLBygFVFJgvOgsAQf33rytnFOfSX7t69f2kp7LUGpUuTpQ4huBmnDRPELnoMChTcO2lMiWIEbcystVJp43hRCYa1NVWctLRx1hiCibUOAPxgcRBSjoF1RgItKXW95RCdaW2pp6BT6ng8/vJLL37i0S0pq7jToxZBAJ2Sa6vrZ0djYAmLWSVrW2epqP2kbYzChFI/gM4g6IR0lPhNo+Kk9am/+Oinmv5/+88+9+6X3prG+3duf+dbb77ZYyiIgl4SwEoLDhcYNQ5XRYUJgMBQRowxwDqEEITOAQud4xg57ZI4+cGf+om79/YXaf7l3/5HFEwIJcZnyhgjzH9mFSltoXEYI0KJ1VorQyihxBdSQWcxhkpZZfJ333rj6qW1P/df//izf+bH3zqZ1Aac358dX4w6/S0GcilTUbmv/dFXKLULaghlF+OKYQiwkcYS6pY6ZG2rc2WzPVwKkddstIeTs/pocfH8x5++VSjaYMzZlYcv7d258cqr7+ysrk7TBpaylOlf+PkXXv/KS+02TrqtK5dWi7paaJIX2ha4h5Nr/TaShlgBrabYKxeN1lA1dbfdiRMeR9vj/aM6l3v3jz3/ujPRxjZ5641Xrj/36LDF9vbutx7b6Q2iYpxHhGIjpRLM02MhDo7mH7z8QURaRjfjo7Mh3vC8xCBjIUeGQ8AI0laL4fJlitDoPMeOMrrhh+dNOTua3Wp1Vlv+EvQS6p+AdPHBDz56786+0Hr/4PQ7r5/fujtb6rM/9cnnPT8sL/Ik6o2K2d7+3rCztbre2rs7ZTgWOlKykkaXWe1RaNTMD0IGsagN5UqSKvC4qIpGGWqoj7yFqSO/2x1s5vMDSkJCKSK0rKey1NlkFCxRQsOsLnEYcrz+yPru+vWV2/v3yrxAULY7y/miBMYuqgqHJhdVTD2KWrUEzoWJh3pxdBr47XBNG4RAQMLu+vXBcj8+3rt/deeRRaFO8EUr8DyKfQITDwY81cA4BBezOWGLYX+Z2AS4IK0gCtuIlqtouyxrgk3A2TxHIlOzRTWtyGTePPnkU5e341fv3H5oZzOmcebqRjQAYT/0s7ROVWWwT7oDZuAWuyTNKJ0djU1Dh12pOMocD+PhpWuvffZb77w5t771XBLFG9vrzyI2KufTykir63010khYBbuxT/H7ETvqM8a9JsvDMJBARAwTjMuqAoQH1lUaYL9tjSYMYYKlkUpq7nkAA0wxi3jTgFle5k3NwsAPvKooOQkjv5P4kdbaOGiUhRBaB4yzADilZOD7cRwUaUEIbhrZ6S0D55AzAaWNEBgCz2fAGgfcg861MU4qhSACxjlnLEQQAqO0QdpBYwxAzlrnHDSYOERhPS+ddj5lyJqqyv3Qi8PoYrQwyhoDHa6chWUtCEEQAmeBUY5QYrHTxtWVodA1TeURJpW20EKIFlmNELZWQwiCgBEcnqYLaUwjoNbIYQshVBrIWjPCGqUCPwAQUUAQQhWw43xOkEHYOgsRdFY1EIIoDBgktZEIuU7sGyGNc1ILSpkWikLIKC/qGhmnnEAIQAKhsVC76WzEGLbWIIAcNEkUFmVd1pXfjiPGRF0p0bS7HYRwnksIASSUQkSo5wBB0FIKI0Z7/QFdxk3eVFJgDE/G483loXXO93kN3enRbFHkTdNoqbJ5fXaRYYzun546YSPPDXptykmWK62MLIXvB5jHaZnVDnAKKaC6aDCylVDOAO5z7aTBoKidLBrOCUbIygYj3Ot3D0dTTim1QBsLjItaidVaKgOxJg4aKxjHdVMuisIapx2psfWotdZm43MGMQ+Cuihjn/UGrUVRri8PhdJnZzOLnJLaGOt7jECsVSOB7Idd6VzgB4t0hiGtywYjhND7ex9Rh37kk4//0W+/HhN0fjSKrV0edoPYbwpACDTSLAcJAm7tyhOoXDCkSrmYzRaex4OQ9PvdPJuWWVWl8PjGSKv7w9bluze/W87TzScGl5/ZKo/Lpf4mDX3Qrq++sP3l+18T84Zhf7
CRvPed/eO9s9UV+sgHti/u7U/2zmUju8stx8nNG/cWN9/TRKhZdXJj1tm8/K2XXlp/ajeO2QrD2qMlB+nxePvKSpbn4jBdDoP8IH9j+s7uY8Pgo3FvPY767enFNCIAcTMfZ8UMaYQBMGYMkrDlrMRWtLs6Gfh33hqfvW3MW5pfGESbLiH5/fmobCrhur14MlVf2Lv5V//K9/7hr/3uoMt3r103Hnn1tcPnn3+ku736hT/6/drc+9H/4rFf++XvPP3h54IweeONvbe+8kVY5BsbTOij7We87WuDk8Nycn/W7bQA1dn4fDQeZZNpo8KTNK/Tpp0kdW6QQ6Bpfu4nP/LRn/ovPv0v/tnrb90cJIFo6ojS2US0kDk3aqqHm1s7qnyr1Yoir/XqN260PC+9qJY7u/58enHybrp/1KLs4Re+v2wNM00/8OiT1NNf/l/+J77It5faZVZbZtL5eGfr8bWteLh1/eUvfNannEC1sda1m6urz/SOX7/53ktvr18fPrS9McrE73/1TU08Cw3CFgHgNAYN6O1uP/aD3zduJn/xf/+n/8W//p2L00lZsLgXYkxLaThmjkCGoASmfPe1t7PfevbndwFYqbwEuGuMbgRRx6OpBfewuDg/fptXowrSLKcItvf3jsMubnUD4EOLUUhpU5V1nlsNjdKMkrWVtYOjs06PadvMDo/MfE4NxNQt9YP/29/4mS/88WeFGGgcfeHVm3EYJAlvnNFM1Ej121FCqKciP4zv76eHdw6XY9hptRh8/w8NUzSf534Y9KK2RHWTl5BAH8echaVresOgG7Sm02mwzoNOYg2smnR9dW2xmIhaKqUij0AMkQOEO1DDqB0jqOuyVoiGAYdWYgoD3zPG1KVcVLkRjjFPKmFUiQAz2sGAGylaoV8U5wQACyGEBAEXJyyIPQyx0c2/+PV/93/9r/5m6FEAtWyktQ5AyH1fGu0wdkBba62BkOCqKFkQccaAUUaoujRGVwjhLM05jyENhNWd/qqU4zSdQaOkzMI4cg5ggALOfQKEkc5qyj0gnXWiaeYx68rairKANPCTllHKQFcWcwZM0orHkyM5bxAAKys7KEjWrrT9MAigEnU+mabtTkIoqeusVLUfJFYpzijGGBhUZ2XECPM9pWRRTjB1tRSYOI8CiCTiZJHJOkeyqqzTg6VkPpt1OZQOr672lTQ89qhGZVN/8Wvv7ly59sxzTx3v3c+LizCiCCEHqcyLumq8yI+572NkoBvPJkncs8A5ULd7/VLTmHnT83G3EwKEDt+5FyKva2SnFz2+fenN7+7dGdX3Dy6O983T15d7/UgbADC2qrEaPPzwdRa37t05GK512Hh+9bEV0bzVYF3dTf0gKKsKAeCgUbIpufd//h/+p5kHf+vTv//ti3uv/fMv/eq/+9Vf+/Xflrr4w2++8shf+P7LK7ColVBybdihwERe6Cg0ZV3VaPPhp3VZz8bVIR0HcT8vZtTBOI651/KwaSZ32ugkhSo9ma72ZTEetdqXvRZErlhcTBqGR1X+fR/6qdsvv15X47wOPe8qqJvF6DSIUS2a5bUtyt73ilrtEMO4EMoqEQbDaD289fp7W1eut5NBVWatYe/4vSkPOEcoTvi8mtccPPY9z0wz2Io7Dpj84k4+eXdtNaYq04q4KsMUI1kOAmIIkLJBSAulCeRpNRFGF7XSgBRasFbn7P9L1Z9+bX7dZ53onvdvvuf7maeaB5Wk0mhZtmwrdpw4dpzJgQQIJMw0vaBhNQu6VwPnsLo5zeFAh4ZA0tAdCBASkmAnHuJ4UjxI1ixVqVRz1VPP/Nzz/Zv33C+krD78A/vdtfbe1/dzfa9RwRFzx3kOQLfXINbIumjHvJ+0MmzzVFQaWAskYBRbbbVPo6aPxvOxTxHBvJrVk4PdeVbXCswqxQIoKgsB4HG8urUYJiEl+amH12RaX31rO5tN1zeTM2dObq1Er749KCfqynZ667pJvjv3mKuk1RqWFSlFaJEDUBsrjLEOQIywBdBYQzCxxgIIgHMIQuccYRhBhCEMOA7CKGr4R3f3nN7QzCeMdNrt2fbBcJr5njetiw8++8j9LG16dRKIwuITruNqU5rDYn448xY5hieXLg3TvaWF863YzQ+/zWx7hbRbPb63fYVk48/9yA+8+PK79Wg6PRjFYassRVWaXtJaaiS7x/Pb9ybDo5o6ZZMC1MJBUGk5q9JeEhVEGmMBlBiAOGBnNjseBpXQ40zNazWZV9ZYKS1ECGMEfe4IUhBqi+a1IKJCEBGMLIJ1LR2E0EECCUIQQmy0k0BACAhlGDsDAEbIaAMdsMiWSjjnMIHOOmsdhghiAizM0ww5CBE6zo98LwgCX+ha1FIbCQGimEIEjNEOGOccghAhaLR5j0ixznoet84iCCDGzgIAIcaIES60hhYiTIQQfsAIYMABpSVCGDjg3kOVIICEeAHPilxWxqeM8T9qQNsexAwlfmSs84guCjsaDAMPvXb1jfXNrUrPJlldmQm0any4s3Xq3COXLty/PwMA9rvezs17raXk7t5OY30hai4ubq45UFsYK4QXFjeUzhc3lzCExLWzIi/y6fD+sLew6a3p1c6st9AO8cZkfMA9r7e2wBiutWh32jev3buGv7iy1Ia5NykPG0vx9vVxi3CRGwhckUkamKCFZ6MjGgYr683hcDCdKFDU4Xz34N1X2u2OrGpH8HatVleWTa10qfdvv50XeQSyyXRS5WkQhTFzXsBns3pc2vGtO54mHNCNrnz63NpW3/MWH06FNys/NT0YvvuHv73iy167J1TNA8pYRJJA1ibpNGqhm6te3FW3rt8FCC718A89uoiySkH/9QdiWgrrsLMQI+qAA8YY6ACAjBGlFaNUS2Ostc4abYAxFiCJ0EjDbw/pu2P33avlJ57oPHmZ9U4gvmDuwnRcVZMpbrDNMGo4RqABSlaYII/x8XiKcD4yqJ4V87FYuxyIw7GqEKuq4rhkPOgtNWMfFfNJkNeVBq/vuv/w5Z0f+/BC0jWIOgeFQ9pn2mqCITQ1AAg4bSzE1jhjLASMIOsQJwAaNZfSWkSsVVaVlDHPuQ5Bc6tkVuzuHWEkkqQR+n6tFLZwNiqldFbUcSNBriqzgTZFsxmigCkF/bhJKBblHBLeanUkyH0KcSP8zf/0O2Y+/f3f/PWNXn+xCU6EAW9zCNiHP/psJ6bX797/3qvv1hoOyhxqhRG0FlgLgLUAvf+e8DwMIQSUP/Psc4uNzZfvfi2E0CCACOOMF4VwxgWB919ZRYQAxKh1VgqDIIAAKe2sqwFARinGGXDQSnv3zoPPfvpH3rwy+Ob2l5oXWvfeHX//v3z3wplHVLp+8M41A1itFNWmv9wUuizzmkO53sFe4j7xw5c9Bnodyn00OxxLWA/HZW5VXlaVsMP94w8+fPHay3fffvPuD3/y0XbcJKwhPL8RBQvYu3df39neEZheubrbX5wtL3Zu3Jb7R/nbtewYsba2IKGztfELhYxNVdlbXKOWpkWplJnm2iHFAp9YNJGmGXZ3jnYyFS5vnitzUVZFw+Oj47HjG
GIURkzJ2c27b21tLueHxeba5cnQ5GI/TGiUNMvCASM6zbAonKigA56zDUJ7k0IiWbd4i7ggnxqHIfA8ghH0KKDI8xjoNdoLga0FRTCdzUNGD3aOR1NxONc7/+71h04vnDgVXn703KW+eO2FlzKJ5FHu0+ZkV9+6vtfsRo0u1cqRwO8udPcPdxcW+pyFlhHnAEe4Vup4cLi6vFRlSjqUpjMccEgxIng6n3gBZazjeaGlSfYeVwrcmbPLfXzxl/9ffxcF+rFnPw0Jwgh5Aectp1Q5yg7XllazaR1Q3xE/bvTbDbyze2vn/p6ROFhoJVETmgLLqjLV3my7nM94s91I+mtnHiEsykrrsrHMM8oOCSwRrTAmke3UwkqlHSB1rZytkgQGJN4bzy2CCBkt3FTUElIadXM7ffTS8nTvjZMnFru9FY46GxfP37/zapXuGZmGsXVS4bCtzZSAgDBuVQQBzcv5vB41Oi2lCsJDSOPlrRO3HsjmwvLB3mBjtTGqjqi1fjNChUN1nefDxtrKyubTL/3hr+fF+1QRxA4j3Egae7sPmo0YU+SA8zxqrKER63aWi0JMp5mEshEHHETQOmdBWYt5JbVDs0pBhikMAKLaAB7EVSYjwiGCYeBppSDDDLOiqp0DGAHu+Zx7AIFOv+UAAGnOGNXatBpB7AeHw6nHMCJYK1PXUigNLciLnLKEcU8JbZxWSlZ16VEGAGSMyrrWxtSl4D6zzk5maV0pBAhEzjhHGEUYV1ohhhFFlEAMkIYGUwwJ9HxWVwpBRxkNWDxNZ9rpeVZSDAx1tVKAIIapgaCsci/wKSFaakxQp+GXAte1wBw2G0E2rxAinaSRFkUlFcKi2YiU0BBBkc+B0ojgVqMp6hpaA6BzzkWezzEzRcoIAdYijCjCtdaVVFIpCizElGJIGKu1Spoh9YO6KI6PjjvdBqc4yxXCCNOAEOrMXEppEbaiZhizIKhrpS0AwCFMGt1GwBlQ1kDge2zxkRZy0FgojVxZ69y68aBUur/Um+alc262dzyd5rtHYyUUC7DUAGNojMQGHh6mW8sLLKaYoziJ0nHuMbbSah1NZ9pK32MQ2nbSkHlNrDHOeT5HCO8PDruNpNlItIPQgVwo7Ozm0sJgNBpPU8S4xnhSC9/jzprUKA8S7QxApKjLqiwhMhY4URvMeCprp20Xd5B12jota2kd9zxhzWFWWUjfun8glaKIIEx44GFKtNGFqC+eWJvMx1YjJ02R50ZqRJHncefcH+XPwPFxGcem3/OZz9KqoCFiDK70ehOcbt8/+Ikf+bk3vnNz/czZXr91760vHo/3aTcIYoaRswjmVRmFgSyhTwOPhaKA2+NJd+F0s9GfZ8d7d9xq50nmNaE+braHN1598+79g8ee/YGXvvpqWdjF9YVaqW9/7YXzZ5dRBQ8PR4UzMLYC2lpJXMOnP/D093/t66+O3vjpn/94XW9fuz149GNPB2b3W9/8vvLA2nMXd+YHH/nw0273ynhUWMSwJVdefOfSpY/s7QzuvTDwvLx7qusv2VIrTG1vuTu4ehvxFmxEraZ399pNYa3nb/zh13Z339HVDHEiWw36yMUtzqMvfOPNaYWOhfKN7fXZV69ce/bZJwmDKm5ceP4iPHnknfhkdeqJh3/+lP9uTOrDT37yk8N0evfd22Kc33g9vfTwqdu796FCJy900sImC+2c4qNp7um02YelGi6uLLXi02/99u9lVMXALLajvATP/OCjyUr/zp39Tz//E9/6rb/15LNxeKrRWYjPrPavv3k3Ove0rE99/Q+vm+Gg3fOCZtDePHfz3r0f+tTH77/9jcPrhzeP7/ypn/6L97fTq2Lh3kH7I8+eU/X0rf/wj3h2vHHhvCulMgI63FlsZnVqbDkc32g08SzDGxeXGfFfO7JPPHrpCb54/42bMQt3B/L+vMwxUwwRQjwgs1zOxzLh8VM/9IOu0wymunvuzCc++sQnfjB47rM/8XOf+fPSAuyQtdpZrSkjgTfNJ4/+qb8+A4CAjbTksGjHQebTHZ1+HYdaJY+uPP8Tv/zFn/6DL37p//w/vuaFnVRaidhjH3piOtuuCnv76j1EQKfXDqLO7oNdA0yZKVrAYnC4shENR6lEZmnR105Qqv/xL/9zYQVAAHns3OMRgbguKstcdzFcOXPmlbfTUhAfAavKuNMiMXKef+fu7afWH3p/S0spFLC9TpOhYGf37triSlUVLIgK7TJNiDHIVEm/K50FBlZFHQTReD4SpbAGe5S1WvHh4V7g+1VRaueUrjmGcbMxzStogTFykpYyCq2znJAkibJcG6scYMYYbTBnEQROlYXStRPKcRY3Os4YaxVhWApVzsftIPj//M2/4qpCVgJTxDnXWuv3H9ukVgohhDETdQkwbid9q2xajEKfKlOFnm8JkwZhiKADKpcQMV1LbVUSx9k0swAQzoWStayxc9qYui4cMoTEGJtKFB4NAcW1nTkFPMSAVcBKp6DHqKiUcSaMIj/yq6xmQXQ0GPTXt6aTUlq2f7S/3O6ODneEkA7iyO8iEhs9L2uBHOA8uLd7dWvzRGgtYb4FDiNiIIhb8Xw6k2VhMGp1Ql1JUZrltYV0Nr1ze/upR58qZGURXFpeODw4JKpGiNU6oLybjcadCG6c7kIc3dvPk/VGoE3AeKMZztMqL+e1UAhyixjyCCQsTlYPH9zyG8YaJStMKV1e38rTqrkQl3VttTq1vhi007uH1WgqX7l9DAA8sdj0AF7bWhgc5Lfv3H740vkTqwvO2KjRHhyUq731Ir3NkcvKOvCJtdAZgDme5eLcicdefuu1ozuzU4+fe/3eW4989oeAaxgS2u7yv/r92//Dj29ZN3BWC2sAzIUqkHVSmrXTF+cV6vVOtpa8gwfvbpzZNCarp3MhMUaFyR+kBy+vdlq1XHvq+f9ulBcrZ58grFlXeTbcPnXh1IP7x/2V89dv37NA8iis/YXlEz8yvfdWgpNmotdOL7/6vZeBeZ+nMLrWwHQW1zXEYhJgo0+hlnJmMBw3QlfM6rIkAqJWu0u9kJjKJ5GYzxPgXDq9e/Xbo/s3E0s4BM4I6pDMzFyXBBmBNcFeXuWZcpSHhaioI9xnfcQFpPvjdLi9M5tWj1w4vbgSnTrRALpqhr4wIOAEOdVe7V1/9yBP68AjDiEegWo+I1DP0koZ3ew2d48m55bW8nQ/6ibZcGp9vbyxNhmOocHNhebKyYV5UW6cuji88e7x0V6ns7i8GEdhfffqWxRJwIA0xjGv0Fbm3EEglcEMAwA01Bg55IzRDmOMIFJSAwgkgEobihHH1JgaQEc5ZYxKKRBwlagXtnyBTZXT+Wx85tKF+f7+0ejed954tYCoqIyd5Vup4K2OqAbH89nqpaemWjfQ5Gh8rxHFBAXtuDvX1Qc+8uyr33v17t3DRx5/OsCLDFtNDx7+wPI47Q4OZpc/+tT6WvPw7r5ydTbXRaW7rRZALgDSr4p+I7DOIiTbC52qLHJRW8scjQwWWVlaUS90uHMq9AOGUVlmnXYcWYsxLiuJCbbAOYRzZYWoI+Y57JdK
UQeAMRRBRDBGWFkbcMY5l0ZBY4zWzgGECIIEAIucgdYYpYQ2BgNjDEYYWGO0dhBQjKFxylrCfY+woiiMMamoixJjggklBFIHHHAOGEsccAgD4DDCFljrHOMUIwyAQwgZ46w2ECPgrJQGYWQBRABqrSACYeA5Z6SSSmvn7HsGEwLIWIMgQMalVRUtLT68ee7BO2+I+v2xweLZh3rrC/n+3vjB4ZMfOj+ZzRlC1uVeEO3M89m8ijS0w9Hjl9Z11apFdfX2rU5nBR+7Mk8nw7FByPe90c6OiMXi6ZXZ8GhaFNoRI3XgcaG0TFU5yliE+2vtRuRNb1wpdjBvBvORq1OYjo1Uc4hQFTiVa0JsnES6zOuKIs/LptW0qIR1k8N5ns/OfezROGSz/LiCbj7dRTOClzpRL1zfOn94/aaFFhFOAarLOup1g3jRb6/cfOV7p0+fMqX1aMiMGE2yTqOTl1WhdF3rojKAMB7QRLrZwVii+t/+0r9YWu6euDyY64StnVm6sHTyxM+7yWg23G33N8p0Mp6nLR9w7ItcvvT67YcePePHvLOywrErqkoV8tRSVMH6w8+tf/UPd776drU7BxR5FoL3nFYAAQAQQiiF8rgHkalrBYFD2BmjKwM1cpqHx8bdKr0vfse1Xxx87BL79LP88jP9K7dng7QyQJQ684NOnuVOSgAJxpR6cWshqiZVu7+wBUNnS4tVDtLCVNnYBg0GCMe+31+KZ0UpKzZS6IVbZdCsfuSDjdBp5NcQOerDw90SGRo1Ah4jyKADDgKMCQEAYyeMKbBz1tZKwtp4daFl6WkCm4nX7ISNlSibTKbSrK30HeG1Q8wLHSSZVmk1o9jb3RsmnZ6viHVYFTlClIUdY5wl0ItjQ3uCtaZVinAjn4DrB9oZtpDET11a/MAjS9/61ovLJ3sXHn2oUg4AeYbF/eXHb71zNNg9yMqKYggR1BoqAQGAoU9AUZ9bX7n0zKMLJ86uxwt//+/9w9VeAhE5Ghc88fKyitrRfJ6L+o/SBu9TRVITAo2x+L0YHgQYQAicsxoipIwBgJy9fEJMi//8m99ZXHnkwbGiV8cXLy78+GeeXthYHuysf63enu2mAVIL60mz7b/82k630/7Lf+0Hzq8IgMtGJx4PjxlVVVG028jrxP31pk5z3gkWcLh99bjVWEClOr26dXicPvX0xaO9XFdKAB0EpOXRnTvjhX7PVvR4kD+0lbSC7GSXtmwVcba00RMYO8eSIJKylkZIpxyoG40gF7rV7WpZQQqUUo8+fPLuzXkSekBXjLMoBsD5tN/P8rJB49KAfCZYCFmzhVrBV37l+z/09A9KPg878XiYOs+rj3eY04vJRY81STsoK5GEq3FIhtM91iDdOIlFNCpq6jcFQv3IPzw4SIucIK2E4HGAPLCyubLseut5Vlw49e1Xb5bAlEVN+qsFqN945bVeu722uWog2Bsdrm9s+c5LX3210eOlFEDDfO4qOfN8zzrgGAnaUQV4lU+MybuLbRqGClXUs7KY2UpyyrAXhvFK4C1DSBWkcU+n0526nth8PD24/9u/9q/7na3DfP7mlas/9ad/uhjvWOD8BhsejRnReZ5B5UZpHvcfXVt5YnDwTcadRYw1GoioqpqtL+PhwSGE9GBwRKDU81Qc3kxS1lzcKESr11lttDbL3J+N3q2UItTrekFWaS/hg4EhiIYebTf54MEe8eNKSqEVDwKksanB4f6g0Q9OX9y6f/MqtITUmHUaB7Op84N+cyOdP5gPJ4vNpVGtOZKuxqKugSuW2o0sNWY+ockopJ3BcQqa3tJaf2lz77AoTlxYfvLJTZHOos5SZ30rm0+K8cgqeXf7+ju3r9ssdeD96cHO9rZMS4RxEIbKDygnWZlhhyGC1CMAGgB0mHiAMK1BXmUQorqWeV5VUjmAFNJREBhtIZJI4yCOukut4eCA4zAMYxiG02kOgPEoJh4JfZ5nZVWWDgALAaaYIial5JQQjI9GE6UMdE7IjCBorHUWUMw96pwyQhdaKuMgJgRDrLTTtRRCYgS1UJzyIldFVRFKAMKYECnqKAqBcwhDLSUGUEgR8EgKiQAgGDkIkSMcaWABMdrILGRIQkQCJqRyABBKLABCyFopAFAldVkqYIGSGmEEFLQaKOfSzFiL50UZhNz3vUooq40lRtm6GSde2JlOUooxBjb0GXS2KoWzABmjgfAIgcZJKQGAEgILoLI1wSTivtYGUoogJpRaZabZeJ6VogTziUTYWgC456yx82qGIYmjFoZ4OB5pCxABWrow8r22jzzGGacAEwJ5yMqyqkq1c3isa+lRUlbl6np3KlTohTFHo0lalaqoZasdlWnJAmYIhgT3ltplXq0ubzUCn0JcCzWeplLWSRQ4CgEGThSQMo9gVymg8WQ+jcIIYJqZcnFh0RVVPk6pglLUgUcJIHmaGW2dtMK5rNSI0cIpAgHSgiOgTJ0EbatQaiTBMIwCSJw2AAd2bamdz7NKWASJc7ASsoRQ19JqhyCGwFlrAdFAGWQMZURpp405GM1agVdphR0wEHDOKMEUA4RQ/UcPo/SgntUTCHSFURh5FKHDvSFE4YkLj55+8k9snH1mX986/eTW8bvf0YEBBVYaAmwsBLXW41HVCjkhDLJms7+6M02T1sLy1sLgYA+w5NLjn9JVAriMQwPh/nQ8O57V+/NJScjusRnNhDb36lkecHvx7NJ6p+MttO5evXP//uHa6U1kya3v3myG/aN5uXTyiZNPPJjLwy/+1vcuPdo/ffnysCrvb5e1iYm+eHj/m0pDiBwnIDL28trz17/1/T/45u0LJxuvvDJav0Ta67rXleGaXFxac1VL1L03XnhlNMezoX/vpRocryQs115dlfhwYn/3pXvjonQSIUQDgI9zOVEmS+8/89EPFXZ+7/pbn/vUz+1vDzZOnJgcDCQBtHku8jc/98lHvv39rxx98YunH0q0BXg1FrOot7648oEPEH//4Sc/MjXmX/2Tf/rwqUjPJvujQ2HB/b3q1FNrrk8aLivuDDlg2tKctu5cv3uyt/X8c5fPrbgyn3ClJ5NZ//TK+g88v9V4+v43f3f3xRem43Ga1tPCctq88p2vV/MHtNv97/+Pl77yn78pyPj00xc+tNCBx+/+7v/3727F/pmzp+7tTX0PLK/23rk2+IHP/bGdd962k+uj7ZtWWqNAVVvYTJYuPLl54sm97S+snOl7bXbvML97PBRpzTthnhZdCriyRwdT3ca1q5lR+bz41f/4hU/+8EfWNh7z3ebjT11+6cUrzVaj1DbPdIg4s+Tw6jtSPAl4R1XdFon91tX03q+m2d3928DrX9z62F8FdqvfD372zz31K7/0+3NRMj++dPbSr//mV/shJw6ksxowNE+HEA05R0qq0Xhy4WTjJz7zkZe+8f1JnNwZGi0VxHUYM+sci0PEZFnJShXG6SDE2Lmyyib797Yaa7v35gKpqajnZaksOh6Hnbh6cP/ueyrwCBHjtBgc8WZj4/QJwiKifFEJHwOfU4q9ulBCi6DVczmsi1G33Z+Mx1EQewGfDUe
1YEmzrQpDWViqTNsaOafLOkm6GGFV1SEP8jRjHBOIoYVSCy1r7kW+F4laAwJNXXnMIVQ7I6WB1GEMiNIlptgZA7DWUGqNaMCY9aCxmBBgoEXAQqSq1DpHiRf4gbJz4jD36TwvOHAUQAcJgA4TqqoCI6yV5ARP0xlOFqWwhNEwaA7TSdigBCOLMNTaGmuEYQwTFiGsQ1s7oNPyMG4kdYUwxFU+97mn8ilnYSdupmnuoK+1N80mWiMWRbPdO36z3z159s7BDQh8WSpIIXQUWxtwYDHgxCcIGmPWTm40Gi0oak6Rtno2TDWgx5WkiIs616rkkWeUC4NwPC6ane6HP/kjg93DIGrV0laljnnYaDa+/vKVrNAb/Y6txZ2dB92lpiVsZamZrPUavWD3zoFWFiPbbCZFJutKOVExn0EDh/sPuKhkBXw/QIgipw3Q3dUFZNT4YFyqHFQuKPRnP/T4S1du3twfVgaOM6HKtAZC1rLlqvHR3hMf+WA+c6Isq3S+sLbU3+qB+OoXv3NVQ986bK3FmFa1/cVf+d9//uf/9D/+3//Zz//UZx8+15hNa+bRelilB4O90fT4o2dOrZ0DYHY8OPYCprXeeXDz3ImHYx6mhYya9d797wMnxSyq0jmjXqPhyfG+KyZAyyxLzzz7Y8dVqYHBLJmO59gabPX0aI/7naS/6fKdlDk/Qi/9/n+89MFyobsSNmIrq72dg/7ySj57nyqqy4wYJz1eQ085QyhtnFzP52OvPoh8mc4ECzgmEaCtSU6CKMZ6evOrv6pkRqwDwnQwAXJmgauqOuCM8yAtcumc52GAoU+hAn7QWIAWCiOttLWWwmbQwVbU2Dp99qnHznZoVo+PgXFK08k8LaDphGIoM6MVo2A6GTiMeovB+X7vzs7EWJDVMK8koCjTotXvABaNZbbYXz4azWLGts6tdlr+wdHMCXv/xTeYykOCVhaDXh+LepeF0IBqXmiDEIEEAFdWNQQAAkgINsBBDGuhIcEIOQSRtRZjDDFyCGulnQPKGASscZYAAq1mnBgNjLVbF1YuPHHhzf/wFk/Jvtkr0qMHu0fEUSl1q91tttpVYbDvZKYXGgujewc2E4VXxlGo66rTjqbpIaKzg22YsHnUTwKfVfbo3uh+v7ssgobUFd30UalNGMGI2nrsReHJ01vXXr1mHQihw1V6eWPr7v5eEDU6SVKBMceZ78d7x9ksrzyKg5Atn946HE5u3xsrqbWy3CsRwVCbRuy1O9E8LSFmXUz67Xg+GmW1bRFvlmbOYoSBcw46xCEFECAEIuqlWeqAURIYYAwznDJKsbUOIAAoMtY6hCslMQAQIQCcdpZTwhwEEFZWdNq9ST6UxmljtIEGWgSAc0AZ6RzABCulnQMQQgsAohhjBIDDCEGIrHMUI+sAgAhjABECzhFKMEaIEOCAUbUGWlmNMdZaQIgcsAi9XwWllXr68gfObJ442n27LN6vCWcQmKJwGE3nNd1J9w8OV9dWITAf+viTlY7efPGqK8c0CP1279LS5oPdmQPw9s3dKOyKdAxx6AcwaXCRFsQNxsdCzgop4OqJs8rp46PD6iBb6Z2wfKhcgagFUBLPOIiskQkhk+GAAB8COZuNu62F+eAg8XAjIkkzMcpQjwTQn81n7ZiXtRRSr5951NlZtT+cTUbr5y60F1dfevllv5LT8vpC3MCEnLt83k4fTKrpwpmzzYXVOzt7Dz3/bL47mg+m65cu1Edl5C+IqgK1qiEZjcpcyrjNZV5Py7y5iJUNs1HuF2C8sysh0VAdObjUjGE2e/2dF5545NGYBp6VCQxmg+yltw6uvnUIK/LEs2edAlE70AFe8KLDkSCEQpt95unOM4/G//ILt24eVmlNMCUAAmuNUgohghAQugb2vWY6Y4GzRgPjDIGOIcCpMFA5XBnvV9+CX7g6/eB3s8UO6SVt3o2jGJZ1DQ2ECNeKV7KKAjwbT0cPRh4LApfpWdVfaUMOtRPI90utq7Iixi0stmfjwjhdk2Amwy9fq/YG+5/7gc31VZKO9hWy47E5HKrNVbwcNCnB0GhnLaYeAsyZ0jqrjbSqhCo0Ak8mGmpY2nno+R73Oo60V5Zq4pNWPJVl04v8xqIBSuus21+tShTEi07m1g0wNKKaszARReb5LQdjEgQ83qwNYKzHwuTzv/bPEs79BWhMPR7de+vdQf9kp73IajEzjM9GNZDV6lq/t7jKWiv/5je/YotSC2O1ZhT5AfUYfv65p4rJrB8Hm50IkVygSafdFSBwRamBchhX0ngYJgG59/9vFQHrnLHOaEAocA4j7JyxxmBKoXOUekKInXfvBCwcDa8fzvXS1mf+zM/95X/w9//eBz+81h7d/f3/8lXJyJM/8PjB9lu7V7dXuqdOnjp9b2f+wnfuPPoTp4aj7SAgncWG1TKdldiDGIB+25NQkZBlNbhdFJWcnVhZ/e7btxugNX15Z6PTXuVePcvL0Tg/HLSWeu2T69OD+em4cXB3e3A87S8tR8M64WC6e7xwYbOowGRSBgELA1YX+Syf9jsrplIQoXIyK4Wm1O3c3i/HkFBkMDkczhqWUk7CEFOFjkbjZr+r1TQ/mMPSTa6XZzoP7x/cSRZCPclNVR/Nh37CmRccHg2iJuBNGnGqyylzFkMrFRS6SMIkTQ1VRAOcTSftoANDxrkJecQjXpVVnSkvigTC4WL4+AfaL7/+yqycc0c3egyYcrHNCmqj9YVgGGxfv316bW1zwVtcYo01v5jQ6cjO0rIVolbIymriJAOi9CFUENW1qsWsKqZRs+F1PCXENJv2/dXADz3eNKJUZYaE7DKIGuFIj4EyP/kLP6mzap5W3/zam7P929gBi9Asn2GH+50VAx2A0vHW6bNPzB7cSId3wthUGp08eTGdTPN0cACkTtNpPl/Z3MzH9/NiopQTBdMZDHGVHcy017NWBh5eSE4NZgdCWVkJB0XYCBfXF492rw8O7GRWF8IQTrSxEIO8rIAOZJZdOLnFzQFHOSEg6YUkimbD3KegzEYeIwWFhVDKKgSwQxUiiBDPQoWpp5wbDfZU8N7yxEW/yp8+0Xjj3tjMJ6SAHms6qEdHh8V8XM6mgLl8miuDlNaR/74KOq224knUCOPYF1XtoGm3WlLoNM0gxhhbqyUlgXJSa5FmM59HQkltTRgHRgNZV0IY6GDEvSiKwtivi6rfbXNOnFXAmJXlhlMWYS9Nq1JI5RyjWNYSAEAoQ9Axn1tjsqy0DkilHAC+R7VSANooDkUpCWGVKI3RSlvP86WUzhhjTZrNW0mMOOOc+Sw+HO3Wsg5wRAlBEHYajazKQz9AEAIIqcc6jaQWVRgFjLMsLwnCWgiEUBAQrTVGACHsoAPAEoqrSmnngNNaaQeIMYYgyClxxhGM6kp6nBFKhTXOOgthM+AAAgWhR3Hkew6Bpp9UlayV4owSDIF1eS0QsM46RiijBEJAAITEMs4cgEoJyr1COGQdgs6ntNbqvbqBMq8hBr1WkrLKOVMrBYALAgqdgxAy5uWFwEg1WjEhmPnYAaBqTSgJIs9IBzTU1mACwo
u3sGCeqRc1tX0v3R9ne++NbLXy7m87gTHR5XToJ5VXxq+JxN3clpunpmkFx9/JHvJzu339Aq6y8Nd+/OFQet1WA+LU6PxpQgHwoAIaJwPMm4rJ947gWSCAWEA1Q74gVnn3/hEy+/9oWi1MuDRAFXN8LnLE0LSqlVSmXjVz/3lQ//JKbOQSJyWcAigBx86qefP7N6pkqPTt56+a2v38dnzj1/+dm2zy4+f+bqc4+9+27WNAYAVcyqIGhzRIWSGLOw7el05iXsu37gI6/90R9nR/PJcWGsjAJP+/zBzgdYa+tUFIWjw6M6LSGyq+vL1aLADjnCrGwQoMA6Z1EQDbIqa4wrVOZ7ITJynM4o8WtliZFCTmvmcT6gEDbpCcUUOteUmTOUINZKIoDqsBUAhBDHoiqBto2RlDIAiNROZLUoDALkzp39Dz3zFKVUNqYqK6uKwWAdO6xrhSBGmHfanSzLKDYWWux0UzZHi6wzaDutKeN5VSnHEIRWFBDjgPlCAwe1AdZZ65REwEiRBUEIbBN2VvJJSZxTrjG2tlpb0Ernc0xIkERSGmwBBAhAwBmra1GhhmDaVBJjGPBWGPppWQQeCMIQIggMKrNGhdnaUnexqPJFKqtmqbt+s7xzcfio4qXHyGyWIQjb3aXFdJ5nOTMcQuuFUXtpzYi6zho/jKGHI7/Le/0nls+89bnfD4ixGCjK20vd00XRHvSXNs+dWIGokY2M49b20fGFFz/h+bFN81pMT9Mdw81gsCyazHB06Xs/tLtXf/PVl4kr2gx5rNse9kBbzCqspeyuLFlyemF9c/Jgez467EUIt1hZcpkXVNYZ3jUI+siniEEPDbe2kD8YbR8wnzohP/dr/+wv//zPffjRq3s7h5WB3/v4U7PtA6GRoo6GPovWfv/3vjpcylsD7He91ZX17Zv3/YjVNL+4HD/79NXNM9HDO9f/w+c++8jzHyKo2bxy8ZWvvdlrgysvPHnzq284XQ+vXvK68XBriKqmsxzv3rweWB4gL5VF2dRSE+eCeVpvnF3X0h3PDy9fusSMLacjjJtsoQklR3m12vEfvfzk6egkTJbqXCyvrYz2d+2wczLJjHGPXHqkyT8gdiX9DooShMNxY06PJ05KYMzpySTsh4zFCjYQquF6EsS+bTCyDA1WHHQnoxQQBhAF1DPQFnkjjSsrGQS8qWqIoMbQOe9wgUNN42B1/7B0nKGF4A5i3JqMJwSzRZ6f39yYF7k1YPncxfn4gFULGkXYaqU0QARaV9f4m6/fPSmU1ppzGCATRcX6GS4N3xmZV957kzD/OM2UMxjTT33/DzxxcfXv/eO32nGw2e+8+frbJ+NUNaYVBXlVMQaNcgAB6Gxd15RRxoiWFgGolZ0dHLSfePrv/e1f/pV//g9v3L6LIaKcIkSk1M5WxuiqrKwxAEOlXCdKfM/TtXp4K4voyvKg7XCsBZyenKJllCR0fnIILSxnM6VLUc0SvzOuF6Wans5cFMP5fF4Xc2Nq6g9f+Pjzq+2tQphWBJ1uikLUjQRYKuK81QERXp0Lk5Z7+0dpjbce0+2VqED24sbadv7mqMxP9m6dP3vl0iNrsijHwDpQmqYEBB2d7iTdIWdtFwamTilhdZNvcA4ZT8JWXpeTw2mHR/EwSVY6lWMUhQ5bPgjCXuf+g4etXrK8skSMyqQOGfHicBJ3/E70cH/fCnp5c2NpIJ1F0pm8NJR3wla41iIO6oPReFqWveWViQLs3HNUTl5772td7gMlA799Zosd7Z1QjxfSJpy2fK/I02Ev6bQiIZQw1YWrZ3fvnGZZ+ZGPnVmJbyUsMg4CZwAmxikEcwQMoJgwivCSUSQ7fbvTT73kQ5VKnCHQaidSUd/y/EcgaddFbTn0e/6HP9z70pfn9cgihzSEi8a88dZDrG1B1AcJ07Lsn7k031/YvF7klbe2YQKvESyfT5XQdV7r45TSb4SPf2rrqadvf/1lRPBjL1578K3rzWKvE1PP9fdH+eAC37hw4ca/++OV1eEzH7n4rZe+sb614RC3raQpzPqjT8zn6SydYRpAn3FhZrNsfv8uQPCFD1949U8WZZa/c31cF9xKWzu5fv48hma+GGsGV1bWeNKyfkSQVy3qdqeVj+8mne56Eo+KALqyFFUdkqc/+sTp/gl1BVlD+mT88Y88/ce/9R+aKtu6kIh08fDWw+WNLS9gvt+WhBkXKk0gFVWdUkh9vw1tfebRnuaDP/nWAaXwqcvrdx7sWW6++c7ewWl64cpji5sFrtNFNkMGnRzOOp3+O3E9XtzrRTs//7PngIOWay9QoSq2LoBXv1NrYoWRkBLgsIUQWeicRQhhiIRQnGIlQKYajDHQCjprLaQAS6eg1QhzAACwgBJsHAQYISl/7NNPUNdUzm2ev/Lmq7uPnd2IWrwpMoaNEWaak2KhoVBRyFstWhdF3G87I8pKQx4P/KjfaU1PUp3VlGGRif2mQWCeaDRoRYvJaTavSJA0kO6OJ+efe/qcprfeeThtqlLkrY2VmKBFnUckXIz0iZjYIDge7QwGObRYW48jQmjX97EpR6VWzkpL+ySI6mLsh4PecC0TjvpBBHrMW500mcTKNPnCCKhggmjZaGVUqTSElDgwSJLRtGqE6nEPa2MAMKlhCFNKulEMbKOME5UI/JhSpIGWdQmAqkVz97A5mRrHTFW7OCLKkZ3b4ysb7acuhX6kk4gzFtZ1hjV0ABXaTUvbWKhM9Z9YRUJTx0Li+R6KqnwhKoBlW9aaBj1kdZUurpyP/ttf+t6/+ytf3t/TBCOC4MWL3sWPRw/u3vFwtyqHv/wPf/fLf3jotbsONLWsmCO93vlWdzCSN0FjDY7iqPtbv//yX/iLjxAKgHUQIYCJQ24+mh3uTNIq4S1qrDzcPynrOujES0tn5WLhlPA8ttxfr8s5hnBzY+3weI/7eDjseUnQpA2CTBqYiZmHKYKIEZb0W01eW4U9iMNWp8oFbLIkiYq6CYAc3bvnjFC1yOa5cSBVZirs4Uvf6UXswvnu7Z3FxUef+Ku/+Jnd27/p9ZiWXpNzKXDgA0g5jqjUI12Om4VF5EwAH/3n//RGZ+15mhxqJ8rcxPE6dI6yAcAce2MPVe2Ix3Bje7F07+5BJ4wZXyCCLbLSKAeJ54fAWo+0A8qAbSgEVV4B5yPg8lkmGh1Fwfr6+VvXb5obb/tJsDroIsZoK1ICZyUgnsUBFWYxLhf7k9OkdcZqNi4dQpKHqNteOjg9TTqgrk/We+u1VwBHIr5eN3Hg94py21rea/WK/KTXikQ9hRQ1AkR+z8J53MInpwcEJxAzBIJalEGLzaqjOt2zjPmW4WiISUAbfTp6KMQIQo0JBVgL3ZTVHBdjFicW6GlhlpJ245RzplF4npXWKt3U2TjloMe4N1fppTMrED4oxSxqJX5gqloHESUIQmAA1MZp65BVFkh9+t6r7c6gw5YJ8asKZLWL2i3m4bu37ntB38OUIkFQvdT345jP5tUw6CPI+v3QCyIjpufPxtrkXoCrE7HmL03L7bYHZSWV+oDSoo1RZbVYZNaBIPIhIo0
QhPOskrWojXajvEAYe5SZRkfteLgR5otqZX3t9r3dSYE8ij1KAug7SFpt32jNA88haGoga+l5OPCotlpJoZQgFEFCQ0qVtEHEOcJ1UyFjPUICQiimlW2quhFSRWGcuoo6XtcCGMUI8gJPCGGcCXwOEZhls0675ROe6tz3ubKWAFeIKuBeI+ow8kPOZ7PcZ5TjgBIIMeSccEKqrPY9vzZGaEMpU1YiAMpGMB4YXWttjTaQglYYu6YmGEMIEHQGgEmaIV37wHVabcSAxdBa3QpjB6G1JjJESFUoSQnijAHgAACNaJwDjGEEkLXWvs8vtYBiTCitmxoRAqBzAAMElFIIIqMNwRRhgCltlDTGFWWe+CHhHCJrrauEYIQAQDEhAALnjAMAIsw41soYbZU0ACJCEQAAQwgRDjwupLRaEwAMI7XWCDjoHES4aHJCiTaWYEwIxgRCgN5/uOZGsYQDBxAiVSOg1kobAIBQEhtXGCswRAhRDHNREgClU3HkQ+Ma1TCMGMNKN5iSWhZt1mc8yusy5NxZhCkuhcAIWgUbYwhlmCHjYMQxYF7ic6UEpURIpbRx0BnjkKHKQYu1RQATLJSplaGEcoJrIa3WBDorZEAophQQqJzjHkcILcpcW0MowZgY4BrZAAeSJIIIuQg5595XQapVEjBlquNb3y4fjrLFpOP5o5O6zGAtwmmurVskMRZmaivooHfn3gFI9TDprWxdufXKe9Ltbl0ZRNpEEccBn54sEKRxt8WTXi0hrFB1WG1eHfz2t+9P6uBjP3jlqQ8H928dwUYXcxutxW3i7U0OBb6/9vFLhzerzlqbQ8NIPJnvx8wjNM6NrqT1Oh2dTyTJW/0uA+rxay+0zJbHgovcOzh5ae3Z+tkf//Bv//Y39/Nifw56ayRt9i8+/ni1z1vtZSBGYnYceyw808fQARrgZHXj/COvvfP2x1548XP//jeLQmGfBrGnjNVaMY9rC1959z0j0ief2Lp1Y38yTyOHinmjEH793VvLWxsn0yllrZPjMQfLnY2ls09funP/JF5aYZ2Nm6ff7nZ4fjRW1emrX/92KWSr7WNuv3rz2z/6/J89ePs2i0M2DISYndyve22M/UgrMTrNRmPZXn+60rN3x2ysW22OoohcWT13OC5CbIWqwmAp9ltlJRYaNSPVxFN/yU98nKeVVjA7HUcDk3DPD7pUc8wQ7Hp11VjoDzYfH22/wykiALVXe7rVgz7ywg9pgI8e3q1no8Wo9nAstWUQXLzSeenze8ZAn8RJJx5urswKMKJ5FPMnXzhfZ/mdN77iAf9//Re/zXlrXplF40iLxFfW9ETTXvrpD19cH175Sz/zOUJjSJEfh/e2D846jXzOvE1t4oZEF1788X9w4UMfe/EXz569KCRtGtlUNYRYa2eUwpjeuJGuT/oeg0HkKSle//bhsPPx3Z3ZztT73c/96k9+ZKu0zQsvPJEEW5M7+7OKv/PWxI87upGzRQUAbrVIu9s63j9AiBojrZSuqj76Q585OT64u3Mix/ONa1vDdX9S6bAd/EeryM5nZZE3TiPgcJFVTurCGuAs0EqBHBNICa9V5YBxzmGInQG1ltZiq5DWddRKrDEYsqKukJFWG6cs4xxYFHvx8eTA91nAPWuRNsBDfnfQGo/2Az+MglDICgBQzEUUBAQRFtLOoJ3v7oUB8zeWjg4nhcz8hIW0NT2ZAGh1U1GLAFAGSA2sdYry2EDqoJFSAIQsNBAzoQywyljqIC2UptABZEIPA+2EAQAghNF4esoglY7O0oyzQBkNCTl76drx4bbUBmDbasdlXVVV6YdDjiiAwAACvFBr24AGOxCEkTFWVtJgyHnwyJUzX3n5DxDcpLgddXzgA2VmX3/3Tz/23KdO5/kimyV+YIybzSbAoTCKLcJh3NXlQopGiwZTKCuJFKa8vZgvTo5PQ+5TrCezIuy1isb2BrGXziez/d6gs5gde61g/ckLrz24u1w1/d76YjoPA4YYt9R/uCtRSZcGCfRbyVLkCMBhXxK/v/E8MDY9fC9eC3RtZ/O5Zd3dqYiGw9nxdj9eSWK2tDRMli4+vP4NHLNkMIwo3793a76YWuppN4fGDnoBv3IuX+Tz0/18WnSjgWjM3Tu3SSWX1nrztAQaMsg+/Wc+yXWuNZ6d5r46+M6bL6+e61+7dsnVuErrG68/wJrt7k2Wl0MeaK50l/NSNLsP5klvo7+1dLA3XqW0vbr+9d/+3Z//3h98971sutj9wz9+OWwlG2sXrz723MnhJIk0UHOs9dm19b2D230WS7t46Zvf+ZEf/q+TeAN3h+XswVuv/Hbc6iPSqpvFy2/c7ocWLPC00IP+WrSyVRX5BzcigWyDkE/63YgAc398aK1cW1lHnl8xIM3e8moCVD0/2W7KmmAuG4sAX1m+ytodIOeH2ztWOsKK0gDuhTYVABHZmBglvc7qgwX0S9MKRZYuKq7ObF3yHLh/95bVtpX4TVZvHx56YVwv5mL+ps/Rcj9UpugvdfQkM4h7S9279+9vjzPqe4xRSiFg5KV3tu+P5kSz42lqpV5a7lVVlVWSBuTdW1/65svzkFLG6Ne+8prVFjjoeaypBIIsCGMLgFLKKMkBgcBSwoBRzumAMSXF537vP8jFNMuFNhY7YK0BDjpnq0oYayBEBLkwYEuDTn+1r+vm6vNb+Yk4e255ZWuzM1xJj09AmdqmVWDMOGmqLEsz58R4dNhfagmTlqIg9cAldDqrmJ8Mlr0y91c6K5PJhAwHHBBrkHJAAbPaW1LQzvOGkbYW+OKjL9y+/96knqsjh7DHg/joeGdltf/Iyvq73777tc/9i7PXLnbbK37AjnYeCrWQ1LSGTIsRkPro4bHPvMahnckeo51rL7yIqDKLrN3x6MnsG996fZjXcbdXqEVeV+CBZgBFUYIUCoO2qqewllWdleUi7HlFmStLw7jdCLHMbFkVGgKnhcejuLcctvz72zeWV4dxx1CaLAp6tX8pH6Fnv+uHzm6yfHTvy3/4Js5VwkgS8243XlqJABaP9TdMI4MgJJQURT46zg6OstNR/plPDD2GqzwPuz1pNKIWMGIttYBiJ6DFgMQY81a3UGIb1vvA27AaI9hFwbKzb0s1QthjPpNWG+eGK2Sjb0cjoyx2GABECmECioaDLri1CwBw1umyJtgOz/S3t08ufejTJzfvslAfPBw7i/qXtrJxPR9PEicLqVfObdlq/vCtW5gpiMXjTz27fahbz/gm4hOEzz7+2I0b3/nOO/cE6Nx+9zT02draxtXnPvrG3YPOoNsmOB9NTuYnV154+sE7D8vZaH4yzsu53yZby8t1o7QzdaWttq2kjRzcPHulAHAsDAOknBa9wTCrZylKMfaO9w4PT75w7uIFZ+w3v/GnV1/82M6D6bI/nJyKC1c6SeB/4V/+ujAScIgwRIAsra/wiBFK8qboDDcqAYp86kcsYMzW2hlF/bAW2vfMz/yFj56/tPnOO/dfevP+4WxkBTWGvPTKTQPIZr/93ItP5ek8W8Cbp82tV4+bfBzC2fd+5pmts2EJJk4DlB
Q/+Ve2vv72hFDGITEOGQMRwA5YCDBwwDmAALIGAggBQM45YACCEDkIIcAIY+SMVRjBRinGGLTIGvXMUvCjL17ZOdwVNZpncwBDn3eBK62Wa+eHVS6q2ZzzAPt4eSV2xICaIN8QxKRCGHkQQD8IyqBhsiGLBgI9qww6hpd7rf2jYykLP/JYy+8tdYarGwAY58jHVpekKW4/PLlzOJ5W8kNXr2ArmpHIa6koGG6dGaypupnZPMdKY1o7R6rGNrkEbFYLu7QS5AobK3QBGfE8nxcKv/fm1x5u71JQOVMTj1DqcYqpRxeLcV1XEDAP+brWEUUQkINRHlHgcSKMFlpjCoSW/Qi0Yg8zbJEByDgjRJP6nd7BTrafakhwtxNBVySxFy8Pdg9GeV5u33uwtdYVRZGnjREachiGhBIogfFC1on9ByeT/80qwlA0eRN3I0qQpoGopRcNgUUhW3T6yc6DbVnVyyvwb/7cc//vf/LtgwPJnclN9d4t3O54cW/zd37n8Ct/cgRt6EyjbIUJLvMCyEOdzuo6N4IR5jSAn/2db/zgT3woQDkiWOlCiZIxDzAkAGxFYTuI8kXqJUGa11DT2WREAWiEjiO/AbnjRiBhStUfbGCqm0YqrYAGRjV+FHSiDnCgqUspNEa+Qq49HKbVbGfvdt8PoJBW6LWlzl61czKb8K7PO+TK1vmvvndbY4QIqgRMG3kqF1rO8Nfe/dLXvvgv/oefl8U+9T3hIocjQkoHT+pi15mCQYbCwXR+6ad/4XPvvDr5L/53j33v92zsHj5wgNpGC21I2MZeLItTrD2g1hqx1pTokx/51IO7f7QoDxjCGgLMYkyhFIVqRknYDZKtUhwlvFnAdJrLMGyjWiMrIZRSAkSVM2L74bFqMgQh4jwJW0LQg9mhETNIBICgtzL047aHmUOAR35peZ1y3n2q1+97rJ5PbsS4AhhkdeWTyMiY6k1GEkRtJfKqrEVlHQ7ayVJRnigj5oVFBjrrrFbZdG6NcL7yfdrpRxakVu1RRPf233PlAmCFAg+aZjafW2AAsWGXMg/kVZ7naRL1th98Jwh8HnaXhsv1Ykfpisd86fzWndnMZ3Ccqp/6yZ/m9LjClW+dxpTF1A/ZfJZ5PMR+K0xoVddeEM1O9hEFk/muULK9dJbGlCKeiklV7HsxDFheZsIi3m0xKQPPG25eOh9YVAslm1GZzz0vNE5gJnkbtWsLTT5ohxCoXFT+f8RaK2Oy0SyIIgvBoNOeFAWEZJ43eVpZCH3OZa2sA6zFHaTjaQXSyho7zo6NwxQj6nsAAKGds6istRISNro2liDoR347jkJOsXNCN8utVp5WeTFnQRgSX0mhreMYR52IACcrLaDCACZBiBAUso4DQig5lqXHPWN1XVVWOeUsIBg65PmJMWieToa9FqSgFho5PIh7xqiqKizjta0DTiB0zKMU4aISUjtLtHVQGW21xRbouoZaZbJMvGhWjAZJYq2FGDFCiFUGagKpUlopiQjkhE4zSSGDVK4kAUNYK1DWtVEq8JgxpmpE4HucIUJgXVcEcU6YcgYiopREABptKCEQIeCcdY5whiAFDkglNISMMGMMowhhLJWw1kAAIEGRn1DMOMaFKK1DgR8a4yphHdAeZwhR3SjlSF1JCCAljDDlHPA4K8vaaAOhsQw7qxEgnAV5ObPAEQNDyhpjMWUaGGEtotg5RbHvc2ik4IRqXXQwZ5SNp7N2GPVi38fQOLvkcWOMUIrwoJGGA6QBBABhiI0Foc+KvCYECSUYo8C60GtZ7YRqKEOQYAiI0Ir6PnA2k7W1hnGKua9kAwGgHGGgMMFKK4II9T2tBaYAYeQchAj6PrbWCOm0dYjawGeQgSoX/cTXVjlAilJoAgCAda04Y85oTrnPmXMaOkQxaXQjlSHvQ6LcBwW0sxc2D+/uSi2PTq7HvvuuH3ji2//rS7HXngG5vhaNs/pgIVC+6IXQjSeLzC8nFGv/wexocEe1hxt+t/f5L372L/7MJwBqB8xTiY88D2AiJXYSpqfzBPfe/ub9vWM7cw2Gdv9hFoacY7IcdZQVLrefePFTX/r6q3ffeJfRlq5hU+LI67fgWmB74+LAWKpwkvC+bppG2zjpPNjPuhet8Mtaqurk4PDGzcieho5cTZYFbT37t/6GVPPZ8UEjKpbgxeThxccuz0vR7qLNp64d3CuXule+/EffTqK1KE4un7nyrSTsdMPdUgNkRVaHPtMAdjxSNeTl93afOrM8WN+odVQXqmN0r9vbfvPOtctbFvJC8vWNy7Pp5MLjZ1rD5ZOTHAO8vbt/5cMvLq16f/obv/Hw3va8KPorYdjFiXONmn/pi3+0mYSZkG6O9XEjJiaDgVIwiMhC5oLyZ7/ruy8/dq4U8Ld/7dcPTt5oAWyZybWwBewWpskPuyFJ2PDSlRfK2fz+w+vcpwIRGiR5ncet/mpnqxqdYKHiFj083u9sBDxMZNOopoziIG63lCwBC4XI4+VNc7LrgwZx72A2DXgUrq+w1oX1S+c//0/+PWFquNrdOSx4sXLtuc88GE/vvHt/bUk5USKZzu7tv/xwxhUlERztlZU0vPS374iO7/7zv/UXzlx89PP/8vPQurpWBlpGyd/7P//KZ14cfPIzT2489QSjBFDOeMJZ/9/8q7/1X/3Sr/LkshGmqaTvIYxhu5c0VZ3VtHvp6be//jJdODSOq+P0+z/1ker4wb/97L8/l8TiRMDaQ0fNhOfDrQs0JvUiG/a5bGQcBtmsaYrJApWUYoqRzNPF8dFk/2g8Dt64MVlfph/7nide+YPXSBgcHoyWu9EHfIpFwWjDGESMYMKmi5xTTJkPHWwsANhDziDDtBEYkNDzrait1ZBgxkKjHAJYNJpgqJSSUnGMGaXWGgCM51FrBePM53FTVghaTmlRZLVAmPmQBVlVCSVareUOhk8/svLFb908nabTSY4xUdqOxmmlmj4UWuDa8xXFqjGEQYK10qoV+wTDk2rW7W5aKGtltdIAIiWkx5AzxllJKBGm7rZ7xLlFkWttbWMRREVTMIFouOwhvxILGiDRSCjxvYObTz/5XH9p63Q2FVIxG1srPRJVuXXQMI+lk1FvMDh77sLuaA9oq5oSEuBFzGpYKyOL/CMf+/6Du7dHo8P+WodSD+HgZ//8f9PIGkMYB6G1QBul0tonAcIoTxdQO45BPp0GYdBUwigZxXF18HBR7D129ZFa+nlWdFlHNE7mTZMuetwujve85X6rF5WNOzpeXDj/woXNC6OjURyFCIGqyBmLUdwBsoHWzY52L1x4+ta9B+H6mZpYSrFWJj95wE6bOBmsrZ2TUqeZQNxb3Tx3Oj0J2Bqg4N7N78Tddrx20Vi+e/92L1lvilxVGjEIgW2yUTqatWMkzWL57GpdG5cJAt0ky3FAGQsDHkz3D/xeMj85TpKl0MeT2cnP/o2f/Ff/9vPJvXktzca5VawVxm4upu++9vqnn3/81a+8vDTsLK+uL8by0uVrlbRLvWDv+lj5aHR6euf1lwee994r72xsPo1Wrl278Ngiy27f+Mb58yvQh
KKYlNJ96NmP//Zvffa5j37/5tnnoZS62r9x/StlfhhzEiXtfnfrevrN4dl4te9rhb73iR/8/O99SQiI7AeUFmiRc4hRr+OHD2+8LatZ0mK+RbNSYuIcVGrhFdnsKK093iJQQK2kJlWRB+Jk0Iq5EALxUWZNuN4KzkmtrAkDP2o8kjNPNVNksxJTjMJBNxJF+XAy7XSW0yJrGkGIrYXAjDdZbiGoCcFON87bzoqk3Z7U8M23b967ucc5oQzpslmUJljpHo/LRoA2DtJGnL985fR03wKNnNOVmh7PoDYQwPl0HobcADgYtJxxJonitdWnHnv6+P6Dd66/l6smiiNRSyOlkRpR4KxjHCsNfvdPPo+BQRBYAK1ywApMMSQg8AIpBJa21YrObq0PVjekBkGIexfZ1vkzRrpFXrY7w16YTKrxxQsXMSQnB6JB2g+8JDlf1LPI73c6T5X5PD85NhKXVcp8vHf/aIbuP/7sCzgMlMCMB9oojJmsdRwl0gBnyWw82s1MK4qUb42xi+MDixAgctEKnzrzSEkcJG6UTvYOjx6/9hzw4GSaSiCTQcs5Mj0eOdBdunClakw83PrOe291ls+2QwiJP1+4IneD9dUXnv7wq699qUwFpB4J/FlhL1x9YqnfstgBAEAt6vnMaiDzUsznUJOikTHvHe4fNKpaW40HQQs4IprpqLD1tMl4IYRGfuv07v3tu//0+WevtrphPcW9/qPnLjeL/E06BE8/e0lJhQkS2mUFzOe1QTVELh/Pp9OFAfapa2tL/SZIiJcEDiCgJTIWAIORclAB2yDjGQcQDnDrAiq1yHcov4RIC5nKVVqMKUaChTUN+85o6+zKytKV8/n1u1poCpSz0JVG9Xu9H/rxH/qDr78FANjaGs6OjnzSlFW6vNZf5Hnn/HntBdZ1q8luelop5DVZOnn33e7ampxNlawobLorCaHrWlSgSc9cferOdNoI5Le8j7z4zI397XTseoHvB/ZwNAPfeaMaTU/ezb7nBz+l++shCCelSufp0b39TieYnRRxJ+r4bDotpDJKK9mIB/e314ftmERXnnzq+q33Wp3l9lror/a2hv7Dd187e2Xtxhv7COK97ZOVx66effEX4vWzS+cqN5nuv/O1S3Ve6VnkO+Tou7ceYgQ3V/uL2WJ6chr0AsRayEFGpMdhRAPMYkhKo3KIXSPzuBcvrbZu3jz69X/39ZzyrbPrxskntpY7S2uvvnd7NF38+u996XS6MMJoS4F2GChr4s9/Yfdv/hdLjBSyEQx7lzZIPxaTNIAIAOMgQNZaCAFCEALknMWIQAS11s669606SJC1TjuAMPW8Vq0yYCXGGEGEkTWy+tjjV4/Gxzbkw9WOd2qRDzFXDIPWSq8uMm1cuThYXl8/f2lzPju1jeoHXUQCqaHVUhnJGBWNMIY5QKpZVS8q54WzRSY7gccwRcw6N1kcD9d7ClgtnQbKKUMlPNvfqAVpZH14vOgmDBIa+KiczTBD2yOdhNHWUk+NHqp8BGyAsIcwOHxwEC+LDHgAU4OYboiyyenUmyt5dH/bk5JBhXxKMC5TGQREAxDxwAsoI3wyqxsjoUPWqHbgedgpoR0yhGFh9Pao1g31iOIESUiydAGwVTD89vWdw6mrBOHYjY5nDDhXkWHkjbCPcdMNsE+NN4iKXCOilFDNOFVpWTWNtDb6oGzw/0sVVYUq3cIUUkvCOszvWtcA7BtVbO/sStEQ4pfT8Zkh/tkfvvyPPrtjClMKOj9ZGe+wNx8e/eFLe8CwVpukRc59wngQIIYBEnVhpCYUE1xiDKfH1a3r+TOPtyyorbPWAoe8tJC7R+mZwRYAYD4vg4iubKxZZXjoQwgwJVY1x5PZUr/VbbW68fLJ8YgQSDBHmIuq8VvU2AYA5KypZe6xuK6VAzCdz2VVraz0l1aCpnSL00VZzQAy1jorgQEygxM/xJW0WmpOKedMKYGNDTg7ulP++89e/8W/fNnqsYt8SEVaHAGcaoUJ2hot2v/N/+sr7+zeTGjv7CPxzmjndC8OIm5Zq05LpSshSqVSKI0S7LSyy0kT8+zB9u9odrS6fOZ45xigQKqUUCV0JbUC1qqmhhL4oYe7gRcSaSVkSATxbF5QP5CiRsi0e0lWl1JqgImUEliktYliXylgjUUEtJiHgOK+hyA02gRRx0FPlrMyO0VYV80EEMIwEIVQKKI0sLIqslxaRULqEDPalfUcU7KorJOYs2Uv7C/SwzBMqiYvytqnHuSQUrs43QHzMedU2sIYTFl/Op7N01wrvdSLGKGzvf0gYN1WS+qaQEgRavKiohA5G7b7TTNjiMeEFtMiDrwzG8y5vaCFmICzWYYIhZr2e1t1o52zril1fmQrvxcz0okPTxc8VNnsLsKOYGCrDCrBIPW9CMK2RXC4vnz9rbeQZzc3zmSTMeDw/NYji8m2bqbzURl3IsxNb8gWk90oZErpdrvd6gz+I6uIrKwPi6LUWu/ubKcahGGsG22141FglKKIOYgrabN8obUK/IAS3E1C1GYQmk632xSFtlY0UlRNU9aMscBn68OlVhgYqTCFEKAmr0WjjXAU+0paDV3s+1LIdtJvqgX3A4ahA4px3kg9T3MMCaUQObfa6TunNUDAWGMtpVRZq4SyEApoGeV5UTCfY0SqvEYOYI4hIc5ZglhVl4xxB5A2llACnCMYW2uAs5xSKUtgdUhp4HWRRT4hESU+oUpqCCBELogiqxwA1ALnoAs4gg5M5qV1zhKSBJRYEEZBEHFCSKUkpBBgoI1zQHPmW2MtsMBqaBwnCGGqlQEAAIiUksBZxiiw5v1kEIbYAeB5XtU02AKEcNM0fuBbCx1BUtfQcoSwNYZSTxkBEYYOCOkA1MZYoUqMIIIYYmCNxoiWtZTGAAQIZQ4gB5CyuqkWDgDocMi6UqXKSIYIpZggTAikxK8r2YiGINIPeewtEWctAK1+ByIb+cQZUJciCFglpM9oI7RxVigRByGyoKjKkLKyarQx1lktRaNVFEbG2EZX0ugo8pWUENE8Kz3qGS0xQCz0lDBSNkJUPg8IIqHv+9TL0lQYW1ZlpxUDZyn1VCNk00AEPJ9ZCmRZKwVKbT3OqIMYQIw5RtgQO6kbjyMMsZa65Qe1qq1yDjhjlDPE49RaqyQQtcAMva+CyYNjk6nlM+urZ6/e/Nw3v/ibL3vTYHosTkeiBTlyWFagN+gNV/jodD4+roKI9CI/bQXdteS1d19/6uL3/vm//ldHe6+1oKG0g5UG1MOMZXlDGfUI2Oi2/vTLx5HvrV5Kul2ymB2tbw3rOUaGenFEZL13/WCxX4q1ebsfNbLxSafK/V64URSzwpRCGco9XVhTUexF84nmXms+fSc3iWrwua01dCVw+VJRzRyi29fvheETq8nZvQf3AKB5Pur3V6Y3b9h5lo0P7PJaTyUPXn59reVfuTQsJuk7X/5Wn25efvLC4LFn/vAP//0f/enXljrJOBNQVVUphdGHp/PHHt+4dNGbT9MqT6FZvPjiZQvwYj+9NFxb7vXffa2Z7deQ31te7i61jSejdL4Y
v/0OKk6eer73J1+YUeQ3mTTOlLlbHJ2qmHZiKtKm5ScJ5qKp9nbGkRdKYTwa/aP/4Z//j//qvxsdjS8/cvn66GEtp2Ujeq2QWQW0BdpiHxAEHEY2SIabz8j0JM/HceJ1WlFRZul4ux0FWXEyyeqow2aTFOFmqYcn+7c2tgYG1oj6MOCUalic4urIZEfEJ8OtVQxRoWVv2KttvfX4xp1tTjyw1I3+3A/9aKVoVqKzW0tETo4Xe48/MjzZnQtizlxcbQ2WDh5cJxa0NWUz//v/yvfJo/tjsvjsv/429ilqNKMUUjav8FdfPnr11Zv//b/5c7SDbYM0C5yRT3/iez764a984zsLbj0v8YOAqUZrVVIrzMnivS+8hAidZlV8Gj5+5qk3/+gbX//mS72lWJ6KQ6V+4Rf+soEJ7LSLxcIK7FFcL/K6ssRzFoKd/e3VlbUk7kaxl6cT64qViwMHej/803/9X/+LX4H6XrzW09gFldffSN5XAWW4rOpWJ3TaGtskEQ58lmYVpVwAuyizpThhjJmmgdY6DB2ElawQIBRAAzSDRGmrpALQWY2UMZAgBLGDrqprzjijHGHUSlpZmrKAIualVYnDBPPA8zybCUhC1Yx2bmwjJcuszNIy6vvOSGBtNpurbsuP2jItkjAQ2BontFXWAaVMVTWM+GU252EArfOoBwnJlWlEA7X2CHXWMeRCipNguRPKPDtJmywMO2lVKohAXRibYWAwosRjAOuBW3p450hr4cUUA6erhc8g8n1taVUKKGEriU6OHxAoIERamSpfhO0eRNBBE7aYkI2qXa/TjcI46oSiUYxxbco8r7VWlBKpdRglxlhnbRiHjlLmt5yV0lYUOBBwDgPoAEEQC3C6d2hyFfW6QDblUe4IDuNgsNK+efM96FHhZJx0kfO4509PK6iNcsBWoloID1NENmeLN72kVyr12ltvExf78xxIO1rckIsFIySfZfPJ7EzSJyzkzHfKMMI2Vge9VlDkuU9B5AemLBFQIUC2klHQfuK5j37722+bMh9n5fHJpLW0tJjngcBNrVyjKIs7G+txu3P3zk3P2QABGuJGNksBG/idk323d9z4Yb+C7PnveT5J+G/8z//TY0+fe/Ty4613Tq8+8+Tv/9YfPvb4Wthtv/y1V77w3re1Bv+XX/y5ddLPmunlx6+Gy21H9YaVcX/4tXu7z139aCZOLzx1zepiXqUo8TwFdx9sX7j2rC7m+wfXjw8nTz3xeMv3bWGmh6cIetfvvMZ9vrV5mYr6/oOdcnr80WfPra2ee/OtW++rYH46D8Mk8ODx/Vt33nq73R/UEmhRz2bTIAmWe/2VcPOd8UmrvUKg8aHr8ODB7nHTOAa9o3Qc+mEjbXewUfjnR64XdDZsZZRRlFkHFMQAQWtldWbYlvpkELXNUhIurWa7jkjX73CAHNR1t8MmabE6vGDl4tbubqfVzWcz5ycAmyDx5+PaJ8CnLPBxbUzS5qHP0llZa7vI5tyjamYwwcznBAKhNSEoCOkiy/0wkGUli6bVbW2t9K+/8Y3D7YOsaJR11kDgAEZouDmIvDBNsywrjJIUQWeh0cZBiIB7//6gtVVQJz67dGVrsL5mikZkNfa5bUhvuYOU5QkPoghZki6mUdRuhOUUtnvLg7Vemh5NxmNndeChIHJa1tDjbi79kC7mote+9JFnvrs0hVVOFZmr08DnRW2FaUxqoGZ7e/eunb+6GO96NDi/Hpx59MJLf/TNvXm+2RtqY+bT8vJj5yZ7R6PRzvf8wPcVI7l9MErLmsWkRQOrbNwdCOHTcKXXZulkFwJ1fHwrOXceCzQ9PUlnE9+DO7s3i3KBAOIeYx5Y7q1oYCsNy/kkAiqg1O/1j8djZwSlSFDXWR6CxXy5HReNA0IFYYAMjnzKGXbOLoxYWt+AtjOIl/srQwCa8d4CnFtOljjy4aVrm1DWTSPKRu4fjcaLppyXqhKWcwYxU7q31DmZjvtLIQvaFkHjDMYQQu6MQlA5ZwH3EabWGaAz6ErnfMAG1M5N9iphQwcCg1ve+qd3X3vTuWztyhLxQqsrY9HykEJYQcAJxtYhhQH3ozj6YA0zW5QYMC8MF6cHYqY2z2SqklGU9NfX61Ny+413N57/EHqUVVMNtBFlGbe9bD4bTexweeAA6rZb1XiyRLwz57amcu/oKD17tnV8etII2u4t9Qi7em7jlYPDYTs8PZyguMchNdNyOFySRQm0sEZsPxzNWr5rnOeRC5c27t3ZcwbkpUyL01K8HlIqQTpc6hJIWaszXF/PZqeayhBbz8eFFv2Lz3hO2mw0G91+4hPn9h7cBgSOTk6Ec1KocpqbXpIkiRBiGK0VFRztHDRy3ltatbqSsrKiYYwgq8q8hsy/dePwCy+9eyj4tHS3X7+/Nugsd9f+6OWvHmcpw6ARptIWOGBsLWqJMcAk+N0/Of6R731scB5p5Qzxo1j+6A96//jXDHHMAgsRAs5ghIFDzkGMqbEGGggAgMBBBBHBAEHgHLYOIWNcCZ2x1jmLjHLYVh9/euPx51aVrJkXe9QXXrU0oKoukXOzrLam4pxGYRsCcjgpKImW+0k+mwGtEOacU0Ao1EApE/nBJJ0x34WNg1gGMTk8PZY2SRg0WpdZGQIWEqoxRDSURcGpxtx7uhXUTT0+PNVSjEZzYQ024PAga9puOGj5EeMm8n2ftSKEY+vDOBAGSKtRWSju60aib924fye9szHsRthopLOs4YGnlfUdLMsGAOMHPKQRIx7faNepqKq8roXRUjhojK2qjPke9XktzfG4yguxstQdLNNoKZlPF4vSHZ8U2nmdJHGiNMomEW0lbHpySqURZQ3WVoRIKSJ1WXoEMeYzzPuteBDH01lFTfmfWEWYhdEgrIu5VacMaaZRUxeqKZNWxPDESt3rtZFPnS6feZR/+tH0G+9OFjP8T//lPsH+w8PU91sQgaJcIIecI0UpAkK80JMl6rSiMi8haGQjoCGf/w9/+skXfkbZA2uM4bwszLdeunfvgYxp2Vmim4+eyU9GRdYEYUAogk4Dbh1zvkQecqZIQeR73KpS0dDzCHDIYOi0VXlRRSyJeB8iiDCjgGTFVNQ1RZSBlgMiYLpR6dnL50cn2f2Hh1ao5U2ulNLSaQPeHwaH1kAIqcfzwhwdHvfWP2lgUetcwwLDPM+roun/s19997X3ADCry17o92AjRDio1i8v79w/RIYjLRnViFXz+Wmv1Yeek3kF43nILYUp1gLLtOUz6RKqUaVnntdmkBZisbW0KZVf5guPxO0kmowXwGjPC/uDnjKu00lODo+8IKwrs7Lcr+ra44qxgHvtSkiLaRRHPgRCFsqWMe4xpwEAdIkx7hMIVAFYl6sopCQCiiiiqT+kDOjFTlllSuRAE22Ftgg4PpvMGCMAOQsEJOGgRx7c+9a5rWeS6GJI8WKxK8sJctQ2NYyMRQaZsEgVhCCK28RpKKWpRchC40yeLVY3Vo0EaTYN/QGGTgkJoTLalVMDS292Uj3xxDMcTOanRywx0NokbpU1YqzrgEeoEkWGoNRiEocrsqw
lgUbYxfzIZxQKh6JI1sVwZS2bpsA4TAOlm6wpustxP5o3p3+SWCekyBp/Ni2QF/j+EjRgfLIbBRGzEAhTVjX320XZvK8ChqkDAABIMebtli3qgNKGuyiMHITdVv/kOD2eZI6CbqejlC7LkgfUAc2ADhh3delhDDklBAuhvNAjGFOAOXTlInMG+YGXlymAzjkDraOIUEqkUQgSBHU2H8d+AIyt65pwoqpGW6eM4X5ImAPQWKGN0YgCP+AGG2WAEEbUElBMCFRGWOMsQAQ7JSWCiMc+ZawqCgzw+wUl4ADBGECLMLYGAACdtaUojVMAQ+cMhdDBxudUSmktNMp4jDlIEPatqhB0ke9DaJu6YRyxHq+FPpnlkzmMOGeFRBhZY5VSjHjOOIgcQQQCSDgVsuGEU0LeJ9JRRhBEWllIKAQQAaesoowHjDSyAU5rjJxzUmpCiOf5zkGlpbMAYUIJR8A0omrS0jmIuY2ioGkaDDDGmFDmnK4aYY3hnCiHlBDaAoYcMNZAhxCW2kIImfO01aVYEIKcQ1IrgiF0ECgoldRGIugoghAiqBUlRGode0w7aYUEFrejqNG1VrZpNICwEBojjqRh0DGM0ixrtSOEkHXOIUowVso4a7UFFlBrMSaoqARyFFmal3nSSRD1gaqJtYxHmDAEQSUbiB1nzAkFMHZSEQSsKSlC1GMQIUIgcs5qIiyU2sYhoZGf5SUlyGHIkB0mPuUkKyttVCWJsoAgixm1WjpoCeGUsaosCWaEfPCT3GRVlISvv7VzueyUdSSnrgWRouXaSgy4E2Vdq2o+rqsMCuMkpdLpo5MTDOjOO2PCW7/5e//6h+zHLw5W33vjleeffC4JI+JHDaIM1bEPGA5uv35jZ7vJWTAElDYW11acKC6id9957YlnnyYMZTtzOnI3Xrn/oRc2MDKdNU/hkIYobfZYIMjYDLwOhCAH3KF20O6HCnY7dTOZDv32t/747aUz5/otH9NOaMhlH/7q/+Pv/j9/+Ve+97u/Z//u252lQfvMhePtfO38eeT05CE2su4MBslyLKd3RzsjxLxLzz7Xb/und7afvnBlCeNbO3cgAOlcOGU4RceHeTl/4/FLm9curb75+lQrXSHZavc24LLM83fu3I+iJG7Fj13ZvH+waw5m4VKPxdxpkLri2tnNa0+lyxcv5Hd3Hrw9zU7rrdUz+c7pDDXtTkQwlwZobVYGPY/45UIep+WVYfsf/b2/88jWxbPnu5/6vo/+0e9/USlDPBQGHAKEPIp8UMqp2nmrN9yw/eTJ5z/94O47TT1JR2kUUieq0cmUekxrHCcJjaL9VF197FkigJJpPT0O2ht86DllzHQmDw6ZAs5a7HCeVltPf+Stt08fOT+oRqIYS5NQVdHXvvmVj//8X1oRJOmcffcr+2JSX3/l1u6N4/VLrdNZKkgYRujsRq+cFE9fWz/71OO739y+/q3b09RhgqxWpQTWoJkBs5EZtKN8tGh1AeAehBTQiJONn/jzP/i5L/yjy488enQ6KR0ASpeLIvYd9tToYHbpyTDpB+99dcLHUT0VWuJWK6iVWX7q/I15trIUrXWg1Vl70J4fZX4/iJcG4+OMxd7WymYSR3u7t+thKx/NW9HA6wRHd7Pv+e6feHj97s7d3155ZAWEITzcXcwm76vAAeRFPua8bsqqKn2e5NpaCMqq4H4U4ECJJjMSIUgokUooJQGCnBBZF0YpQkMhJfU9BIgjGgForKlUQyEJaSgaY62pm6KmmHCCgCOE9WNurM4nY85pb7Akhey1or1TRKh39szqhatntw92pVEE4163YwzUDoQ+b4QGDohGYcKipCXKdFHpJIg5Nkqausl95lMYMggBsg7ARZbFcdjvRuliNJ+P8zrvtnuMtWQNGSDayV7gT2ZHiCdVaQFwWTVfWl2Lib+7e+j5vuVNU9UAE+SAMjaKIqkV82mil6azIokCY00YtU1tDdRaK4QI0LZRuWokJEDKhjGez2eM+xA67lNCuDa1lM4aaZHjDhJC6nomVRXEQV5M/LCvpCzzlBMY9Xo8YLXCo1ERt1nUbbE4lIiaSrcGy1Jo4Jzz8drldbGvmarKvIaet5hMPRwsismla5/y86233/nDp55/UbcGUXf9ePdOJ7ygbeVU1ltbBgyUs8n04T2O/d6FS8H65v3xyAl42KjuWh9hjWlEOQAUbV49P3pw3VPi3q1vGSGItV4YtXvk8tXLe7v3fQKZHxAGOSOMwaJcDDrdrZX+ndvvpfOM4+D69dvf92M/HQckL8rv/tQPfOuLv7P37vVHn3/2u7/7x47Gu69+5Xo82DxIF9/3F36g2n5oyuLF5y5/cuUjv/S3/tt/kv3j737uo35MP/LhD738nTc/9dEP7VTF9nT+v/+bv5Te20bjh5aJxE8aLZY2N7Vu5ofp4c7huWGfGn716qOEoMXpOM2a4doQcIRkpZTevnOvQyMCiZqPljcvTY5uQDP/gNJSF/M86y/3i/k4igiOCA0YAZZQxry2VuXNg3eCnufTYDreUxDpBlKANQezqm4n0azW06ah7a5m2ouozxTgICTEi1hdTqlDDAXYFMDWF9Y7/Sg4Ohm38OzSpc3RwY62ta6FblQURiRi43zHA3Z9pcMCL0+L09nxeHuPoGRzOOCYrKzFOsSghfsYHz+ctmhSG7Q32ScIc0BZgON2LOpaTDNEoe8x3ulByo0xiHDu+0cP7j58sA8B8hkOfN5Il8TRWj/udIdbK2svffUbC7BQUlhnrTHWOkIJ9TkEEAIEPdIOeULgsNUDGgBlg4CcvXLeQ/1uCDAsERIBsNzzWlubjmDOQqU0RpgC0Ip4nmJRNA5WaX6PUlOKxenoNAy6TeYNlh6BjOrcEGI8aDwS+KQlbClNngvn4+Hq+rqmTbDcObf2odPjnZtvPGQQPnLu3OWN/svffnX//vTx586c1osEBsfX3y4zj2Ls0ViqUuayFw/8jm+MAyaTCq2uLvvo2fH4cOckVg3wQ2KmxhbuxvGNqBOHMT09mXBeu47dunwJY7k08EHejMZZo4qqmFeLeWt548KzLwg/CtP09lc+3wjhUUQRYcAvG70os6QzcDBorZ0Pl9dX1lYjzpvRgakKS8OXvvDS4f1d6GuG1DhdFKkIIy8yBkNBBzgVoipk1cBqjBrIv/76zR//dHzxka4QC1uVxMeIUGUkwgyaQDsOXWrkCULcmhrgrsXnOUVysV8ubuLupjbx6DR6ePPgk8n5lQsBphgACKDD0ALkrFQAEOvcyenx/v7uB0EKpaI4avX5fOobhZLAhe3urVfeqateywupv4bCi5jjeve12ThdP3Om1W4BoJJW+MabX716+SqmLTlf0LAzurdDanu0fdq94j/z2MWje2MfoMXp9PXFSbLRK6eL7XfvnH3yiV43KEa5ZOrsR58crCzde/t2+dIrjax7rf50NFfbB2trnbq0SjQU2v07uxvL/WgJyyxVUT9VxioPs/gHf+4zr379fsvx3cN7o+P06Y+80OoHb35599qzj2HowlZEgvPvXH/vL/7n/+V7f/plRA
LLmuWNFch8HwKBIA/C+WSXZ6i/ehYGfp5NolYwXB6Mc/nKjQd3jqtJbmqjMAaHR4vPTV+LfK9xoChlVQtIMEEOAIAIBBAriCaK/sbvHvzcX/ZRq6p8QLzRD/yE/rU/KE5GPQ8zqx2EFDqMEJeqBshBB4xSiFGMMXDGAYghgtA5aACwWgvgHGFUC+mc7nJ3bb1LKQQKERo3pahrKaXmFPqhpxuJceAHHQ+B4bDfAAuMzXNXNxa4mvkAAUyJsk5ZVVRCEmKXlz2H9f2xItYxRBBkCmjMeRKvVePMlnf9dp90gLNOaF3Ox1HIBkmA1GqW10Uuy9GoP2g3RVnVZGc3dw1YDyPk46W4O5lOoO8trZ8txhNgrTZNSEOl6O1794HXJbFWjVRFGRLiUT5cahd5pi2zvmcRnUyl59kwauvGOu0ssMQjAAECtc8JIZQh48VECDNR9nQ3TzILkAUIVVktJScUQ9swhlrD5NLF5ZOHkzCEa/3l7RvynbvHj637S0MSx76VEkGSzqdrS/1f+MGPL04Xr77zxkv//1YRkBpwyD1fNrhpRkFM4yBUpD0e3WY+7q+fm5yMnXWEsMGQ/LWffXb6j7/+nbvVUWoczh0FwhQeI8AYBJiWaVWLHBFQhjH1AMayrqGDAFPKk3fee6h024oD4llAUJObL/7J7ZkcHk9vqy+9+1d/8cc9qntbySIr+sMkOxmNTk6574mqQCHNy9xnfsh9zUijKtXUGLs6k4jwJAyNsFpqQLBtxKKcZ+kxC73lzXPWAoCV86hx7O72yI/bldq+9Pj577y1qwEJOC4q4XlcGWMhY5RSHrbapCxyUcwakFrqUF2FrPcP/pf7X3pvvtF/rNUhHq/zLGsHbJE389HpKO2P85kft6yugK2RAp1ktdGurvLA94QoldYYE0DcaDS12vNCU7uiUWUQBABFiLuimTHfS1oDa8rZInW43WslR4c3lzaG83TW5LnHyHS+8HhopWUYc+pTxCDERd0wEnISOiUCHpc19dmyVXNgm+PdGyvnH6eIs3BFVieilCjgjQ5g4DkAVT2Zjvf3j46ni5PllaWlfk9UFcMOtwjABCFnIMqmu8TZmHTbyZOVc3U5lnXEfWesOZruBQ2HGoYey/MZ9ryVpX6dSk1o3F/Tmk/To6i75BzKRKUo5D5jAYOIJO0gO0iFwwa3FFw8+uxVaQ95EkJMm7LBXogILoTDpJLaaagooyFdVgY3WmHHEG8rla6sxVWudo+O/VZvLnXgcwhcVc4o4fUot45t7xyp6hg5g7grFlJpn7QGG2ubcRD0k7zMc4pYt9dxoa5r3VQfNPPTvI7joD/oKmuBURaixbzAhAOMRCOOp9Oj6QwSzj1eVw2yrp8k2EdFnudAnd84jyBLs4xHtBJCOUipL6SAGEzmuUeZ0c4iBxGOwkArpWTqIEIQAoCyulJNHXtcU4c90I4i2ah8VhLGOp220DLuBNCRyWlmjGMEIgwdJoySmPrKzB3BmBKMQFU1ABOLgJ/4FBFrDHAAImSt8T2CKbbaWusAANZqY0AjBMKw0oIxHnBPCgkh5J5HmZPSOGcdAoQRoZVRglGmkHHGIkQwolqaKPD9ABqIy6qujMhSATEAzmljl9oR4QxZo6012hBKIcTm/TCLNU5DBBEl0BiNMCYQEYytdBDSUhTGOoqINo5gghkBAGqjldXTNA39OPT9qhRK67wSURQoqa22ZVkiTLSWUknGPMao49g6a5yrykKIJkji0I+qvFJGQmi0MVEQZlUDoMMQAAsYJQ4QB4CUEiBkgY3jsK5q64BUyvMDBHEhs6aUjRX9Xlc1SmiFqa+QE1JaYL3AIwHVyjBMtRWcUQiQ5/PpYg4xwhY2QhlrAYbOQaWgdRYTiCEtqjyIQ6EkcBY656xBGPucGiMhJs4h6YwXesxg4yxC0DjAfc9qbe37/y42igPXGINQ2TQ+p57HEHQh5w1SylriTNtjlEVZWQNEHAJCmziKgHVKaaW1tYZ7RJsPiF3trbZkbH35UefgwzL7oR/9Gzc//+vIqz1sBFDEWVzTUgABYdziBom1s8uHo/LSpa3LG/H27en8rdH5D630onZNtjKxMFnWR5ixpJovZIYRwjdu7mfA1wj7rVhpuTSIqukB75DHPvFkYRyP/P7mJgQ9vrJhq9E4T3EowpBM56cPD3b8AY+SrtOOt7hY4LIwvWHLM8Xxg91XvnKjGy8Fw/P9uOOkk/PCpqbHop/62NO/+nd+aeWR1R//mZ+aL/T4OK+mJ4u4pL3EC3vbt+4gzq5cGDx8+7Zz5tFPPLN7aJQRvV5fV60LH3qWua8Q+Oa7iwcWIASI4Qj343EtyOl885HN7npr96S8eGF958272WKxebF7uD3DGS8yPZMxtNHFC5vffun3Rb33iR/6yBf/7Z9cuHptOjsyVTWIvWg16SRrkwYcLaYnk9q60ADQbnPi0aO99JPPv3B6nCJkNQGj6fbp9nT9whOEbM7G94ASMEEutLKqCSZnz61n09rm07rYe+fotmjyMIpD7BtdS0R4JyHY+THdOdp/9sWf6z155nQ+pdl7dGjLMo87CHgdGgViduKtrXnd8L0//TrgfHj23KSQS2e7tMdo1CtTpAwA1h0c7IuTey0F9bi+dH55G5TX37k3Fvx83NEY3n145Ah0PBrN5qezLATHndXqV//JVzbPdYrbZUOg0QAQKJ3r9XulmP7eZz/7C3/7LCExABAR2pQi2005Dnd2R37bdwAiZweDvlZVw60o/XtvHrW6cnVw8ZXXTm0jGWXVQrZ7ngfYzdu7wjBFMCmno73d62/fnsty+exj3/XpH0AOlXVuQK5RUUrmLJxM69ixzctnKnj6+s2X2y22l0/tZL6+vsTjD/Y+ZK0Iw6aWjLHl/sb1m3fj2Gc+2Nzc2D04RpQ74KTVgR8RwBzFQkunGyUAhJByaoFBGGltGIHAKYSJUcZZhFlkIULcUuMwdNIBRCmgsMwaYx1nBAIkhMnLMptl2JZ3t0c88qUzRTqzZSWVklJKJXjs10pygMKgXUHR7rbG40U6nxhVWai7Kxvp8T1EKKUBAECbqlH1oNOrC+VTjxFSNtJZhBH2eMsoaDW02lSyoQEyRsdh2yHoABFK0TAUyp7sPJzMp9eG1/JFhiE2yhGOk7BlnW6qWgIbBIHkvDZa1YJ5kBO/EXWcdLQR1indKGug73laWGAMJgQCAy2y2jmEEUTWKc4pAEhWtVMKQogNYM7zUMIUMhrkeaqiEGEuAM41BFEH+J5Swo9aURJnx9N+b31tfe3GW+9An51MiihuyXm6SCeBDZNWCxNcSAF4yXrDpz/94614cDTJUmOS9XNNo6VR/lJvlpZKecnSWaiNSKeTvffw9DCOaCNp2gjLKQ5QWuSmBp2ke7LYn44OWgGcHeXrZ68WqWeNQ7J6+8HDxy6dlQuxUPLKE0/c+c5r03zRGfaNkDv7J51uB0X91fVzd+9s/9p/+M0f+zMv3H599+pVdu38Oe2HFJNWL/bQ5tn+oL21/M7dk8m4qHYPx8cHW4+d78DwH/7f/q9/8
PUv7ZTpACZ8hqtTc/21O1mhcKC+8Xv/ol6Iew/eeepDL1ITHe8dw8Av6+ro6CRcXh5eeCSYhY3DD3aOawkaxY93dw7H+z/8qU+kxzKOOw+u32wPe2dXL86m4tc/95sf/vAn/yOYAqytrTRNCR1YWt8AUXtxeMgxOPvYNZFWJ3vzpkFGQYhRgNtC1sAi5rPjeTkRqoQqYUG/04NhD/nRoplxVcbM7yUMEpbVeSPyusyW+j7hjITBg/1jbMN02hxO32OcCt04iyzgs7wBlHKKMbRZo/S8bpTxk6Wf/qkn712/xx3yuL+85JewCvvtw3unfd+fFdUkraSFygHGKFEKLLLVobfsxQwjgNE4w531syz07+8++MyPfH+dOgxfdzLf3d31KIqS6MmnrtjF/Ojw4I3vvFZVTS2lVAYCAAHCFCKMIEQQIAZx3AoubQ0urnUrJc9eOV+PU2fxSpKoqkLAEI8igus6U0YNlvqE+BBxxBgm1OPU2G4i2wWO9h5+SajTpNNzOLxy4WOns3l3K8ZxJw9gtchXozZPYgc4gMSLQ1jWYdxK5zWmcC4NRnC5210BaHxd9rsd2vXfevedaaaEAtU3t20ul1YS3o38JKgOFmnZXHpk9cLZZ77+9a9uknWtG1+d5PNirlVVVzT2D3eOqlx67RBZO59O502ZQ9Tx4zIraiKVcHt3HnQ6/VbXivlikY/8iAUeJxtnyv6ZmvehqtPZwmEaBKzT9pTGnhdJ6FcOtM++cHx3Fzh+dqPT6bDjBzvz6cHxg8Nb9x64uoJGEuQEnl8631GVGsSgE3FgUWeZrT6yNp/AV755/MWvHwfRUmf4yI371ZUrMfQjQAUwlbMQAoYdtVJBSB0IMOsiaEVWQKJR0NYQw1aO8Ix5pj5VR4f+3mQ4SwdxCoWU9aJpKqaN0Mo6BylFwOFKiD/+0lffVwGC2tXTg1spwmBppXV499bypumudP3e0NV5d7Mvyn2c6zhSyXDZhp1RLovGQdRcOneNkTBob+CmEQ4BRhtaRSu90+OZrkU3iVnMypNKaXOa7sWUassxQ/PZ+ODmnavPPXramLqG/a1HHvtEeHD/raPtQ8xY/8JZ1WQbq92HNx40GhYanM5TXbu8qZ599tM5sLJpxOmcS+SHg2aRBYzODg/rkwfDc+tPPf+JQs60qZ3XeXg4unzpeZWBSWrQ0bQ3jMJezyqThGxq8iRuWSN6/YG1RorK91sEebO5/Ne//fobDyYVQtpVBFrngJQ6tXBW1dZYhgGkEEEglUUAUkwdBABRTfBvfDN77JNnH33EaupUI5QGP/I90f/ybxzCBLxfIDTSWIUxAMABCBinFjhnHQQAAuAsMA5iTIwxFFGMDUCwbYvlZfpf/tR35dPjohQRjzDhiHijk0orgCCoihIRAAlOeq3EA9akCGFCqdEWMx86aIBVSgmnpWiaskbAhygA0Dx2eRPzk0hDThCicDKZschrBNo9HSdByAsdCwMQxlYZpYtZKSoBWTLYWG33Q/mW0h5cWmvXpTaAvXR778Ur5xZI05bhhAJkMWzCkIlFPRo188w/KGvNuVUVQS3WCilCLZ/5AeJo7se6v7Zi406VWljBg/19YNM8rY0VhiJKY84IgTkgvsdbuimEVJTjtt87Hs1GabMomySOfIR6vYRC2+7EeS4IhtmiRISU06LTYefX+w8PbKVMXWuIndYSBjwZLmHOz3coWe6dv/j8P//WH/1vVpGWUski6Pib5y4c7H0ny3eRoEH3bHtwRUkCSdsPbmtz0uqjPE3bS9Ev/+0f/fn/6tcOT0qCCbBAGeU0hhgoVzjU/MWffPTZ557542/c/NbrB6JAmGOrgWoMZ2jneHzwcH7uUpKmN5TGr3z7aDZD945O25GnVfMbv/H7SwHqtWLCvLJwIp94Yei45wF6uD9d3ljBDDsAnDPQwTKrCKEUMp8FTdFUjSAYhx5N09n09L4DlnhcqUoI2+52vZafzlJdcbqceN6dne0RcI4Q2hTSiwIpnIbMYqRrrXQ6iMkzH7mY53Pmc6ds5lb+/t/76tt3lx7derQpZxLMC6WVKk1mLm+snLn2yOH2YQi9VhQWnKhccuJjklhH/ICW1aHPmc+pF+OqSXnQ+v9S9d/vumfnWSe48vrGN7/vjmfvfc4+qeqcylVSlUJJlmQkOQkbcGgbjIGhGaCHpoEmNNBcbq4ONHRgoAmDzRBmbOzBlmScJMtKJVVSxZPzzuHN7zeuvOYHiR/8R3yutZ77fu77WcxNgEvjKwSd0chJhIE2xWkj7gonIfRRs3W0KNu8c+Xxl06H15uBo8bxMElbjSI3RTXvN5vYk9F8krZazbAtqvkk20+CJrbtVtQrqgyizAotRNZaWkpXLjqLdNXAaMWYNGlsVtUx0se3b76DAAqiASxDztZuvL+zud4JoljrBUesmi9owJc6m4vcbpy7GKTnytl9CsjS0tI03wlSRmRTuRCXAWZLBFunjShmQhe9tU2n8nk2hgErTLMFokbUjuJA5AtrPACB1rmWhQd2d3e0tbZ5bi3GZiahZ6QXpTwvcsooRF4VC6mkshXGScRDacH2ma2Dw72N1U2IuSgtI9HWGipNHnIf4oUSohFGabM7z6dAagAhjbgFBnDVCtM6Z56gmciMmbbb1HukBGTUiuHYWLq2svpdCqKYW6sroU/GmTXOe7QoFKYmsARAVGa1A4BRaKEF0CKEhZAcRgTFmIJSaIINCvBsXmaL0jpfOddqN5Y6DU4IZwQioCrtHC6KQlQSEcwYtUoD7QB2NIgWRbkoTRxyAj3C0AMCPdbCAO+pY8ejofU4CHkY85DgeZ57bJxHcciks0ZpY23AsDGGUxwGlELgLaylTOIIOEsRQhAKLaEjhCHCqLM6oMh6kLBIOWuVxQ5ijMpaj08XWtmtlSVKoNQaAKClxDxAwAchc95hzAnDSgpvnTKiE0eIkLKWEAIEIcEYA2ulZJxDDBCE2ijngdEmjBNMiNTKaocRJJhg7yzCEHqEPMYqibnWFmFijIQQQ0SddbI0nro4XDAXzwABAABJREFUjD30JIDaAQRQHMec8TB2sqyAo7O86DYjCrkHDkJAMXIOWG8JYoAi6qFU0jgV8aiWilDiEeRBYI0OWVLLjBKsrIGIsCDJi8p444nxFnpvIUGlqqR21jsAqEVsXpYEIW2QtjZTtfEOA8SAZ9Abr4HzSUitcshaaQyGBGHqoMul4IyFlDrnjXHeG8aocpqGxBhnjIEeeucQhNhDZbT3jlEupMEAWmswxt4BpSRAzDiIIYXAGe20thRiZ61SEiFW1YogiAHKa0URQB7oWiLoCHEcWeuQ1MB4LLkBxlGCQsYd5AhjU36vq8hriaDcWqEn797vufp3//X/2WNxO+7uP9hTwCgrCQsIRYNeq0T2k596jrQD/CAbl7OkYKTb7W33Rzo/ONA87va7vfL2UX3zqBm7bj8q86oU5fbTZ1+vHrUifOnCRtg5TdsR8CbXNgpTCtn8dGbnWkzl+auX7jyqomCbkY1srBceeLYivA3CVj6dqMDXpox5ygjn
YWqTznR4cO2thXDXP/qk+eiVC9O9aRJRSfON7eWNwdqXv/yNclph3kE0qvMptIWTMrNCM+UVu3vz/ng8V6RxfDLPh8YzsNw7U5fz051DUuMf/eyPvfypxT/9F184nc5yozQqKmSqTHfaxFFTzOU7b79tF4VyAC0tq8qPPUS9pe3Hzp8cHd69/erdt69Bhx7fvNxgh5BsXbjEdxdvhhyhPn7l69fimAgFvUI7dUYQOC0gCAgB8LUb7370ueebMXhwcPjBS2vvfO3Bm3e+unH1o3fLMUqdrGccaWXyvOgd75uqUqPxQRT6RVGeO7dc63zj3MVqXjw8GQOnhHO6Mt7R/Z3bH//wZ4b7Dxf33q7y46XNAVCWqhKIDLrTWtbasu6Fq0raxvIaRA5bAYCKO0GY0rAd187PgBqNKp2DxbyKQ7Bzf384n+amPnt2azw8ODnK+kn67JkOmPHFqPriP/u/8ukJ0OWP/vFL1b++PbvlZInCJKgqczIsWiF+++338sVpO92ABDugw7i/vrHSbUPDmcagKgSxrmB10AgvPX3uZ3/k8td/8xfWcONgobFG3ZVOXS1yZx9bXuaq+uizT2riRvvXv/PKtcOdUSPBjX7n3q3rH/vQBwZLg3KRz41cWduMgsZUjJKErmx33v79b/yLf/UrQYOfZGIkPEeol+DDveK7FIQxDyOmpYYATebTsBkRBIUo949PoLJGS+MdDahTRliLCWAcB7SXVyUlGAAn69o5CIHVUiECvTMU0YhxgJ3QBac8juO8KKyDyioHCEYYeh8GEYAeYx9GQKnAKCM9lBZMRtnBg4O028BpsrM/0qWa5XmnM6hLXciFQQgQpFRFCRNSQIbH092IU0Z4XmtAsLMgDQfe2qoW2EfOk2pRMMJLW3lMIHDQI4AApBAgmhcVQ4iGRAgJKWzG0enhMUQIYoS8DUPGKPEeicp4JIzTnnjnPaYoioKiFBELnLWEIGYhBF5b5bxIm7ES2jitrVVSUgQwoRTzoi6c8cB5bW1MEuC9doaHFHjgrHTQYko5CZSBadTChJbS+ij1AYqixvLG0nQ8K6syP8l6jTbw6mB01N9azWucFbrT8g6DRqvFKMmKIWKeBv2H998D1TDoD7KFAIhYqUuVYcxKIQBuYpJoZ/SiiALKIjA7PbZqv9fvAQBCxIq9Patsf7nBOK9ne7s33tw4fyFbzHAQAmyDkEKIKeUL2dI1mw2HGvjRbKQcSJuba+sXqul4On7QbHdOZuaRuRf02p1xyyj95NNbD3buX3x8/Y3Xb53OTja3N4t6+syHn9jZOT27tRoPBv/x+hsf+NiLX/vWmw4cfPzTH3NpP5dzmS9oI/zEH/nIfPf4YPyw36TX33ur29/cfu6xCnnE4fbzT6piVmV5f2Xwa1/8/Ftvf/vK5fPt3tr6xScOT09j6N55/R5m8Zvv3Lty9tK8kmee2l45f2724OD+o/vf/9mfO7O2BcA/AQBUhTh7seGUyo0upA+sYj4kzpMaHe2NpHQQNM9vnMHeTcf7MM9nWSZqU1tUeaYKZ0PbbXNnMdcmkIvAWU5AwLiEHla2G7FcybWNjfZK6zgXp+OTFWZznXcS4lWWNCnmrMh1UdYBTygNpdO5LIHFB/t5Z2P5048/d+Wx7qO7u3t3p6PJLErS+a6oFmpprdM7u33n91+x2lnjvAct6jb60Qc+vH3x6kVqdTYeffPNgwPhClPytFNkWmbVZ374M++8/upczp967olLFy/fun/8+rtvZrMpT0LlnJaKc04pNloTQjAlCCJqzdOXzzz/wasBxKe744AmpjAxTRGLdG6qbGwN7fbO9TrN0+M9bQNTYamzsLVKwwBhah2QUrXSsJE+BlA1He9Ynzu40FLKKiOhz4/eePfkvY88/zwwZTJol5Wbzkba6TDuYhAIsX8yvLO1/awWcDGpgSfrj720tb0s6+ndm7cqYcNGp512m3EuvRq0GnfvPsQ4TlPeI6Q43bFiBspztsj3xD3oqaqLtNnmvOuRIIGMUgJjOFVeCGuyygu3vrpSK9dpLt2+/iCNjgKo5vMSYzSd5etrq921le2NzV4HeeVe/ea1fDKCQNV1rJWGob/w0U+hznpdh5+9+iGYP3z0nZvvnhxoNQxaYDafb63acj6LcX7xSb52ljQHLiDdKGSMUhyEBkFn/fYz6098YOXiE0f/5N/eHJ2yX/nNmtvuZ/7IJeyOWMAgEQQyCBkyHgDpAcW4DXQe8MRDblTmCfA0sTWfHhT33x9ORsnNG5MkfO8jn7iAQvjGN47ffAsazxHFQAHnvLfeaP/w0dF334JGkxVHY1Mr4225WLSW1quFkXE3Tjr771+Hvl69QGWVA1GunjtzUNSd9Q2KSTk/3lhrLUZ52FzVbJ6dTien49Hp3pntdTG1xmXjveOjnQpRuLnez4tiY3Nr/8Fef4nMp1XAzHDnoLtyZnzzwDFEWsGP/eW/8MZvfv3am2+/d+NeKBR/MsUhY4S20qTTDFLM8unh+P6N+LEnYG/znW9eL+aHtw/2VlbSJ198OrlzdHDt7U4CLcUk7s6K+b33Hoh48JM//Meuvf5lR+frZ58naF5VU1fKQpZJd9Dvnrc6RU4YVUEX8Kh7b+fk62/cvrk/rzQoikIbBTz0AHrvjHY8ClqNaDzOCIbWAecRQtB676xD0DtIKh//N3//2l/8yf5n/xBvL6lkvbvIcmiJc9YB672HwAAPPYTAQYSQB4AwaqyBHmCEEELWeUogIA4YC6wYNNDf/cs/N8seFYujpBHEzdRLvRgNUZg4bYB17UYjCG3MmMeBd1k1zZutBCEnpSiqktKYYOyss95bY5SorTEcSSdki0Cqy+0U9pvdSgkDwGJOVO0MADhtVdobY+vRnDmJjNDWBkHoy3JuT/pnljtJ2mon7z/YwTT0xgcxuPjkpdp70I5H5eLMoOWrsphOpAbQ2t5aU7h4OptO5kUIQJ2lK4MABwA7wFLw+KVtWdtFpvJcU5pAbrfONI5PR3FEoSfCm2IxtZxHyCNLtC2t95hQBC1CftAMndMRTaRDWGvrXBhSa1AjCqxD5RyAWtlancxvXbmyvegAqeq6CoKQtAYdSlictK21VpQBh10c/oEAWhABBL1VxaLOKfTS1ygIXOCMDXnUcriZNDZqUU1O98Mo1BLGsfzb/83H/9bf/928hABCxpDVBkKEvP2dX/1LVy8AgPTTzzz103/69tRG9UJFacq4BxiGSfyv/sOv/eIv/cnd3/h1D9nnf/U7x+NlHnXWLp9//atfM3vDj/7UJ37n138XRenu199Y7rYHZ5+aTCbLDL78/KW8rv08t5pQiMM0jNq9opBvvX/jwcH+R5+52m8kDINqPqrzGUSy0WhLCUQhKuF4iDmFGGOeRqP9nTjmxxP56CQz3jRSkjRQBP3acvjU1e7lc0GvARfz4Qf/UMRxhSw7PHJ/4m/+f3vxRRQkyp2gQMahHw5l2El5wo4nE3x0fPmJwfT0TupWozCq68A773ShlcEYJlEahqEQZZUVnDOEsHeqlgUmvsEaCjQMtRSUdTG
ZE40IAh42GhuzEmovhbeI8+F4J4hSAnCDp9CWnVYvCbm2tIV7mPBOc318YgAPkIUeuUJkC3XEsDRCUpbuPHy1KIrW8hZxAcNLhwd72u56N1ZmESSxBw2nmttPX0hQu7uqi9mN42KPBfGgs6xk0O8tI9reOrfx9rU3WOJWumR+WhhQNdvBeDJFgDbTwWDp0nRxyCLqEKVJTNJkaW0rG5fA5gFZ3jrzgfHO2yQOPASAaoAdQWQ2mSmtrAIE0CcvXMR2ZG1NqCuzTNcQkQB7bWUOYd1pNxYFAp4Z7eMgnowXadzE2vWipYWqrGOimqSoOLO8Wc684pzSZl2X0Nml1XWYpxCEp/s7rjbNbsN7rJzNFlPE/WBpkCStsawnsxrBQAl5Opx+l4LJrDDaaaPrWkHoHUSQwGYrXOSVrLSUCgCPvI2DuC5K663zABroLHQAD8dz7H2YMFFJqyTjASd4pRl2k6CuJXYEeGitK6qSYeadl7XSxjujlZYAAsI5IrgWCiOgleKMYYgcgAGjAQ3n0zmHrHQAYhIQms1HDHNEkPKgETfnRQ4RqKuqGQSckgCBWhQhYZhHlnOPQFYapy1wHkMCMfDeQQC0UtZoiAlFyDtIIYIYh0lE6koxpSmACFmtKCKUUucAhNC67zqNyDkVcEoh9BAyzSCAToKUMUKxrAQj2CpLMDLGeueQx0ZbQimlgai1BY5z5BEkjGHgKcFKG2UMQdAi4D0Io0BrBSHw3pd1zTAOAj4vZ4ggzrmqpbeOcmKVUariiERR6IxtpZEHllLsrJVCGeMhBIghzLC2RmsZRbFUVHnAgxBBKMoaY8IQkrpCECmljPWMYQAdxRiAQHpgjDNah4wij72DcSMdTzIJQaOZGq2ctt4C4kHMwqquvJaqdsiDkEeEIgU1RtA7g4E3WlgAEEIAY+eA1poipJxCGlnrCcYWW0wwJswah72DhDkHgPdllRNCPcBGe4KxB9Za54zS3nJEofeckzDgylqEkffOWc8YCxhVda2sCRhHxEVBoJT2ACZxaAGyea21yQoVEh5ynqbpIi9ULUX9vRjm4c7EKZccCLjwXcw0bOweV/VyNCocoagUwCOxsbUcN5L98eJ46rHzQbc92OiBosIJ+eDjHyGU+ThAPl0wVkAgRqet4+kH06cKoUWhV7ZWv+/jTcGy3/vKd5ZW8ZUXBpcvXc0zYj2VDmhsJ4uJrNTv/c7vLnLbjZmax6RFdSdwgDsvSTPmdYYc2N46I2oMjJpNbEjW/9Rf/R8jBf7tv/rXi8P9cddvrG0EluwcHjGmI2L+0B/5SKVOOLJGGs+dR8iRoM6rpfXuhbOPv/XKbzdXN1qDNTeVMfT9Tjs/Grqq3lhpi4ybWTbodn7wpc80B/zB8NE3335X5iJsN+YnIzGXQmqwmX/k+584fTg73jk+s3zm4lOPzw9OwqaPwAJVbnNtOY2e3rl2XOd8MaNr6Byy74Z9qrlo9GCa8lJZB2hdmfFcgByhiHab4fG0eMs96KZ4Uowm03GUts5f3fitt7/FGOdKFIsyDDhkCKXJvNLQoTjknkoUkmm2CGhcLUoIGyykSo0QUE6Y9aVmfvjmN/79nycmWVsJZaUylMVprxofi3xqURZ212Ynk97Kcj6ZK1nFLZTtnxxcO3IqDwLknDbeKQXfv353a3VbV2J3/2T/+m5h5WClmXZa+XyoSkmT1r13T3x79aXPvvTWf/oXEgIehNdfv9+ISRS6ReGyUkIPlLSkG0ribt99/7mVJwiIPOWiyojGtpgRmpbSFlnppIMYLmbFycHJ7re+tTlgS+jCe6/eq4TGi7oRos3NTiXszumRA9/kAXz3rftb2xf4+egnf/pHfuvz73717VcP997R9bY0dGlzI8ttVdbOAlPmX/6lV3/vy7+1sdWa5nU9rJtdurLSXJyeJGnzez8ihClC7V6jyOogDC0UnVZUZbjIy9UznYPhzCuQNCLvkDYi4KFUQDoc0XSWHadR7J2DEHqAtDIBjTBCWmuEHbaeAqRlLbzGFLejNJfCASi9aCZRJTIPHEF4PBwlSbO9Otjc6Q6Hc+rg0spKbmrrbbPJJY4jzjGynEPldF3J0ekMOU940Gg0hDNxFELhamkwJg4YAEgtrDYKQQoZhZgz7pRVJOTOY22ckzVwstGODUDGgCQMZFV7C+KI7d5+WMuy0W1XlTg+POn0EqUshNBanU2nSaOVNIJqXuVZTQNvNeRRWKqCAKWtNKWzAFrAjcHG1wDSdqNTZEUcRkIKA4TzDjrnHUjCVMgSAmecRSgAADCGlFxYow1ypcg9FEZKVbnSG4/CyaKu7twVVb20upyEYT2fL/e7RV2Hzaj0QhTlo8OTbr9JCMUe9vvL49Hw/MXHhqfTykCsNAAQI5DlCwR80AhjjkVVIgQxcsBDiCALU96WthZ7B/fm44MLj38gCeNaSr0ocnmiqqrTXh6PFo8/duU4K4yhVgHvVKvfwIo4I4Neyr2zslxZ6o5n5fDglndgY/3c8cPdNO1S5604ee5SKwTlvYPh5WceK6rRxafOUxhKqcZTdePmiSpL6gpuy6e2z9558O4P/+CLb712Jzs+/vALT48f3nhw565KI9nEa3gwaK9zLDgHx5Mhbq1xUy6vReV8enr39tK5ZRY3/tyf/nMOuevvfecLv/mfAHdMjSLSY5J2t7Z+5mf/9Ld+6z+iKKoKdXLvnXtv3xMgCPrR443HvkvBYG2lrmQxz0kYBTy4/NhTu2++5qpKVVl7kISoCTQV0o6m45SHQlRSQwgBNTpl2HoolcLA59lp1IX9hBKpnSxC146CGLJydbU7pXi5vzzOR4Pl5a3+mbvf+iYJUbMdOIUoBzxiPGZsjpTBk9Eit+jt25OExtOpvj++VZfiR3/k6Yimm2cTFuBiIUSt3VKLBWShFQfQAUgpRVp/+Kml89vN7asr7bVV6M2ZjfDuo9kb7w4vXXmSFJNv//ara6urxujj/aON1dUrFy5apZc7zSTlynAAIQQwDEPgPSEYOecBbDajbsrPr3d7nD/31OWz2y+98druZL6bFzsb62vNdrMsJKYpDnheqfn0LgQl1ZFopI3mBqLMQ1TXMgwZo8A4oWXVSdN8QhbzbJKfjEfHe/v7rV4/DENr3OF4vtptVYXmPMWsBpTpqtZSQY/X1zbDMFGq5kGAKVlMtciq3ZvvjI9OlroJTfn62U1UnMzrxZvvPagzQ6npxR1cBkmUPPf0J4Rsal9rWRjgnnzq6nu3bo728pShThR2k97DnV0OwKAVMN6gzZ41CymyvCJxSvrd+ODu0fFoIQGYZWoh/BOYtOg1feof3b9W700jSvLazWYF0DrtdzpJlEO8mA+NKE5uvXf71q0gcbP5IbWlLSeXrsIPfqy7vNFpLdMoAQ4iBDFm2HvggaaIAUy0PQG4+OwPLZ9deemLX9j51lvDX//qbGdy98f/cHs9Ab6WjjmMawA8xNDoBYDe+gUKkTcTKApXh0ovvfpF9Obbs+sPSwlUoeNvvD17/fbbpajzKhmODMDEWoUxUMoAjAjl/zmRD/JZGU
RxVYswCReZ7567kCxvjRRxnPNuE2u0e+dOb/WMrsnBwwPSjmaHEmvGCFvMjRBw/+E9gFxxdIg0aGFXjo987agVOi+Py/rlH3hptv/+ZO+0ODnaWO5P3n9zMV/0Oy0XB92NNW0ypeYHB4/0cGVreSN41q+cXcl3p6+89g1Xl712V07KvBYwJgCgcjRqXQSlCT/w0udUfre50Z+d7rzx1d9mUVNJcfJoKHzQOjtYbvZHO+PVzbUv/uI/P3j4+o/95I+UqgQGGqkAdAAjJezpyT0hPaAgSBq5oO/cP/7S19+7+eikgqiqCmAtxfh7eTDreRx4D4pKsJB44zzwFGHoAfCAUmqdgcAwSJzu/8Iv61/9bdBpdx+NxWhIGhFz3kFnvXcQIwQRRMAYC5zjHLc6zaqsMQRaaIyhxcYb0QzdIE2W250nn1qez65XwngFjbDATTrtGGMm6wpiyylSIosie+nxS7OptEK7wNV1WQvB4zggpla58kyIijBKo9h5gCmjNOhGASZcllmzkwBg8rqGAcTU1pWBhFnkXnzppXt3rkPvJ6eHSYRpFCydWZ4OVdOy0AGq6gtrvYcPd5JOq6pV2o46HB7duWvSbkX9O7ePW8wkBDuLA04GG53Zwul6lhLYb7W08nWumyEpKzCtNFCTPKvSfqd3fh0YP17cTplpNLjWqFpUFFDuoKtcDWS70yLIIouWV9cUKufzxXA2jWh8fuNMe2X95P6tWTbzEA1zGQKs8goRXxeLTjNdX1rp9UIQ+aokRCMtBecRIQFEzMp52gqdkqoQf0AqglYb4zVAAKBG6yzjaHS0W4/2kW9aAsh3rX4XeuMJCHCjm5fDF1/a/jv/j5f//j/7VpZT45GDHiKiHPjH/+g3fLYPCb169bkPf+CTX/n9G2EAtfblPI8CDGH86196q/Vn1ec+3nhw9zTtbM92JlGz2Ll3J2g3N5cHv/07X6mNZwBWNq5x+O2vfY0ifGeh16L2Y0+tz+vC4qAZpTfvzX/1y5+fz+X26tr3feIjvZYjXmhry2KWBCF1qNFol3UFQM2sY3WmaoERSFOeEVpLlUmFmX/2Uvjhj29BfXT1QnOtKak7OLPVUVkeXm0Vk5s1v3T/sPMPf/Fd6reaabsGbrTIjS62NrpPPr11f/eEUpjLwmi9feXpfT4FwOWz0wAHSinlcimqRrPpUGxdgp2ppOo1H5tNThBBBlgLUMSwcR5o014anB4ceAhhEMmsqKo5MpVTonBqdWsNgCxuRtlU6GLO+YCmqYdA15X3GcXuaHwDUNpNLut64a3LRT3obhJ3PCuyMAkxik7vv+tt4RxANlLeC6kYNsbCOIyrApuiwmyCY8VCj9tE1CgJ01E2jQY9RSJo3NHpg9VlhP2pMdSYibcGAxrABmw0gqCNMDF6gaGO4shhBkhbqKS2C8ai0JNZth+mIQK8HySn9V7EqJTaW4QDfjge0lbzuRdfrMW7xWwCjXbWzxYnUdRljFPKEIR1UQEf0uag2+sV2TzCCDsbJY2sNFNgtrfPzccSW1GXhxAjClHShELXFADkG0lwRpZoc7PH6b6Hxnu90m0enS6y48nx7ixJmywMTw5OEE3jIJovvhdAm5fCKOuBp0HACD2eLxhGs1KIWmttaBBwCL0zuqoCRBAkEGAEiUEAIuAhMNpNp/l3T8L3+2lEEHK2LCpKCPKwLGtl68GgX9XlonaOQGs0cIYyigG0xktpnAMAI8SYsi5pRpBg4P1ClGnMqMcEhghUFPs0aWrt4pjmVWVlFjjLCVtba2NvGcNaiDgN0zAeTjIAMaE4YhRgXJQV4cxa4w0UVjhvCCUIE1XXaZhghJx3ShYYw6Ver9KaMBCh2AoZUGId8MAhhJ0HSgjGCACOMcwIY4QgCIBzYUCUNThsKGMB59Y4qT1BWClJKAXAcsawdspbSoi1XggRcE4hAdAh6BEi2BOppHbGOBsw5jzE2EPoF1lhPCAOWu2UNhgxTDCB3iFrtfHIMka9cUI7bBECwDlnrWGMOGNrIQilBFOlQKU8cDKNKCEeIOiBN84JowiknAXcI+gNRA4QwBHJjUSUhJgwQpx3GBsPfEAp8qCqFMSoqApMGPFYa88pb8UBBJowop1Q0nAWIIy9QgBia6wwNg0iEhAtBLQWUsJo6AGw2iutnHeUQkqh8QYg4oH33iGIKcAMEYy5BrWDjnOOJPKAaG+dh0orFlIEoZbSeUcphtBbYxzFgOCAYuMtBM554IEXUhMHjVVKamM9plQb6wHIi7mUTtUS/+ef0erGIJ+JMtNiMTlzbq1zcWk6Z+9960ajPyiLvLPSaS83oK8cAefOn11dX99ZjFGLSmrCOK2qqrscVxNBU8+jWNRYX9lqXO3Y6w/fOtwVI91qxcd3phKwlbOt1TDGEsg9jfouVrXAMMslwua5H3zp7d94697rQ0pCLfDidHHx8vKBnA1391Yvn3El6rfX6hpGUEeBYhgHLQ4hz8QYN/t/4ef/bpwN/9Ff/duNF1r9Tmvz4lkScBZgngTV0fHhOzd0hFevPmcWkCO43IkW4+Lo1oOT40VjOZVzaQs1G5cbK+eOTw7qKqeBDDthb7U1GY5ivLh6/vIzz21/4od/+K/+pf/20f7JUxeWZ+N5QCgH/OY7D31mNlbOvv+Nmy2MaUxuff69y88/Ph6Nnn7xWUsfq3fVvXfz5565cFKpZLBy9879boMMNpMrz274V4/3dnJVuEYSKWOhw8spx010Ws0Gzz7xdPd8Pst/93feZSubTMj88N5LP/aRb752n/Nut3t2aAOPypcee/Hu7a8tsqzCZOvC5Qbq1bkt8v2kuQUiA209nRzNrCRh4B2tRzPfw3ou0u3HcGsQRVgdnFJaJjHE3ord+/VwuvBg6cl+Owj3TucXrlzoNJdu3nvEE1ZmajqYntm+5FJW7I6NUpxgXajjW7u6qM4urTSSlaS3/MInPvvc9z31u//vf3o4LQUED49KGsb9XjDPpNaKMwpjUhswWrhf/D/+zfmLH+stMw8YDoLtDz/7kY9e3JuHB/dHjU4SeoQg1EaHnHnIr92t9h7eXNvqd+KimNYYOTetFsAAj/b3qziBgLVOHmUvv3jlq/+fV5784Cf+6J/44en4RAgFeeAk4DwG1rKAl2L29s3vRL3w7ZuH3kBRaAisb+fEewbx90IHCHLKhDAYoTIrvYCLYSmkcNbPi8IZDxGuqwoTrGVVIYMQm2X197/8M/P5jbffeSUJEgustwBg7IxQCDntGMA4oN56pURpgFRl22uCA2lVxHEYBItihhCUuQySlodcFKoXRyHSBKCHD+6tnj9bzBbY+ISRWlQIM0IohCZtNR2ySMssKyB0aTPxsmS0oZWKWBrG8aPhDqQ0tLjXXprmp8gz4zx0GFkWBry2OQ8pdsg7jmi4Px/xOG12m4tFkWdFs92CNVrZWhvmgAacYEIRKyvTTNZkfboYzyHQ/9leNpyGVkmKqbUVQQhARCnLpKpl7ZxCABqjAYZlXRKMnRUcU8Z4VdfGKYchwRwZay0QuuZBggHFROeFQIz3+u277
96GmLEgGAzODI9Ptciq2VS3m0Gc9DZ6TlhAKae80+Q1rsqjU91JW2e6xfEQSL/aOzcdnwJdWym9tApagpC33mAvpAIaIEcAQWHKQBRaXUnty1y2u61Ob2l22qeEDUd7nWZrMa0MNMtrg9zS1eXz7717HzKwPIiVz7SxpNQGAKMNi8Kjg9OBDXgIOymGwJ6MTssANAZ8luVABPu39lvLnTk2qsS3XrnVbMPGWi+IcBTFTwTt77x/GNoi4GDjwuZnf/Jn/tKf/5sNvm/29c7kIEcnF871X/7+j//uN99YW01zlSfQfPjF5995907YTl5//VrImmv9ZTCfAS2GxzvD4SmCeDEW1ttPv7T9zLNXX//9V7RjRSYwVV/6vV9rhxhbvHdvnqzQi09+krTOlfb0jbde/y4FSbdJWaCL6sKVJ1qNaHg6jDvMIhdx7BDwDgmBcwtp51yjE2tzFwfMGakd8KIujOMBIRi0IgDNibdEVgBjuCjnKYVrK1GvD+vx7OE731ppJAGcPTqatJsk7rX67Xi8P66FqoxAFCWNdlnbdpfee7Bz9tmnT/dG/UG/UvjhwfHxyDTDqNMJgoSL8pggixwaH5d3d3dkYaDzVlc80GlQLi8P1reWFsYXhSO5n4k4aqyFvAXQJB0siSjsnF07K8rDu7f/1b/4d8hCBXCjERLA5vPMa8MjxihhlJA46C2123GDG90iLCB0enzYW84+8OmXjPlgVWb3Xv+SkIWxcmmtR1hYC2dsoivb6jaQlc7MQxx44GlItJyoqnJG1SqjsYFhsTiaZnlZK7C1sa3r+cZawkCCkS9KyZxPMA1xq9M7O9p/JO2QBhFkpL2cnmTTk1G2fnlrbdBxs9nt2+92l5qVKU/2Hn7sg08tlMmOZppFdeU2ltGVyysMhEUNL5279PvfehCG61l2AmF15/472KsQ4HPtwdHodPd+LXMzmlTLa91m3Jwb3GskNpvWi3ErCqfHM8bQ+Sc2c0sGPrhw9vHJta/v3zneERXi3kPAOGeIIITzhUVRgzqfT+ebSy67eXDtze9MfclxOJlkl8+EH/30863B8ZnLnTjwLIDee4wIJtRj6n3gPAOIAeghRogn1qnLj5mt5YsvvLH8hS8d/s4rx+/ePH7xA90f+Njq2lZIgtoCi6mlmHtbmRwwFkKQTo/tt75y+9HR6cP78sEBG1bWAq28ncwB8EQowCPsoIMIIkS8RxQ6Zy10vjb6uxRQhhqrW3XUiMPYtBlbvug4ZN7JOlvdXj65c9JqJb1uqwyD8dHORntjPD0xINDOtgZnHDE7u6eDXhImFBTzV195U2t05vwF7AsEEbbgnTeuNbgVmbOuCs82HQ4vv3jh+qvXzWJ3DyHcWqm1+fgf+4GTezs6lxgi1u2tLW3/1MvP//I/+YeMB42Oi0LiQvfBT77w9mvXu7vLK2ub3/n6Fx48uLF0sbfMYlK6zmZ3datzdOek2UXNBslGzmT1+PhgMR5ePPusr62cTTnFaX8JQ1YXOacwmw2DtBOkaQ3At+48eu3W3mleCoSyuvbwu+cZIPQQAGCNgdZD61lIi1IhBCCCznkIAHAeIgQBsM4Bq70nykTHI3J46ABIUq6cV847hCDBzAMLPEAYUogxhhB5I+uIegKdMMZ6d2Y9Wev0Xnp2W48Xi/GkmE3TpN1sthXxk5NR2kwxaymZY+1bYUA4s6qACo1PhsBCRiKBiNIIYVrlUy0KjeKkMUAgrUUNoYQWYRYRmkJTOV02Ym6FJpy2L23vHe6GQXDzztHG2mogxOT4RpEfcRxvrixh6k7H8+HJAhvUoIhbsPtw39Foaa2d9Mlk5orJPIjpxvoAQZtVcGdCByH5xJXN+/fvZcbt3DiCgAcYd1mQQKIrWVPUSpKJnkPPjw9LjJ3aO+63W089dansn7vz9q3ZJEMsRI7UufQAExIIo6QXvW7bS1RKAQgkhK2urNlK+Oygf6EdbsTpzIwr1Qwao52xNT7qxc3VZHI6Ywnff/BoZW2dWGmdxYQCGiOeYgqx9LrIGOEhi/6AVISJIxx4650lVSmsawThmlQCIi6F9CT32PGQEYUn42Gr26O4ORtnH/no4K+Ic7/8hf37+yBqxGEa1IV557qkdG353Oa/+8LezvEtGoSD5bY2VbMbQgsIILVk/+L/df3M1n+5u/vwtfe+RbgToiDWxpwDZXcelTgJiDWXtq4SMNwrcxxzHKIvv/3KSz/+t770zS/89jfeziZZCoLBUveHP/fR55586drt9xcVSrEIQkoIIyhlQQyN90JjZhqM7t18O+50ECPUNPKZPDkoS+NXUv/HP8UvfUAeP5j3W6oZSaiFyg3liFERy0rUw8//p4NvfWf28ktXKnEKoOt1krL02Uzn2Yggm7T88tKGhnRReuP7UsyazXZdl1ErKrM8bSYYSmPmSkoMkIM4q4Ry8ygMacynulAOOWuanJn81FcVDmMpRBQlxuhOK8izgibxbCqAT6AnLNIrvbMn82RaO+sXzTjk1JdlHbLYAWqBIFGkSwmAq4TlTgKK5/mM8CpopcruRc1BtvC4yS4/+fSbv/9rnuB6UiVxkye0nSBfD6ejIY0h9TifZ5WHhqI0wL1uqzp9ZLSolfK5NDYY9Jeni0POgiAYBHEb6CLtMq9IwGMD22UJxvYUUxDEgbEu4IDxVGY6FxnjwXR4zIJA1FIbaKT5wPOPO7Bn1JTxtNYloilLWgDhqNWE3lV5QVCUtNY8p4ykrTZrNFgxmlrjoxY32Iwmu1EUB4Aj4Mo8p4ETFuAQLTeWpjlYO3NeZdN7d99f2Q5OFnPk0fy0kAJAFpWSSl03W2kjaRkShoEez7/3JGCOAUZlXhkAaimiAAPnvbEYIUcJxMgoQwkkGEGAIEAIYgSRcxo6pK2tK0EgaHdaIYYR41FE60wWleLc1lXmHXDAFbwqq5IxZj2iDNRl1kgiq01RyjgKESUYQ045gb7WqloUkFJrtA85pHCRDSMCJTVJGkGhpRIEAohRFBOKcMQxsMhbAwD0Hi3qkgbUIQwcdN5UdW2B98YQTJXWCDkIIATAAU8C7r2V2gLgMMbWe2AkNM4BaChAhNRKYYAYw0oIDxHGCCJX1XUQJABCBzxGyENQGwM8oAwbpQCA2tpaSQYowpgyQmg4my+s0UmcYAQBgBhx4EEla+8NwwQgaLzFBHnvAUDue8dYS0IJpAg5ggmDyDsIvDdGWucdI4Rxpo022iNEkHcI+CDkzhjhPcYUWochIZgDTE5nE2VhHIbS2iCMrPHGmZBT7XBIGfAeEeQdFkbWUgEMkyiGGIeQzmdjzCknOKtKgCEwtq4N5SwIGPDIAwsopJRCZKBzAQbWOA8gZZgTBiCqi4oRHIaRM4ohDClVHjjvMcLIA4hsFPCyFB4gBzhiyGtdlUXAGcAIeGy8g85jSDx03iOESCVqTwiAkDFutHHWWOMYoxZhBKnRuq5rjJCFLmRMKgEQkcYoYzgg2jmAIPAAYYwwysqsEQay1kbbgLLvUiDryhpzdHz4wecu33n30fJybzLJVzfOqaK49sZb273z
fLD5+Cb+vd/8/XQ9Hs6LdKlZIn8yGmEaNtJkNNNOOWTLBmNG6xw42Q3ZOTy+PWqf7UfdPpia4zsnkxycAQ1vzfx6da86ipfo0iV6YS2tZlYfV9lED2d1fxDlHn/rtTfXPvxpb30+rZ567IPHc21FZaXeunzp+rXfh2JCea/Z8A6p4/LoRM6aNPrc3/4bh29/8/atm89eeSaUcJap0BIQMX82Kepp92xS7i8e3rhNz54JG2Bn5yFutGpp1GzWagSrm8F0+ihpJelygiIZWH90OovS+PLzXWlENTyK28s//cMff/3N18oqH2wOrlxc/tJvfm0VNmEFblUnj33oiUuf/OA/++f//Oqli0LAsNcIuujbX3vtsa4OE7e7c/3ZH/vBd07fnT46STaba4OV/etHqFak1uPxop02tALGmh0tzqylpaDXb09Ou8kHnv0vfugnPledvktABVP0zsOHq+cug0wfH5WuaWPO3rz7ismHTmrEW8PdoUh02lqDFUdGSVNDk/taFZnAtOJRQNN4PNXZjDbIVgQa7nSq8glA2XT/UGeqt7VEwmrpzONKldpHW8+/TIPVuvrVKGIW2CCGDpaayqOj+4d7O4g55QBFSOMqxGZS271K/+AnP9vdWv/qN3+9YihopdPTUiO6trw0KxZWaWMgRg5AmM81NqyuF//mn/wvf/nv/R1A1wyIYdi9eHn5zlfuPvHMss3w7Xf2Oo3YGD/SsrI6CPDS+rJScjgqukmaLINFNn/mydXFrDaAlNK+/Ec/MT8sKhy98Ic/9N57B+U7e3GMltZard5KlTkCDIMAIWPAyfvvvh+3Gh64sBlLqWtpH+1XwOql+HuCKQ240o4QHCahlFOpVCZVuxMBCI31ACBgfbmok2ZIKPbGBg0UKPTaW78exR5BrK13HjpjjLE0xARjAH3IkkougDUEoyAKcEmMcBKLZiNxQk6GRxg54BHnQYCTqpQ6KwgAZZHzpDufLeKsjJqpKvJiOlsetGHI44jlRlayggjIIiOEHR+fLgPKAgpCyzmfl9loMeIBp4wDpebVxAMDgOWcISG1qBzmCGCMQMJCTwIWRWqAlAGj+Zxzlo/LtNVigJ8eld1OByJQSoCpA3F8nGW10cuDflFNgcOU4jSKTo6OpCw63WVKsTG2qvMQpwhagjGGMYDEeiOVBA4kLGA0VlLMizECqCgXSdxCCGuvGeERSQDAdS2DIGRRYzKdEa4H3eVKZZvnWvs7O7Pj3SdeuHT+4uaNm49a3aapFbEYOVKOS6s1cDKKsVTqcIwCDYh3Wk0ARTyIo3aivLQesCCMSGqsJogZKGbl6XKyaa0zVhESZKVqrzwuRakdbW+9OJ6O8ipPIeFJCE1VK1fkNSCnNIVBEPBGaH1EXEBDhiCdjmtC+MWrL45276dL7UfXb3BC1zc2giCezU9feOnxN19/8+M/+LEb++PtC0/Cxel7r7/Sai4128ndu8eNMAK+0eymz3zwqV/75V8fv/KNH+is/9xf/Mt3rn/1kB/1ty4dvHNnZSN4sn3mB156Agbu0bs7vJF89Z1rHPPT+3urK2fS1lKhzJm1vgHDazfeiltrjz125bWv3V8Uo/fv3rtx+/pSY+Xp569UdXHv7ohRRmPHLdPKPf+hH/jib3xRmdsvPn91d/i9cvfJNGt02ebFxzjD+dHD49uPnENJ2ly9vHF08PD4eLZ+buvi1effOTjceXjQ39iMqmB2esgYCjQJY87COKIgDHzImZFKMdfrDTwAGgBI8OR0jDxqJ+lssdC2QsrChFWVPgWKhAPOvNTZYNC1RqNA9zap3wje3xWoFVVCRU385OY5CrVHoZLQIYJYJxseAsAf7T8a5saErUHSkvPDXsf2l/qLmXp0/ShdIRgSR2Mat7A/vfbOjTCmQSNN0uj9a/eunj+7c+tmXVSFlHGvUxiZFyWEEBHsNUAObm0Ozl3YOHf+rJzJcny4dqZplFRC1Nm4ueECRGTupBKiqow1S90lp1nQ6BgqWJxki6GoZZ6XK1DxpKEVVLIcHd8py6qoJoiB0emeLfNLlx5/79oNBPBgkHjkj09GS2vtwXKCIRfeekClLiEyhSyiZuAA8QjDiI1HB8u2zUu99/Dm6fAwiBLK2VMXLw8PdmfViVJVFDV9J9w6t5Im8fVbp910befOXjuJXYJXL5zzi5HJxnEUEYhysTDIHOwfZDloDy60ljbFIjNmPKvk/HQxW+Si22+22hASBHEQcgL4yeEdXUnlVKPVOxrtiko1YYAJNZ41N86MCvi//+//mKX1H/rkC9/+veslMfv74+KhO7scrvX9Ume+frmJQue1dpYBiJ3zAFrvMcQcwMBoD7HHkELU8mRkUR7E5cuDTnft0r/69/LW3emvf7X4va+9dW4jeOFq89y55TgmzkGjeZ4NIO0/3DO//40bhyewsFoKK40EyCslIIQYU4C8I1555aAz2pGAUYhqUVKEnfOUfG86VlUx3nnYHJyRPhbOP7pxf6mFZLk43BuevbztHGcE7R2c8DQizc5cYBcEadJS0s0XdV0bUbr9+uTg7rVxeZxJJTT50GOPnT/Xef13vuwTU3pjEO2s9BMWSI+LuYXDnEBTFaON5Clz9tLi4aO93WEjSYCwlx+/WrLQkoR3Gj/4x//Er/6D/+2jL79c1NOirt+79gBh9NZXfsOHlPJgefPyKJeMk+bgnLMBIkxLMTs9yuWik7SNFpOT/e//oY9wBUszM9ghB0WpAk48BtrViPiiFvePy3cejG4c58rB0UJI5Zy21nmIqNfWe+8gRIh670NKm8v9Myub51a2vvTFX/VAegcARN46650H0CPogTHeYKRwCIEH1hsIHUbEee+BgxA664AlGEGMICcIOpUQE/rq0tNn1zd65x5f91VdzzIJNEmYhwQhapRpd5tpwByAuTJpO5VZJWsJvLLaAB/Op/OIExwggKK028RRQgku5qeF0LU2tZPWOqdVu9EJoqYyRMoSegE9QR4HjALOpnM5Gqn+YGll0FrqRFUxwwSESSCFgQZbG93fPRk021hPW7BeZIvC1LwRTsbzkDdUwMIojGNKgGAk/fb1d/frTMwBC2BixSJza6vds6utye4jDmmnFy4POkV+tLyx8mh3rPMioRhafevNm9ODg1IudG2cRpwABHAjbXoKHAtiHoahsxg6hhwJTk5ng96yqcdVUQSd4GBn12nDKUEaOIzCkGNooddl5c+c25CuWG33vTYYIRwwhrxcTHDDFMp0Gu16PtUGSKX/gFQ0HR4OlvsM0PlCMkamw2GzhRGSPIrilIjqVKhS1ZkSjvKQEIxoS0hQA/WRl8/0Op3/9Z/fzEo9X8AAselEhCHrlKSqyqee7GaFe/RwNw5D2oqzeU4ZjaPIa/Dz/8O/4xYAr6wz1sG5BZ/5kc+2G+LR8QG0QM6zGztfbQ8gRtZrrbSaTMq/8Tf+1rwoAgiXQxZGzC8W4zuv+h5+7txWU4hqKMbzeRpTIXLgjDM6oK358JFSohiVECdJByzmMxCQ3AOD8Z/9k89c2rjmZ9Vas22VJsw3On1
KvHFSWxEvkYiXP/L96S/+p/L9h3vbq01GDWEoNJ4zUlV5SC1yhpO6xbxbHDfSFVsqJWYYW+dVwDRggdTSO+MgwCThoIEgBxA6nyPgiLPAYOiCZruxmJzUCznDOUhce3ldnD7ytUrDOKtE2tmAmZvOR04vGjjrd8+3SePOgxtFNg4D1x70qqIuF/Nq+oAgAhyICdJ5bUEl60p6I2rV79PF8aQczoN4FRpy5+bUs1QoYcuC0LrKFx5UcQSjtqOMlrmJEsoQ9npmSiFQpcopBFRb6jirAZ0LIE1FTcBsQLTLskOS+qxSXviov73W4seH95c3unv7tzuNVVmV7cEmMDllTubOe2UQhjQcT+q8QC8+/SFiD0PKLY21MkHUC9tLWtYeSONtPFjpdzZbrdX57K6YTAAPSkoh5qbWQeiaAVZShZgwwJyPEPNFcQQZdTCY59IYNh+/L4zoLLX2hzuYdykL5sdDnqTSuYPdhTL6/KWGh9He0em5raX+YPC9TVMHirKGwGMAvHdJFM9mZV0ZCJAFIOAchZwRgDFRylhvrLUeekI5AEADnyQpx7DT7DQ5WcxmpUbAIRawkCPjoNG6EafaoVbULqr6ZHza7rTiONHaa2MxBkJWjSCJg9AICTFiHgHCgwhRFngDCSTtuCXrEjvqNYQOQgetdgHFcUQZwYggZRSlzHqflQJDAJEPEyqkFkIqpTyGzSRWomaEemfDIFmUGUMIIA+MBwA5ByihUtfAO864VMYjCgD0XnrklQbDaaaBD3g46DQd0EUptNbK2oDSVqNR1rWUxninvbdKGusIwbXU3vrAeVvVGDGPoNFAqjqKYqlqBglEgLM4wKTWBgIfBQ2hKmesEMoDTwl1zjvrnANVmQcsdA4mSaS0wJAghJ13EECpKgAg9Fhq4L1iFCGIhBAQI0qwlLWDkNIYBxRzMs/HHmJRCsoIICjgHHgAIRRSQQiBJ5gCgjEC0CslgMGUaOOEcTxgSlsPHEKEYIahL2vBMCIE9VuxKmtjvK4kAFgbI5DEMaqFAB4ELKidaTcbRVkZ45x1wAHvvXMAE+qcHXRWp/PxZDZmjCYBZYw7CJQzGFGGsLHGGEUpEqJECEVxIJxlmISMeGchZJXIvHMAeiGld4BhCCDwHliPwzBWtSAEER4YCyHEEQ2gkt46722n3RBVSTEKOJP/udZ69nCejTImzMM3HpkSj7OxUY0S8K3LF3/uI5/pNVvfee2V116/EUXJ6GT27Vt73cutC1dXGTLLK52qMFUhCeWNOGKYG55DhiuLWlee3hl9ZzaZB3iTkl4x2oeELm/3Nh9bm49K6euj9/eHd/fXLy+rsv7yV96tg45Qyf5RDSP1kR/7xJ3dsZDVYqrqmbYWBjFX2h/uHgAAaYS1XhRiD4KId6LhfDwzrMod2EpRsrU3nZ3p2MLVE7XQVKBlMvfBN0/eW2ex7M+mJKwns0eTSXTxw2sffg7dObj17a8NlqiHURW0t9cvcUUPjsa1LKEHloGU89O9o8Wtg6SxvnNqNFYrcnH+qd7yc/1Pf/L7v/ALXzk5HcYbzROX//DP/kQzBu9+9Q2H1dL5cNCv2VLS2wrffuvtlScuYO3Pbp79oc99+Jf+5e+uLi1Ng/LP/Hc/6V3v7/33/0sSE4ycrU13pb3daBw8PJke5+9W5NkXPlSass6yUsi9TP7JH/nRd7/8ShyTTqc1PXp4PD3pNAPjDed4NM6UtqzVddip/LCsRyEGHHlJMY7o6dEUeNJZ6X7s535i7hPuiH5wR4ppd6OLMDo+vT0bBa1LL/hkxS4O3/zC/2/58ccmzfOf/dyH/um/fJeSoNGMdVlUwz2s5MnJwlNYGNPrNNcuLV//1vsP8vxv/uN/6jBKV9k//a//541GOxvXWgFHyM6jU5n5iBPOiZDOe6eEA8ZqTr72tbc+9943zn3wD3tJLYp+8Ed/4ld+5b8uLS/mFFCKQxJ6ooRhTXLx2bXi1uTy2ubJOJvW5WzPLzeTt28tUg4eu7LVWmproeZ1xZrh4axce2ppfSV4cH13fjLmJhQat+MWhuDR9XcPZjevPrFdW3/91R2aAe5dEgWFcrKu2wp9l4I4Ta1SeVbllXaQQIich0Z7RrCFPoyZkV5rGDBqrZdCLxYlRGQ6ngAfRiHX2mGMnIOYQkIxxhYw7An1hmBCAdCVqljYgp7Mhw+hEyGniGpIaLXIPO2dv/DC3s671oj+cvuZp7Yf7U67vdjKSoZYc3CaLT565gcenexOpwpDXlU5waGovPP1+spZLeus0MpUFHpMuAOWIgY9cMBxDA3gQokg4jQIMdJGllrVnjJA4+HkcAl0I2MrVVf5LNla7i0lHhBrqXV1FKI0pPPFgrG40W0yo5hLCScJaXkNlaiPTx424g5yCDqkrUYIcx4hGDCkCSCcMOulUHk77SmlpShpyKxWwHpGmTHAKAOccNZqrymj3mqnFUmbDupeP+RI6QDKUu7v7odh8/zltWwxh0RQBAjAYaM9G5b9brfMDqfz497KwOWWesXIAII8CHGWzwCMIhYjRERZUsK8MYw1HBAYQCGqdkC9yoQHwBOetAiqDcKN7tJ4NNLaWB531zvelFWdj8bzjiIIs+xkiCNOiJ/OJgHh1ggHqYeYJ0Gn3Tu8dXN0cG1j62ONRh8Dn00WKgFC8YcPDpY7rbrYX1tdnZ7uVKOjx566JIGNgkGnoYBTzU5TF0Vp8j/0k5/7nV/50s2b+ySNN86e/70vfXPj0qUPffRKm4HRcV5rPhmNmyubsNG8effhSgtDJVjQbDSbabNzdLRfDCfPv/TJXHST1uCpFyIfVZUUb7/6+jt3H+wt7LMvXl26sj6flDdfubV94YknPvLxazeOzq2tqXLo5KzTb3yXgn6v52A0OTrhOB/vPmp3ViCCjgb7+weLk0Mg4cHODWmz6Ny5s1dWwGSiAeqvtrM6l1MkJZBFDSlWs7Lb73sgUYpbIcqlx9wrY0JCHJInk2HAUb/ZTjxV3oaNhtQ+aaW4kcSmDzn3xgBm8nLe4J0VdjqW40GnrWt59fzmUiupIVFzJRY1gn51o5OXwygw2c44iHpVnS93m4SZ33p9f7nXWD0gH/7wUqsXHY6yvftHMUSI0ziJBk1y584dSvGt2eRw/1gqgTHMJ7MaIqRcFAeIQs7DbrPx5HNPIM98jcIkHPQ3oJEYAuvdfHjcr8u42z98NANI8ZDGiDsnEx40e6EwvJrX49JOZyVnoBaLMApWVy9BSNLO2aQLk+z45PBhHCZrq93j4eFqN43bg7i95TDcG766gpbjdA27IAhY1GkW88VweJwVZT9qNaL49VdeXW8tLzV7G+uDcjSMA/T8C0/v3Xv42PMfDbE+Or6RNFtWxoe7srsZA8mzRUD5Zhi2vVdIypA6S93c29o4IYx1rswniHCHWUnR+DjbvtLoDJSZzY6Ojq0oW41A2lqCqJFGohAaaa1zaADE8FRX7+7NuiEjKCxq4oFbubLNV9Yh7Xz87MdQ8e5r33jrYDI7Gc2AR91OL6+HZy+cixNoiyLtRtB7iC0E322HNN
9CuWlQVFhY+wLK0uNTYUcNbt94MA5qkIPTo9fHT64NHa9hZSGLMgjOK947EUVQe2tIVCAEKDcZL2u/12//sUiEJQSqhPjdEYAFEJBUHgceSwcUaI1Pe50DbJc595EEMlBEZIV5ISz0pAWKh0FXiRRTrJc8aJsyjwiXNosUiVBdIYba2xgGFLPAS1p4Rlvur3WyejsSil53EIobYWWIcQ5pg4raUFuqgCj1LkGycgdhhjjJCWwhmjCUXAYSsp9CpnEcHaWmWsFFJpxAPKObQahEFojUYIMoq1NlY7RIEFVhugv2d0AoAw6vEgLyXDBEMspMKYammgtQpY4vsQAikVJk5pDRy02iLApNAIWOhAwLmSAkNHGUGISFkhQjGGWmngIKWMIDJNJhDjKAqBc1obyjwAXVEKTBBC2FmLMeI+c8BILYFDzgALnDYqYr42ShvNOS2FgA4ADCspPeppZRimlaqcAxgSBKiDDiJnrbbWIUYwIlJpCwSGxIcesg46V+iCEsY41g4DjJW1RjmtJYZEadVtxBCJyGNZpYWoLEaVqYCDGGMLnSVOa+0QIJAoKRnzEYA+I75PAbBKaoIxxoT7GEAYZYWyDjPkBRRzVhVlJR3EllFunfO4b5QVlYCcG22VM4RSRoi11lilrbYQKGWUNohgqTSGGlgQMM4IcsZapYHWBkBjHXA2F0JrJ03RqIWsEQhhkqRSWigNtLLG2dAzxgJjdFiLHEHamjLNpRCex7T7fkrL5GTUbUfpPDmaTEDkKFOVgjd3D3i7tnK+FsEZ9LGwVgMpc+WQdQJwrNcuN/avH7//+j6l9N5rp6TT2wi2f+Hn/tc/8seeUdmkjqP1Tf/4/n3fSEEQUf7G6tLoOIHQ0uVLw0fe9L23W97G9a9/YXnddzq98frLsBGjiAf99d390u83D6gql3oxholSulMvHW04wxYDfDxZWX/S1c4lBax5cj558PDGvZ3bw+A52Fpi2xubr373lQ3v2sXelXQ8euvGN/lKp7e5kRuzOD3BxPeMFzqNci6O0sXhLMkHaqnZMFTMdLfbyapwepIUyYJh0Gg0icpGk8Q4OTzJ81JzBoOQLsVNz4HhOKuKvLdcG6WLqvL8INByBgLSWF5qrtTu332t02jWaWwsTnTuQ2D0dGVVrzX6L33xlk6gkFQs8uvf/s7F2qe+9fsvb6w1zl/efvWL33jymUvdpfXde3uHo6PtTX6y906XjZ5+opOV5exh3os8UqlClSurLQiM4+2TasopZV7o1YjWU01wvdWyCDKD50nRYI0AGC0kY7zZ62Aa50XiZtPF1A0OZNB7agFGwrksN3duDSJfGuMHmL390o2lD37oQx975lvfeMNoZ43lBCFCZ0nqN/z1qxcvPPdYLQiwK0Ry1FjrAlU+/PrN7Vb9CC+OM/Te7aR3psugLbN0o8cdgM1OfTxOm23oxTGCABqOVG3nwbsAUCkF8ejp/Mbf+qt/53d+9yuFnf1Xf/J/+ht/82c+/OmrxUQQ3+t85Mf+m48+/wv/+H8Y3vjacvsMV5YzFgbLbx1NWyvtbJh1IoJImUgAAQAASURBVIotOJwt2p02QLYqTK1Om2tdIc3e4TBNy/sHaXcB+r3eY09eHO0fqbH8rV/5zfEkH8yys606oN9fRrZSOwdkof3A3+jVTsZllue/8Etf/Nk//CltlYHU9z1RVslcRq0w8IPXrj/85s1XC6E/9dyTH/nQE8AVeSkrmGCsxSx3nLSXwiSrZJ5W1lpsEYKUsNj3hFEOKR9hArh1AHtcFZWopDYSEsI9Yq2VRdVph3Q+a9bB5tZGeaB4wGcHd8J6mGTZa6/8yvmNRrZItCGB52vrAAKEEA4gcMpBIqRCBEFnMAIQGgQBccgCN0+PKIm1UlJq7AznrKpyAj0EAIMQc44JqbIcQIghchAhAJ0SBCBrNMIEEF+IFELrhZ5Q+vvOTQKQg0pKQrGziHkBQUhppaRAyGfU17os1YhzWgjlewQTpDUA2jHiA4h9n1fpmCKklDPAQggI9QhlQk2l1oQiwiFEjnFmbeGcxF4cIW86OIhqASee0SAv08DjYRwaI8sqlVJ5yiClaRAboLFHEEWiwBQRTGhIgkzPCFFqNpwNJbMIM+43G2kqTaahpGWx8MPY8ygBEBJoQNls+H6Myxw8++wnw0ZztiishVWZT+aHQT1azFQYLfc3Wqd3jjVM/U7UxuH963fXzvfPbUej4cE4F5Y5rczqcgcX6mu//EVRmuUzWze+895mnz31icuL8fK//N///ZU1/5N/+Ee/8aWvY4RvfufWWnextQVQhI6P5v1WWxTVIp1VWvR7/VqbT8Zzz++Uk/Le7Tc++SOf3r13vLzcOhzuvfzqF194/OJCJNfv3u9Eq+2t8xZ62aLyaJYMUo65KFOjRNiqnQ6O2q24oFS6Skpdlt/Pp7j13v12pz0/mMzHySyvBqP59kq7f27lzBos8xQALdLK434c1TH3hdAU+5xWEFsl+Ww62DuawmW8vtafjofLy12fI4CispQthj2vNzwquvGWzybHo1PrVLfdyjJTFlWr2TPaVJWxRgdBYD2glFEEj5LsdCrrvm+FwvS02Y0p9SwGgcFlKqqkcKWWxERekC0WQVzDWPZWosHA+F6oKh0whBzwvVipUkpJaZjMZqo8TMfDRmvNY2FSDHvtIEEoYBQ5qwsBHSiqCiIotOTOE6VyBBDGqc+0sdoSjZuW9QykshJQj6fzfZmkaVnCgGcqC7HtNuKDbNqoNeazDM3H48M73VaNIMiDMD2Zr1y+tHJ2M9UOzkpeZsQLWp2mqarKWgBtf2NjeHhXawRgpIoMEz7T2fnlth+K0fhwdHS6vlR/+d1bYzHMR6lItO8HwGov8kqp3n/nYUBshPnTVy9ptYDICQfjiBRSduqRdToiwXxwVMYgxHianzDigJYIWFVArZ0DCBjjrLMGJIt0XinDoqrUs6Rsrm6rxUG7VW+tgvGNuSEkbPU4bziJYCEYLqQWqxvrVWUdAhy6mnUesbPJgzjGtEIU+cPFxNVhv3X2vgQ33h5+tbl4+oUtWdi0DFLMc5FraYzUBBNMsHWGYOIs0gAvspwS6AGDnVzr90+nZQZlp+bdmM1rp4MLT3YsjGrtZwcPD5IqaayHTzy39tVXDkQO/DB0WqlUQeCQM4PhoyvbjZ/9C5979+6bgV9PCz/Necxbi+l8Ohy0Wj7BwFhj1ffXMIU2IYHV7JCrROuS9Rrn1q6dTlQyGyKlZll2/qlLw90DxNoYsL2bb2Nu4tgrF8PV1c00T4pU1nFtb5JKUdu8+MR0Nth4soUjbKbZ2ubKm8fj/XL6YHpUuvTSE9vvX3/70lmyttUFpZV68tkf+8TBo3ujwwFrbvjLK+1Qn3tsaefO7ud/57UobmiPPPbCs1LMPZ/k5Xg2HG1cfGz13MYkRZP5xKI0pFHEyHw+DeqMMIuR1RoQRh2AoFT5yf3Dl77y4NHtxy5cU5BvXdva390vT/WNm6ObQ/j2wftCGcqRhhAAqJSwDhJAQ8I2V/uLzNIq//gHtpk3/Km
feGZ2676YHKpZtT/xar3VoBmQKjs+GFw5f+b+fmI9/fJr16Gt57P53snQcLqYwVKni3wOA6yMWFpr0gAls5PxfPGDP/HxoQvqUbDZuTR992sWlLiagwpJ5KSWiPj1lQuTglUC6MxZJAJczMcn09PjduhVi8QPGHSq0W2lk3IwHboR2VhapxRZpQnEve5aMhgOD+721zbmyQC7vO7HmxfOmcb6YLggnufxRpGpx7c2PA5MmYRhY3tdmUKtbW17KxuHj/b8ZHzn0c765srq2bVxLkPoEidn8zRuus1Lq8756SgbppWuR0trKzZLBpNRmi4I5dNhqqCNukE1Mfk8UYuJ48QBLEtTVLlxtnQeTGGuJ5DJwhQUIMoB8zEVvsiFKBTjtIAOVjr0QkJIrR6c2GqmSU6lQRIBfe7SVjJfKL/phPSLwIRodDqkvo9r/jgdQRZ5hL51/Q0NNYS+muZOTIPQ295qjg7HTiSO4qVzl6b7D4VKuq2oKjVHHGL3B6QiQJAoSyMQJAJYKyu5UKmAbLURL6YnaTlqr3YlAKenE2dU3GinaWKBF3dWq8q1uv15cmSLXFYLIVJjlYHq3sMH9XrnzNZqrVYrrbnw5OaN1/ba2P3wxy7+wm+8VQtC7CNhpZQYQ2krTaFTGJcl2X76756g9l/939596os3tjeuvPny6//4r//ZT/7Unzm+/2+8a/7yhTQvdrhnt1a8pcifS2CwAcAYaykhEFNETBjq833XwF57ubZylTQvsQqe+5e/dNc5D1lcCUkIgQCKSiEMsYUGerzReGcqh/MyikME4cef2WrXkTAEYXjxXP1bbzxCvH55fem9SrJu8PDR5LjQ9aBRYy2oSKcWyyJ3wBNVLkozT467rb6PsSxFKRLqO8i4VZUwCthMZrkQOZI6WSRGvh02173mBvZrpTBVwTDy0nxsAEGAqqqSOiUsUkphpAFQwEIpK2NMFEaMcZkLZzj2o7yqpDYYKoS0pdhi0FxeBSioNeuT3aEFGkHvZCqoD7FBJp2FLd5e5kUGnMHAEgwr6KkyL8PGRqd2fnx8HSJ9ujfcPHcF6Giey2ZElK0Y9yLfP53tFR7r1ZrTnRMhizjuSKVuPco/gFemu+/0lwq/Ttd6z+8PxOHJURg5L2CiOIVOIDgXJeB+3TkFAAIUOYcD3ypVnhwcDseHvbXe2bPnD3fva2Pqnb6FcDyehsxLBjNOm3EdCsMmk5Enit5SGzgwz03cudToud3DIYbQAjyaaxozXZUiKVjsKWm1MDsP7xjIcdDm8ZIilDp3eDjgBHCfR7V64HxjaSW+3/fBPWaMycsCY4QpRRQ7BC0m0lW5kNhaYS0yQGutiTAGxHFbq9zzGCZYa5WJAgKIhCzyjHk0Cv3FPIMIFqWQWkttiOcxQKuydJhSyilheVqki4RyF/uRypXSSjsnZBUHNd/zKCJKlgQhL/DLYu6g8nzvexHOnAdVlQKItdYII0qYAVACaJQ2WkPktHHWWW1snhcAYGsBp14lCgCQMpJSOi9KyhjQDgDnHNAAcj9oYy6qCiJUCWGcMZpChI3TEQ9LURptrXFpWjLGPe4vZIYxcdBhhKBDEELmMWuhNVYbZS0AyCGEAAAQQmWcNrJSkjhW5IXRQJrKx4FR2gHIGAXWejzIRQ4xsg5AiBilGGNlFMVcCqGNssBQShBwBGFMKXYUQUgwt0ArbRAmjHlZmvOQIoiKvMCUY0ZLWUktILAEG+cAQJAHHrEEAYcwXkwThKk2Thjge76pNEbI84NSZ4vpgnsRJLjS2mrtMy6VNsBChDG2aT4OCGaEQ+ccBBAhDXFRlFVR+b7PGU+KwmpTb8eikjTwZCWccR5iYRQgiqRRzlEhK2ehgggayTDGAAKAtAMKWGMNgNBhJEQFAaCQWmgxgtYa6AAA0BinTK61ZtQwP3AOSKml1ITiMrelqjjnzsKqkMY6iKnHiDQOI+xRLEVpCXEQSmsAhQI6o77vKpqVyNcXqjI0Nlpbb719/e25SM8+vrW9dXY4fmBmkAc+AFopWRRpo+d5zt5982T6KLUahlEYrzalRUG3TwWazYqd+wNmHOfZ+9+9VSYptGB1oz9+ONFl8eQHn9/bPXry4ge1riQt2+tnllvi0eF3Fzqdz8Le+taJlEXBQI+PpQBSQ0pNJaoKwajmA0Bmu5sqvdxeGu3ca189e2zM6eE9GKH2M895V/1FmVYyv12MG1cvJIBjv+2t9M9ZkqSncpJW5dDzdIhAQGqjnZOq0mVRtNfWvWavs9EOZPbm77zVqhbEtqaVTgrjrJklqa/sLDUewBDh4bSohZR7rNmsw8WiE+IEOcrNhf7K0Ux1ulFimEdZWsh77x0vLa9DzMcL2V3vby7x137vu2aQyErXlgGxRGcyCli0XhuVpx8+24gfXHtwrCbT7PaDk3Bl9YVP/cidm+/8hb/2U1/5T7/JrCtk1l9qv/7G0elUeZRrJT2upvliq7OOUHOxGJ5dObO3c1MargoLAT0+HvdWNvb3jjG07Eyj11mfTMeAuvkkQ6iUOq9mmIerOPYn6Xw42Xci4VG4f3L4gae3rArSJOuf24jDcLI3kJnGGBlrEEMAY9YMn/mhT3/gs3+cNlay4+N8sUdPsZMu4K611cXGNOKss9lE/uQDP7JWaTR8NF4728KEJmneayhIXTIZ0zg0XLc36ytnm0c7CYWcEZjkr/2z/+OTWy/86Uv9c/kt8G/+/e984dVbP/PHf2RrswGcKcvwj/3Zf/TO7/5/v/2lX7/8HLv6P33iN379lfsP7yHMljqNqiwZNlcudJMiHy5S5JiRxkhdb4TbYe90kDqAprn48ss3b9/bf+bxc80wONnfby13tl988t1vvnZ0MP8eBZQQYwHjtLKqW48oOaGc74wmtaXueDB2EJeVUGlWa4QQklSpr737lsKxH9Hv3L371Ec+jJMZdDCIkV+vu8ir0hEm0GAklBbWQE5aveZsOicYjCfTx65c3HlvZzA4WsKtVMNW1MzLxPdDUQprIaBEQTzZGa/72gq5e+s9F6+N8rySJuJBhYCd51o4RGElVSGUQ8wCSBmF2M0Xo1bcpQQZ6Mqq9BiGFhWViHhgpMQYY2YU0tzzgYPaWUI8YJ3v+QiCPCsJxZgQDYGj1BoDAXYOGqikyjnwrQScIC2dNQYjkBRpyCOrobUAEYIZz/ISQReFUaVFXGsYiSl1TksEIIQu9DyIqUMQQAuAhVJNx7t0VWGMiMccgD6L80ppB7SsILJaWyM19WkUhePhCeVoeaW/SJUf+oTDsiitFQRihJgFMMsSiIHSwmOMUx9CIZTMhM607a6seLUOTKdWVs4CqEqIjZYFBIj6vjFYCOWUocQ/s909PN4PGxFzzGlpsM2yBEI+HI3arQhAdjIVvN7iXmxMNTsZxbzeZAFJkkk1f/WN1y9eOTM+LQDlJwWePpg2/MXgaH7nZPbUBx470+lYBW5/9z1h1ERYTHqfevLy7/3nX5K/+zU/iv7CX/7JN771Vq7Axz7z6ZPd4/PXPn73/Zdfff3bP/rTP6uhVCLzAtxdPS
8s9aNmWZVLa83AC/fvX79w5aoVLgxjiN1w8Ojq5Y3/9u/+nVd+9/eWVtuMepzXdCWgA1blS+uNw4PZ2vqFr3/jm4viPrSWUggpIhyFrVDI7/cAHg4nuNFAyIzm41ICzvy1rTVMq7JymJgwqikOkEPKASOtNpDHdUrA8OSkKHWr1TgZzablzM+gF0aUEs5rFYRPPvXE6SAFs+FSvT4enNYacqpKEvGTRcpYsxkG2mnOubUWowBjaJ3ECBljlLa1xlKWVfU46HbislwgyzhChVIMk0k+pwwjYpUsgcMAGsQoq3O/qCjBRpoyK62bEkQJcMjBdrMLdDUajYoy0XLi0TIbj4mZO5DHtZ4QxJE+8ZvpznvMlwG2QOlCpUqBgPnGYRo04vaS311zQd0h4qq0KKdh6NvSUB/GraaH/XI8SZKpc5j7UaTZJC29QPhebiyEgD75zDXSjjVUPipbLTJzXiWMEpIAgkwFIRa2urfzkHkVCOkHP/Hid16+vvfo3jNPPK1EuRhOx6Ps7oO9RycLt5vUAp843F9dwr4vlcpU9exzT+7efv+dG/da9e7GSqxMpjRIS1FWIgo9aGWVzFdWW5ACi6E0ZnJ46LFAKOUcYoxz7hVFoqS2gJRFlRsFPewcIZgHYe3smfbo+Nad+3dv3dyRpLG69WQ96B+dHiFk37351tnzZxezeRy1AeWMUWrhfJrZwKxuruTHB06roBZ325gFVTNAhWw+OELNve7spHr1jqgcwBhaCB3CBjhrjHImwBQKI8rR6oXiT//sR+/upKNj9s479xvhksaBWRx/7JMfu3Bp/avffN/S7nx+v1dn22v1es98uPHYKBse7g4Gs4W2lTMWWEsILx28fnL4tH3+4tWLxawwmlEos8UYYltWVVHh0SSN/JCHte9RsLd3dDnitYZ3dzQ43Nv9wI//eFlh4NxkPB2PBrrmPzw6rXu1JE8ffveVJ598PFHd3soy5/67N28urXRkhXceHC+v9Rji83Rcr0XT2YxAtrc3i+Lap37yBz7/0q2j3Zuj4+Pz7f5n/tAPrq5MF/PUgzrN8y+/+c75lX5jqY6RcXYWBvDo3vGbNw+PhD6/xttRJKrh1taqku7rr7/22LmzjW7tZJpIgzzYbJ7dtmUyGJzWYqYrenDwqN1sWwuSWUKJ3d7sDU5vKlC0mo2tq5fv3nz/4ft3klne6/bPfuTx3/mFrwICfB+rUjlj07QilCKIAEEfeOHpND3Y2b1/abn27LWnVrf6N37v5SotXvzY+XKm4+XtedRTrufGhwF+/3BvPhnKB9mJd+7alScu7335pcLR/vLaa6++gxhiFNZb0fb21sP3bq9QSqrkyQ9d+rE/9RfeurH/8P3b+cKZw0mpT40FvBYDaGvdGqCdxtYTB0ezZ370Iw9f+v2j+3tRzWu162Ja7y01RDm58/6doFazlWyG3BgSdHClRsud1bFweVZUVSGgy1UyWrhaLdSFKjUO/OUM8zD0Q49srPZfe+feYDZY6vLduzfrFG5vnd+7twN09c4br964/ehsM1xd6TaoWxycRM0YUfpoNDk6Gm2sN9YvLX3gk58tc/zmwTGR+aPRKCiLBpZh7FtJFrnB1i53QyzF4nT67nxeX+sv9/uzoiq16Kz1emFt/PDIJ3x5eR3mSFQDmae1oFUkRSmq5UZDazlZJDaghZGNmmytL8etzf2DMbShA/rSxb5NB+fWt0+Nk5XRZaUtgq70qI+4XYlrb7++x3lEgIzrfrMd3j09DQKea20pK61ZjEc07hQexkHkJjPGmXGqSLNKoD8gFXFOKQoRwelk4dHYliGg7tKFlTtvfscnpEhxazlECAW1aDEbONzwfeo011VGgMrTAtnSKGWd7nR7eagghtFRe7KX1mIyGMBeiy2OJmFAJ6XErvxzf+4zP//vvhJy7EqNtUYMAWIthgBJHIKb1/++0TYiwftv+O+8ceaZD//Zf/ethzcm7JxjV+cGwpG3PFm+wiXHvYhkc5ukymLktEOGh37EAzwdztLTau1qA7XI7vgUKPpjP/2Rt96d/t71xFrk+Z5WQBtlkYPYsci7dnl7OFoMp1PMoMyyxx/vpPPR9ubanZ1kfgM+fDeXlaxZ1fGCwfH0mccuD5Mh9cDlpzZPHiQh4pNp5RE/jOpGnhgzD3yOLPMoRcATKkfMo5BTLEbTo5XuhhSlrkqPmd5K+2RvvFgk8XwSrpzza81yEbNIaiVq9dAZiiFwpkZds1Gj+4dvxHG7lBoDSHnPDztClD4GQGns+0rNoC59jykB5icLsSDdtc1M+PO59eMlTkEQ2nGSAiIDTrywZqvJweEuCUgjDsoSns7vdpqtoEaANvnwgKNAO+TROJ05icJ6ry/mx2p6Wm8sHe0fIK+mBaxK4Ne8M43NvEyrrHziiR9446X/nB3eXvrAuUf7J5+59mfkZB/6AjHAfZTlBSVCliOD6oBSoW0Q9BmLAxYYfazyMQM2gl56VOyLdr/zA8rqzlI4Ho0IMa1ac6EnPPbjmn8yyjwCY7/qdnoPdw+iKLaOzGYLTvy1fmdnZ3hmY42ohFOFPRGEHu90h4fHJHTSSQOTPJ0urVzQFQQV5IjkSVlKgWiUpfn/v+8DIeAc0NJACwFBDmII0WK+MAASRn2PRWFtMR0GHoPQQQi1WCgjHYIWaAcsoTQO/WSxMNalWZHKUilXFhIBQCimHvcYA9rRIIzj5VIuhFJVKWngC00PDydClktLfZFMQ78hlcbcKA3qfiiqGfc8Z6iUmnLLGUcQ5EUGIcAIG+WIjzFFRSaUdVLqSsjQ9ylhlCClJdTEWW0NUMooKz3PR9yTQgYeBQgorZDDzA+AEFVVQYQpwdABiDDF1BhLIYEYSw1sabS1jBIEKEEQYcc8BiHgXqykEkJ8r+rLGAARAAAa66A1DjljjXMAQgeRjcLYQQSdcdiGYew0dMYprQTWFGOhBHRQKu2swwgro5QVnAVaGa0FJhxYiaDzqa+1hMBCYB3AeZX51KcsRARnVYk5NwClRV4JFTLPWQsBoBhbjaDFFAMICcGxg4XWVSkVYjhgFGoHnG6E9QKkAPFxOsUEhlFsjCUUYESQQdBRjHAlCmONNUpbSxEFRhsAEUYeCedJagGElGZa60pSRIQQiIBK2qxMOCNxFEoonXOUUGONlIIzrpRGiEHoIAZaWwCc1tpaAxFCCDHKAAfOOFFVxlkNCMXEWlfmFSWMYMy4TxyGFuVaMEwatVAZq6wBwFiltZKYIIo9q4yxVgOQVlktjCFE6aJ00EkhjTNFsQDf/zwAt++cvvql3w95ZBi+ff2kc+Hs9kVoODlZ7AMCqWljt45cibyxpdTDbHDndH5EvbNNr4FhgDSBtslon6ndSadD4pBUpaz1AgDEYrSoTubpWCIFQI7u3bxfzAf/+eb/aoUMmMvytLVEpLd46oevlOEKaPdblpRFBowCFDCCtVSEIh+zEkMnsvnJYJgs2qFxJh4cJ5NWPFtq2aCdWuYiTwhKWVNlYwi0MCgHp37U8s4vdfCKNgqPBvUWO7l/r6wy3mrVKFvkx
83HluaSHFZTB/P6j55N3hm9/Iuv+jTExkEC6WTiAWchnE4S7pN6v6kzkSbg4dFplxitjEMuLVyikwqgTOlGk1+89uTdG+81fC8rExw4QONkWsyPj6opGZ1gZ/BoOkIO1kIOS9VfD+ajk9GdNz743Cd2bru1ZW///Tfeeu3tiAPknbqS/pX/+u/82s//2/7Z2qM7g49/7jMvf/HNbL4T1msQ55NJrvIHfn0GBDQil04HsN9v8pPhLIz4bDEGVFPqPzpIkde4fG5revLo4PgoDGsS05UnPjxLjBmfzk4Pp5MTIOapEYq1krT18NE9Z6sKgobGpBQUe464LNMcgVJXF68++ZP/l7+WKiodaW5shWJ1e/PqW1/6zXq7QkSlVXYyHa8tow9+cnsxm1tFa3GsNC7SvMhTW8m4FtJ27f37RxXhFIJ2M9i9O6XMF84SSg8ePJyd/tOHXuPyYx/5mT/xt3/5l//jv/s3v/+DH7300R+4hkls4MrHfuavnXn8is2mJ4+O+CxrIFpAKnShEdOqcIsk7uJPfuzKO2+fTE6FUoU8GWFgRQX8IO5s9AZHk9ffG+7uLxoRPLPWYQ14uDMLuptLTII3hwAAP/SKrCIMwQIQSJDWiJDxeJqcToM4lsA4bcpZlqVFUVZJma91mrcGaZVXzpCf/ze/8Ld/9ifLfAgkkg6K0ohc9jud5VZ99+BeLY4mWSFLwBTKh/l6Z2nv/ZvYoPXleJ7MCK5XKNFOW4A4Y8oYC2EvjPu11sHey8KLd17+7mMvftzrNUyNz+ep7/nSoqrMy6RCxJMOMOKAskUqEHI1v6E1ggBqCCK/pmWhjUTOQlvWQz8vSlPNKUWmUpQGuZah34FGcM6LIiEEEmIJwpWURhqfRAzjLB9iTn0WWums1opSBImSDkIQkAA6rCoJIIQESe0YY9hhVQEIecDa02wkZRn7gYfZdDHwedNaZqRlPvfCoNKuFnWM1IgQJQRwhEBKHLBKFEVKnbLWiKKAsJbBlJJQGjAcSa2UcIJxgJ0JfCYKEwQh933qR9Cxe3ff7newKVNIDDAw9Br9BlssjnVR+RSWWZLZKQJGFKrX74/nU+5xB0PrFGfUKiFV6XlEy9SaEEBcFKbZXTLWMRA5woiTnnZ+wKvp7eHRvaWNjd7S8vGkZL452N/tL28eTUBd4MlsAP04ajYyW1Q0f+GDH+pubT268d0A2o3NaD8ZPvf8c/+fX3ppPrz33BNPJsmkBG40TzEOHj3ab7Y6/e0r12+PP/aHf2Z0sqdy43NxMDy6cOExUEpVVAYxXRS8sSREcePV1z79uc/m6XRwfNxqX3zpi5+/dmHr61/8vUVmP3rtxZuvf8sADyrgNyJCRJkuTJn6aOPZZz72aP+gVvMP7u0yP4Tc6Upa+X1X0WJa7tw7updmayuNbidoLff7W418tjClCOIGQk2IMuAcYQwabYDOi1mZTEbzBYSwKipCjZRqNqvatUirIq8qHIb7R3vA+c0G27t/d7Z4+Pizj60shVlWEu6nqe5ENGQtUaSc+xhzY6QR2kqNIIhZ09Hg3GZ9Z/c9ikLlQJmWlXJVnjES+AGd58O1uMUQZjQAhCilxTxVRcUZjRo1Ia2zMC+TWhhB4Iy1Udwuq7IsF8YkqRh3+02daGRWMERKF4jKuNf24g8e330T2UMDEkud08QaVgkvDDo43Ky3V5MKYIS1glVa8hBKbaRS5HtuAEAg8Chhp2nl8XB7Y7NIp9SDSislZRB3GvWAQC2KqrQldBYDo4sZcDSohUrZUs6MS/2gcTjJuq1LjVa1Whx/9/e/+LlP/4AtzO2bhwubzwrZaDT9RswALUudTqaFLCCyU/+QAbS6tvzF7770P/yNv0FterR7WFWySFNaj0I/rEU1LyAcgiKbAl0ASEthjA0wNkoJpXINrfU4how4QrU0FkBtODT5wd1f+z/+gRZykCvPb5oSAgOn2RRgGgXhc08+MZwcLhaaIzgbDRmjCHIlAfFAOjvhUOeiIl6cD9NBngEtxtPMi5rw7RwKdnCYQGcZxIjSSmsLIHIuCkKKGLWq1YQXrraCJnm+f0mn/tVzy//ht96ajGVruf/wwd0aq51bvnI4FqOiSLh97/aQsOLM+tLHnnr8FuPv7492Hu1SQqCPrEPOooPj0+vvXv/B5y9g6AIf+RHKrMXUhTFFFBLqjM4A4N+j4IUXru7ffXByfzZZzFcuP55WNVzNfDDZfqKP5HnpqAJVmSzObl9toLwVLN+8cXd+ct9vN65+4Ecfu7D+W//254kz49nk3EpbzEojW9npqJwgauVgZ4A7zVo6mWcnl5/YGt8ZiPQUktZsWn3uJz72YG/66194MDgxV9b48N5steJ1Rnfvn751/cQxnmZFJOH8tqwCj/c7n/npP/7tr75y9ysv/8Cn/yhNTmdHR8DvAEcVDLNKVIWK272iqvJxJrRXC+le9SiTaX9pZWt95fUb7wdKtZuN5bV+tsghSdtgoKA7mSloiTPAowQzKKRORPHmmy+3I/lTP7b10z/+w4dv35pen+EsnM7E3fcmTY8ocbf7fGtWLjaubq210M6NG4+trrz2hRvj1988PZ6pNFcO3nj3dikEQAwApKVOx+Nmg68uB8889zFM8NGr346Cfvepx/wU3L5eYEIRIfOypBjXWmcUaO0cjrnfvPPo1HiNpXOwEKPti8tybOV8Py+yVlAbTwqggOO8GRGPmuHxYke6dm9pMS9qzfbpKDkaJpTXFlNRVWk+l++/ezPst6BRsyytzSatvDi5c4M2Prjea6QHJzdv3wshQrzKysUoqaSEk2H+5Jk2icPCzqdJaYSBjo8TMH97//Tol5567oPXzp0rRd7uR7e+9mqeJOOpHuYV8ZEE8Hh/pKU0Fh7vTUcZ4OG6II2szL2hOR08og65bG4Wp7PTIfAQZY07d2dJaiuh11eavU5YPnxYX2qJ+WLvRGyKjclolE9Nr+9jLPM87Xdq8+EIREG3Xd/fP6qF3bPL/rzIZ/Oq0az3wk5hQXtpdT6aHp8eYsYWeV4D6P07u71mrRlFBvu50bVuy9MzRhy3dm5co9P6A1LRdDDkHvF9hgzwwma3uXrr1VdsMYVaAuxvn1sVunKQlq5oLy1xBCcnp34UjcYTGjqxSCCQfo37FI8HB3GnLbTtbq/5NXV4OD/32JM4eTh/dHiylw4knSey3i2e+PjFw5uPXA6sBUoaiKBVjgBAkEZ4aG1aKUg9zO31N7/+5Ssv/KH7E/ChjZjW1pfZWXya7L09u3fLlaXjYfDc052t7cYXvv0wn9pKsfk0e+oDlym/zRsxo0Hgb9ig1ak/li1egs6zRgBnrHGYWiXVRq9lrZsOT0fDBQEIEOCMWyzyHZBVXFaIHt44jlC41AjPX9gAefnCtTOH+4+2lurp6WK8O/IJZxRxSTHkupJZPs313uryOVEWShqLeOhTVeUECGt0yENVLoROi2pEWE1kikdxu9NLR+Nk9y1d6+hKlqn0gyjPFICEM69UmdRFlZR+1Hbahix0OEyzArsTwj3qeUIInOWM2bjGK5kb
AywKce1qDjqZkQDYRrPnYzEd3qNYbm2siVzkZaqr3AtjB5w02BDkDOQeps55tHV0kIetJlKFKUbGAI91IA1ScTgdHV2KlygPljxcEzZbDD1ua+0WRrHUM4gJcQsc2ZNJ1Vn6ACjceze+5rDEyClhan4DAYSg326ePZ2f+DzAIALACDnKq7TUycHwlNvIi+rL2xeRINbMF/OpRw3CYDIe1eqBSIcQqX7LQARN6UaLmR/VvJA4VOWGBCwcDcdRjPcfvKaLbG2r32t1Tk+SCqbUOWozpUuCKBJVKicG1ZnXwqyhFIYaB34kSqGy8nsUFLlwwBqlIVRSKW004wwCRDCEjDjosizBGENowjBO80xp45z1Ar/IMgixVjoXthRlKbSxgAEMEHDQIkwZY0Zb5yRlHg9Da5W1wDjjINLazecLKTKjwGAwjiOupEYIAogrJYwUHsYAkSAKjXAIACUKxj1rNOMBptQqU5bSFqrIK8g5gbAex5z588WUQkog5JQ458pSWgchgFJXiFCMEHSOAAIRAA6IMicY+xhjSiVAUpSYIEyIcUaLkgeektIaQzlLirQZhxADpSsMIAa4yhPPCxVCDpC8LBHElBEHIEQQQugcwBQjC4zWViuIoHMGE6wdcBBrqxDExkkEKSQoLTJOPKWVxznECFiFMZZKGGOErbBxnDEpHXBaawUQYZwJIX0/QI4V2UxawzC12khZSimJI7IoObIYEURxpYVSkjJijDmdTjGywGnKEANIFpUDMOKsymcYAAQxBTzkJC+VUAoRxBivqlIaYYwCQHuMSekKVQrrMEFGSkqZQgIDJI3xOVVCW2sY4xpjJaXRRllDHS1KiSA02hphjAYQYkIIY16WVcpoB6iDEDgrtWaYYUSNUcYo6jDAEHkuzRMICHDOGscJJ4RQSgCASTKXVYWJj7BXOGedw5g1wkZe5MBqCCyG2Dknq5IzzhlTUhhjMELGWYCAKMV8kbj/Emu9cfHiyKKqlFtLvTsnd2hNE09C6oRQiAeEcFcuLEgtyKF2dgpuvPbw8uWL6xdrBqjJOE2mhbGomCW3v/FW0/laGEKRLIvprZ3x3qQRhw4C6EHI7Fyc9M53l2p+VPOA1otFiYicq9leZjgVrhKzouCMWW2cBdZZp5wjCCHIfQaJF65tMp28cfPW9rmPFy0yD/MKhRAhZJ2DlgeeBcZ4sTaFhtbomVIiE9pnzAIkkK7S1PjmaHgQtTcSUNY3l3QAiGeRcZkFhMOlay1e36PKw4hUwlhrllajJCvzRDILWSViRAwhzfWl6d6N5Wa7tdK5futYK7fIZLrI5jOc58W5s10EZKPfYI0A1SIPi+mNQyTcSnspm+YogAdHC+bwpz5y+UHycDhcfPMLX/+pv/onO3EO5kOTqSsvrB2e3nn8+YsB1F/67V/efIpdebb5i//6t1588QefufrT3/nO/wY5KMQ8gKysVGMFOmUH451Wv839xmw4IRSNyzwV6v9H1X9/7Xqd52Hg7vvpz1u/Xk8/aAcAAYJgAUmxSFSxRKu4ja2xVka2Y0/iScbxROO1MpOMPbFmZZx41sRKbFnWyLFik5KpYrGIFAsIgOjAOcDp5ztfL29/n777/AAqa+lv2Ova931f6yrdKG4ULhTeP1agPtFVtnFujaWpgeFBkw4OzoKGZLMsiJGDoCzUeq/XD9qvFcIS7QXew5u7K4m/vBCfTuYBx4jjSS1rQMZKTM9yAL2k18rPJsXsJCJ6PsrOhvsTmdNFEva80+MhoZh7SCqVF0BrXVYaKra7Kyliou6SIHrs4ieOH7zIHVaVcBDxmLEodE2umHjvztdPf/tsbeHZjh+88+bD77705t/9e3+HgUnjOnHnExrP33r5N67f3HGLC+8d1WubW7/6q/+EO5X06q/+/q+fjd668uxGfGH1u398x2cxt44IrjNdC7u40WU0jvwAUdM/d+mZDz322y++9sSlq49fWPznv/MDAIAxBlFkLHQAxrG/shjsjbVH0cnufn9r63g4RMBxDDvdlJbcC5P/6r/8qcPB7bd/8O6Xvnl/PBy/eefupW5gpaQBpMDRMM6LTDhpgNFGtbrd0TRf9IKEo9OznUtX1h7ePgkCSkhrOC2VEl4QMYoJJErqRpjAmYdnd2/evv30sx/vbDxSap2fjbS0PuUGspWlKwf7r0HsQcyhYwi4WhTS2iSMKWJaE+r5jTZSK6Vk4HmMcQccgLiWVZunEECHiBQCElqXJQZaiMpqQRFElCpjMMJaCUO1AhoSQAiezzNO+KQsW60FQn1EkKgLzBAiCFmHKGGeVzfKZxRoZ7TgnjebDyC0BBFEUF5PKPWkbqxUqpbY8qqcQ0QgphYQ54zncy1sPpsCYDC0qQ9lLR22KOJBEiPClIaEYaxgVk167Xg8Oz6/2s9mBQQQYV40xUTgx698odV+cuf2HyjXbGwtne7Nm+bwrR+8eOXRZ9qdbSHnzGemsZ4XlHleSOsAB9bPRUEoxtA4zA3h6dL5JhtVdbm4/IjBBSK4qOdBlB6fDpLIYwigekoRjXoLjz39ufdeeZEgAiCUYoo4efLRDw0Ohn4v8ILW7TffOndxbXm1Mxnu7+w9FIPx9nq/aeztnfkkf2uLwdHug2+c7fzsj372vdv3OWJXrp0fz8c3fvC9x5/7+HPxoihGyIH9B/s88iLemg4ynwUAouvv37uwstjxw//9X3v0v/9//q+1bIomC3zPOtRZ7v3YT3/yH/83/6zBlvlms9cHCBunhJaDYWGUbpQaDkbnLj92f+chMCqK/NPheGG1PxvNg+iHRzKmRJZNp5MwRi5eWPa5L2fDVuBxy5XUIi8crmMWyrIR1WR/555B3SD0GMaiKY3Sa+vrEtp5UUurelGka9NIBxSZF0U2m2ityrK+d3uXM848Xs4M1GmB5lMI44iKuqQegMBFUShqgRHoQTg+mpzl+y2fZ9Ohx31MoBXKD8Mql1LqwA+c1shjiGJMUJqwPMvr2OsvL0osQSFUIWtZx5HPA5/7vrXMgMWmYhghaxRjgBoMGq5kHVDuBwH2A+SnmLV0MyIchP2QoQAIDFjQXltnUbsRJmCB50VlbYwfKTUBwIZR4nteXeUsjjmAbT9hoRNVibUSlZrLIfFY02BAJnhw3I4TU5aOWASJ73FlVJGXESeqypQ4WFvzLMJPbz17dP94c/387q17s7z4/W98fzIsDk/GhkIAcTuJ1zc36qzR81pqATFgBM/H05jgXuSXvc6Xv/HNn/r4C8AQ5FRvYZH6vtPKQuMHaZMVeSaBKn3UFEXjB10PQ2EEQoiwyAGotcWchB4ySkvTUB/lo+zhg4N5bUjcWd9sXbq6ES62tPGsw3t33u+FtJ+mi52WkZWVlWYB7/iRioWaYhyKsq5LrJQ+y8vjQTmzoLH2/sPRyUD8yMdewDsj55Q1TgPnHAIQWEIQobhRi7764hevXflEaoU0uAk88sJHHr9/c/L7D99duXKJWEsatrO3z6Lkoy+sY26R6zdG2TKfZyeMRUkkEz+ZS2GdJhAkITcW5sOhkdutdqeqbCmgIyh
Mo3zCKYY+QQA4IX/oNjg7OZ6dHSmlO6tbq1cvLbaXXvnS150tW1vbBhBTllKV0jYPBjc3u8nJePDZz3zy3VffDHkyP33wx7de7nTSQtSUBRXkpwe7XqIWlzp5MV/cSACsHrxxk9T10qV1OymXW9FTV5+Anp4SKo6nxdkUmGZe5Ti5AGfWVY0j1o8oAqiXRotdv0+saLLBeGymxZXHH3/6kae+8vtf/fY3v+7pabe9snAhtgC1+h3X5DBY7Ha9269/J2kvp0vbd9+/boGENDzZnYrGW2pFzIKgt9zqJ/np/WpefeFTz37kox9trV7+67/0n82lnMxkUzd+TGLubSVwPeJPdGnHnRZtQ1Hv1ZcPTgcyH04+/Gib5sXBN//YSfbS/mm6uNI7v/Lu7gEnuBzMT/TDNAgp4knA5pmFQAWMpT4+uP9gc7X76U89GdbF7nt7oshKAFfOnc/mgLX72Iy1Ql7QptSzNgEIgWa2udVH2k6nwpBG1s3Dd38QIGI8zgNm7XAtaOWiGc+mrdSxmvnWU4Wc8IlRGJd10kqapnBKgaayUgQx2d25Q8fRylIrjfg4OyOIrbaWN3uP3rz7lSrLCPEARqejLIlYQuzDgyHsJkeDvIcINmB2NvX9rpdgQ0FV1v6wuP/97zTf/eOt558jrc65S4+Y/dObd6/fmZ51Vlo09C70ug9vPYBBUCs5Pj4e1N+hXrC1sn5ydCKl0s6e21op5MAgoWpdlRUhNk5ZYCJlagyjTivV2hAKpFTf/u4fRQBwjkRWpRGCQgBnlCzs3NalahGPGO2QAloRYfQs9ynJi2ZSmbwQTaGIh2LsugEpq3my0KrrLOgEFCFb1s66qpLG2SD1q6r5M1TRg3uni722j0uIQohn05N5uxcSWncX0r17pyDyHAKAskFpYMjqbJq0O9oBFnhxLz6tDCIxCiPbZBg7VZf53BCKRTNjDR7NjlE5e+Kxy1/5+h+0Nx7Hqrj/7nsbq5cGESwb7YUk4n6el04b0UjGvKbJCQLO6irTYUitqa5/73cgb41309/8X862luPtGF3A4Wd/7BMz+u7TTyX/6B/83Hxe/eIv/Mzf/s/++8oQqOHLr9z47DMLhztg7dLyZJy99vr8xjtfevOOFApij0ICMTLUh//F/+lvfPX3f29/b2QUoIgZa1ZWuucWo8eeab/44vXD4+KzP/fpl79xM0AtGtLb9/b7aWvLi39wZz+H3oXNTVk2tMWkkF4QIUxlU7LAK6cMOqZkQwms6zqJusbqpi49HEWYDU5utJLFgG/5pF3UglNsKwWsjmI0GR4mvdWULtZzC5Crq2nYiwmjAMCqbDAz82IWce212zE1SIrJeNhbaLUDPw6T6fQUWK6kmWZmfftqJ13PjndjnHtJfHxwu7+whHTo0V41axnThK1Fwn0N2Pj4LMCQctsOmarOymYexIon0en8NG6l8VKsi7moVBz4POkuRF2gmzjSWT6tM8MSXtRNz/NGuVy7+GHF2zRRK+3OzvWddK29Od0JvBnkQDZNU9Uw4ZQHDDgjxwFvIQN7waVKHOSTQ9yKiVeHUZUEvemsns3mC0kAqCTIYam0hSxkxPOxTursgHPNiD8XhXTYaSqaeZgiI+VklnmUylosdRfTC08RwqxEi0utCkKga1MfjPauE6Q54vNMlvIkjIKF5UtRuEi91vDsCFQ1QPCHqiKCCfUUbKQUwBgAOUE8r0tlTBihxgqoLcPUEGBBZYwlENRCFo0ilBCEpRKyqBzEAJlaNoAE0ECKGWG+53MCbRIFysEgTJpGzQvJPY4hlkpCbFdXVt5/70GUcs9jlGDVNNg55JC2hnhB1ZSR5zGGnJOUMAcRZRQYjTyOLTDCYkoIoQg4HnkIUQtsHPjOWYyhsw5jDBwuREMw0o3wAgIB1FJoqKRUmDBMEEEAOcQZxtY6RSBGRkiIMGUYO2eAM1ZTR8MwoYwTbAnGFkHRSAIQg9RSqJ3xWNI0GkNWy8I5iyGCAFLClJFGacoYQrBuaqMdp5RTIrRCCAVxuxalhaTVjlXpLILWWK0lhBgApqxAEGHoEcQAgABZqy3C1AFkrUYIUOyEKinHVgAHQF3ncStlmBSzmlMcUd5Yo7QKQt9oEXlh3tTAWYRJUQqASBSGRT7BDjtljNGce8A5Z4BW1hinJfAgauqSEqqUoAQ7a7WUSiqjdVlXCPDQ8wmhDrpGCoiYaHSjBICAQaC0aaTxvAAY6wCoK0UI9hg11oZ+WAuhpAFIY2SUNk0NrbOcM2ShBdYYSTHAmHqElaLECKZhJBGx2iBAqMesMdqoWkhCKKaMEk/UosEQA2iVyCqT54VU2hqjNIQAYkysNcY6SomoGsxpKeXJ6UCKBgCn5Q+posWn113LhI7euHs/364vf2RN5g8JBdRYEHUcjkFVGyuQJwOCxweDxjaOz2shtfOMtsSnYRIFUZx222anmpzlW9cWCaNtkFKLdVNb4CxRrS3/6vNP3759n3NUEENDz7QTj1lwBifz0/nhO5reCpLOxsWrCqbQOQgsZtYho7W2onDAyYWlie3B9au7IC6akjEPGogdYBgrYzEkFlHHELGBoUJUGSEaIlg3QlmIGJ5OZ9iI/sVN5VxZZZYt5pOZc8Z6XMLGVEZ5/NqPrY5uq/zAdlueUkLZYnGZ/yf/t7/VJvzX/st/sra8UJfZz/+Vn/32d6qj64O6QT71a1UlgQc4Bhjcv74fU7W8lgRpzxKW58XcFpx4w8PBeD4vpet22LSRrQB/6tOPi++P1/v8qQ+h7//ur2xc/IVzH7r6uewnX3vjK1curzzx/I+8+cb0E7/01176/h+88p3rL/zIh85Gr/ziL/yVV1/777Qq+4utUEMnax/DrM5F1SRLbRpZlqEw7vBouTJzkcnFhX45bVBdVxon6dpohD0Zt7e2OF6om+N8PE19Yog7Ppm1MAqK4v2Dl3xqSuQQAknKgROywXWlNILUwTDy7925+crXv/zYE0/jsKeRDdNydLy38kj3O9946d6dvXStm66G2xc7AOvjg8O1hWUr7XQ6Xzu/trqy+vJ37r11YyorVRbCoNOH7xXjSe1xjpTBFDngmqoG0GJNkK0P7r0dmboo3u90tw/n4u/85//3X/j8c1/82c8IJtnm2hf/wf9w+x//o4d7b6ZLnoPNE4+eU66Z6vwn/sJ/MRwc/s4f/RYpVBqwrcc2DvYGN169vUFa/U6ksd/b2DwbifVHLr/4xrt4efnv/ee/8uD175XlwQ/FddAhjJW2AIJOt4UphwzUsyYfFt3Faj483TvYP7+1HJ/bRpRHof/eW+8hPfmLP/eFf/Odf6acfv/Gzgt/+bPD0ZEUhajdYr8lhBNaX7n86N17t6zgC+v9zZjfvf6+j1k+LpgXnB4O47SDKe4vrDdKqlorIAmlse8l1Ny7M76w/VytvTpkUlmNvZhBrUExbSS47TAFQszKjMfLRTPCkERR6PthPp5hRKJWDH7Y2+issw5gYK21hvNAmhoa6LSD0HkAe4RO5mPCGMEYAquVNMoihCnCxkifc+sCZTFA1ELTby
9WZUV8oIHDjFrrZC0pQlo7bhiySkqJAAAQaCcphVUpMKVNUyIE/LA9Hw44I4xJjpHKBCawtbB5MDwDyMIwUdoAq4KA1HnhcT4tJ2GQ9nrtUkKHMbSg20mrbNKBPPRx3XjDwx3f61lnsyIPg5grfHryoi7my2v96dnQlLqZTYVqrj3+Y16cKKvqPAuTLkNWK5N0+vM66/eeqOYHpc470SIQhngcIlZlisNodnZzodv1w6AoR+3OclNpH9kqL9qtzvR0RD2OvdU3rt9eX1qbjcdZmVVatZNkMKm089txK5vU9TiHV/CHnn709W/fuHyud3QUTsfzpNO5tLGt5Gxxu/XOu7OmxF/95kvPfOQaAPrBjeuWIqPMt/79ly4+9dziuaWsPLPaJa01o6zViiU05F5t87RHvvvt39rqbERxdHawl9dzL4xoEC4vb4yn4rGrjx2cjKcPx2vJkudFVT2YDysh6pXtrbw5bvZ3P/bTP9J9i4TEhMsJZVQqMzmZVekPE7u0caAxMEXt3oJsoJgWjSiXV7sAaeqpwCOE+Tt3dg/Ppqnvp/FmVRNtYa3E9rkn8tGothwrcFLsRIA0FEirrdZYBkU+aCch5ARZezoccegtb6xhFDLiKwWOB4NNf4EwbpTAhGjtlJKOEU5JnFJtqTUK+yyKQ4Q97fJaO43daDiP2xCmBAAPUF8rjTFKEt9rpQAJDGGaxAKrhcVOXRRNXQNIGPei1iI0mgGlrfEYtmFUyaKaHzOUkKCaTw7Shc3lc1fOdishcw8jCljjbNDrRksrFuI0DKS2CNa99VZQwtGJprSOkyTptz0aNlLEnWg0K5a63bkFSFvAoMMk7QV1VVXV2GUxjQgOOaEBAA5T0jQC0tg2dTE5m+Y7QQomo2LvVPpRwHx+8fLG6+/cPjge1JXmLY6pt9BtLbVaWNvUp4S6tE3S3kWTT2Z5HQaUlbqW3bevv7cSRY9sLEGgpZQJa5dCy0IK31ocQ7LlkVY2fDXodpCj3POhxgA4ijyPUg3qpqkpVtgKIUthbF2WgBoDcOOspiGP29K4KGDI5xfIldHew/3jXRZ2UoI5hdgpz2MS61JY2ziMAuWKWVFWlZQI13Xttbk1EAfh3dOjeTY0WiBMjLRx3MZBoA1oMcO92SeudZPOpK4ZYYHInFAzZG//6E8tLySyUloY8bUv/9bRrHriIx/55Uc/i3AVLazNrRVonYybB29cD/3w8UcuPtwnWTaTtexGfiPB4YODw83tC49dxKDRGiiljNPM86t6ThuIHOJe+AEKToajhfV+p7P0zilEx0Xx8DoAHNP6+Hhw7ukn3n35JaCMl4Sbly5ODnbreuYO36urAcGZH5RVUUXrj3SdmU3PdnaPorTFPEY8QAw7OHjYa6+Xqnj6qSe++/K9XkQ+8tOfLdRsY7WdT2cP39/vnlsFdl/O66/9hzc++vTjF9oIQok9PBoNPnbl8lOPdR69vPzy11/ZvTOYj5ud6/tXn3z8o08+N62m/cUlXalenBZlNj18KCsLAeuuP/rsC903Xv1eNZ/JMDr//DOE0INX36FMLfajV776amuy1Cz25oNZ7bHFlfbk3p2To7t/65eed8p87Stv7g3mP/rnP/5Tf+5jD994+TtffrmewVvv7jLGS+49GDkOoYXuyjPbIZkdH+Q33rp1/tzGSa72r5/snk1W1s7NsDwaNzYvX3j2mTfef5USJBtRahWG3rmtlccWgtG9ByL1zz93NcuqDkBNMTWOrD52YfgQ6gJG69uE0XoqGK8TT8iDd2UhACHjkzODYRAShVmuwPrGau3k+HRGKHjk0W0tqyjwZ6eZcST1u4XKG9F4zDOmPt1/AGU1L6sizw1QCLndwSwck1bMVvudEMIbL/1ONToBOL727Ecf3L/e7y33tJmdZGV22jj08Cg3DVza7sVRTAIvH02kAEjZkhNh6Lyo3/n2q34nWOovPX3l0qvvHzOZZVmlpSqL8XY3HmRF2vFSyqQfKUBnda6qDGiT+vTu6fE8Fx7rckAPjwoWxNAZDulw/xQWmR8hU5TLa6344kpT1TavEcSytMDgqnRng5nX7se8DfIZBJIY4yzUZcY473R4FCf2rBg7g2LPVeX6Ynup7RmXi4ZMptLHZiZm6dZ5ymFpT6ZZ4ZGg004dxH+GKiI4ShfOTQb7BBruYNPURubDMxFnomqMHIyXtheCMGwqyzDACMqm9gJfaaUmGVDF0spGNsutcP3OZQnbRTVdvpQy9P6Nndee+fiFfBTtHN6/cLWrAv3gqOykaXk84dqrTAOcJRhQjKTShBAIEcJUKw0RRZQ6SDBThCCAymIqWeqdkfDOw+mTW+3vfv36nm5O3j341EsPn33m3Eay/PGL21957aY1bmIvv3q69PLvvlWULyursxnSSiz2lhaJHs5GWjvK8ec+8fh3v/3dW/dPfM/3POpK0F7p/vwvfLaVVLffeyMgflnjb37jOhQQ47zjdZOej6175vlHv/bOrhcFlZifHmXrYUyQhVgQjmzVII07yUpVk/bC9uDshh8F83IiVcVY1w82i/Isam9oZBGXpcz81rYRuZR5vODn2RngXtHkqd/3o6SWc86MFFYBIFUTRp4oLZJsYXntzsFZJwprUVkNT472B840RghVLXbWEI373Q2PBmd33wbN0IByViKIuHZ1utR31kpTWysS3wFCEWDb586Nju9n8yHlige8koBBBQJEjQW2PNg9jGHIcDI/OggSE6Th5PgkiAJRIUR4b3W1Gc0C1poMdh5dVHf3vnvu4mqRyzBIrbTf/taX5mKv1eOMcdzQUlf5dHhu7YqDkRf0A4azYhfgzA8xIM41ZqnVa5SfpGlAYTF+AFATJUg6WZYgjDvjvEQOr2w9mc0Oi6JqDCZBL58LCJG0NDO1RnZ5rQcc4VE3TM+VeVEXGUQmgHY2nMm8WVp6zMJG1rmrFYWSY1KMT/LTw/bCCrcOMMiC4IdUEUYOQAsAohhCACyqZGWQMdZorRGCaatVFkUjFQWAEQ4RnWQTSGgUeFIaoy0POCQIIkwDn2DmjA08BhEjGDOPMM6A0LPxWClLALbaYYxMYygllVZJJwqjADMspLTWzPOMEeJzH0Iber7TWmoQeAEAWGqNIMIYNU0DAWI+s8AxD1BGAECEoLqsOaPKSG2MM85KZRGkjFLmAYhq0UCAMKMOOI9Qq60DCBEspahqATSACDNKAXIWmCSMhWwIxgwBjIA2xjmtrYMYKmMsAIxFuSyBg1IqR0FWzqhChFJGmVFaWw0gMEYTTKx1GBOCCYLIGqOV0tZiCIBTBBOGsVFWa0UQopRYQBoplRGNbigmlNBa1AQSZKxHPSmbRkvKsLWunM8opZx7CGKKMGewEdIAjRgABGVNhSACDmpjtDGTfCaUooQDDFjMRKN1MQ8Y01py5iEFKecQOWuBVNI5GwbcGEkhJhAaACC0GAOptbU68pl1iHBmoauFQBhR7in9gdveWgvySgBrHYIaOEwJRUhbhSh2GDiHaqWVsRAjBBBnLOBkMi8ABI0SnGBtFMaUssABZCCAi
AAHHUbYIWW0MQphF3ietVprCzBqGk2I1VI5x1thR8p6PM+E1JQiISXQiHNPGl3XFYSOMaKkqLJ5LWRZ5ACAwOdhFH2AgtnekTlyN3fPikDS7Wo43WNYY2CREUzWkJcGZ0JlndCz0CDqnn7+4uLWkgJSC9hajCFhGkGDIfAZxHV30ZdNLUXzE3/lz33z336t3qv3H56ee6wfRHRw+lCJzPHGx5HUVmuAUZSsLISG2p1cC1ud7DzID/zVD3d6a7DBhhKLLSHcWU0wNCQstFYGUwaZjw00GDjnEIKIQAyNcQ46DQygzkCKlRECWccxddZB6hMWAyUo48wKP4mV0cg4TJgDVGYDVUuKItcigDUAQSvExjK6uNU/UXLr6c3pg7MgBZZWn/rih+8c3/3Zv/NX//l//a+N4ftHuysbvW47CsOolFXX7yiTadTaevqR2XhCs0E9b6QnJ5NpLgLok0w5QVmGzf/vD//gqY3u2Xw8OxDYzu7f+jWIHkV+9NGP/cjk8MEbL75lvG42ywLDI4gELPcf3jqaHQrcnueTMGEEuqbO7CFOSOqI11loT8rycz/7sV/7p//26Y99tGNh7SzXuBS1crowcHVlY+e1XZ65pMeLclyU8+lwaKyUzcjjqJeETaFlMw8CppzTtQBAQAxjj8S+NxW2kaASSonmK7/+G/tPv/LIRx4rjM1HZ20vfPPV29PpgHfDotEAwJOjk/NXN7Y21ylEzqgFb314r/n9t2+9cmPiYV8LAyh1BIzvjRnFgU8xJATDWoqmcc4RB7BDTp/N7xfvhV4ULfMLj62DRv7Bb/+Lf/Ovv/b/+Ce/srW60AzrTz7zxde+9YcXnlihXPzqP/yVT//ET1w/Gv2lz/z4zXd3Tt+vwdbh+R5oDl/+6U/++N//5b/rJtmf/N6X37xxEzGOePzuazcXO6s33z/de/MfidGooNMfHsnKQkKsdQiRoq4gx9q5bj/evLoIQBklvNvtOAvuPTxyIFy/Eq1vrOQH+s6NB2m7dfFK73Mfffb2/R3Ph0Arp+F4PuY+1wbM5/M4SAwiLS/Y3X3g+x7QZjwpCQ82L57LKxWaoBENRoR6VCvHA1pleSWF10ppa00GvNB1ZElAPAqNNNpLuAKWexGA3FYl8xcgYDLLlAQzU3Y66XBwVhZ51ZQIIYopBNgaAADQuqEIByyVSiiokjguqiYXY8wQgEY7Lw24lDNKscd51ZjawsRfwGoyHZ/ErSDPMw4CqSVoYK1tp90iwBllKeXAgKrKndFx1JqXs1aajqenaavFg2AyGnmcAa3KfMoAaOYzqBsXkMlgAJG2CHNHCIDIOIzItMyNAXHAH+xdX1xcBdrkuTQEUeuAUYPDvaapKedVUUV+oqVYWb925/ZrAALEUMoDDprCyelce+FSmec8iuNg6Wg43VrqOgdiumwMAK5SupR5yT0vm+0rMeuk64S0ldhTUlFIEQSQ6bULl2opDA2Y11WNq+bjcnZw+dFnBoPZxvnLJ5NsLuTaSg+JmUMmaaVXw8eNdAUINBUbi/0Ry456PEnxuz94b2GxVYK89suf+d/91K/+4//JC+LVfnLz+l6rFZI0fv+9o5sn3/AR2l7vWVk/87FL0/HhyX43isDB7v7FrXMA47IsZJljjuo884EMEvXb/+rXPvWxT45n86Pdk95aWuZmNj5B1h3uHZ2NThAHT37u+Yf3htvteP/Gq0vLWyRpO4xAkJ49PHrw1ntiXkPOi6pihKSL6e6920b9UFX06NWtmPPl1fZkVoQ+JwETkiES5Nmwms23gpBaWNdiaWn58tWt06OjQlWVUCRIp8L4nk8d9bm6srk4H4wGkxJRY4wejEfddgcBK5qacOglsdV0qs1iP1nqb+hq7Ld7hjotLdDGqtJoQZCLvU4hirgdI8q0UEJoBxzhjHI6ybPxrNGQFFXm+70o7eIg1XXOsOaez4JESPnBnQM5BtA6CIM4tY4QlgAhnQZVNcZWW8x9P9aZ57wuQGSWTcJ2y47vyVmptRISpa1u4pGoSwXiTaM5p0YrZ4xFEDEeoF5HA6NhVk4BlARzBpRVGinx4P5d3486Ybq4vGmwJ/XMWFQLeTo8Fcp1V7bSKEEUlGVjcUhTXIiTXOkHe7eXz607GO7d+UF7oSe047zDKCuBoRG1EAJEL5xfD6kX+cHw5Axgq2G53F6TOMLYej5n1NKA0QBZWZ2NJrqpo1YoJkOPcBIGgvhe2o/bnji749k1zHXEA2Asp9RpZV1jTW2shEg1UmXT/OR4PixMKZyhLCu0tnU2FydHExIgvBaHPoLUay9uXuR8ODspXdOOA2dVORtCK4Fz40mh6qayBgchcigKKYj9XFaTaZmg+Gh/og2mXggB85Ok1V/wwhaSgozf97V47sNpSc+Qcr2l1WJ0wIPW4eHZyUm5MxC7p3NNWHq+e7F/6XR39//z//71x65deuFnzwkGpCCtxUuWcuN0u7NwrU3v37k+mRjiRBgw2Nj333uIKe0vpTxMEOdVIykmMQ91VQEAAv+HWUUYM+YFjodeyyYxro9HwUpXVWi4P8tv70Xt5VmxGxFvNjpG1gIBhkcnaxsr1OfL5zYW1vHodD483WPYYB5IyC0ho0pRHl3+0DOc+NqjC2udZx5dnx7ff/OlL3cvXU4SkzcZjL2Do+Fjly7cee/W3Nr3DoaXrizJrAr9zqefvXrt0VVG5e7D8cVHnx1Miic78XvvXs91Kef19pWtsBObxsyKAWfk9MGNtLVY1ubs0CZ+cG778tr61qtvvHa8f3h2enZ05157wWd3ayFkMSjSxeUnPv3s//xvv/KjVz7Tre2Do4d+3zVN9ld/8RlLW+1Wa36y55DdfHSjpjwv1OpySxBv67nt2f4+5Op7r9y6sN3e+vDnnzj34bSDWNXaeXB89Qn+rRffng5kEvgQuNduvTqvS2lUXStpXRKoJ5+5EjbTbFrUjRsNjjhzURgUs7yuCmg6mCccYwUxdKKeHRcQEAy4A0Lo9vYGQNio0o9WgtaKnOSqMRsbl2szmNZ1vLGe7+1PZ5lSudbZ5DRfXtzIxyVkUFgtlRZ1fnQyahGuTZVXHgRefjy8vLWAcbG44JVNrQ2J0vTO3gHiDFM4Ph35FLZbfNq4EmNBgICOxhwwtbHV390fME4ldsdVgyAyytYT8eDB2+++fV3xcGMjnk3yw6JwDT7V9bwSScf3Yr+/sn4yHkNuUGNWFrr1ZJ5PiuPh7PJmTwlrtQVGt9p+nhdpj0o57tCQo6aDZ70ENKzwethBm40BQhgQYnSp6pF0EkHX4NqPOsSRMOQmAF5oQoxHhbl+c3dtbTVd7nPSTGZHqyt9uOCPh+N2rz3JypN7bzfUJbFH4WKWV4ThySz/M1TRcDCB+M7iIltZbsu8qVwWtf2BgJM5dIUDUBWnQ76IuYNcudlwVs7zjSubHCtVTPPBUR0iD0WzoYlwAD3LsEw8qwOCpZIjKaZucFJBimBgzl1brqaCKfKRrcd2d0Z1UR0eDyklhBAHUF1ZhB1yEBGijG7qBmMIoDXSWaEBURjgz3/h07/6
9/7uz/7yL6W8qkvzt//h/7K03f/bv/gr4dpz2XdvET/4wVsPX337PjPICBDGQYA0bYeYo2lWUYzTfuwz9+L33rUW9pdajtAAuGc/svWZzz//+v2HtJVMG53XkFE2ORyvXNxKOXWQVAZmkxNEebzQPRhM8wYubLWdQwA6o8tKlEZLa52uraIS4pEzNTDWQI0oh8aTNUTIhzAkrMzmp6aRkK8S4Bqd375559LWxWKShQw3DVJW+xETuR5O58YPnbUx9icHA6vMHXuwsHERF2MHrN9qxfHa0cmeR7k1bDaTyEyDsH9493umLBA1NOL5qIEo8uPS8dby8rnR4XsEiPz0DkbGGdGwAAHV7/YdWxDlPuF8Z/dub3ErYKEqJbSs0snFC4+dndyUxbGTTSMznVeiBgVwl1YvDI/uuRw88qGfmU52Z6Nh0UuUc8sr7dGkPDraTxapNaAuq9nZqLu11lm6gsMrhdFAo6STwvoU0QIyVWbFfDYHXqtuRLfTHo3uRkRwbrLJ3EEQkM7kYCdsL5eNOjQOascgQpT6wYIFQOaTomqot7q61CZgCKzFQE6HO8YQhq2sZ1pOscmsKmuNiN/WGFo7R04o44SulFL1bI8AlHa7lRIfoMBaq+vSIej5gUfxdFZiCsPA08poZRGipawgoT73CMHlvBKqNAaFgU8YbeqCEp8S5oADFBGAjbac8yRKqlpp7WQJilpxDB2iNMDNdK5qZYEF1jZ588FaRDhSUjgHIEIIAAwRdFhKzRxwxjgtGwcQokYbxhz1GACOEoARnWY5JswCKIX0HIyiRDQVowgilBcNJZRRDABAGEsjrbXGGEwwcM4jXqWkaBQSilDIOW9kzTDyCFVWMISNkVZpqzVjjFMPQaulQpQkga9kKbWEyEmlrXOe7+d5iTBRwgLnCAYAQASgdRoiZLQ2zhrnPkiFtsY2WiOIIMGiFgBYBAh0ADmHobEWAIChc8ACj3KK4QdpzdoQUZQgQs5ZgCjEgTOVUgA5J0xTiizwYwQItCr0ucJWG8eoV1dCSmmRc8BQQrkXGi0IApzHmZpbrZ3BECAHEUBYCEkYMxbUtcUIGVgjBBHBtSiQgw4BAAHEABPq88BoTQDWyhgNOabWAmudlMI6V0rJGQoCDiAwWjemZtxD1HMIWQQ1clKYiEVC1Y0UliHKOOUUIoQQYJhIpwni2hhtjQZQW621alTNaEgR1UrLUsu6kFphRpxxUmmhTcxDhFk+nzeqBsghiBopAbChF1RVXZa1chpjBBWYF4V1phGaEB6nbQeN+9O+j3f/5HoL9LfPL745uvnEowkgSjlAGXaG26YS1kJVYODETEJgZ4fzC1tXXRCVYmah9bwQACeb2pZ4XusAAURxMa/6rejo/i0c4he+8JkfvHNr4fKC9AuHddii1DdxaLV1DgSN402tbNMwz/NDG7eoAOTO7VeCa091Oo+WFiAAHUTEQuqAkQBRTpwjDmHggMMIYgKwBRgCBxB0ADlEnXMAMYSxtXMlM0ggtFRrHPgdC5BSGkHoJ6GeZT6xFLYr7XEw5hHFNMjujytrIMIQ6o989tlwEQ/fH3zjlbtrHr/8iQ+///atOUDvPbxvXjF56+QX/8Zf+sQvbP3Lf/J7Jxlmc13MsjC2z7/w/Juv3jqc/NHyavuZn75w550zWeHtp3vvvZVZCUc14DGDyB1Miy88f/Fs72BpfZ0qsT+4Px8a1Nu8/OyP3/7tYntjm3phM9EdiN568b4AzNXgy1/+l8xfDsUASDUXUwg1oRGxPrNBOQGK4IcH1fnNRwLb2rtz2Ep6b715qHzUWV86GZ/oNw84ZBcefw5hXc4GyKtzU6R+/3Bv1vNBU9cYmjjCgW+tgdNMKdEEvgelCxyCoTeR0mkIMc7m8pUf3J7XVbdPGi1sLwTQEApDn58M9WxUJYicBbPWYnp6Mg1sYk3w+ss7b7w7SnoLFMAaaqEt0pZgjCBoaqOFxhGLPA9ooByyCCulo7YHAlfY2d7h98keWe0uV5kKo9Z/+//6rWT58uc+89HP/9yn7tz/67dvfvPowXF19vJ7O5O//3/9717+1v1xzsLW9jx7b7E3V9Xgpz//8849jTfjJ5/68a9/+7/9n//l7zz1qb80m/h3X9uvAAAEzycHKPih6QABgBGgEa+LAiJ7caV/+8EYUs59bRzdvrIVJ2ng8cHO4bya3Hjr+P7p6JPXngTS/JUf/+T2SnB2OtNOMhoHgT+aTBsJpbVOYzgbE+QCJIvD+8TYhaX29GwGYVQpi6SxmHS7yfB0in3PGkMIVUJTDMYHAxrxBwf3OtvbvZVlO61EI6SRFEFKOQ+SPJsEXhh5sCz2CNGBnzAaGisgcGngS1FyzpxWnFOtAdaAEN9Ag4DTRiBokbVNLR2oMQVpGMtaCGEwwkppgv1GGgNgqYWFOopShJkUxUI/qbU2TnAv4sbquqa+70UeRBRr46yAAGBC4zTWGLZaiwQipXWcdBzUzcxYXQQx1kJU1TzPK04CbVCVjywEkBBRTDHjnbgVcH88Pe6m55Ooa7U0wGsM6XZ6w7MTWwNi/ThsCWUo9aeD+8d773mUSuWcAUoWssp57PtxP58In/pK5b00rmpnhdXWUhZo20BMEADOKo+moq5kMwMsFA5gBwMv9qN4PB1ZpTwei7oOOGeIl5PjNMI+SqazU2V0XkwbCZcWFzppMD88pMh5nGWVKmdz3qHPfvrKu6+///Lr70fM2VoACfJMnY5HDpN//ztfee6jT5WNHh2cEYqnk2xyNhbO1Y0ZzYWWth+zG2/fX95ePf/4wvhk/9HHniiHUww15UQWSFZqcHS2fW7tm7/571vcixPIEs9v9bqdnhAjKcu8Gn7uqRf++FvfunjlPPPIn/zJH15YQ5xzFhEhmm7EHrzz2nNPf2JwtBd43BE7HMwJxTIbwJSs9Bc+QMGVKxvMQYjBQo+HYSRtAwGAFEWLC51wEyG7d+dY5mxaTCk3SJGAeNvnN+Y1PJk0zeBksRP7PjR17mTljF5YWh6dFVI7Z1wlpVawnAiEkNXWqCZMsgVS5lR5BAFjjcHAAimFMTNKeD7dVbJx/qLvRZxzJ6woDEUNsxrKZri3jynA2FlrgFNBwGvbGGGlQMbVGBNCkBEWQUUIBjxAADLKAYAQE98LT47Hq0uLAJJsVlkBjIIAWSOrbHBqymp2NtYQ4ySyWEPu17UiQeg0JoHnpNaiJJ5t5iPsxTyI17cvD4/veUCX8wEjbP/uAWQeDnndKNsOSNg1WkLHAy+IU1ZW1f7eg6KqL25faS13OaON0ko2dV3X2ihDqIdgCEhqFWyCMFRAdFYTxPB43HCGfcg8B6lSxoJLl5+cj84MULIweaUBgiITujLz0fFCK2QUYqcgY16cWIh75y8R6plMIC3Gox2fEeuiMptU8jQMGbDaNiVCwFglhBC1yAQaTeTZSdEgL3Og0BIxvtgiSA59swCVcxPgez1CaeZj0HBAPCPr2WTcjhlBFkAlnawkzLIMMhr5PsYcUxQSyBUKmc8XlgdTpWgTcK417a5tRmGiXDM/Pfa
KvNPyv/viW4882eNmenQ6loJXOtgvwoNjOzkj+2d1Ycecj5az2VLU6rV7x/un/+JX/8fnP/18d2Vzayl+6trFl773MvPosx957OKl5LV3boGyOZtqTuHJaY5vHX0oZgpigAH1KUGk1gYDTBkF6IcZptPTrDqddLfA8tr5QAxOj3d766taR0mfN1p3Vi5sdJeEyAZ5Pjwa9dYWITQVMCqT5t4JNaDJMk7tuQvnRoN5XVtVVcqyRkASdkrn1xrv77znxShKmuPhlJIwI34j6cmgCVrxylZKgtW7O4PTUXbr4fDiRruU880l78JKlJe5ov3RqCjqOjXEj6msy7jV1RLt3J9c3to+PT4KYr6y1XbWsiS+e+ud1Y21+ex0PNvlGM32B/PhlGGiKgQN2X78eX/t8W4/ePPlr+Hp/Oje/omCG+sro+GBQwwTo6qZg9aImVcV5dmk3yI/8ulrr3zrnYZXF7toWIWj3QPBw/7i01//tz/YO5088eEnc8UyFX/ycz/5H17KGNtbXkrfvHGzHbSlslZYz8cQo26bxxwxj/X6HrbB4cFZmCa2mjtRxzEDorYQuVJXZQNTHjDskpRGnZbf0aPju9fvZqdnOKbH7nAjSFsh4RTuHx013D//1I/4nLicDM5e8wNaATWczQGa9tPe6GzAUi+OqWuMKOfR5sXHNp/+7u27aYsvreMsn4wZyopDi1HIsJpOQwc3txZvvvPm5PSUBSxN/dN8rBFPzy3lMlOF6S6HupifFqOtjTU/ZBTg5WTp4O5NSQBkqLbSGbO4lDANpKNNg5Rx0rrRvAysORnfkEq2WuF6LzncP/EIm85kwthW3zs6u39xU7fibGXBllKwCDW5CljBrA49W88q7mGMsWlURAFiBrIgGzdASxYzwT2kglpUxNk09RpkRVVgqBY69NJ6IGSmNZTKGOVmw2lRqYvXznl+Sx9lshSz0YCBljCIxUvHo7H704qDH1JF8xwAYDwA1Gjo+6y3mAJkEwGNxNJZxEBVVZGsEEfFtCZBlHppWc6NmiGXdxd6wAnkvE6vXxdNU06sLWfD+mx8YL3Y+mkU6rZY+/67N8FEIY/rSpRlHXne+XPL85mIWt39nZ26qLR2yFmIkYUAOEcx1lZbCwCACCHmMauVyKevv/mtf/rr8WK7M9w9ZQh1aDjey/7B3/tPITBJOw64QkaIxiBOvIBqmdMIU2IY0+0uhYCVRVVUglCiKFGE/kd/8bPlg90nntgcjE6IqJfPP/v6jYeNKAOu04Cvr/YTBavBKOx2aNySRfaJ585ZKa/vHB7kNS0ZB00ccutUkrbLmZLYdyxtlEQYzbITBMnC4jqVCUSplrVRDgLtsZCHnUprpZUBfHHhEehaURx5XuC0NaW0xlEoPKQtw3UNbRMgHUpd1KXuMa+SBQI2n55WdRnGPuO4ncbU71mAZ+MBRs28mXLone9fLJspJRQpQQ082DkRle73kfNUvBDeePPm8vK2NZYTWNR5XQtEPUQ8SpK6rofTeWvhuYurT7zx+jeQm66cT5TWThXnL125deOhT1uE+tSLDXFH88MeqPvry5XRxXxWksqgxXCxjUIUtvxydHJ+65KirL9yJS9MUWRam1bMzgYn7VU/iSMzq43DRV43tfYodSgPW6kXRk6EVVWESRchV8sawfrs+DQImA/rgCJXnmDNnZggrQMEE4KonEkho5h7GEJsIupOz86Mma2uxmMvHE1MpdVkJt2kIRiWUDpgmUd3d0/TOMkrS/zof1MVIYwdQo1QRimpJTIYKEMgapqq1VqIfF7XQgjZNFIbpY3BkCGEuOchBIGB1lnjjLXOWhvHCUawaQRGzEIDIcqqCsbci31MXF0hWVrCCKY0zwvGaK/XJhhCgDn3pJDIAYwsgK5pagooho4QDAlAyFpnlFa2lsYYY4HSFkLijIzSmFCujTWiscYarQEA0MI4apciA8Bl8xEAkBNPQk0BABAMhxMvSHIpI993xgKlCSHAASkEsJpQjiwgiDKPS9U0QkIArXXYuLqurTWMIoghc8wBoLWFAEgpKPW1cY1Q0GmCEHQQQuwI0EoAB6z+wAVlAUAIIimlMZozD0Fa1jNOfcpYVTcIAWCBc5oQigEhBGOG50UDKYQYM4/OykI1xmoZeBRaxwh3LsKQaK0JpQDBoqoporksLLRSN4x7SdzK5pnSDiErS2eqOcXQYRoGSS1yKaVWBiLUaKOVNVpSgqUqGWUAAASgNlprbaxFEBtn9wdHPuE9tgARcdAIKQEEUllMiLGqaGrMo0qK0IuArYmFDGBjbVE1DjjnILSu1BpC6zOKKIYAeQw5ALQxxhjnnJB1SCMEQNXUjDgMIMMesIAgZIDWVkGHnLUQIEY9AxzlHAIAnVNKyEZghhqpGik4ArWuGtEI3WBKtJHOOsIQAARjL8vrWZ4ZVds/7QGEmFuBD+4eSDhyMwkTAIiDuIt8LjWjlBGsEt8HzuiqWVpaUq5pKmWQEo1pICXYaln7rbi3ttQc72IENtZbpBAgzy6c27KwPvfMknLi3s7OxiO9KIbOGDHLic8pxghz5ahCbYtr6TLnlBC2F/f2775DL7VpummlABQiSKGBGCBkCMUIa4AdAAgi+AGThywwEEDtLHIYEgIMwCgE0AJgpJaNVtY4pxQqAUBtQ4VqEMJFVZ0AMGetRxkIhJzWcnbhqe4kVkevTXq09d793ez7A0eD517QK9tEQnbhz30CQ/lpd348Hn/kR6/cPb0RRvFnf+kjO/uTB68fdVLPVPb6m3ex1qd3D+/f3Am7Qd4IZQxeBptPhPkRBnPTa/GU6DZlX//Oqx/+7OWzceYvwhd+9uL0uNg7eGvv+lRVRTmpW+lyVerh6U66QQeFkGdwsR08fumTb/6HN4lCwJpuv7XUXyVZOTodPb76ofcHJ4vd9QE5KWenlx5Z2z2cXnv6Manz1jpTs3nAw2quDk5uLkTRnffeITTjPvQ8AgURsiG1DDhEgGALA6Wl0X7oIQchQB5jxkCikaktRJD5XDo0L/KFpZQozaA7G+dN0wSR14kJUkHeyGZ/2K0lxYzHwbmNx3/vd1+MA98ZJQuzsrQxFrO6LKhHlFIex7XSUhqKsXOAIKettFJrTCqJPd+bjJyty5PdBwq4PjCdqmnx5F/8039499pH8gdFm7T99Xp5bTlauHx5IwzL+JmPXf2TG7/z7AtPra4Ont3+gpy2uEeNMZKwZy//0pfE18Tee0Y9YaTZu3krYqrn6VbI/nQWYKudM4pgAjnvpd56YtopG4+mpqEdzru9ZD7OVy5s2MPTYmYGD+UPip0XnrtgxPRsb1YWlU+ZFMbICiLIPQ9iVAPTSN1UZeL5DGNt8c7BmCEwHmWOR5kqlXHzvEIAUs79IBC1UI2MQh4lfJLn3TghDbQFFEJRzqqy8BiSxlVKe4jMJlPfpxxAYBimXhx2Z8W0buaVrI11ibeMCNEWGCOssQhpC9wH/xtyFDhslAqiQGlTl0pLwTBumhkhyGNYyJpCtuQFoJrnSnMeSkXzTCLP9vpJPhcnJ6fryytVkUed0G
pTV5XvMUjx2fio3esIIZwyFLsqK70wVRZYyMI4buqzJp/NxsetTpci4Idd41xTjnmrZ6WRZZlCs/fwoYVgeevJrFIqK5KEEGzOTnarqqKIKW0QwQS7qpkFCbdQhN2OmZxASMpmzAI/wNzCcDDYT30c8HDv4R6NU63rvJLAwwBjK5THue/5jawJx65CEGLCPT/0m7JSZYYw5CQIg1iUdjYcU2snJzs45L3Ez4fjIE2VLFudJY+ZkwfvQyGsdtVMU8JpGApZXX/79Zu37lIUhH48OSmIQwDrCxuXq6qe5NnC1srwbA4bHSbht/749fZSf1LMLABhgNbX28WoaETZjdmr3/3mx5795NnxsWhmUR5C6MdJDBDqb/SFEl/9g68ZJt+9sff5n/cjHnTi9MZbry4u9zvJ8u139peWNkPfu/HaqwSi3ZPhVme5aJQRdnx06hPkiK0RvbN30u3GgDPK8MMH+5QisvRDFESRH0YhYpAgKhvlNF7e3ozi6OThrtYWA+sFXA9mUavzQS4z86k12vf8NOBxL6nrTCkwOBk4Y5jvZUVGAl/ophBlmdWceHEcYmGWl/rWGtNk+4d30/ZCk2Vh0LZKKtkkUSJFSZEzlmmttSjKTHsswhgGScjTsNYZQWUrdZW0wmhIsdUqG48AQrKW2jhGGaUeYbSqT7M88/2Wc9Q6w31EIS5mhUf9tLNU1mWYRIhFsyrHMKAMattkg2MKrBDzWlooirPDg0nSQcy7/NQy9wPue1ZUzOMOGasKj3sWOcYRw2QyGmojfC9ud5OmUr3lc9N5ufPwRNt449IGbazCGgDlQ5qkMs+mb7/5va3L55N2xwsjgwz3IUu9dnfh4OF9g8xivzcaZxe2z12/sQ+ERRIEDm4sdtZ7MXXS42G8sgQgS3XgUToYnMRx7IWhE2B//5RQGCcx9qK1c5tpbxWj3vCsst6ScDbicvTgLa9FW8v948mgGE+QqYSqpZIqnwOjmUeFkNm0GmS2UlQCUhuUN6oymFptAlnlxd7pWW8FTXays+NDP438Xndhtb+zs1sPjxc7XrebzEb5vFTKQmME9z1IsDaa+1QI4wOssVtYafG1jZPyzIuUKvKk14bQCpFZBLNy6pSaS5TCtVv37FI/6Xf6d+7uDkXT9B+59sJnHzfo/ddfvHvrfWHk0f7gGM568SQJvNRnP/jSb1MePv7002CifUsRAOdW1zr9fl2Jw929+6fD8VC1SAIcH4wmsdbY6qSTAuCUlCygiMFaVB+ggFFArQucmd66nstKF0rM692773udlfXNS3WdiazkYbC4thbQpWk+rOqJkLOFtZVWN9q9cV80GsX46HBIEYKqpsRbXLkwm+Uia6CPL17YOLnzvoRq/dGt4q1yayF68Po7AW9xzY7uTeJVywlfaHvWwbPp5LlnNscHEyHd6dmklYSysXKap4CJ09nmUoe1Ip1TKVUrSIEARNpqMOj2w9sP7hN/4emnH7/94Hhj46IW5dHobOf+g9Zqu7XYObo7/pFPPxl1L/HeQj0+Gh8/+NkvfuHuUKyev3b5kZXjf/dvtLEUt8syf+Pddy9/6KqmrlH46Kz6+vffD/zW+HR+9eIqQ+byU/3N5Stvv7q7dvHR0h5uXblWazYvsj/6oy8f7d/FAJwcDyM/tMC1e63ROIMYG1UQUN56++2Eg1lLYuB5KJiLOko6SSsKe0meFUEQNrBi7T4PopOb95IkCbrLxTy/8ca3Ns9tIItOptPSqQ1t7GxeQx0jEyUek3U9E9V45mE+M6dhKz2bF1aY0WgqjWtq48WsqCvr0TPRLAatxy89fXT7RUDrre3e9fd2lWQG4sgn3U6tdvb3Hvjzycw6QxoHLLiwuXgyLWVeUNdgSopJIwuLUTIVmMdM1ejW7Yf1TJa0xFhRDRBHg5O8zjSn3nQ0rSEACCAMilpKBRwAksGHJ9ksr2JkpGi6oc6nd1qhDQOLsVCqToPA80uIoZTK1DV0jHPPaWEBxhhbZoStXWOsc7KSg7PccRDHYVGfxlHMCCQG+WEkNfAh7nigdg0KQghld7Onivnhw6NR6Pc3/Sj2HUOmYn7EZ8MaUNbqr9tq9meooqDVWtq4ODp+sN5NJoM5jwJjKmcRj9i5S4u33n7f88Kjh7ud9QVkHPMAoLTIJp3QK+U0jNP5eNJtpSTAFpNOuzU9Pp6c5kn3wmMfviRnJ/0F+cbuUX/jYtBdeumNm5HPVs/3qkqobJYG/vr21tJy0BTN2y+9C4izyBkKHABGWQwxJgRg4KxzzjoHKYSTo/K3fuNf+1FgLJJG+wT6ENE4JARDY+azEgFHCDZOS20aqYhjStqyqhuhvYBja4BU/X5366NPPfPR84th/ubNo1u3HKxj6rMv//bXTncO2u0wxFJpPdp92F9YzstmCGaXz20Wg7yNTX+jNZm5/Tv366PRhXOrqAsYdZUrrNSMeq00LYpTL4zKcgIJrrJhxHwSLSCLO+32aDwGmjjrtxYWx2c7UdKzLkPO9DcuHx7sBBTy2LVSfTieWG0CP8qLbOfBXWpVr+PjFDTFkYW2lSbcMUhjEnqMWdUoEm4RhhvZIMJTzlrtznSmKQ9nxSgwQdLjnCbSB44cTU/2hGZb5zZMbc6d3965s2Mgb7JZ2l5Zaj2iJY18rDrs2rWr3/3qlwzMQt9RyopSRUG3yZwHWgvdJ7u4f0s8uHLhU6KYjo9edw4gaqZNEcQLV89t3n3/GEg0Pp0HqQe4tzc62277h6fXPcRXV5epU5RCJSvE1jAXNPQDhaGeMER6/RUn5hZrDRhPzgOGWOiaJjeioozHUdeUR9oYpavGakBRO+hKk4nmJIgCShGjpaqGTtq9k8k4GytIhllOIcum8zg6v5osyCA12mblrGzykJL+QhciqLRr8vn/dh54MdPaaCml1phgrRTD3BgQ+ImQgiBU1cpoDSzw09iHzkKojW6KijEGEUIEc8qNARCSdtqRouIRdhZCiIbzGmqlKuyU1U4qaYEBFNKqqoCDjPmMUdkIB537QNUMgTEGO5ckCdACQ8AIM1YhhBAAGgClrHOoEsIBFHiU+QRiqJS0xhKKG6mDIMAONrAYzUfMIw5AYBEE0DEwr7LABBACL2RWV51W5FGWZwV0gDCvkYIiZy3UyimrIMUAIUK50jr0PGO0c1ZpBQEmGBgHnHMAgaaqnbWtONQOWAswQkpbDKnSxgFFCfY4xdATptbOceY7CJRoIMEEOIigkJJTzzhQ1QJjhDFEmCALMcRSagMcxC7wvVILBI2UkkNotVUaSeeEzEPfAIi11k5b4GBZS4aYBSCriqTd4o4oZQbzucdQUxZxnCCErNAhC/IqK+oZoQRqC6ANfK9oFMaAUaa1QcjXBlKIHELGIYx97sO6FtLZKOkaiCXiwGpCmdXGOuuQhQhggxjDtah9zqqmYhhRS6E2UgqjtQaAEuoAcM7VUkLsYw0wstZAB4E0VhjLKfYoscZUdeMQlgAACJS1GNqmqYzRfsgIINOsaipZfyCNYZR7PK9KwgxzsKjqsm4QRAIAicq6rg1ACLmsKKFzl
FMMrDEgDCmzeDaunbEfoODJF57ceWlcV1XUbzdKeYQaSAxJCdIcQUg1MBYErJiKbJgtJOnp8YNkfYUB6BHNHYUGRiFjgetttsY3MdYGN6aYZKqREHvD6ZykjJD2er/fTb1ZLTUCkAQAR5WQzGusqT3PRslKrRYzMStHo1YPMb4yPrlxcXGjNARCBCAx1jljkQPQUQChgxBAYiCATlsAACIWWWAlRNBCAKGG0GJMIQ0MAA5U3RaDMwWVSBcujVRWIOW39azcq+vjuLWAPOY5fzI8IXXtgGxvOQ78bJitnls43B2+/tU/xJ99jEWWpvL44LS7sZhG7bLUWpJamfhcuLHO3t95v5PEIJdSAT8FH/7J5955eedPvvb2Z/7a051lkx4VSy/0vvIvbrdw4Go5L3S4GEyH1eP+1sbzWy9/50v4UqLqkSj2d4ZDTpegPm6lWEf6lbd+8OFnNyevna4vLt97+ZubP/FEMRftBCz3wojwLJ9EwPgJbJqi2+Gz8ogEoDZiMCt7y8l79w6f2Vx856W3ANECNJgiRvTRwYPAq0Qxb0c8P7nJKkl9ktAw8dDRqKLOtgj1OBIWG0CUkEbr1V7PAQsgz4GT0ulaDB6MwXz6i3/755YvLvyz//qfedidP5/s7U0IJ9313u7N04Nb+36Itp6JXvrBt+ZaNNLZxqyttf/jv/yj/9O//fLdrDAzwmMKLEzSMOnGdVlRVTQCWO2wBrp2FulWQgiGtTVei2fT/PR093Dv4fH7bz72yKOvffd2DfD2xza6/Ue0jr7z7Tv417/8n/6tnytzMB6O33h3h6SP39/deOzZywhYU9ce4pos//r/euPv/8d/9dFzF1//1ksYljZgYr371/7OZ/7gx/4BAIBxppVyEAqpDcBp3P3QlQvGqG7I5iTO5nY6OmKUZPMir5qljbWPioQAjbn2MHFaYmP80Pc8opULvbCW1WJ/sZQzazXxidTKD0hdKoQQ9UnciwT0OWej4VhbRAk1xkqpIQBRlDrTnA4GrW46m8lyMm8wJbYKeCsKPWu1tRI4aAjyfQyxYX7QCFurxuWDuq6ShFsQau2cxUpKghAAiBIPQgswdNDHTjhnCYNaOqWJdUiqJvITxrymLhHAGGFrrTUNQEJBIoTmAcc+YIx6sffGay8nSa/bWTk6mXAMKHKYMC1kY6y2SggpZjEgRGvpJaHHSaRZ0QzyyaAaNEjVFIF2d5FGiZZNEqZlNo2pRxD0GDSUFlXW6qwi2kr8cD4964SwqY9pmkAXVaXduLJSlmZ/f9hbCIupXOwuTapRfzPNjmal1a2wFySdWhtuKWM+xsBCyxlzBECOu/6Kmp1Ch3CA8/EYwdhoK7Q0NjLKRYQQZI2ohLM8DOpKWJtLCPyAimwWRGHU9urZDNsw8TuGeyj2mZOlNkLppNtFUvsBz8oyGxYLy72tzUcw5YHv9u7vPPX40yNNlh99VOVz/uAoG+UnDyetiAch/einHika1VtK3nvvACN8587B9ubij3/xU7dvvH/t6pOjadUYf5Ll7VL4fuQxcrR/sNxbffWPfjcM+d7p5Ee/+CGHsMVqMhsfHh598tOff3BnsLyx9e2Xri/2g5AHK8urWabCS12jZTnNVzd7aeSV5dDytlWY+y2Yn1ECgdB+GAxOTz6YBX5AmrrxWaS1lcpKjaaDuswUIsyDJp/M/Jifu9ILt1fPHg487D3YOV6BjlHVFNXp8WE/ibRDPOSiyY+GR+t8TStnrAOQ+H7knKmkvXjlSujbtBuHUFRFxhkmGCJbOtVokVnW00IV1Yz7KSdUqTrijOLGoXCUiQYjAwkmNE6Dalq7xp3tn21uMo860Wjm+cwBg2mjbFVOtbQBSZAGlSwn5XShv04934+5nOdZ0UBVWSCb4pjRjlG+kspZgK2VTdaUU8qj6Xx6NizvmmzrwuXHwzSIAmNx4HvOklpU0FqZz7WFOgy6qxsaY13lR3fv91cWrl57xO9vtysA7z4opqe67ABrrXVVXvlhvLy8ZjE4Oz64ffs2kPbC+VXqkUZAoA2FTAvXWuyZifZJd+9gKrJc5LAcNmkUL4R+6rM47a1srBckIkBYim/d21UlZBFd7MDx0eDcxcV0hgsRn4zq1SvLMFyRIFx7bMMxDK0Fw+Ohm0AYWtuQOPFlWgxHO7ePhXXGSGtBoz9wyQONPCFcUyhIyca58+3FvsKo77PibGakkNVcGzgcjXiG+dE9C0yiEmDhZDwbFzWHpJEAUD+I/KooIGYIwaQTtZIISD2fVdTj+4czmckmU7oxYWJJk49HhwDaTgfnZ+DBWXky1v12eMMcdruFRjg4d/XpT/38pY2t2e6NtQU/oduT2fThyWQ4EZNiNslgOw0Sz4Om/NqfvMSYY6mvA/LtV9/5zI9c+/gLHzlc7d3b++oYm9Ny+sknPtHtNpPBMI78fHCClNSimWvNlO8nP9yInDXT+cxgCoDzoAaRN7f60tPPPXz/QXO6g8NElPJw97i1OFrprp5/+hkWB1/7w6/evnuYpyVCBEBlSoG61mKXybLbWqi0a5R2VbOUeOXgKAkR9jEoixCy4dnJ6vYip6k04Gg277U6R3s3aaNSis+G1e2bR/1WIiBgUSdIQl0TPwhaYYhsXpnp6urCneu7EPlRO52U07QXy2xGoYOu8ZwrT49ZkyGRilnRzMvNjaWz0WA8k89+4WfOzobXb7z46c8934/d9pXzt3f2P/2zf+H1V958p9g9GZS6nl3cXI5b/taHHtk/q+uz6cZ6v0KOwVBBdvWjH5reP+pf+nS6lL71g/sf+flfUoGfftLcvXM/OznVdfX627cneR6yoJ+2eoGXZ/laPywncynEp5+/xLESR7OFtaV2ezoZzhfWznd6C6PxuCwKrXQlpSPhwvlHBiYKO+ubMD2+dwuavL21fu6RR63IlFGd/uL61uPVaAin2bycG1ynayjsSEdw4+P1C2v6fk6xXfRxlQ83Ny9O5hZwz/MTj/fq+TSvZoje7obJ2tLaN771tfWiiQCRgA4KARDutTstamxW8ThlFFoH2mlr8+L27/7ei/NMfORDV967uU9RFyvVBlM9Vc6Hrqqbcl6DKl5YOdePsuOT02ndWCCNy5sKIgMI4dxLEjw1BjLPWVcq64VJq+d7ZeWg3F5JmFPWYgcIABBAqDTEDSCQQQQdREK5KIoZVULU1mjnIKMEAocCiFl4MsOwDvppywtKiByAwEGnhCDEm01nAYftXgrDZHw2KbNcljrwl+dDyLwGE2V0AUElBaxnMyfgQrpY2/rPUEXHx3Nj9jopw4Hf6bRGZbPYW50fH6hiamyxvL1kJNTSsJgHkI9OJr3NrSobDkYDYc9SAK3lWV5DNWHRci2sApT56+PxwDbCFJNmJiuVDnWoznIcJco0xkBkEYCwyRvR7AZhjMLoyeefzUaz2XR+OjrVwjqGtIUWGACw09ZYaI3FjHCIgAPYZ1BZXWmhrLGGMOac5j4FEiCEjDUQYlkLigjBCCAoGgsgwEgudINPPPdC6He8hdbh9cMjNTTeAupsDe/uCkfzpg7i
1ACHCT8dnBC2djDN0jRu9fqEt7uL3MynZVG2UvvTH//p7/zxt7/xzZf+0l/+AiQaEa6xVqYps0Nr5ayWjeLUaTcvaVzSUEIllatFowhCCMPZ6DhO0iyf1s1xP0nHw53AjyC0QjbHR4Wy7HQw0OU+9xn0XK/XVqV0GreSbtBKmvlwe/2JwawA2DEEeJgYyObDI59C5IXFVM9N5TSonbx49eLwqKhq2Ot62cmDNCXQRGLm0+D53bPXTqqTPo+Bs53uIkbLTQPn2THqyOWE3n77n09nty9de3JwnJ8eOwvg8vJaOal+8sf+4tvv3Lv/7o3VhdW2dTvjh2krqOazMPHizsrm6iPf+96/Pre9bRUaDPPu6urh3rEtlTx5gMRcGOeFveOTA5KEysiytM4aggEK0/3D+4jTxWANIqdsPS+maRo3lZjPxqEX1hUg2FCivSgpqrl0TdpeqcZTQoDQUota08QCdDica2U9DivVDEeTrYuXgbCtdOnSdnTv/sCYJk6TrHQzKYMw9KgUtW6axllb1PJPR4IzRgshZSMIRUEYaqkAhEJqRGEYxaIW0EM+8oEF3GcY2LwsnEWYMMw8gkjT1EkYWuOsc/NihgnHlkqtqmLm8RDHcZ5lRmJppNHYAT6eTKMoQhjLRhijHQCEEkioFg00gAc+pdwi5zRAFhoLjcFGWSUV4UgqU0rpBQwCCpgnrbFKEwKMs4R4AfGsBQ46qY11hnNqgUWYCC0AwEmUcBRM5yOAYbcbF2XjjPU9CgGq6pm1LuABJMwZ4Bx0wEolMEKUUwssREgrY4yBGBLqiaayBgDotNIA4VprCJFRBhBCEdXWIggBxJz42mhnFYAWQaitdM41ssKUIuCMqoHDlBEhJGeMEAQQVo3ChGhrLHAQYoq4UKqWkiCECfI4r22jrUKUxUlijJFaB2FoFRiPZwDaOAoxch3WkdAoICb5qJN2fS9kGANglNEYk0ZVSjXQMEIxpARDKLUxWgEHIEIAgVoKSvxp3TCMkANSyhhHzkpKOebMOlAUGWNMGEMgjsOolkVdFxhAilzot2pZC6cZ9RFFzgErHMLQp5QxVpUlQDgIAgeAscBnQdGUQteAIQuRQ1Roo3ShjcWQcRJKVUIIzAdPgnHVKGelAqZsGs/hgPOyEqWQlWg4Q0VdNkJqJYUDrTgVokAYQWCNFQACqQ0yGBFWydr3mZSCUtju/rAUczgdlFWWu2z58V60bDRRDBBoFCVF3WhKKHF1NrLYRt2Frmoqr53MVSV03ecxIl5ZVsJpytksOyvrPOksNIVMl3rLj12+d2CSlS0as9PTvULWKDcojHqt1mxmAXY8Yc4ZijQFEFpOSBubpLXAdH5ElQHV9OGt76099inVSEAdRIhAh4hzVkNEIcQGIAABANBB6yByEDhILSTGAYgocMZYAAmBlvKIWlH7s+Le97//uZ+6SoLWTnlcy9xqv9NJvDDQQsm5C4JW2u746yANwzvvz2bY4qVUNdK70NZxoFQ5Ocyms6YOMlDXwMD2QkflajwqVcSv/czTl8/FSay/8huvXr5wcefG7diPpfVHEwNZM5tOaa/+m//D0//+dx4efbvJtHUt79z5jeHRDffie9eW2+JoPjk4RcgWme4ur3e2+6d4cHt/Nor7r90eZ3N7cnKa9J7sbT91cxwvrhZZXgd9r5yMwzhZPPdIPimimN155d2k27329LU/+earTz156fPXFt78+tcPjjITovX1xW4SzQ+O92/f7HfMcoef3L5LHWEestSW0CFj/STRkEgrEIQ+w1pbDQzGoBZFJyLE8/OTCQ1pr+OFvWhSynbv8w8f7m89+sJSrz46eRNQkuJY5/OYqwtPXRhPSyC92ei03U5Wux4GFFj9R7//pdXULvdWPv2xT774+ht3jw6NrHxAsJxcurz06nu70hE/ZA7hsmzmkzLtesSjZaOsAbVoCEEzOX/n/hsMh8ALX3ols8peXdrY7LQHD8ff+frhv/jX/2YjvHjtygsv/7vv3jM/OHjYeuEnf7qbehYH9bQoTybJZPn28D8EZh4swrmUui6+/50ffIACpTTGGCIskCWQatNsn9862N39wXu3aO/Jgtg+cSKfh2HAKdVleflqZzycTIraaJcGnse9eVZaFEJLjdPE809GB40EFGBOw7WVC2enN4I4ENJUQiZxAFg8mQ62tnqjWZ22OqaRxminpRIVdlIJiRCzVl06d/47b79y9dxGXeU8CK2wAKA07s1mB5QCSmLjCOUQYSqNRdiTQhmDgHbEZxAaCpxxDkNgjdHWWVAD6zAGDBFIsdSAMYIVTuPlvZPdOGRaGSOU1MA6QCmT1mqIsrKyCDaNzmflU48+86U/+oOV5enl7XOmnkNkAdCMAj/gdaOCOOYsPD4bGsgA5eVweGf3Pc9XcTt65PKVV198cWVhmSMiJQHCFfMjI2vu0fHwFJOw1YrrUhGeGGVHe8fz4aHq+JiifDivGxQmvXsvfSfX8MlP/FhV5yf57EydwBSP51NHQg0BCcNKFAAYWcpWzJVUnhcCKRWipnBx4F5/7Xcev/ZcSHss6EiZMeZDBDmPuv2FwdmhAzorR2FvVWoIjCnnhYNAA8d8LJr67ORoubt4dnKqs7i72mrKspjMkFOdlQWH/Co/NrCJe37aaxulESGz09Oilqur3cY2y9uPFrUWBQgoUQIvdFKAxL0H+5tbS/OHh5zhxy71PMqnWaWN/MPf/8aVxx5t9xfvvvX+0vlLly5eFZZ7lNVV1pT1wjn6O//uN/7m//GXf+vf/MHlp57PZrU1zXA0feLpa9PpGUZ6Nh322/F4Mr127eLtWzuXtjeP7uwGXR8AdTqaj2e1tMO0Vp1WOB3OgiTcu3ffQD2aTS5srn2AgqpR2BFkAPE49bwqq6aDkdZ6aakjIWxqLSEGgImxoH7n4d5x2u8XufG4TNtpEKcQguPbt5tsHrWDhaQPa4wtZJ4XpPj44Ym2eGHjkWc+82NFdnJ2NiA2C5IlA3nV1D6zyKfU6wiIJSAG0kZKCBl0SpTjolGlICS+2NTGaGJYzFL3yPrm2b27SadbOwe0wiwK/FConBIqldDOeWFEAAZOIZ/SMLIWaOMcwMYy6GieCQMA0K6RJcLYCilVHsaeRiFZYszDrMgQ4/cG+tzjj0JGICUml8iD1jmjNYROW+kgcraxgFMvJYh0V9cNEfM6k6OzNOkstf27d3d379fd7rLvcS+IZeMAhpjwte0rS3L9eG//5r27WTnaPn8h7YYZwlGw1V76xHj+GgS6qeqo3S9FIXTx+JOP9to2ShMW9UjSV0XhYz0enzBoJIBZXlVNCiButbhDcSs4d/Vj52Sld99/P079i899ZDwaNHnWTCfa6Yiy07s7qpL5TI9naFiQ0rqqbDCmmOKmdhYYHkLqMwq8MO1cvHB5dfti1lT1fJ6eX9RGGV1oK3gEjRT7x2eBlwRJaICFLKgMoGGiDZNKhATGSTeIk85iDztJnUKBJRC99fbekYgtif2ohQMPApTNJ01RexRn1mQ1MJbmlSsagYmVQJ6/enXzQ8+
trHbs/KA6uqtm4yjgGKSAEIXGdaMrrR/M5gGZxV4AEOaMYaEj6RWnJxcH6+dacHml8+d/9IVf/1dfzRESvieg5hTOJ9P15dbZyVmv3ZoVcnwy78Af1t2Mp1k+mx5PiitPXFxYWbpx4+5SL5yNTrLsxGi6dfUq7oSkOsMoOzqc3bj1g+2nn/vMT3xxOt7/3S//5lOf+BguK6gNxABYt766JQQRsyzGcJKN717fW+t1p7NZtBjuH54Cy8YnhQtTzznUhaxv56B48vknHrz6zmMXlr/5yv393UO8vhzHoC5HM2l4utzqLhhTCgsyCW/fPRF1Y0zNOn3WW+jF0ej2KQT5h5//0MPdUoxGqprevjtdWlzggeu1u4T4lz7zk6bxcbc/nx8wqiHyZiObtvrvvfi6GE5mMt/c7DYFub+z319cWLzy2GH+sH+hnfqNInQ+mGFKW9uXHrv40dv7erGzcdA2VnsxDN958U+wtdeePf/6999yzHkhT6NksduVqqzy8uTotMVQu+tf6DFpwMZTLxy8/WpHu9W1RcIEIDrud6RDEtgwTZe21lu9S8O5t7tz+NRG+xDWyeqCt7E1uzduRvP15ZVKI2Tr/krr3Qd34qX+aFD6jc0HA0yIn4bOAuJFZ8cnhdBFXXTLKSWEcaQr5bM48uJRVSEQCKNHUl15/ONeSlGRs7TjnY4X1xfDxV4cxqf3dyfjkytbK4SAvcFofXnZI2yYl+/cus18HxIxzs88j8fdRcpB3mRBV2HCaTdtqC114XPmR9HD6kxbsLjaq6pqVuS+iVtRNMntcDxO09ZkUvYTnoYMgqwX02xcaUmA5/keCTnEhABrIQaUeYwldZlnuQhD5ADmQWy10VoSZAhWGHNT6b3B6XQ+Wevq5aWorpVFLsAWWMmCoNZAGg9oIiTygOd5cIrzSd6UJ8N24oVRgD17dDypai2b0f1msnZu7c9QRYkPYyI67ejk+CyNWtOm9rkXLQRVocOgrQCx0AUhRKapVeH7JhvtnuzvrC4utju9Is/rSmo8J0Uo5sP+2sLCcmA0wo4XRdleDN9+8+BsGAz2jiGzmBAr7agZUYyreY0QXNteKmWdjQocsP5K+/GnH3249/DsZHwymJRZJZsGAQQxhAgQQrQyBGPmoTorrXbAIcKgdVaUNYKQQifKBhMKAbLAQogIBVVRIgKCiG6s9pPYe/7Dz2HKZKOnp2M0q2fz+Uc+f+0PvvbN1aR78HAsA7+32LWlMS567oUndmZnZZXHHEJlTFmPpoWltBeFJ0f3Do5uLvTXOmsX3z9UIZo8+khXgZLHvjXNcG8kLGuUiwK2sLQiBYLA08pWRRG3kvF0VMxOSRBHrAN40e2uyemcAGIQ4XGYVeO6hBh5Q9Wk3GEkw+W4u9wTY0UCvy4UQDIIoqLck1WBWZtG653uxuEgW9zcVMObyiEWrIQ+V0rK/LiZjVVjLM0nZ9nmRntej9v9tTS5eP9MLy1dW1xIbr3z1YWlFDu6O0WPXH2aji2j44Pd97FPl/qdZnywub5Z12Saz8bZmZLoxsMXS1sn8fJEwW5v7Wjv9bpqCA6Pbk5a3c+08dOh+aZ1ZDwdtkN/nhUPBuZv/IVfuf3Ob4YhjnzOPDerZ2u9y6YSbeydinsor4Euuy0vSoLh+DAJYBDwzWjT4JWmlN0kQqhAHU8qO5scJqHvtK3yUcDbzua1NF7Y4tpHCDs75z4XjRkejxwLNq6+sLc3pjoKkqegITWQlKqmzhmPkzAoGikNqgrmRUEU+SmkANwFADgLrNKUYMeZs0Br5wDSUhnjqAWyrrBzkUcpJ2VeK6H8IGwlidIGY0q5hxwJo1grqbXQWvtBUGs7mY0whBg4hIUSNSHAY4AienI0qnIFoAIIGSOwA4gwxIi2RiOFrEWQRsyvmwwxEsexaZRVklEGAIQYO2yKWlhKAQsp4VIIDCCACEKbRGmWNQg7YyTGuNvt5FnGCZXKIIrTKBFCEsiUahzADuCslBgQj4bTakoodQarRksOKUXOOYRxGDEpBUHYGAUwFUITSjBG1gKIICVEA6uN8XwulDHGMYwBchhhDKFxmlJqjFHaaqWdswgBypDRTmorLSDKUYaN0gRCBzClHADIqZ9VudYWIAgQ0lYg7BoNjbWB7zVSAQWFVBg6z+fKaAMhZR4ACgFgoY4TjoBrVB1FXpHl1rnA94PuitamrmuKISeYe94sm2NG+53FeTYHzmBMpFDOQowRhtAA7XMGQGANlMpJo3ziUYS1gdKAomq4cUZpjEnIGbROK6WMFI1w1klnMYRCSQAAhsQYIIRw1hoHpFS6kQhXaRQaobW2CEPrjDNWa80YRxQ3jWyEwBBKZUMeGmOkaKyyGGLmEUecEBIDktcl8WiAIiudtZYQPs3m1sGqbuqmAco2QhDqaSWANRjDopZV3VgDHHClERATJWUjHITECz1lftiAlg+O6qKSkW2HVucz1o0cAHU5QiHmlFWZWFzvW3TlD7/0rReeWxfzEUu5cM1cl+0wlgAZhIVrikJiwOMoTBNvNBfE4yRp47DAMdm4tDXMdwLcVgDaMqe+dL6HcWWgzzFnNsYKGu0rYIkCDKa1mRvReEHn7VfeirpPJe3IAOMoAgA66KBREBNnnQMaQoQchBBZCIGVyFnrgDUaQN86jjAC0FiMLUBzqZOFlbS39pv/31/7wi//YrtNKkh8vtzMJ3J2Zo2OedBbfkaqcJyNpK6vfHRbPVpPdg67l9s84Tt3j9a2OmHHb20uz5tGQ0gg9XqtKCqdryUw7eXoZG80xvLCExcuPrF58dGVu+/B//Br3/goO//oxz9y9506a6Z79fTKZzaxGb34laOWQw/v7P7I5lLLJ6c7+71Hn1y7+uzw6D124kKqjidvLz+eBuP5ylbn9ttvB8x+6sc+/K3f27l/9M2//vf/xuCtX3cKkyhtAycKvbmxfnK2X88rH3CgzeDoqLe4vv+w/s53fqPvOZZ4GqHpqK5t2aEuCUw9n45qKOeVUshC3W7xpjKzxuCWJwBQ0lmDsNEcuzTEELMa2agX28KsYzA1ejjJ6ULnz/0f/mbYP//eN/9IjvNrH/vJkJLd05snD4df/Kv/0VvXjy8sni9Ls/Pa9568dsnoWyojadxf275MmRgO9s9tbA6Opj/5+Z/7H//VrzUkDwMTU///8l/9jX/3h2/+5m/9kQDKaBm3OKPcYT6ZZgAA3+cCSq2lrKRoFGjm7dXW+uLiYDydioyur/zy3/q5kOrHLi3v3Llz+928hVZWvHr/5eH/+Tf//vrlpc1nPz6dNB/bfCpZvtLSk/Hrx7jhJTA/89M/1sLyT00HBGJiAAgC3zooZ0JIG6fpYDr89//u6z5H/8mffy7phtYB02hb6ZpgCBynEGLMmaes9SkGjihZWyKbxgXapc4zgNalGA2OAUB1UyNEjCN5bdR0oK2r5kZWtqY1tioII4tpXWmtrUFoOJk21j+qyvOPXSOuCaKgqWojNIBgMj2jhIomMwbKrIiSDsAOQhr5oZEzTjzqsbIZ+czj1MtqZZ0mAFMIpKwBhN
lSTLmkCpLaRxoK1ZHJzczXOeZBJAhlBd1TEZUAfPjpeDGDeqqWZLK4TrwNuv3tk/OFVZCSFKD04c5r14trp2/aZs2Ad/9uTNX473n35ybXNnbX2UruZXd66/ONhbZVUnQo/+7I901CHjDTWvjMDIouV05mG35eOt9fVO0H3x8F6n2xltjnjVKFV3HJIXqp4sYi9ioBFNqZXKKsssyxouoJivSuwH1AkPjmcNRj85eGYVUppqrQw0DmECyf2L5ceP9/ptP3bCzQG+Mpkf7X9ECdYqTFacoHIQdg/3n26M1wGs0qyBRABEw3hYplOpOESK86pYVBoaLoQwKimSKzvXzmbH2YrRYKQF7PTWjk4mH3342TLV8dr24bTpBaTIuB+Ew+3u0fkkW3KfuYvzGRCFLqQf6Dyvru+2xj22eHIQkGg6U/7lrcF2d/Lgw367z3nVD8PZ2bFSDsCozAEhNIrpbJYFQdTy4uR0UZQV0420vMqM68NZojgN4iBYlALZ2meauW4r6J8k51oZBTRGWnHQyCYiEArDsJOVMnQdx/EUlEZLqUwQetACZYxjZFFqQp2KV8QqDSwggCPrM6ChpQ41QFiIIELEgcxnxqLhmm9XdnqeLKw4p9SUIHa7ebFsez7XtVQIASKEklBbJQwEQGIgOTI8Pz/K+58EoY9N8RdQkba6qopRZ5RnDEDzyp1rDx7sV9wwDDv9zipTa8NR7IS6hgBaN0K8aTZ2eu1OYASXVY2xhUZSaIVugMIEAkqg67HXbl6uJX/rzeuPn5/2VBRqYR1QC3d785poFkmimMEOr/aePdzd3ti9unE2SSDu3d87voXc7UDVeTIYrzkuODk91YLZTB7PLqJeNIhDWdayrILBOlRl2czXL60/v7hIC9UeDM9PjwKf+JIqoCcni3bETDWL/fWN8XapNIBxxW02z3rtthbVZv+axbZJeK7yoN3RiECFt9dGnXYvnZ1maRqEztnZ492N1qpoOt3e/vNnu5u9MPZX85TD0qNuMrMdFz75+N+msxfWw50XnZ7nvPVr7x0kS9dgbezeo8nO4Or73/sBDJP/8Fd/4epWnN0Z3b6y87vf/vGv/eX3InTqNMvzC33r5vXxZnRxkcdxoHPRj5wVbyqBtAJAJ7Yua0Moq/zQ0XXMK9HptrLkCAluAWGk2Np4be9FbYWSQo+GvaZc2OJpxLxkvoj7wyKD8fiOwp4wndFaR6xOpul9ZHmrPUCdjdnFMUTo8Hj/1q2RA0SZFVqYdFE2Ort25xuHz5Nf+oUrNy4Pn88fZsp89KzJllJJgZhzdHbeDqJuNHz4+EcB7HsY5csq41Vwo9fAXJjcA56ltOc7Z2ezwGUoFKkslnunjUDj7lonaivaNHWJDbTauqSlkYCAWcuUhV57fbXkrXCseN4OB2TUgWUOADRWCC61bABH1rFCcwywE+xW5cH09Nz1vDDcVgJAxOq0SJPzds+bLubZqsI09HBzfPSFg8H0/IWCoN0aQYFb7fafP4wAAqQqG2s1hLYx1gqkESWubwyoOS/LklKXUtd3QqCF4zgaYFE3AFltlOYSYpRlKZeCuAxrrY0GCAndYEsVwNrOIy/gRdpUQijZagUUYgZRniU4aiOEMcEIYKUkBK5DvGyVYUTKuilqGYUuQq7WSgNrjHE8DxHXgWHN80oIXesgiJQWFGOtpBSyFI1QTRh51KNWC9kKRFPXnGOMMUJGCQARxlYqJYQCCDrEEdZKITEGXAnfcYxSSiqIIAQAISCkMsoCIzAByloDgGwUhNwoZbRxmCdkjaEBGDKKGUVSAuvYeZVjwhDCjBLe1MAYY621QFvDmAMBamRphMKUWEwlhI2W2ACsNSZEA4iBVVqVTe4SQhlTWtZNY4CFxlpjgQVGSCWEhchzKSGo5beMEohQAKHDMAC4VroWQugMWAOsNtoQ4udFKgyQGhGE2q1AKY0wlKLxo9Cj7TLNTpYXCOqAsc3xSEsoGi6MAoA4BGsLgQVKagCgARACLLiAEGDKuAWlFNAARHCeJzUmLc/R1kijHOy0PYcRb1aUeVGWRd0oCRDUGFqEhbRAKyM1osj1WFWtjBWDUTxuBfNlqZTWWgMIEYQQWIRh0wgueFULZAEl2GVUcIERANZqpRsuay4sggbYqhIISWuhUJZYzTCyFnAhlVIAQKOkNApCRKkD8c8DaNm0HN/ZmK2qVbNwrbc6ubh87QbdbD1/dPDy6y+9/+GfUFo+/f6n/VcIBwZY7rnW7hdkEtx9vBoFveQw/f5vP7r96vr7f/z+7ZdfU7XxO7GGYrbQvnu6OLjbHnQBuZ1mpi5nBLSenp6++XYQQpk1yG+NjahrrrTJK1NEdGhNO/RDVKSFtGELBM1U3j9ZfHq+ufNrw91+4dOL/UccNk1dW+lPpictWpOhC9JQuq4FkPQ8DUhWVoTK7378nf/sjVuHD/af/uTBzd31xTLtjfqR68XjznSeNkXdinB3HB5+MbEwnj7KLpbmySjfvG4QFtPZye1XQb/tQI/MZ5UiubC+1lWZJUrVdVFZUS2rSW/cLR259+DBeDSKx+Ta1s5vfe88mVtYNL2W6wJE5cl3/9lvrg28cReGV/o3Xr6x1br0B/+vf1TVtYPBZjeaprM37vTXAhi5J9/65uiVO2hzO7q4CD97eLFMIW8mdnVx948eZ7o19LzMCgTU7pVhxf0Xn3/Wu3G9t5Hc/+Q7v/DmzU/+6J5KwavvXvu9H333bJp7La838KwiazttW84Xp093tu4UTy7ms5kH0fRoVeccU9saBnuHK+jha68NvQFod/DGpd2UVp/+yQsvFeplfZDOttdfunL7G9PDCYUXUVi5EczTmgvjo3VbhtH2jbizXYinvF5oIFkHQcgWi8fRKDWwzIo8F6A1cuvGYNZWjQSWIRN7Xna2fO4F/miweX5w5jnq8rVWtf8vH5TfuThMnLD9pa9889NHFQILz2vVQtTzbw/Wr0mNIAK96+tex63L4eGTT4JW1+/uTLMfY4KE6ZRF0/NeWou+WfDzuVqs+1/9W38z/tPv/N9ufW2D4PTibFk12hkGa/22VNL3EPE0VH9uki1UQvmeVyvdCjrSUGOkkRpjrKDwXN8Ju7wuCZBuRH0nmE+XDAqjje97nTBOVGYA00BzLZx21OutJbMZVCYOKSagLjMjBPR83nCEHD8MLWiCICgWF06IymoVRyGNYiVVVtVx2IkoulhNGDFAcY95GNKy4cDAkl80TfvG5k6Vl8QlWjUY0SwvRFNS5GFNECUWNZARDAMc+q4XgCYvkhUClmgJSLXddyNiT598ulot2yzoEMKnS2JBOkkpM72oDZRtjzrEobyRy0XpD8ZktAmVDjqmcoKjIvEED9FwmRuuFSDi6vXbk/ni4ec/2fHCs6wJOsM3vvLug08ekm771Tu7B4/vtt24lqLX6hBZrFZTid3u2lqd1VyX7cC5/vI2VOXk+NyL3fIsE8vzfrelUuW6HufF5Gza7W4im9aVvro1evbsWTRuj3cHF8/ONcJcmqKGABloLDYWKk0wSNJZq+3kc8GrJr9
YYRRTqRCCwOB0UTm+6cSBVIo5jlaqFUcX0wuMXI9SoxHXTegThwKpZVYlvueHvZ1Bx+c4mC+XTm+YNMBoKTEmhDp+O0kz6sAo9CbzuVxdtKP+aLDDgVEykM2KQgQaLWvDQtI0ZRC3keOzfn9za2txNCsqBQzLk1U6X7I40rn1qcMLsbXVT7PG7dlvvfVaG800n/zzf/hbb7/7yvc+fry5vv70/vOXX7k1Xh+eF8YoXdYpKFLJpfZZ1I7ODu6BIoMEzfLCGW8Mr8tMSnFctFxclg0NPMjz3sB77c7mt//tHwBoC+Zs7azdfmXrd3/7+z8Tgef1TJOni0xZOU+Wo/5YAZFmucXwfD7ZHK55DpZ50pSgntbtzXa2qkTTdONuXqwuJmcYMK3t+vVdjVnOBZKKANDptKVSUjdNmQOphZYYQi1LrnW3dcl4sYVGCQQR1EJ2XNWy816wna+K+VnpBANoRZnZ1tilDmsANYYAFk8OH7U76y3aotDael4XGXaNFwYGMGSwi51FlmhhCs4bKahH41aLlyuE6MV5hoAHbL0zpiUvOTCEMpcAo6pidga10ppLIyiNDLCMOJhAzOpxZ2dyfuB5blWLyOsQ6LhOlOWF1daDWEoAIbNWAui22526WAV+WxurGu6HoRf40KEPfzrNoNeUVmd6WefXrsog8KCqJYflonBc5/rVLx+dPi3UqeZV05QMGq8dAAsk5x6jtZZawLLgQBqXIUbs7OL4eJallemPvG4vCjvdfL5qt/oISgwVF2a+nAKgqeO5OlYcWkMRhAQLSyCEAAOFLbPa77YDqaU0DaEesCJvjIHera9+vdvucgiWZ4f86aEWqtcPLanWtm88fnSCgS4as7Ro3B74fsBble94LHBXdVI19eVB+//P1Z8/7brlZ33Ymtc938/4zsOehzOf04PUUkvdEgKEBBEQ48S2wK7g2HGcMsSVVBy74iJ2inKIIY5DbCCxASPMKAxIAqlRS+pWD6e7T59pn7Pnvd+93/F5n/me17zyQzdJ4T/iU6vW9b0GKeSAIby3oQMaxn0OSbNeDfI+C0JCodXCV8pZVyK9sZFDGHS1Um4d8pAHCceuapdledHr9ZyTSRCslov+cBNjnA0Gq9W66WwUuIjH1pl83CfbW8CrgOegU9qsYzKoi8XxxcnLyQWLkLRN1IeiEZeVFU0lYLvQamOTci5bYA93GWe09NL3huX5WVWWP/wdQ0AHAYjAZClO59WHp9/4hd/79o037n7n3Uc3714Pka+LRRwOuhbqlscuZAqdPW9Vy0kPNxX4uZ/9hQ8/fHch5qNhv2rhjeu3Xrz/zTDpsWBgyqqU7WQ9HfSGMfKvX9vQV1EpQVV1H9x7emN/exTkR48u8j7nWbRxY3M8HjAHJkeXZ8dn+MpeqSQCbYBCBEFdiN5w69nZo+Go//Th8fbh4cFrd8vGtVgbX64r/+U/9Av3v/VNjVQju5PzI6MkAg5gnMTZ1iuvtGfnHrk4z+t25jHyjoIa8SiqhWAsBy70jNV1d3Bwo3l2Hg9C4d32djqbNpCFe9tb/mRRlJV0BgfcaF9W7bKVwuN1LYVBkGAAASSWUoIRxkYB7fMs096/OL1gHAX5IWIq2yAJSZpmNRgNLs5aGG1fD6+0zVPolWwrHo1aQZQlBFtOia/oi2eTMAv6/SDCfHs4nqzPFCe3v/il1arhreiT8Cu/9E+w58PeEMV5r09d08RpWFUyWreMkoR2YrHyynpEXvmxd1ZHD/Px4MWL06JsEhQZ0dEohVlAvDNS6qZy3ratQyCAEGtv66bI8+1+Ly8rt2pEzFTEg9mq6hHEMKqblgcJ87jSwPKkFauQw075WopVt3TOE8Kw19grLZ2hZNG2lPYQwMo5qUREkbECesQIUR4KbTlLlO9apQlEngKKPHKAEmq0MBRh4L2HAHgjOgwZgogRXDYCQpYx2A98pwGle0q9FKpshciZhNZ5QCENAVSrZnZtGDAovHTVTKhOWw9sNU3SgY7JvyAV0YDEUXJyPDEG44BtX9vPevHZ6TTLs8GoVwtlrOhnsWeoFopRnIVRGNM4i5BXk4vzMCJWe2Mcxo4yMBhtGtRdTJfvvftQlIvPvvFW7hXSq2ZVYYosgyshkIE4osQza8HW3lUD/dPnU2NIPgh3rlw3WE1XKyUtXNTWdSzt718/fPpJzYLg2eRItQ2s1Xhna2c0OnnxdDmfaDmKUL9aLBcUJhvXpY+qYu5sgyhet/bOwUFb2hobD6F1KgyCnY1Ul2titDMt5wDzugGY90Ma9DMvq+UaoU4Yf1nV77x1dbgzJLoui8tO1EkUIQZr65TWw90hArxT6tqrWx//+rcqCn725/+VF199dvPK3ZOL2no33ImXRw8MWe2+9SP0+xvXX317tLPz8bu/fe+9Tw6zLyFA/vJf/Ss//crhxVpd3evXwj18cjZfNlkwoFnv0QffoeN83AfArCHrZFvNL6u8v1MuG2AZIahrSll2ZbH2lM9efgqghWksWqptOLsorW2KwvSG2xpR58iNK188n9cOd9YQWUpTzHu9re2dzWenSy/gaGO3rMqrhzu2mZI0h4TyPOvKDrrk5eOJXLXf/Cd//fHY+3h15fpb4yQVopEOdW23OdwYpv2mWPfybG/vWhTYup4xBevlEtoojfvAgScffLC52z/sJQDCh6eL+fzl7t6oPjPLCjaduHv74N7ZN9NeLirpFN/ev3M5mWQhMlru37q5FudRMuhdub5YLDPMmMuL1RxhaEBIgxEJtjt1rHVpFWmVqQTG+XjV1quqDXA4TsLzs2eiKZOc5TGJWdx1VJfdoit7g5ASarQBwG+PhsX8h09CWbYAIkwJwgQTzEkgrQvzBLFatq01OqUpxMQCX1YVxcgB5AEQxjWixYR4D+pVaZ0HCIQhAd5Y7ynCRsumKXGQOMSbunBWEYR4GCFPMbVGC8oDD5AHEELIaKitBtCu23WWJm0nhbcMM+E8gk7ILiIRIjhARBtnvGOMa228g6uiUkpySpVUBENECSLUYySEjngw6tHLiwurrIK2BjLhTCgpvYEWOg8IZAgS5JSDWmgR0MBYZB0q2zZgoQeQckYZnc0X1HliCTQOIgA9qOrWeUcIUV1jVcsx4YxC5xjBjBOndMAYpbQVykFEwlC1LQQAYIg9BMA0Tae1poRoaz0iRkpCMfFIGQMQTJMIetha9YMt004J4J22BiJqPeikcsBHnFutCaUhp8YY55DxyAhrPcHEt6LxmGihlOtYSCiCwphV3RonKGM0CjDC2jupVYgiEoRCKOC7Si6tlTeu3DTW1k2jpMcEB3HkHSAIA+QQggjhuqus84RRRKnUwmtjpKeE5GkoRYuCAELkrccIDrJMK+c8EEZ2WlrvPQQ8oB54hgiF2FuLMLEEIAgA8BB6rRVWZLJoRaswApRSpVVXS60MJkgZrZSS2iZhAKHr2k4pTRnBBDkPOq2Us84BLZVUBiPkAaCURHGAIYLA103rrAsDihFMksw433Xynw+gAQqSyZlEk1WE80fvf9S1Tj5bbu5vzC+nfRV/4U
cPVOKv3dherM4RII8eTN9557Wnz9ZEuTd3ri4WS0eIWIJv/Orzi1P3/m9+nPfA//I/+qNhiBtw0ZB1chXMpt/buPZGyHunUs2r8y/93i+FWbM/ZEbllZL9/l5rzPnzh4qnTQvThNDAaU+Pzxa5Vu+M0siirqN8uLvG3VmzjHYSu1otLy+DZCcbDQiLtt7MJt93L08LmtEoZPW8iRjhAYrC6N4Hk9tvvP3dx9/9sZ//3KydHD2evf/Ri9/zs18+ujhDUPOBzPmASKFd9aNfePWv/533svHuzjVrOve13zpt5OTWq9Htd64EEp/NG7KeowCM0qictG3dcd5yDsuqscxs/ci1ltCHzxb5+jK/yovpepCly1XtJP4z//Ff3R/0QSR+8f/6v/1z//b/7k//+3e/+Td/7fzoiGbDi3X7+Lm+lvUiWcb9mDB8/vJZgOn0eREn/MfeyjvhW9WkWfDyefvp04JHcZX025qdT020UX/uy6NPv/+N7z6cnj6cBy9TocTP/Z4v/vf/9Ncr7eIk3DtMaALauVPCV5cv1IvESeFmxdYOfvJ45Rw4meua4DCmPABpRlNkwwbNPz77yvMnb/z8q2984ac294LJ/PGqeJRfD/LxH2eq8dU/GqYWRZctL/K9PHcw6d/Mt9P64ltNcZLtHWrYd6Fp9ToZMJqmRB/aDrCQVSASrt3e3O7mS109lMXCSYUs8yI5e1J2UpmuzhOY5l7pi8EtQtn8bFqmIY/4iGXp2fPvajNbofsE7hBwsL37jjbEue2Na18o1fx8dQxx25aX/Ww0zHaR2X7v/Ueb/fj1z/8UTcVv/O1/FGb586etqlvVEoWSMOsX6yJMnQWF0EjVPwwjI4yiLK3KWnubpbkjCBgYJaGSMsSUIjirptaoIKZaWkNiHsYUOQeNMm62rqAHjOFO6SBkmAV102FMjfDWWc4oRlTrJouHjPW89cW0bOq6olXXTIM08MrXqxpgFjFKIa6bQrXNWhTDft7LhqJb6lZRxkJCwx6z+lgJjwhbtw2nUStsELIg7BFEuk41suUx7ZpuPps6C40oQ92ltqHQTY8mu/t75UWxqBvZWa883co5AXXZwIiRlCqtfNwbX3mNbUTL+ZQnKEp8A3zZKgJRP0uotUqSjatXXkzPyO6bnkV5ws87ebrUn/09f/C1d744PZ1++vGD6fkqCrOFWJ+9fLq9MTZti6ho1FlkLQ5CwkNCfMbDYlVjADgAi7M1lGi4lUGM5otpH5tqtr48Xh3eufVoOpG+Dbivqu7e9Fgr2s5bDWdipXlG4yzuOnnlxo5YlUEYMG9tU09OZuHN3STPp2enTV3Y40+3t0bJMAeGFnOl2i5Lo06ZVnTOQGgdwzDtZ84C3QDjDQ1iaOsnnz4Ybl+78cbnJ7NlHObLyaJtQciogQQx5IxxzhrtMaVtK87Pz6IoU1p1QkFGKMSzxdqVeufwejQatpWomvrm9euf3jtxCCHaPz6ZMQu8196b5dnMNFbpdcAIIXh6elpezkkcFKreT/Dlw/dOj17sX7vesfxn/ugfeftHfuIv/Sd/4Xxe7ynsIWt1efXgypMHH772xutVC5eL+0JW/VHIivjx05e/z4G9W3dn5Twgiunmnbfeml2szycXBKnHT5+eThe3r+5BCI4nC6G7fr8HwAkAIO/lURg160Un6nqhqso0rZvVLoiTKEallGY5Y8hVM3d2uti6cmgxSoaDed0EhEZhCgntb25qmpa1IRBR4HkWBYiuyqKpWhLElSysgx4SZ1GUBRp1kEOCsJLGtjZL47gXna7O+zcOHhzNCsk4S7WVp2uOIrwfR0a5zkDis3R4W3kLrYdW2dYtJ5dJP46CpJf16qpWQLGIS9XKrtPGqE6oRsc8l9KslkuMOBmD2WxOYppEfalUVTfadjgJYCukkSyKEB1oB6RzzPsIu8n0kdJaiprSzCG+qKoKWwRJHqUIWAcghAQAjzFc17VXmoW4Xq+9jx0kNKAGOR4F09V0q5/ffWd7uDWIuGvrQnrUlkJJjWl3cvJtgORgELVWIRJTAlgAumbJKemasm06AnC7qsMAB2nQInBaLpZKtQbtxLH3rqvXg52RpZXuuv3tu0LYteoYY1nU5+FQw+V80XiIEDMYwKKsAsbqUvZ7IUEO+I6xrGkkgC4b5JCmXbFYacf7SZyG+Siv2k4pGJDo0ccvm7LRusmy4W4W91MWJhymkXZAOL9/7XBjnF67KQCwbdt6BE0AXa1lWVFXvnr3Jk/i4cb45PnzcloZ7QQBOI7CKKmW50o4ihkyuimFUS2P+3nvxmL+TBgjnNJeE2hIGCQjzjxFiFWFuXvr5rKcE4yL+bqL+jGkGU+E9r3eRh7EL05OeUZ9CKMcKi0bpZRTPmaC8YXDpNZRny3XMxI1/Rs3XQcGDm1vb/3gLfjGVz/yXXv3D1w9+Nzt5//gNw0hfH///rd+e1LMzSN1dXsEA3Tt9lvd4lPO1U4/HfWGF6t1mvelUt/45tfmVfv2O6/3ktZCV3nxyZOPBv2QJj0LaNscv/njdzshj15cvPnm6+WkFkbnvd4g51nFYhpg7LPd3vVr4+XCOMpKFr3/29/ZCoJ80Ds9mUX97OD6brVsFrOiPHoe9Xq9UZaOwz1+7XJRB1sgSs2P/syd3/7V6uzZJxtbt0gyfvTg/nAj/fDxp9e299M8SoYRDnMaYrYZVc1ZxKh33bU71xenNUSoWMzDJHbAVqYOTSxlff7iRTLIm8IgwEvpPQrLbrkZ9nnUBb5jLSFJoFsvG9W2nYR43rZxmCBCkIMEYq+BBRoB5DAmGHprk15yNJ9+99OnkYm80HHsBmm0vzW2bfvsbPn40cfjTZBE0BOgZd2ZY2s6zqkzClhtHN05uN51p6v1GgB26+1X+lvXV4VZzruNpv3w+OOAsmuvvPbhJ6eyajqFhynNswBYrIQd72yKILGWZP14vr6gAeMBkF3jGUjTUVUVq3VTx106fVlysL03dF6LDl1eTrN8HMYx8NATzBhbVHNMIAsTHGDl2yxjvqk5YxwPEbS1bNfLxWp1GQeRhxRZpSyE0hIAAoaVtQAaa5zwIAuTRlqIQGcdtmDpaogQ8ijgnHmOKQ6DEGgnVrXpmjSDQT8oqjVyIfTQOmgtaIVEyEPvjDEIYQQQI0RaH0Y4yUBRmweXU4CANcajoO4Ugc4DK7vGuybkPh/2oFlr6a3WDsDOmcvpMg5AlIf/glTUdUWWRaNx5jxbF10nHIijdHcj4cwYHXKU5aP+IO9aN0xHjJNyMndGM4oIi0a721FoL16eZIMgTvi4Nzx+/iwdxz/xU4fLk5N2vfnrX330mc/eRsxwjuIe7QXJatlBBgb5sJhdQKo2rm42LTLztqsaikQcYVfPhteGQnOjnbUBgsnRCxeODrav7IFn7nx+xhlp1ujxBw8bKQHObBMqpdpGZNkOB7xsJwYoodTe7pVZzS2wBKrHjz/aHA0C0jULKbv5elEXQni1jrMeDVjE+/MXj8I4Z7q/XrbpYD8K/BYgq7O5t3UcWozZfHKxdWWcDiFBFEPEycgCp
PD8vQcPLXVXNoPvffPbepH87PW3Qr6mndKr6WT6/dHe4Le+8bdbIsLNu4t63coX/8a/9TMMRf4+bkS7aFWwlVdgNVtUo61NwMKTWf3scpn1Tnt4iZ2cXbxIe7lqBCZJGG2QuvSozAbk8bOXTT0jzCHGWtkupi8GV3Z5GC0mFAIb8FQYu+rWmENRLSsDKrsK2ThPgrqaB4kqa7ksQ55c1dq+fHb/1duvruazQS9ZLadFJftXxmRx2q2EkNmXfuGnR2n2yTe+spUNnn58TwOZjbPlzJoOONxLe2//2q//tdfeeEvBZbmqemkyCEZs0GOQv3h29vzoxSs3bopizgg8vjjzTm/v5cNhIDuwqkTZzq/7YGunTympSktY2Iu3i4hjXzeL6bMHH2R8czY5a2SXJyFwAQ2ZExJjx6BoROdpCcBAO9gCaLDSVkvRrNvlqrZZmJFySaiNIxtRwSPoWiFFMx4NEd90dmWtEp3XHkIMvPshBRC5tpFByJMwvHN1XxhfteJytu7qtmlrDCGmlCMuatFIgTltOu+s1cagkAKK6lZa57z1yLmqXGDkA8zWxZogTDB11mjpCCYAQIwIhlg0NaaIQIg9tMY4DzyCPzDLIIIwjjCNkXZJGnttgbEeIU4Da4ExljECIVROcx4xFhqlvXMcUyUbj5xD1ChjtCHcbw5zaAzxLGSJD4CnqOyUs85IjSnwACJCnHXeSe8sAC6JhtD5uiqEMQhSisPOKu98JyXhHEAMMCrqhmBMCdJS8TDglGpRO+uMN0rIPE+lc94pDwwApmsUoVRa0bQaeU8YpRgbo7x32lnnoXbGASSalhKOGLXAKGOB99Bp64EFjmEMIfXWBUGIEa/bzmMUJgEALqTcW4YxJMAr5ZTT3gNKGEK4k9IDZ6xBmCBosXdlXWtjPQJFI/owIBQ553jAgQVKSymksd0wi3c2xwlldVO1QmLKKSPG+x8MtDltofMeAuUlwghj7KG1TlioocdGKuoDZw0nCBLkEWCISG3qVkGIqk5B7yjndVVjDCGm68XMGg8RSJPYequ9oZQs6loKSQBB2ndWee+tAdooraU11gFYlF2nJHSOYWS0g8h7YAFCUhvdGW2t1NZYDyDUxnqIPMIeeEwJQthb64BDBFMKsjhwDnIaLKvCQwf/eVfRG2/dQiD8xnc/Ptj2G2GCsvHpSQHQOkz7WrvPfuH6RMwCp/BEBmD887f/0Aj2xlfg3uvbhpJSSu9hwvj/7T/4f6dJtF6Vgxjz/W2ALy5P9DBmWUZO338S96Y8SP2Cvbz/9NUv7LSzj49nCyGmZbNatm3ZWeD5cDNiHDWTyXQivIX5aMBctzyZnhzNPv8T/6tlpBqsa68ph9C74ehAgtiZtrsofa1Xc0nCaL4q9GWdhSwIWD4KhVSXF9Xhtfz3/6v/zvHR44cn1dbuZmbdO9e/uH7a/dbXf/3aneGLxWS+WBweRM+fvxQYFM1kPuuyjc2f/MNfevjx40dPZqPdeGvr5mSyhLqzoDo9WzenxXJunj087veHnQaOwrJUjrJkY3B8dNpHbG+HqMrnw2z2dOHmvWneu/ra7V/89/7azSuv/Kd/9r+9XW8ebvQbAIyA0GGMwt3D8XL2NI/ccJerDmOSGN+Iag0A6vcCAMtRv71zN+syN3xl997XPrl1mPEMLl5+bOYXwIl/9X/9i1/9Zxfv3Lnxk7/4U7/87a8Sm1Yn8/e+//yP/bt/fDNk3/r6t+9m+edfufnu77y3188j3MbMAcbTgiuLO+MOD3b/5X/9X3r3K9/Zu75r9WR3S6q1fjR5znYObr7yzv1vnI3gC9j9ahRcWc3XrRawq7YPoq3N3uLRfNXCrhars+935aVPlzC6GphAtUvky3petp5jvAug8YZpVV4uTzimAaV9OmwV6Q1f8Yp3yBr93cEWUKVq5mXey4UHMNSNnhaN9Fi6aTLMh2spLo+Ot3cDLRJJXmjgpdDjawfHn3y3XD5OU7OarRD9iNjtq7tvJ5m7PPnt6dOPhJ2NDoPLAjQXXYjTyVkb9xMMGlOteGRuvH3l+SdnavVDV5F3XirFQ247uVpMwyw32oaDPiFI6taBNmAQ80gjQElYFgJAzyMOEJJCAkIoZsooLREm1EmDXJclQ58hvepm88oq2I+jetV4VzNICJRpih4+PwopRjx0BnpGgiDu9bOumS/XSw7gTpgREECHnUYR60FAoC29cGWnmlIl/R6lIOJM1o2TnHLOMLXIGAgI9J113qO3X3tDV8uLp/dSRhx21NrJi6eqVgZgB6DW6HJxmVInmkqDGMXh5u6eJmmHjOgqi4lFOApwUxQhIkZJDmPvIPDmwYtH1rF0N9p/9fr9730ah7itirbI3/32N11bovWyLJcEu7euDj/63odtss1BS/IMEBKlUdSPZtPV4vRCJX2I4XresLX3Fs/OqyjsYw/PJouiaqPR8NSo7//ud0rRHYzTL3/+lqUuDpM8608Xl04ZSDVnvqlMgJlYV9Wq8sgHYbKcXRotXrycbqQhRiQZhN26rFfUBUEUxcYW0EBvYFnJMHIowEQhBBmDqFUVcC7Nhy+Opto0SW+8vbM/qwuhu+VsjgBhaUA5nZdtEiecR9a1l+fnHANrzerybGtjnxKfJKZqBMSMkgBGfLWuIfE8j2dCPDu7yLeGg0E2WXdIAOz059987fv3vkHZIMiz08vZwe0b29vx937n2zfuXPvNr393sLvzmTvXv/kP//sA0xvXd84rbGb6++9+cuetN9MQybpxiENkHz/9eDTa4VnsGCqLZbGCURQPR5vXZLd+/unB57509OKkFPbqZlItV1aC05O6l/Kjow++8JOvLk6mRaErYbwUvX72w+8Bx0bL0ebQ2zhOwqbWhIUwT6pWeAeTOKuKs2SYWmziYRSHNAqho4glUbtsVOc29rLhYNAovJGGVhjZOdG2AsKiLutWaO3bWjPOkjhMsxRiqBxs2pJjZzVgmBgrpVTPX1x8+vA3IhQlaeoJB4QQmhuVNJUM+7luhTUQoUh2SwpVlnLZBuPRLYSck1B5I6RACFkEpKk1xBZaAlmaJYZjItx0fkkZblVVi9Ug7hXLCcKUpdy7pigXASFJkhiIVNfRADViTUFANTbdmtCIYtAfxZNKJFFAAEGQJ0EOvAQIQu/TOOi6pqsajIFoqzTJrWUQEFML4+2tjWHvsyQfZ9AJC4RWNuCccGaBK87Ldr1GCCBu0zhTTUm481a10g/7O7YpmnodYO+cYlxZgGTTPn40aWSTDXuDXpANkl4vXE/X52VnIGZBiFkCnUTWekCM0ggCzoPtzbiTplwUSqkwCpV1PCS9lENQ7uzsnp0tvHGYBwABBBwFJk+j8+mFMRIzOu5FgIeqrEAnKUNpklJCgyi2zq3XFach4Fx7WFTg+Gh9fPEEo3Crt6+t6fWIWrVCVIcHWySEnsNFWye9NIkZAP7lsjDKrpZzBl0WxXmSNEp5jGLOYsiVkv1sYKXaiENgiEeiq+qEu6IRTSfjqHfy/GWaBNPpS9CCR2fP71y7Qnw3KYugvwFxQikLnSaa
LGUbB8iwEAOgkSeYVrPVa7d2o9BgAoyy65cnzdpiQxCnPwyg9QfPX8xmT47XT062UhZY+PFXf8s33U995u21ajUwU4N/41vfvpP1fKT7Gblxc7P7uGI5Pbhxe671y+KCPoOfeXNLOzufL5r1skuHTVOTfp/wvu7gwe7h2csH66JarNTZ6czA+fbV7LXX7qi6EWKpkKIRE0erRd32dLJ99e0Pf+ebMcdY6itxuP3K7e75FAsTEICNpv0B6XNrwOTx0rJ5n6h2mTjRcNmhaughHB6MvVHjrV3Jo9ag7d29fm/04Xc+HfSHXsLpcjrIgQlla9bjja3Ly04KoxxYFsXOtf3dLH98//loOLBSWSlXwuS9PKWmXC1j2iniAsC1iM9nk3C4A/QSeBOGKcHUOkQAQhYqI6USHngEgPW6lV3AeJ73H76Y3N4a3dg/PD55cfvg4MNPZtxm28OtulqONgeiLUWnAQRSSdUK2A8cEl1rlNWTi4tBLyzKLgiCdYedBDTGWzub0w+/992PPgkTfzl5GUco4/xiJWCLD3d2J2fPGKSYkaKobexff+1HwOMCV7PqbDIe777+udtth8RK3toYvf/4ojvXU6oDrLW2UiqaZJ2RgRfABdT7jDhlLWNMOydbY0TNKDXIEQKbroHWdaLGViEIqnWnOA4ZNYAQTx3yXlvnAYYUIoMcwB5bFxAPmQXWaQs140Q65ZRzzvKAyq4UbW2U4RwaUUCPI0Kt00GcEooccEooFngMsfXW2B8EL5yzWCllhTOlLzwab/eNRdWqCgnGDGKAdH15Zch3r+4A1ToAGfdBiNpSWsJboeuq8NT9C1KRlrJeLHiQxXkGdQeBp4RkPALOF6LaHW8mnBCgkogx6oVYIzdPk5gaU9U6GWW6OCK6jL3dzBOgiu1Rmg6T9dm0nKySYEMq3nWkK9acWyMwsApIY7SsgY4ZNgAxEkQbgwCVvX4fqm59sZCVWqcCcNQf9FWHaBSNxgOtkFkfxa7Lidm+sXvxuLEtXDbl/vVt00pOqXeD3fENj8S8qbe2dy4vjkVVAW2nZw+u3ri2vyeLxT3eyy8uz71tVaOEVm+/ef34ZI051LZYL+bZlRuARm989nMWKlEH+xuhV+bZw8n1H321zWFpV62s4AqmG1Qj07Xrtu6YA5OizOLg93/h7V/+u/deu3tjQRehXqbt6ne/9b2niwc/ffMne7S304e3+rsAwl/+Z3/rzbd2/tFvfEOko//L/+sv/fk/9ad/3x98J+z7v/HRPx3tXsGhdkqfrlbX60DUxXIp2+bCwiGFWZ7tGeFlJ9vmUnkHubp959qH732/vzkCGLdFqR7MgziLegcYDQBQ04unJI5w4Hd3dtp6TpFDYHl5NEnTniPMOGUgXReXEODrV64sJtMkjdZl1UqnAVxMLgPI9m9f1ckrKxdq0YX9/nTybL5sWq9C7ZMgGW2kSbRrgN+7tru15cMoqecOscAIb7U8PTlZT0vgkCEkZ/z04uXL82JzmKvaq3aNSW+8mXhAjs4vBnnUdB3jYde0F6fPwmTbrKswSrzqlD/b2Nk7ma0QFuv1kiDfyyl0oGkndaekXiM29NZaKnxViovnzXJlPdwaXxkmrKzOJ5OTLGewAY3xo3i4tdHvDF7X3bA3bhbnNOj1d64FUbBar35AQdMIo7zRRkj1QHeiVZ1SxuOuVVJpTDD3sGxWlHHGuYPeIy86IdoGCWuc1dIAD5x1zqkkHWghMTYEEGsMokRJAZTHYQS8U1pCD5SRxCPCGaGEYAwBtNZ6o6IgIowgh0VdqE5gipVqvXNhGBMUeGCtt1IIDyyATnfCYw4RDwiUXQW9wRiwgFmtnTbYe+RQ3SqnO4jxYDBwBDvSOtVRyoyxwhinFKNUG+u9ZYS2QlqrtJHAERyEy26dxwkASCrhgIMIOGMRBABY7wGlSKtOAkOAZ4QZqxinxmgIIGeIeIwR6pSzVjviNLTAOigMZhABq3VHEZZaEso8wtgTZDW0kAUhxh56Z61FCGJHVNc57xGERmttHfDGaYAJjRnXQiOAIIRCyziJlVLeAeutUvYHe/BBkFXVAgPbSdkJjQnVwGRxj0BitXXeGuO0FF1bDvp9zvOQUOvsfF0CCAHEUlvOMUFQK+MxBAAZ5zohPPYEU2AN8AZDNEx6Tac10ARDKZSzmkCAMXMQSOsAQAFHUcBaoaq6buvaCNlJ4bWWQmGKq66GEBEEKKXOaIxRGifQGis646zWBkGojHbKIARF10gHMIKcYMJI07QI4VYq6x3CSBurjUEYee8oRh5BQkgYcug99t46BzHkjIaMUIKrpq3bRhkLIfLoh76ihx99qmfduBfsXudvvPmTf/dXXn7v5MG/9tNXd/rD8w8nHdFhz25kgQSZL92V166wLN8cZveO7pfY7O0kALV/65feffR0JjqKXXX7jZuT2b0WLoxw5axtfW0hD5PW47i3/Xq2daVtiqPZkzCG3hbdyYQnMSq8EfX06SmnyCmQ9tLtnY3L44tXXjk8+eTcu/6p0YUsOqQIqFV1QRozyHPN2fl5QZ0/vrg8PdFBkglnpXIYYgh9U4mqaKRXH374aHT9ze3Ra7PzOujkfDprp/TatR9979vfTpJkWTRvfvFzUCkrRLI/79xlOhwA2h3e3YjJzfe/9+0HHzxv9/Qw3pZqXtbFy/sXYh1jnH94//ILX+jHebCe6nJZkRR4hJOewq7lvUDUSpZdJN0f+9KX/v57D0Y3Xtv4TMb1Bz/3M7/43/zRvz4/50EfR9Aj6IZ59PzRcc5dfVIJ1qX5KDo4bJaPnAdKG6i6KHeDfZx7/N69yaxcvb0bvH239/B0/tG774+G+5/98d9X4z24xS4h/K//4v/j2bNFw9rPXOv/4h/7V5J3vvzkwYP/8u/8W3/tP/7f/M3f+gfvvPF5s7JY9pxfNsrs7h7c++b93uZmL+s3U/3ajTv/6Dd+t9HVtbuRPNGLswuwC1++fLA7hrKZKv0hj0l8eHc+wWFYtWAFYBz3uQ2vlK3dvT54cu+ymJ2w/jAIVZgG89XE0gaigQehMKCarY0sGqm2d4YsDuqqgHQzigZG1M6a3d1NJVfeZj/y47/w4sk5ZrSXK4RnXInJ9Nx4+OxkkY3GO5thb3RzEF+T64VSFXFi9vwsho0NMA/QeBBxkPbTQVF8+vzx1xIGLh7VURJS7ueXumnk9t4g7WcdsFnkoHHUwZP7VXVKXz6ufkABRkh2OggDCBFhgdOOENK2CnhrnUOQAExbrfI489YxCgDwbV1QgrVsI0rqqtZCbYzGrZFhHDFH2qYqV+c89ki7PNlgRGsnZScJZ1pUF5NJ25TJ1mg0GpaFIEFgnV3MJ12zctYgFGjrsPfOdHmUQ88ruSZUa6kZDwkjQPlqtVC8SrKeVCXQTPhWOdgIpSVua0kUnD1+LquzkBMLfduYsmoJIWkWN8rpVgFIpVGMk3R7myQDF8RhP+MeamGjOBFQ1KuSRHRjmEIHO2qs91mvp20xShNZ6/L82YPF45D2+yy/+s7V3mjz2bOF7NRydfkjX3ztvW+
+9+hxsbG7FdKBqXwc99LB8L333hvvbepaEI+LanH7tcPFXEOH+binAmLZD5xdikXRfF33RsPbd2+JphlvpJwRSs14d6teFE4LAoLx9WFvMC4a8vj9exhnlGAe+PV0xiKSjuOdq68vHz69PD/du7VVzddhGHRCMsi8B3EvWl/Oy04FUdQb9kWnjfZVUzNOYBSGw/FI6brB29t9521zMRts9i2BAAIPHMKUUuwcBw7LqqSEhWlYlst8vCd8YG2jl0sIAyEERm0eR7IxjEEWklHSCwMshVxePIc+4BDOV7MPPmo6UWsBszwfJb318fOjB5fU4sePPxr3e+vp6tf/3t+4srljLUlG18e47brWrS7zEHBOLk4uSqdfefXmk1L0b+w33UooiQjev30IMaRZ2B/z73z7N29+9vNv3jlcTVcXJ0cfffI028iuv7JPvPvOt7813huaziLoXn/jummF/ee9dXUtQhI6a63CSZxZV4myDiilEa7n1emTIs5cFm+KPBGuW6/mCCQ0joEG1DovJQNEFBVnWYBxZ21dNdp3HoO2q5zVFMVp1md5H1PfH/Sh94tFnUAcEI0wW62X0lqhFIGoWHYgJEHEgPdb4z4nULX13LoeckmUSEdgFgeha5bL2fRFlO7FOzv1em29QxqGLK6bCnHSy7cq0QHoZxcraDSltGurw9s36/V8OZ81zfy1V29GQawNJJZ3VVEtF1tXXvNYYguA9UDbQS/nBD87eeYIi1NEeCqt7vdSCLATypvGSRPwUBlvjQMeiq6UuhJtneY5hgBB40HLCJSqGyQwxEFnGkxgp4vBYKyU1wbUbbWo5uVqnYSYO3T88si2a0ojxhOAuXakM7AxVrYtw4QFYd7LT2eXa6nD3uDOK2/34uhiUkxOTpFDkLCAYt+qWs0hjcKwL5XsOq2thM5ladRWa1FXXVcu6qo/HAZheD473dwYTOei66DR2jsYs1DZpiSXjPgAeA8xDagFAAIIGDIGxFFojIizUAobMoq8iZOQpbFFdHqxHAwGk0UesFEYX3328JNer394cyvv8/MXT5pWHm5ud8YrKAkPGeVXwt58UbTFSkodGmWMclojxLM4Ntos22k+2mikscB4IKQCDHGepovpzEnXP8iNBhSbXkYdNlkoDVoz7vWyWL8sVhcXxFmgzLJYLk0x3Bgxx62S2inVtZt5ADtpOtYof3Q0SRiLwpTTaLA/+AEFaUI++/Y1Z/1GzA4yjvf7RlaiS5pOd5XuJSgl+cl0leyF2qPGig8fnXlEppOLtZRpiL2nV7c2zh9cCGC7qkrjePfglfsPH03Pno0YHHJdLlbE6PXZ6e7O7tb+/ocPjpeF+s2vfHRzt98sTmmWvnj6YrQ13s8G8bgvYfT8w8QKmfRZvsVLb6e1Ho7Gg9jpznXO+rJUJ8V24AY55B5/+M1PWD8N+8F61q5PpihUaYTy4Xhd6/29G7pSs8mTAfZ5gD5dXI7HmbF2OZdRmJxfnNSyeePG2+ezWmg/5LxqO8wCoTSNwGw9oyQJk9F0qWS9HgyivY2+ejZjfOAkPG1agBEjlGgPjQshi3CMEa3MwiFnEQxogJwnBDurA+q9oTFn737z3XQzbps2GuQxIicvzhHysi4qJ8r1YtjfIYiRgBPCtVbaWYwQcABSHqd4NBzUusV6FUqhZ+sXn77vDIhY7/LFMu5t5GlSAtkL87IpAPJtWTqMsPNOmdMXR0YpVYGD3VdmlxMxr5PBpgd2XS2uHETTpfLaNY02FnRWWy44BrattBEsjstCd1UndAsD5Djx3kEMAQa6k0Z1iCCMMQRKKcV5SjBgAYLWR9Qr7zz0hGKGEbQAed2ZzmOY8gSo1nkNMQQOAAssUN46YKy22jgLoUcIEhK1rUUAe+8hQg5A4LwxChDIaGCBhQ4AB7RUzhAhtVQQAdaWoo1lNgiiQClljLLQiNDpLAoZ0h4DKYAxLghxFBMLURoFASO67v4FqejKlb2Tx6dszBBsswGWVidRImRDAYYIDaOdXtYYqAa9XlmpIEr60TYNmFI8jMhw2PcUTo7O+4f9arbwuuoPhgzCjSwITN6upadm+9pYWCbWl8ZJVXUhix3TxrfCc+c8aIyuJp1ScZwOBv3QQ7A5fPDsk6vXD1RrhVAeNgGiq/NC6cbadpil58eLYHSgLvTNG9mqOLMepYOelnrZLOpurp2Zr6fOmtl01h/euXP18NOn9wACozSbt2W2kRMbLbFKtHnyYq20zYdMrqvN3e2N4Va1ztt1wZNmXU2YToiFt95888WTp/tbZKs3XkyOIKAxCKflGYGKIDg+2JzfO4Vd9PR3z18//PyXv/S5F3q6Mcbd5Ys2LMl+b6XlxfzyJ+5+7u//vf/i9btfBnBzlN+9sdN8+uLxf/4f/O+RBpfPFy8mL167/eqN4ebZy6nGZnNAlLvM+/TZ0yd7e3uM5kUl4kGYJIFzzNkEYnO51qYDW1uvBMkgG46eHj2i0C2m5x7wAMVB2jvce3vd0K6bXZ5JDdo8NMZZD6i3rcE+zvqtqKKYVQ1Yd+yd19948fQ9BJ1lzgGAtA/oRhhdi/ODrc1edfnpxn5Oru4cf/Xeel5aCYcHfaHsRKx2DsiXv/SjVC+mk8XGeGAQbkVNsH5ZTJ3lrXRC+QDLJEn2t0NIQkDDiChAPI/yrgMcBULaulEHOz1L9Hx1vjvezMc7j+99c2d3eP7iKI3y/ji17ZIC3haLfjIEGEQxk7oLSAfcZVEUmLl6eemFCGPqnd0ZmLZ+7mSXRDGAuOhklscw6W/sv37ZVN4zCsU45dGg//JUGOiDf26xC6KohRIjaIy9XKwhgARjiDxlmEaJsQ5oSyijjDonmk5JbZ2zwDvVSCFlwEJMsLQmCELKGETOa6+1McBQjyEClFAEEEDIGqGMwYgwRgHw1mmEvDE2YEwbA6GmCGmjEfQOeWu1AQ5DILXS0CIMlZQAkIBRABDwEDrndNM6E0UBowkE2HhrnI+DlPNoMV91QjDyg0g6RRCOBikmSdea1bK2qkYQeuA9As4AZZ1yHUTAOAIcAsZFNKQAr+rKaNjILgg5JgRBKFXrMPPOQwSVUtpb4F3AiXEWWcgIRhAWq7WUGhFMOKGIUEKFFMB7D7xSylnvkFRaY8gggRQT77zWdt4tMaUBY955qTT02BqjnI8CDJCFADMSOI+Qc8BC4CBEkOHYe6iUcdZ7iBopQhZCAJ22CpQBocY44xzGABEfIsAJB8ArIR0AddkRivu9QUw5QM4DX5fCOwcIEUIahiHASAPrHCX8B2ZPDRxB1AMAkUcYAQARcAECPMusNxhiQKBSbj1vpdaUMcYQxkA7X3dSK+WMUVpTgjzE1mIHvNKOMhCFAbIeYQIhbEWltYAeGm0RRt45CAENqFIKY8gJhhBY6FZVBRw0XjvglXXYA6Mt9BB6pLX2wDNKMQIIAGst9I4y3E8Tba2QqihrbyzBOAgCiLDu5A8oOHvW9kxU1l0QNK34CIbbv++nf2T14YnGk+Fg4+T5Kaa6vqyhCpJw6GB0WtVx7NIU9tOYqvl/+ef+wX
feK4Afmrbeueb+5//Hn/r+849treOU9dKxqtp2vTp69mt7139/PIiV4Zsbg+L0vemz6fu/8SGlSd6HlIXTl7ITJOBRWwhlyqxvGQf68Tni0e6P3qn6dmWE1tJhQ+IAGtWqBiAdb2zP5peHX7i7OpqLVkMjiYfAwfmLFR5FUc4++/bNh8cXT6dHP/9zv7dpn1E0xZL9hT/3Hw5He6/fPTg7eTZb69evvbI4fvHy2fG117Kf+iN7UE1pwoVcb91Mf/b2l0+fPjs5nvY3VFusZecy2IOSMTS0ovbzRS+JtBdsY2iCkKZE5B23cv/a4Gv/bL2ao9qpX/rlv/Vv/uk/+Ku/9d/+13/l3//KNz767l//2nyFs0HYEXgxK0IFVsFs72YWh2FMSVOWrpbF0SkPyXC02wlViYVYIA8ihsHVfr84rckgev58qSP8xT/0ud/93vmv/NI/+j/82T/yc5/7zN//G3/3xdHR7RsHlYJ/4o/87GZ2893fmbl2+Bv/1YPLj7nTeO8nDj1qpPB7hxpQU1Xt7/ncjUnVhkQFoVnP2vWzy2wjnD+8yLc5F+DlN58cfLZ37YsH89k5RRjZCY+ipOfSUb9aLU7Onof+0MbJW2+++b3/5u/3+wBFB7T/alNMp/WpinW6tzk7uThvmjDIrSRWmiQCQpSyKznsECQUOe0u2rbz1JtShJA8ff4As15yZWNzRF8+eFGulkZ0MOF5wgbjw/LFpZlDbya+O8/6G7AXFys5jHZWxXI+qdI4jHy4ms6TjXz7YPPlg6fJMCYEnTxeOMgRSRbTqheETtYo8rdvbhx9ulgVbHYqt/ojAOYAAA8c5wRTmFCulKecS2UwxVI5iCgOItWsha5pizgm3iMHgFWGQJak0bqe9IZXZdmtigVkKCbUWu2JwByyOESdPF8cDwdxyrJejzpr1qKcLavTQs70ouievnr9jjdCalk3KwxxkuQeAalrQA1BlBIlrYbAURRJuyYUcIp005iqk1XLeSabkmDWCg0CkiVclV03ryPKTp5P93ajuukwoxBQzGJv9bp10tksSTQNR7euLhsVbAwNJMq4tbSglQQh0VkpbZTFHlipDUZEa4kxkKslo0xZ57y+ttM/OXn26dGR0Xajz975zJvLyYqwMBxtn0wW0FPTaT7O143e2Nw4PT4vH38stCHTIDao6RoNtFjVCIAg65VKaohs00YBu3PrZjmdQaBIgK7t8eWyTXtsvdbWocW8klUnhY5c7l2ymBsH40G2fXZ8XLTyJ774xrJ+HqUB8bhtW2/sTm/77FndNuD0ZJaMhkxACG0jOu8Ms44AKuqORymkgKYZS1KSDQjiHk0B8GXXBojlWdSUHYlDiHBRr4Io1Q6uy+mVzX0WUiCc9iZKe94io0EjG8QIY6HDmmJigDMBDHjYdm0rK8K4Fg01LXNGd9C2etGsb7/91vHlNN3auPzkAcfg7q23XxwdEe8HvcE4s56KZGe/KcWjZxfry1We0GQYQU0MIsaCcl42ZwtRl6vFrN+PvXaV1mmcS2XaVmGe7928+xf/wn/xb/8b/4uvfeNrnTNhBPsxPDt94oDvD/oxDlBEZadWy3YzYeez2Q/eAoZRnPU60UKPIk6UQDqgpbGyWYuqEXUbhfmjT19++nB999Ytw1RJRJpHvYQ7IYabPQwkbDVUbSP0Yi5mxRoHYFm3aW/ECecs2jvc7ZBjSTS7bKySjCIWMauksJJHkQcecLxz89A+LzkKjQNF3SWuV0mlpbyUnbBoPIQsJDSItTdCWGTBYDc1MVGGKmEIDXRXU0bDkENALKJS+wSlVSskdLqGGDkKA7HqKm++Kx71w/jK1Suc0ul5WZS2qKoowpRwaC3xXLVeKEnwQDnPcABAULc6wUDrTtcVJcQHwBpotfQeOG21LBmniPU9YloIJQSLSZT0q2XtkVWdkFrkWQ9T5OUadH65LJS1gFATcERkgKBsGwxgHIRxyLQ0GFthJWi7EABrfNnqdVc1KB9duxIn8UolrdA0iiPiZLU0UO9sHy5nhbEw4AGLAsaY8VYWdTmfTk5b0XSEY4RxhCOr2LIqx/0RYfl0toxoShlUXgccQUjq1dw09WCce0AdIVJqSyMeJNQSI1UtWmeTkFDvRBYyKSWmTMsugu21V3Zv3tg9fjx9/uzhzRv51Tsb0AofwK39Lack0w1ACHpdr1eeMY8Yh46mqWXOe9C2wlvgTAM5VtpY6YCz2ndVU5MYOyW9Femwnw3C7vjlywfnCLrrVw5gWayWlwdb6eXq4zWgQUCCiC+PCw+dxqr1XeE87gRQrdTackSY391OsjAUiE8v6+XM1b65eTNJKB5myQ8o2NnbRLL7zm9//+qdqwmz84vng15y7cbhYiVkLyOqu7isMcFGqDhh5xdnjqTjfg6hbaSVTbss1XfEh7ujpHUmyIMR5dPpM566g9FBe/Ty9PGT7SuD5TRzHjJiwhELmMWIX5xe7m7mb33xMy9PLzoji1pC1rL1aZT3X39r/3vffbh3uLu1lfX7fjwIceEp9w45V6pxL525qZZ6bYO71zY/+M1vmBQZubq2s9XrZSRQcRiPB+Pl4t5ienT91l2vBcns6eTBF3/vT548eKLr9vxyunt7Px+F1s7P5i9AEI0H/MH9T5LB0HJMYooiSguEGfdexhFDPHfGlCultcsD/85nP3f+3W974601wHnoHMGmk7OARwBYghzCHnoXQAIYUtKKRR1T8vJI/oGf/de/e/T4vYfHfRxcvTKkIabEiq7DLOQoA9Yb1DoCvcNl2VyenW7sbmKOAbY4gvt3rnzvvQ85doPx8Nu/9c+kcnu3dp1TCerDAK+qSS90xfqE2jxgRCpBGQ3S+PximW3sJqNNryuh67peEzeaXSwW01mUZVkU6G6htUnTJKJZtZoEDiDTBYApIbUHQmjjCYNmlOf9qwcn57PZ/CKhyADMGVuVS49xlPSsRRBFyLeyrh0miMUUMYmsljXHMQHQe+WAcxAorwAUxgjksDHcGscQIgxL0SijDIBhP5JVZzSGwFJoOMfQGKOUh846CSE1RiBvIQAEYAQhphRii6kPkuD0tIizAPKobbQRFgFgtbx5bZiEVrUdozgIQuf8Yl7mAS1bTT22GjT/I6koitne1e2mVqenJ3euHz57eWxwUqzmSZZsDTeNKysLCA4LySDl88V5nICtXm6UNtrXbWVFi7FdV8V4dwxAdHa+2o1YREHZysn5Srv02YujfMgpdiEBBNHxxmC2eIEx6Q+3y3kFALRW9RLWzyPVapZFEMe75G1MbFF1qumAzyd1PVlW6ZBDwKqiTTZ2tm+/WrH5J+99dbjBhTEYuzTPVGdQOLx5ffP+R18PQxIMkxYXdbviYaSNU0qdHD/f3bsprJCAS0sjF3pFLs+hqGwWbG3I3epy/vDiN1/90fydt948evjcQ45Ir78xKqqj+aIQMGBJ/HD5YmMQOuKU9toUt64d+Pb6p2WZR4NIVeHqKMzx/RfvV8Dfvrb3k+987s/9+p//6c99uZ+OVTb4n/1rf/L42fNOl6aefuELn3/2+
GgN2WWhN/Z6MWd+LSrRyWKuesHFeTsY7ncCZRqycDvgmTKlxTocxHXd7u7eVctLS1Ells15xWhsfZhu573xYHZ64VqT9bMe28Fr37nyfHaGtzLTgiiNst5G3dXV8jgKwn6vX9QOwvTldNYYgAj2CN64dnvy5Fl/eCfmA9PO2penxflDlJNpeXF5cjre3DE6MDoqq4JkaZ6k5eTYah0NtpfrwpmqruZdqzY3h7OpOhhEWWpR56KYC1khqFsU9ke7Wq2rbt2ULY5iTJPR1o40qzTHq+n84vjpndtvvXrjjbY5TxB/+fR4fHi9WS+ytK90PVt5LQHBnmCkjLS+W89PIKRxEDcWSQQj7q11y0WBIeknLOTRVDQ7o17KwkbXNI2tJ7PLZhAPWwuCJDDaQPfDG5pSyhgrnYuTCDqPCeaUFnVLOCZBWLfKKLNYrNJeSpwRSiltCKNWaQJQEEZREFutojyFCEkjgfcauFp2hBGOiPcWQWy09ggxRgmmHmMEvOg6hH3IvfXeSwkg9EJp45Uy3gPrgXOe8tiKzjqnnaEOGyMCmkFIlJNK1hRC4AxhxHlqPXBGWg85Dx0ASjceAEIxIITwgHLelCUDXClPSdjvIYKdaDqplVCCEwoAtkoRhjEiEFGlNcO0Fp22VlsbBDwO0k5WlCBGE+WcliKgAaVUqY5Aap1vtWYUAqVR51rrhHWY0ihMvdIYUMSgNtp5rz3gjDvojEHKeGcVZMha6zUwVkHHjNHAOco4I1R47aElJDJWxGG2XE2tg3Ge1UrHUQysbbW0xmAElLLCyijOIHAIAamllB56JLXyzjPGtLHOeu8khKBtGwtdmKQAQMiIp7BpJFQAYIKo19ZBghhECFKPLESgUcJorX8QFcSkM81G0g951MpOCa2MBQAFAdbaNFILo2trnbNxGASMOG/Wq8IBRzmBAlKC0zTsus46b51NksBITQD23gLgnHMIeIYJAghBrJ2VygAPEMHGOR4wr4zV1jnkIdLGGm+DiAPrnPU8oE47hDAFAHgXMEYJhsBRighGCCGllFDaOcsZM8QlAWultNIS+sOWFuBixaJXPrfTLha6U6vjp5tJtB1EQor+yHcaVhUwWl493Li2u3vZTrps3E1KCDSW69/8lW+9+0FtwChAIMz5v/Nn/vAnJx+0Ta06wQZ9wPxod9A0pYkXpXi+Xubjjd3Zp/e/9Xd/E5ABJdvnp/rD9+vGNdbagAdb47wtXNdKHEDKUFF5gXDU602rWZpmyEnibURyH0TQkxaAZS1aP7p6+2C498nLB2dAW+A8cC4fBEmKs4SJbvHm6/0H733lv3vyvRwDRjrIcNnWixf3D27dhZzuXNl4/73ngz4go7Sz7dNPz/f3CcmdQ+psdjEcb/Y2rl48vejauljbXtpT1sm1NH5WN83ylIdElWVx4+6t1tN5pTHOOwUWjIV3wm4iAEdxSp49fvb1X374l/P/7PV3dqZHiqeRtLAyYGM77ZN6c2zH2xhh2EsSYarFchZZgzAFjiQb+93UWuihJwRJCGypVBAlup9Op5fDSwnm+EYw/s7f/n/O6ubB0eSLf/gX3v0Hv7a1ddX1Xr+E+Ts/9+aT775Xnk2+8FNvJPD6+dNlrTuX9BHhh5uD9fzTL//orX/y9fvztfjw3mNTVHfeGDRVKQw+uPK5z/7Lv/iVv/P3cLT98Tc+sXi9cwC74mHci1MOsSJOuDg85PCzVbl9en8Z9++q+dNisdhOQTE7n5qnamBAptt2iRwIeZP3bk/OujAh87a4fv2z+rKt6lVAi3C3M7M23dpQqC3PXiJ3RsJ88rvt7xSFdmJzI94+7EXDrHiwOv/o00T0sivD6ePT3gD4qM4287qBAAQJv3Z2dh9iMD7I52Ia4kVZFtEg151yGjAevHxZBKl945XN6aMjpIGG6fuX9vnTAAMsLsT41g8Hkj0AhNKyaqIoIjhEOPSu7NoaEIoJlkpAD5MgB4BoA4UWUcLybDBfL0MYjAdbkBLHkbXIeiil0Z3iPGBpumoKZVHMI6c1Q05VTdd06/nSW9PrJcfrBoJiIzxPIzYaDYLR5ny9DEMiVDkI0igYzC/PamQ9iTgOqrbChNR1STzkOPDIAYSFNhATCJ3XihBlVqt2UXEcAk9YGNWeCkTG4/F6sfBxsjWIjyZTzdHm9auOpzbgEQiMdt4q7Nyg37dELFdLrBiELgoDY2xtOuAhj1MHoRSeARJyH/YiYztKMEySYTp68vHH/eH5q2/ecAAsa2u0Onj9TY9DAH3kTQzF/Q++fWV/MNx/Zefwlafvf4AY7g/5Jx+/q7W6czfZ2t5xYsIDMF9eyHpBIGXeW++Onk4hxN4r5zEJmfdIKXHl+rjpjENUWxiG9jNffP2f/PI5xfa9e59kkPQHkLEUKTi6tvXp+dH2Tr9VbH627oxrFoskgONB7lKaxOPO6OHmxqpoeBjESU8D2jUmHfZhkr321uG7730T8aS2Qio0yLi1dpgOoFVOqlHWF7pTHkCGhZIMx1aakIYozBA1RbPSmGLoOou0lpzyJO1zHsquBBTvX78ym9TSlNsH27N5eTqf9wYDIerDw83Z+VldFFFAe/3hg4fP7ty5imncipZGCUj4/igvTl4EnCymi1EYBgN+fbyLnOpvbg6HyYvTkyzuAQeN0hwxSEOC0ejKrbJw33z322FA20pML9eLyfLycj7aHQ03B9sbo3XTjQB87e5r7/7Wr/VS9sO0gVA1KXnAAhIBZ4cbgx7YOFuslGyjftg5++DlvOr0N779Msz3r2O2sZ1TSEynoHcs4HUr6lWNMbLOSuUVEMggxuJ50cQsgAS1nZK2VUJQD71TxkGtNHCAxz3ZdEUxpywiLL7x+sC2zWpRkRivxQrRgXJA4eCyctPFlIbo6uGm05CwPB/s1tbpYoWAr8pZFA0oQUYZrXEUhCFjmLhwK+oPhutVUemVIU2wmfWt+eT9j0MYeKO2O4shc14laeiwAYQKK8J+0u9tLKvae42wGWZJLw9XZaWNB21DkEvyxANoPaqbGhISxWlVzCAm3nsP40576C1A0Fp3eTmVUggjrLM8iEVrlPN1p7GHylhMAMQEx/z6Qf/0+aPRYJPQhLAUUINVJYqF7WoEbJanxxfL43k93Lu5ff12FI9lWUNInFEOOGHK88m6aOqyKYI4d5JKz/s8Z5QTDzWymIPnj59nyXCY9ob94RysPSf9wxuYcUQwY9wouDXY1LbzQNOAdkYbQmvlha7jmCKsMAkYY0HAi3LGUIgIoekAUzVfrYQBtOiMVJDY+x8/7Ky/PDnLkmHAbFWttjYH6+VlP41KKSeXU8I59iBkIYIaI5nyQHpnaailQgR546wziGLgNHZ2fjGBQGchI95qD0vVJTLhPCTSNKt1WZX1bAUsnE+Ps95QWTRfXva3ho7AIJbnJzPJ5Xi8MS+chjCJ4uniMmZ8Z8yEmmabO+fnujoTyPUR6hAIwyA5fjb9AQUXL8/3DwdvvH279m5ZycNXXzk86C+PV8V6TTigIf/ag8e7e3tf/+DBm3f3rtzae++T81fe
fOvl4/sc2PFev3g0/fjJad0OPWM/srVzbbT1zQ8+vPOZVy4Ke1SVvRgPMcrzbNU0j47Pv7A7fu3VgbFdGu2vrHowXQbOht6uyiIexKu5jAzY3Oq9/dlXR1nUrldH7913DccWP5tPrtzY12V7cV6XZSM9nl1cOF1A7Bm2ygOhNWOYs3jRCMCWvcG20FYCYglZFY0D8ePH03olnLGs1zcsaI1kvN/5dv/G1eNHj1MWVZXSCrbICVlDzITSl7PCIYIhaFYtIQyCSFD2vbOnZ6LOGUcOxhSmUShUBxCy2FsPEhpJ11qhPHDAGatlwoM0ClaLxQcvH/xP/vCf+Mv/nz+fB/h4MoNKe2VeTquNjX0jyGVZbO+lqu7WL6eMwTxN0ghjzJwyUZjcf/9+iKOA4ouzSbYxnlxeWKXXVfv67duFEV50CJnLh8fQdIOkXxXt3ddvPzm76LQ+u1xcvbZdL9sEu4PbV2sHcZaN96/P1tV6ooSPNHXGEAK9crhuGkrCVVFhxJ2qOiF5HBMITk+f0cHm7vbVi4tzVa2JtsD5VmuIkbbeY96LMyCdQw5R2ojaAYJDFrJIK+ugMdpi4BFS0lhgBKUUAtbWChPooEFWOGNqJUjAGAES2roVV9IYegmAddYhjB3CwGNlAAYGOAmcJ4h7gI3ziJBa1K23cZ5hFmoPJvNVDANsrfHLQbCthWCYgyBSpsWIhnHMdZMGWHdCAg3+eTHFD6Uiobp0FAvjQdu+ODu+emXj5PxSKV2W6zjMatn0XEjj+OXlizRMoyxIMtaJ0gK7ubFhte9tjqvJSOvWSEV7vWQz74/y5XS2Xre9/dHg9iEoV9Wy6gU+igMFg8nFEvoMB2MhEoB8HNAYu2pdd4UzLjpfFXEKwqzHQ9O12d6V3WItqBHbVwcQS9HNislZmG1OX5wEqLd3ex+acvJsTlBIAEY0Mz7tmmRj/CoGq1C1WW+kO5nE43SwX1+s9/c/l/V3lstPB9vR7uHh7OlsebmOh5uHuyNJUtgV453w4OrdKBGffuMbi8V8PL6+rorpZHlwEGHG4h5lMcLGpeN8OpsqgXkJwnj4qCjufPmzP/3WO1//lb80zviLex/ff3Y6fvOt5RR//N5suHH94Nrrs7/1j9PRy4vymYkClARXbx/c/LEfezmTH370yfSs2T2QF+4SIoRweHne3TzoEzIMWO/i8kmWdGF+BwtZLF/4OGxNSWiy2d+YFJPFctWUzdVrd8P8wPLt8/PT50fPspxWXYHK595Py3qVp1t3dt8Q3coT0jSmFL6rHdZREnjRLIebd7BRztRlW1uAGM4uplFH3rxz94/O5u+FdrmYT7LNrJotzo7XGPNa1lrZuRwevv66EgwgwylorO2sNd7NTk+8Ac4FII+fHZ/uHmwspguOLNUWegS896KYrZSSBiAYRqHzgVMxUHS1bLOeGSTRyeniE/l8a3OQb1zdJeF37n8wJluvv3Hl5Gze3x5SCMpaqNajkEkXMEoiCh2gZSPO1mBjd7u/maHuNA7i1Wrd6/WKWmnpB9FeLUonRa2QgBgztqw87XTXVF1dDHrpDyhYryvGIkyotcAoq1thnIaQYgB9LZS1RugsTryxjVQAWMZolmZ1WaRJiiiU0shWYRoAZ1SnrbGEkoAx42zTlhhiignBBGIcZ1xJW7U152EQBtYpYz3AGFGutSOAKmm98x5AYICxDnjjPXLOYgCcw95xqZ2yjfXGOo8oYZQ5aKuuZoRjDCMS5lFkvGxkmyVZVTXGw6ZrjDMEo7oTUpogQhEnaRRjgJxA1lNjrfXOGccB9NBabykCnVDaaYgoxjgIY+0twMB77wFGAHIaWAel8ZQlGELkNYK001pbS5DXAGqMoANl2UKrna8B8pQSTsJ+b3tdLa0WznqPbcA4gtwBjTH2AAIIKeXOaGeV9BYhiDwq6ioKybKcYUwQwVVTYcp012AAIEZJmgkhStGWTTVGjDKvjbLGYEob0XngEOQERcqV2lgIvNTKE8R5gEMaMKaaruqUNYYzpqzBCEOMCYAIIC0kAN5BWCnlgUUYYoiNQwQmogNltUYIAIS8B87bzjoh9bJpgpiHCYuCAHpQigo4Gwa0lsJKFTCKOGnaumxaYD2mxBqHPNBSUYIDzr1zHoCyarQ1BgDnvVUaQiC0bpXCCEIIGUOIUAuxQ5pCijG2zmAMGCUWWWsshMAjhAkmBBljgiCIw8AZLaUE1lBKCKHYOUw5AV4D5dwPF9DWkwb24WC0N79ctZd2GA9evz3+1lfvzZzbfHO3rqaiaIIQyrI6xUdonNw63Hr8vdPT88nRyeIf/+pEoxFBUOvq9/9P3zAJWDV+kOTxKOmcXS0vbLDcvz6gfQDqp2l8x7Fwc3RwsTo4X2hthFg6wsOy0R5gacD08mj3SrBxN904iHErWlkblgtGMHLKd0gbp72zAZaB7KSLcJoORhts+uAC+3rvIDl/Xp83bRJku+MMAdBWXWdDJEts5OXpheyHG+NwerKMBwFn6HvfvIeR5wN3/ExdhqBUNRuQD7+7sDK6lqDBXp/nzDqdDfNXyBsvHz2xBC/XTSd9MvLXr/Rpf5CkqGvt6cOSsNm1N9/mIWyhms9Wa9fQK7C0lYPwXIiv3j/dPBy8/6jM8/D4ySwfDO/fW6ogQJHYfi3Ocvvy6CLfSi3x/VtXUu0YwBcvH7OiJlT0MljWqyvXrz15soBx/7XPHzx0oB7kadb/7a99WlQ+6hPmLnwh3tjqf+Vv/UPQgKI6+8f/4X8+2N65+YXPnHzwQF1c/sJP33B1cfX6zmd/5se+/vWPBxAePXqpOrOYL65f351M24uzNoLk+XHFsdw+2PrWV+5vXHnx81/+d3sbkfXlB8++7Zvv9gcLz8Ty/CQKWRpcGfV/HInPSoWImjF6dVVOwt6hMpt1rSkLy1IXs6BP+kA1at6V4XEvSOSyhYirKm9rlfc3muqe6Ark02Y5N203nzVTaZUpIYggYk1rWYtZoWSwCmtyLXlrN9vNTAD3rhbNk0cffrLcmS0bcHj91c//+M+2gpfVo+/89ocb414xeVHNljQOaBBMX8yU5Ugz1oavjd95etLRmH/8oHpR2MCPD3dZ73W+Es3/r6vIQAMg9g5bB2rdhGGo2o4jQjB1xhESGacd8HHIEhIoLVdVd17WA8c9xNavk5gjz7SH3qEwjJ2nEMHtvf78dJEPMuNrWde21GEWBVE8eb5uGGwNOlu3kb3YTKn1HaV9B71pFAIMQnR0uk7D1AMVUdo2pZFl5vM0HJfFucOA8bDp1OxiOhjmQigMKPZGVZZYlvXH6ebuybPHbdNCT+qiUYh1ng/DQbAZ9JKoizMpDGw1tggABJThxKu2ddoZjYpFzUOkKU17g6Y7RZCQiFPGC1VH8WBzmD178r4H3Ss/esc/XVmDtnZ/jHVlUyxWi2U42LBKcx6EcXi5XgGjDTE/9nt/XxLq6cIcnR+HG8NWuQrSK299qZhe/MP/4Z+88/k3NjY2t3bHa7Gspov9/e1lJZAhUigPbGCMZnx7ayDX5dOnz1997dZwawOko9W
LCyrbh8vzbBQdDLfXVR1os7W39/6H986mH8YBHQx6jVAYuCTCvWHuke3l8Wpy6ZyMIo4AraXFPKIsktLQIKARy7L+o+Xq9FsvonDc749qyC7PZrGCASOYYKNbhiEGRqhaCsUpxYg7QGmQc8SVFsp2WZYuGlu3XcyDOBw0UsRhZL2mLIUgBDRi3Fg0j3uZ6GTXdFaTypZREmkLEIqCPPYEXbl2YLS/XFzGJEYRoSzc2d6Y3v9exwBFcHVZdgRt7vfvf/rJtc07zJIIYmBUHDHOQ4iZc0DLpsMo68Wda0gYDKK+BqSqmyRPV7MiTvnk0lEeB8CfPvnw9p2bQZqCrz0AAEDkm7IMwk2ICIAeeMsYGQ4G56fnl7NmUbalaKpFu703cEGnPZ1X5dXxjqwaIDTBQBrbaYWtX63KOE16G6lxWLvMdDVFcL1cyWZFGVbCKqnCNOmPrgTcVaUBJiCO9IOIJ5mDpbMVZZ4TSQm+fu3Vo2cnWjmjfIKDqixcBXV3fuv2JuZoKeyVvYgpMZ0vZLHWtRztH7A4ssoqZTB2CGiS8F44sMRtUWccL1czVaqt8dbG9at3DrY5RI2sL+bPb135nEeuU4oQEoRRByzO4nQQyLrGULddxQjrZam00ipFaIwR8hBAAISSC7O0yhEacMK0pDsbPduc1WWnAVAGGgsAgt4rqzEGTCuhrccQ8SDoOtHMV2kCFss2572c9BxiQBvkpa7L4vIY0qAW+nQ2LRQKe6+MD99Mo8BoHLGAhhFjGJjGaaCMQJ5cHBeYtFeuvsZoapU3plvPzqxrG6FHO7sc0uEg6ZTOB/locytL08mq9BANhn1mEQJWFGUQx87S8eZOV3fdqmS9iIewH+Z1I4lZOmp6AxRK3FVrYjqIcBjYAAfIwsq1ABspXNuJ8ThgAKwv57Jew67IEnT69LSt7bif0DCkwGOMrJZAWyM19B5jaLV21ltIkt5IWQ2A88ZJqynRIUe2bAJGNYbOEILS0dae0ZbHseiEAWrrcLetJQQ+iLO2lcYb451BALLRWgGEeKNc1VZJFA0TTH29mQfXd/OXL+YJSdpS7V9/9Y133r58+THwPzwhp5FPOQZ5eHm+XC+bqZ5+/+n5Fu9NCgVtzYIoyLOikmVDBouGckzi8N6Te+18PRrnFLtRj3WKNh3b71+/OFbF5TPGg6cfve/z0fDqVj8ORHlhPb1+60bxyckH759If7mx1c97zAt0//7k2ih5/cfuPnj8lMa0Uk4UDVZSaVQs1+M+SVHIIJ0t1sKFG+Odxw8fKYk6bwA09bJdgZxLYVS7+cob+1d3Tk9mvc1NMz2rjDk4vLW6vLi8vETOewRG/Ux6m+5tAWMnL56365JkyeW62jzY0y4ZbF5ZPD6K00FAZZSE67XFPIEunM5LFsO8lyhiw4CcPrmYPr8AW6M+T1TbWNvRgAIAEHCIkEJ0jTYWxjQcEyTbap0EQcpwJ6TXstfLnj/8/p9/9L0ezxmLrPMWehbHfRJbS+rOAQet2q5Wx5DnyoCtgy2nG29oFPOmUV4I3qdCN03RHj0/DzHsmhqR5IOPTzcPhtPTM1F1Sdrb39pazxcaopPpeV0WScIAkCnD80b0hhllvFgt9vavTS7nHmFDoKEAUMZ5VK6LOEiyJETQSWWdBwBYJbSDLWZYW/+tb30lS7K9QcYjUUwbYQCjoFbSauOhmLdVGFLvjNMWI+KRzznHCJE4W62mMY2B1+umTHiMEFfOGwkpjT1QXac4x8CxkFFGaUo4DRyDzGgRJ4BSb4xB3nloAUBOO6sBxRQBqI1zHjpntbAEJaKCzrmL+XSMsjQJVGcgdBtRQFBrnRZSSqchQECLkBEoRUqwAIBwHEU5ALP/v1REgIlTokHAI3h8etZdtFC7NMuSODaeRAnMY4SxGvZCKDVxTrRC1o1uBfQuTbhXKIxdb9QPwrzokFKordvFfH06W7+xvd3qznqVZj0j63LV5IOxc4ExNOwf1OXUKkWYb9tKqoYSDqDaGHAjWlMuGYgimNVlFWfby9nJsp6O4xhB8ubnf7xtwGg0ePTxc62LOHG7V/ub483jZ8e9AQvIwLp0fOWN08fftRZMFzXDpG0XZdn1+zdVN1yuis1B+PDpBxfPvs9NFmfDXrpRLF5WVY1Mq+RaY7cx3ki2dm2eERqBtu6PUx7w4/MjHurQsTRO6tnZRj4olKA+cD66/uq1SOuv/L3/6oP3v/7lP/DZEKc7O9fOy3PbthTD3avX/+nv/urB1a0kS7LQC+36u8PcJb/6l//q5UVnOgM4P3jntere92EgRsNEiZbSkKcjb0HEBxggAtuqrarqZRztDLY2TdetJs+fHd0L4kGQpS5IVs1qnG7cOrxy9HyGzIplnDDbFHPvJQ90EPasFd61CJjm8gUhKQ/Qi7MnG+P9jdyt5yeGuijDsvUhIh7BbNy/LL5TtyexqzwVt+5ef/fkYVWcpszu7G0vV353eyekBMvCdl3SgwHmH374QUL63qDDg1dEoxfF+f5eToDIepvAMUYdDIHWcES5tso4ncQhhqBt9GAUx4hjk0C4lFYNxwMWqMniuU821GIFEC+WjcwGRgBodNutNjcOq446S8pVQyGlMEScKNVsjUIeeEz15GzRNI1HtFXQKERg/vj0Ms9YEst+wOdVE8XMELeYzgCE1pkgiH5ot2YsTELGmLZutSyEkQh76HS/l4ecLcuCRFQJ1bUtYySMmZK2LKqABd65spAAYetB10kIvbWOcAIhxIgY6RAiQRgQhlXXeQNd68Ig3tzY8EY7a73nRoNGtF1dRmG66gRDNOSJ0J3WQhulLUAAYYw98utqCSHGSAWEYuwxQV3bojiEAAaMh3GipNTeLtsKYo8Jl8Z5CJ2xFBOKSSvK45ML1dlbN2976Ix2COEkjhn1TdsiaAFBVhnrrdKSRyFG2HsCMQbe1+2s3xsFNjbWyK6FmHgAvPVSS8O88xZa6xBEDngPEWHe65DzAFNovFPAQ5vFSas6qYRUwhjjrQ+CEGDiIdROC9USyiACCCKltTPaGI0QwRBp4xGEzlmMiRRK+xYhBIC3AADrEcJGG6u1dzagTBvpnAMeUEYhgpQw44x1umzX1hqrNMM8CeKqraCH1BpdKWtcGFADEQQYIggc4jSs5NoDZ521wFedsMAiBHpRIpoWeAQhUgQroaM4gsZGAeu06iqphAoRxso74B1QDti2rikh2hgpJQTAGe28E0IpoSmhwAMttREy4Mw7J7Ux1tRSGm288xhjygjC2BhjAeA8sEZjghIee+ibThIEkAdWaYYRhNBZhyCUxiCMHQTOOWA8gQB755S0xiAIA84AQpQzqETbVkIZijAEP+wqun4wKi+a7/3a0xLaf+//9MfvffDy6OHjbDP71gcnf+Mf/M5+gPaHPCC4XnYQ4JCLJ5987cGzp0GcTsoKxTnvvFfi5h32J/7UG/cu7g2TCNUCQZL0N5SFCFNMm27yorz3dBQ3f/Ev/bU/+W/+wT/1n/yZ3/j68/e+/VsfXXzc54QQCJ3tpwHNweGt4J2fulOuF9On87
c/f/WJsovFAgfMopB4iCCVykeYUqyVLpGuaMVOv/396rlSS317e/hn/8//6XvPZn/7v/srgwHc28/W1aLurJTSUb6QWiw9jhiljkObbPLt7b0bd9/++P5vDAYM5cm662xLHrzXvbzXbO5M9u/04zTkg0JfFLyBGQ1Wnbn7zm3YlECVWR4xSFnM7tx+55/9ynvrc/LWlz8fQT+AwAIoS4mlMh4rAI4mIsnpwxMzOT/jFm/meudwOFsLARCM+fXPHVzcn0wWq9aIK8PQKiNUtbm/JduqK7WzKkyj4/OJJ5FSwClx6+rV3/7+pL4oWWcb7T78zsnlZPjWZ29BnH+eH3hGR4PD1wQdDoNPjk9Daj/zmWuLeVmu1pPV4qsPP15P2jvcv3p3Nw4QR+nO9kC5+a2N7Sf3TjqNru5uZK3ZjeG3f+2XvpH8zn/0n/3fgQvfDP9Qs7bt9HfS/d1hcD0P+usOy0kOUFMugV4vQzCAySbf3u5QU2lZKEf3eoOdu93FfRBgSAEOc9JBY+peP1a+MMTUurDawM4TGusKeEtYrxcRu5ijZmGSmA0DSgo1SkfNMbqx/Zk3fvpPPvqbv8QyCXtQzvF8rtu60lDYSspZ8cpm9ryKV8VKhFUUJE6CyayiocIsGg+HQ6r2ejuTp0W3proGeuG2wuDhvZMrcW/v+shMfyiYegi00QwjThEEFFqEELEIt1omFFtntNYYUR5FmOnFdKat1dLuDPtR2ANWIMgZjiu/XqzWYRSnUdQ1HcJQFh4jtipaAGASDZXxK1EZRLNBuli3WhmPAByEV69trRbL4Ti/sn/jbH4KUKKVDmNJiLZWEuySOFAQUUCNUoQwzBiGGDooIbU4MW5RzpZhCvM48cis5xPlmpj5YZq3nVl3Ir2yn4Q9h2FEGYaOYEASbg1CzlktN4fDeT033q7rBQmiQTZYVvNFVZbSYE+1s9W8zAd5GKCinCxP70ccBRRPH532DZmXeqVNTBFf+uVlN2IyQP7yybPNfZ+lEWesW69Ep1gQImIYwlGI0x5thXIuvHXzR3zNNnaHSR7XnaFwNJ8e4YAsrf/Mqz9Wzy44Z20nRdudPX9m6wpTKKRaL8quUt6052fruBdxguu6U60BVn/3+x8U1dpK54K4bBz1Ns/YeGcDMmKEV43I0qxpUbaxMZtXjXK9jfGwtzW9mIeQwLauLo7Sfq+XXAOQOOa9txvjHoS6KlseBZwHreo8NDwNgecYYEoRCfuyVa1cOaghBMBDDiHlAQIOQe+ALaoKqi4JAgd8vVZK4DAclMsOAZQnmTe+rOrh5ujuaOfd979bexNAMk6yKGS9OAsYX63bMAkfv/vBelrEWa837teFzykcx+zYehrkF/NTC2FVLaMgjYJ+saq17oIAF6WsOzVb1Rjo/cO9OKEO8TiNLyezZbkOM/bGa9ePH70IWRj3e1m//wMKjNOyNapteZKwmBFMjQIZxaMwXQdsURc7u8Pni6pH4FZOVVt4O4QQcEo7qYuyNUh7iozRcZ4SGiIUEEBxMEz6qayXwBkIHGFsvV5YqY1WRSEo8wgZ2VQhSgPGKfHWg7ZUwBtCMMGsXS7zIHt0/ISTUOtTrbowDdc1Op53RgEagSsRW1ycF4ulMnYwCqQp4yDqpDJOjoYbneq0ME5VeYygATwIZbyRaN1/63awM8Kya1eFsuLGjVfShHkEpTA8jIWSEcIBDWxXY6gYAZTGHmJjNSIhpbm2QLSdVgpAUYvCE8JJQICDEA37GYXaWGesl8oCECvdRelQOQw9wYg7JSFwhAYsjTpjGQSiqE3AtvsbEBgPZdMp5TrbKMazdV10tVkt1Nbtt+P+HrASCutbGWURQTKNk2LetXW7t79/76NP8s3D0XDE0n6Q9qAGbVG4TkLo6uUqz0LT6GpVxL1calAtVwCB/mhogYMQmVIC6Md7Ow4h40ndKac5g7iaz/KoD4ikwFfriyDOAx5qacV6JVfOeBTGSdrLqkUdRgGhbllV1bLyYeCRtVIGQQC7dWucLNquMJoTn3uDEaHMO2GMNt4BbymkiEIptZQl41RhJo3p6k50XW8UCOFEW7MgjAajrmi6ogoo3bq6i4JoOV2LZsGIplxo70nHlovlat12HljGWm3asnIESeeQhIziaqk4inWUfvA75zt0fwHqZJDsj/rP7t/bHY/zgx+ekFfLOsTBYlXeurG7SpqPzhWO2f35ZZTglIQE4R+9vf/y5YJHlFD47MUpj5KcEyEUYLiSPuB8c7uv8T6NUtud94bZx0f3/6U/8JPf+OYnq3W9SeIfe+PK//Arv71hg608KGp/+9abv/Ubv3tta3eUDF4KvlqJs8ns7XeuN7Wo2mrzcMSD7NlsPlsXG+Odh09PrOT9Xtat7bvfeV9VRivPOJvPl3vbG2kaWtf0NsbxxqhT9urVw/ryjDoMLFlNZl0rAaLCgCiK9vd3Hj+bBr1oc6O3mh5HUUCDYLi/j+Po5PgyxKC3ffji6Vxb31wsQBDMzxdK1KoVUUhIoaplCZyClFoCTN0hYzCwGLGy6ExgvVGNEHHajwhtO4Gtd+3/l6r/eto1S8/7sJXXk9/8fjntvLt3T8/0pJ7BDBgACCAIkiKKIpgluWSbRZfJUrnsM5XL1IFsy1VysSw6lYuUKAEQiEAhAzODAQaY0NPd03Hn9OXw5ievvHzQzQP8AevwqvteV13372ozEttGGqOQ0x56BHFCWYYJJWA5K5RyFqPNgzSgtKyNckRJffj8vKmLpS8rKb7+1//+0fOPVFMMtjf3B/33/ujb5WLirqSvcNTtjddS0KqHz88d4zhZm9d5ylMAcVm2ytpltewMYyuBttKouWr21jauLxYnsm7Uorn4/vvTydwACwiW1td6JcPYa+CI77HUiEq2WkPlMHDWQaB046yHI8rtoijbnPUhtKYtLXBEG8UJs85oqwQKmlYZ57MoIch5qRALDdQBI95apTQHHqk2TCIMiXXQUaaEtpBAQIAH2CFl8VVRjaKoqWaEI0iYckoZ67SEmGGMgdUeYMBDjLDxyigDEAkYjQJmF4pR3BjJkLfOCaswVOtd7r12UBMnVeOBZcjBshTWsaZqFaa8Q2th/lyqSMt2cTa1hG5t7y7LCmGilQIOtS3aWe8F1EQImLpmTqfdrKkbCoK81VK45aPjNELdmAKv52ciDIFhBAj0/rOXg/WNN7+6T0zbj0I0zGBnrbycZDyKexuUGcjQSkvGMU+CF4/u04BsXLvuGwCk7Q/Q6eFFtay53bIoIIFY1GeEg9F6f2etd/Zisri46idpsXxx7+uvvXjAO2mQL2eXFwLIyDV8tBk/O68J6/XX9imcf3z/R/v7e6vVvD49WV5U3c3PWCk/fvc9zq20JurHDpar6bkpc4DkxubY2eT9+/ehMnXIl6Ya9TNRm/3Bzg/f+1E4ZHE3aGSp3AI420OK85Qk4Vw6Xyxub3a/+8E3B+t4817vGz/4Vhyxf/g3/tIv//Kf3bnz2uPDR3EQ3Ly7drloPvvVN9/64cNZG/1P/8Mv/eN/8k//j/+H/2t3owtrd
3z/ed8pDFBb18v8UrX7JEk0sFkcdYdRUxwlg/HO9uaTp5fMWWsb5GzIwvH27WwwLktdnjzBMh9u371177XJ5Oj48mTQ9Tiiox6riyuHmrKZ6aoNuttrO684AVeLp3sHG0cnZ0m80SwmOoCYUyVgtxMD2t9a3zg9f5QGaDjcfPb4+UdvP8yLAnpHSXg0s5vj64PejfzqmW4XLENny8u5WHY7A1SZLM0asWraZnp1fvPuXqDqaT7vDzqQMumw4TTOkmp1booJVn1MMeJUm5UN6GADl7XOV2fK5CEukLDleRsAtJmM8zm4ihkPtnt9AmBjFTufyleuf2X7OhXV8enzD0pZGNhGIekkVi7PoPFR1ssLiUisraRhWEvETOBrKRfnynqsg3xVtlWRpt2twaZs9KcHaFIBIhFGi2WhrUuyThwEqm5l2yglRsNeZUR7VQAElHG+Vca4Mq/WRiOpKsZD65FzVlgT8oBxQjhDDi2XK8powAkhKOCMQFpUrVQeA4W9Y4Qo5SAgUigMeNNqTiEE3gEvbNOoGgNIKLLeA4+aRnmrjXWUwpBzZzSA2BtDIJbCBQFB3gNrIQRKGWcd44FWxpjWWwcJcsbNp8tGSK0w9KCqLtd2x41QHkAMIQaAMewRdt4YAKB1jEXGOe88hMA7Bx1IaMcZjIAFDlASIAyB9x55AjAGTKhWGUcZdh4gSJSw2ljuEEAGQWCccgSs2oogbKHHkGCKPYTWekygt85Zy1mAIILQUxJaoIX2ACOMKQQeAecdgBBZB43zmFCMsLGWslCoxlkLEWSUEAqxoQRj6Ownl25Wam2AcdZYDRAKKPeICCkr0SIEnDVKeNkIC7EBjmLknau9jmgoq1yY1noYhVEtGgUMdB5ZUJYV8M45yIMQIRhnoVGaELISoqhaYA2AIGDEKCO0oAgkaRwFgdbSaQWABc61TdUKKbXDGCOEKKbaakiI9tBYV7cthLBuFQIAE2StN41EAFjnPIQUQGeBBz5vGkaIsw44TxlhkFrvKSGtUM45hBCjDADvrUWEcoYJRkZr5x2h1CgLoNXOEQTbpgWIeAiS7FNi17Oz2d/9+b//3d/+HlWL3/sffs9U5NXt3V/+9m9ub25f1c2pMxsHca2K/Zu789ly0Fs7yRdHtszCwc2f/onffud3kNZ31vgbb7KHj77jOICoSyESjUNQBpzEAcs6gTRM6vI3fulsf+fOf/+vf/vvRujVe7fvfeHz/wa9NI7OT/Myt5qJjRuDv/q/+plv/9EffOlrr3YDUfuW8m7egma16K33gacIWa1zqDSGoJcQWpTnT87EbJIN+j/2l1/7nV/+fnH//tdee3P7n/2tD777zReLs5/7B19//KP7h0/m4+0s6CVKyWfPrlptP3e9/5M/+2OzWa+tt/Y2NpfVU96BAbS9YQpbUF7o8xxVywb56WgUYQjjLInTOO5G4+19Dps2P8MezM5V0hnuvfHV+JtXDz8+nOft9TvrB7fSFw9XqxertYAVKW4VCB0MCNI4WFF3b7dHK4EZy8+XJqa/+f382dnJ7e3OV3/8C2ePHh2+aHa2e9bKZSVZ2OP90cXldFUyiqkS9XxZVjUOel7lTuSOIoAV3OgN2pZ9/GSpiqtSqZ/9+Z+599qtnUbSBP7df/rX3/3md86+9+HRoay0fXlyUbkCKhek/fFA7mz1T5/P9OlSyWL/5ut/fPpnpbGaxn/rP/q5//N/8y8t0UAf/t6v/Yuf/0f/RABv1dbhx2U6eLM3+FLoEukwo4W1KulGdLx5+bxlnXGt3eX0QWc0YtbWSpizx15WFliWDYJOMr14lvUwgioMmgpOl1cLgE1Ih9iH7fwStG2KkrVe5+TRNJ8jOUd60nZZPP7cT29/8WcGJ4+f/tL/hSLKo1E06lzK6dlpc/YiH25wuZo3C/CFN+9ev379+Hy1uNAXZdHvx2t9DBFsiiYsHBA48xGI44Mvffl3f/UHLIghqnc35N4+87a5c2MTgBcAAM6p1NYbL4yEoOkno1kxo456gAmEACNM46YQtSiNrRj2BHP+SbASWaUMBqAsC2sdp4FVFkWu24mvpnMpVJU3LGBffvPzrZlure0/fffhJZolcXj6vftOG+Hh0WWRhlfba72Lq6lWolbF7rVXb1+/e3T8vKkm3oNVUYZJBghzxrWicN7GNC6KnAIW9jLAvGmg8j7GvFTCKjEcxtZCa63U9mhylY5304094ZFXEjtInCpXBYuCKO40Ze2tXDY5BB5DFEex1Q5iH7AYYHRydjbudhjzcZQia33bYmeE1Nl4vV5dNec5JWyU4jt3Dp4+OZTKb16/rnQJge+mzJQXWbjvtDONxpQ1pVXCY2JbLcMo8EqqRi1U3RmFyShtDNA4uP7ZL64NesfP3+tjJstSKhPwJM/nmITEg3xaddLk0ePnBzu7luJhh9VeEhYCpax1g1F6+OCKUZCE8dreGmUhonh+OadJPBilxaryVkUwqYToj9c0wDRO1nY3zqbLEHgaYcjM+kb38vC8uz2E2UhCoEW9OJxxpBUyDjhnjVSCYiRl1Rl0CHDOUut1QFON5h67IAmF9BCAgCAMAIbEe28tUNBubu7rqoRWxOmgbiZhFwFjTVvS0DsNpHFaXhxNjtcH3Ur6pnBhEDX1LETJshZSe6eiTsjNxvrzjx+OBhs8TA9ubT5/5/2U9SFpvdfaWgeotgjTkBBlZRvyQGqfJClEuGltXtblqmEBvXn9Rr8z0AQM1oeTo8vh2mi4tbaclsX8U7i7MQGPkFG6LmoPHA5Dow0P8BtfeSXOgPpIfvjwDIMw7iZXlxPWemGnaxubSUQRoYQnhHlpSoAtpwR45LSp6gqyavf6ruIhhTrPS2Usj4ih2FpLOaQYyEoaDy2HhAVVPcsizgBWWvY6fWSRkY6RsLc20kJYb6WUYYbjJBRKamEXy9X3/nCyt9UHlnjrQh46BNpaOAM8QrK1BDGHnFbCOxkGaDZd5avZ5t6Ih8wo0ZYVp4EVCoNQSIUpwZhBwKMkiBGri7IsZ1kvC9NISoEcRpAQFlMCGebLpvDW5suVAQ2LU2E1cBADgAJgm3ZytcAEdodDK/tN3QqlDLRZGGkpAhZQSlqhAHIAO2XamAUxZohb6mWznCZh6HSzrFarVhStitPh3d298e6NJl96OwsA8l4uL4134BIhAGhdCgTRtf03mzgc3tyRDWuki6GLegniVT6bQgtB7UWet4W+mXYQAMg7ZAlwgfUGExx1uAMw7Ha9U8C5yMdtrRStx+F+2yxtO2MEJ5R4B7AHQFVG1ko5gJjTXrUS03R6kcu6NkArp1pjJCilcqBJJ1MVBqiuPeNpFMRaFp4wawzwQCpttEUQOiCsEtZo5Hw9nQ53rimmLucFZla1inXDdiZUbRimRpsAs2qyijLmMYIsDKPNAKMgs3U9I7TI54W26HLRSuyNl3UlPIKEkTCMvfYBDqdT7RyAku1tBZsjGgbJoBulPlPOJRn/RAXd3miyFItpPRjbphLdfufmq7d+7/d/v7e/Ne71zh68PNjbqWeVViqi7FnFunEa
SdrN1vppiCAchsOXL88/nB5d+8qmAP7B8+etMw+ePaxWs0EQhwGf5CJlAZTVrdtr//M3HsRldOP2AUegkNOf+ltfevHg2cWkhaoixFqJ55O6bGaUZEkc5LkWFgGQtRZeTBeFBMOA9ylolB8NOo7RSlZ3bh1kw/XpwoYRurp6JFox7K+FPCAUNbXsr2/OJyunzf2PnhE8zGj6/MlL4zEEeDkpe9dup2s9j/np06Nnjz5uhbUNWqyqtb01qA1GOOZA1pWQLYWQBXyyKqw2UFuxbBAGiPEsZqo21pph2nEOWWGIsUR52QoYQ6cVgCAkATBGO93pcdEqhGklW+tp3bTRvLIkWJaVpsH2/vbpw0edlHz+7ud+/9f+4Bu/8d+/9rk3e7cO2ga9OC/jrYPtuxvF5cWf/E/fgEFE9+OtnfWTyeVkNfvo/SoLelGacYKLfIWAHmcp8iAMmK69bp1u/KJY9QaJa3QT0A8enfSDuGiMJ5IzyqypmxnFTCB6ouz1naGHxcW8StMUYaedgc4Dp4uyjgKqtStKqK1z1kBIiPcEWI+hUUbVRgMMMDXARgwpITzEAHFMaVnmhCApBUUEGwaB5jSZFyuEAeEUE+CcdQhKbTziynIMwrrKEYIYE6Ms8J4hAqwB7hOKiaCUAO+M9UAbABElgbBifbzJLCtqCb1HHocEUyiX+SLNQgg0QVBZwUlaK2MB9CQolAVLQYn/c1ZRnFFOaByHZ4dPcV2mw+2LRVNKlwY8YBlGjXZ6siwhBo3wk4sTzAKIoBJmfXvbSz1b5mvbWVvUy0uxsU+Ma/q9be2YtjQmRhQ1T7uqlBwlEckuj85CFjtkLLVIVsVqupitLHY0SVK2odrarlrraBqvR0l/WfmIBJNFHic0IPj4xUtvcNvAS+l7G8nZrG1BZJcWmJAAt3nj7tU0OD5TUuE2R/nh5XL+AGL95OMz7VGcdrzG++N4Wc4Z3zBWHD47jtbY3uZu4Nnh9CwaRWfzJdQyydYgDbxTsq5BJ94YbR5OZxdtsa67A5JII9fH4+VqxmgIJX7y4urW62+sra+9fP7tZMv0tw9+87c+cobjAJyeXIy7/bd/9L39a7tdxt5+58HJZNnF7eVl/rs/+LdfePXuL//qr/E0Jh6mnHz9p75w9eFbh88vx4Ogm60tpjOeRWt7u1dHp1eXFWEgtFSqOB2v9TbWnz76U1ELlr7Csi+enZ8P+uF4ryPqxZPH716/98XB8Aakg3JxPCvPe+PEQais7GRx63mJ+oLvycWxrVyJDOusX+iQJBnjDGJzfH62efB5Bvjk6pEoT3qdrbJqo3SQ56eOsZt3bi2v9Ac1vDfYaFenziyTAaxVvVzUNOz1x3cWzYcY+enFNI7Dja0DHhNK6hc/uB++/urNV/aqGk8r284LrS0jBBGbq9XO7R9PWKoW55SRJBtFIX78+NA63OuGrSkQZWuDQX6B0uG+9c1sPjHe3r2x8VoAl1cfF6gWZhUlWBcgTGPCLUbWG8s5Czv9WZ4LTaazyyROAkCmF8tkwLw1SuuiqB1EOEh4Z71azq23ny5G1sYEXZ5dSGUJI9Djsm7buk3ikBKcr6qr+UJqRxFuq5Zx7CGIklgb0+0lURS9eH4eBDSkHHjgnYMWWmPjgBOGO53YWS8boYwPI+4h8M5ZZaSBHlKPgHYtwbiTdpWTLKSq0UpqCIGDLi9XIYsd0FVdO2cRxIxQb4ExRosGE2K1QYRQllrnqqZ2n7gJEEjRRDwAmAjVOGlUq6pKgYCAmGRhBwXgxfmVVo6xGCEcx11iWFEXEABGKAQWAG8haKVG0DuIiccAGgtaa4wBwDqYcE490M5SgvM6N8ZAiCFkURAq2QKMAYZSCKlN2un2emutWlkPMIacBNaiRZ5TiKWxEUWfPHPOWWe8B9pIiCDBBGHsHXReQ+icd3VrhJTeQ28gQdhYg60hCHNGAQTGGEKwB04qzQiGlADrpFTGAQBtGoZZpzdbzqWU0milTRzzNEuaqs6bChCSsphhor0NGEMQ1tZ4iAOMrJBlXhamHnU6TkprbBhEQRAopZW32GKjtdKykaqRyiNEGAswUbr51KBxpm2FtSbP21YJY3SrJITQGEAJhg4YIQnCxoOmEcDbkFHrfUQJQggQaKW23jNGIMAAAmUsBt7aTzhXIAoC761xQGsNEVJaa60hhBhh7x0E3nvrPRbaxJBDACjBCEFAoNQaAEhIhKmyHigHlPk0bn19e+/tt598/OSSYvXe4ytG+V/95//gf5mk/7t/8T/SrQRk/sPL2VrCmqMr6e3b33lHUb997SCON/4f/+1v9tJovRt97nPwvHhx9kfux/7mqyiCvd1X5s/mFuA6n1ITzepQzQelVw8un4xY+Zd/4avd67MHH7/T2xn82F8L17e3Tn40e/Th5PmRKLX44OnLbPvGxfkq8vDDD5+nu7doN0wZRU4jSqOsI1ZT64WBFGoEKA+y7K/+/Z/+vX/z/ecvX772hQMcyb2b6X/3y39IaO/Vn/0Lf/r4/chySGJRKaFyxOxoLdYQrW0k8xdnL1f2xr1rP/uf/cRv/ZuXy2WjkLUuoJZSxgEj82URs2C1oCFn0JPdLc5kc/n+D8MkRthqKzWJlj64Pbr1N3/hF/7tf/d/q5bFg49UmQ+XuSd41IpVXWtPcBhALZ2PQbYWpzfH54/PEox5H3MCPYw/PK0fneVPrqox5/tjFta4Ew150JtPplDUqxaVTbO5sRlnEWhRGMeQBqVu+uvxOMQPPzjrdkNAAwgwjkjoxbf/3e/+ya/8xko3d65tfO9fc30hiGc/87/4maXa/Zf/9S8GjCQp5J2UxKNVqxzHQjfrGwEQzzsdhUr4dDn/V7/574I+377TxSh98fa79V85omnn4HNbD36EEz5EfCyrkgdua3/v/OXjjFM+zPIFf/F8ur63td7vOZ7cf/gg6nNZ5EI3LYCYirqxzGey1JVWS3GqoGidgSAwrNvpDPTksi2ddPbibDYtw7JywwpnS/53/jf/7Nq9/5A0vrn6fbCcxhtrIW7CTPzbX/ylJ0HQUOi97+6Mpufi27/+3utfeP0v/qW/eni5+vjYpVx2kP7cnY3Z48cP3n58/cbOrVvDdz6+nMwRjcft4tx5w1j00TOTZATK809UUJUCIEQw8t5CBxrReOuN0h6AomgDRpOMO8OKXDAWRCGsKuEg9NoS1zBKMEYA2FZoCEAUcKOtcSJImNKepxHW7uFbbyFXvrRqcjbrrg+enU6khRAhjHFt7Nm8RBT040SJenM8mBy9uHr5XAnZiWPOSC/bqi1qbRMQIOqlES1yxErrgIIiB9Ihp/vdREihpQ4iAglVAJWmidOgs7nTW9uzDlGMMUBBEMQsWq6WxFKOY8tsY3TVaspwU7cMMR6gumikMWEWbq5vWCkNZIPexuXp4yhkua5LTI5KszvKbm6Nzw5PIQOqmbT17Dd/8M1/9o//93U156ipzDSK4P/v1//FT775s+vDESZISTXoZFUtZGucaSG0EDjOCeVJsSySUV8pK9qSJPHa7g5iECHTzbrTy0W
akdHeum2lKKsiLzaG3TiKvv/exyHjvU4nqKvLk6tKKhyFxTRPWRhnYdzxSlQC+Ea6+ayKHCQMdEZZK1DZugAw6XjUT86niyTt56tStZpHvJE27na1B9PjC4VQwAhhlDEo6zZgSdbtT+eTpNOJcYxt0DYlZwHDtK0KJST0gDFKZeiU8l4rpyjhRrbGeGvNvC7ikFBDinLlnSEhtRDI2mppkQVBGAVRWCxyGlNtTA1B0OvT0BZl0UrdGwwuLk9D1o87GyGeIIgX02lVXKzvbtAwLKpCGhuEMadAGV20S+GVZyhXYCFwL1u/mr2olWPEB1m8ms0/+PChEDrudspChiwiSW+5VGEYzJeLT1SQdFJAaTcL61XpIXYAQoyVBwDR/Tuv7tz8fPSN784uLxWUiGPZ1Fcvz6+/up9kYZJ1Y4g0cJVBtpVSNNgZHCCCkVZycXUVhkm3OwjjhHHWtrqoRNNI5TG0Ou32+klYFAZwSCjjSQKBsbn0wJV13emuj3Z6z94/9VIkUcxohxFYV/XuxoDE7N3vv5yDFjBICSVBBHkw7CR12aIYO4uqqsGEGquyXmS0nx9OlotcSJf11st2wgDRgBqAeJx6iREEBBLnUStajpCShVRSI6wA9yyhyIhSaCOYBwahed0AaBkjGgmIGAsChJlRtmlVnGFrrQcUEOoxtRSAIDJGeh+WwiBIPMOAECd0tSooJGujIYaaEKtlpZoqIB6YebtcTM8uGhcCGo1G13iYVPPLxcU5ZmCxgGWVp1kPANRKN1jbicKeN364tqYDToDDFIRZZJtCO1mLGjIwHPSQ1oSOAACtcWG32+sOOtmokMjTNN3YSjnM59O2rQiGVsu2aepGUEqldMChgHWadhb1MgtMvpo3jUSYRh3qrLu6uDIeQJiJSjkju+tpHIeu1jTkiEPCUF5U84VOoiHvxDRmjS49Ql4a7z2CBGHotBJSadkyHnhAP/zgwWhuI2IZdULV5UVdVKFpNLJ0tSwAQqPxyEPTVMJLCRBuWyUB9VIsJmdpmqxvjuayrRclZVy3jdQ6jcIQktC6/fVOxNHRZFmVppemysPh+mg8WJPSixI61x4+/xTu3krbH6SdjLai3tnv0Qb/8I//IECwma8OZ6tOEB0+fwat7feTxaocbW1cVE1eLz6zObCmxZhHBN3e3f3gxQ+On7y7ud+XF3XE8fHzOeUDFsXvfPxc3Njl3Z151c7EYns8ni0rrRw12lM6PzzCqgUWrfKiv96zWgvpptNqczMCyClRbl3bOL0wl20hoecwrI0f9sOjp483Ng8Gu6+dT2eQ985mdYoSp6QFBlFijGt0Y7zu9btNfrW12ZucXvI4lHW7mJwtJ5NiuVpbW4viMO4Mr+aa4FHal6vpb924+1nXgPWNoQO+XJTD9Y22KpxWZdsOtjcwD2jMA06sBUWw0srN59LbdtRNy0q7og2CFDqHEGpF0+1Eq9XqCzdvL+ezTkyLupBSiEZp5bSwGNM4DbAgiFLMEznLmVfT4/PxxqA/IPWi3l7fff70KO6t3Vy/V0m1PsqakGg2Jl1qefyzf+Pn6ma5ODvKUnpwa/+jB5MOhYyKVprj4+PRuBfFZFkujQVhFNy++8rV5QpiTBC9vFxGvU43DZhDlFHHKSegR/jlos6yQGhQtWpR6U6KvV35FmsEfBAshYbeAweKRlMEi8ogYL3DwkptnQZWQ2wBBMAgAI0yrdNZkLRaNsYGgCEDME0glJQw6ey0ahJCYqCxc5VsIsRbab0FQZRRwBaNVpWKAAgIk8Jg4gimVptG1px+CtRHmDrpHPAQJw4BSjEQvjvIHDQEYaddyMJiNfVASqmR8Yw4LyWPIPJOKgmRahsJIaGEF0Xb+ffcuk+tItlqniELkafGeaNIk21nWEeLaTNeGzdihjhZC8nZ2YWqvfLj0dpBmjbjEbMQ2KbmPA64xCufOdTLZB34tcHObLYCoEEcQWK91wGTUYiELFlKinke95K9g/3n732nLkrAgsWiWitAmIWqnSEIyspiUnOYRb2xViaOoyBCVlRAM49NNiJVUfk2crZM0kSsVrs725OrE6kkQh1sCYm7yfro7bcm17a26vLCCxPFsTUyi+Hs+H3hZX9j++nLdufg8xvrB6dP7we+Ijjqb9xFjhAnW9CI/KybgmAtu7699rt/8PYH9x/fufdZq3w+rzkjVakcxIgzBOFwrTPKxmc//NPvf+tbyA4/c/uuVfXNv3D7/vMH1VKJejVY3+wM1h+9/+7l5Pzrf+X1B49evvfR+dd+/m+fPXokmqmiYtmq/ma/OwofLi/iCDhgC6E1MU01VaVFTT4taToIgkr3N/a0ItPaQNqP+6ONtRuXczsaj4G8XFYTHjk7Ny8//lHW2dncf60bb4jD+84YHnjjciNMdzTmwdo447LBEmeL1Xm3s9/fe+Ps2XcQ5SFPgjjI4uugOY7DImOcofrsxTlM+0dPDofjrHXmsm3f/NxfZz63streDS/mx/PlSgpHvSvnVw5C6lw/i3kEROtefvg8CBni6x/fPz85Oks7a/fe+LnJ4gTQ3rX9L69qyALDku5icpQh3BbGQaBMxIJBXU1lxRQKfJb4HHLanSye7B3corCbNyxfnl+enUOPoxilnIY8Pbk8vbW3j2l1eZVL5QmGq/m8E0fIw73d7efPLjMSe+y9sCF3pawdDCxk3nq1XPbTZJh+ijLFFE9nU4IRCRGn6OzyrJPFDPMqrxGC1hoCoQQOEhp3QoyRc040DSUeWFYWZZokEDtnnZKm1+vGcbRcLgFiSRwZbaR2nHCpSkI5wdRrDZAv2xJSFgQs7nabMhdCWmA19rJpnDYeQkQQo6HS1niLMQEAhVEMgS2LXFnFGdZCch5nnYxyRigxRmOELfAYUg+Bs85DL7X+hDRAAhL1ssDZYj7zjaeUBCEj2GMOHZEY4QynAGhrXF23WmvvIIIEAE8pwQg7Y7311jkHvAdAKCe90sZgjAkllIVSqkaUUguGIKE0CVMeBMaoeVVI3YQYUE6d0ULYRkgehtbZOEowIMBahDAPmLFKGqNaBSyEEAOHvbNKaR4w760QwmmPCOKctUJjwijm0DsMMfBeGyU0MNY5Z71jSkvonAcgiMgwzYQQeTFVWhOGPKQMY0pQW+fFqiYIE0oJRNZaKdoApNorB73SRjvojDXOdrKUEawdBJY454umhpBgABH01nvRtgBijoi0xkhdu4ZACCxEGDlrEAZFoxZ13bQCegcgYBwTCp21DqNGtyELG61qowgEPZ6WbQ0QgAg66z0EQciM0hAAgKBxDhNMMZFSOO+cd947Sjki1DintVZGc8qds5+4QoRy4BwBEAHQaG2kJwQDAD2EhCKhhbbGOO8NKP89qwiL9uj9+xvjbON258Vl/ta7F//Zf/Ff72cc8ghSl20H2SA5m4iXkwnjSdmAza3hiyclKU57TQQ9zJdT5YPBZgJT8PT9k+1r5ZysrEaeb6fjHevdvCS8pwY/Q/7OKxshXU5mxyfedj8blcpezFV1eUVj99f+3td+61c+rKSv8unO/g6uVp
q3nC+GSgcGu4eNz76SrkT+fk5LbyU5rpxpoi81VudPv3uD3zwTiFasCcorc1Mncj85toskqfDNH8gxkzCY18WwQlzC1JhTKcG5zSmEIkmKMWZUMlIEmpMKUK55DODWI3OgUxk/LofsSUZMHIYTdmFSSIWMJ9tgbFtOgOLazPlTD5c3Bk+nCuGSITDg6PqQINwF0YcErWhlCFEYGInZZCVUOXwlJsqQ/n6q16hEjecXm56KVnh+xB3Ef8EMLTvAsgQXtvgAkVkcIUDuvRAQHBI4a+eC3l2rvHsG2NaIp4WJdvr8+cVKWycll0761ZB0aZIdyaZWitxOQeonJalw/PT6CUwsejN1lo9wZKyqgCss6zM1skJTGzgJPMgoS1MDXydOJUWTpDTvcq21nq1i8v2/v7dS28nFRLhJlF7CtnkIdncI/wSIt0JAkNpb/aUE1yKUXLelrO96d1XYWKJmHou8fzIZmgRELzXTl6YQsdmjIi5tFGYNzakX84CPCSGI59pyk3G430OKKPSTC7FjgwdEHjvTi2XpQRMcRDzIxwRGQ4wJkId0KIDjrMGPCIWShheLgc2rrDgMbs4arcWrdMxNhkhShKLZYG4FxW340oiHrbWyBKObcerZujEyOISMQC4S5JxBRpxKjKEb5vG9JSmSHIXciSJMysa1XNsN7fh12TWfVBMoJiqed923d/gpAyiLhoTU4zX9Zzb41gSE+3he84CemFRi0uimjfG7OeTgsF1VoZkqDTeodk23cpKqV6M4Ei4OGJLOu6lMX9KkF2dUdTKaeV0z321FzKkkW3zz/jD7i8/6qLl/X0JrxV5Z5NkETlxKf+cXv6yn/jN/PzR/r2mwYrLlxrzb5bf3E4vO3XJ5JzIXFvoASTpbW+Zbjkaq0zYSlqvasUFfHuLCJgs50Cfb9qERWsy2mzq/kuhUCUBPcsqbWu2/ZB17O3zuCy3O/tBeDzsl7aVsuy789CYEYV2a/9tN73aJfteanCRYoKkCrKCPcLwmqtFFKX0943FQZKOlRXQZD7wkx7W9ZlJZBbnVPfmZKRSAdMSkWGlKILExn2TUzpY+THZ+nbKFODmSQJAaJ0H253RCdaVJRERFbvl7kfpdTTwncnWoueip4qq7AQRsl3tiWdz9jxtp4hobdd163fK4aSk1l0LdqbcREbQhIJSl2XO/ZGJKpra2093yUvl47A6e/+7h9S+/2/+sn/7+n6TZfyG29gnd48/qCW8/VKn787Xz77Nlp+/e0vvnjz4+/9xu99+/6777756fvr35zk+vT8DcjoJNv+QVNY1ub7k33nfu1pj/c/DkuzfbTQJqW2vQDQmuYmLGVZ945C3n0zZNiH4CaFPKL1XnG+15Ndv2XvKbm3LTL1dAfA0wjo27ac7jM5YaTi7mAhR6bUsix83+xaVe7Pd5fnbzy870/XvZWUeN4eZfn9H/7gD7/84SnXzz7/7XJ6m8HWr3CvtebJIiGiD/f6/ptv7+qJinz1s+v3z48/lPJuvXezD+zL8mbTh95f2Ld3D19c+tOeYddvmXhzYpDFnuTd3bxXvat6r7TDr2u29K1ErkxmylVWzd18VZhda1mZveRSymJlDbvufcvcVrlDtHBHUUK67eQNcbWWtbzt3k7yUOo5UUnqZb+4YZG3RZY9NgiIjAKL3A+BTGsXJmMtHD22D9kbJIg1fKdslboKdWMwmCrLLlyVZeX1JOfWSXU91QU9gCuBuepujWI56edv33yvxvbtx1+d5HQqe/OwZopa9X6z7dKelCuBidZ087gK1SI8Ghf2NE0wUkgzjJiXetYgIj2d7yKYtbDEbkYqvX1A4FzeENtYBREJ2IhXx7SczCbFzDo8LzEVHMwJZES6HGXcdBvaa88c7a5GL05M844zS0QMP7YMFREykzzSW4uEEAMU4ZkeGQSiSNAItSUdrzV0zyTW8A4iFc0J2xzpEaayDH56tKhIzHBVDxcWYiGmEeHskQgXESLxiFpqJNJdVT3cohPzsHZlNoQxa+QwlJWxsxWF9e7hI4EsvDELk87rAwc8kcpESGsvzEIEs64qAxYzweFMcE+PxsQJ4xSKUBZ4uFvRarEDIcyB7NYSKSIs0txEGO4ihSI8djYP2JByhAcBBBKWzDgtq4d7N0dUqWaRwkRCCfMGYGxjrDvSM4gSSqBIkLV2ZVGm4unIZBX4iPeGgEWKmyWPAPKYHYIRPgJ6YrzlJFladCbUUiM8og+gO7x+wqN3Ow57CM1qP4iYZq111N2II93cZgWcIDqDgpQ5Mz2dmaoqMffYa12ZSUQoYLN3XqYHq7j1osvs6jZ2REKRaXvTUhbVIVEz75V5Yg2ACZ5OBJmFfghRclJOPQgBIM9MHdOHYNP+AiYK94GomEZS8w0fTizph1iGMZ0ws0w+WCrGJy+vZNCQlQ1D0wyZZxo5ViN7ZZyhj4aGh8ZgsFrC5JGU4KFrH71sCESIgEUIzQ4SjFu9ewSh00GzjNyVpIRM3JpHI0E+dC44GpUnMgdVh+PFDMLgcZkADKOoD71VTnHT6DFHOAQpOdiqITu6KRMQRIzjbwHQQc8dWP6Qq0xwGTyay8wG2wkaYeLDCZiHjGPCx0kpfYJthzYFw+73Kmoi+oRlGGXkwdeMH4xzOqrFN7vLhL6DpqPptzrkOuMEcp7/pPIO8oSmYG0Mb7ySJgdDMcD5gC43juV2aTd8/AmfML967JZG9j/olgaMW+T4DcMfA4QxSwfPNkhioleD0NiRDRpzfitwc+/hEL4cVMbk/G5Q//iVV7nIEf2CgxdFIsE5ku8j4Q6VqLwZotk58pTJPto1ISmdZBgwfJIVLIkEDV5ynM0gZ4ekdpyLH2lD8/7QoeXJg1kDQJPgPc6XDi6Djrk8eZ3jOAfFkvN/Yz4PKdTcvebsEfdrv3jj/gbnlHmjRPOYWzmVDszHmFMeN4yADCeacDGQbC62U4vLz//kqz/9Pzx/89/0/lWRj8/fyMtX/+vTZ3+XikAmxRgIGglQwEzPTh8XOWVQk0ya93cysL82bfKgf/LwaeKgXnEM56/N2YNnwrE+5n/epgImW3wQ6gehiUFf3zRG829f19O0hAJg6RYMEhYPBhF0NXeLUIEO3xkiuzO7gXpmT1hyQGom4CISOU1PCGR0xCiwKREJSxKnGzKQNEhLz+Shxht9RkAZMdbMqLoxDjkJUkUxcqZgOQn+2S+WIWGNZUhjQ0nAAmKzIBJWPj/cWxbmUmotRc9r7daKalq8u9OXiA8GYT1VvT+pSF63PRN9N4s43dURhvP48PB8bUp4OK3KQIR5J3CCLm1T5sgoZTlr+da7ZL48P7uHLsu3zx/uTif3jN62jIggRlVBWPZelZeF286eLkJLkSRoUfMUlVIFLTNyWWuEi5AkJaKoRvRSdCxsIRZWJGjERqWLqAp9tCeDVxYIsqXKPaEM8oOYgCA6GjWGUbpwidDAdGMjnCGACQshBbzIsrKeWAonz/CymNEMSVMsREQzzHrsFmlEx7DIKDfy8Za7/T+HVCFyMDigOMLQAoTRcioTnsNrO1bKOCQ
rUyAsLJIAPuRzI07TiYmLwnYWMDuRuGePDKLmYOY5iQC8GtAiem91UWa3dK1rEpGmuUkmoGlu6Wke6aUya4m+B5HBtwwRFaV9uwCizCD2CBYNJAuVUq/XFxJWOe3twsLKsp7un6+7ao2hcEqP7GW597Zbf0nR6K5EWqQli57TWuw2CmkswUroMd7xCPFwEoBTCxHEIMRci7Jo0UVAnBrNm/eyMMSKIrMPa0XGDIW1ffPeusZM6e4bJCyuIJJS+jW2i5yXxwjReF7XqLqp0pabo5MUrhnW49raXqzj47f4nd//cjt9/fkP7371cachl/MgoDtXvhctrY3UcHl6fimrgCFFUyClaC7WW2YRFveI2BA+rNzMwSzmSSSlFHMT1kBPRLCXuuzP1yEJVVmEpGo062kXhHdLZPfslgwR8y5hY5/VW3P087pENhaiMLMuhUlVSDKJUuAZ7IUg2ZGFuaLva6FSuUoolUJZk1VURECRPSjJR+PksqRIuCWBmReQXm0xs+fNtn2gAGLSsrTcxlPcfZQpCyCiJ6FM7xFBKOkggdTCRSGQRWUpUoSEYzxGZ+FqUkUjC+N44dPxFuFjfc0XqXevWtN9EYBqffyyd1sKEIBTZFFdgbIuFUCGS9Avf/Hn+vzyH/zhH3/x9/7hx8vzX/3Vv12K3789ken3v/zBT/78r5du/+E//J9/+Oa79vz0z//r/+rLz+9/9Jvf+/qu/vK783fP688iLv0JLIs6OVjlBd72D5zw3uTOkjJ8F0oPR7LQbFEZvRUWXpiCcvMqa2+R/l0RCoruLaiuer4rd8/bLxTeM5i0aI0Up1xOZ7vGujyscieJp8s33rtU7WEW7e78aMKaRObkaW0jz7ZZMq3rwpE16Xfefv6P/vCPtg/2+Q/+lpze+m5htp7W2J4TVk5S6tL2HtYXSmX20B+++eIHRdOjZlxzf/Ouvnv39k/++pt6p8xvvv72KyEhb0UClCZhhD0byLoTpRib2UcwkW/em0r0/p1Rkr4NCWstoqfvZpeyVERRfpMkveX9+pbw9BIvyl0poxTPvrendZEkWOwQQWKRUxoluNIdRJvvs90uXFUTvduL4oEq774RR8DCm3B6u1Y9RcHmV48C5iq6jPGTpWdLrHdrRaSHI6o5y7KAPeRC6N5bWJKy43K+u1+pFKa1PryJcJQn+4ZiP9XHMz8KnYi/RfEktsgVIsIWzqSkwqrR956A6EKe6UTNHZlLUAprkXptGxE40Vsrpa7Lsnt3e7m9EkTYE0wciEwolwjPtNHFhnJofEbhgj37oHdHHQwRGO0FMwOWmSoy8liEyLwTMc0eOuFBxGy9D0O2ea+6DCgVkRFWtDCFm2caiIWUmSwigUh0M3AKURHxcJHF3SJcWIQrgZhBpDyCg4ffzUxIhNjdii5M2vs1x/MKCE9kpkXAEREoQxwhPBXpvXckujVmJtJwQsK8sxaGglxZEMGUEebZc/Azo9fJgO8Zg8k27wAopokUKsw8+puMAhIRxdymJIGFlDCqLCDi9J1JhACAmQopIknY3JDJXBM8eoUkeEY1C2WmeydCNxvC58Gnj2SskbfNUPMsrGCmFIz2JiAieDixZhA4iDIiFCUZ4ba1XUAWPbIzqfJi0RDh6WNl5UzCpkXXQC7Kbn2UrJkrD4xOFBFCMkQMN7HBqKWOXINPXReJ9NEiasCIJMmR1ZsWQUkRIB6d11C5WrepBiMdonAghQRJkUGsrED2sQo8MXMnkyhj/EFFpuhmOAYSEZkjFpxpJEuOnCABxQipiRASEGX6AFADaA7VDSEFs649QT/T2AryYdo6kghmU4YBEOKQAYxivEdMjgMUh3hkwC4iQoYwZwx/49QYEIFHSG0MmJSUoAyaBMWAThNtMk/tjR86mtFyZW5XB+c2+ZQD/d+UEIkpAHqFrziCuWcE0iFsoAMcHoHWU180eZBPDnswKAMlH0KMcS4jG2hOlYPJuol3br91HC2PdsV4JaQm6zV5kJvg6ZW6GfzP8K/fDhY3Juegw45ebK9kyo0aovHzgw85asFDKHOIGw6C7RA85SGF5rG251hMnmIM1S3Oekgjc9hrzYAkpgHeZ4Q5AZnMfNyBSW7dqKNPFFLjSoc8e1Js8zvoRi7RgbWT4APNy2j6PSdMDksvTVgbNAiDyWQRZvu/4/vjoJUm95bHqR200W0ciQ7tyY1XACJ52MwSlK5sS/vafvFftv192Be8/BHe/BFO55CzCLOSWyYnkYzg+EwnohETPOgryqFKAhEC6Xm74WOBxJy7B6WFSR7h1cw4x+mQydCnM+k2+8cQv3Id8zvoWFH56dK6Te6DgbtN7lts0PzmYfLDcTm3STuT/hBxI0icPNOkm7Rmv/iTv/mX//j5F//tdb8kXT/GXkrs/+bPly/+69/+4x97fVRRIJBOR7M+EEVGRiBTprrhNjaz//aNpzqu+pPnw6vcalzdMf+PtTCOk4dndn7mRhi9zpZDpTeWEh1E0e0ICYy2lXR8TUxqdXLkAABLiFAyGWWQMykIycLQBBocICUA4fCeZJ7JJVIjtRmtwrUAhZZVRjE/nSBE4YQyq9fEoBipcIgcjkliDnAig4hAjkwM1sMjbOwEwOQIAgPkEWCydFUxYKQAC2A5mAYIklgSRFQCLkpZa49ooSoVafWytK1E9KpaCjz5w96vAFGQUF3KyrkIi9DHj5fWOisxqUX2fe+t71tfaqnLIoTuTpDw8JRwjwTr4F45KO/fvn36akPKx5dtXe56fzHbmYjGMyCTYnR/Qka4h8Ol8uLFugkLUYpyrRoezFJq9QjPVlnRo7BUrX2gThYIeTRP9BZrWbd+Hf3GTnSKbqS8bXtYMOXmLcl6s952RHPvlLM5OKUzFUpwprlnEMXIAxGGCiWBVGrl0V8GAlamnDq9GLy5ioiwkBIP9tmFVGVmnPOUHCEBRo6G72NtH23ohiRKiQSD+Qwf3SUTiNlrdRTNePjfDoPyJO3D+rHGjzcKpScjqYDELSK8GzJJmJiVKe3XDWhgkXpK6iwkUrZ9D0BEwegeAluWtfcOgYj2YZOn2Lb3zoVr3XwnN5+KjVi09n4JJDJ6u/bsgECYy2khIZD31psXKYzs3ZGA8LrcvWybaII8mMu6eFpr+1rOpT5u8RHpbnE6nd0vfbuMhjWZXJaTNwdzqUt3V133/cLhysxazDyc6rKgICVRxDPq/WNiB8l162VZt96UUypHhLvDvZ7OqRKZbhsRVz0FzJ2//ebl7j6+eFiiPD1+T/tL9s7394/hOyW33s2bpTBrj+XnP/tmWfPuXZ5+2rerb9suImVZ+7UlyFM8WnpPYl0qs7pfSeh0Om+bI40YbetMtK4V1DN5mK1FlaQwc2aEcd/b6e7eWgsKSklnqerBXBbydr28JFnAEcRMyXHdn0i59Va0IhFmWpYkiQGMktNSShVZIwKEAHv6utwxSyJFdK2L9Q9EUQpR8FqpFNIMiiy6wH1EURCpFKJAZGNhMIiZmLEuqndqTD2yuwSBiEq1Hgl1r4SgAsuezEV0i5AiUgTRQIhM5p
KBsrBXQhFaRO8Wvaujfx5PWfCQYdvIGSHiqWmeCnnLuRN93TeCaFlX1sJc7+q6u/VMZmeEQJRXrnTdnqSUEfRRC+lS+1P76a/+de/Pf/zv/fEPvvzi3eO7b3/5E9vf+/OHqz/9+AcP/XL57if/4uHz7729e/xH/5N/+Mu/+rO3+fTl3Y9+8Fu/+/Nf3l++5fv16Wff/OzLd2XbPuz9hYmft8vj6fHN+QtNDgomSkbrrsZM5GFgdt8Itcj9Sddt34Vrh+xhskiYLzgxnYjKbt9d2nNQKN+ty/f65ZmTHJ1ZVRMbb7vdvf3C+naNqIuU5EWYM7kjEFUrebS9gUpzWLP7EneWf+93fvsPfvQHSnenM2t57N2QXUumm/uekboOzhC97cv9KTO4yXk5X/d+fb6Utb47f/7U9o8vL/vlWih00fNSr88fRlH86em7ctatX/beFHxa3ilKWHzoX3/xxfcuTyFlTd12+7jIO+UAR0dPgkit4GnSLsu+P1cB2X5t7wEeRhfW3Pru6OYWHoucZTn5biepVRcl3fp168+h0RE0QFDC08j7WteTqrlfri+OkLJEkkgJRDB1u4rTqawVSuDWjUh36xm6CsiumihYo9tyAmvs/dL7VZgiWahGgii0RC0O6LLetb7TtoTR+u57GqygJarzC4rYpStBGb29lNO5gBaCxV4BuHe3UhYOqipIKjPdJVhSl2LdpSxO9Nw2lUJa2taOZYCM4Z2nmYTCDCgBmSEiHgYQgzNcWTwTRJ7gjFuR5PCvBRFHdEqEu2A42pKYzTuzWsTojUVEpRRhAcdRPqQBYVUV8GHHMLfIFC1EmsiMOKrriNsrc/i1SNLbIB3cnISRriyO8DAA6T3JiUAyFVPIYHDAJzdCw9wkEeZurCVBSenhxCMBlDJdicINM96CkuCR7qayIGL4fmK0pceIsQ5hKAuOki+zJDh4bEJi1JbcLRNF6oTf4d3bgWcdxCJqbokI7wQWEk+T0VSLsWVXlkLCYLNdVE9yau2SIM8RSs4A3I3ARUt4IJJVwPAcsU0hJJ7BLB4W4URpbkXXkYFSpWC03MAwvxBAqisz995p/nA8riUTyFGvtkQIqw+ChoQw0vyDRUDo1hJIKGV4urIGRmM9NjeeDeMcGaOfL8AqxcOJ0sNGfxmMBnkAi1o6BUgYidZmFj4oPJx5uCCxLgvCx4ZrboiIVJSIiqq7eXNMEpSG4jwyBShcCHFtYaObXA6yas7g8cm5pjC5Gz4g48jwiAxOGjzdZCQYt9SjUdZgjE0nKOKwdREAz8CwDbMERhIBCUAzPjyBkf9yhFsTHXraA23hpifAII8EIKKZk0BjAzqZhFuM+oFYB9cwXV10lDQH3hvSqwFNh4Ei4tfMTYybMmJwKjn/Zhw6Y54BJhNxo38yR7w3jXbsmMVW0JG3MqWKk+W5Od5uYPOmxAHxaAk9GKtglpgX9grI+RaL8ioPmpTHZPbiMPXdYnswT/0mc/hkDhzfTvNJe9Azc2Ti13KXXgfsEzZn6ommZmTK1G77mLwNobsnyDx6N3dPT6KDmJzXPxiExC0153j4fnLKx7ScPqtxC25kwxEEc8xLTD3JED4kpnTjSIo+7unYiB2zZfQDPA74Okq3cZxupjkEUwEyQ2ZuRB5uU3qI0mV0HKdIEigM/f2vvv7T/+ybr/98+4C3b36zPP4D+cEf3//4fxGnL03PpmdlHWEHhBAOAMQSwC0qhAYkI8pZYySe7DAfUpZDu3IYm24cFqYRc9zLwQmOtJlDQ/XJBY+5O1fKONa8dBw0JG66m8yDWHnlF28kyu2j07FEnxBtSePYM+8pkzzZA2nIMH755d/8s//dL3/yv9/61yvXHbU99+egtEBvP/uz/+qHv/s/Xt78cUfKZLEJI7CPEYnI4DHF5kQ91HT4hBrCMalmXlngRv8cJO9Bkr1OWxy79sk6H0fMG5/46VrM4/smB3s8p44zvk3om+LvSLg+vnKkxXuIcpE6+D+eCy+FC9cFCQYhnciBdOJEaT1rsgEnoUGnMTMhLELdnSBsrIWICcGEmWyDOWF4PpuHBXF60BJDlOIJDwqCTh3XfOuMdmvMhTxTGPAYcVKWoyuiESkxqXAEFS1LYXgX8hQsrJtoWe8yUjnXRT9/PF+edw9q4d+9XO6LmjsBPbkl+T78V5St7c3oICcCLKUySngQmIuqCAlvvS3npUaGW9W6bbsQWr9urYnUWk/WN+smIosuO+8qrMpalETvHh7Cs/fwoPSQlU/L0i47A+6+nM/X1pe67G0T4gKySM/OqJ5AQLKk+enu3M0Z6p4K7tu1a1ER1bJvVyJvfScuoNp8ZxJkRKSQjk4oNEtaNBsae9fRTAKq5a6u61q4SjInMc9ItLDMHI3dmVNGl5fxjhZhmcHVRJREAs50ogAIMWjUCPjcWc5KxYxZiyGJH2shA9OqjBh7cKRi+HJusV453lbmYQEmSYRZMI8W8BwevTuCgkhLGeUvkrkMJlWUGX3fSXOkbdHN3e2ezMS8RwdbBiK1e/SwRUtQCMuIryAS0Zrmkfu1OcFhWOrao6vWCNrb1d31plLM4XOPZTkxuPXrNa4eXWphXXozz86sy3ofve/tAxdQVoyNGJiRImV0wuvmWhUED49o5gzISPym5CKZRDNzLJiIORyZSajLouVEkM5cRmBwz9H1JuDmTbBEZxBe7DmBZb1Pa60hEV/+6Muf7R+kt+vOVXSzrbWWcDlJb30tuO4o/LZvHx5/h9ezZVTzc28wRSA8mrUoyxLoFp0RAFVZnrcnoZyYro52d1wKm0OoalnNzHwnIhKyvhNzqYU4taibLPXefBOpqqXvW+99WU9bv5ZlHSWWLEHZSz1Ht95a1ZGQLkmttaaVY2Ams0hTraLCVEW57zsVK8pCjUnKQkIsAkqwBqlmACLGqMJCZeyC050FssAzKAi96yqkVPpek6Qj9s7dKEy0yLmSLhYSEVu/hiUJNTQXgnBqc99E10ztYQQSIV4UVfRu5VPlImVdROXY6wSRTi0f0Xh1x8wliSkxTPv0PcQsWqTt+129Y1IGsXsVfVhPvoOILBy89qRaaqXabAesLsylfPP07b/88z/9/R//ne99+RvvPvut7b1YoH94f4rEy7U9v2zUrqKP7x6xfwFrZftqufvsd959fvo7/yGKftu2VS9/+a//5OdP/6Zyu3t8eP/ddVnvltPy8t0velxamK536bjuG1Ff1rcstOjJL3vy7q1HUz6dlhIkxsHpkZIpfvEtyhCexG696KJICYd1s06o3du9fPbw+Jauidi650LFexBLM398/Fz2D3uTJEqSvO5fVP2j733x9/7gD5keeuO7t587CdKyXyHBkewX0hMlpXl0oyQlavu+cDFPqji9O6+Pb3729cd/+837n3/41cPj3Q/eLn/51//mev3wcL/u0V8uH4TKtltDiqy2heF0ljte4uP+Uw6qC313fX5YviB6Y23j5epoe98y23kR4mqwnmmaJAt73/dLNzNIPZ9229X3SK96FqGgvXv0Tgh+LHVV6RYAvDuxpCzKS
E+zbSlV5Xy3fCFAz525MGWPkLoGp+27h4DOwtV6LKUiAPNlKRC8tD1gspaKE/FpUU/frterCta61vXh5Xp170nZ+77H+yqn3dreL3UFr1SEelyKLsILRYZf1/Wcp6LAbjuXc9HP7nhptnGeNBux9PBwXuXsBjMXXUGLR/JsNraDWlkfntv7cCrspHOrHhE0AhrDeYQEs9DIU6PhOxqtcSTDDngw6nMDuGYiWSSAoMOzMHaozIKpphZRyolSLG3hhYn7EEgOkDzOxTuzDkRHREQihIwUke6GDFYFJImQDqCMnlxp42SQiSQtpZuRkIcnIKIZPlI3+GjL08KYNJEUEC6ZFu5TKyxFpEZgdBcZ3cojDLdWXInBUxlitEQVUUyl8Wg6MbpZEeVodAqm0fcWBKbM7H24wDKHSmOcZGQGC7tbUloGsypruAMUBMx+veB0lpGLqM0bAatOL1hQEnNExCBwJZlgFBlO4MIyFEw26LMMkZKeHl5LzdG+KIjBnsZUq9aBTauQeWcRHA3R3f1cF7cWbkQpxCyzv5WPmDpQhqcwUbZ+9UgQyRC9uAHi8FE/ZuIMmk5+IkJajEKHAuluoxvIsOQiIyOYhjWGZ9PYTGKqqlJPmu6tM6UjCOTREykkLKCR8gU+L4/Idr1uN2wfHmatKjGFMjtHEHl4TjAWwjx4FHcHc2Tw7KoGGq4zpEzNzNBOzAK6THg/cR8DhGQewjrHYKYnc/HKqviIs+TJyTCSwXHAnGnBGlj6aEI/8fb4/E0382tUxEEIAQAFgo6PDPH9qLULTwMo4bAPDcpmoDzCQWkNYpZyskdj5xpThnTQW/jUQjTqNwdgPOTAOJRHmaNRYB5mrLkvpYkgh2lr+qxusdD0euFTF0DEn5pxbsBygMPBNkwh1UCzcXTAmIeavMz83bwZ3W7MxK8j2YMkole8efsT4dc+8Sktg5w0y/h7esXTefudgxPBQeHMu3gMaX7yz1gag/mf0qwcT7ApXooI5k+oAxrSi08Gim4neCjZXs91DNa8O6OJDx1XNDRxw3CWARqBFFPyE3OqDrqNbkvu4EJuN5hvQTR5oyFfBzqPm3GjGz8hyyjjyFTyiGSV7EH+4Wd/+Rfvr9AVX//0pw+n98vX/3T9m39S1x8v7/7+mx//z8rb30bRicUyAJ/jnAkks8RcBseVjhk0ANDtq+eo5dE6LV/PfJAMx0oFHdbuW372MT+O33rl526yIkpkxpQb3miyvE0c+qTz4K9plQ4J4HhqxLQDZhDreIMRkBHUWfan/Zf/9F/83/83l6/+otKTVf7w3Q5bs1EGXS1PhOev/3T76v91+uEfdBOpREQZn9zDY/4SiGf62lgrr66zQ5/2etl54/4+HchpW5xPkXH8166CRyTp61qZnB4mzzp54cmv3hjIeRduNwbDkIZjO/M610fv8kwepGNiZFozI3j0GiJm0cih7hFCWs7OYQaiyIU4kEmR4KRMUgsiBywLOVGICjiBkQ+Xx0oOpkyAOdwDZESjM2qOptCDA0iSZIpwilASwAejOVo8Dz0SMUa6RwYRguHMsAxHLJwIiYhCdCrrhUUoy6pu24n4ofIXBZvDzK3n1bH1Hpke4+3NW2tELMqkZdWstczZnywqD+cHkVKrrudFhdP7aa2lFhVlyq+/CxEiCmFiVVCoak1W1Wvb7853tejWGiHvlrpIeR9U68k8iLKKMqUoE1ACK7AUYWEWDYRhNLce9bAe0c12GQosAjNdt+18XiGp65q2A71W2bcu0Ah0i0Rxt8z0KTuTkaE29ITM4kNIK2QO1cLl7nw6r4qqWWhsCUc4YY6wRmY+mupiRLcpS5GhHiuYXmzQaw9SHmmCmXErPBASo6o0rNWJ9PEInrGBSRlgIRJKGc3XwOF9rPRMy4RHeFKCIqF14aClrOfTedueQQg4MaoWIYqwON6rkypyDl3r3q8wAGbuogpAtCQk3MuigrVZYwqDCQ2BCWXs1luGaymtXxHGq3IRN6gsZq5cKZIT51p728CjULcKY+vPGeZWekSA13JOGLxbNLOmupg1ZlnrKbKrlrZdEdENRSXMw7ywbnsXVS0rZ1auBPbM03pvfrUA60kF6Z0oOVO1wLwH3Z3unWqGhfVSC9dFYEnRMyhJeR1PMaU74hHjEt1a2y+cVJeTNb58c/3Rj77YH/vHP98AUS62d1koKEVYRGzv1rie78LtzVuhELNz+u4IVtTUa++BE0QFeT7Vy8sFlOe7CrhUUaAU8ZbIRJiCrYWJme8BY+Lrdl1PJ5W77eWZmJSUM7w3uIdbLpRoXNizFy5C2t16u3JhZaJ0FYpMIbaI1l5ElYghKrKUZKY1k891zeixeylcRZel9taLcqnVup+W1cPCOpH63qCynMp4xHX4SO/UE5MSMakoJQlQFuFKizojpbJ7B4LOmlJShKoW4m33pQO7tL0z47SsKsxpUte9NRKwpKhyhSxKp8JnlVOhUogFYBAP5cIQPAuLj2jto9aTr2+HUUeYr4TwCEaVgvTe9wycShHiNUuKQvPD/m2EJQioaQGPTMrwsurDw/r8/P6//5N/vv3u9ce/8dt3n325q+abL+4fNNvLt3/zFa4fy+mtt/3t997uH5/2q+3t29PD2x+8UQv+/O1nlI93P/S3i/zlt3/x7FKqb9k5LntsZnuwKD8021SSQyRLWU4r0NKu+8t6Ou0XhnWghWdmgJHZ3V/Cr8IASQvj2JQKEwvLtRvJwqh3a7lcv+mtc+WH0+PXX3+z3r8L98u215Ne9/ccPvZqq9Dnd+e//xtf/N4P/9a6vt0utt49BDOiUXSkM5NHUCk0lbYFwlRAnEVj23b3rOfiRM+Xp2+++eq67V9+/sXbz+9//m//4pun99fINXlrjZgj+HLd5E7abkvenfXdw+n+4/6LiGzbJWl9XL9ccWp0ffYrfIPLyvcCUaaP12+gYKyX/SNIHle9Xq1oGbukolLAC6lFP9dlU/Te4FyIM2zv0PpQgDsnJ9PCIEr3QqwoJz6vfLrs75u9SIH3ntmZTx4UDAQXvdvsorIsUpfUFRHZkyzUEFuGF9HI7wK14dIRq7wtpNj1RCc61a2Re4TI7l0EC4dHS96f+teP3InvDa4Sp3LPUd+cTpHNXvZVz6f6UJNfLu9ZOOzCWYPEQ8BM6Nb3wgUEjx45JBV9PdWWe4St5ztvnV6XRQ4v9JDnjd1ezv5cngAzK+ugZtx9IkU+wkaYY2RvIiJMSZBZuHj4gPV5mHTGdoxZpnt+7L4zcvQKIyYikRJh6ejuRYqyxg38RDCRuw8L+KA1hhlptGZG+vD+mDvJCKIYPbZgkYEg4tG1jZk5eZA7EZawEXcS4e4mpQqrkAPUfR8v/Ll7HRr2zFJKt45j3DJ8vJGZiFBE2MM8gohZBLPgj4FgmQVIBovoeCCZ99Ex3dxHR48iZTYzBoN8XD4yWUSIA7N4ZNkIgUg4FZEMEqkBj+hJQUTNOtxES04B1th7mIiGjxjNZr0Jc7KIyFHxFtYTEbl7IMx6CA2kV+pikaDx5AYxhfsQqJiPejyD3NOV
KjE8GlGO5m4eXoiVmKUmBqmQzMrEZh1SCNFsZ5JV1x6RiMjBxJVuTViI2N12vxZVoSXJgjqSMqEizAxvbi3Mk9kiaqkGQyaNMNt0ghShfX8ON3fILcUxEwm32MOnWydjxA0k0g+Rh7mbGVgFSECPXNThduCZVD3CenxaRl47gY16O4R5yMhn2BDzpHzGnjEjxkbwAH08XWYpg5/JtBzF3UNZjqRpID28DcP2MbxHB54aeH6kJjMO3gOZI4QI0AO2jR0rE3nkAdJzBCENfcgQViSSplNtfEXkLcAIR5ekSTrR1GoOquATDocOHDmr/4MzzowM/VQqMHKdRi+3Q0QwmBQ6IqJv6qFBa0xIOpFMTqXLOKW5GcDgmGhKuqZM5ECiE39SzvHBgVAPGut16tDNiHeIIz790ydyo08IpCl4+AT4HiP2SuvN35z80O1oozB3e1bnJCjh7jFMkvkaao6cl8uH8ok/EXz8GiGEw0s1ab55a+nGrOEYz3HmPJKspu3h14iT0bqOiGiG+8wcqZsmZr5l6LjpgUMRdlwsfp0VO66cDv7jyEsGHdIbD2ZwEe+GoOef/9X+9NiucfmmJpXqsMtLe/8n6/lf7r/4v/Rf/h/Pv/W/uvvx/5Tuf8N0detMVDiGgY2YPH3chds5EY3WRaO73zFJ6BN9yxyBudwQw9GWQ/aOg5mcd/LXaMXjAK8//ITLpJmKdNPqvGoFxwc+oWAPNmkEHk1BGDElPOFE6WkZHMRkTtbz+bu/+X//b//qz/4L335FZbHt9PSh3deHSqWT9745yp4ST++/+sv/5uFv/y95eZOE0XcSmcwcFDSZ6kGjHSK0Q5c3VkMehF/OaYbjUQLMfP4AzecDzQudh5hL6yAM8col/fqaOtbmuC+vzOexDn7tP2+PgmkYPI4wyjqZSTQaRNJ4w4FEJEfzKiZKyXDh0cL8tjCH/pFFlDl5RA9SRqAnCE7IUhSZTDI4bQunY8bE9AfKbDMHALO3w1yOSZFEJMLD60MiGSP5moilkEhKZjoRKDyDIxFkhBRmJNUiYDdjgJZTPLQHb1eiBIuANHEmqpUN6N0Bu1cOULPc9x4sq1aphYoiQWZFuGpZ1spallKKltPp/nQuYSYI6/u2i2cIy/27t4+nFYz0/nzdI2Nv3loudVAdwrUmU8LNfelYmDljb12EgXBvLJyTWiYB0huVSoBFeGRR3fveeu/WWahb51ou+x5JvQdTieCqlQME3ntflxOvZGZ9JhJh7C7APNlZBEUis8gIvkUSZETs8LrWu5OWhVNG8nROVXVOVkiJiVh48qYsDCZGpogMfgjIDKfDpTss7CDQEHeP2ssU0TsSozFhzph7yeMFxwyBj1QsgIboN2KQwYdGlolZhrlcNJd1aXvtXpWCI8WdGRRWhHL2IbhRReEM7b1pWXtYPa2jud3YWiiB3UAk4ayy6GIh276rstvOVJXFs0MdSe7GkenRY8+eRbNouu/r+lBkJUI36rElQYtQatGHy/WagX3fMz2yc5FSCxUOIw+77BemjDTPTsQZtHcomNJFTms9iyCaM6hrh4xKVlNhArK3brEuxa27odRO3lV5t491qd6dwgTu4Znh1oos3jYlSlkI0u26LjXMPI3Vajlfn71f/P3PbeXzeW3l3N59H/uLv/eo96dIJ+vp1rcUkefnpxXJfH571rj4V7/8tix3IGXnUqhbEU733tueZFqUkXvuqkuCwty6pXupxbwTg2vJtIxQre5ZliUpM69Ee5gEGVNqIUOyVuLkQgm+vnxc5JTpHllKCXIwAxHpKswMYq1KMcNGyNuuAVkoQJS7wMDJ6VJYcxMF2NxCZWnNRUikEFctY6maKKAglnpX61rrnXAh4hTV7I5wEUKmcpDDWpM7XuqCRVFFTnfgcn258LZk5/zQixZRzXR4MMg7mGtS6qogSGWHl1LL/SKnomvhoqBk5nytuPkAYwQedVcePuqRHDE22ofpoCzq5iwg4RYGkirKTJQu44mCFM4e3VFLqZQppayiBCrr+fGzx+svv/nVz/9UPvz17/3O3374/AeXp29ePr4sLG/ffQa21tv+EbFqGoScGf3yLZ/vuWG7fjx/9tmXP/j83Zf/o+Un5S9+9Rdf/tYPf/LVE3sULp5X9NavH0W1iLR+if5SljdIa9cXYqprDU+w9b55MpfKnDwCo5LNmtmL6J1ZJ4ZlBNhYCmX3l2QWPvXWEGDe11NN3wh0Fhkez7ZbWUprIdv1H/z+v/fv/9Zvei/WSylrWe9637LtihRG2B6tgYRBYVtEJypEkbZHWGFUEYg8f/z4cd+Wsv7dH//2dn35xS9++f67D+V0PrO01k/LHXi5bL8q5QQYcTys54WoiNe6nuuXRap3qsv3i9C+bZEOaHSrKKfyrla9bqD0dZWndnl8+8Xzh1+VmsZp29ePKgsWwpmCxK/kjAynUF1O9SRJW98r9WBTco3w8FLW5x4tTfGg9YsenUQUC0t4exaY799FdMWSmSf9vOLOyTdvoI3ICBG2wTcip4zmL5REqEFK+hD8jlE1I/oTkSsnUSnLfdvbUpil9+cXhX++3pfs28dfnt59X4qc6L63KESdsggTZaGGlGUpRjsiRVOCpK4k4ZcdRR1UhPbmyiSyRHuG+935+4yVQpe1wmestYjkyNIbcDmHhX+oNHkk4iUnJSdGAg6QyaRIJyQlWVhEkIhyRXhG2sx9SASES4SPfkzMI0mUAhE2yiFJNNgThO/MOhZnYWFK8ysRi2hkyIDRGekoKkIkxASPTGEhQoB6mBAvUpKKiDp3897bzizDH8eURKQkgQgkQZCjuZkyqKUh3c0cfbzopVCGCylLyQjQILsRORQSoqxbu1StQ5qe6ZEeQUwKRoQP0xwDwkwMz/T0IY/JsIMFAzGPahazUES40YAiPvA5C3OGcx58QgI5EsFgERldRmhidAtnJregA2pITDHFSBtRLUCyjr1DipbWmqG5c6mL9ZYOYmJCLWXzBMuIyUQinSy6MhFLuoWFyHqx5wULpyARFiIlIYQcQcxFaiQkQ5gLwbxFphYFsaf1aCkCDotGs15mzDmF98zh6e5IxAh2AJRrRhg29yRmYXX3cDeM0HIicFiIspsN6c8QxrkHwRNMadZNqR7vBZAwJaz3hcVzakRGYMkEdwICFVGPYSICIiIGPzLL2RkZCB1MBvMUcUz9AxFRJAWyR1Qe7M+kA31wLpHEfMiSRhkwmSgH3JzIaETAUE4rUB5NqWao7ngFSk5a8wBLOdxfU03D5FPvMEHq1E0MMDbSwW7IKicUGuXPvMUo5SQM5t8TAQfGCvgEgkgg50/pVSlwu56JZWfcTkTQ9O3cGIfpzYlMYY5hej2QNBLg2aSMDt3QUBUDdIDGSWrcwLynA2DmmDD84GMOpcJUOgxqeOgKcbAWr989sCRPeuXGW+Qhcvjk4wfunX895sTtAsciHdBgMjSfKiBusqyDnLpRbYnZ+CxzZMUEkG7m3dIjZ6zwJzid4JFCdDulg3i4fdVB90wmZNI648NMRy+wwY69nuPcfh1MpoN
4JpIfLdvGh/hVcpOzP93xvbc/g16VMfMMbgqxV27ulRy5jTMyhSXC0vroDan0clJN4g8unemy7Vr04v5h29+9k/0X/+Lpq5/f/ez/8Vv/4D+Rz/7I9Q2ljN2hCOWkWYa5MoWFM5lGElPiVcdy44sm8TBp2UmPMMxIUlhiOkxezYP0Ot7TWgUkY3zx67Q4vmjE6+Ig2o77cGi3jomRt7t5MDSZmZS3nK00AIzsTttL+/n/91//s//8/V/9n4Ke4NEjfG/ren6+Xrfrh25xydIMC+Ks+Yt/+9/+zvPP8uGHqIseaimaV5U6M1gox5XSzGM6Tuc2nQ769sbMDPnGq07rNhX+nT8ffjWK19man4xjHOtvzqq8HfD4Wp7JSZMOZ2A4fcZ5HR9lIpIcmw0AYGZSQkfLkamXnokcVRwCzQ4SsxoD5hyGBmIRKUQ2XxgcnkERHIWJ5IDuMtok5BD/4WjJPi6ABwXEklPGLUwyxNyMsAg6CJRxrcyUSQm2jiBOwByZhKFaFeekuckA1tOScd83DrdrBiFOd2fU9dKaecdpgdRStHf78PGiIqR6urs7PTzqec3uxQ1AUY2MZSn3bx6WUpmlCNm+t3172pyR7m7mInwuIsoEfXM+JeN6tdbdAs2bZ6qouyFpWatUSVikmXVZTshgTmKaDWo9kCSk6V5LCY+R3p4e6aCAELHAsivVbe/nRVhRRaybR0pqPb1hlXqi4SulyL5vZr23fdv3gIc5QFKUOBlivY9XiIcvXMvpfDrfrSqVgfAhJ0cQp4/pIyPojAbaHLc45sMDMipK4+GckR5GJESSJDmFjZyJjBxBTjk7FVLkSNrlofr1Y9rSfG8HUQm3TPLIGB5vgtZKSJWFwx25EEd6XU5b3+EmFCcFOLy5iGzXGUxxZBUlLPqyLGCYuaeRqGUifWU5qXbvCUfAdoewm3tmgbLWIncfL0/u7XReKCWsBQKckb2eThnJVdeqrTVKZ2FRjd4ACTCDul1VQ5O2/bKsd711D08K8SCII5mImN06CVdZt9aFhcje3X35fHkJb1XU4aSViFiXU1m3vhMyvEsaEYRUNHs0ltE9jz26hS/Led82z6q1tP0SlCqapVoYmbNAAY/uuTXbqVKm16WUcjLb3n9o3/+Dx0D/4of+7S/7Nx82rRwJh1EwLEstp/XN+69/wV5B357vVqYQ1kig8Mv1O6E7SUkYSwKICHeLaKBKybFftZ6TWaSsdd2tLfXucrmwMpAeTbic64NHO9/dmwWaaVUulaFE2qxnEJEs610GRzgxr8vb6/4dA6XW1vaIzDAQmMTDAVFWUogTS1FwSSeW9f7c+kWrZryQ0rrUvWcSW5KWhbOFI7lrQVn63dv1/u1yevO4nBapUlYFU+/eeyBJJSkN4ZRQd7ZStCARq9Q3967LHuyttvcbXq4srpnZIx3RMgLRU0pJdmKQUmryqfBp5bVQYSqj82AeFUXkSHFjzkwVvO6wc7wVArdX7LHFGK5+L1bK0mz3KAz2UlZka9cwh6RKYSpEfFruC5g9a6lts1375z/47J5Rt/3lw89q6/Xhs73l/vK+lmTO5aT71aND6go8aU3vgdhV+UQZ2wdwic6/+fZv64avL9/G+fGXz9/u2xUF3nfGVuRh3zaA3bIrybKinCzipdnptIYbW8+oACAcxBlBY14lcYRzZ0JPs5RFT+kdFGHMJG4dRGbKIGhp1x2ulamFO5SsnLT8++/e/O4XP85YiIWpeqT7Tr4hPcN7a8yGbomWJXy3SK51jdjbfolI1VUEkbaovl3evHk8tWbs+5Lb42dv/tVXX3/v+w+fE3/z9N3V93q3Zojb5bw8MImnP/UNQo+nN35tqqswBzzSRFklyOtd/Vz5BGp35/vePl57T320crp/9/n15WPCHk+fn+SOLM2DiWupCReub+6+77wQqPu+IVr2RWTlte/P7p28qHDRx0XOrHntzyjcEhW8ru8u+y8L8fBsnU4P3i6E6NyM6GPf7pc7yYKwxCosrGRmVURUbbsEW3cqfB8ZRk+UO0W7W95l7FzWKJxU9fToF1/qnZvxGh01o2eklipazaPUNyQL6YO1PXWJ6OvpLZknFoBK5ecPL+vpbOlUMtvOUsC+LieYaZ4QrffL43rfjnZKeduNE1nE3LslSCQPGwMzU6J3E64z5SDsQB8kQBzRP/kqlUXVxb1jBiQkQ0RgZiziHq9ohQeZm5mCQEQyy9hjitaI9MgcPcWkhA9REEV6eBNRYnI3Vb3BUPeeiEyzMAKpyHgOKMvQtseMN4nMLiQCCeQIghn4ioUjnJJUStKA2JRMZrtqdXPKVF2st8ioMoInU6QwF/N+2GBZhD18xHx6OubXQrWAtKd1s1HsEIgwjxyoQI5sJGUZ2hMgOUemgR8Kgmkt8vRIKGkQUXKzPYgyhYiVKZnc0zIycikr0ggc4ZFRyxJuvXcRJpmMQTMbZhwmBPLat0CO2BkmZHhC5o6W2Kx5dgRVXcbeVoaFGpCMcE8GgdwzEAyAHRBillHvJKQHEwkJCAEYx8xUIqIkJhLOHjby0ZOSmWiYKohHaoIP8VGEsGTQeMSPEHF4llLdIsOTKMJZJCKbm4K4FCK+UUXdQoeXAFARC4Ap3QdLpcwWwTLtg5OWYRrU1agW0vTgEECDTWJMi1YMfJyBEV/FU4JkHsQ8NN9D2hKRn4CrydzIsCXMBmQzyeiA8wMBUeSkdoalg4ebZy5n0BEZPFfmUHtgEiUTcBEwIAjNH4/TFkIik3gKcW6V/YlKD89LHkiZXg0lOGKMpgBm4lZ88iuf8hSDL4tXAcoQQA0T2rxQmuzRDf4NQJ6ZAIOHtIemqisPXvWQ6x8E2S0MJw+m5ID6GGqpvCkaZuRwjmZtyJs9bSLemwZzws+plLhdJB2+tMxXrmf852QN8gCs8zOYaTg5WaepaLgdZ3zsOMVxGRjWSPfhbJqOp0mAIZnH0YhZRs+f2bn8OMtjvs3W9fkq4wDmxAbdQPnt0o83yPzJOCWagzsIyJvBjY7jx6HuGJc/guqmVuhQZ7z+e9us0Y2NmznilDGn6hGOc9sKErttv3r++N/R+lz8zD08AEbzRF97ZN/pVPbHh1+Ur777m//bT9/80X8sf+s/tvJFkQJwpFOkMjw6EZGMOANiHg+AW1jVvHZKPthUUMbsaEWwNEYopQdYCAgQT8Z1qKgwXDNjtPP2NsaNYTwm5Y1MwjF0EyTi1umLcPsUHdowoqBkYPT8tqQAOyL7nt/+zXf/6p/89Z/94+++/lcrWg+xLZ/2ziz75aXHTom247TKsjB3g1t/+dV3f/l//eL7vx/5hY8K4WQajwrGMUs+FTm93j8cGqLJjI75N61reRDEfBMSTerrkzl6e/LhJn/7hBziG4N6kGl0m3r5yeNvyHFyzim6TcUb8clMzMIjOZjASCdElYojB9vcgeAhpoIEAplEkgGzaCxbh4V4MphKofSROJ+ZME8iL1RJhJLHdsZH370E6CDEmYUoMFzvDAhGP4bB5NPU1mRKRsRwZDMxzXKSMnkQ9QAcObLvfWryIl
iwkIqKKLVzTQ++3tVI2beV+CHy2kOXk6UQIeGnt5ePHy8gvHm4f3y4r+vCTFXZIyiBiPVUVMvIcfa+k2RnCqSZRwSNJw6y9RgiHQBr5YdVQWyh5pnM1yu/ePTeGNi2VrXGAgIikiLOaxnrzhMWTklFVTifn66F5do2gbbNaymZsfV2f3cSEhnUXebe9nefvctArYunJZKERbmICGBL7dYpo7fm4dFsCGrH9bdm121v1phQT3d35/vHta5KhV0wQpQNwGh5JsPxy5MtnUwtk4xsJ3BO32WOR9ht8fLQ/WUwJAIE8bTM0SdPDJnpQ+M8tsvEmgmkIzPSmcQjQezhQ8aUFklgHoLlZCEkdwpLSilIabsrs0/dfUbvN2Z2UkWilG3oKUYTG/fck6qqMgtYGcyszpu7EQcYZluDULStX5OUSVozpkxP0rIsZ7OreUPE3qVKPdLZiLku6xLh0VsERW6l1qLn5ldSYl7cvOgS1kiguqSLKiOAcPeurAym9EQnsXrSZVF3Jlrd3MyBBndiurs7296YFShuGyBIdnci2rt3Dz1pWG9+Jbo26yzc+jUyWESLEnFkY0IyDTKvt06BTjunPz+1y/vz4+cP7y8/f/dm+fqXfO1pbuYuhURRF7HuvaWi1pM8vH2If/mkiKfLh3q3sJTM9HRd6r6rxUA+dV0Wa7GU8vDm856+tz29swqHt/YM2gio9VSWO9sd+27tqa4nFi13b733DIJSa82ad7+WtTjZsjzs15eItvWPzAmHt1AqvFa3CA+GriqJKGyZpqzRryRFVHs8O7FUYuXI4oRgTerdNtWS/sxlqwvd3evjl28++96709u1LAVaICVHDYlI7+pDLRnJ4URJMQRcENFI0nrG/Z2e7pPKahkW2+MLtu7PH/vzS+z7/tRdrJaFTK11eOpM+1epgkooJIuK8MwSy+lfHll7EUGHUYUmLKTMSI8DkR49h4NVChFas9F5yLxTLE1TFVv2FC4iLLqQijFTYVb0DUT3dydFF9XH5bTIcv34bXTjmuibKmnRvr+g9crqu+dyF0aX5+9qXYXC9l2XGntLWgru6rt3n3/2D37y05++o/10+rrt/uzfQYnB0beUffP+eflc5ezO5zPT9mx2RXYVLlSJV1AtUnusYU1ICq0gJC+BGF6VBLw3TmJelrKa+bKumZYGQyKztb6WlbSoFGrJzm/W09/93b+zrg9mfS1LgrNfkx37U1Vpo4lAkey7WXPbMQBwf7F2TUepZ9ISHvvluSyPUtf9+fLuJJvyz/rllPXv/+Hff/ew/OynP7m+7I09wsHRbaPT43p+25sEo4pwryiL1JMT79FtM1XtZoXu6unzVfnj9r4ubwHvbbs7PZ55kbZTyP3p/iQcfLboaV4WpfDWnrMbK6uKCltva6nG6q0bgeWkWAARgpCUKoHNonlzIc6I7qai6+n83J8I5PEBLPvWoLvoqUDJEFQpljWBeCIKJzK33S611EwPXJ+j17JgWTwRXRSnKsWjeJplEXlzfvMDoDia92/B0Q3MAy567z0SlYtFqGpcPDL1XNr1iny3yKPgOTLceo+u5bQW7eZckrSu9fTx+h6SX7x9G9v7IycENHPfp1mFRYCMDEowzzLwMDmMOGHk0F8LAcwYHco5KdMom7AmlQxKShvvY2SOl71EZMqICxJxc7OegI7OpkgijGBpEjHrwmIRkSGqiNGQm2/9uRnCwuPMWRQIJnbA0gkYTDPRCB5IJMIbSJl1BLHE1EyNUmu622hLLzzqolM0MZgCwowhUlKGDLNnIRXRrW0izMw+hBGUnqlEmRFhzKI8/tbcvWjJjEjPcCckQngKrcI7UCNdpjkvDyaAIn34ioQlvSeSWYbCAgDzqKYlHJkuXJl89GiNcBIeCjEh7h6IFCFCKJLSEzE6SK71lEkeLkLEQhlCYn0TIkBZtEcDS9GKTBXZrXUPZmERSRkJ3Ele6xLucfTvZRnAKoUlyFu7kqwgYqIkTvciam5uPaNVPWHYnpiFZG9XoWEvhIhygrkyc8Nl9j8FiS4YbKAqEpE50mqImEk9w7rlEPBHEohJRnqWkEY6K3m8pv4qT0lPMwdTxGhhxkcgB7mHszOLJ8IDBCEwiEensJmvNMvTdIAhQjKxx+EFIjIAETyR35C7BNNAl5NQIWQMH9nh2JpIbBQbpyIFzJwRkhSzEA0aW1FQIpl4vPwIJISBRZjYJ22CaYI7aIWIm/z8KMzTKzFwYNcJ5+NgMgb+mezJsLbRJGFGOZRnNPhU1NORNXvjq4gm5IpXgc+IUAKOlM0J0nJaVD7RV1CmHz3vJ5MSs8/pMI7dAlwO1c7UWA2GbnSoxkG14YY/QTPz6NgzTGplgmEcaTt0QPoDGx9XdJBzOebmTcIzkPFNCzGA6oTQfLu6SRSO8YoD5uYr/qZDwjJ62k2OJyOGLGzeqKlFQc6+joPxylsyUuIV2B8E3S10aeyqxloGJSWnJGJwkZNdHkarjBlujRxNRvjIOEfycS2DTGFCWg5+h6c7bd7IgwuhT85hntinUU4H13DcyvlrwxYVzsJpltvl21/+ZGv7tYfq+Rz0cr2G97Oc17J2bx96b4inDW/Nv/l/fvP4i3/z/T/4j/TL/wDlHbF4uCcA4XkPYjwYaWTEIQ966MaN5KhNch6vKApZkDYkbDL4RmIwkqfQDp9cIm7N5Mck4dtFT8LzCLubB5qr8nVWUt7W4LjPyATFQKwABaVnwFPa8/7Vn/30n/7nf/Ov/s/Rv1Ohyx7PuyMSWFt391xqZeAH7+5edn+57j39fFZ3e/7q//PZ819neQOuWYYCagjwiWfY/MEo5lQoTxw8L3RqJOdJTnZwXPXQIs3CzPHIu83LIRJ8FRkd5sgxqjT1b5PGzeMgt2fZfHDd5F+jSDWXwGRQjlOcjPVouEERHukeYEgRyfkwylFfmY6gOdcZYItsgebwIAS4DFs/ApSDNpn78mCYsI4TEtbjcpKE4U7EAYCVtDAR0omr+6HOTHASQdxNhUaL08OfTsys4j0wFMXZeuYIyM0gLxKUpMK7mZyqLjVJTu+qEVtKT0oS51KEPIng1+v1+enpfP543a+r/P+5+pdYW7Y1PRD6X2NExJxzrbUfZ59z7r15H5npTLtsl21KVVQJJOQSlIB6dEtIJTp0ER2qQQNVo0AIiQZIQINWSVBConh0EGWgURbILmynXekXZaed6cz7OPee536sNeeMiDHG/6AxIubal3N1pa2954wZMWLEiPF///fAESG5pmFMSTwgJXSL/usIYK2qttpKaat56cEbgsyMGOgW5n14DZEgBSMgxJgZAGTkMR9KSY7eWiEEFsLEERZI4MbgVR2IVc3CCEMG0Q+F4gBhHqEWh8NQ6kpAhNR0ddDmxC6ufi3lnsfWGkB0/kELFU6BEMgkyJvcL5RbBDARuhGiJCWRpEOSfDw9vDidjnlMbAiK6Ag9WQ83dSrJZjtI20s+0BEgogWk3i/BCMBubwQ7Hc1p0/N2hzT2iACyMA1CYA2jTUS+vbI6b6mDToQSDuY1nkHjYGKJvY2Eo
aaw7ZzIAA2AWRw8FKu2QNjSNgDgBhW12qxZmHFOCMSSEIActLYmhqCmPuSpqbKkpgYBx3FwVO8mTcBM0NpqHQUNWOoVqbm3QYbEoqVIzjlPqiu5qTdiILRhGKoCYSzrI4CpFwDMKFidQYSltopm/dlwxLCa81DWNRGVuXlATlzXIpRlSH3DTwgOyExmTbUhqbsKD6pAaLrOacyhETTWlTAGRFSvvSLStsI+cuEYrQFzTpmFAQGMvUZZ64tPDoTp7dfr6zevX79uoeXVi/imVpRhXeaUEgI42+Plg4GXWmTKi13HbGU+H0+5tjpKyvLiMp+1NqhqWng8qtmQBhFybTwOAcbjQNbxbiPwlJK5hzszSGLyoGFKMrxblyEhRpR5VXRJOYkIj0AI7mWdTdeUBm3rcToMw1iLIlA4JpkcnFAp3N3Aa04inMKAUAB8ykemwWFWfexJw3O5qlseEvolZ7//BI4v84s3d2kYhkPiKSFzQLjrOA15GAFRVTHUrLpWRO8Jf0a0Ls7DwMMEFFpXASVt9Xpt87XVqrpULDiCoQXAStVnzxX96iwZCHlMkJkOiceMwsDkfddCBIHePcyRAWMXafcwzx4Js3W4YGdKAEBKSVJyVYQYEwMEkgM2rfPs1GAZOAvwy+GEiupOCEk4j3dCgG5Jy+O3P/v8N/7s4fDq1Y9/8O5P/mB9+0uh0QyCxGor6zJMB5FcLmtY2FKWZZV5kpybreCLFk8Pn4RmOdz94Hs/mp7WaPk6XX/22IJimLIuJclRkAbIgqmYc+LEkJATYUQa0wMiBCV3Rw+SHL4GAjNbrOJpxBzEDua+1FrG6V69kYyuhhgpD0WjuZ8OL7yt4cQyhFty+P7dq9NwtNbCvJFZrYQWrUVdVbFdZwexxm2+BFhWBm8VANFrLSmPoEj5LiBSGoPZW3v54vD47suny9tPX33y47vPlsjL9f2k+Mndw7f6OF+rMw/5fuLpKHhu56I1Gh6H+3UOiDArGDGNdxERBN5S+NzUweYk4ilNnJhQWrsb7tg5pcToK1KLiARKngmhRpKxAY08ZhYYYLWiEUWXxs2Cp/QgwKVVZPR27QFDwknkUMtjiyVCmy7IqYYbNC1XSuMog7qFzcUWwdPErzLmuZ2LXRXRPRwtxSFMgQNzLtgDwogwtdYGFPRS7QrpJJK54Si4tMqExbTHqARHkCEGkzTzlBARkYIAiSSnY/aj8DhkOx1ealszTROfCha1mZHDqjc1d3doOrSVbMdLAdDcIoKBkMDDbvz2XqIwEpKAaWAQsasBUwC6q22cCjQ3d+NbqYDh7t12J3fLZ4Qe3t3jPVQNuwbEIwCY2VoVyQhgbmGNEMwciJw2tQh2BnZglsFciUiIm9b9aUZ3dWtEJJIUFAEDQk25OwgiRoS6YnQ5kgvmbopiG1UeHSLchTsKRtD39b4xFfo6o1YhnCmZmvZ2GVIAeISFInfPSwbvLpRhbuqKAETSrCEAC/WFWihZqJsyIRF7KCGorsypU1rcu4NNj0sPiCCRTRGDBOFNK4sAcSABQWff9L2uhYY7ITNz31hQ9L2LI0ILN6tmjiwQ3v2/PTTFxBAeDhBIEtirbiJwDiagZiWAsMdRdXdNcDNFJOJUzSHczD2ii/SRyL2pKRASp41WgdDawjLBhkQiAffBQoww1zBi8Z5H4SCCSUTdrGkHShInQmpmQoRBEW7o5ooATALmzStQJ5sgIhMCkTgoeJAjdONv1Q0P2qoJZGY3QwTrLWCAbnmFCOzeieOGIAC6+cCgd2uhmxEUBLhvecdbaUsetpXAW3mPAU5I0bnFAIK4tWdxww1ob7R3nNLNb4Uk4OY10z9ykxn2lkm//b048xt/41ZxRQB6//eNmODRSxzYYZDYgJqtQOupdh/TfPbPdijq9mZF3ChlYLs92a0SZsSOnnRAA2+H6N38HSXarqo3XPcqvF8kbSyb7SRpQ5eeXXJwU+rBDabDnecCexW5wXfxDE51ffqt4N4IQRsI9WtTo48gIm0pZltreCe6PK+ZAbcr3NVGO0K0ASCx+zv1Y+54UP9V3GGrDba5HR12HA1vLIwOnrm7bUluHq6qYT1TGTaOJNywlb776URFINp4cDtLC25gEd4mzH4Pd4yqf97369kueE9v2qhVGwIVN3Pr3f8pbofpo78zm/rnPz4J3M/lBj7sWNx2rxEifFcGdjkcIHaOarg5h+UECJT40AoCODIR8hrFmzswSp4XXGf0st7dfXH9J/+Xr777my9+9K+dfvJvxf3v+Dgwd0TBCAN7Ok9HAHvcwXaxdDvfbdDcA8gJiLmCA7/LdG8q4MHQ4UVCEu8j1VMKYDf32UHbfaxjgzW2yc4fIZL7aMVHk28To2xzH9CRIsDNUYnCAazi5f3jH/yVn/6D/+j85T9t0eoCTYl268FBxkCbhru1zcjcMeUsyYgVtRi//+off/r1742Hn+D0Gly3HfamgenD8JEdOvbVYRuX/X7/GjD50dzrq8czhLQPaCeL3R7lAKTnabA/Is8DuD0v8Qw13laNDU7qA+VbS6aD3Tf1J4CZbzVFGLh3iNkD3Fvrt2SLpMBNUgjQrBIKYnTjVGhRiJe1HnIOzCzYzVR7wqSpEvXAvegJUXjDAbs1NRALWbc+pC3tYVdABrhjgJkRUE8cBQSABABMKNTfAwiYCIDIIRyDSL2oOzi4ESVBQGRMqOZJkoLgeOI8BQ9OjIAkMmRWC6BAwOW6fPjwdHm8rstTvbwL17DiaIQEzrxtmdC0hSm4aS1aK3az780rIBiBWJqZe5gaomMwJiLAVlqSNOSUEQ5jUlM1JxIErGbhAEiEnEgaV8BgZgCw8NMwYWIFcMYkYuoBDhTCXNfGScaU3ZsgubmWallwzGHmZqZtHFJmIQqhHmhujGzRUAQ83M3dsmQIyMJZZBimh9PhxSiTOIUjQfTUNwxCoRufKAKIYufD9lcIEDk85ysioEdvogAgR3RpO3USFrh7WPdJMGDsOvMdJvaNH+fP3Yz+tu8dMQQhMetZHIZIW9w5bgRbhFD3Yl4BLaKaz2sRkSx4C8TcoKJqlkTUIZTdnEhyPtRWBSHUXBjZ11I1uHm4qQMMSawWNQTIa6tjYkBgyQxpXa/IAOYkvLQrC+XEGlUjiIABrVUU4cwWrafLBikAOCgaDiThwXlKacS2kEi4egQzo3AehsxZWzTV8XQH1rofZi0XIidKxEIRTd3UMMCBvCnTndm6QhFh9UaItZ7DGgOlMaspEvStlOSBAl2dMRMOZhVEjZxReiDdMFAFm1C+/cXbT17enz4bJNW7I771Fsin05EYwNr9Ybw8ltNpypjN6MWLcZxwVUAUrTOzSp7UW05JqFEOTBFItV6FEwCU9YwJhnz3dHk7jMM4jc28RABKaxraAEwIecBxkjFoEJPUTlN+t5yZySwxDqVWZBmye0qRclQIkrmWIR+y5KVdkYKzgxaGSGkIZ0aErVViWq+DZAIFQvUgntSVQccDkbQxx/GOXnw+HO4P0/0kw5RTRiJzz3lAZAGEugKaLYu3ZqVYrSwc6G6+
gAWQn+n84ayREicJ9nCPBoDhEW6SiYTkKCFUSyHW9rQ4GyJKzjgmPAiNAkIGIP1N1Dnpey+r9+VujcvN4nHbi+xhOTuriFnCQbVyQEISJq1F8lBtobg7DEOr1wROAQxETCEcyQPCauXgnIY3r34L1rSG4xny/edeVo4a3lpRxgTR2rKGqGOWNLIdn95+cTiZjK+MUqB6m+vlO65DU6V0PBD/6LMfD6esv+Kv5i9rmGsID5nlNJ0syAUdfBwOoa51NbNhOKkW6nA9KgpYWw2rYJY4EI4THNTAyRQLpcGDrDSQDJt2D9ENEIRzs9bNINFThvE+37fqYDblFFb1es7CAGrr1RCW8zmNx6KtlUWIyNdo1cJTTtYaADOPXgtaYXRvNTuuy6LlnGR49dkPcTG5XO6PY5lToby8C4B48fqzWi05z/NVXJdy9uGw+NWHrHhRUyLJB1qf+l69Ol4MCdmX9lRjGceTlsWcVsvokvG4zI8tqWHzqKDhEE7OwQKWQckquGJzMBxkVFMRDNRAZfQsU+BS3UlSBqRwSgxAjvkyP2bMAVF0mY5jW1cDcQCNQFNJaF6WOiu6cBYAl2webkWt5JTHTOH24fzFICfhlHICrsxZGgCUhIVI3I0QhJPBGoTmvJrWpplTC3e7RpxqXVq7gAAoTzFmxwz49PZ8N7w4z98dT68nvtfl62M6EeXrMqu2fLwLpDye6vUxosG2s3UHZOK9Fw2IhOSEZNYjvcPMAKDXqkQMgEDk3nGMDRoiJER2876vZcoBm2zFNv4Ld0MS8M5XggAURDdzd6bkgB4WBASbV40DcDA6gJtvnewIdwhUd+GEJILYtPXNMxMDoDtEBKcEAUTAXaxEEADNjbbShsIVOrKwdckRzKBHXCEioGmTlJCk9xebrsKCxODElHu1TcweisBEQMHmSN3LAGwzbvGAwCBwt8TSq003C4dAdG9EyCIQ0EyZJbRBU0jccXCITirHiDBTkoS9tdSROOYgwWgYgSCAjt22hHerGogwDaQhTeDurq1VJOysGUJGoBbN3JkkSw613twOCkLenKUQFcC9KRIiMwiBNy+EblqZgHlTyKv1ioAi3AwAAogChBE8oGsTm6tgSmnqtxsBmDMgqakDMjIzEXLXrzFFNautRnggQQADBwCYW29LOnSyqIiQiFpzBEdAJJZBrTBwuFWrmVMvvwiDEShlq5UY9qfAmgcJikjVwsxqDgEWmxgQEZlAvZtHQ0dJbpXxLXPLPAA2M2xC8Nh8NTrtBbsQ2kOICaLbudsGkezgEnYWzlZ8xsZX2tKVoM+oDeAAj+gozK0a6287D+gt/o/qJ7gVsxvysP9sVzd5J+xs9XDQVsHeSlOEvbvf2Q63/jyC/zq8sXvYb3jKzZ71+TSe/YE+qva69CbMwu3ZyAY/Qg0QN4fvLXT+xqqgvVLeKDm2gQgbaQgAtriibQbQji3szKibCqAfqnNeEDtihrfjw/ZYwRZc2OvjjqVBRPQExV79esQtJhn2X9rL122+wE756bwUCLCe/HgbXdj2LxvnATE6kttPJ/qt2BRzbubmPUhxG/ANsNtmAW4MoU1J51t03a13tt8g3JhrEYDkt+IBNpAOdr5pP7F4/mLcLg6ez3mDPp5BwO7Qjz1Dk2gbx+352GYS3LDV55+GDVXeLm372G1u7TSWrgvwOp+Xp/dhDiAWVq2RkCq4NUSgZtag1Dgcj3NpZZ5fvhq9/QLW/+fjL/7xi9/8r97/8C/Iy9/2dOjhW8wC4cRbobZJr7bno08gIiB3YxT3AAoFPRzof/0/+nf+3f/O/yIe/vmVxHzNLN4w3AkCuK8H/c6gWXfUptsoIMBGtQrYAbU+yemjUenX7AC0odcbUBp9UJXAOdyRWrSvfvbN7/9H3/3Rf3y+/qou1wvJrJBMpmkY00NOVMusXAorCQ15EoKHdLq8f5pVn56eBLmd33/7T//ab/3oX6/1DjIKIHST7xtkRT1JZgdtNwx7g29u5Mjn+XRbdCA2TDUi3LZ7368Cd6c23Fe0/fubQC2eB4hucGv/xPbvsaNSN+h4d0u79Zr+/+DbvvYQ+QZBRgCYbzAMQN/XdOtrQ+iguUfvBXgs1a8Sh5UOiZmYCLJwhKNuYlV37wEXYYaI4I7EgEhEW2eb0kYcxo0yhEAA2kl8QQKBgkCEPWwEAImAwIlRiAODgQQxQVrBFkANjGCEoEAgBgAORwYNS4yClVDSNOEwMTMCMhOSmBsTPRwOL1/cu4eW+Ztf/vLpw3fzcsFATFHmJkRDloHRQFuUqsW0gBq4cXAXrbs36CzXDptgJ0RjrZqYOrBibsLEjImZeVCHUdJc2oxFA5ZWudtbbi8PUlOgUKyIOEkGQvOS06GpMYp14ReBA2iznMXd1EERwh0xVGMcMZAMHKi7HSkwIooIIrK3iiaIlBLWurLINKZjlilRImBERg70rrtnZOxCFwTCBIAAhODdtq2/GLDzE4FjDyPATTaJHY8AAAgJxAiwCPVup90V5QAAvmmpiZBh66d0wi2GWwQFIAIxYiJS69kyfYkgBDA3M6u11NpKs7UqArTqAGyOHuR7D3kXoKVUiook4AHCvHlzCxci1qhLa2aNiaqGCLEwBK6trbX2jDdmEpHVVrMzGHiYUI4IDxzG+8SDgE0iqpryyDgk4kZmCHVtppAzRxrVK8vYVtPg+9OL2haCJhRCaIaAFITDMGkrYOAggaQGYzogFPOQNFib1Qq2UFXzqM0ASx6yN1/W78D9cDwEWqsLC1NKFtW8yzgu7iE5ubug9AyxcRAIIwhEMy2JTkji2QghVEFERL742bd/4Qc/+PD48x/8xudffnGui6dBws2br8sSHqWW6eHuwy+/Ge6AJ06WF7dhEoC26OOQJ0FxyZx5ViMkD0DhNAq6O9iyXg+HAyGYN2GZ1+bOh2HSej1MGaDVqLPBkLLVEuSffPKivj8vlyePI8oRTcBXEUbJlGTVqqpMCYjNWyJiQsIcoBjKnNEAw1FQoQUj8aBICBqAkKaiDcJEbBw0HfyTTx/GU57uR5JEOISzNscELIklQ4Rq07Ks80XriuDgSkStQgCaejCYB0pO5BRB4khgpiLcmhLR8e4OBdyhRDT3QTBwXucWEiiAE2kyToADozCJ7C6eoe6ERMS3jUmEY0AXiPZ3Vni4mm/87NvGxjkxKQfg1ZSDT8OLMd1hZQdqTQMzsTRzwAQMFy2AMEA6paQIxyT392+OcRxO91dtOZ8IAEuTxAFDm69MSCxlnhVXmIAUchovl29blPs3v2kW+eFkXtr1O/Mq4/zq5fc+PD7+8P7FUn9Uvipvy3m4YwcDh2I15TtwA1BCCUsUaOGXqsN4xzgyLhDoXgkTSapaDnlAGzxg4IkoLe3MZLNe83AqzcZhJARvC5vV0IWFACxgSgMCTXxgw/W6ipmqujdvVR2uj5ew2m1hV/0wX2cAGIaxXhZwzdOwXJ46rgduNn/AaBCNhjGA6nzh8fD6xQ/WZRmjHlI5L1VKe43j4eH71mq4vKvFCKu3RugOtZXqjceDe1Ntw3hf1zX
l9Hh9SkmbXhVxyMc2z4ji4QZqwKvhIR8BEBJ7NKIQlnU9Bw+SH8KBoLTwaJoxBQ8OkdgSg+ri2KoHo1Rf1C+Bw+H4Ca0eTtYwyVisIqZAXutZmLU6BtVqqjCMU0ANMB5FLbzSJA+UYikLgo95aMoCCZfZAwfMrkv1WVLuLmOCbOWylArprtVwQgzz8Llcx3RydUMTBnTMwqAVbB0Zz8vijuNw78XO86MhMUA+3jn6vD4SsZrPlyeWaK2NA0uSdv0mceRhuO2LOjWvM8M9vANVsRkQECCYKyGJ5Nqqu2MghRAIE5rv3JoN3yEPZ2ZAADciRiTssIGbmxFvrhnMvLcAqZPYWyuBkdJAiKENHJioS7nMLTEJUwR49CY6rLUwEwKGh2pJwwCA7m7eEHsie6+TokdDCWchYohwIyRH900HZNCVd8T7JRiACxN0kCqgWeu7eQDEnlYbrtESsYMAomob8yThTRUCWmsp50AAx5xGs4aCBNRsFUYIDMJqNckAHuGeZWDmUgu4I6Kbds07oZi1sA5zmysSYZCvdSVh7OnyJKYKpgKgqo5OKCLJXSEAARkgWokueu+AusMg2Ryse4p716cQCZkbcwpAtdbj7DtpCLqnowxqauG9syWSmy4pjUjMTE0rACRGxI48kjsAipBUr4jorps4hYgBEcAQ3CpBoDsAasTAQgDUg9209SpazQ1jSCORm6t14gti4gTQuUUBYYygZhBBgOiWmRm4WYvu48aY8+Cm6mpmYXELQOv1uQc0tV7WMqH3GGIHIDQI8GCGLHyt1q1kus0QMzfVXTWIAMDEaoq0WZN73+7HJnvc9o5E4boXt4ARtEUn+4YQbfAmWgQRBoL26wKw7nCKABG0daQ7SrRjNLC5U29imS1DbTMm2uRUuGWr9c0mdnRjq+a3cqk7RlP32Pad6LJVWLcS65mkEzuUgx0i2t7IWz3Y/7RXjB9VZ9sHPyIOdMPm8AhHYNzxKXimG2yGNbd68eNTeiYS+q3ExB282srAfmk7WIa+WYf6DtxsaxpstkG+by02h/je4PXdkRf3UwKAXzNs7kjMRx5P/RbsGtftX31D4TZUZiM33chJG053g5Y25lfve/XdDgB498xv6jfHoh19hL0G3+Ea6vcncA/Lu41k3OgefZx6Os8NGtmwu2eiz278HHCD27bbewNzsJfa7jcO2a0pAR3qoH06bGFuu8Rym1/7wG9jtv98bDyUvdbqV42mERbBMUx8DliXOckhDwdFMONmjDxYaVYqJLi2hYPvxod373Ukg+XL6e6bt5c/hA9/afz+fyV9/i/Ii99wHjQCgtgdIQghXIkZbnyx6FPNmbC7InlYNBvn5b/3X/rLf/Af/s9+61//H+j3/zxMDOBIIEjdVaQrsnc3LLgRxm6Dc5vt8Mw426f6PkaInXeEt6Kz05QcwILUxQJwWec//Jvf/MP/3dc//0+TLVVtUZ7NJaXjYUKeFIYKTaVlPk7jNI6TNQSvzWM8Htp1PR5fWp3n5dKuv1q+/Sf58weSk0MgBTIBfPTjOxwK+wu+Y+0Rt/v30QXdJgTtk29/dADjWWb2jAvvf+5/H33R2+WoHW/cBjBgs8vybQxxp2JCX6NuMzEA6AYUOTgFqjn3TFjwrs3DHjoBAO7EBAG+MfmMKHcTOQtPTOih0arCdY4pkTAMCZkAkJ6zJ7tp4H5AxMBtoGAPKWCI3vVA2F4aQNi15wHR+UdMtL1ONxZSNIQgjK7k7+6A6BLezLHzdgkdwADSrcsQ4dQUsbBV9pSHFMj9PS3ITMhMp8iAGHh888nDd9989fM/+enj0+OyrEIMBK3VAHNrtbZ5WVpzQhKUYI4ICGeU/ix7WA/fJmIHBAjzLbSEmZkZ3IUkpTwiDWLTmB6O09WqlpLGjGZEVFtLxAgRai+m+1bbOGZEB+Gma6cFuLu55JSbttVayuTWVMtSYkARoiFnACYWAChlCTUGJCSWVDtRuu8W3ZBgGCdJeRzHnBJB4OZA3yX/QD2MjgFAqQctRRc5wn5nySIoelxwf3NEQBBtDYAA7ovlxiDvpn399vcXQxhs3Dnc6EoRW1ho9yDYFd4E5GAYIECEoBEa1tsd2r0b1dq6Wi1otuGxBkgIJNa2FvJNgFZdVcEJJAKcwKJCYKIpENemKVEAYRiAE6SixajSmFUdI2qtzRegxpIIRdKBuftboxBLWIp6N47MEuDhi2rNIi0gU26G5hYeFMxMyorhazuzAHGO1hwMiVM+zPVqWvu7dDpOpcwI2pqHNYBkBhZUawsv7oGJggMBixd1CyWOrK6IWswYQGSAgGZrrYiolMQgAL20VQgJQWGRNCCkwBAau6mVugpGvcww2mkc3r99V9Yf3b94NQD98PsP//Rns0O0WnW1w4TDgUWwXS8vTmPOAgQvHo7S7LvHp2mg8+N8OhwQzcqa4u7l8eVyfZQ8ILZE5mFZRHUlRiEkW0XuIrh5gHfUdgAYzauCEKJWcfI//OqLH352/Ozz9KuvzgiWKSc+fni6REAeIfGIlAiAo0Iog7V1GdIBMDgLhKEHIyEKEQSXoGKRQj0xIlahyANOxzi9hOPDeDgwZWbknMYkB/cQljEP5quWs5ZSSynXWXWRBK22cCMWQgnAwCRpJCHJEwNhMCOaNUII9DyOaRpZMiBwHpJpqytUm1tlcENvUCSPkFByYhEk2poQt+jcXv30hKW+daDbtrKv6IBEYF1nsffcIEpdm1ZHh5Q1MFMkrIkmJI56dhmMxDh5WzdiQsiqqzVKkux6kcvTizR8lv/U4fUPl7df2nVlbP1VUauBe10WM12bquqU0uHVPTyVt1/9XOQhj3cVgPKx1XOdv56GUbwdJWubfufN9xZv7e3P1RYL1aqcuwF5BCSRMQwTMVULKsE5AD2IKGG40JEgcbqapAbVegUVWRJKHAFFQxMzOQhTDUskhAmQHZ1SXpoNSON0IKGyrNZMILf1ymRzWeanWYjDAaiprst1HrIY6XqtHKDNERpnyK2YGwRJFo8mnFRrYlyN6nyh6zu1NQvSZXmRj69Or841MvLTOmN9rODTq09+8fjl/ZFV66LnqJVQD/lwysfqubbLw/HOIhBtSAOQk5A4DZxhlLnWu9MnKb9crr8ssKjP05gAgjgnGjxi9cjDQ/MIWhKJ0HAYpazfRTiRqGck4gS1XFu9DINEA8ExgAIrcQJUrSjEQ0C0UD1N48nKkpEwygznh+lN6EWI0+EURInEEJvPiSXRBJ6mdKzF83Ao+gQ5mbxywLJU5gBoamaZTsfDtdaqS9GVmQIcCR2phhLmLAcOaa7dqGVuC2YZDgxo1eC6nDPD4TC09eoYrbQIn8YxDLVCYkBTxDQeh327TuZhrj0Ddm/obZt4oEASDIgwtba93gDDFJAimJEjtjSnFsGE4ORdAB09/BVg1/wjIAEDopk2b0xy8xX2MCbqxBcSAgQ3A3dExvBEDFsXr5dLzASmrYUTYUAIyxYxDphlcDXiVLV0IrCFMbJH6y2frXYNRGAgNOu7PO9vdQcAIJZEgeoW2yUAEmNA1RagSK
NQSpIjetedcs9OBMjCxRSJVHsDEC0AAGsrSTJRQuQABYjdltKbW2zJ0sDEwqRhCOHaWHq1GoiMQV0c102p1Q0CalmTMHU5BosEmoNbBLi5JcpAGO4W5tEGGQypc9cR0MMIMHFSMAA3VwUgpEB2RNMQYgbUUCKu2gCpi9+8QeYMgFUtDyNLam0ND+kyPXfCCOjoigOJRRCREGoEWBcGzokyIUU4YnQdWQ9gJkAmcrSNZY1EPeYCRb1CAItQoLZqbhpXIUnQO2xESEDurtA9IcwduzlEBGKYYkq9TEBHEu52EACgZsIb14OY1Bxho30n5uYORNbxFDeEcPAbqwQiiAl2eQABmithwO43csM0doda6BvZDsEQdLOwXo9sPAEitA0+QHBwANu8hTesb0tYg90o+kYt2pqumx+nb48x4G7cE3v7/yN8BbsucYcScKtbe4IddI5Jd/HeDhg78wV34gDuoAje6FF7qb+Vcs/cAtiJJhtfJmKzeolNioMOz2N2k8f1YelSpx05gmcABjswtsupNpBiv1B8pg9smMZmy9Txoa3bHeD9lkLs/KGIzUQWdtfkneZxYzpssq541rBsEviAHXgC2KE93EvlThe6cWluVIsI7ytO3DY32zFhc5eGm/W49yP0Flj/7obb7L8V+8jsNfrzsO/43e1ftvHunLgNW++zCiIAAQAASURBVNxuzQaQwf5RvFE0Ovx4gzD6dNlAnA3c8NgCJ/tP7bchdpOkbn/UyyTcwMwbS+UGDO2nt9HRIPbr2UCxbfTdGdkAQZe6LqEwpVSNkqSABg6QJuCUGIAhlVirUvNFNaehAL6fz4+mD8P69E//38MvvpBP/s6n//y/+eKz38bxE6SBgDtoyJhgexoCAjsg0eeygxIAF8ik/+B//z/57Ke/Z19+8ze++B//1/+9/8MHjMbeNUVRIATMFVDICdxQqBtcIfR3Iu+TfMezd7zyGX3F6B7wuHE2HZF3KZYbhINQc1revfu7/8ev/s7/yS8/Tbm8e7THcwCPchhFYFVmAxYDovvDpxi4ltV9Icfa1iSZTEUIF3VtBP709a/Wb/7R+OrPRJtAmHYi3rZn2GZ6/x/t5L3tLz96QG5g6E4+w50yFjuZsU+XTiDc5lif97b5E8HOa4vb725P+0adu8G3HUmM6PV2L2z3ge0TyeHWQlbbTMjdrTu8UQhmj7KD/+YKTMxdj0ho0Ah2da1vFnaL+egyV80c5EApB1hfSHrRhYjojszb09C1xT0OIYIgCDc6qocTCwUCE4L1eWL9VdA3bBRoCGGMhIRIARjEAMjsjpn7+ustmllfeBAF0BG72Y4DKBp5mZkZxoGYgQLAhWSYBmHCLak28HQ8nQ6HcfzDP/nZN199WVUtHMGpkJqpt1oDgCysL9YaFqGdoBzuxKjmiI7IBA6I3ElbCBZOjkxIzO4ODMwxMcohD43rIKfjNDxxrdFqQ4C1Wl9c1axTGRkFwdU0CbtVA6zePIKFA0GtuUdtKiKdvIXu4YYA1FsCEOHN0SlcayOIRJyIVZWIhzGNg+REgn2PhQj9lQREHdAjpGFb+PoGAYJJ+s5uZ0H2xuW2wgUIAnWtabhud25LGe1Utv5Csv1N653CBv1JD+fNLJHUWxD2cBFCxB5cEAEBBBwB5oFI6qbuTRuEy/5KZmGDaKq3dssGFSGFDOLuasWajePEyAG1rO/UG0EAZOLMgBTW2sVaNQKmwa11Tlc4EGWGZFoAHA0BgYnYrse74eV9Gqe4e+C7u+O62rzyZY55jsscKKjGYeGmbJHRBSERIyVCyzkzUlU1WwUbhZtjlrTO73IaSZKHRaDWUte5K3On4VBqUVujtVVnzOKAQxqsLc0gsPdIocwrSbJwQBEZTBUIchpJHMCZCUJLbYMwRAFMYWStpXFgCWAIsUh49/LFT3/2y7/0F+/qZT7dC0PRgkmIp+weBDFKxmZtnu8+ncaDPH39vrUQcy/17nhMw4EJX3FuZYlyzhxC4BHeKoSDYEpg3gIFmAxbGgRaLYu5p8fLTIDO5FbaUhPkYZrA7N27y/Dp6ZP7YS16NXeMu+MRAA1WEqFm63UejseUhtaWKU8R5OaYMvFILOBrwOxRiNztKeggwhHrMHrKcLqT8Yh3r4bD/ZEoIeWUhmGa3JETs4iHqdflel7OF626Xmcks01VkRKlNAwsYx5PwTJOk6SMAQioWgSCmMyBeOA0IHEX7YqHe23LaksppRpiGlA58pBRCJBiA/O3dycj9w0LIW6JpFtIT3/dmLvfdh83wjIAeKcbMJq7WwXgqmsGIqMMxCwt/KxabUVdIDDl5GqtlNUxD0er9jR/4CxjGl/mYcrQGpdLDQpKxGnwVet1oZQY4Pz2vR1G8GEYHj7//unDd7863H+SH14DJB7uUd8+vv12nZfp8GJ6eDky/ubD9x6X+vb6zWyLM4dIsxIeidjakogEEmIQDG5AYM3teHhpLbHn8CDUhtAIgnBti2Ad6ZApg3q4Yri2IjhlOmY+lFYZBMSv1e5OL1C5rE2Pq4aE6ofvnqLO0wTLslzP8zjcnz9cjieq9QJhgbX4YtUDcgRG2JTSev4AeeA8eQMPa+taS7EQHiDWa1zfjZPUdf3k/s1FD9cWn90f6zx/d373+u5QWv2mLJN7Q0Boq88Sk1udhjfkMuXEYCE8rysAVyTVFch6LEQAHqaDIJhfCnyoaOPxiAFRg4GFcGmzQS2+IgqALqaM4SquWuoyHk9qWU3BalmryGgA1VqEogeLW4QZhQnxdJyGdV4cB5JJzMyaI3F66TAhwuJXEVlafUGnhBPlyEzaqAUL3x1OQylvIYqnyamnERLmoVVsbhe9qJvICwhXmxmxtPeOwXmM0AinSO5m3qqvT+vbfLhzh0u5nI4n9wioAXAtT62siIScBNkMI7zVcpqmErrWSo33zX0QEyK6uZl2FQoEIhJSEFJPYw7obWtAIKbcY6ciDJEQUIgAIbhXIu7hzLTVfrEhJr0Q7cyKrdTpjnSIo+TWKnOuWt0NEHvtHRDCFIjaSiAI5QAodZWUyUG1ppSZxC0cTHgC96a17x+XeiVJph4RRIKEm/CBhYm6jijcXZ0IhZOHq9mmkrMAMANXb+6eZQAIs4ZEgY6A7hpbChY2MyJiJIxQs4DwTtIIzJzMocu+mDgghKi1NQJJuDc/kZCCCcmsMoqR9XLJ3VlScw2PJFndhaiPrYUTSurWfsJmTgBDGqtW8wjAppVpIMwAVOoKEEwMTu6BgDnl7vJPgBTYzQWIecviCWhaHYhYILBvL4iFAwAJkKI3kSKIhQjDwKBhhOqa0uDd/YkwPBA9kRgAhoX1tN7gACIyT25hoRCOSFugCyEAuLVmYKaBmGRUCyQUubWmA93VFMIJKZAAKcvo0TA2N/GASCkjkno1D5EsGB6eUzJt7gBEai3zcGtPM1MAdHk/bZbhqGawuxEFEndy+QZAoLulDiDsQrBwx+hcoI0otDvf9A1Zp1XsBdKG/Gw9cOqg0FaTR3QLir0LH4CAQQAdXCREjb0kwhsLIyx23gcCYncS2
qVNcMME+o3sLJqPGAC3Wvz20Z24sNEVvHf0N2uirXzfd5axPda7Qw1EdGXJxobxLnPbg0hvb+Htxd0PFWbg3p9TcEfort5b1jQi+uZGBLe68aMzxhu/CTZfhrihDdsFfQTuIFBE7PoUeK5zP9KzfVzW9lii/X5soNiOymzo2g6m7NjWZjV+w8aex+uGwWy4fETn7/TR6x+lG5tpv9fbiN++uh/YPbbl2N1MAWHPfIOPbmx/gGAnBvUK+8bwev5cn/yI6BHPzv/7NMEOSG3gV8d3oOv0AHf20Uas2ohpnVmHcWNNxXP9js/zb1cERn+mAmIr5jfIZOc8bVBkxObhs9FGAWCnkyOCt3JZr2d3QDMiyEMWEW8yV18tzFwkkyBIAws3raBGY9XRSj0vehjW0+kP/foH77/5m5/9qb/8+if/tcPrP8vHlyRDF3xwOKBvylokQPTWEJEEmMif1n/wV/43/Lf/Xrs8/vjVJz/U+E///X/3X/qf/ntPcoR8uD6tVChjSYMej4f5XVnqepCJtpBs76TFHXzZgZjnudyT1GB/3Hv0XyCGgUOEddwTCRT0i3/8xe/9r84//+st5qfz+u4aTUd2BuJxfDUNYHX1iHEajfm8XjNBILtD4mniMeW0PL0ltYxm0mWwurz7Jy/WL+D4Evi4MUW6FRHsTVl4pvhsj8Cvz7K4cYP2i3p+/Pb7vE3HjUy2LQE70rlDldvM3BemTcq2D8ztuHGbUYgbPNq5RRARwBuH8Pb7Hk5E2Pk7EOHhUJnCrTClLs/s2vKbnzeA9xwGNwUAAFZLxWytMGOlIIxIjChpg8yIOrzZFYfmyh0AYAmACHUIxOhLPoBHKGBXOCUIgwAk3sgru3QNXGknjDJuCDcgM0tOaBANHJEM3M0hFCDCgaIzypnISDXKGuMkeeQk3QUYkYQTcQhzHlJEWB4HyZQHBv7mmy9aqZKgms5rCd9CWlmkF17i0NvsBobgDAzE3d0lCwdAx0iRxLsnf9dlsfSR7LuCQYiRCSXj6emyWJYPl2sEX+YlAjR8bY0RQg2ZE3FOWT1qWAJBRLAIQ3cipIxs1kTSUmcDqYjdmKq3m5jFPBCCMRA4sIuNkQgT4cSYKTicECiAuzcgESKyCBJ4d4Jz29pCvfdBsCVoBkSPy+13Mhx7Eyicu8FCWF+7e98NQDEcOh64idD749HzKjrVw/qxmah7ViL25dd77h0ERfTovDCA2rSqqZmZArqqO7i5O7ErhG5PwQYV9fKAUMIpJTCtAeFeQDj6VlettBUBnNzZMbFQAvdMZI4ig7eKQRzDhqQyIiExHk6ZB813cjjxy08OLx7uibghlBLnD/Wbr+d5be8/rIkw8Firao1xJAzJ02mZv0mJSDIsRkhpeFF1NQJHJDLw4qallcQDCYA1jwBfjK4Q3M2KEkvAaLool+ZXUW/akiSmxJGsrUDqEdbQqpn4eCdJsDXVCt0kzBDCm+QERMQJQD08SMPmtQyn48MXf/SL3/rs/vRieP2j4/0vHz+8L5EHW0LV0WKF0u3zQNu6XNwPZdbpcJyO97XhOBzPT98eKUk6rFF7Z4BQCMQIkUAwmBiJIESbI+mYgUCuC9VmbbV8GBCVE5uaupqCY/rlLy+fnYYXD4ervnUTzAeSYxRzrwF2ur8vHoQAIoTSmksaERXsgmkK8p6gTDAwAjEgzijz4Y4Pp3Q4AmdMhwEEkZkkUx6qFUDOnNSWtbXr9WmZr1HV1EstfduQp8zDGPkop4dhPHKaJI8p5b4RRwjR5NbcTYiEKWcWIdOmZS3Xp3o+z5fzui4GaoJFmkNJ0dJuv4lMHs7QOcC2LfvUYwG33cy+zSMi6H4osZH1tldSq9VURRiCAbColqTg7TRNpkUgoq6lLWk8YXTxNQkQIefDEOZpkLuH+4O5LR/i6atgYk7T/etyvYQGU1NviEzEiT1nAqB1qe4j891xlMcvfvHSDO/vx7sTCdT5uq6Px9OhPL31p/dvPvud337xGcb6SPlSGgG41mZV6yXI1fQkMA5ZiIorhkpitZJpwBg5QUjW+kEgJE8R1a2Yr4buTN3ZdxoOZIaBnEIsQIHTlBIP6d7qeswpbH2cqzRPLFa01Pm6zOQY67W5qh0u82WQxA5ruSSZRBi8RpS6aJIwYsnsqu4217m1MowHXM/owDZnHmpbz8tjSyMmKdWAysOrSRKWgmvgB5KlFsAklAa6H9IUdli8HfPdkNjQUoI83q31SXAdxmEtszaNyIyCuBa9RBgTWAjbcZTJeHE7Q7QUg5eWRiB0JEYRa2Uta5DX8ggwEUkSUaFF9e54gvFQ2wVDEsO783eJxjykurTpBB6X5tfDQEy0nOfjeMc+VJ2FYIH5Xk5qXkiFDKLVRsKH1XxFGfP4+vCmfvNdVUedkQQhB2BOJw1Tn+fQnJHy3QCEcAUs5pFErDQC0aatriHYgtJ4hxBRL0McU4MIwCSAda1LmKdMLrSsRSSNmTMjwSwpBs4sAreXQS+AEKErXAjUjMgRt/QuDxMW6BZ+RB4tInpmR8dbEjMCJqDePu56CiY2N0RmxNYaQOdRA3QbJrPeE0WMolWES112pc62oUOA1lr3h4EgrYZIzGO4E2KmDOaAiohCg7bWyXWuHgCJE5AEtW7iQSh9W+idx4Tdcdlxe6kaAgpl5tygQBhCqLWUxm5pBIGJGJFYsmpFcwfroVfCydw9rAdeIMIgea01IhyUKKlZzkMpBRENgEgiwC0Awq0QMwEjkAW4G0DXNCVDc+hGCVHN3BxZAgCJKTaUKokAc2sNEYoWVZNNRidECdzcNEnycAJEAejSNkBEICQLZARVYxQCqOYkHABuLizNTc0ISQjcGgG6acMrgEga1ApYgENiKVaJGIiYk0IDByJGIWumodw7kmBmuu9kiIlVGyIxyjaHXHspbqZAAEAIuDlfqXfCG2C34FZCZE6G0LQimMeCJOSoHixi7gHg0SIcid0MECw0fACnUtbDdOTMfmvSAxBCIg7XTVPkPUamb4dcmC2AAhDRzIiomXVvYI8g7P6UkZiAsGMQgH1m9TKob4A3ZAf7fq97a/dTwNjBENjPByHAAyyCEdyDEAVhZ7SAEFpsPedbAUY3dKN30zdcq2O/WzHeCzWH597/jiDd4CTsTu0EYFsJDhu1A3tthRvWgc9HuGlxCADcd+HX9pO003tuJ4s7z2Vj3W8DtGELnasVbhDSj7oVjntgRdyIRRulqkdsh4cH3kCb584QPsNKO1DSt9O0ncZz9bnpU3ADgfY5siMsneuDzzhM7DUrdCS8J7jf3GRudIg+Us817nZW0Rcl2HgTe2V9A4i2G7Un1t9+/UYgcvftRNzD3HwTcW0Uq57WtF8p9LSmXsZvfLSPcLudxLR1B3C/xj4vbkgb7t/qMMZ+ohFboXSrzw0CIMydAsCBN7QKPirN/VbaPDcAu4vXhpEiwC4v2u8kbqY2+wj26rJDjRERrNEkrYnhaqBtzePBAYETIZMXBu++QG41J0axOrdWi2tltyENy1xQI7SyNF7157/3fzv/9D9/+aN/7tPf
/VePn/xuPn0ekT0IMTGnsEBXRAiSQFdwdcXl/O1/9tf+5eHFnckn96/Pb8//yvjNP/uf//flh7/95sdv3l7sZ1/Uf+2//d/KEy9fffEHv/f3/txf/jcdw8PDkSSD+22i3ibL7dHu/tDdwyRsw5iRqLMQgsACUREuH+af/v2vf/9/e/3698EerwSPSHw8vDq+uZ4fV8K3eklWM2FmZvAk93cnqa0cxpfC7jWICdCG6RhooLooLzPeHWH++g/nL/7u4fDjwMGZWRifbw12FSfARsFBxC0Q6gZ730hDO0Z0exi2BYJuSrVbz3d/pD96srqGDHeoFrb14xmY3SiY+xO1P8u3takfD399jdg/EwHg1PMboKPV0TOqaNca98xTCkBkhCAIDyOi8P4WgOZRPapCY2AMik1+jIjRKVdI/a1AxF2lj65ATLcFEqlbrGEY9gUQmZFps8ZHh57WDrGDSuYNgQkFMWGohxFFzhTQASGvhi0QgixsI/IFhmmAAQNrQJV0OCQZGwQCgTmwJ0lMmCgRcyQcJP1YcIjI4j/92S+qWWnWPMKMEQHB9gcUETEkMMiVcFNMdS0LEwc4EnTnHyAkDGYKjN7u396QYRiA5o5IEKcxOWCENvUIMANB8QYsAIhJUkfCiDChDDI0Y9NAT4y5v5m8q7WJLAAtWmuJqIEHNIYEEX1EGbcsFMbEJAKSkCicABMRuDMi9GwlTsEJ+7137Qsj4x7+iAyYYiPO9rQOI+osB+rtpIjeee2v/XD0cOftpsH+vqYOUSJyOBDAFsDXe1LUZf29SbG9JTtbHbp5ZHAfTzMLt25QYO5VzYAQGSJuPndbeeAWEFHXRfJIhBbeg2ITZwi3sEB093Ecqq7aMyDdVJtHEKfaKrkFqHlwcJIEELXU04EEgj0OiT797O6zHzycjveUU6mmzV596q++d3h6unz9i/jwWM6XNZzk7qBrhbh64OGQW6vNysBJq65akcWjZTk2IIBIMnggYLRW5vZBGKvOikeIaTrc23I297KcARqxTEMGhMAIchIs83J3f1pq0VaHw7SWGoaXVXNIqIrkNCRzRyEMVkAGt1jJwMyQnSEtpRCuLx/evH/Xar2ePuPf/p1Xf/dv/Wxd0Q1oyA7eAorHYTpkSKfp8GHO968mNXemOi+TLfeH0VYztDxKz7eptRExUjbUQANn6LtVcwIyBXOkISdz1atCKXXFwEwDIrRYyTOTLIqD2w9+4+H8VB6v5XB6OD/aAFjXs1NiTK0tRA6MREzIBAKiAa25Z5oIE1hALJhannw8pcOdCOMwTdPdvYyJECWNaTogiunKyYCt1OXx8fzu3XthCFVdWwRIEgTIw3C8eyHj/XA4MWeRYTwcRJKwmDUINyON0FZzZozWrheNWOdlnef5fJ6vi4cvayNGZVRQYG+kIWBuCbemKXy0F4TecnTkzQRj34VGdKYhMiF0VcRH/3n06GqPJik72ApKoWMzryuJIaxNNcEk+QBEEUhCbqVe1rvTfSb5/De+V7761fndV+B4fPkJDae1EVu1y1vOwiM+vftqGAaPdry7b3Op13l8cTy8esXJyvqhvbtMD5/JeBpefP703S/end+9eP3aF9PH7z7NRzt+8s/W96us7BbJDMG0ebCjrA6jUB4PVgo4atRJhnnxY0qHIT+VbwmQzKE5wcHDQtKiK40n9myuQINqQ/BDnrJM86Nlfnl/eqXLNQVBrVfXp8fzhHx88ao2bfMZQw/M1/lCKb8/P0WzlHJT1xoMWtYPLInELud6uLtLPooGeg0K0xrmen1sSxmmA9D17dffYDqmV68XpunFy+v1Mj+pDEcIWKqFk4hkOjqMeRzG9NLdStgwjkRa59WBh/FNbQoawOYYxBDVRjmmNKgtBDamoenadEYc3FYWWGpd65LSmHKWBLXOiCQxVC2OAEg8yHleH4aXx3xYcSZC8wp6bnrOPK6lAoQDmONwOOYhKJ8FQZIQpvHuVRCaF4tV3abhGKYDUWstTwM1jWbIeH84AUOJy3q5Og/IiTxme0r0oAoARgSH8W5ZllLfJTkMmaOCOgJmD5I0JJTWlsAL0zEFKzAl1vnp7u5NHu6W67shY7PW2kowBIIDjYd7cg83GPlxvoTTON0n2tsG3UKF0d2YpNcfQOgAEIYOCCiU9k6fBnBvw7RQAGBkEUZggLDOqYktcNT7ZjfcPJoWRKLgcTw0VVcVooggkeaFkTeLnAggNqCqBgpMHG7WVMMo59pWJiFzV1siJMuQua7VI9I4BhCiSRpc1cxIOPYYaSJ2CKEeQRPukSQlYuh2huBIEea25bgqIbppNzpBEe6tdlMzD2YgQJKtMEZU09gcirmzQtx6yBSEoVpjpogQ4QgHFAhiBAi7WR8ERCdDISKhdKrw5ghN3LPPAqJ4C4SBUu9GqDfoSkCIGk7IQaiuiIIgtFkYNqHJwYXJtAEGEgeimvYUntY5Y+DoPS0uI/aVoxGL9Q2TZNXS4b9oGoxd0Z8wWdSmpfMzLKB693/pXJnORANzCPSuQesuoRGOQMQZIVRNGD3CiZHYtEZPGDAD6Cz9CHOkZAHm3vkFvQ6NgCQZgKoqk7mrRyAxALTNbYoBGLw5xJgHBPQAyUO1gkhJhtLqrTwwM4xgYY+e7YXbsxDB3VvEPRzBou9pLZyZYDcQ2OEV3PAKehZW7BBIENMWCN8zwm6b6Y9QFISNzXErWwg7NSg6rcn3LvPN/GOvIreD0Vbzh3f3og0D2ok2+zk9I0UeuBdbcYN3/OYXg9BjRG/F226xHRtM5ts1wEbxoI3ysBVrv2ZrvR2x87Y6M2er2sI/4v1up7IhVn188CN/optN+C632ZgXO4vh5hB005hsKNReI8b+Tdx1PfHM49hOvIsNt2/gzfN7bzbtR7sZVMOGasXz+PRt+tYJ3hfcXhw+eybGtlxsaTj9fuE+Z3ZuyU6f2HC7jZvQi4Vuxdh9GHtiI24G90QEEc8JZftU7KO0lckfoY19le8zHztXdL/RgJvkZ38Ad9UbbCqgG3VEPWgHK7vUwmIjwZD3hl7fsu2ys4/O4nb/4Rk6xQ0V+Pgk8QYLIHSrso78IZAHg1ZbIYIFpvQCcGIYathi6pQkhYNTkDOrghvyeBRSC1AFDJc0AUprfj1HKWU4Xo/pj75++v9+/Sf/yW//F/4b97/xrx4++fN0+NQs0H3j8wE6JgsI9Ib6+rj+uZ+8Pny5Mt5f5uPx01dslz/7rtQ/+Tu+vPvxb/2ZP/1f/rfXv/s3/sbf+Q9/+vbdn/9v/tt8kg9vl2GYpN+vbcZEd2TrBeQ+KB023YR/RAhunccQQErgXmN9bF//yeM/+6vf/cH/Ky6/Kv703dv1WwXOx2b6/vGXKVwbTtPDaTocxgnCifiyvG9tzWkIsKgVmi2LJSEGQKFm0Awj0ofHyumP79/+LXz7u+P0LwMdLUI2p6H9HH997dpwQPhovfvobn6M0Pwacvn8dN82+nibDc9/3h+9/gvb9zC2jMr4CAvfptnt7J6B64/JdYjI2FvRCMgGyoBE5JtFMUcodO5oV/5B1wm
B9qYapU5TUYemXjVCpJPvzIO0sQhi6k7HxBQf8eTczQIwgJkQOgTa8zxwU1wiAAITRUB/dXdJP5gHEVFy69ZR4BgA3r/JyO4uiUYCQgD0noiNzrBJTcGiubkrOgEW8TYDnpiSmwJEJ5cw9xAFEmZBJMTv/wA5kZv/9Bc/g4CckkUIEhJ7WCdGajToRGxwROz69E4a6MzyDmV4NKbEFELsoGYNEMaUEd0BwKk5FG2mzkyEcH86eMSytnkpkgezCMSUBiRyCHPD7pkfTugK0Wo5HcdwC+cAQ4CBMmBQH+2+4gG2UiRn5oQ9JiUCnRETsQw5C0M3IgLwzqLrizkSARJu/DQKhN7/6/b/GH0vgfQsLgPqVoShG+bYEwqwE4DIt5eXRw+ChQ4+OgYHgnXKLW7+bT13zTGQGHEz3UMg6gZgER1jAmJyE6KEJMTC2c3Cg5ANvRsm8G7fuJUHfX/Gwq1WZwqklHNCdkUKWdssg+RBSi3qVQkYWa0hIZIgsWsxMkY2b8xi0QjtcEjTwBR2fzwxgkSdDny8k/F0cKJ1rmtpeeLTy/Tmzd1Xv3z67pv166/naliCyJGppUSCMq815YdqNqUjGBos1hb3pdtkJjlWqwYr50QCCOOlrAB2rjMTI2RmYSFzZSFFD6Jwcyt48EZXFaMhzW22DKvHspYjjAJAbpQwYXZz88iJmN3cAyPMOKWcZL3qPJ99oF++hb/0wzdaf/WTn3z27lcPf/zzJdxNXVXHMV/ncz5CBAUGYONJdG5lbS9ejMBXU1BqjpyZMCojpQRhikSI0dQTSg+g5kFEuGktS9EwGWRM0rR4a0S81kXVVAsTsse7x6X49PnwkEJzq/XDt4dEd3evztc1DAhDG1sn51KYLgQUoSyehEMbUWHw4dho0vHg45GGScaUDtMdBISiTCMGQwvAgmiu9XKenx4v52tppS5moZaAhmmQlA+naTge03BPMjIN43QY8sCcwx1CozXTVcuV0CTW5cP13Ep5WtallqWxcGCU0oAQmAq49iCgJC7shJQSIrkHkbv16KIuGe6Ic99sdvY++cbl6xTSzTvy41cRiQBgZ27nYAx0c60GnlNKV3vU5uDVURy1WGBt2db7kX/8+RucBS64fLeMaeJhuD5+SLiqD8PpXi+PXhURHBSg1Su72dnfDcMomM/v3sOrlSceZVy++aZo88Px+Op7PP7W+2++Pn/7/u74an58f3qTfvP16/YIqZXH67mho1dryKO0WleqSAKqkPJyWZm4EeMoQN5oXeOpmCfmhAJ45HxAJMfFtXhd0JkMeDgi4WKUEFHy3fBmonSN+TKvhe2dtjtKjlxqXK8zlHZIAFTn9X32Q1QkgLIsTV3yuNYzWRCN6K6hpWkqSlS8FhgIwJvOTWsi9roqtXmtYnQP86jf6Lu3YpAtF2NVIpXTmA8wfHdeUTjLoQGAIGC7tO9Sbg1bwDDCkA+v1isupWmmSJgAOd0BDwjaygpeIxLSEdm1nRdrC5U44CExWtO2sLA5sox8knLx8Cga43gSjNYugHQYj4dBWiltKTDlupbM2Vq9tPnVixeG1dyIOVwd0zAeVJ1ZaqljToBsSwHgin48HFI6qCpBRpK1PkEKStAUh0StNYTKOShwLRrOBExMpmupLR9fpPGOCNS8GiihYQt0gAi71loQhkSfne5+B2OcrxfQWK0ic5ZjRGqqmSBzwrjymJsa8whE0bzYensMmJm2GrO3sRCB1Cr3mLKtkiHCjMIAoKY96kGtIlIQSM8rD0Dq2Qys0dyVEcwtpwQw1tYYWc0RkFkY2awSoGBCwFaehA8AZBaYkMMBjNyirnp9hC5Vcg0kqzav7e27a0j63T/zp4ZhtFZEqgMxk4f2Rrug7AFe3OtSNWegrotRN+4VntUhZ0nTanOv/bp1HbBEBW09spMSS3SyD3hzw1tvHdBd3QxZlIAkQQCYCSAxR7fmZjZrjITIpuoQQmTuJBsTu29bM4n3TX+AqRJSALs5ce9daUQgkJuHWXdI8n5rkLIkc41AplRKFebaTJgFOJo2L5hGAvIAjGAmInGA2ioDMFGYEYvQQEDhLiwRhsgBFuDV1b1RCAEGJgAi8DAFZgczt5wGA+9FTu9rhQEBJWLtmiuEjiX11qy5cndsjgDiIHav5oa28QTMt9xKsyBEFNEAw76n3sgNHgHmRmGBzESwG2OFA4BINlMkinCWhGYQgRTdY8CjMeWo9fYIxDNm44QYBNEL5l559B4dIREY7cUIQid0IIQQ+hZtBgAI3auy+4gDAHRDcdoawdgz2ruN0U0q8Uxt6Z0OAdSOKgUQQQQ0d6TnBsmNZtEdLZ9rqw0IAkIywB3k3X9kd1i+1ec3jgwi3K54w3AQuspnx4k2Tg3s4qIOqvSjbQq0Gx2EtixCwO3F+yyo2eCz3al6g5m8k/OhSzgBerWHALZxYWinCGwwECICbKpXeGabbCeMeCsmNzhmK0VvdIbdLmhnJuz0gmeHkw3+6efce8IfF5XPJS1sVB3oZIBOTcLnT90GG3ZO0HbdSHCjYHWeCEDsoXLbZI/tb7r8c7ufALDxt6Kb1sS+y/GIHb7cOGyx3WfqlXI8n1B8RPDZqp4dtexYTOxq/h0ei+1CnnlVsKl5Y6eDbHSlnQkV29TqJSreKGbYf/HXULQbIBC3RwI+xrK2M7k5AMcGOhFFBHf8zj3MdMX5XKG7okckxuqBgQPzdV0RkqqqwjCccDBvljJgElTzsmTK9+Pd9fxNazWQn+ZLfPk4TEi1/sO//n99/flPP/vxv/T9P/9vjC9+6JjUXbKEm2kjEHgqfP4nf/xX/4OX779pZx/jZDXWGPnxLa424j2O49PPnqbf/PLLX/7++R9/c3w43d2/Ob//8jh+isRboKMTMUUo4BakCfBshYXRpY3d7FYBBSAcDRykhq+/fPcH//df/r3/h3/4Rb3O5/Plnftl5VKYoMohGxp6MKV2vjxebR6u0zQJIiAPRGDweLm8OHDKmHl0swCtrTmEqQfIea6fBT3+8h9Mn/1RvPqdSANlie6XRNLj+Ag74I54M9p/Bqp3wOYjlPhjhOm2rG0f7UsH3Z7OfWJvDyruY7MvL72bsGcU3iBmuNmf39houK95Hz3SAEDcTU95yzoD9PC+stn2Q+zbX3RfIXT33hrgHmqG0GGKZwDIA0I6rhXmDkYkQODRcR8HgPBOKgEMjjAH6ilSuLcd+BkKjy5765TELuIFgHCQlN3bxjgDhLCet4AITJCBCIKYG3vTQPUgcrUgB1MGCK8UAJ7b8ijTgaYHRti85kKBmFiICYmIiJgw/FP+vqo9Pj199e1bCGCSngeCwG4GEETsHoQUQd2tAMz713tbHxHDwMKxx5YgoPftBzcPRCuq4eyqahYIOTEiIKEQboFUROOQOkY0pMR8WlcFQHNVV3eFRObu5Hj7VXA1ZaDO5ekATzMHFndq4VtjJoyEMXDIOSdJ4oxBYRQYYV3GsiFHiEjM/W2yvVAAIogoINwtAjqqGIBEiZwQHdC7l0Pf2IR7oPUVDcEx3MCtH3uT/6Oadp
HaY9ez+k1PrGN4tAXFUUUSCsl5fB2eEgaIHcmuNC+kFF5z7OpmvrWKomduTYDEhFtCRFyVqLcrtudtMWnZlW13gtVgGdb5BdEmqayyrquvDh7jskqlzLtIOmNVBP5gjG8T7ZMAp6C1aGNO4WF66MebFYStYxQwytwrSfHrqWu+AXy+fDwwM1pwAaoyI64nlsrqJzyp2Z1UxLdd7NhFaaZSEwNQvOI6CYOmZRJGQwk1qerjVCNjYT8S6AiqqYZuBgYCJCAFUqETlkZCxlMEQTLqlgiI6UTHaHMQTnYpeyCo4uMmPcPryfKC0XZ8G7rosP2301ALJu0Yw63e/yJBNp8c41MWCZplymYaqxxNXoOW7HPUVfylRBHccYgvdEYOCYjKc0bvcGTMvNugo4duwo5SHE4AH3DwNo8T5ard55F/36fFMNYZpSSWdnF6zyv/+7f/t8vb5+fjFMQ9O1i7jox5IKLtv4ePPo2a2WF6Wh/+l//h/Pr9fLi2bY7jaLiGT3w84tPIOB6pCziJFp51szyRW0iqECAgTzjS85eYSLzcVUtB9Slgeg6CKTC0i2XGzO1sE7p1YPlLvFVRoex/2hYXrYbd/vSymQxYhYidbnZ2erzSGXlPuHD9skDz/+Lhz6nIwNuG2WpUy1Ejva76ZuHcdh7LrF+eWzu/0jE62Cc4586PbDfrvdofNWstRpsVwG70V1GkdDRRPWcLF6frE+jw19eP/2u/sfRK2KhXU2ESYNzjlqPLFILib7/X7oDw0gIZ9dnHXdqqpMWd7dD6/f3L943p2vuxC5bRoRMQRmNilEAOyrKpr4xs/iiCNftQQm1VKLEPM0jT5EA1TRJ8H0yPKl+c2HAFHURGamr4mZIRSp89sSIFZVmGnQgHhEYAPRk9+EZguHznwcVXZz7THgsdH96M5Qg2PY7WTvMDxakAAA9Ih14Zk0pIont8ipy9PM4MjzPB27z9aW0xudnfIVc5XzkdMxO01+Gn2bWUrzX55IMQQo9tROfvruYgSIjAamadrffvBtLGqL6FKGVHXROOmH2IQQvDpaLiOtW2RQs9AErZWYlzFoHqyIlWJFkshNGtuGY+dqX4rq7fbw6YtnQ7893O6CuZCnN7/995tnL9effDkp3d/fL7vm6tnFJ58///u//evb9+tnr54P0/RP/+Hv/+q//T+vX3z18MffePCrzcLGHUNTmwDEojof9Z+A33CyGZyeJQM40XZOoCY6ftI81x5jW0YzyvqoGh2VuOMLggBqOJcfVSkFVusGa0bLue91vP/87Px5u/zhu28kb5FxeLgVheXZZr1ZNm23HdJw85ZQ39++qVOSnDvGzuPdh9e+jc9enL+8fv762+9Qcxny+bPn20P6818/d8F/8+3v6rtkiEjUBKemTGxmLkYwOBK4bAaNGznSXPjj8H4ycuARsQsnGNZTVFJhziYrgKHNiiQQzq3SYDMVEmzuvZsXz1wmN393nem9R3lI6acK1Qk2DwQ4N3I+jegAhlANUA2RmGCOZgDM9ggDZLQM07sfv/1Xz67Xq+VXP95av3uA/n572FHrrXAtxflmFIOCgW2qMAxTyqRMF6uzxyknwrGiuphLvbi8GA67xTI+f7Z6+fxl13VJymK9fPHypUPYP+4pOO/blDXnDGbgYlycAToXo3OsVUUKhQDmau3NQEQ4tKUKmqlWR7GUUquE2I79aAIhBkMUBTRnebS01zxhs8o5+9CIQBUAYt82THXY3bOL6lzNyQXnnINakGlG0UfnC3sN7XJtYnDop8M2dS00wf3Lv/w6/O79u4f9v/7N67/88ur59Yv7+31zsfzy5199/7t/LDn/7PPzZec+vH79/NOrr768ev/j7VjFF2kwgNjls030RVKpQ95td+76T57//OvHt2932282K96c593DO2yuXQBuvUJfy5Z9hy4gysmN+XSzOcmQp/ve7Eazp396as37CQJ7FnSeqFizPHRKjD6ZH58+82irPN3dTi5AsBPbCOeFfVqApx+CSN6JKJgCkmg1ECZ/zJySHksYgVSUCMAsSzkm8efljwqGRDQfzxOighXTojpV6Iw8gCMHBABiRIAM85RuyjNyDskQmXiGQaoZoZMZqWYGetykiVUUOUrVqgpmWthUGAAYagUVUDGRKkpqVoUJCAmQlR0gATI7D+QIGJnAESiBQqnZVB0CVjBwQB6RHNXotJYqVmEGyhf1gtNYpmGUWg4PB+UCeE6MTWg267NnVxf3j+M+11KkT4WCMyCRwowxeMTKxNUKAjjH7hg1BSvzFtQhGiEGx6A6p8mQmGGuXQAEVFViT+yQZuacMvs5IhN8QCIBYyZVIecM2DlaNq7xRBYQKBATWhdDywhSyVwbove06ZoQfVULzvvGxSayZyR0CFoy1ALExEFsMjUDqCKl6mHMu8OUcs61gqlDYkLnuA2RfUxFtBRUZiKeQVMiqkKKWcQEsaB3Zt4gMCAhITnnKZiKSRUkVTl2EhjOohkYErCCsencwnq6LI4HAI5RagUTrVWlEChoJTQ9NXoYAtIMW1I7Ge/go6sIgRtfpE61qBkxA6nzzrFJndQSsQbnxDJ6bHmRc0GqhbMKiCixG+vkPBcZmuDIAxJ4MjJxTQSsqYgigANALEPRiuN2p4LTYe+7BhCk1CZ4QIretZFyLtOY+zRMWUsuYNZEDsE5T943fZ/TlNr1pfdhd/8OUZqmyaV6j03nFsXnWti52HkAvTwP6yWlAqCQU7q/n+739z7g+TI8e/nijnutw3//3/2VQvm3/79/pan84heff/rFi2/+8bsisttND9t9QK/V4jIkSazy4vnFp19+WlIax/L69XeXm2bVdUQieeh3ve9caC/YdYdt7yBM+wnhANACEAI0bNMwbqs2y3VO7xcrhvQw/viP/vqr1fPz62f/3Yfvvnvzzd+b5W6hX/3qWRpk99CnXAHGrnE+uDYsCJfTWKZc7m52BrWUJnb61dfhcV/7A+4fD44bZiO0y0tw3rrONUvLqe/7McRWiw3bnpl86xERRBippsnmjWeIQH5zebneLF99+vO3b99//+3v9tvDNOXYdCIgUp2js6s2Ljsp1bE5E8JEISCVNorWHUU7P38eXON9rHl8882baZhkSOfPr5ZXz/xq0zTe1XT35u3+7nZ/98E1zbJZpzJmTdvH+34a8zioQC1Wqx73MTTzyKBInnfChQrl5HwoteScm6ZRA9Ljjhhh3nkzHKkCpFKPNIc5ECyqc2izyvEqICpVpE4+LGpNZZqA+O6QNk13c3tYhUXwbtWGy8szUPCBg9mw3VeSF9cvgvNaS9plBVWsCpbLpCWDFcNa0ZXce98x+SKGZlqVQCUXwBZNZXvzAKksnLcX6xdfeupgQrAaogFo7XcpHaZhrHnUPHUXr/b3b26++2C1xmhVresacFzLUCSTDE3jMRcXiXnKRcuA3pNzIvmwPnvxWGs1nhIQmw+UxkekDNKjcoxu7DV2a3h8X+uBkUodkXSaimfv2w6mVAydmuYUXKMioNp062H/CGaGrFY4hFR7InaAWgxJR7t
lFqZIhgLmQ7vaPF93sx5XH958X8qkNQNSu15vUc82m7fb79YX0bcerBAzdudNaEXUjMJyTaxvv//j3YfXFfKGqGUix2yheDLRiuhQAaFoyTmFuPRuKfVBbaSABjXXQWpe+kWMnHBImj0uslZslpiLggo7tMqMTElm/VqBiINfB38drE+lgPkGGnRhu90tu2jMeczIvNmctX6Vhz2KY7foVo0nJnOZa5G6G3dNw00AHUtOAG7RrV6yTKOOrvUsuUihmHwXfvj9Xec5a2KPYocEoIp1yh64mCKs/eIKHTlsHh7erpfLsd+GGIhHsd399p02y3J4I9YuFz4ddj54wlDKwfFqvfh8v/vtIT+QXwbaiLTLTetPQF87DYeEPNNYmNjZU4NEQDUDkJrJETFbFUJUNbVK6MwETBDAMZc6p+5JREUSOzLUqsVEFCz4Zh4knIsA6AgJbQZJkIPg2t3jOKbHhkGgiigj12JQ1YCzUU6T1KFbxpx1Nw4h+Mb7sFgPaSxTMcBu2XYVDuM0jjU4q0WZrIrVIgdNzTicL9oxDaKqUkQqsVf2aNWjbVZdbFsxFDNvYCKlIHM4DCNgF1cLs5wsx4ZKnQ7T/rK5ZqSuW7LH3c2uaVpifn1zaxNcvrzsx4GI18uuf5hSTs8+f2UmLspisdx+ePhX/+//yabBO9xvK+WcEA86ZtCOPEvZbg9VlAA9kQCPaXJGwfs2NCUP/ThBTk23FlMzE2AX1MVglmJYeo61jrFxKpTSoYI1MRx2h/122/fTj7fbxzEBMqO2jV+tl4Bwc/v4/u0tIvoQHTbbDxNfXizPz7jAer3ePWwXC45Nw47XK/f4cJCahrHWm9qsz1vv8jDc3e1wtpGjNhSxwZSw5jElYSAG67rzXHKusNvt3ry/qXnsYgNIc+I1PZpJZTDneXW2eXZ2liQVsP4w5lSyKVr98ce3TfsQGyRXoluwd/fb9P5u+8kn15cUuxBNS84FUdmxiRkxu+AoAlDRoWoFSS50Ki7lROC9c3N5woychKN/B484IxNTSLUAGhJpqcBz+xqKGRNWVUanc1yG8OjZOfJ+UVSZuVSZ3QmKaDi7heaid7KnCec058zT+mz5kRkRc5qjiAhVj8fuM1UATM0IjwAXOCo7AGBHx9D8dU8ZtePpIiiYmdWTNvQU50A8DWx2jM0ZHPNxp8jSieWkRxnYjgXNOHs8pCqmrFWhiaS2aMLZInqoULJJMaht23rAYDOlVcgDkTaMFHWsU7OJpdJhVENXJmk8LBraDvKwHz3cbJZxGHMqQ+iief7w4+uw6DbPvkiprDbtw/045eGLLz799rvvv/j0M1a9e/v63W9+s37xcov8eL9fLLBt3O9+/M3Xv/4XD1PxsUUAmRU3RFUlJAUDO3leTswmPJWc/YQc9ZRgsRPsaHYp0OznnwUU0wpzUlGqiZqJi7ENHj2kfGhDnXL62a//dNu/KbplmPrHbYywaGOaxn4Y7j+8mVImAxNVrQQYF41Ufbzvz55dB9/UQW8+fDCEzdmld0VRzy7a//Vvf/vDuzdt47tVU6qpKhERoFYlIFYTtRDcarlYdw2qjsM4lpIDl1zdaUw+4k/o9EvOVdx2ksjUBOa1qIAEBgQ2r2Yzg6NySUfzyJMIZGAIR93xJ6PILEYSACHSKWQJMOOOjI6eE9P52G/OU9iMfNEZTQ+GgAKaSEvdvhkOb/+TP/uvx8dvX3/7u/G2X/sylA+73etxPAxpBHJTgaRNNdZcxgIEbMRN2xUQcnZ+uTKFXlUJahkbJxebLsQuNAvTul50X339JZTp7ofXHNrV5asQ2lQyIrTtyocFOFSkOo7ChIAqBjwLZYTkiVl0ZqgroyPFMo5S1LFJGmqpbrUw8MSeiC3vrGYkNJpbucmq5JJdFwkR8shGoWtzKT4GBGRURZm9LMTkQ3SuAdQVwUxSy1O+2W4dwKXol8+XUtIP9/n7u+3mVfj1Lz///es3Mfj/9M+//sPvvv/+ze7V9eaTzz457LeE9fIsjAXyWMZU2NPt/tA09uJ60yDWw1tu3Obz9dnLP33/4fP+u79lelyfX5jB45u3F58+xy7X/iYunldR5PkmdVwUH7XJk/oKHzWe00WG9FP5Bk4xspMa9CT/wIlydJIWZ8XTjhksfPqWJ4kSPpoB4Sd//kRZt6P6DghiQsSnOM88QwihE1A0QUIFmQMLx1gxHIPI8+VDaABGZPNQAwgKKAZqMNtRARkAgdQRIFTPUW2utq8GaCBsBCo0/2FoMmeG6/G5M0QiFTGRmbIDilqPxc80M4wETMGkiqgDqDXzvEtDBQZFQQRipwDg2RzBnFaqlKEyWAPFmyppYDKsNC8qVVFDdKhap6Qp5d5qlX6/m2CixreL4KJfnq+vnj9fv3+s+70TMaVZOBZwBMhGopaLVjXPLEBiggZzA+nco8KETEpkzhOzMzMibybMbvYO26z4IwhK4EBERI6IAYnYMRGBApKPwYyY+GwRL5bUhQAihESAoHLspmNXq7gAy2UXo2NPJAgGTdv4EMA5I2D1WY3IgSGWLCWrmRjkUlOpU6mT6mgqYMEzEnLjYte4EIgcmaEQeednDQysSlEgAxNDUDh6iDVnIe9d8AGMEPlI7lMAQrUKpgZiZADgju2Rs0kTDKsBMHlFm9lGkwiBoYoHbZiiY2IkJDmy2OX4bnV8K/x4FZ5YRUyiwmiR2aFDw7bpUjoQCKLmaWAOsW3VEFA8eyKpyAzOt6uasppqNvLufHmh2kfnoZZSqzK0XtgKmdda0KSWsmjbYSglF8pTKsVLma/LWhUInCMxcc4vl10IzW6/A8W+f4DqysRA4MmzYcssak3TTqGRNCAZspVcQbFpuFkEMWTvELRtbXMWiigaaW23D8jBjVmZ+O27WwW7vj7/4bs379/ebRabT3/1ybd/+Pbb735M41SrMPsQQ/B+sVieXayJ7fpsMTwM3337ffD86tX11dUS85Br6d8NDrDpWowczzfULPphzCV1wcdm4ZpQSiIKSiCa6mEK8bIOuKN96KTmSSSItbLeXH328/Prq+3Nd4s3P05jKit59jzcP4zDQGkUBRCnVmRzFlfqvN9ISofdKJV7y53HsyuCy2YaSmxDzSIyBG7I6rTbL1bni/XKN40p1SoUHDs20VIzh9i0sZQaggPTWvI0b0kjXz379PPPv/7jN7/5wx//uEtj1zSeHRMKaC3T3L+YpomZakq16uPdwZFDd3/3/u18s/XsfFh03Wp1dRFXlwZBijw+3uzevC5FrEgIrfNNGfeH/cN+Pwy7vRF6dPPukzgQMQBVUTP1jh0zHakMprWkaQrNJNLUWmstxGRS0eHM2bTjjkcA6RgptnlQnQkLH4+RAcBUibDxUXNJ0yHlYbW6aDaNFeau22wuAjUXy9WL5xdnTYRUlh7ztGu9GlgqNVA1zVIzVAFTBCpS8/QYwjqGlVEEYTOpIojonEMgwNq23ThI1YG75Sevft1dv6potWRCREvD6zeqVvpDCBScs1RzX7b7cnbxxdV1Y2hv/vjd7e371g/tKi036uKCRY
yAe8PDvjvrfFgeUsbQxsVyut3RftuE9ZSxi02/RQfxkPpVs3k8bEEHRCCEWsURaRkXy00VbUIzl1U1beMO3E/T6mwpWgRq47EWEqvZpMJoyqKiNWuZ0X3kCMwE0ESp5KlqauKCaTEe5LC7q5JdE1wFdnR2sYmxEa27h+3u8bFbbMx1i2qpTov1J5pAFQxqLo/373+Hng385vx5fzi0YTGBTjJ1rA/TgQACtqUmIGZQQGPBaHi/ey8o4KPM5ByZLE/eUwGYyjAhg0i37jarJuUpjzsryq6KwTQcYrtsIDJ5QFKsudZSsxlSHad0z7QsJTEZAbLzi8WCk1yuz29StkixWW5vPjTOPbv67GG7W/h2fpfth9q2q1IDK8GUIE2+88tAKwnNYVurnnfRpOYpL5pVKXsAY9+NRXxcBwPLj6FOwXUBeHn5oh+2iFRqPky7VHe+CRqD1ep5BZQ4NEqomhY+NnjRKm1tatp2sehQPSEtlmcF9j/drKVcAEHNmBCB5/oFKYLo5pneBzIAIhY2AFRUIDIzFS1SPfuspoA8T86gNGNPjGpJokZEQD44FlMEUlNQQlLnGwOsUss0eK4575ia4Dy52jRATEMvvolLgkMpY51SFi01+phzlToqWNMGDzhMefvYe6amibs+aZaaCjI13iVRVdtv95Ed+gqiTRuLWFFdrlbrxTL1+zrl87MNurZxcez3MUIV2W/3y0WzjLw93AbfMMC7m/eff/rVen3ZLJYMZegfQAKQbTbrb775fTV8+cknxOqqNS6KgAG0q2WaMtR6toxjv/tf/92/TQ93y2U4P1s76deXyzSMVnTRLci7x/tbJccgK++01mnq2xicgVkdpn0kikQxLlp/8XB4X6YkCGfnXcMLYMZqwzSI5lwIAKVWFVAYx8PgnItdWHTtF004e3adU3n9/uFulx4PU2y85qoCy3W8eLl68fJ56kt/mJwPLPli1broELk/DCE6dKSay34Yt4/j1O8R2Aw5cOPREYPkPB6G/WwXn4kggXl/uM25zIkIMA0uFoHYsA8EhCLC7NI4iWi62d7fPAL5Zr0gYEeahmxIcdHlUnGEH//4NsZ4fnW+WKyW7WrbZ4HpvIvrwISVmdl55XoohTkWAK0FEMUE2RUVNHDkHLuZb0Ds59pyAFA4nrKhKj2JL4Zz4m9O7BDiLMDYPMDO0o4aIampm32tOGe2ju1pAHSiXRgcQdd2jJuZEZw8KrNEA6eo9E9O0edx/ZjEAcQjzuhIh7UTamY2qM8/l5BmHxDicfif86XHdMZpdpq//jhE/fQ8/mNC5DjaESDPmtNJIJmp94jIntmzKkTvuqVrG88Yx6n4MmlN68WqbYJKicHF4LWWUrMjDyKAepg0jwfvXCpjSdnFSOgKyG6YCGgdfRrzYRjbSI1zZgJqJFT25dvf/NOLFHGxOTzsP3326T/85h9fvLpera7uH/vl5bI/PP7w+ttfvnwZzs+HH75vxY1j9hm2Nx9wuSZQm4+kTQGInp4/OGbVcc7AmM5si1M4a54AFY6RGT0lYuyIfUZDNRV1plhTGg+oCqo5l91h9/zF58tm+cfvfw/7rQZ8dvFSjN+8fQ11ijUP+4EDayqquN+N5P1F15pKn4ZuuRCDw36n5FaXF6vNqhzsYrV+7B+bdjmkcd8f3n13Y8yQ7Bc/+2Up9s033xLPwFYgRIcciNkwRrdct5+/er5um3fv3/cYf7yZEDHG2AQP7x8A5msAj77rJ8fUrGPO2Aw7FksdiTAz+YsQyY443ifz1XHL9rS+jkrQRxgWPJX/EcxgrJ9E1czmbuzjY5h1AQNQUAIARFUDEE+KOHIZCpUYF+9e3x9+fN16juvYP/z48OEHmTICszpE13QeKlWRMakgdV2kamUeiw290d2+B5Fl9Izl+mJ5fXneLTfVwJM/W22m/nH7+H6zOe82r4y6YZgAxLtA6NJ0KH0f2hWyJ4zEkd18PVZkoOhzcYDIjDWnwCMRqYohqBbTAigGldmnmin3lpOWqoBSy8znQBAEY3agqrmYYhpGDoxEpeSZ6l9zMnTILpUDEoAQEMUQu05ccMn07m7bv7tfrRa//PzStGaxb97cdUR/9rMvv//uHUH56stX3/9w9+b2EJ1bLheLlob9lB/H5Sp2V3A4TLloLfa6jlcr9jLA8I13fPaLi1c//5VcPNt+//f3dx+efx0Dr8c+rWPS6UbzHttnBmIqQHzSawxO/r1ZmUH8yc3p6XPwtIw+Sjkfb43zB/Hp1nV0I50+/4TC+snnH82WHy10T3/B00873vgIDBRmqBzzjLhTme8TdLKQIiiRMzMlI2DRzIiOneh8AzciYgRCZARm54OfazjVTFSBkcgTGhESG5EyOQPDk1akMrf0KCiqGfAR8qSqNp+Cn1Jx8zsMYlURMbWqhkcOABqKmIiqVKgmKkgOFAwVraIJoSKCiKL3Sn6G+LMPmHPJUGpFUHCMVEScqYIhIc0ti1Wrs1rSLifXUyhFh3GctMbdjmjlPbcxnq2Xm+WqnyZVMSapCmqeGEA1CzKToSdyCGRGSEw0v2TOEaEy2lwBx84xeTNVUzplD4l5PslgdkiekYPzVSsTkXc0w9lMvHeeHRo2Pqxbf7GKwXsQJUCtYsLkHBEQQds1sW0X667p2rkh0/vIRMTM3hUTAwWrgGalZKk5jbXWKpqLpixTKUmkzmxthOgwOmYmZCpSc5GqEAnZsUOqWhkYAee3ZhExKWTEjkAIvTsePSgalCKK5JCd6FKUAAEAAElEQVRQZ3o0o4ISPcENzazMCxOQcdYfEYHQEwqpF3SemE1NAK2KyFybpza/lqZCOCdv/uMGNBNDE2JkJDAU0Wk6MEstJYYOHaeaUxVU9Ayp9GbiQqhJyE+iZbk8V6meStYEJREaOSGHGJyLXmCaChz6ulyhDz5NExn6QC4SNw15r2IMmHMF5FqLGVarpWit2nVtCME5KlMqOU/p4JBVZNEEqQIOz66+eLj50WxYhOYw3PhARtjWpggwMYFhQBNtnFNRY3t2vdYQ9D7ZVNjwxWfXv/jVV9/+9ptPrj+9/XD427/+bRPB1K3WF+x9WPiL67W3OcenarWk0QX+6uVXXbPwwcr0qFPfRAoOpQg5IOZccliG8+fn/fbGtUjeha5r47NSgAhg3OZ8cOHnz17+8v7u9+Tq8qKbDjsfYLqZhJt2temuv+4uv0p3Dw9vfxzToS5ptQy7x34/mNTqWZkyBY7BzIVF+//n6r+eJFuz7E5si08c5e6hUl1RsqtR6Eb3AMTASBhpMKPRjO/Df5dGgyABcIYQDQwa06VuVV2VKoSrIz6x9+bD8ci6PffhWlpmRHpkxDmfn732Wr81qMHp+MDo0jR1resiIUjXNwqOvA+hEfNI0dCbEiC3m943Ta4JRB2z1LycT8jsfd94ZoVaVACqTICIUr/42U9+9td/+dUfv/3//et/0zBvN32tNp7G6H0By1lynoANAMmAXGQvbSPEvttsXNttdne3dzf9tkH2y2k8vn83Pj54DrHdQax9h2/ffnv/9utlOqkAW
Pbs52k+z8vapIFVs+p6q4Ohynpu28qjxGBFqqhUqWIqIqRi6tRA1QD0E5cLn7uB1Uz10o78HEsGAFCSmjU0LZKxcxG6NE21WuN213dDsxl2sd10m7SU47Lc9p4Rdlc7MstltqVYrYguNK0WQZyIjJBVnQjUvCCwQdWsjr1BcaElIwQhxtj3zrc5y/0ffufffmuAUgoSSck+UHd1dXX3apyW7ubVNjQIULUy5lpMRX7+j/8JGH3z1d+dH99P0313Va6vuppVZCbgGR9cn5r+R3k889D0ty/m+8dcx357l3u3/zZ57h2dynzs2LMZUxhlJm6u+s/Hw1d12kcExoYsl5wchI3fzMuhjBkjMiCpRddMS2qa4ZAXtUoMtUItSlzBU5HU+I4IiyoF5hyUuofjAaU2TXBts9sOZV7mqbzfn1L+iERdt/FX291PfrFreXALWSo5o7s/PB3nKbk+IPfNcIcUueluYzNnHKesSPvzuZzvPZxYT+dycrHXvLiQhi6WemgdFYNxPgtK67aN7xy3x7xvh53voi4Ja8rnhI6qmmJQ1TrPUGrf9SrqwQI1uQjKkxgDMxkxU6S+CS9UEoqwx9M0lfK+Y1cKNA0ogszHPrQ2lTDx1oZSvXFnmm24stgFBdDRNZldLzVxksbHoK5INd9rEAwb4jbgGQwI1bkqdmKQZV53FbWwd+2Ql4+b7iY09jh+66OzytUi2e7l9pfH09+YQSkltttQmxfXP89yNGOnWJbibNnsfF2m0/R4uQvUiElNEMmRUxFgMi2mGHxXpBig975KrqWyGgIBMQGyC6UWBHPs13akdUWPSGs17OXRipkIRERlLrrygkOpmZCZPAKqAiNgMHP+9tWfp8cHqXPJFbWuw6qkHB0lrAvY+TyrqOPatU1espplMqzqHTWRz4dlSSUGAgUzXRbxqIBWRKKUOk+OAxOaUAhci1jJHiv5tgk96zCfs99Cv726vr399g/3JTl33R1TfvPlX/76v/9NUOr9pkxLS+7l9fWcj03o52m6P4/YzKclvbp7ndKiAEO/ub59uYyT27YEsO198O3b77//7a++4uC2u02ex+8/Prza+DnnKgVVqOTD6TBPS9c0AXGekxH0fSMlo8FSklOv7JqmPS9pXr4HKz74vo25pNMyNm1fUyLqEZxoMTVH7JlP57MLLsTmXMT7ojl9/+Hw8cPTXG2pxsQAHNsYm6a72m1vb07HpQnN3dXgHdcppVxS4qfHgygI0LTMfReoo1R1qSlJ3W62IXrvef/4GC9FSFjNkEgMVTXVxIwh4rp6bYfek1umwuyqltN+H8ipQr9rkND51oo7nMbz/LTbxKttQ6pZKS8lzXMl7DY9AOTpnKbj1fWrDV/tj6dNCH7jdC4AlCQ55MAhApFZsdJ4t5TkvctSnbEpVhEi1zbNMk3Mn0QZXHFaAGSqiJd645Vrs3KIxC59VwiEK1bXgAEQzK0mH2aRSoCEF7qQ6gXwuw4YAHJhTyIBKBkArHn7H2hDqwbzTBFGADW4KLBqa2xqHajXJ2F8Dl7gs7hjn5JlBgCgF6zM5fmxPn/KD6Ib63Ie4OJuf5ao1uW7XuY3pLXoHdQAEX1gRCoiitZ1Tdv5m02Yx7lOy23XEsL2xavTYdRlYkbNmmsKbE6Vq++bYVrO6LndbaBCtpKLFOGSc1pqEhXR4BRFx7FSra33oaHD4VyRncM6zx9+/6v/w7/4v/7dr/7T269/95Of/DItx6tt9/7d+zd3u3da37/9tv7Nf/jFL39ZSllynZ72WGE8nm5u70QVCAkICFQF1yiAKSIioqx63TqAAa0bfLg4WOzT98me43hoq8OFtKjnSCynxw8eRXP2RCnnWqqJPx/H7//wu4/ffbfz4Xwqd7vN4XyGCkNovv/22ynlpt0UEqvFTHMuR8laCiIfp6Wotj40m213dWMYbj67enraP85TOWbRcndz+5Nf/PS3v/0D4fjizeZhP/35l3dLLmYITIzYBO67GAi3uy6yK4tMT4/RsTW2HfwxiQmUMT+/F6wXoSHSswmILoCmi42I1gtyTagR0Rp5uMiXCKarB+giVDEZXoJ6F8VzvUBXSVUugON12F/ZtWsMFNfrDtdX/WTmung3LjcAKWjNXuc8P+SlOt/ErvRfvji9+7sP739T0lNsuu0Nz2Vht0ARj4DLPKE6H8kFNmdUrWYzdi4+7k+pSBeda4KV2l9dX736kgH7dri7ft1GeHp43zZ9t32FoREQJAvUqso0PalJN1z5uEPnkRnNqSTQUvMChECqgKhaUiKyEAbQgmZmJDmn6USOJSFiUAFSrSlDsVqLVPKxzSkDKAJ5VAA7n/Y+dCbmfQTTOk1gQOTACMgLNeSrNyillFnnqVqFENs3r1+7Nvz6q2+fxuPrWq437SRYU/7V9x+V7MsfvTg/3fctxD/74v7j+TSbYnUhhN1mMFyOuW3c7nqHPh4e0v5Y3s35ZhNfXW/md1/3N6/dT27ij754efd5+tv/eTw97n784pQlz6fGN1b3qNdADEDPRCH8k16DAOtt9qz7XH7TLt4e+KGZ6NOTu108aZ+oQz+UhS48n2er5J/cQ6uK+SwuXV7q+VXgB2cwISECExLROjYQEDk2e05XrqxgJEISKAaV0AD0wjZaIVxIq8xHCN4779h79s4RrO1oBOhU1Yc1ha/M5JgRyEQULokIqWJozLx2mgESmpiC1mJ6OaGJAxkjoqEBKgiYyupnhdX6YwpayVRUVRRVV/4RggcF04pm3lEyUzPk4AnMql+4qFXFnNEUAiOTKgICEhMrmOGK+WC1Os2MWqqaWs35vD8xuWG3CWEYNnWzuxpSWomzUI0Ao/cKlRABuJqsWFPvnSMkQzVz3jMTqATH5JxcoNeGRA5p/d6vSxRgCozkCJEdoaG61d3AbADIHLnxzNFxYN8Gt+liE733bqVp11Sriq2HlfP90Meua7qWfABENIoxAiGTAwQSrVoJwGqtOUkpJef1yCuiFaCICSAiBnKMwkzsyMxKqbmUtGQDAocgBg4ZAUCIPACZoioAUhFjNMdkwKvTspZlDZYTr90cHgkQ0PlA7IAdOm/oHBkSIRKTZwpMpOgEULQyGjOQ1VLSNM6piF2OZyV8vkPM9GKMxb8nFTE5dg2DLDm3ngtQ1hxc28eb8+lozpipGphZDHHNzjRNU6UC5Otdm9IpOnIOzKztNmM67ra9qBGzIHbtDl0gHnzo240rRGnMClJVtFYE1Gq4Qs7gslxAJGKwKsu8pJxUsouM6ACDSlUr1UzB5nluwYHrcp4IIbTXCKZWAxoioVHXIrPPubJ3Yk4LgYIE92f/6E00/fj9g1n92//8XwL5zdVwdVO6ArurBp0bNr0WMVIqiiJXt3237bj1rSPX9Si27MdSFkujLBOjFywuuNBF9jEBO99aW0qO5kBAm36bqDUG1THuIpKclns//EPnrjSfD09719zl6Ryb1sps2QH6KhZ2L16023H/0cePVZ7a1jWH8XxWsCbnuhLO0ZlIcUTbXZ+1AnWIrmldyrMICtQ+NirStLsCmKZS
g3EIpiCpMjvnCYHMoSMFw6ValmRqzndNF9cfRClVDAvWly9/8T/9P370X/7Tv3z/3beuC/1ADx9OKjCnSsQ+Mpo13nkXmb0phja27dBd3fju6rzY/uFdSQuDgamLMW5uhCJaffv1/3a4/56dEWnT9krN/uGwTEteMvngA0sF7x0FL6JFlQDpmV0vCiVnLbmmucZm3aNenh/+tG9Y155qACJrGyCv96EB1Vo/6aZIGNuYUgnoZXUxajFz7N15XHh53AP8+NXr68227ZrxfAQvUtvoAjNhiHUcVRXBOCBalpxAlS0ui5qvzjGBpjSRj2YVwJqmK6UaJ3PBb68isSxnggzA5JxzbjmriC7LYZzFe/f43TeMRGSC1rbRN9E5Wg4nF5of/+zP6pvPHt998/Hwneap77ph0y5jLnlP5/fb19S2t+enj+12aK93+eFjLU9tdI0f3j3dO3WaauObkhdkbkM358oOqiwFjDGIzB6pqsbG+10jpc5L2nSbkjNVJgCpgM4xhiWP3gUTiy5UKU7NswdksVpKaiiq2oeHfdsM7aZzACD1/PAkJVMIXb95/cXnLz77or+5cdvecZ/vn5w8Pbz/7nB6ev3TL9rdTXv9sr26TRnBNVV1mZNMYxKcSy1IFapvHdEVCkEJScSRlpIXyTUldnFcztfb63Qeo78ax8fqTySOKrbdpqilvC9a0lLNuBvuIrbLnD2GqF3OpW27p/G+a/ppenJx1w69JYjEHpmMSlqSZAw+m3iUjC4v8zZuHw9P1Ibg+353ZSF6LjHuXLyd5nehpRLg4d0f0HTYBADn0LOG0FxFZo7aNc2Hx3fFSsmn667JOZVyYKJai6Kdy33TDHOqTXdTBGP3EimXlBivh204HZ/ybJt2k8tbFxEqoJIlMGuN8e39N+DCZuPm88RO53Ro3KByGQ+YOdeKdlnqOe8JCJAAUEwMjNA8sQO/TrZASMhVShERMyZGU2ISqex4bS1c82xmKKrEq8m3ABAh1wqKhdfxQQXQ2QqlRxQVDm27vZlP90htKcd5To6i9+3h8WGaKrND8EKWUj5OowPIVeohbbrYthwjDX08LQsaHMccgisqeZL14XEa5877V68jkn/8+MiMaZ6k1C5y19+0mx2Hza4TVRGlp/Np2G2s5CIWCZ4eP/ZtV/Lsu0GZf/zTH01zCb5tQvfu7ft//tf/47/+X/7tz7/4sY/44eMcfceurUVFtXUxBjefTvfT+bv339/v95Fwu+nf7/ccWRWLYrvpWOXwdMypEKDkyo4UgcmBIrOvuXjXtL5Z8jxl3bSbaXwIHgmNkZUdoQ+uUYPG+So4iQAGI0ylKPMiMB/mcdKw2TmQr//4AZquzgWgWJIY/dBttrvr0HaykHeBYjyMZTqdpvNYpaph0zQCCECh3YkmRQKUoWsJgQDLvEzHI1idy5KTCDlGhwbLvCDCdtuqamDHDDXLvM+zYS3iA3VDs+v6AJxTjuCq1JzOYhi8iWjO8+Pj4p1zzbUnqVmLljElMGAf+jYSKIBG5/enk5WpjXEY2jEduv5Wks7TITQRDavUWsrK+2Hn2FOWEsjXUkQMVH74puGI1jtCnmUXUWEAeg5KXDa5ZheDCRKuQeZSiWgdk1d5xtQIaR1h1BQBVOo6/+Bl541qAsCrQGPP6S9CKnapkVqXHgFhLX3V9QUv9CJcc6D2nMdA+sSBAVjLd56jbfCMHDbTi3kKcP0yL2AaAEJS1ee4GRigmjGuROTLMpvWWIZ3IFqLOMbI6BzeNK7MY6EafJscHmdpIx7GmcAYIBB1my4EDlZDaEttRFLX+v15il23n5dzrgImkhmt72MUmFIyUefMAJX4WKpXquKmkndDIM/LdHp698fW8Yevvzk02/b6KqVlOp6P97jdbB+fzt/+4Zur6xci9elhcjEIWPQuNOHpPBNo48LaEmNAAERIInVF+ICqqDChil78DmZM+Ew1WdUTAQIxXVN9HomwaNof9u+u+k1Ngo5KLuenAwB45zWdHz+O42Hc3HgAGue0exHaNqTxQawycypKsQsuzsuji1ENUwFiKo7C9vrFy5fd9rrZXR9O53f3Hx4fHtkFdv5nX/w4p+X/86//3Zqf+/5/O4S2/cnLgb1DtdC0ORXn0HtXc2YiBgUWcg6y7p9Og3eiWkHLD6+bdWheqw4IP03T9uzrAQQC0ovYeHEDyYppUVVE4PViBEYivFzJamAXh9zFIGJgZAqXbzGu1j28eJouJhOmZ1r4JQ8NF3j2Rb2sZEXygX25vfusLNWXWi0v5VFp6odBGgCi+Wnx7JYsU17mmhvfCYYmNrmU/XExckvRIiKGRc1QHUAT2zb0Vqnvu1cvP3do799/9fLzV/3mpSjl05mDb2P05HPN3qOPt765XnIlQBKTNBFU1UVqZu/yklKuzpGJGaiKHvd7Di1QQIRcloB+mdSbR9/Wsg6bqnlGw2qu1sWF4JgkzbkmdACBEcgIyjKjGbMnpAKQ0ixq4J0UDLExNe/DPI9zKp5wt+1//pMvHu+PHz4+cttc395izIeHw9fvnhTts92QprH3sv3Jzf5xEpHzsbQ9tP3AVBed0Kzz7ud/8Wac/eHDIT+9fbwfm1Affvfr19evCoZ2+6NXP/ur93/4j4fDx+H2BsWm5eiWe9++MTeAEZihiYGDNVz4HDn8lAx7Pobt8pO+KEbPqZjVlPZsHVpzkBdxG5/l7h+ERZ9zbZ9koWeB/AfK0bPryH74oey8Y0YAUSFEJhKpioaGYpWI1hwxGK4OF2YPhoBBdMUVM+B6uiARRPbs0BOz2SpyiKiwVinR+VXD9+yYhZhAFQkcg4qtlWuiYg7JCKiaCxdkt4mpqSgAWc3rTamXPYYBoZowgFg1Zb38t1aMZ0IDI2Cv4sgBGWheUIldl7WKd8YBXOTYcs5LXYqQihoLeqcIAIqEzrNVRQKRwsTzNBuIIWktWmCaJtf2YQDvnXNN323beERQw7V1AVBNUKLzhjiXvPJjCZnAUBWQgIiZ2AfHBICe2+e9PxC79ZgiZkZkxyu62gCYCBkdB3YevQdEBuxjCJ4a5wgxete0bWh8EwOaoSkQaykiCoSha0Lfxr73TYvkDJFdIOfW91zVaiYgIlJrLqUUqbXUUrXkWpZallpTFUHUy1VNBqSKtUKtuZYKVZhE1LKQOjZDJmQAWGtSta47DEJmZBATMNHqCJQqMDowZEBkZsfExIxE5AO5QETswDmPjMgNrllqWks5XAFVKVXKlNJxzuMiYnR5hAGUqmrqmNb7zp5bwj9JRbjWpA1dZCPRyRFVkVM6BUditel7Vq6yGIvzTKZFTugcISiV4CohVckGJWFwfVMA1iVGSVn6ln0kQAZnhVCDSalLBq8kSbSaoYoqahVVqchExGYitRTJ59M5L0VUQM15Dg2x4yVPbdO1rgWow9X1stjx8H7X9E3jo7OaF688LzMNjQ+ABMTAIbb9TqEmtSoLiH75kxuACrhD55e03L65qsuChOx8HWdCt73eMXN0IYYIITS7tm0p6yxpZJ/Zkah30DpSNXXOE6qZhqZ
LcyVzIfZLPsYmet+7zWfzMuens+aMrHn5UFK7e7k9Pp2wTOaOtzcv03jQTMvT7Pqh73en0ymEZrjZBW/Ho5mU6+shesqVzmfWUgldqTM7do6dC3kuZlwKo0GMgymgVlETTaG1pgkcGt+13MQqwESoJlLapnPoDFuCZpnPZllqQhaR7I1VBSHUCpyL2JNy+B/+j//i29//9t/+23+zaaIopFQcsxk4coTVc1jXc0DkmQnleP9hj/daJCIO3RCaOFzH4apfUv348X7/3dvl/l3wNCcA3BYdpvG8vflR0y9w2FMkIkM1URG7JHuJHACWqpfWxFpqXmqIUmst1TuvKiJr0caamwdAMhOAi1gqKgpQq4jIpy0CADjnNAsaMUYxIeRmaIKLDV8F2FzF68bxbncXGLu+Ha67Mh2jbwjRdVHnGSQC1unhsaQMwLWCZEHkkkXrcrO7q+XAjotk533JFXEKTWfOVauwHCm0rGhZ0MXAodbatF3Td8XQRY5dnMfRR0YULHU+PM6P2u+6pgt1GlM6qdjt6893r958fPvtd99///LuLsbBo0qCx29+s7097j77+f5pH9srdnR6fLvd3bz87Prdw0e2ABbTcmQ1lRnYNY48upvdyzTOCpaXp8ZHF0PJBQ2Da1SPjlwGVELNhR0B4La//vDunptgaApSwVkGZqfMuS7sNuMCh+nQ+NuWibFIzi0F79v25u769cvu9q5rWer09O3fHR72Vk2zTONjldxtd829otjT4aG7yimnpRZ2npxTyspulnrIVYNbtKpH57xUsaKapQs7zd47fHp6Mt9a88bhIjW4iKjnvg0pVWWxigZI3msuxABYBZBc68JtWvKmvev9VQ3qPBm6KZ28J4e8VKnAxKhNqEmDC4HhdH602N5srkoxCkzki0IKWFHBQYNwmn/bh5DSSBiHtj2Nh7FQqdAO10O7qefMCMSGVl5cb8/nJaeCGMXmRXLjrlG0lqXtbsUFHzqg7n7+0HZdaHvMGGcvYw4ybK7+AdKUy4OhZallsbaP7dUXCUf1Zd1lsHOpnrrh6vT0pBccCqiId259tilS1RSJDcBUVmaoqozzAWANh7oqxXmnYFWUkA24qqoagyMGtQIGJojOma5LFgCi6FtDFFWzQuSQdMWQMZkZmGapDGJ1mUFK3w/CcD7Z/umdA6mlUGBlytOcS54qEKApVoCU6ljkMBZBGfqGiG5vr75+9whgpUrwTkXnpbR9A0hPY9pMC4I3QhfaBk3VTqezGcchSOI3tzddG/b3J2Lanx6vN7tXVzfn8cPgM3QhMz6dnozo1W672LjpPCJQi28fvmMyIDou53bTeh8V5bCcvCGq1Jruj0/37z9QE65ev3Ra3/7619e74ebldV7OtdR9ymg1V0XHXeRNHzDnq+12f0xz0jlnqeIJzklQqxVZ5rGPCMxFCqSE3a1vbrMpgix1lqoGmuuYUE0M0fXB5enILN982KecuzaOi6Hzr17d3N3d9lcbEmua9nhOzvuHD4/1tBeRac7sPFHjHA/bbSlzLklKnabj2hOwGULOo6ktayDdasrTZR+sBmKMiIijTesy0HvniM0I0FEIw+DN5qFrVGVaZmc0jqPU2mx2kVnUckrquBQ9L2+boWv7qOYB3bzktJzxpgs0MhHFtnK0ilLRHBI34zTlUp1DRAApCuyInjHFAqir/a3UbAqCf3+yWCu5gYsJAnpGXQNJazXwWiROqCK4UnsAwFDUmMkACD8tpw3XJhy9jEXEKCLEpBe/z6W/CZ5hrqvpCC5eIUAwQiJENXFIoJeyc9V1p7GGoZAAFS+yDj43BxngpTnKwPBPYI+15xkvJil7Lvy6/NtXHOtzDOhZvlrTQgRmiIRNE8XqmNLNbgsAKLBtfN/y0Hqtrm02v3v7ERW6CNfbeLXtak592yrYOJ4YOsMw5gJYUhqhLHd9/3Dcm1qtMi2FfQeOp7nOtSxVQLWLoao+TYtjDgQA6APnWk3BiP/T//z//sf/9J8Fhj/87m9//tf/p2KMjh8OD6xAqDKd3v/+V/0Qvvnq7Wevb3OpNU8OxJv1bZNTXftxEHjOS9d0ydRUAVBFGQkFVYvnzqDopX/OntN/QIiKgKAI6M09Pb6727bNMFzf/OWL3ea//qf/MI5jrrmKGOhm2zKglOoDh4b29/s33RvHXlJ+uH88n7MI+tYdykGWhRGTFCQM7MH5l59/effFj6BQ7GLJuSyzs/rZTTvsrr/6+t2/+rf/7rw/3HRbHyKYvXhx5ZtW8+KtICPWyamVY8lAa/KeiYrW3a73rHc3m65t/vjh4zgXbfgHwzUiItN6bF+SY7YOamArCv0CtUJTg6qw2uRU14Z7eyYdAdGnBkBEompGAGb6/LeJmq0wa3q2LT0HMI3xEmdDAsd8CQlaXW8mIwUTQkBg5wan6NI5Pd0/fvhK53utw273y7Tf7x++RZRNaB/GQwg+9oGLSYIlg+VS0+KcP4l8GMftcIUEohXJMeNuNwyb7dWuf3X3ajw+5Hm/vb72vE1zQq592wI5qHLOZw6+DVvEeBpPzjWWa8lnRAeMJsrIqCqqZqxVvWuqQF3GPE8s4rtdSufltKehj21bc8Gs4GZHUqaDLJMHQQTIGRm8JylVc2ZuPbe11jSeoBSoa4NRVVmgLAigtdSSmLHfbfbHQ4jD8uEoKUnKLeKPv7i9H/jbD4dv375/sdl0m+Y8nr97OPZt8/p2c3r6WOan256apqlZGMk7N5V5zPXptFzv4HRKtz/+8e3PXuo+PPz+K8cOp/P8/d9t224U2L3+yZ3848P7v9sW4ejVQMuJda7aPWs9nwKdCM+Rsx+gixQ/qTl/gvxfNMI/hRufH/UvwVu4RBx/ECX7pJ//yZH2JynoObR26Tk0g4uL8PLnIYQ1BuXRy3qRgplWA/DsEACZPt0PK+wdwWS1VBsiATsmxJVwREiwQrnMckpCRI5EUJVNCY1M1cwZkiKRY6sZBNY1jKiYmhQtpsTOTEGF1gyUmWkBxarIAIakqmoqZox8eQcxpwaItFb/VKmoxQAJWAhYTcviARz6uszKSMFZQvSenHexj6XmbCoGploNkIHNUAHRe49YC1iuKqUSQk5Z1GoqrKyVzJJqQdfEpt1st9vTMLQMYI48M3rvqykZilmqRdRUL15EqWuDJK222ksVBBoAgYGYETlAUxXHfkVbEzE7MlgLBgmJ2TlicuSi85suNNF7JhH1zse2bdomBgdr+VjKDgHV2Hnf9aHtXIg+NkgMiOQCOg+gWisYgprWmlOpCqVazbWKpFqmZZlLHlOZFRYFBHKExCTk6tpAVySnYipEq2OIioqCRscODMkQGEkd8ZofQ1CVAgBVFBw6Z4CsjIyIzoELRgTsgJnIe0J2FJjYeyNE5vUIL8BmWEWWovvzfH8Y3z2d9mNaqpqpVnVrA6bB2q0B+PduxotUVBUd8jLX2IRSa0rivXMhlpqJ0BHrNKMLnkBTQkd9vxHJ7Bw5LsuMir6LPjCYI6IsolXZs7HEIbTbSI3nrrGmXb9aHwM67PsGEKsiKpwOow
GWkmutyGxitda0LEhacsppIccpl3woTRPWur9lqrENMZgVa8hrvAbQKhqCj43Tqp6aogYCCLKJgzoXmYywJzZjVCBm590yL2ZVROt5atoGCbqu6V5et6E3ZUK7vuqbbsjEqaayHJb5KMsU2OVaiU3FzKwJPnaBfDRido4BKTiAHAdfx7R/mm6udgjFgSsCLjakXNKp1jk0zNBO83Q+PbVxQ9zuD2dTqyUN2+tlWXyI4foFetc08Xj4KFkMyvWmlWq5YvSUSkp59t4Pfatqkm1JxQxiE0JgYmzcoJCAm267E8IQOqrVO4emoKSSqxq6IATMjBZ863It8zgd5hRD9FHu7l4DwTyej/t72l198eYf/PN/4v/b3/x7tqWLRISlAqiEJhrouBxi7AFwnueq1QfHPjb90KBD9tlgWsr9b/4w788qedPF7edvjoepTnPcXVHT3mx/vKQEh4drpwXyNM2lCqmtJgVVqZqqgKyVrsRNH2utcFmBSq4FxXsDMcwiHlEUDZSRwQBBLpn5y9PJZU/x/MYEIUQwNpUmdNHHOs9O6LPPXka42oZus2mGlrrGP318H263wblV7JVJTISYwHtyLaQKLiAzOWTvvZRa7OHwofXqQ3TEki0XW9KpQ+d8AbP56T42G6S4ub1equTTFLshDE2aFiOO7ZDP82azAQLQ0t/dlTwtx1FSOp2efNuGlpg1nZ980766e9Nxd//xIV9J09/a8qCw7N/+Dpi77Ytqwk1LuTmOe993m812fPzATKhONVHjl+UYXY+Ixq1vBUVR1XkyCoZQlipYBMqSlvUKS1qcC3lZGLDzvRn7EFIemVvNQghSZzbx4J7O503YdH0AnGxJVDRuu0175ZsAtb7/6qs8HUuefSAVReJ2e/WLn/7TaZk3N3eAjE17Hew43aOjzz67PT3NyBy3m8f9OJ0ewbfjPI8lQ3ToWFQVgb1DcmA85orEZHY8vwWTlq98E9h6hwDVtNiSFjLfho6wKXnO44kZEQJoiQ2ncnAMXXOd01mrdG7QgnOZmqattWqaAYwAJWckar0jKy07qZyJkO10egQVDKHkPHhfyrjrh1RnW2Ror8i5AmVO0+P01O1uGVwZR6Gp6bWWBVWGIaqmEBzDDqBVyE3Dp/nJm7UAjIe7iEn3j4dyM2xjH9haig7Zn5fpOD2GAUXEBEAIYHn4+B7AKqJWm+aJ6nnyx9hHen4vAMQqQgDrgqNKrbUQkmNCBe/cqhgFH+aUQRQJc04IGNgDEDA1FFQyrLUdhGDoPYsKEDK69WlMzdAxorFnAxEpTJHYqxoiexerqGdkciIZychxv+mg3p4OBzHg6HvcIKiCljmXrI7YeTQL5vThMC0C53k5jXMTGRCDc0XEkUNkbCCnBORi12kp11eblIaUlNCRQ8l1Gcdx7znYvHFtuGqDr1Vvd9cp58fzvg8DUlzKvLu+ag7ZUUgpe0ZJklJ6ffujrHL36vPd1Wb+OLddjwgh+Gp0tbkuujw9fJRUb69vC5qUiaz+7Jc/+/6P3/3Nr/646cM8Tjfb1nKqKW27YAo5GRo9PpxTtblUUO0cidqcFlbxgRU0Utu03ntuYnRdqGXSom1sUjrXUoDBU6jVACkvyzSXVAHMXw2blOtTrp+92f18cx1CL5WmcyHm4+F4PI6pLCVPnkkMxTD4jiAgw/npPs+jC5BzllK7dovkapqQaipTXrKPsSwjwBpHFVVDJCA0oyoCCCE27GCaz5vhVs2tjphadDouzdB/8cUXT4+Pd5vdZrt7PJxN69P+6ebuReObZZyBMaqBqieotaAqsh2f9mlc8GFstrcvvrxBDI64jaFmzSUVKb1vgDwR1VpWQwOiiaJncs6taz8ibNr4aXwgptWNukaKDHDNKonKZQkBoAaiBoDBBdW67kdXK9Cz3mKX3faa2ILVxaOga30Lrj7YNUHzp5AYXCwVq4ED7U+jy7pEfk7drLajdWz+UxrK/t4ktEbYyMAIV+CL2gVIjWafmtERwBAvgvFzXMMuHKTLdEZAKKZihswMbppyE/lnn32RJYXG374Y0tNZ1calBk/HfNg0BBj+7MdfmBxTqaG58v12SePuxXXv3em0r0u52jZv7q6X01OVKtAp0WvqhMK74/L+48OU85IlK0aHVSsTMmIpOmcJ3gWPbISIrNC58M2vf73tu/t0+G///W9evv6x05KPuebFSoWa58OTMfsYq2I1E9EyLXWqbsNjXioqBiIRQuiaoU7ZuVakZAOz6pvw4f03r7Y/NTXiyzeMkFRNYQ1imOREAlJVxunH//AfjSrnefr+4d63sZyOomqeSoXjODvjlpGB5nEBhNgOtcDHj0/v359LQWJAsVoyASoRAkqBYRNf/+hHuN2ZQZrn8+MxBBccvLi7+eYPv/vbv/3V8Tgz8Yth8IhlSS9ebSNJ4wGiR5VpTOlc2tA2jGq62Q0i6BjnkqVWVHRIh/OkCggUvft0F8Darrz6eJ4NPhdLEQIQA4Cq4DMvS9dHqj8ht/AiOa7BMiS6bKoRTS9XLlxSZ2YCoGB0WUmsreWE65qdLneF/Uk1QHomFwMCqJijANjWmo9P32tNfP25372y9tFxmuHXgMea59N+evF6h2jTfGLvD/nknCOCGEMl9zSOLkQjJ1r7rqfgbq+2V0M/9O2mbQjHD/s/vnx51+5uyDUiKUYHkDUtVbTprwAcAZvWQJWJNBfHTOxznhGMGXOaVWvotjWNc5qmaVwy1lzKeLoNnaWppPMCiaFnRyVVioxOJR1NU11GKwcEVuizdYDMgKAlp2PNOQRAs2XO3ruyzGk+g+ScZjOax70BeG4NXS5Lv+ut1tBsHp+OFWzYdK8yvf94rEt+9Wb7UJbTWP/zr779iy+v31wPOk9mQoECERjWCrc3LzuZPr79UOeZpdx/9bvNZ2+ut93LLz87Pd6fnxb5za+Hq22zA5n7zfVterxexql1vYtR6myyIKCiEq7c/U8T6SoBPIdeL2fYmnD/0/G2nmjrcfWniJrhWsO3+jw/6ZyrGPWMz74cjj+QxH8QZoO1eWg9q3+QiQNgdGbKKzbHSNQIWVCZLtI+mRpoVbU1tu/W4vtKgIAYvF/1K8KL187AqmlRJWQlrQDmyExFihIjchUjBkBDdGAOkJEqOzGVqqZqJSs5AEM2ABC8SP0kplLrmghWEyBGwNWNWqysSr+uI6uqqqLY2hOPiipKti7PCbgFS2CiKNoE1wUXg+QYY84pkxJorWvHOzExA7MjJcmruLNW2hdVQxPLbdsGFtLsQRrSq03U1zcmKTjy7BAxxgDEhJirzCmraMo1l0s1IyJXs5ozIBBYNTWDlDLhCsAHNbmQAsGc4+AY0MSgiYGJ2DnvPBp2MXbeb/rGB6dmOZfgXBtDbLxzDMZmJkrAgmI+hNj2LnShHSgExPVf6o1XrJioWRXJuaRcSpVSasklV53mPKc8z3lZcillHTFFARjMtBStZpKLFAEVT2BVhZUJHJOCrlhxR46BwVZzvZoikVmtwH6N5zI7ZuecQ/bOrbE8T9555zwDMbB3QESO1m2QA
AFyEZjy8ng4f9ifv/t4eP8wHcYkiATmLqZjYKSqAoCAqGb0vwug0ZoAdh4AiwiyR0aVAgTKyoQktabq2gBgqDWXBRWa6F3wJspkLhA6RcWSi0klH5Gw2fp2G6hxOGyaH30O2110tsnzfN5LTWuNGzGmNHdDLAqYkbgtoDUVp47YSsnlXKTUWkpeZiZK87I6QWaEJi0zsor2wyAGauaCE7PtTa+HEQG7yGo5+nZohyTZ0mwmyhCaYAB5ySWBlOKDv32xAfZt13Z9J1kceR+a/mrnu4awjOdxmRadJ2IIgMoOoZBlUWEzFKXQhGGj6GqtbJXZOwqlUAgRxeVpsvFxaGyies7ZhQYR5/Ec2k6regeEQWuqnl3gmxd3KS25nsuk0ffjmNpmE2MXvI9tR/gNnc51ESHKtXZdQ4kJFyRLZQ4++s7lLMRUNQf0RIZsVrNOhxqJvROZmn6nVlWs1uTYMfDp/ESMne9qWQgBFV0MrGwKtZTjaW8mYNZvWkO1YL/8J79sO/7//vt/IyJFIJcciVWNA4FiCBw8KST00bUdcXCuS1LzPNZ5RFukVg+hi1tqt4fzSXu8u71TsjQdy3SeHg9lKURQUzYxNlyl2GmacimkSOyNkL1DRqmlVJ/THNuu1up8WLPgdiHboaqCkZKZCZjiJT+vuHZP1vqDHQYwcYxNGjNTA4mddP3QtWGrE5zGAyzju/FpM2zfXA3j41k754eOmWodCUJNilVD22gdy2x52jOC1CWGXiQ7R+fpuHPBgAzYKJaaSiquLQSESm0zmGsgbGJouG1916nZ5rbl2Da7DTnPTYPsDRAkNyjxM4SS6/5pevw4He9jS6h4/vhx2Gzu7nYB/cfHxw+n313fXvW3b+an7w7ffL39XGm446aN3fD0/vti6fVPX/5u/0GnYgV8bJFcE4iMrGBd4v7p/d3trts28+mIJHVZmJumDaW2UiuTiStN70zURU/ARB7WVJKpgbng0ZAdicCH40Mpoe8atSR17pum23VKHns+Twc9LaYkVWsGq8y+cSGG7tWhtAb4/mkaNrv799+qpawLD12CoNICh23s+6uXw82XD/Opnj5Q4ytbqUu7iWCSFhWZFWB7FZc4vPvwVYsRskELh1mvhl7BGHPfbsgNKctyHo2AMRAVFxo08z5XQ7EkS+m6wQcXEF1oszK66MkDp1IXMGqau1onwFJNHW7nk+U0GkGeR9aSxmMbXvRdrPm87brD+N772PtYCrMGI2HIkuY0fntz/ROn4ePHpyUb1hnMylKCD3VW7xrRwKoiS9bc28apu9rsEhw11ypZ86JaDF3fXp/T98TCgX0IKaSGkL2KHIhywJrSEY37djudT4fD0+76zvv4/F6AaoiIVaoZMnnRys6ZqWOWUtk5VQGj4GIRRUDn1hSM5TqT+TYOixY1yCUjmPch17nx26qyxguqVJPqidGoSAJEx41+woA8OySWZQ5QyYEKzKdZVFxoN72dD++WnMxgu9vudo37ePxwfxxz1ay5VHD02Zu7bz8ex1TRh0UQVEqRLKXt3GboO9M5ybkI+e7bd4fh5uWbn/3sfDrPh1Mqy4sfbQ6HsWpdDk+PHrZ90/TDMi3Lopuuffv+6x+//kleCmWRnFQSMYOW6B1x/Ph4/+b2JVa7eXNzejiw+XmsTRu6pjMFsLIs6fb6s8P+8fD0mOYFK3y8fxiX/eFwLlM5q4oRoKfAu74fokvz9HAu43gauqZqCd470wa0mkRGImraCCZCcE65VfZRAqkjBuJSZk+GwYkKMxXJ03wkaBQMuOmGNlz7h3MNTd+HJp0nJJqXkpacc5mmWUxqXjgQOOedH5qhcc15nNJ8Pt3fSy0uMCC2fSuS03RWlTYQk202fpznnGZRNVREZGYzW4o2bWQXAAAwgPO7u5vQ70wKKVYp5EIbh+NxLPMpNvFwOu3zyZPXjH2/zTWXWtA0xKZxXLKaGqklqez8vJQFaudcPX0Yjua221ENoLKWvveNDwyUl+SIVIwck49SE7NXFTBFouB9ybJM87NgCqrKK+9QFQCLCiA6IrggohHM3HMwKUlVNE8AgGKEhGqqgGrKgAigsDIjaN0PA6ywIb3YmwAASU0RVzusruvxC6hoJWOvsTIkAgCk9W4iBNVPdSl4weXgc9bseQpSMwIDWl3ldJn4AWgNnT1vTZ6X8HB5xRUk/FxTZWiI5Ahj26jSfEpt0xYt70/76J2Q7p8sVHVAyLTqOH/5419+/c37cRyNdRM7VnfcPzUxCNDb874L8aef/4jreHx6dx7z4zF9P6VTtTk9VlUmr1LXvigDY0BVaxs3eH86zyimVcz5eS687j8J94+HDVjjwnQ6HfX3u20/TyNIbRwtCcfjyG3jCEvKKiKSx/EYXEM1I0jbNE0Mkq3buLk8NNH13fbh4UP0cZpTSelm8/n5eNhtrpMk4lCkNs4TkYExk9QcfQCp03j8+Z/9eZrz/fHhr/7xL377X3/19HBwyKK5piIGrOYDnU/HYbepxMdUxaCqfjwc5yIIuCTJZeqDE9PJ8jBsm9DSZjt5txwOdP+464fomb29f/vuN4ePp8NJF9z6BgJpNQboh+BVZS5TFYfqyXU+3l7d5GVBAzFLSw7BLdOIzC9vrxn44yG9/fbjsdRalfJzDHO9ohBXbiwSgqJdYl8CdiEHEbF94g+ZAoDaZTBGW3NnRKtWpKgGz6M4MoLoyqYxABADM5BVkbqUXwOsSbQLwt1EzdGadcM17GaiCAIAxNHQwO0qmGta9eTZUZni1cPyzX+u8zsH87wcjESsMfFjOszLGayiaUDPEZ/Op5oLGKS8qGro3evb7Tb4wHK92Q5N+/T03W53FZvtfN5LPDGRcE+BXWCm6GInamlJZmZUNU2MoQJCSapmdSlaDEyK+uEVlTyeRxXRBmrNU7JhOrs0yXxC7OZ98g2WnFva5rTosq+STMSxd2HI8+xz70OjWoEi24Apkdo8L5KZYVNyMnMu+GAkUjbbYZ7m7/7w7XB193h47HovVBGJQijLVGvuO98M/f48u4fTl5/ffjzMp9F+9e3ZOxc9Wq3jcSRmjq0UK3lqG/vpT19P02k6JTlOtX4PL19fv7jddcP5MOfj++/+69/9+J8MFlva/uTqxz86ffgW1aOxLEs5n1yLeLmYLpyIC938TwolPB9IFzUHL2fTpz/GT5rS83W6BmN/2Kl3UdyfP2QVleD5L4Q/veTF/YbPCNQVNPQsFTkmcqu1Y5WbmB06MjCTuspZhM4bCvFKEAJV74IjWunISKQqBGAIKiKiFQEVHCMT00qHJ7zIqsYqohXwko1AQCJkYAMHoGpiVUCtghgS0YqiWy1ECgCoBiqCUNEEgNQqAFcRUzMFrVDFrIKJrMxIXEUWcmstJ7pgELxb3e2KpggUQkCfMSoqEKgWEDA1BATvHDOjqnecRasSiDkyVUN2zjvumtCGJjqHtWvx9rrpuxsmDJ5DbEJs2Dmi1URISy5a6pKKVKhVa6m5SspVpDKZVVlqRUQBNINlWVIRIqmX3IkRo2ckcszEyAAWfOMdR+93
my46F70jR0uqwUPfNU3jfWzYBQZQrQicc2aU6H2MTYidD82leM0FQLKVpgkkVaVoraYCtRSpRQFytTnrUjTlutLMcQ3bkhNdrcAgYqVUUFsty6KmBhUBgJxjNXPIRB60sguABAyyQuLW92ZySMYuuNAyM7kAq3HKO2YKjrxj9p4cAxEyAZGomamYpZQPp/PH/fnb7+6/fft4XvJKgDNTQKqmYMq0hvwA0LTW55vpWSoSMXaAhHPJZhW859Dn5cTsmM05B0zEgb0TSUxoSE3bGsiURnTkYkh5DA2BCSMrqDECWtOFdghNpIYRAYraUtUDxhDC0Bg1S7ViwJFKyUbmw5rVtNB75rBMoaTFEZWSSknSRal5WSYklippSbWyIkkFOY9EDAZC1rTu9qprM+Tz0fdO1NjZrJmdi9SN8xkYc9VlmhwzB97cbZxzIQYfezRQ1eF61292vh9qzfM8p2WWlMjUeZeWqSyT8yxV8zKThrbvEdXHAOjByBEHoiUtSAAIKuj7FkyP77/CYHUekZo0Azpi9ksCEwUpsfNE7jgf0B9uX/85q5FwLYnYNSGILLHr0pLJ6PoOCb5/Wh6ItR/iOJ7buAFD1UShMaCiiQMgUMoZkEVgCK5tnalKHlU8UlUAomBGfb8rZVHAthlKXYpIFSEEVSBz3WablgkdTctUcu266D3JUo+np9y1X/78zyvyv/tX/6/g0fmoolUKami5Q7NpmZom1FRmmcjSbLORxUjemykRh6vhM+e7x5TNNSHwPB3yNLFUzTWYNZtYchrnSmJzLsuSVSozNhSACRBzrQUKmDQUtFYR0VpLWUJwtSzOETlCVVp3uesjhYKqSq1aq0g1VRVBMHm2FRFBaKIDp5FkUWZo3eBxuH+7f727ff36lgW+fPXaKHaUrgbv/EpPI5QASlhkOR0AzCpr5RCHNJ/TUo2KDw7QfDssVQyoFCmQfHCm2erMvtldvwzNTnng5u7FP/xrt92i90jAgS7ICsD1nLrE7mA9YBKH4erF57R8qOdv07jvhm1Jetp/bHc3P7v7/PH9N9P5+7kc+3YTN93Du3ftHfuhhK7bvXpzePf9ePpw9WI7vq1ZteriDEAsKw7d7U3zuUH8+o+/2r0Y2tCzqiMGlja4coToPZJDUTargKmaj22zff30+NbpJLm6Bhw7UMFieT41jMOu6TqWmhkb5mjqsun9YY+qbSDfBEgKBO2w4RC8d8uyjOeZVCqIlbzd9hx2oe2629elunZ3o+ic5dPxmFMBZauVEJyCZEuaVNTITuePIQw+ElB4efOZaH0aP0zyntqbj/P+5ebWN37KEuJWCyUcC9Ul7dvAQxis5nlelMB7Ny+FatqFYRNv9qelHa5lOaRlLpirjM7tkiQkUtNcUvCMBOBEsaJINwSwzgPWZWxMqaF5TMBe0nnb/qSOVGvpmqajcnx8iwpXm1v23qz4rj9PIzBWYIgb4t0m3KXTH4ql680tWYOCo42jTnM9Rd+Xok3TIUVBOp0ffWwj9jZnz63VQkjj8TF2BrDQMrvc9dvP9g9fbTYvTC74FgBQVQTyzEakgEtOTKgIxJRrYWI1QGLnQuua/bg3USMkREYO3ABAqdkQAWTlSajVJnYmQgBFytqt7V1cMajOeN1aK0EpidGBWq1Zmde8CwZuITqRuSySC6G7e3H3/sPbh4djGSLA3A3Dz7d3f/u731bhU5L9ST6OM6FLaSmKvm3Jm0ESsQp0XjJR+LOf/dX9dBj6DgggtrNCu9kNrjvPUzXh2Kvo7eshL/P9w9Ptm257e3f/1TcAst3dJZCG0LeNKXRDw44MeR6X2PHN7UvftE8Pc4gShmHRxXu/3fR9357PY1rOmnWyfJ4WALp79WJ/OL9qX129+Ie/+tVveS7TeLi6uXv77TeGeMz29f2+99BH6oehDxHbPC7z6XCaajWAPoauiSpVxZrADOScd8zznHwwQ8153DbbKRUjT7UAwmYYiCJUzej2+3JaZo7b8VQP+ZjmhAgiVc3mea61Ijv2ITiXS5Gczh/ua1oMQEBqrnFoEVBEailgBQx3Vxu0sn88VMtLriKZ2SFybGJoWgRG5Bjb6COY5VQbFxoXl3EOgRzAXIQCH8epFqms46i+u7m9fvn08I0LaGQhelPxzLXqNE05L0O/7fsBZFxK9g2N01jz2DXN97/5anN1++aLn9VkQMqBIqJnLLWaI9HKEFWFyZmaqiFArbIWBn9aMK/aiYjh2qTu2QQNcA2UERPpRRFf4QJr65CIGgDj+gszQiJas2Na1+3d5c3mEkwzWFEXa83LxTqEaHpZmV9EH1gJpesUYlX1T+lqfM5VIMiq68DzgtUMDAifLeSGsganEGhNqCGqrrY/1Oeo0SfvElwW67jKVRc8LMLQtU23+TieuA2pVEWNyq3jm+3Vdujz+YS1/ujzV2/v3zsKH04fZ52ueHt9/bILbj6f37z5IuVFw6aPd32z0eVwPj3un84m/v5Yz+B9bFSXXXTn85zVnEMXCFJxiF3rASFVcW6lAGPKhZHAtFQL0XFo8rRs+34Csel8yHNVCo6lytquVUXBcJ7SOsIRaB9iTfXp8PCPfvxX42Ekhs3WzUcHSqqpi559UzVLWdj5cZx8iMbaUAjkTU3RmDgvqe+bZf+IVolks91QCH/xxZ8/fP/+/u2HTdsD2fpDRSQptXUNhGXTN+NcAbnm+vbh7fFwahxrFQUS5IIQmZrQ3l6/MrSp6OG7D0Ft1zVTLqnM++lYkuTz7BgxOEekCHFw21272Q6pCLHbbN+wziWlKddTrgqY5uSJhG18moe+yQKPh1RNPuznd4dxMSTEJjy7ivBP//9EdcELPOZ5qH++cHVlyCit+wC66KmIZrR2RNkz5giftUpEJlKD1WBkuPqJTEEVCMF45bwYKCgS6TOxBj41o+n6bTUABKYiWpTIR0QgyMvhnpejPn4t+68JJOcZEF3otbZZoKgz0BC9pFqrnKZyOKck0LcNUxhzIYQIBEV3r29+9vM//+rX/8lFi34LqusUFkLsu+tczlXFRV7KOS1l1cy6vnfeL9O5GhA5NbQlkSYkJPJ5/JhPHzQdVWSsoqWKdYf7jzw/pNPBaTLAkquKJSkOhNKhLjN7pBhSqaUSWYGFas1huBLUMp3TuRQ1Dn1e5DSeYrshilJsyRJi2wD99B/84u27h6ub61LzeTqrSpW6bYeyYBL92U/f/Oe/++Y4Lcen8/WuQU1J7Ks/fP/5m+HVEMuSOUDbM7F7ev8wnlJRCV139+YllGUZZdofiZgt3L76ydmxLKeHr//wqttNC3Wf/bSV12n/4CMBU82TX638hHaBueF6Ll6cjD8IAT/LhJ8uR3v+xfOG99J19oku9IljtHpSnnvTLm5LW2XOT07PixiFePkyYAWz2adzFQC89wCgUtk5QuPIuRQxEzQip6JMkQyBSCSbqYqoAhqQJ0ZDsCrVzBRQRNcvcK2MrArBVqI+kBHBJRmtorKagagSOkRCZgID5xFAsogWExDJSKRmzBdigIKAqorVWhArIgF4AzBLJpKkgLD
UunYRgKlIVQARc46ZvMMARg5JmYDZkXMedYVNUkDf+MaCQa3LxWTLVFGRHTN5jqlURk0igITVIDggF7qu7dq+a7oYGM1IrrbOYEPsnXc+Buej82HtBDVAWb1TYqpWq2qtKdeUJaWiqmSGjk0hpVKqiMq0lDRPJSdFUdQqpQ3RsVe1tWGOve/ato2hHxrPfkWoEa22gNgMfdxsiDyIkGQDXM1psetC07kQyHlkJvbEDhBNqqpevskCIlBFRLWoFKmp6lRsKpbEFFCB1hx3KuYYKxitF4MaqgFdTkRTJccKquAZAcEjsvNBQASEV++wCRETkXPBeXI+Iq1NbYwETIyAROxcYEfEHmlFjNJqIS6ic02n0+nj/f3btx/ffXg4HKeka/OrqYmQW+2fti6fzNRkDfz+76QidWwpj023saq5Zll766SQd0qmgA7QA4lxVXGeMZBZ6TtXq3Ggrt+YgzQtnlzsInpshmjOLaJeDaVSrW2ABgxLZdI0TURCrsW18gPXp6+qVXOqgIhOgbzzOGyalMeqsnpYc05MqirLlKb5TEQApEWYvSmsWb9SatNG3bYxMLpIhOyACRXysO0UqnP+anMzzqOAokXCpm/vlnkOsXnx2ZdxiKWMeT6VknWevVrjVEo2KAgpRG9aVtIhkyM2Q41tZCBuIkgyzcEzgHXDVlRUCjCO5yMutl4bSM6EuLslJUsp1SUtuCxj10VNedq/215f9/3N6TxN51NsWnZN1RSaNksN7eblZz8F4+PhEdV1w5AWIVYFa8hnFfJRanHOO3JVBJFTkl0zHE/3lcpmd2sGVhbwyKg5FefalKe2vUYrKqVpIhGXKqfT6TSeO9e8GD7LckzgVPTju7fb69tuFx6f7tXRj/7iFw9P73/9X/5XH3zRqlVjQFCpSw6xBTEsuVYNvvWExBSIUy2K1PQv9knS/vvYRifpvP+QptEhuSYUAmKfZZmWsZYyLzkVzMW0gkPLdUFEIAxNcOwU1h0pmkKpVVSrKYqES0AAL0kzUpDLskvW/IwamMnaX/38ztSEDsTm+dw0uxjDq7vXV8PryN2r12/84SHU8eXddeNjf/NqfnqPsgBAlcqGACLLSevJd0GVeVbT0fse1KbpuKTZexdiVPRWiyHWRYnRFJdqUuv1bcuwpXA3vPr89q/+OjnGGEQEAVRR6jpIoJmYCphCTYhgda7zkUnFDOqZyNC5PI+IFFsQfbBUr29CNw+Pj3vhXHna9iGNj+YQAlKIw4vPp9PXjlI79FqWbG6aRuc8EaV6dgHufvqLmy9+/N//17/58PHj5y9vm8bnkpZ5ZAw5Y+tYMHTt9uP7++g6yUTx9uZaTvu3fXul5KfTYdO1wH4scz+82AybUsY2RBFaasm2JAUCDMyFENCUg2/aWWua0tB0cWh226Zr2yrVIQbvmraPzVXaj3NJx/t3pYrWPJUsADpcv7j6s1VzeDy8n2xRL8XqpnmFyFiMBJy5oupiR6q11u3V1Tjnnpox1St3G70z7LqgR5DGUaleCzvnBc03WGAxx9XgOCn4QQDVcRbxXYtoqlRkqTKCp2G4YtxxoJwfx3KOfru7fnU6ZijQQPP69svDfL/tWisTArf+RQ0ynR/FFDyAszEdgdH7Ns11kSVBrUWs7q93d8xzkiOyOIVUqvY0LmrLEzZb13VSmrZ5Eb09nb+ufKTQBh6YIOV9G7abm5v96b7p8Onxu3YXa60OxrS8zWWUgkB50csCgZBEYfW8ElII0VSLVA/sXQSVFVy9pGnOCwIAUy7VExGjqRFBzoli4xlqrbgqsqJWpaoyO4SVAqKG1joPSEtOAGIA3keoiuiMiIip0eVwnJ6eWk9DdA4wdt2ynEWWL794HUP34ePDcSydE8/j61e3KdVuY+P7w1IVmV5+/nqe63EpytSGdnPNJSVTkQJ/+PqrqxdXoXE//tFn+7GCo7vr7fhx6qZmqhUtPz683/SLQ7d/ODq38dAPmy3kOUDwCFrTtu/uH+6HJohWkxK8B1PHXK1e3+yi428/7snz3etrrGWcTqp2Pdwe4XDM55vPb8fH8+PHD6++uBk24b9/9eFwPp++f4wRfvfuv9k02wpfMgukQ8ubyA9lTyiejIFDExyHWtOplMDMSjWBY3LBnZfz5mpLCKXmNsTz/JSkgLUIhOAlhlTTDFxQlopLxTofHx+Pxs75RsSQybE1EcIQDeLD0+Hh6WE8HpqAIMLeMVOVdRdZcsloGGLYbLcAdDodxvFUSyVGphC6jsn50LRDA6gmEkP0ISxTQsO2b0NsmDFC3bSeVT2I768/Pn7wrWv6wMbzlL7/7reixZGLzOybUsoylaLKntuumZZTkjlsN2+ub5fTabsZjk9PZpKqprf3NSMF//lPfmSAOWWQ6lwgYnB+qdUZouNUl+ACIZtk5xwCAVwoLSrq2SGBqDrmNTEDtpIa7NOgIaqBGUxVkQlXVz/R6sMAeQ7VGKx808sm+zn5RZckzwXfqyvHRdRoDflcJpeVS6oAKKqylnMRoiiuvwPICAwoayRtnd/xAvVQe56H1ha2i6v8oggRXUJv6wvqZRO6htRWyrWt1VQMKx8ExlzO5TFVMWBmvL3b3XQdmp1z/uZ3jzet2zb+m4/vyUoX2lOZ/uKv/mw55Wk+exebbTiWkxcpM3R+t//u92l8aMi2N9ffv30EBw04khIj7acx1xoax4Ap5Ri4D34pNVdRha4Pzpv3LldRVTIHqJUo5xJQHsphNzSH8yIZchWMKKJSzcByzt65WWp0DFalpEXHdnPXxqtdMwwufnx4jNy2u83pPFWrjYvB+Rk9D/5wOigUMw3Ok6rzXs2AwYfAwI1z1ITvf//NP/u//IvNyxeP756i9yXL3ZdfXHXNf/yP/2FK54ajY/dxOjRDaLftZhg+PLz33HbN8Lf/9b+oWakqVYtiAY3M4MOrF28ezouKRM8Nk+VlPx2XZalVBKEJrg/YNJFjjJsBCZdpmavIeRSAgGHK34tMgdn5ro2+3V07wDwvzdCwwtNhnOfl/f542J+JuO+jKap8msIvMzdeRMQ1D4ZrC8garrl86PP+HACZLm44WJsBV/MPABOArW19z5O8wZqIIUDg5xwawroDrwa8ViEYOAJaq0rsApMBE0C+lKVdMkCqRMZIICxCy+P48dc9K1Ydz/dtg9y4adRu6I6nOY/f5VpR1VPHYGp5XHKxoC5W1T4MotI0fH29CQSx7V//6OdvP/4xeL69vYaaxv3T5uq2CdEjzuNTMeCwYeklZzDnnXNIKDIuRwT25FFtmY+slaHm+axIxizzo6bJVOdlqSmZ66b5zDJKTTmD5iTT2Qyzn9vN1s9pPjy4ANIGow4As+4JWdVAZw0nWQoDJKncpFoxnc82T83LL4LvxkRLSghUJUfWVWW5u779ze++4uCFZHfd548P+bT/y5/dHZ/mp8fHSebXr7b3eVTwh1q76m43G9Osy4TOXd02os03391/ff+4P+fPXm8+//ln04eHSHo8z4fy3avPPqeSj+fvm/vvr970y9M3sX8D+irr3EUkEpNCLu
BKy4e/d7FdLo7naKL9IHsLz4fwn2RLsE+q+/qZ+Kfw7Q8+ZY3W/MmttP5aLywKg+eD81kZxfXYu3y8Dx5UldA5z44VlCsraK4VzdWyhlkFTZwDNAQm1dXfqWBQzcSwVgWAWpVALzWCioDCoJkskNYKuuabLvR3BDCzghddzAyNiIwdOXSiatVERcQM0QiRwAgNVbIJgKEY2uWoF61n0YqqKgiCZlZq0WpgbAZqqACYchULGNQKopCz4D15rsQGjnxoYpfVwCBVLjY7g4QKDtFF78A0EzNgWYlPCoTgnA+bYbvZbIe+b5uOQEkSOCYEU0NWRmFIoLI2uAGyiqzmWOe9C+A5AnZJnSjr+mBUjcyqQinlPM3nMWmaS5pFliKZ2KJjJif18n7rve+3mxC9D15VpIKZEhKCNU1s+yE0PTpnpWhGqcYBiCi0rY/BxcjeIxG6gMSrg5eJZDWImVWRUkrRUlRzkVyqAQo4Dr0lYE8qVasgqCFWUVWRWh2u0XVUvHCigVmZixkpBfRIwdCQPaMqKCIZMrngQ0TvMXpwjpjBlKEyOofqHDom54idA2YkWN/ARW3Jsqibcj2ex8Pp9HQ+76dUgBTBTBCBkNUMkQCsSLGVvGPGa4XFD6WiGJ2BNW2jVUw0ughm3gcfIzsFNjAxwXmaiJUcmErN1QX0ngkVkSE47lqKOp1Hj+L7gK0jJqgS2LVNy1p9WcBqGsc8jeRcE3keszZR1KwWUFvRX565iqhUJMjLAqrOu1wFkYL3DUdCg1r64eqlf5XmpDmTChClpazQOwQOrQdRq6VpO+c4hFBKNYHQtgaiYkrYb7aha/qmA8WU883d7eZqpyzztNeSGABy8kSiNS1LKQkMwKjWorVYrg5D322VZgMA9hx755ssSS0514c4ON8czk8hesNkM9Wc85Icq9UcmrDkNGxfUNv4HNM05krH47nx9PH7b0oaX7758XYYzFypWWGWnELLw3Yj81wTvfz8p+Tt/fsPTRsBTEq0QkVL0tS6ITQhp8Tsg4tSdRrH4H3bD+N8enp43G6vqwiJsHdWFh/Uc0jLAQwAeGUMgkEbWyTL0/L2w9fscbu58cGT43kcU0oNNB/++I2l+le//OX+7bv94SlGr6bIjEhLmipgi6TsgNAzIlStci5jkUrclTqT5CFUknM+HLwpBg9E45KS6e5m8Mh1PCkYM3kFik4c1JKjZwMywFrUrLAnqYUYa5lriVWqiAZAMFARY7U1PGCkpmomKmq2+pxV7bk+43I35FKvtsPAg8d+OlfJ8PDh6fT4h6ePT1c1XQdOx0nO983upg+07fs4DEgoWh0qmBKvjzIauia0bTp9tCKND1WEiQ0IAZhYRHzD3sc05xj7bvfCDS/5+vNX/8M/dS/uio+AICrsGVTRsSGYCoAhAagQGVIlrAITkpTxQ0kZwGmxuP28u/VQzk7H5XgPhuzQtQPHcHp8XJ7m4qm7fZme3jHeWrPhZnf14tXjUksdXYwMaMtabWTkTGiZ89w2w1//j//swzff3X/8ppJ0XVfGYwhRkm1vX5wlJbDb1y/Pc91df/Gr3/z6JvYxbpYsTYQ2xOhDNkQ/qLlai1QTw1LrojV6bJqApkyYU5GiQIaAXdtsufUxhDZOOR3Op67tZrX9w9EFD/Rtmpe27xWl3e5KrdvrbQL4eB6XY/IhMrpNdzMgPJ0e5/nQtdtxnsB3xPX4tLdQ+81Ollm0+FqYYSrnYXg9j0tL1iOZQhNvgA24n8rZo7kY03LoUYYmcrEmxEM6n/dvPXHXrHDKrqqAFqm5c9uOGEsqS2nb4KzXynkaSVXrhB6m5UOWZV6W6FitHVNeSs1FBLCaFdQKBuS2YeOlynzYDF3KWazqsq9Wrq5ecdh8eP9OQguuLFZz3l+FHWPwwu48Vn9M6WFmGdo3SlDPx1KS54ZxYDucTo9xaBTsanul82Kiu+4No1/m+ROqCAB8cAQgVRDVIQICE60IUtECZgRrtBuQqNYC5IywaK4qkWNwvtYKzjEQe66CpqIIgIaEiGxa1IyIsphUEUNRMSCHyMyl1CImIoDmg7+5vv748R0b5WVWxs2m6TrYPx2all5/+QLfe6wJTQxQa5WUMFfQcDqNAWPbb3/y+d37tx+lLsG7EJomBiiGFq6212Ws7//w+Gd/8edv33+shzS08bOffvH7P967Lm63cdoflTT24fB0vxv6XdeOda41mXSlVJHR+SCiVbBzLXnnfZuX0SEimFRpfOjaCFVALAZfifenQ17Gdgi5VEn22esvUhp//fvf/vrvvhmG5s2ffwZa+Z0dLKmImJrYqqsVw+i8Zg1t6xHnZWYtjrGoRHa77c47WE3BQ3+95o1vrl6exsfQtgF6UUpLNdTH/YML7ZT9CLGwP6dpOp7YeUDq2y6nZZxOIlW1nNJ8PC+5JERqGwapHHy3abTI/Hgm59OcCF3T923bLkua5iUtCwA2XUvIatzESMjMAcUcI7Dz7FQwdLHv+sPTPB2n6+uWglWRrDrWRadH30QEPI4TmC3zUkt2PoAPhOHd+4cYfWRe5oyBxSNWrRVTOs6PZ08cuqbvBwWwqI6dQKlZP7z7ppbh9mpACBygWlUF79yarELyVcyxORdrrWD8aW/ATGtGxsyISNapBoGIFUxUL4EYxPVtBYARkAnlebQh4hV/vBaTIayyzxo1Q0QkwCK6snqB0AzE/oTTWLUkEVlnJ1rh08QmCusOAdegJtjqaTIDu8Cz7XmCNl0bu8DAxFay+CWigZdlvq6L+Ms/Fp4nfzMmQgUAEABYvbdqSOSIcq0M2AQedrtus92fjvf3R3YMqoUMg5uO41/8+ZeEbuh+NI3L+XBqGB3GvosqNWl6cX339u33AbXb3jw8PhzuDwquGcL+NOV5MQBDbNs251Jrjg63Q2DkJRdUNKIlSyTQIiLKTGCaShFTJBIgsXqcCqEjRCVdm5UUkJDWh3vHhIhSZTqdsaXh+soxvP1wv+2bu6vrbbd99+5d6L2pB8P+6vbD077lOJ3PZqZqWpUaqrWwWtu019fXDx8eWi/c+n/xf/u/U9vu9/Mv/+Evp+PT9vbl9o7/47/912AQOZjCeTpH9GWpfRvOpz1b3W36X3/131JOjtlM2WELzEy7vkXD/f5cUfKcWhh4rWA2JTAwDURWqzEVVSI+jwnRPPnb6yswcyGaCqCvgoFc7PoQ7HDYI0ep5Ztv7qdaPzweSqLr7fYnb17MKR0XnQ6nQK5vww91Inh2D9nF4AbP4/pFEjW8UIfwQsJao2kXBA1flFMFIjI0AwW91O2YAaDiWg+1KkwXpWkd4s3skl4D0GfOCxIxETwbSnCFHyFXAzBzZA3ZvHzsm8YrPT4cHN/kcjieZhFSARNiFqwcvMsF0zxJnT3Hw1gOqQrFh+McHIboGiZTub7ZOEf7/f5mMwC6Zdo7JqilzLM4Uxfi5prCJldRtLaLNU1pyUkr++CZYDnpfJS6AJpoKWVRMx87zCmdDgqapzHNC7kx2R6hEqasInmW4DTbSSfipj6dlqcHPzizRm0BSRDIhzarl
WUU15K5iPz44Zur1z9FZF2mUuvhQZvhVd+GUrTk4p1/9eLut7/+NTlmF3/0+rNv370/pLPH2PjG5qlM0nfhs1d/9u//l/8cgT+/u/n9N+9O6l2eYSOfv7g6Ph26bVspO6B/9IvPNt+citrXv/m+wXB306vpF1+8+P0f7r/+zdd/9Re/JJ3uv3u32d11d3Epp+729XLGCgvIbCa6toM9J8yez127eIgMnqFUzz/nT96g59959iF9sg1dPu5iTcKLUfOHIhSsjiIE02dOPzwb1fAHL/IpjXv5JOTgV1wdOfKOIjS1JMy5FiXGWqVoDcyEZCZIhFYBoIqaWillVTNrravQaagiq8cQU8WlQHSURL1KREbEtQiLiAgEaNXrAfVZdiVAt1LexRSqgVUkdAQgsobRKqiKiRmqZhCzUqtlQ9Gqpk4UzAQURKwKZK2+gonFjtSg5MX5hkwZ0GNQYnSOCH1gKIRKxM5p1KqeCJwyOyY1REfoiINDRCLH6zP8tgttw6EJ5AkUCdmkAigiQtFaUl7NQznVKkVERAwJkImDC65t2tj1EIbQbNgHz4GQGUkBRWUzL8fjXMYxjeea55IXHzD4VSoSUVOFEEPsGmbnvVtyQVLvnVltIg/boWk717SKbMYEpILAjphc03JsyQdgB+yA/RqptXpB7YvaashSgypQq61nkw+x72Xt68gpLylZLk4lgLAVAyZyohkADBGYXHTMWA0Vkch5xyE0wEyMxK5e0oVARJ6cRwyOnz2Z67m4vlsaE5JjIGdIiEREqyurCFSgpHZaluM4Hc/j43FajJRMa1UTb7QOwgKCBLLajZ89eMT496QiIKopI3hirrUytDmPkchQRUUlAWkXr2LTm2bnLafKQIFRajEzEI4VuI7DpoPBt7E7jlOI1AQEYGemtUqW6Tg6E1YtFTXXmhdgmk21ZjRDw2JFzRBYbZ3p1QeSVBGIya2KnGMXgmcfFdV7jr53JCGgSFHzYKDVSioucAiR0JhUSl1hYxgcOQJiVEKk0LSxCWNOxPziizfNpi15ARWoxRGiCmk1raUUVZVqBKimqU4tNcDMazzW1IfgXFAFh4QcRcGBaSnh+kckhjqHANZ1i1meM5DPaWZfusilnpquD7HnQC7F8/5okYFhnuThw7u7129ubm7352NJ89AO47hnvnJNSxzqkl68/imS33/83oHFZpjNixbAYGIAxkS5LF03IDgX+6VOO3d9ff3y49MH5kAcSp6id0as1RwFxAKAVuecxbmIBN55H9iZK0Xzednn+6ub6+1u27btw3f77atXqdq7tx9+9osv/8k//6t/9S//ZbQgAEVSBgM1JkQiAWHAOZ2IiIiqqnNDy4PIInVZ0ljnsYuRQ8hLkWrU371++dnp6f3+8T7PlZARBFYzIgE4FIViNXJkUyI0QxUww1rVbI0BV5WiUpWciqmoc2sFhyGslksxqaYiUlVXxOflberu+rWP2/3XH2+HZhu5s+7LN59tf/Fzj27o2uApogb6uTMCSc6DABFHSROxGhI7l4+P8/5bssY3TM5JcarCTISANUsVKRra1tDVUjfXL5r+Lt79/MU//j+HN5/rZluJzcA7JlWSasssZQmBDNlKZVd1PtblLOmseSIU5hCdbzcbwGiQSpnSGZlAkbF/4RhLzlYqeWx3V3U51SVDPrPh8iGF61u+DnEThtc3qnZ6/74UIY6mNS0navvYxCo1TQdw/u7nnw1vrt//5lfnUoerO3BlOdbHw/H2yxcVdGiaST8An3/y8y/m01TzHMjYC6IW5ONhani3aQfB6mMgcqUUNGcCJVUy8W0D7NDAhS7ENngikOl0mucTkDNyhbMRXN9eLUWp6fu7QFAlL4fDYSk6gktoNTjV+v77D0Z0c3dzdXP7+van/HTf++El4uN8mMvT1Yu703ggKwIwpidiIHRszmNWnxxHR1zmTOQAeM5T14eSznVJUmdmLvPsjMxJlpkdkQpU8c4fT6e+u/JNzymxsGXqQlQBh41VdNiRObAUWiasp/mdtJuFVZpNt72+v//WbEKf+xhOY+k4+NgseZ7qoyunJmh0XJeFwZxgWNcaRn13ldVqljbE6oZQzrka29VPfvbL3/7+/+nZ5ZIw5Bib7AqTO6ZT7KamH5xN58P36iK2N+f58Wr3xYtXP6ky7j9+0/DFT4FEIqqqhBQ45Do751esihkSBU9OanHsAJCZEHRWI8cm4tGZqpg5VitFlRxFETMAZkdIBGiqjvzlqdBQa0ZA5GC1VqjFgAnYC3PI8+JdAHCvvvxLnO9b+PB0Oh5BoYhiIzkj4c3N7rvv3y5FX7y+a9sSS1Fsvnk8xe7u8eno5lrZ397d3N7tvv3j10VhXOD1Zy+IXNO2jfH3v//Di/PLf/CPfvLh/QMIPEzH/joo/f+p+q8fy7YtvRMbZppltgmb5uRx95x7y1yyyOpuNilBgFoCWuC/K+hBgB4aLajRRQKtVpPsYhnWvXVs+jDbLTPNGEMPa0eeU/mQyIzcsSMyc6615vjm9/0+/XLzxRt6Nx/27DQ4ONz91N3cIhs7Z0y74fHm6gWILZOnqRhWcp4ZAen6tnv/4eHm+dabgSqQC018eNxnmdquGbIedvsVNfvdIZ0eGPAv/vJbOY5/991rJD3thpKKIydVak4ZIasbizjT4HA6pWiFQY30atV3vu2iq646NCNPyGagJauzIe850HE4bbuNCPlV4wL1fU8Upo/jTz8+Huc5p0QEF1fXgd00HYbTqFqi48NwGsaTVQieQtOIiJo5F5EawNJurxHdVFLs+qvLDUM97A9VsGn62MXNpjvuRxf8uu9ynh17yQWd67uVFZlyNbYimVBVK3F3c7XeP8zDMa/WVwJ8mvaLCw2QDH1crZmCd+40Hn2IMbZzGjkE37W1zufgPjXTOFayLJNrnRKwd8CeKISmGUpKjyfXtIymY6ZZiTmugoEKoPOe0GoRjFirssEZtQYAS+kSoi5HJ6qAYEafEjmEuEyuQOez6QXue86jgS1sIwRk4qUwZonffIqGGaHBOYBR9eyDfRJuQG3hd9DTqIN8PmQnM2NCQqxViM6E6ycw7NmNpGa0gJHOh+WL4R3VdOHEPJ25fxqxzu8An8CwZovoRIiy/FszG4AYEFET3M3l5uFw+viwr3WJrDrn6/X1etUGtVBVHUE+PFopF7Fpu7a7uDxM+5qL981UNHT9MB3vHk+HQy0lzCp3w1ikOiZPlEtdOoaj95uukZoPaZRKVVAJRBEqIGl00EUuSRydJbeiS14Qg188J5BKQUTHpAZMxGBZQQTzLBfP+g/j4fn2T766/ezh7r5r/bqNzTpcy/bq2fXf/sP35rjZtpVK6y6oAqrVPMewCt57x1X05WdfPOwePv/qM+awbnqKvJ+Gf/Fvfh9q/uv//P8d9sfjw6EOY+NDNZ3n1HauZvSuswj707GJm9bx/cePWIGZU6mr6AE4rFaDqUP2AFcXG+oKZri/v6+1IJOpqqnzLQLOKVsB49KwI6QQ/W5/zDmjQ8lZjRjNO4/+oe+I0R+nx2GYQ9td
N6vt5+v3Hx9Fda662Wy++fL2y8Ox366GPP7VX/8R4IlU9LRGlgQlnB1DZ7vGMtMumysiBMNlLcNCcTo76WyhwCwpZzXQM3wI7FwGZQZGtvCN0GABZ5wDQ6qG51K/BeV1rkg/n4SrEvOCQmJmrGWejjJL7F+kGXC7CfIwf/i7cfpouYi5WkqRGttX4DtO01BmBx7IcpmsonPNMOvFbe8bmsfUb7e3n39BzJvtliwddu/QamzamnMC6m5vXXdprimqgIq1pPmkZQapXbe1evIK+XhXxwGQCkjJs4kocJ1TGvY1jQZF86R1Lnky5ZRSE4DXm/G4c55ULKU8WnFZ5uGoRlJPxK1J9t1qzrUaiivC2VOYh1lnqaM4N6GclPPwcBc8Urhq+66azNPRMd2++mye59PhsVr57OXFz3enH9/sXz27Ta5erNp3b+6s5z/5k68/vr/b9vi7b1798Y+vJ3L3+xLC+Pz5s+VEocxJrMae60Gvr5/98YePwQW12t/y1fPb7//x53/88fs//6/+4t3Pr9//9PZVuwbXaqdNv6bMdR4ZC0D7tCc/89hgCS4uCqOd9Wz7JFMu6+bXHiT4pCXBUvC4ILXwbKvET68CeIrUnl2XTz+fIzdP9/zFe/SUTIOnjzO7Je7KvPBfgqmGxgNNGbLW6h15Fx0AmCByraXW85xhimLLca8t6WVEMhA634G1Ks4Vm8qtOVFXBdiRoS1nA85HJjRilOrZSalqyzfPSOwoK0AVUymyBEAVRKpBFc2IAMCgarWaiJoaiqpoKWYEVsCAENVUFMyUAGAuxC4ysyhKNSlVCARBoms6wsC1AdBgTsDlLGAKTrwL3hkoOCqOzXszQNeEdtV3fde2bRt9oOUkvZaaappAi5qVVGqVaUpzzvOcc61VKoIRMwAuwbS272LfNf12tU19v/Ld2rkWHSOxmmPvyPEcyDHWmaVEteUkgADFsS1U6+URWGWBBZH3DpH7PjZtG5vWhVaA1MgA1Bs5z9752JAP7AISk4t2lkxkCUuKLocsYGBAtPQEG7L3jqhCa955H9vjfghNJ/MMpbAJlBmxEjuVc7d9FyOjoimKIQI7Ck0w1thGAiD2kotUc6ZMgFAYMpsSBmfmgM2E0CEpIC8ivIAYgENYGCWlaqo0C83ZxmEehvH+fnc6TrUuf4sKptWMlps6IZFDO1O7aMnLn4uRPwXQihIxmoDlvuvAgF1L7AywCbFpN1M9IlKeR0dLTas552JkZSWixq1UxzLsj+WAZDk6QC5TQYiMPKfsh/nyJqhyXK3rNKIXBiiiBjrmFMkhGCKrmpoSOTFN86CSgyMAkHkqxVxwnn3J00LOKGImjgiNAYzZOVMIPlDApm2BwFRAK5h4vxi3c9u3ITQct9V01TWSRkKIHXebi6Zra55lHpxjMKy5zmkEEBVNaWIMwbeoNZcS3QYRzVKI0bBaVSI2MOe4SCLCpu1VpMLQ9fiwd1KUzbFrXKixkaqZfcxFaJq6bUAEZibuwPkW/DTvDQKDfzxMw/T33/7+v12v1oc05zKENuR54ACIbn11s3vQy2dfeQ8P7z+cTjv20anzLqacvAdmCk1fJJvTEEKp7jQMtzfPNt1VznOzjkheTVTNe5fTwRz6uKLqPJILTqQUKXXOCI5iuN6uT/v92/dvm+F48/zF869fvvnpp6sXL8KqffP6/cvbi99+8fWP3//MQIoChj7E4GItCURjpKLnWgzvvNpYSlVJppWsxiZOc8qno7Lvt7dxff3x3et5/0Ftcg5MSdmJR2QhtMCNijRAbCSqSCy6HBIwAWlVq2IiIlJrIfaLZdnMQEVFDM8VwsvR4LKvrqKf4J/5sRQdv3n21RdXL/7kd78LhHk/MQGiheABC8EsecjzBELkHDF7H03ncbjzGNBF1/Sxu02nEwA03UqKqpSqRc2iY8dcxQiMTEUCNVerb/+y+/a/CV/91vdsqFwz1tFOh/3PP82HYx0HF+jiag2ItcxQJ5RcyyR5LuMAJr4LagKsTbsyE/QNmt8fTuaM21VoN0YNYRf6TfADagEp0+nIjoHCfJ+9GMRtuOhbg2Eah3ejJwIjor5mFCjEEJz3gcs8NW3/xW9//9f/6/+v97rZ+vf3H/6bf/5f7fbv+qvNLLC5uAUkUStt3z5/dff9e5jnJgRcdouek5QQueYiMCODc1SkkIGPvloBjG3fA/lcJM+jYs2lqlU1DGEFJ266ht3gXDzt7gCATEMTX7x63l1s3z88Que++/g+IzYXPOXy44fXrx/ef/7Z523rhv2P0ziMRaDzIbQOjjINwdHV6hmAix6q6MPxj8SR+PaqeeZo9fHhXeyj88A9H8vUcu/9pkqt5gQgyeybaM5ODx97C47Xq7CFarkWwAgQNa4hNnmesOYurmoJYNFIMgwA9ZSPZqkmKwLAME330QvRzMTMJnPyoWZJw3zqnIrllE7k+Hh4uL36wtQP47EhRreZ025Mp1Wzic7VeiwJfdOddHc/vr9cPW8dl+lHaj+f8w7QkY+743tP2EXnJBr2qq69vBFKUzrVNDH/smFa9G9RQTKVAshzFiKoUogdgSuq7IOCqqhUK1WdI1Rz6BAxS1YwICYmEKgqQEDIxKxWRMUMEI2J1VSMyHlVJY6ArtYsVTk6raXOYxrn3fERtPz8+qcNTq+eXV9eXT+eHnaHCarGrnt/t0PPoV8d9uMfvvvoCINvnr36zQHeIIdu1e8P43w6ScppHm6fv5jTZOabVQ+K4256/uXzP/2Lb3/8xx+Q6ermqszZoXv54uputxOC3/357z/89NPpeH/cP/TPnzdduPv4sG1XaAiuVx9ILBKz5+jckE+YvFbcbtdzkaZdO8fpNMYQU7XHh32dMztfxSLhs4uVI+jwwtvlf/jf/tNVt1XO19fbWu2Q7fpqbVX3uwOxy7mIoiqmqtRgBGXUy3Xb9u5q3aU0i1mtuaJeta0iTym3qx5cMGREe37zch4/LhVGCNR28f7++PHu8ePdozKejsPFegVATDgOs5QS2rbUOZcSm4iGoOY5gFRz5F3rqQXv+gjzmK62lzE2ninl4py7ut02YZvGEZSjj0gwpaSiLhCFUNTmClgVQOfh9PzF7z6MH5gLaD0d6zTNITS14FhHM2KCzjfDnG62V1OZpmnICVzwRPA47JyPHkmNouvAG4cwTjn2bWROpsSRvQ9NkFJzlloGBqgZfi4fj113se03qxC9TjSQw65b5VwIsEgVUyYMTVNqepoazABAFQHUlGnpWgJRVVHn3AInUjOzhbeCiIREotUWWDWYmTFSlXpO5PzCCIalOAvUiEGfysWfxp/zWfmiQCHS8idLxbIB4OLwAyMmBYQzEP4pnGFPcPin8UjtqdL8HOx4OntHUFUiooW6DWezEhKYLFVoaLawYJ7yP2oenJIrFb9/u1NTEQsxmthwOPz+29uvXl7+9NO7zWZTFVSlpLTZrMwoAWBN6Onq8sYgfPhwPw2n4NicC6vu7u54KuWY62XTtgz73SMAlTozQ982wYfdaWZ0zlF0gIEB/TQlRGUmQlx1fYj
NYRxzzVrBea7VzOqqiU0I82lCdM5AAFShjx4gt9ERIub06vnVtN/Tlm+uL5q+PYxzv1ldbC8R3J/+9tsfv393GqZVt56m4zxNS1Ygdk2MUVK9ubhoAm/Xq5tnl6t2++7N4+X15e8+f4VJ/+p//vfDcPjii5d/8+E+9jGneprmQLRerY7HecyDFWq6FjO+e/igNaOhlOq869dr59q3w2nW/LsXLy68e3y80yop16RihIakYM7Hio5QKXjjwCGG4JHwOJ+qifPWdevt8+frm5cE4p0XrWzJ+/Z4fKy15DE/fNj7SF9f93Hd/PBh+MOPb394dx+j492jhSeB8kwberJzLEaOJZyzzNW/EhzhKefIi8T4FBU6j+4LjR0QAJb0x3nv9iRT0jLIG8LCmUcCBUT8hGZUAyJmxCWcC6T46YIBhMVtrgpoyII+ztZnVMVJ8mOa7l10HOAw5LhqXRuT9AqsVgyAQ3PazyF0Fx5rpu52fXXdvX/7fnWxefbiBgBPx2O/6nQqZTg5trBpxbS9vHGrS8Goqum0Z6s6jahiVhyanaDku+H0ANVMai3JNVFVa8kioODLeE7NpDSXkqd5Xq9up+ORWjcj5SmNh9I2rp6OpymhOilTVcARfMyeedAjOs5VGJna3sifHu5EfS7jeDzmeuoubz3RvH9Un7m9WvdrD3IcDr7zyFBys7//sL242KzjrN39NOVS53z47IsvDof75y+2dw+Hv/np7i9/99mXXz5//+7Bdd1Pb99l1K+/eNau4ocP9zULmx2OD37qjF3N+WLbHz6+f/7lN2m+etjtfvjux69/86d3b74fj3N/YVay+c41ndaTygxxDUgmusBQzvr0L1iip/WGcNbnn6JknyJjAE8enU9r9JcF+dRw9mRIOi/ep60+/uprfvITATzRtH79nQAgs5RKwOwa7wIiKlXTAojOUdMEMzJABpqnSWrJSUVAZZHrEdBEfmmSWvZXS72mGRqYERWzIiJGRY2kEoBDDDEaABEbM4CYCiyHC6KqZopmbGDLlSRQGLGaiQmCKQKYEoCqAqhIVq2GamKgBECiYECiasAVFBXVtKIqlYqoPgefFv3fLKAshyWOXUAtaOKQDIpIRU/eMaEa+hDaBr1FRSTXxth1IUbfNkgsVq2KlKpaUs7zPOeUp2EqIqnWUrWI1CpEjIBQzTtS0Fxzqurm0s5Wq9a5mEDTGTrxTbNI1N65Er15r8UBgBari2rsI7KRC+x81WKLIdE57z0jOq8hhth0HFpkTwrIrkrhGBmMvePgiRmZiNzSV78I9IYoC6kaFmCRKaAAGQUO6JoIqqErfs7clqZbp3mCkkBVq9Sc0YQk1zwjgEOKjGympaScYZlJEdg7URGDnIZSiqgRcVULHEVBDVBNRRQLOUC2pZxtSS0aMgCKqKIVkWqYFSti1VzSdDoNp2HORdTYQBnRgNGWjlVFM5WKn0TVJ9n2n0hFLjhijyYmqEBmGVSAsBRVqsJBxYDRgVctDjBGFksCHIITqQqjYQVEUXYAJYFjNDKZCjjlrnWMp8PeVm3WaiWzmoiKgHfsnKtFHGCRIrD47nOthRS0iAKkPDvi4HxOs9VpgbxkScwO1WqpFLyYoDnnHEpRZcdswL4LTFRVmVDVTEstOqdi5S62nYotVPt1t3ahTeNAIG10OWcAEilWjcmlkoPr0RgRc5kZwDHlkomUHZkRuuhdRGB2zMSaCwCIilk+3P399erVfh6ccyaiTTPPsyULLogqqyyozFqEnXOefIPU3ZSUrSQEJwKvf/zjF7/5s/bV5x8+vnfMfdcNpxkcTGVa314P+8MaPgNjH/eH3WBkWisHMFRGEqguUK4FUZcmvNNhf339xce7t02s7NUAUhqYgwigmbI6z1rVYQW0qgLgxCqYFqVu2/PMH9/fB4pXt5cvv3y5e3f0rkF197vy+7/8P797+/+YxuwcKwiYGQgpMHgtWqSiARFWVaWUq3qiEEKesqqMOccuXDy/dWG7u78jGfuGi7ppqqVUA2C3bGVBRVEFwUTy0nyxBJlFcskQpa05l1xCc77hq6ooYlUkYqQi1aqAKojgwu1S+fTMAIDr7rKD9f/h9/8iopPToZoLHDVlHxGnqaYDU2mCd6veeT8NJynF5MRhHZrnVvF0eow+cPMi3f2XcnxEhLhqQ66nx0PKhRpwXVit11rc5We/vfjyn9HNF5df/hb8GqZDeryv6TA97NLjO++YCUKM2GDsA0eUVHxozAVCcwihaVSqaTWRmgfTWSTJPNg0mEFwDrjO+zfl4S37rukuc7sJ6849/xL3D/n+Yz6dupXlXOa3b8NqcNc33unVi62BzI87EwqxkaoiWmuupVyELRmUaUAO/+Lf/Nd/8x/+6vLVn7/993/9/sP7Z68absPpOMRVbFfNdMhk2t88LxDKcXh48/Nq7ajtcq5UljN3d5qG82k3udBEaEnVmqYvSGTmzqovhtWKfBN8q8UUlTxO04QwqnmCANTUEg+P9fWbv59zfb97tHUHLc6noxH2mwtFun/7hh1cbiKCRsZcMpJrQphk2ea2LlyRHS2Pka6AyCrMSaRiv3oWmybrXRr3neuj74YyAzJyMLPT8AChPaWJQgt+VdWt+63mOVCD1GhD+3FMp0ckaXwFx2Peg/XBS5VjTiXXk8rJZq25hIDO+5QGpDzkYiEKypyk7bbM4iPvjveuALHfbF5lcR7D5foynw7oW+ZCpzFgEqOuaS+vt3/4h3+8+Pzry+t/IeMHcbS+eGZ1Tvmg5ly70mz95pa5vbrpTjmrs5qnPCWDSqrNeiVPZQfL3gyZDUy0AhIH1lrZBQQCpFqLqnofkUxq9S4YqJQCSEu/lZ739aALG0VFpWLsjDxAxeVwmCjnGZAVhJlqyUTL8RmnVIbTUOf8+PExp3R1tf765aWm+D//L//ry89ffPv117/77eX7Nz+HbtNerFWtqrrv3t3poVtt39wdH988guuPh+PNs/XV81tP9PBxPwzz/buHi5utCh+O6ZvffUnPys9/+O6bb7+4vrx6+/0PjuxP/+Krn7/7cNjv+j7MWVBlc7m5ebZ9vO/TcTi+f+eV5/3eE3WxlZQYwDvXsDNVNNeE4LrGkIfDhABlHrdt75pw9/Ydk7vaXo5T9iybzSpND1Xqh3dv+qb71//dv1bFd9//8PVvfvfHH1/ffvPPp8cP719/oItY0yCSAHQaEwVsHDDLFy9vQYVQP9w/BudWqz7EqEDcdDESArQhuriWrFLypEdQBDTvtBa9fzx9uD9NqhRsShkIgDwSn9IMpjHwx4/vSslgGoMHwlxMta43a3Xh5uLFODx69JtV83i3u7i+VpXd/b2BxrZf9/0wTFJEVZu+R0fzPBA5As+BJJd5GsgoNuHF5auP+/cC0rRNIFeTOG6nOQvkY5ob3677zWE4AfvHPIHm1dWqa5yJ5GrNZiXqXK1znrNUBXLEs8yieQuNKo2nzNxCNgRADzEEzZkA8og/3h/e3pVnl82zZ2vA2kZXSmEO4zQzUdLqCAFmO5fFw+IERQIyBDBc/peXqY
Koqi4jCxEAgoggOAOtooaoALwAzM7m8LNZXMwWfgAvCTRE5LNkQ7iINXb+hQEiiBoAnq1ACEsgzBb+L0Jdvh84i0eiivRLTdDyDvp0QP6kf31ifdjZ0LHQnuFpBiM0XSAai33jzCpWRIfowHzgxvsJ8XicSrWF3y+1mMmf/8k1pPR3f/y+RV9HlWgZhnUb19vV41hW3VoBxzxum+vdbgfk+m4153RKMqVSrDri666VWu6mKToGIDZbdT442h33YNhwJGe5Jkdkll2HDl0ucBiqyNQ0sWujL1XGQoDkeC5CWVYtIyqiNYzIPBcpVrdXLUgNbbj7uHvWN2Yndsxrg8qayngc+me3Q8rrzbaL/TSNbduN+aPWbABFU+wCgDFjleIcffXVt89e3ny8u//sN8+un9/Mp+mv/j//fn4Yb198fnx4PA2HWsvxcHCOTO00TSlnUrgMz0TGh/Eul+qQxGy16ZEJgjvmMTA/296mcfj+eAhEKVdkAuY5ZwHUop2BSCaQro1N2wDYUEYfIEb/2YvfPvvsy37ltZze3e3H0yllY6LGmeRdTkdPFCiGVXy4f3SE6f2HuL786vnlw6RCOM850Kc2TEA454TOdh+DpQkb4EzpBjuHep7GcVsUpk/SqP1T48byiFmm+7PVDmDhmywFfQAIKgtLhj4dap/f8IkXgEtPL8CCERAhW/gsCiZoHNYvKqxBd62TOn4ohwdJCmHjN9ehe1bG+7I/aBlJSxPdKNJ2YZ8mNvrtn3311z+98RRe3mxXTXz1+cvTlG6fXU6H3fBwJ+nURD8NZfvZN37zErmxcdb5EG1GndQyWpmnU9Y6DQMhNoHmlEtOTJSOQ6nZDJF9zlJzKWOuNc/TnEtK4hAtTdXKhKBpGqUCms4pj+Mc4wpU61R98KmWrqGUR/Zg5IFVpz14KulQapimB50zCOmUXOvqPAakeryDiKtV6+LFdNxJ3XVt++qLr+7fvomerwMeDofPXz7/7vufP7x/b4zXTr/6+tl//Nv9f/7+7f/x99/IxXCaH774/Pr1z+830X39xdfDWvfvXq+b9tm2v3+YnO/3jw+XL69Wq21K6eWr2+Ph9PHndzfPf9td3u6PDxe3ryhCUjDwAAha0ARhEaCfzEPnGfUXuQeeFt6vhZtfqZJPL3zKReKne9xZb/plUZ4tQvbLLz6935NxaRkF8Oxa+0U+AgD03gEzMulilFM1E0cA3kVHqjinlOaURbSCGFUVM6QnhZ3I+KxKQbUKCESAdq4ORF6SwVTVDMgMRQXIgSp51uV2zLTQ3plNqaqoajVRVQUVRMdAS+wZ2azUs+/VzDNVVUQ2g1qTLUUNVlSlmqAZomucA+RkNpvW4tKM4tXT3HETpATwWLMrkwsdBjYKtWRmNiArGBw4pkguFfSsrYsEiI65adg771kZjBQApWYp05zyYX86TCkVqTlLVTEQUSYCdIZkJkzMjpkJ2ZNj9JwlzdPI5IgOZtatSJOgiwiIWlDVOareCSICOCZD8jEwo6nqmWhGTOSbtvUBxABq17e+6Si0uIRXiJGcQyAmZPLOIRETo2MgBsAlZrtUyGld9GszJFE08wToXXShJQKBGrsaUipTttpBTSJaxcQUTLEmrVVKLmm2lMdpsipo4Nmjw0k4ZVMrUgqJMJj3Hq16Zls8TVphQS0BEhEhe3ZMi1duqUIFwCoqYKDgDLDWmufTOO6G6ZRFxBCXdOTZJr1cdIR4Lr7ABYHCZ+vTP5GKTE2xMkKpRSUzLy0ECKZScNYU2Jc0OYQQQ50G30RDAHUmprmYJ3ZIwYsZAdQiqBWYMHDsWiQkNB+IAxsaMZec5ynFEOYpAbulopDIm4ojBFTDioS+CYDg/UpETJdzhtz2XQjeagUkNHPkAVCKiman4pBrLU3ToUOtDn1j5NEFUiNqiEvNc4jO+4DGjBC6ENpOVaEKR1el5FJU1HFoYz+Oj6DVt2uCWNLRMyuKocQYwNgQAZ0jNlOy6giB2bc9WAFS59oyFpR913ZFigIQmfME6gjMFEzBqvrlL0GhlMJMznswzgKklQkfPj44/uNv/+Sf3Vw93z8+itamD7lgnjOArbYX1kWVYqSplloFlKU4AtNach6bpll1KynVe2QXCL2BbC6vqpU2hlq1CZ1zZlDBxNKg6JxvxnnveO2pa8J2GB8Q4LDbhzZs1uubbMPDPh1PL3/7u+svXh7uHjeXm7uPD498+PLzr3744TtiyjITYyqJ1CqoZjPExnky9cRVCiKJ6jCNIsUUVrcvbl/ciunjx0dJo9YkWuZ5ZnKhYVH1YGIkqgIiioCkRKJ2Fu3JVKqaUxVDExM1NTVVrSLIaEBgWsxEpIpWsaoLD26JVgDT+ZHwzWdfRludHsd9Kc+urxQtrJ3vMTq1ypJWWlKpiUQMqydZMF6IMxpVcW2zslqYcbXtTsXP4yi1sMO2iYlYHQyDKNLnv/+v7fafXf7JvzpO9/sPP+W7R8ynenzwrQ9dE5ro22iGroltu3btCn1oQgsuGiycPLFaUBNZIqhBi8lc04RaSLVM87jfgWL0NKVDrfthOhg1zfVF3DyneNldxdPdd2WeLNHpcKRwz4eH7uq5sru42U6ex/uDVrFSAvlKqCbjMAEqIfuArvG3zz+/Px6+/vPfF5d8f11lvrq5yqUg8uVqzTXXqYTIQZuh7UZJUtKqbaKLKU/sI4fGqrVtx95RINW0ajvPyI7R3DyObddz43zTpZJUhQhcINeE0G5IwcX16mKThtPu8DilPdTahfZmFbKH/qIZQ0iljkMyJ3G1GQ977Z71m4tu1Z/G+ZRO1q26jufhsVhMeS6ayzyh0zZecAWr8mz76jjuUz2J2Zyq96Eoi0CpIjoICCA4VCq55DR46cI669R3TR1GMC1Fakk6HznSWI8CE3GLJoE6tqmYeB+5gUHuPa1ZKruONSaZShnWzcqHy8a/cC58OP1ArrtsViB0fxo7doh1yg+Ba0R2Slebq7ucW2vEsTdL++Fqc5kPHxywqYMiLOtpvieOROTYlLTM6ry/6DfJ3h5Oj3Me2yZWkblU9v4cnVli2FVDCEAoVQxh6XVqOC6YLwUAsySpmnauMZUlPK8GjhAVnHdGVE1FSyBHiIBUagLA6KNKMdOyTLZmiOTYqSkCEWNFQ2PnHEa6efE81+mPf/sPLeYuus+/+Oru/d3/+Me/6tvQ9c08v5lyMSQxjyG++Prr9x93/XZzvx+221Ufw937d+xD36/Wm6uvv7j98H63XXer3u8Ox/sf3nz51at/+d/+y7vv3z5/duUZj7vx9fd3F/0mjdZ06xC0Ztk+e76/e3j57MWdvjntHuPmCqFFgDzmpm1iDGWcrIhzruvblDOBM8GSizJcX/aewg+v3wTn275RBcc1p/T2wyhpkCq/+fK3u8fd/t3HKuKM7n5+uLl4/vnv/7mr5f/5f/9/j+VuRADvmUqeahN9F4hQhoRlKk3EUokRahHmGmObpkRKbduNp4GyqAEoOIboMHQ+RH77/mG3O2XF/X6oKWuuqCa5TsMEWud5zHmcx4kQfQib/
uI0n2Ifbq+vZS4+NrWOno3Zj6m2m1WpdRom5HD97KLOdbffx9iHhi8ut8dDOg7H4NGxc4zjNE5ZvI/PL798OL3JZWJUALhcbRTL4/7I3gGTqXUxeIRp2AGYj02IzlOHppJMS81S0ZvKBGbBATkgRwLler01BSiDkKVURedTSSIWGsgzO/DgGDnGrqulvP5x2B1PL66725vePzy2TdO0Teh6QypSVBX4vDE6a51VlshMruKZVerCfF5yBJ8mX0I+G4hMDYCITNURlKpIrCbLvh3PeQc4A/KW0/Bly3k+p0R4grWcXUVnW8cv0/LS8HzOWiwj0z9Nj53fGPSJwmGESAYLDObp7H35e5w/U/VcmkYIioiIuhxL2zmrwcig4h1su3jI5XGWVJUcZ5E+MBo6ZidY5uKMvnj1hffdMe+/+e2Xj/cfd+Ow6VeBbK5yubl53J3uH/adc8fjeBqOeZ4Q7Pnl5Tjm+48fVeV6taolTXOOwTuiaZwIEJiLVigF0ECFED17UKgobOI911pFoIp6T2paihAQAtZS1z5WVRSJ5GYtzG677X786eceGufiw4eH9QV5Fwg9YVQ5Q7KiYza7vtg+7mF3HJZCMUKopQI4ExXTi+urVbtl5vfv77eXq75pyzT/l7/939HBt//iz9/8+NMPf/+9qpVc2hBj6/KYAHC16lYh1HJ63D1MOUXvHGAIIcRAzJJzHibnmzwO3hGRm3IBA4+YSwmOlUj9mQG06vrVZtutLqpWo/L1q5fO9ftZvnv9YT889M7FbhV8m8eJENrYrW9uNg3fvX/TBLe67De9B9PTcZiFH4+DQvDr9Z9/+6eFK/zVf1omZ6BPJ8vLannKAf2TOfys9ZwFoKcl+mn0X17ziSnzi5XoU3ryEwXpKdq2xJKI+ckpt7zPklqmp6K/JzMKGGgFY3aoYoCOQlfmgjp5PYzjnTpT17mLr0K81VrKmGOoDmyyKqpV6u6w6+Km77Y/vnsbGPJx6rtwfbEuU/bs6zCMDx+PD+9vnl80YdVsX4btbTXW4WTTrvVap8d03LGD+XRI04nQVJ2ZG+eax6QgFcyq5jwjM7lmmJKWKiWras7ZqgDQ4bBLeTYonCyXpBnG0YosnLHMCEVyVeDYjAkZgHKNbShV6pgQsMzGwU15AKGgnA4HqI6iYMONc/X0CK260F9tnx0gD6fqun57eXX38Y6KtETHh7tvPn95d/9YUnp4t/Pd5qLf/vjTu7/5+5//8k+fH/5wdxiGm5vNu9dvGmxefPEljnWeH1aXTc4yzkWL7e8er55/OabI3F7dzLv7D/fvf/z2L/5smkIy9kouBlRzbau6OBqWWOKT++xs7//0k+HTnepXus6veERPiwieTJtLD8CndfbLov2kHy3VZr/6Ir+6Dy6kt+Wd/olQVEVi8Og8MtdaERlMmRkIU06mlouWYrUoKBA5QkNiWNQvVQJzHFSrASDSOZYDaCDn1wM7YiRHDESAqIyoaGJKCkzMnggJGdBq0QqoAAJaVLKqmVRiU0BmhsUBSgjAZgJmtlTGEwMKqoos3QVSpVY1JDYyYkB2BARgYppyrcdDq6JGPSIxE7k8DQTgvEfHAF7VCNBhBagMgEjMru1ZBdCxMTnvkWDxBlarIrXmNA+naUrH47CbswJYFU/snQ/N8twlURWtBlQBENmz98E737jg2DkwK7mmOQONvonOGMl5Hxtzho5jLFLJFBUIFyZ+tSqlZOZoZrGJbd+1IWopUnNsG9dEIAJkNEPneXHfEjIzOQZEdA7JGREYmC0kFBNVOd98DImAyLmAzM5FH4JzJFJCaJrYlCahiZZcclEkAbNSoUxlGudJa7FkUlQRjdAMTESzCggxmkeMzjOAd+wJznA2VZUCTHgm4BgRIIMRGNrSwKzAAKBWi0ABqgo5pdPpdDicTsNcZEHKKdHSuGG2ZCOZ1OwJ+M1MBAZI9Em8dU8bo8JIguhCtJrJN8RmtcQullrRBde0Os/ELIi+jegDMgCBViP2ioRY1SojmRgTuuCQyRwjE7CGdRNXjfMc+85KHcCcWiq5lkIiYOaJDSuwLeZocsBAuYhI9SECUa3FMxE3AKRqwXvvfAhOAWqRWp7iEg58YMDskOs0ah6Q2oKE6Cg0rBKb6INHI1PeXF/5LpY8SklN8NM8LoomoXMUsky+cYECgNV8UM3OMYHPZQi+BUQmBlPnDBGIPbFXICQy0eAiOX8aTj7K5vnNu/dvQ9eVOfsYGJ3VQgjORakVDELbi4JDtLkoSNeE4EJJc2QA8qfD8PDxw+XLL1zT3t996LqWnbdxcKBolaK/ePaZD16LjtMxzTpWdRyAkcKlaDUwIlfyyM4BikIKfY/imzaUVKcyMXrBuUotaWpjN+WTmoAryJJEK2bGENpOQQ+7/Wq92lxevHv9+v1Pf+wvNt16QximtN4Nx1d/+vuPj2+n02wKpZblOZ9LIURDKKCAWg3YTEmqKZpeXVzcXNzOYv/493+oeVg3Tds1tIlZm6ZupEjKOeUkpWoxUGBAI821GkAqGZEM1HNYGIe1llqyF62lFJeRXPDB1AwV6ClfClgVzGCheT7Zq89Xw7ObG8/rgBYIt33QqU6H3SASuHhDjyUERgXJQghSquaEBMQ5zScEhxzKkCpF3zSh7Q73+5oSZInkXOsMLZdMxKcPHxv5+c1hbrZR854qhJbJ+SauXb+NFxfu4ga7FYSo6BDQVMVUdUGbCpiiKViGeW8CKAUoui5ozVqFsNn263za12nwsUeQOg77D3dl2uNq79ZX4eKz1cWL+d0/aNJVbIZ5evz+o6Vh8/IbbBzzOnRdPU2nu6OoMCMAaE2mqVut0lwIYL26OM67P/vnf3b35s2UBiImcFLUz02glvOhQl618TAfXv7m2WEeHt/OyDhbDtGT465rPaID10RnUJgbhzSPYwUtKoxxngCLt3F2EUsZGR0VouSHR9FcxcWm75lwc3XTXa66zcrUg49uu0mgueo4wX6/56AQ0MNYjHa7U357Ty5aUpXZtxBh6XKqBVTREAfnN11oe3YNp0cZShrUWQxdkROW6h2bac5TbElLPex2SBFkjquN6pgKTMJi7AvkPJEZOacyO+/mNPexsZIKMGgIsEJxdZq37ecmF6H6WsCFy5wPJU8gBSwwSgMUK8eR2/VF7G8kfWixlLxDJdQCClzYHeAGX9VTOia9fnF9GO59L4yFqQHrkFaeV5mOTE27buZpaoLv2o4NlKoLvlXvXesC5VyJmzwl+hWrKAQHpqjgGRWwqlaxZHMTA5iRZwPKtoC+RKWKamC/bMMWr+xSQx5Do1qYXK3VVAn5bGlQdeSJoNayOCBqnZyLzvuSCrjYbtw0DPdvP7ClL17dnA7Hn35427BW8n59eb+fjqLG7ubl7c8/3s3Z8u54N8HqavXi8pbevLt/vHNATb9OuSr4w3BUrNr0dw93XXvzr/7lV7t9evthl4bCbl3NffXb37x7/WH3+n7z5apryOq8e9gFCLe33xDRaTp9/rvf/vT3f2+kmkfCoAYutr4NIiVirMUk5UpIgOPp
VCW1vtn0/R/+8XtNFcmfDgOQBYNxHGLszEIt8uP3r8cprVbx8rLZvLqqU3EhvPvu3z2/efGv//KfvXl884/f/ZfH3SEVRQy11CHnpuGkzKGtKiGyZ2DvSkmBQtOtYnBixN4lzczNat007NNxPx7Tu4/DYczONR0j2qHmWubMRKY1eppSORx2C0HWMZeq73d3set8bCm0aShUC2t9fnP14+t35iCEwI6Z/ZglFxhOJ0KMvnGhOgdqRXKhuHUM97uPFxeXaBBi2A0/e4+bPk6eG+LdbneqIzFbNZFqTMgwp9H7uO27EL0RT8Oc53rOL/abLjrLCRFjt8aSSqrGdppSyhIck+fV9rrkVMuslmutNWU0EiDneNV1tUjbr0pOP76eD3Ndt3zZYhzEhRz7RnIJjtbX26erYKE8G7ErtSDSchf2zItrQpfpZomSARAiI5/3j6qmYkiOCQEISA1UTcwAwTGfJ2zEp4kIkQieQgoECITLuK2AurRLmS74al3mYjsPSYsjSH91cP4rXck+xTXoPFrZk2Vp8XwAIy3gmCdOiALAp7JoBUNARnR0RntUxTHlKmCICkYIqaoW7Rs+nqaIEDt/qI+WHpmbNIXTrA2hNuHjtPehu47dYdoFoix1tiVR5Tj4v//xJ6vaIbW+HVOd59k5D8inOaMpIQFglhSDC45TLt57qTJPJRkjkWStqkQAIIvVxdSMYKq6BA7SPAKaB/UhCCK7zerm2+FQWocBWGoNjS9auQ2R3TTMD4+H62cXKU/k4OZZf3e6mw/m2dVSHCOYEqJvupvr28MhXXTWx+ZidfPD659qOXSh7//0xXd//Xenx4/OsQc/no6rvnPMFrlWuepa1vzxcACxwKFKXXWt98EAdo8HT0Bg0SGTZinHeUZDR3Y8jB4YkFXVgxpZaMLlswvkdkg1tjHEzbu7lPMBfOPXVzfPvwIwaiJhub26nU/jaUof7/c3m9itLqbD/XR4ZHbNavX85VpF/duPH2Z7d7f7weyzL58vVwEhPBVMIdA5V7bsmXDpt19o6Pi0jPEXAehXa/FXPxai1hkYv2Csgc7ZSgA4m/roSR4gMERYStkWeVU/2Uk+eeqWDR0QMaoKmGO3BtEoo84P6eHn4Xgv0DTPvm5uvynjfnq8C5QL1WKzYqlAouzCCqE/jXYa62rduSYWKLeffaZS29ZNx7s8T23f9auNa676F18YuzIeGhaCoRz2aTpYmdNpHg87AI+IrlmnacinXZmGEDjXmTFKUTVDN9YKJiJ1qlIdSJWc56y+y4KW1YecU5EMWrEUKFWoTuwAtIpUh87YUZ2Jl2JFKKk41065eit42DMygRvnwnzFlIfjfeyhd96Gx+n4/uLm85tXz5rT1f7hvlvZy36123149+P7fCxEGKJ/+fzyD//ww+qSvvzy5n6//25/fHW4evXlq59++v7m6vLm2c2716/XV6urz5/dvxvzPInz2PhpHvJhEJXVy1fDDm+/aNPxYz7dTVPxq5cSNqCu8YGkkkVyftGxl733k0r4pKF8wqXBr9hBv1h8znL5L7+FxXz5dFP7xVn56TW/WpD2i4/irDnhOeH2S3fkspw/1YR7T86Rc+zIuQBVDAxE1KSWUrLUCiYBiM3UQI0UEUgJz/fs5X3tfB627IIMwRiRCcjM1BQQvHNMykRABsQKoEDGXoCIHSG5ICpSiZHMiBTJTEyxigCAUTUMZsjgkWEJrKEhGJKLmpOCCZgC1WIqWA0UREm9R+9j6DokX7Op5GKmhWtCKoizGVQoULGyIZAZmhICcQxBysIUQ3aePHXosnPVxCGaFgZkxaJVVEuqaZacRcQcoCkyMzOTD+Q8gnnnFVEMilqpGdApEnJgH0NsYmwdBx8aRK/VQMBQfAhAjnyI3M2SjYkZTNTEaimqpaSE4AAQyLqu71cbByrIyuxjwxzYRzAyQLPCziMy0ln6QiJ0/sxSUzFRUJNalrMeVVkKtpldjMwuuBDZMSM5bgxATXPJoCK5LPjFKiI51wFO81hqpZKdiYKQZyRkJFYlMwdEAA2zZ2AGR4xgjp0ulwoGQ2fkAJkcLrQBQI8YUAGpLtwuVVVgMci5zNM0jlPKuYqVKmqqip98c4SkAOc2gvMFd+aSLni4fyIVgWmtSsTAyuxNFRG0VgPyxB6cpnnVt6Kp1InJm4kDY7O2CUWN2YhNtQIwIRgakidGQmUSH1hKkpREFLxnxD42Mqai1TtkJltaqRiIseZqkg0s+jO0vGp2TL4hAuLF7kYYgjczQQ4xurC4dGrNSa2WksygigXvAchMgncCIFIYiBCc5+CbEDsObk5TYALPIhlBiZmInWtKzYGjqYGBakVjqwSkZtqvtmisgsxOaiYGckSeEYGM2Ef2zTwewbBZX821xFJ93NRxzy66aEUmBDaryApkZlVNkZGdc3GV8uBcxKRaq5I2fafVvX79lprN9ubl+uK2polYu5UvRXLNVDH4bnXxQgXLm/9iDnHdTHN26Bv2arXWZCaxWbPzRDyc9i9uro+TlqRNt3Whn+dRaiH260047nax6UEJsGRFqGSmOQ3BO0Bq2n44JeT87NXLn374DtGV0ULbb2/W2eDuNG1Wl8eH72NsxqnkMiMxIQEhI6NZFZE6MgJ4h13/7PJWx/nH734+TsfG87rvvfelLJv7qAKiSi44QIQMgEvrTK1ZEXOeqwEaIKEoQhXntJYi9fxDtBqoaEUxBEYFVdPzPahKlbqQ6wFV9RPj/XR/SKdHTRrXjfNyebHuXFsFclLv20BODXzfBcY0nLDr3RrKeJC0Uxkl5XZ9ubnZTqfZKoSuiX1LwR3vD1KkivguXF9f5aS797s1f7jYBiuh36xrxf72trm8ovZKY88ufCosXo6tlwcqoaGpQUUTMAET9A5IwRSkqlZVBjAKrCLcdMiu5qlOCci2z1/pNIwf7+3hQzjuYrN2ptO043hJyIxuf/ehWNtsX3BsuPG+abnthuOQ6phP++1mXWqbrTYXWxPsuyZ9zHk6NTGoKrlGwJq28RyjW6PPRNxf+ClNotV16xA2w3wMFDbPb5ChojTOsWGtNecqbLOUkpPYHELLgQjJkKtK7/u+az3TOB6nkhkxdI3FzofWDD7eH4afP26vLth12DR+LPHycsJooVVfx3GfH4em12az/fLbV1Dq4XCk05HnXa77OdOMeZDKbUPkI+I6XtmU5nqqTlyPDmgus1HWOlaMDtqS05SOLgRSUKt9+5yRPbfo8mm449CFuGVwOnEMIOV0Gh8itWChKilBKXPvQ+db8NsP4zsx5zzNdS8FmnAZm2croMOQQ/Dz9HgbaL3ZmvBwHGwdLzfdPHzI8zjCzBUaCC+3L4775OPFy4vnl5YV98J2mEaXcLVh1wXVsk+PZgjkzBxC0KqeFWB8OOwpBiTEGlR8lZLzbGCh6Z4eBUs6AETUsVMVMHPMgCh2tmZXrd77wA61kmM6Gxck5wRERJxyIqIlYCCSnWMgqlWrVk+On/aBKssRiLFDdlRrCc4POZ2OB672/OJiPO1PeVCSZ69uht3pzfvd+mo
dt3z3sJtTvRuGi+11Z2jDMIxDt+3+x//hf+iQ+u1q8/z53d0DO266cHl1vd/tNhfx5vbFD3/3d7vHu+cvP/vm97/b392r0n43j/Pu9vm1zvlw2ntHF+3Vyy8+u/v53ft3P96+fDYlRoXNevOwfwQjAMHoDNWkiqkZf/fdd9cvLpp1+/Hd22lIt59fXl+v3r19l1Ppur5t+t3+wTk8TsOLl68ej+N4GqY0tq2/eHGNxBLCbpgf3t1fbONlv/78+W06vX/47vt/+3/7v/67//Aff3r7zm+2lE+7u7txqndD8uxeXm42K7/qCS1HF9arTqqMVQrItr1Yhd5cRyE87PfT7iQiQyk+hM26/88/fDhlQWJHjlwI3uc8joeTJ3TR56woQoYpS4j81Wff3t//7KPvVq0U/fi4z2KfvXp2etyXuQx1WPfrMs/g+PryoqYyHYcm+DEd20232WyOx4f1etWvY2CipQ0XoVZQoyGXDFqUrjYXkuo0zbVqH9tAHpBztsM4GKIUaZpOEXIpYYI6CIiKIh8qg81pQsKmdcF5wJhLndMAMud0IE8OnZkh1lLNjD5+OBHANAxIXird3+26vr+9ulytGiRdXWwBoHH2cT8uV4Go0SKUwHnviICETMSyNKs8eR2Wek1EFBUCW9JnAgSwfHwh/BgSg5kZ6hMce2l5UlsSDYaACkaAVcURL7wjfJqFn54J9pQIWj4XnoAxeFafnk7RCZ/sRQhqixHEznM8nqs/YdG5EEGNnjiwy47xnB0CVFACQBEDiV2rBgrEaKLaEDG6VASR2OBitcrzwOyHuWy7uPLw+PG7JsSL9kLA+fbi+ur57vHjfNrnUo+nYXc8VfRS4N3r196sIWodl5JFNTjvXCilMEATI1lFBEdeEavYpmujD3rOYNBcpaKF6GpWFXNkCOgIiN2ocppz730gT2SEuGpdBTvMub28nG0EGYbTibr+YrOdp2Q5Xb+6vhc7nUbXhCZGKfry8+vGubvh5AjHWhxgGUcfuOvbh8e7q9sXl7dXvsLPr3+aplPfN17D7mEn47Fb8ZiC7qfrqyuVkqtC22zXlxuG19/9AyH0fTyepjbEEBrVmoa578I4T+tNP48lzamAIkAbQprGwF5ES6re281F9/zF5WbVmRK4Znv1cq7TbpxVsWubLKCYi8A0F54STJN32PX++mo1zY3kXF3YXr0KGOechwlmscvr/rpOerDkbEzlx+/f/UrcOQ/s8GlgX+jUn8TJZanBEtv51Qx/9gDhJyvI+YPwq7n9Ses5S0yf/CJwntMXewcTyjJsnwd9XaY5NFs4G4SADgjQzBlyKZllj+Md6+MoD6710b3i7SvRgvWA6ZGgnubxdNznLCruNGShsGrXbx6OwwybliSlL3/3Bbnmbnf/9UWsJRvV62dXCn51dVurOc0kQ54GmA55OEgdht2dWiF0iFiLnca7PA1YUs0jWMilMFEpVaUSmlFA0DqdUi4CXLOMk0jSAmam+4fJoUtzddQNWZfDVQACBVBQzsLipCKzokbimqTWGQxBpZ6OourblXNeysxMzAGlmmZC7JzN9z8PoetvvtnyM+fiw92b1aq5vW1fv5sOw+Q5ng6HZ1eb/WnXxP6zZ6u3d/bXP7z/7/75i5v1ZjqMut6u1+t3P/9489nt9dX69Aj3j1MC163XKU11fFi1bqqrdJD+8lnR9PDmzfNvb5UichBjJDJT5xs9+xzPOKonMAp8Ssza2XL55EP79Yr8J789W5LsjMb+lc3o/Fb/ZE3ip3d4eveF5Hb+gvhPvs7yg5BFjRG8dwaQpdalsyuXWg2MmaOhAhYDWdQirUqEoMbkpJYlK8nolxs+PnUgnJlzgEVRzBl69sZMjEK4+OxEzcgUQAmNmZWxMhZQJAQCMTVVETMRAgOqxEFMl00WIVcFYzAmdE4VJVepVUVzyhVQzLJqbAg8OHDcbLGNBCpoYlgbPyt5VRZlsmmcSpldjBo9uQBWiZbgUUZicJ68d+yVEFRMRM1phVJr0SXqAezbjrvAXVezybkbzpxfBm9i55nRBUESEc1SpaigKgCw99FxdL5xzhGRqqHqEmPyISJx4M6YTQVETCyneUoTKxiJVvXe+ab1IThEwmTgfGzYR0BCYlAlF1QFiRevkDEis5E3AFz8WWaqKmpP3iIEdhxiUHIBnffsPBITYXBkSzEXYpXKxB4ADUues4k4hxRM2WNgT533S+yLFDFlEPGE3mF0TKTswBaHLyiSIzJiBiJyvPxrAZ0dR2CGBqqyVHaImqBLakPKp3EahrlUUKVqaMuZEJqqECIQqQgyI4GJPLk0cRkEiP9pAA2AvCNkzrmaIUdfSiFwKh6EtJqn2SKCzW3kpvW1JJMK6hiNHYolRixqItXAQhPVhARJkUHJzKTmlJrYaK2L4cgTZjNZbmFMYuYgMgXyZISidU5TFSUiKSV4IqZaaogxdCtkMs/esZrmnBZMXvDB+ZDy5HxrNeUymVLOyUdbGGBNf+FdaJoQ2sjsfdOIaSBi0CmPiBBiAwCIDAhoSkTk2zJPaMIoMXogssX7C46QmDyILRcME5IhcHB+vSx25JrTKTqmMnR+c6h7Bu+wVnJqpiiAENuoILHxqQiyiy6oZkLG6LxzuRREDdiPh/3u7v2q77vGZXa1UGzWh/t9SsW1UQ19t94AI8qH1z9RtdkEwasiETchVqhN24lVJLJcvOTL1fa0Gxz6dtOFrjs87qfhoGUmbuacTdRXCu3GAJnKMJ8MW6l4SnZx+fzu/m3Fev382XwaDbiqNb3brsKbHx++/OJf3r+5T+mU5zmbeo9SC7AHBGU0Nd+6VRvW6+0sePjwYTodybDzXk3HcSDGqqCiBpNJVQN0vkpVFWYnCgYWg9eUHTtHvogy8XKiZEqItGzKRbRWKVW8KgnKE0e0ioqoqJ7bjs3MlMnTUwBtnE4BaHW5GcbpsJ9P9/nLz55dXt+EVXQhMivXWeY5I7p1O89ZDSBGK0Zd67duPqUyolUnKWkhH9tiRjGQYtnPzhw4/PJP/yTZ1c8/vNvWdP3Zs/D8FV19KW5t7AXJTBeGIxmYKqqYGVoBMDRBnRgEpMAZASkGAoR2rjJfhnwGYmSHTUeywjDU8aB1Ruq9ZoSx7H/mHL3X9bY5DQNRvL5cD6dj3T0q8OrZS0Ssau3Wc1zlFLtulefM5E/DfbPeMFPNx9hs6rjzPrRdWzJKmlebfkqPjEo+O5Sac9hcBQhvP/z08pvf7d7dpcN0OMwuQr/pxuHUsKtSEKDMCZBj6Nl3vrkQgKZbI4UiaZx3MubAXdO+evnqmY95PD68252aPIcm9I3bbjpDJJYqw/T+MB1+xKaJ7WaF1EQZzH744Y9G1ka/2dw2m+v+8roetZxS4NWYdmRGYCHGNTe7+/lmvfGOskGSMtYEKFIrQvC+zXmqNQUXCByD27aX5G69c+PhSAwogFmRZmLvkLTMpRTEdalByaeUBbiPF6Mosve0pjD74BEyYl3FRuvAtM1ye5p+Bg+u9R9PH1siUKqSyjAIOmZRhm1/dSoG0d/JRCt8t/
9hwmPn3M3VRT5Y03qQeZofwfXeNdM0QDXQnFJG7rvVhWqZ0kk1pTxnUU+dCgggc+RgteanDTyKqWfywIvbwfsgInA+/gIVJQSrJiZ9G5xZsZKlIhoSGpIjUg6q2awoaK2VyQXXgolKWShh3nMp1ZEHAjB0HFXQak055ZqdGgq/u7v7+cPr/XC8XPeu5qzWXWx24zyPQylVDY/7NB7fplLW63Y8pEeC33z11XF3//HD3X6cL6+u2ejh7uF42jkOp+Pb/Yef121bq/z1//6HD+8//u6bz6mz9WV3/zAoFSKtkm+e3779+ceX336FWk/vP2z7cHG93j1MVeizz55//LiLnXfEXFOZhs2qOZyStt1UrAU317lbN9tVe9of7z/s1HnNdZ/3qabHw7Bqu/cf7k7j6EL48tsvHZRu3b99e//2hw/OilRJDcW++X/9T//Tl68+/zf/6i/evf/x1cvrYZAfv/vJAYiAqXpPzjeAje/6U9q3jl11pbrQhrbrIuOq6Y6nYR4e7989iMWGupJPBL7m+ShyvzueUkZPZRKshRyfxrGqGrtkeMj1s9vrvu8f7g6b9dXbt69R5269BoAseX8YVImYJ0mNj9smvrz6zd//w3/utm3XN2+P+9sXz97d3VW1hsNueGQGh0EzqyaVuu7akgoojIdBzbJY6zqZS0ozmK66rqqNU0Zb+DmIjMhcahnzHIizFsJKSEXMagZAR8CIUkettRbNKkSemJm9GIu1TMmBYmCzsV3HWoqUyaM6hCp+2tXXx7Hpom9Cu5sRLY9j3/tPV8Gyk0+SGGhJSi57LDtnZM4RNCQ0garGtLgwzgfVCmCGy4zxNDAvMIFz7dknB8Zi/SGiWmXheJ3nczVatnHL1u3p7HvJ9ZxxGucBWxfhdQlTP3UKfUpuLIcNtJiFFho3GCwNJwjAjJ+aQAkRAOWpBGgZ55ioKk5FSXIWFT2H48S0kq6acNXH/WG83LiL55vhlNpVEz2LaNuu1l3/9vDQXT5To7lk34YKMuYUYleLvf3whg1azyB5LlVKjSEA4ZzG1vvInHMBrY6XyBUZgpE7Vqg5iyHUyoAVWKoZoWevailXx4xmgpgBvUHLTAxShUVFUdN88SIUzRfd7bTbF5X94eO6v2zb9vH+Pvpu1cd6mgV8lrIfpsDtfJgIjYiJmFQPu9PFzfXzm2slQ8L7w+5u9/G3X/+m1Pr6/vt3b3/qVn6u9TRObewu+y6XOWl58erLaRjffffHMc1IOKe0+HbHaTAVAJ6zIcZxlDLXXEwQBSCV0rogtYrp5ba/vdyWaXjz/vHdx0NkqlV9+CnnJAib9bpp/VE0QevCuus2TdtpkFSS1FLL1Pg4Vd09vomX6/VFWKE7DvLwsPvw9hDJrfsIh70klE+DtYHZAuRawsKLeqP4i3sDEJahHz/hZZ6Ey7NpDgDpU5XV8okLGfuccDQ0WBRLAkA48zIAkZAWvYkW5AiCqS4lhQiICmjGRJ+6BM9+OTHvA9dc5wPV8XQ8Onez/uzPxlRgfiTNga1anctJqqASioFY267QYYXKpFfrVbHTzbYHSJuLFaQ6jwdiS/N88/w2dsEx1GEH8x3pPB2P8zik4VRK9iECBKs27ne5ZDORXNKUEhfvaEiPzFFyRlRlIcZSJBdRgHEuUknIEkJKGpABdM7GUEpFUJtn9QGWCgKcqucCxAqhTpCt1iqxpVXfpGmYCxqCDxWQcDogIzku44gAPsZm1dSSj+/e6zx028vLzUUTru9+PMTbC9fyD29OZUrgKjuwMs2HvA3N4Hk8pTfvdp9dbYbx3bv39zfPrkLVh7dvXr26uH3RHmaY30wlMa85371z3x7bzY3428f3/fbm2Y9//JuL28/jxRVKxprBR9esAImZQZSAYPlfhifz2i9ijT0FFwk/CZXn9Nkvrzl7y0yfLG3/VBqCp+LJX7QfOyfe9JPStNwGf8kzLl1ln0xJSBi8J4QyT0VLmqtUAUGzAGCEiEyqagiBndZCKAymVXQRwQj1STQV1bMlxYyQFQQBq2oWSkVSQW0cApiJqhEhCFqeUdm0Ai0CEzsXnQOpWpGWOJSqommVTIhSJkQCYWAk55U8LNLTAu7RUosW0QJUBEVqBdJUKo9r78mF5qJvVmsJDThvYFESWp1KUoLoEITqnDwamnhEUHXsUAmQAXBBMDlEI6wCRKyqVWuqYqoE5H3n2WHTqyapVc1qlSyWRAkRwJiIGJ1zgI1FlVxUAQ1AikpG74F0QU0TsmNPiEwO0dh59ojeg6HVKqXUquwEkVCqOnPOsffkPRoQC4EFF3jxDSkYkSk47wHPYFxkBmYkBlHUBWhSVaoqiJgaEQesymRNGwCZPSOSc0yIVqvUWnMRKaqGi18HFMy0FBNj5/rNum1izTNYVZGUEqgRsBMLjgmVWZedBDsyrQQAaEyApI6NeWkrIVr4SaCmIoimhuQFQIznKlOp0zykaTTVWm0uUkSrqi5GhKdwOxGpmS7EKDBCElMi+rV0epaKQghIxAQcWQUcOQMFkL5dTdNAhI6Yib3vQavW6omAQhOjSAWyKkLgPAZgAAbn/JKu9N6bEig1vmlj08bIzG0TzRBqJbJpHl0wQGDwCOxi8D5InV0IaZ7GIdVSitWUJnLISCVZzsJMaXTOsw8uhBBilFKKMDvvyJkosG8IDCW4QI7Zcxu62MY2tm3b5LoA1SA4X6ulOTkXEdGHAGpihmZKDhHJgydSYVDNc1YTYiZkFRdCJ3lmx03rgdX5iIQGCmbEzrdr5mygtdbxMIa2a5qmVnEQgqkIkXNgVqXG1iFi06xKqSrmuVVD5yI1pOMMKial7fp5nN69efP88y+ib7TmWqDd3GKZak0lCaD3cXP9/KvxNAzjw5b7Uryh05JW/arMKZdKjBzZXHfYnS6eXUTPYCaiPsSvv/3Ttz/+eLx7h5gdETPXkmo6kiOx0oeNd/1Yh5ymw3C6efbi8HA3z1Ma52KixVzEZr15+eLq8d19F9cpDZ4JDYGWJl/MJauYDzGExoDevL+vqq5qbIIPrqREAEZkIOxdbH0RkAIMJAqgSI4JiBFEK5g5BGKuYJ7dsso/XS+1Fi5ZtQKYgooqG4EaEJqhqkoVU1U9ny4sTRznPTjAV3/yeT0qEV3wtXd9G/r58fWPP/7h3e7Bol+78HLTPdtebDe9QYnRqZIq+v4asIJrXCTiMO/uDO8J0ZkwlOair76ICTkqJe+nsX/x299//pv5/eHuh8eXt3+qGsAARIiViECX9VcRFBGACcyBKigAB7UKBoikIiZiYogOQAAVfURlrVVFQAldY1YgQuxWdTpJOgWokgCmlMZZHJka5JpzoaaP4vNpHusdArQ3z4VJajU0Dm7dbvbHwaAJbcfU9k03HiDjtLp4Nhx2WUxRRetpmpFwKlO33RzGEbkph73mEVRjs1I9kQOByRuZGLkgJr5pVMAUgEhBzRDy7EIsw5GYm75tN1tin4aye7h/HO+7jj/74nd/8Zt/fXjzw+HxXsahYbe66K9ubj/c7VZrzjSD04/vvje3iv3qanvbr
ftp3A2nD2U+HA971/VWJw7CjnpxjXNMBil11NxcfoWkx/kh+zrMpyEPffRs3nMk8o6FcUYMUBHBN+3GVMtwsDo3XZOwE0EROaQj+m30XCs0rgMMSDzbDGiiGQF382PXWOtXJqmU4shnwDRMXf/s2dX1mN4jmNS5bcPD44fGd1UyAh2H6aJvuJovFnNVOT5M+3W/7jtvcEoD/ni4dw376E5DHsfiPJoXsOKYFFwgL8IO3TQfi2gxjW0vQy5zBixFTgyBiPJ87n4iomUvA2a2JFgNEKBIYvKIwMyROEsFgFpq1opopebgWkBkRFFRzbCsVHSOafEWgRkzKQA7riLVdJkUkCiXU+u3YlTJ4mp7//bu3Q8//sf/9L99/c3X33z9TfB6/9ObwzB/3B0PQ1KRMiXvyHl/PA0AqJ3/4ovbn35+uz9N3lFom+PjjLpft93V7UWI5AA+fHiYJtQqrQ/Pbi/Gw+H19z/Hzl/cPltHf7rfr5recv3uH/7x2fV12h2bzuUB9g8HVqDYG3qp0He9i9yE4BBykVUT7959HPcP337xJ2/fvln1vVp+eLwvpwIVu1U3HY+73WlzsW6ZrVbVernpJatNZXd8/Ie/+bv7+wdG6mIEBcxZK1xsnzlpcK7b4J5/8dWNu9xW/eHn7/ymd9FqKnkY7+9GzP1XL7aXm6ZvGzWcyhywz/P+3eNbUdMigZmbOB1O+bTfbtfIGNuw6n3MdRbhwCULIjYuzMOp9eHy4vrlTWTUJHJxuZFSPSO44GPz8P4h5ZG8u9pcPdw/pFRaFyLG4/7RSiKI05QdhYeHx3Gc+665ubz86f1Pt9vrOuXxNDJZt2qTYip23D26wOQYzBTZENEHD1xUxzmlKn3fNT448p4RmXNKbeM9oUn1sU8px8hVKc0TgWmt43hkIq0l5dm5CAS1JERS8o5QBWLTGo1tXFlFwAa4VaySU8nqvYMp5Rke336IfVdyPrWfDs/OUy4jOSRRUwUBZULTZUcFeEYaPQW/EBfgqcJCgz7vEIlwqXYFs08RnU+wFgMzNWL8xKVeGpVNFWDpncEFIKymqkZPAzo9fS58Kg8/j/JPjKKnYQsACIDO2Ngn1AcCAdqv8xgICCC6HM8/AZWWfChBBhyLIlhVazznuiC6lcF6j3mcdqk4v+WDRA7jMTU3a6IYuvb9/r7pO+R4PJ2q2Jynjx/urVQCeHzztgN1TChqVR1z0zSqVkpdBd84QoAsCoRGYGIiQgCZ5jEBAQhYt+4utpvHUypp/rh/ZB8JKQTHjmo1VFPHQ1Ei3vigmogJTVGqDqd18JLr1c3l/W6qWeCC1PtN3z++f+z6nqD1we/u9/xI7Dn2rSRFr+wxlRyb5mLd53nA2I27/TBMX7z6qsz6w/c/HHcPFxdXh8eH/f10e/ns6nK9+3DvXXvRXcA8P/z0Yx7Hy4v+OMyMyM4xc3TOiEO/SbmSVqsFGSFTqQqlKsJUq5XSN4GQX9/vJGUwi96tGiKENE+EWAU+nh5Fpb9arzY+4JR30zAd2n617VfjdHgsxbt0ub3wtaRTqlBCoIvNJYs8Pu6LyKzzxXbzuJ/8UwPachWo2nlsQAM8k9/1aTEvlJknHjXYU6poAU5/Wu/nTNAy2P/CgrGz/wOf0mxkZAhwlquepFTAZStJbAt/FRENEOgJ7WVnXUvREEyKysQkJUkXb2N/XbkpcsR81HmoWsbxpJqNzDFXKH3bbFf9dx8+Hqdx1bZzOb14tmmd34+Hzc2zeZ5MIca4Cm3ftlpzGt9BPrIdLE2nh/en/Z4pIvtaGAGGx8c0H4sKkqWhnE5z8OxJnHegMueqkrFBqk6SpLkighQ1I8c85YkNazYDJcVSpQghaCGQLGrQMGIxY8CG4fwURsTg1I2FhuSmSlVkqnPX6+XK55S8a9FSBQSDLNbF1bq5LMf9bv+OVzc3z181r169f/N67evVhZ14DhFU8otnl4fx5Ey+uIg/1bKf0ufUrLvu8ZjyOG827eHj/WPch2erF7frfOK7x/vm5iJNk83Jb6LzDcd+muHm+pvjw6672PWXt1Cza1vJhMBmwsxLkvYXw9knzegsEi06usKTofJJSLJf5dDs/MJz1f2nPwX7xCE6+9eWe92ndNvZrbl8ITu7Mn9lenu6AAhJTEoxkTrPsyqe/SMAxooAgMpMQM70XMLHgAs+uIqc3ZoqBiCqDMuzwxaDASIBYlUpIlVYFQwIgQFRtCKgEUitqFTOF9oCAicidkzGnElQTLSo1Cpn6JyhkQMKXqhhjmTI6MgIjdSqmlbTolxrVUNTAYDsoGmipROvehc49B0Te02gqeSgZkVVVJFIIMfK6J1D4hBc5KoG7Gx5/J0PKtjAjEDE7HzjACUwxtC0qs7DIl1JEHW5iKiKGJKaEZj3jtjAYalSZWlGETXFM9QIaZHoiAGBHLvA6Bi9N8NFdnZeSDIiKAB5IkT2wYCQkJ1nQvIBnF98w3COLy72HAEwIAJiPT+LRaqISCkiomCsJgrO0AGDIyBGXsQbAlXNIiWVlBMCMrsn3pqiiI/xLG1GJ3OS5GoaJJudSxLYOSAiJsf4dFOEZQ9NCOQcsvPkHLAz9oIIwLikwkzRCJDApComxanqOM8lTUTGTAZ4xpeY2pJkX74TUzhnMJegDqkqwbl645Pset4Y8dLewcrMYJhT8o7B1VM+dLHVXNFgnEvLIboGrHpPrmFHRoGJgKRzBGKCCEbLUoUQuI0xRr/uu67pPDgSbWKDgilndqFtV4yMDADVs0OmWqzmErzXKm276lbXIpLmqZRJTU0kTVkVwCAnybnEymWakyPnuGlaK8WInY+lJKkpRFaw0F70m22Mq0DIhEXMh8gcyLk8jcTs0HHbELoquWr2PojW2DWEqFaVjEOX0xSbBsCklpwz+8aHwAyaZemoA2QXGgCPJgozsonBans1Ho8gGLw3cVIKozlCQkdIokrES6uiJ5dlBh9iuwUiE6wlt6u+5FkrxtimPY+ncTyO26ubrg+iPCW9vLo6Hu4Z1TexFPDtxe1nX8DPc9vw4ZhqLd43hM4FS6m60MlcjU0JUOduTe12ezzmeTx1q+75F7f92n//h79FUGWaa/WaZJj71eWUD9pWAyh5rIe3FJ/1V9ePb18DmY8wHNJh9zHV3G1v/EVoLy8U5/2YsQgizWkiDIrousaIawGd1alvWpaaSq1VCqr6JrB37ByyUwMH5ENEslwKIIrUMuWStdTKIoCYUpJaDdDMiKKZmFEVCcuuQcRUTKrWrKgKHhVVQaqoqImqVDUBFSZGwk+uonfvBx3k/v7tar2qoqB4FSgE9/WXN1OWjz+9/8ObP/wEw9efv2wdry82sVmXKe3u3oYYY9eydwrUuJZ6V1S7VUBbPU47JH9x8wyo7B+G6e2dTX+z+fY3/faS26taOag6NilJU1FFK5UZkY3IABnVTLOBLR7Is8QlGZfLgAIhmoBqBSAQAmOmCEaSBQAZvVqhdm2h4dU2P35oqMcy1bQ3K4CQRLjWQLxeNXPKpzc/Bk/981eputi4aZzBct/GechN37Dj
eTy59aqLrg7H7VU4HN7H2JY6EK1EJHKIoR1qLeKz+K9urnfvjt12hY0LXbveXui4M1M19BQM2ByjEhE7pCKnND7IHK0qu5AG1276dn253lxsL0LTrkraPf704R/+9ocvPn/2xW+/EJ3AQkr5YTc03XYY89XtFTbQdBdVMjDvTu+4XV0/+6zfXJ3G7IajcankzMo8j6qlbTovDii+unqZc69WDlUFMktZN9FqKXn27BDRcuFqXXe9Dt1x2gtYLRMjiI0mLktB7hTcxeW3j8dhnN8h1TlNwfcMa+/buYwAhbxPVoa6c4bBs1kSM0WD4A6ndxxvV90ql6FMU101jizGYA7nVFx05CnGZp5HZldKabpVHpOLssvjtrnOKWNhq1WNKlBkQDg2WIPvhlNx3AXsIWcSq7UIindRRAnnzl8ccs5aSrYYzs+CUgqyZ+dqSWZA6EyVCBkYjRbKWAUMMeZaswqIMjtPrZlqNWBHzhF5MC2lEhMiOHJmCAbe+aIy5xRjZESRzESiQhSICIhKGhGVPLYX/X//b//t4XB8/eb1x/cfdZ5D52vN5JQQqrMpz3UaFzT2/ePpYTe0TfPs5fPj43BxvX3x5SaQm+fZiB7uD97b6qJv+gtPggauaT//9jd52EfTd999/+Vvv3l2fTUOU2w7PcrDw67t2pffvCol37/7cA0FmuO64zTmaZqunl0dhsPVaps07w/3u8Ohjc3du7fzcSDB4zh98dXzD7uDFeDpMA3j5eWz425QknXfEEJsaS7j658+PH68M5OL9QUxPn64X627x7vHqU6bq+sPp9GTO07p4+nvOfP/6f/y58//uPp3f/W/1JS0IsUOQCx4czH27Wk8tW13vdkchuM4HVvw7KJSyjJpHltHzz977ht1rfuHf3xHUBuCcaoNMTGUNFREAY79VdNti8qcxtPusN1cHPfH9dXGOzcMpyKFjIg8GkxDChT61WYekqPar/vAnFOyoioWOF5vtnfv3/UuRI6CMh5PznsjfDwOjKjgiFjEQghsdBqGKgoO5zq3bf/Nq+cECCjDYaoVaxKtFZnmkhy7w/4EgMRcqqoKMZkhEenic3BuzLOq9CEiUgwXVF0yAYmRN3k6mRQkK3NlpyF6ZPMoeRqc57bxaqOPeBqPTwMCqsJSSQaIRKCijp2pMPEynJoCIck5O7ZMLYoIFRSXoJkBIKqhmYo9JSrMmGlpNxNdPEhIyzYOzmk1j0iEokaIqmoAakZEgIKf8L9nwwaQLafWZ/XpbA5aTiHtafD6xABe7CDnmNqnmcoMzrkzQjgzC2xBFIMgzqrZLAt4osY5ZuocKyKLrWL86sXNu7cfb663w1Tz68PG4ZdfXfoQQwgQyFEIm8ushMYpy243iqACvXn/BlQ8omOUWrsY0URREWEVfOu5Vj0MU65Agc2slroK7i++evXjw/2Y85wEPGtWd5qvt5sXz/5EHf7D99+9e/PRO5ZqqBoRUxViTqUkRjFrYwNenaN5nDevrlLKxeDZl183rsuZaBieXV1uL7sx7br2or/s73cfSVJAfX578Y8/nrbri1xgdzq9ur1dtet3d4/f/Nln035cd17r+OOPr9++/unVzUvw8sPbH15ev9ps11W12ebrvp+m4cPbN2UuTbvd7x5KBXCenG+aponB9+sEdHvTjQ/3BPV4yNN+KkUBzZMLDlfrdRF5PJ7MoHEuBGemczECc+x8YG9UlOaKx8FK2V1t2xgjeL8/PJS+miIKOsIE0q4uNKeaDvuPH7fZ2q7dwmYaxvc/39X1hj1/YrwsffbLHA5mC2XlqarqV+P32YfxpFbaJ7vbMmjoWSGys86z6Jl2XnjnL3V2ezzBhRc+19m+hOAMl8o1AGMkOp/vodrZN4KAS18Kk6s1RcRcbf7/c/Vnz5IcaZYn9i262OLud40FSORWmZVV3dMyJIdCoQhfKHzhA/9oLjIjMiNCzlQP2dPVtWQmMrEEEHE338xMVb+FD+Y3gJorgggA4e73QqCmZt/Rc35nLhBv4/3fHOUxRkiJD08HqVMrMzkwB61W2xLCDlqtZUKHHOJ0nsfhCzeotSXH8zw3abu46Tc7oqBlsvOxizKfXvYPD/N5CiE4JucEQKfD4+n0IqU0g7rMon5cfFDsWJJ6k4KI6AqliM3WRATcWhVTcKuNgJqRFWUGNJCqxbAP2MBdSYxBPbGBm88StKrOzNhvtiHA4XB+Oi7Vg4o50FW1TFDahO4h5mwdR2oqR/UxD6jRZZqfPh6hDZu7N2/uwicuPcg0L6VuxtSFOC2wTPP9zXbfhk/Hw4/70/3d9jhLmcsUnPt+af78eNzcDTdvh4fjAxI5hU/fP9//oltEbr/86uOf//j7f/f3//LHf7xTXfVqg0B8ERgv4av1hB0v0uCrbI0rUB0uKKzPvWb/Jkr2cyEIf9oTPxvYfhKJXg1wr4V6P5eDXr/1GoFzd1hhvq/2pdaqFa9NkdCUkINaIMQYw+o0BTd04JBEBZsBJHVbKwKA2MzWinsAWlc1gTnCSui69K0RinsRr806vuzCFw+VgQOIKhKD63ohEQCBRUQIBBqKihOo2zpXKlxgkd4UCIwsiIFocGanAFylgZOZKIC6m5iTzOcZw2HgDDF15KmjbrjOYUO8ayqi5rWpNK3KRAJEimnoKEZw4BQMCAFQNDF6ABITEUYiZiavrYm5ugoEQo95iDGxg9QSzFNnaiqluoq4Ihp5CyEEztFVmrrDpZdIa+QQGZkZLqArWI/UcS05Qow5C6plM7Baiplf3DeAITADIIQQQ4gdUHZkWPnNxKuT16QSOiCvd1F3e82cObiDqoiYgTkip8wJyQlhPUNqKrU1aQ4Yui4yR44BwaWZSUOMOadaFgxJlnn2E6jogmbGDoEC4brFKmHgQKsZk2MCl0hExEyAIWIIFDvkiK7k6qIOBsjuDOiOVJoUtariaoHYyBFR3cVsNaGBrWFeNAQAisTivj7WiCmvFi+ztQTt30hFiNwPvWkJIdciCrwZ34i+OIFXUZXUJUwJgShAZApchj4TuViLFFJkVxXHEJgiBUYHTzl0fdoMuc+ZEDkFzqmZkik4oGHgEMaxlMUNbLVl23pv4E23nZYZAIlizD6MA4VgpoDuRDoXac1EAI1B1849bEvE4AZAEnPKOXKA0A1huOqGDQPGSGAmDt0wIkApJXXZwJE6Ql+7figEiiFydBVmVg0i5OAcAUncBBByiIARyAMy8AAoqsqAQGsAqhADUnRlAw5db2ISnPrslgKigmpTl1WnCLU1A8FoETJScMI8ZGmWh0FJ7SBq7kRx3MQuPz9/pMDj1X0adgVmhxYiN1EAwRDNIfXb8ep+Ph37vhF3x3OblhO4bHY356W8ffPl89PXIXVA1ppmq++/+nL/+DAdDgRwc3/L9O//8i//XMoxpsRqQCQqKeSX/cP26j730Q2ODz/6jWxurs+HT58+fj8Mu1oNTkdMXb4eN+/vHp++67pU29nNuxBDjMgxZMY0tEWm88JupDORxsgYg3sSDA6xGaEzgDKRq6NodKxN3I05NBZvuOL
rQoyGWEWJL/WobEzrAwOQOzZRUnNfL3EDRxW9nDKsfhz3nx5WXu8fh+M5Qfz13/xW3ZT4+nr38t2fq7pPRzjL73/xfnv9t9vNgN4Sg7WpzTP36eqLL2o91nqQ037c3iOJG9XzEzqgG5qYeVNorYzbcRw34Mvpmz9B/6a7LUJ8nA5X73+jCDlHioQxeS1yPhNU1SUQuDuF4ADuihTMDdFVGlIEQnMDIoTukjpg5ghakSN5rW2ZOLE5kJFDH7v3alssR21SlxcPgVJUoKUsqY+5I7b48uNfq1p//VUY87Ddqhmp9Rs8nY4hxtiTIvbj7uiqi2Aa1QygU8i5CwihthqAOSUZu3073dxsjy9PnCgNQyXr8iCnc847kzM4zFoBpGc2FyQIsSdmj9BEQaudvNVKGDhEU0sB8/bqb+9u90/ff3P6Ybza3v/yN3EYDfNwc3sT+nmaptOePJCd2jJHwflwAkcatte37/j4/PDwjQE4JgKt5z02jtQb7x5bvB6vop55cfTGqBhTAzRrihxDdOn7PEboWi3gcC6TS9304ybuqoj6skzL9fYL9BT0lHiQ4A25tLOVU7K+xxikzMseA4pAXay7vt/1N1M9VVsaNKeuuoxX7/z5uyZqTsFzdALy0/kBw5ho56D70ycPbJ546Jf50GHnLdQNDG/vI3Jd9l7nNFJpU5MSKZATx81SRe08cqKOaPFMNB33AYODzuVQpSxVQaEbNpd7Aa3PS4bE+PrEJNKIOHBEF1Uz83NtikgUwaS1ltJakupA5Agco2t1BaagWqvpmhUFRXEHZjEDQOZg2kSMmaYym2mfE3HA3eCmf/3Xbx3afHg5HWdpi54PdZpiZgNo2uZSHWm73ZA5IN6/uXOgzc39L3/Zn/cPDlrnxR0Oh4lMzVDBu47HGBPj8STnp09DZCD67d/97fFwKOepVtve3N++/8WP3/15enjAGIbdiATT6Tg9tZurTW3SRFUwUjw878tS7m7eTXWpFZ9l7nKclglM/uWf/9wcc97Q0tD0u+//eq5wc3u1u+37RP/yT3/GtoSEoTNy1Houarvrsd/GeNtf31z95a8fN8P4q7/7++H48t0Pn05n/5dvLI7b//P/5f/0n/7n/+nT4yFsxvu796dPH5+Oana6v+uBw7c/PAqCaAvDtomCsgpthv7NdVenk6m/7H2qYdhubr2+7GdzRZHzSTGG3O2I86dPe0Doh27Y7sS8G0eptSwiruLiCHdX99IqB4jMkaCAHuejMZj7dJim85L6PnZ9MzdHcHo5HmqtlIID7/cHMY9jDoBS2lJaSIKgogKGOXc3V293u+uXp6fD8Zgimmtt6obSTN1FpUshEFNg9wYAIotVqwZMBuitFgM3M3BfpIE76zM4AYY2T4dZr7bbkCMCLlKIoVpTNSDIw+Z8fga0yPTy8PSzI2UnWmmXDgCmBu5mSoTaNDAxgiGYOyHJSoIEXuGGgVnXRDSh+uULEeiS3/HLU+fKjgZcUULmvl57CA5ubiurbqUdIawRsc98l89FPv46XPmlEuRyEu+vlqK1RBfhc6TCPg9O8PrnK53B3F+5wpehHWnlslb3xd0Dg3lyS8QMXqW+2/Vj5vNxT2akvu1DQhpTjCk70jK3MYGDozUX+/Hjp9Nx704ch+f9D2bWxxABWmshkKKLamRmQkRoqiJKxBSwmDugeniu8v3L8+31+M3DPsau73MK6fh0aqYPT4+3d9djN+x2t/PxsLvKhF5P8xCoG4e6zKaVKJgJI7pjLdLmmmN3db3Zn8tNf9MN43eP33755k2XNyKnVhqZX43XYxf/8t3/8v797Z//8tcAXKRYpF/+zS//8R//8+7mzfRUWq39ED9++HA4Pf3hv/rdy4fHr//4ze9+/Ye37744nfbtdHz79u7l4fHl+ckjXn/xtjUUULUmagQoTR72x06sqX364TtSq9MJzPouR1Zy2w2bpdUipoYppwDIgNOyDH3uup7AS62lAKI7e9d3VbSV+vB0AMKre+jHXV2O3G03/aaWWaclbXYGNQ8jo0tbnp4eE8XtdvvurX27P8Rh94tfvIf/7nV0X7/WsQUviR9bK89+yjmCrz32r9lMXzuNf/K1+eeV5pfl99kQchnlXyf9tdr6pzjQmkO7WPb8kkdDt5/sTLgy8S5PQNJaBKUQMQ+8vc3jr6zP9vjXjo0DAeDYXU1Pz+j91Xjz3f770I39sDnsD2LW5T7Frs9h01/N5+VmcwNL8VY2my4PY0jdskxuNdqynKb9p4fD0yOmTBRCGprS+ellOZ9F1AGmc1mmYkBLgeV8vt1xLc0ViSmhm0zMsQghhialijtBU50dq5BVZHcCaBaODQFgTGBqcyNiCE5k6IYIhu5dl1TsUPXTcVqqx74rVaeCixvb+WbXscpmpxBl2pe4uVOgQ9PE3O9u6tPjy9On07lst/fb7ZUwBvY/f/dhPzeLNHR9q+14Om1z1DY+7cvYU+qxlmU5akohjf10PnNaNtv3t3fXx/3jZnwjbarHQ9xe1xRV/Hk6jndv1LwZRYhWjRE50Nqw+Lpn/czDczH2mL+qPI4/UxRf/USwipWf1UjwFT+8KgeXt+Lryro4MV8X1uvQC+C+xt+QwM3xtTr81Se6vlCaqEETSSlxjMDszilGJgA0IjIT8JXghWoNDA2AkRRc3GztogF2oJVys65cWxm9CMyERAbQTKt4bcABE5I5gtsa0EQ3IAdbydSgKoGwEXAMapYgCSoaqRiYm7qtTlMn9ZmhqSuYuYmauCmtnhkEWVUpk9YEEeB0NgqGsN4QsSFst6nfcNqkyKbVTOq5eCvWqhMbMsbA/bBOYuweAYDcAJKDSpNaixsBBuZm5sBmrkDsCBSJuMt9VBMR0wY5m2kptZkBoAMhh0QxBFdVdTBTUzeV9TCDkBF5XTFmzubgwBwAgdeoD0SVBpDEfe0M41UyAuCUIEaI8VW/AyACNwdbDYwAl6pSUzVVUTEzNWuryWntZ6SVTQRmpqtZEok4xj4HZmSi9aTZgURdLaKB11AXnssyzcqpIGHR6Gx6DqyBwNWJEhMyo7txCE7IlNiNmDkwhsCpc2IEB/fWqrurLRwGQF13RnEQBI6xYw8WYa7Ai5pLswsDcUUrUlyfR3TNn1/YXbAejhECrVv4z6WiGNlM3NVByFVbOc2Pu5HmZRlSH2IKHWOkGPD6OqdBrq43opUZUt7IUpB5/XgEDoG7Psc+DdsxD8Ow6UMIues4BEYDFSIyUSnV3FpdUkcxdK2oSA1o6rZM52V6JiCxIzEbrBgAVnMHiYEYqUsh9Blg5UEaIJnJKsWGQP3QuQkTjbtbituU2OoypLFZW23bJi1wDDFN86HPW2Ka53MfO3A1NwRkzggUIsaQDUB1cm2lnEK4YDuIAMQDrxxy4BgdiBgp5Nwld2gQAEOMoerS5n2/uepstDrnFN0EUweOHCPHxKAoU8p9U3Q1Qk4pulLf9V4KOgVGTZDDrj18nOepHws2ITAEyDke93tCUpPx9u1k5eb+fYxMT7TZbubzD4JpWaarnZ+klVqG1JmD1D
kP23aelzx/8Zvffv/tN7LU4+EYh+EP//X/4X/5//2/y3LOIRklkdqqX22uVE8587JgECuHI9/e9dv749NcilDozVFORxQltuFqmzGJmxYJgBzYQHUucjhRoJvbYRiG1uzm/s2791+G8Tp3Q6vL6fAiquV8dl1qmeu8lFpFBIlRvTU1MXA3MZEVZQiB12SMM4fVj2hqKqZq4Cuq08kdTBEDAYlUdwfX9bTqtaxG3S+66f/8P/7D7dXVD0Ofx1FdSAXK6e7+XRf0Kndj8i5IqzWk6HGAtM27gdCJLbk6EYCRiLwcbHka7t+305OjXN9tnx8mhGCqiNEArNYyTcun41uyHOJmd821dH2PAUxNxIxivL43c1JzLaCLQwMXMAIwdFszvWaCQJ8jBgCAHB1AXTFfuSPikuLGdYbliE21NcAcczSLHCfCEoMPnUhtFKm1ORClnKDi/vsfQbsNEuaQ+wGau/m4GVqtu6u+VDVO/e5+gZAwQ5moCYXByVZcamBiksh6Ph37bVrqPF7v7u+33377wLzhsSfuLHSqDbUQqTkihchIHWNgMSNVMGEKIlPggC4A6Mafvvnz7s3t3dvbNNzuj9PDh0+72ztMsOyP/S5f37+5uv/i5eHZfOpGeH7+7mWavvv2T44QNzsNeHV99fFQno/7TB7ChmOHkV18wuPj91+DzbubEYvUdiBKRiSqIXXnIpxHDhlMy3QyKzHmvBmgtdN8ynEXvUshHOfz1Q7eXt2dz7Wc9ilGMFSCPo6vWxh0MYm0rttl77FhWyZB2Y2bSfF8PiNkhzGniI6c0vF85BTbgiHh036+u77Z7H691BYCI4R+2InJOOzQkis/vnyoclIqNMQqs7Q5db1Nz0M3iBu6pDE9v3wyo0Cx+SKY5vkxYiplSvHGiO2nYwNstQEbc0BCJBKREBIiNJ18nd5cA3pgzpz6cXNeDqJq6imQuqgCEscYVq3/kqWnsCZxckilFnN1E+YUKAIKIYsJIOcQzqfy8eGpzEsX4nizDb2Fu+Hhh/2Hv/ygxc/zXEylytoDezxOqBZj+PjxIVD4/tsfurHf9F3XZSK8fXsLga34ps+BfJmWw7nsxvH9u6uVrX0+vtRF799c7Z/2XYfz8cNUcLhKtsDzw+Ow6fJIOs/LeaqbVEVTt1FpIIoEzdvz4QnMuq7rA55Oh6pi6h8/PV5d35Vp6bv09Pgkqu/fv//Nl9c/fPjw9cszurNbWdp0OqcUHECqpevrAKRq337zY8DNYb/8v/7b//bf/f1Xf/Or29PBDuf2n//Tn66v81e//J3Tt//wz/96Pi7bbvhwWCJt9WPp0owctttu7JgAT2UWgfurm24kk/N2122H/P/5xx++eShP0+JmqgqEGLipBojLch5zrwpFRMhbnZHCbtypyvF4RMBabbO7IWdHWhbZvr36uH+4Gm6Oz3tO4TTPZlTUNn3ejB2jVrMAdD4dAQFjcJV5Xrp+g4i1zKy+2XbIcVmmPg2bfhiH4fnw8s2f/yxNQp+baq0l5YREqUulKCu5tyoV2oqlsBgJHYKRO4h4jJ2CIIFbrbWAabNCQMSMntT8NGvuYohXwNHZ0Q3Ql1KbegzDPNXj4dlaa6/ELlit2eDmpgaAwESyGiSILrmsy2y7ZmycLk33l8HnldXq1VZOkAEAEaz+o88goVfIin2mTdPr9EOI5o6vBCRDcAB+TZkBADi9nsHDpcHs8zPumkiDy3T+k8Pjp0n+gpL5afZ6bTf/HGRzQHVTXdHXAGtchKjrIkpNHBl9SFjFu11K0e/uxnKe397fSMDzUr5889XzseRxiDE+HZ/Uaojh5Xh8enwq51NmIgMAHXPgGEyaQwicwLVKO7cGBoSgDk1NQPsYhdI/fnx5e5pSimAm0wRJUkJry7jbnfbnZqft1dW4DaWJLXVzt6NAzKnLGUz6YXOaDqYeuk45tVJN3Id0dz1+//L9//Hv/69HKC/7w9t3X+x49/I0AUVCWKRstpvUYQqrg0232+tlOf348vDlr35/vek15o8fv395OnSxPx7q1x8+fPXFL662V8fzeVmWPqanT4+n0+H27g11qVX98M230zyrma3lv4FvtpuQVzgI1qUu3SB1cZFAiq0dzqdmoM3W/qQxYmAaumhgz/MUHHIM3dAj+7lW87oZczCcpqkoHl72cNpfX70t9UWWwzCM+6cjVbnaDOq4VMkJbnab4/N0OB6BEUrTtn8On2OY6ypCAsfXxJkDXLDuK/ACLhqoE5q/+o8uoX/AlX71Kiq9llvhGvlBB7sop68S6HrW7RfJifxnBehrLSGuJr7P7rrVzOev/iYkQnQ0SAoxDnfd2/elHIYwhHY6LgdOoS1za6eA98fT7G45j2r1WM6iwlAJlzfvfrXZ3jx9/LbfbQCqtdM4jFebnhBrKZusddq/PD6dnw8hbqpj6q/B+fjyUA6Tqs9VW2tT9anwZPhw0gjQwCOCm3cJEpi7pSCNOjd3Y1cD4mLw3AiA1SJITRyqcAUspqbY1NT8DKTuY8pWatYaO54USvWHc5kWGHK/aHwsWCVa48NS1hapeiwhECXz5SFfxSbelkkg7G5255eX4+EJ1DbDaPXx3V0nevPtx6fzfN5td7c39vDwqY8owdriXnXb49M85zBaqyCtqZ0O+6t0dbftnh/a8vJy9ys0mQi24/ZKitlpubranJ4eru9/RZwBwqrFIBjiWsyI6Paq4fhFTl+Pq3BdAg70OQiDl/XzujrX3c/93/z7Vy2dfgY2uiyWz1Gzix8JEYDhEujFi8vpotu/fhqzmYQYcW3fCNEBkBjRAxFRNCJwvNSYeTKtHIJJBVFwNQNcITKI5OwXEyohvk7tYOiIGMyxOTWPwR3VAwTAtfoYL62OKgQqWt1EzRwMEClEwAAOYAjQzJXc0NFUEJQcmi6ytl6Z4aq9EqEouUfiYu7EwNwcUAXOp9SxB9dI4FCq2GBh6Hg3dF0HIfS9Sy22LAwGOYZx9Bhj15tBBCK6pLgU3NsiZSlzrK3VVuOqoRKAG4GrClEg7jhxAkARk0VVumRiKiLg4I4BAydSs2aOHNZyCXgl9iEgExEguqMbI64KByJHFZUQOCCaiaQUAoeIDODIATEgR6AIKx587X91BSvuBmDgQM62Vp2p2SWOoutpDDEzESMToqOjgYG5UiCMmZwYOVyOWc1dlSJ7XLEOATgx1xByQqZAOWc/n9rxSctJpTKn9ZApxLjekJEcXPhSeZY4Jw+RiN1UpLqLAQBGEXcXQ0BijIk5EjOBsWk1ByR1b2JmAIDmF+ibmhGhugDgBRaOhGv4jMBM/9euolYt5mACRh5z2CAD4byICAo5EYBY6lIX+WrTbe/iF7/eKVlK2PUBTc09xhA4MAbiEFLMXQ4xKgWiQCs+UdWbAphrbctU5vN6ndYqdWrslLsIYICshm7QRNgFFBigTCXmREytVols7tN0Tt0mpf6CkuKQcrBWVjoOmKU0pJQ4RAIkg83VLYd0fP6hH69C6qsBh2TEw3jFTuoSQ0YAYIgcAImBYuwcDBxKWQg6iBxTNHOTZ
giErE1zzGuCETmoQciZAgPGEANGAkdyFHW+nCSaO3JIwya3Kk4UuwhIataapS4QIKeMGJkj5y70GVBwngLT/uEk6F0/LvPy8vhw+8UuptzKuco87q5MgSjWZUEPeXNVypQGnWrpuyxBMIzqut1eD/3u5eVTt00EMGy76aDtfF7O83Z7d/aztpf9x0/377/89//Vf/PtX/7y6YevGYlC0DZpdWIobUndFkO3FF0Ox9Rtrq/f7PcvDz8ehn64f9fDvGx2V4dhu+zrdtidZV+nsgKGXBREAmJywFre3b0bBt4/fHz8+i9lXgJCJEuxSykOY9dtxnhz7WhNyvlcn54PGASTRw3n0xQgSmt4ITmsBYLxVTBxUzEzM3VTlUarsoImYqKqarYiynwt5GEmdru8ebvdDNudSstd3IzbNs3j/X2M8P7N7a/efoG1cgqlqQMjYitza1PMGXDNu0YjIiDYbEKKZMlbQYOwiRvvTHy8uirVHKC7GVy1J05J7fDj8/EY8xUNfdhcU3eddm94HEs7uxM5gVZwQ04AyVDB3bUCmOsCCKjipoDACGCy0qHMDaECEBN4TOSBQ27n55jcq7l52GSELSVr0zFItaE3s+U4ARJTGHe7TdpM83R6+JFy3N7d93lTpcWYQR3Uc4xqFMZRi7pZjtENnANqAZdx2Dy/HB2NEhOn7ea6nH3oWFu52+wCqtaiTdQ0pQ4hLtNJXKXU0AfEiMhdF9dzcTGwBo6oBNoamMc+PX16rHX5/X949+VXv5ktqGoCbE8vaNj1kIfd+y/fvvz4I9Z5l3e545vrq48fPzw/7V1rrTOJXndXoueAalbVoZodz8oRQHE/ndVr4t7EY9ymcdNaIwSVqUGdysymRFqmss1vCIsH5pCq1rXbwVxAbUxDDG8gdU8vR4Fm7mJarYlJtBhjDx4hZatLszNFra2gZDNRN0KPMbotiy4pd6Jte3V9midzFzDuuuvd/fnwVOYDmqTUWzvPpZxeZK4HQ8HgiM3ROEdPUcTPuhexLoSn0ywMVes25nI6eejGoZOqQ9eDk7q6tNcJGYnQwE0bIiIQuAGG9d4DhKbWaqMYpMlcXwL1YxfP57ZIU0yGoFIIWTkxs0hdszMhJNNGhA4WAquJM1NgkQrMBrrU4shmXqqmzc3V23g1pg9//aZ+13oOgaHvw1monqW2usYYiLDLiYH6oTfRcdv3Q08hbTbj9mrXJTwdTrlPY9+xLH3Xb253w8B1acfjkdF3V9dfvPtqv9/vH0+M1PdZW3neP9B2ez4vobs5H09SBawNYzedTsf9dHP/ps6T1YqMTeTx8eV8rv0mTaLTXA7HYyIKiKeXB2l2ZHD0vh9SxK//8ueXh+c1fl5e9kAtRkx5XO+SOs3PZRm3Y5vKze0VAjTs9yelDN12Z6T/9X/Y/Jf/8p8/nva//+VvtM4fPnyaUa/evOneXac2p2RIGJgOp31EQLfM7LJMJ7+/7kOgb797+u7p/O3z7GR1ObUmMcfaqhtwEEpp//KgRt04WpmllK6jjvHj4ZQjStUUuje3d0Y+nSqInfbHcdOBwvn0PGx2aibmFNPtpnfw7394THkrtZg5M7lpDCFshr7vYsDixhmriJtH5BjiNC3Ph6NIJfOYE3JIlLrcdblbnTeJBFzUq3rT1kpRYl5VjpxTLRUpEJI1DZk5UABuKhACAszTOSceu06s1ioKnWoJzXNKMfQIZoQh0LQU4OCmXRrg334R0QpCYSIwA3cEMrMLVujVl+qv2hA4mF2mVjd3NUJ6ZUw4XnI0znxBZYP75bkMwVbUClxoQeuAAeDol0Lx9fqk1+QFXExJr84gxNfK29ccxsVShJ+JrQBg4KvB/Oe41/VM8fUw/fO5vRMhAblaRPTVRUVg0jqAmJgc53MR80xMkU7znGM4LFMfw/3NGzFhJld/fno+vezBrZxP08uTTtNANAREU3JCAjIdmB2hgak0UIvEBr7JMTXt1AyhNqlCSOGw6NDFTZdrbcvSAKBLaZrmq93uy7ubrh+e9sfjUrn3QHI+zy/HE4XoRFUJMc7l1HVD18XNdnCHsiy//O1vD9VrlS/u3n78+O3VzV2XEnVcqxHBcX/ut1en8okBwC2gjYj/+T/9f7d3X2zv3h6eD3/65l85gjv+5te//9Nfvv3d7/7uajs87x9CCgMkmaYQ8Ze/+dVhPz89PH/8/vs6n7uErfnxPJtBAaitwjR3w9CN2zTkYTOcj+c2n8+HvauBG5gxeCR2wmJWRBw9MQcKCi6Ecykx8BCTiLV5UrNIkGNQwFa0Ho7GwTVC7Daxa3OpOeVxk3topw/P53MOXep4Wcr1rn8+zc+fHn+6APDzRI0Avio75hdhRj+vIPS1ZNbgs1cOAGAtu3xFYaymt5+7iMABDMzgNXHz2Qt3+evfKKOXPyXANSO0ttjCq0Pk8w8MLNBRf88RGwZ3JUITlSIUegw1jJvSaDodKOYu9Y8vDy+H09WwmRe7u+mvrvtzPUMwg7LMJTKFQMSiegJv1upyfto/fwrIpWocdiBwfPko08SMIl6qng7tWOx5wb9MXpmvo9fZkqpX2HV2nSAAaDKIUNS9wVIdTIpdmCyohg4qZitqU3xuK1zcHLyaRzECNLKzSD22x0kd2Sl/P8PToZwlqNB78a+2sc4HCeCNnlVubrdNT45PsbuNqSutzXXpNldGx8cfv/Wbt9fX28P++as3uzrXx/10OJxub7q+y1KWoUsfZz+c/WqMzF1FIqK5LDGgN59f9u4WqRPAp0+fvvybhg7ddrO9u07Dbei6eflUzmfub5CDigckcLCfs9Bfuec/+YcuEbXXFfizL//p98sOfHk1/Js3f0axXXxD6+4Jr6Y2uMiLP8Vw8dVD8Tm1BgAAIWXgqCvFDTwwExEihYt+SaviJaIECB6ZkwcBYiIntLUIFsjAVvj1KqU6mBGF9Qc1EzWkEJGD0poVUkJAgkCMvBpc1UzNxc2lzaauqgjBnBxWeYHWHyes5Hcg04vNpOFrutkAAAOzI5qJOTKjqiGgqpRSwf30DKgGji447iiGCGTVFh867roQcwjB8rBWpE9ViBhLA44GnkMgRo6RACgnyyl1sSxLKWVZFlEzcDEQFXRUdUcKmNa0MmPnKiq6tlQ3FQc3B+KAbgxqsEo0BCuWDOGSCPE1cGe04i9XjlFKQVuIEV0VAUNAZiC86CAxOkdgdsCLH21dD6tDcbUUGJqaqjngRf3mQECREZCYmGCFQRsxIjixfRbTHV1FVp1plbfcHcDMLx9IFLpxlICSogYHqlRIloXcOTAQGgXUSm7gShiYk1OgyMbghOCGpm4u6s0EACiBoyFFiEwpUQjEwdWaSjOcS6uiBu6rRfrzVYBASGCKK7oXfb0q3XxtT/r8pPHKKgqJQ1apCKhSA0dAEPVxewu1oZMsS5fBg7ayVJG5hrQJedPljnIMnFJMmQgjx9xlM88U1lS+m2qrUqqKtfOpzLOpmAuhtzKLNlUjgNLafIC1oS3EiAC11TWrL2oA2AqKQq1myClwP+bALNJy36WcS1nqXJFw
7LeqVmrpNtfb65vaBJE2403s4HB42d3cr6a1nAfwtcmZ1FVEYuqY0ED7sWOOWjXmDhBNGwd2DapNdUEAIeeYAaAFZQ4kwa3GlBFDzllQHTnEES6JJ0gxTeWU02jWT6VETkhInJtZzANzQEekHLgLjByCiVNIoRuAuOuv0dlMdvfdvN93m207tDKXet6n7Q33/fJyHsdRxZp5KROIO4c87EypLTN63L98TAGX8wmTxaurPG7crNalzHumvi0vh0/2xe/+XejCcvSh25yOe0r46z/8vtXTcj4AMsV0PJddNx4Pz7ubMYdQp3lMeToeUk63v7hv/3Iqx2O93rRWgcPVm5tP3/35y/fvuozH/ckUnp9fYgppOzjDuTqe5mn6Ybvb39zdvb/upy6UIiJ1npfW6uG0L6XGGA0hMhN3u91tKbOdjwBOm1TVzhNZcRMVa4Skpr7K0mZECGCwapOqgMDEq3pqAGLWWrvka1Zk10+3JvgP/83/thVCqUzo5suy3I1vru6uhu3200O56iIuam556KUeCS2lmGICjGAOIQBYqzN6aFWgziEPqFhOp27cBUx1esbMy6k9fzyCt5RiOZzzsAVepJNt95Z9yoF9ngk6ogjcO0REB1IgBiReH5pih4Sg6q5m4iqOhKoG9bLPmbrUwGjuawczUheuvjRpYkeQhhCou0pdF3N3fnqIISuoGXF3nW/uFcJ4/6u0f1RpQ99/+u7r3e2XRJT7IYS+ltaKMMaQ07C5AQBbnlKXQsplkhCjkuchmdYhbcHz0nzY7mKixF4PJwyWYlBK5o5MEYBjVIuAQ5+y0CSmoBEgAFiIOXXbUo8UcRjjef8p5SRoy3n65k///P73f7j97f/mXPX29n55+PHbP/+xH1OghNvd7t2v2v7FGpxPC47b619e07j3cvzw/NGKMkdtAQITtuPhU4w3fYgOPKsRYearwMEUOeSpPCGhm81ldokJvO/6eTrsujHqFiyM3MUQifP59JypdXQsZXZytSUCbXYbcFnmYzUNHFJIdXlRCDnhqc4iFjbXTZ/QQ99vmajUo7gaJIrBLTONKUxP5x9TMIImy1x0zqPmjknpfD53YwpUbJHJKmDFAN12MJ1FGwKlEKUeWjkFHBBDKVOI/ZhzjAgaMlwzU+AK2fbP0831NX1GmSJwSuiwHqesk6pINW+BI1NYN2SRRhjE6HA+55BE3IncgSgEAkJF9FYWoiDihAAm4IZmhugGkYO5tDqnlBFQATMNaihzHa921zebl6f9N99+rNOyux4PHz+6lM02vewP01JiYEM3tcihH8cudd3mKoVgoON2s8xizaazZO7vbt52m7uhH7NX1VpTQKYheZgWdHfTp5fnvtuYYplejuXQ9/FqN0otUI5Wl8MBdzfXx3nxPoLDfN7f3u9MiVN8ft7PWpHQm2CTl/3LsiwI1qScTuec8nmam7Tt1Za0fvrum9pKSkmaIIlj9VojhTpj4pEIBYw55di9/+Xtfj797u9/XwTY/McPn5CXkMPt+1+9f/v+//mP/zRNx9/+4m3O8T/+538lhD+fzl++vUdOdar7+mxQ+5DeX9/kbNaW3CWtdT/Z47lKjEdvL0+n6MqI7TytNOSyLJsYp2WJqScCVyCmmPjx5ZHMlLCp7676FPQsZWlzn6I39eLHdubQp9CVcm5l6dKAUj/tT2pIhJEyURDVm91tLecY08vxnAnVxdUBsOs7YGitSW1NNPcdIIBxnze1TAHxcDgFYBVh5hQJgXIcKmiXyBCn+cgBEHEYN4i9LKccgrlqreBkRmxRbEohufm0nIioNu102I1dLZO2pqLI0QyX5mncEoBbE724inw9/HVbBwF1Q3t1Vbg7AhGr6Xqwu8J+zeDVGQSEZOD26jlys7XSmy8If1j5Q8Tw+mDmF1LvCq/wiz2DEC/n4P4ajMDPs8zl03GdmC/PdK9t0A6AYK8xjsuohQDrifmqFr36kF7TQz/N3K+MGjQ1AoxIBigGHDiFEIiGSGDGBDngVZcjRzELHOMwhH6gfsNxeD4+Df11CHx4PpjB4Xx+fHiQUnrGSAggDNankAI5gLa2qBigu9W2pBBjSKqaA277KNL67fDhWBcFBJ8nVZkRPG36nNJ5KmJI5zrXT1djt717++7Xbw4fn9ns9lZn0UD49HQEpPHmJkeOMSCjQ4upyzlM5WU35PPhhz/8+7+b5xdvbWnl3S/ePDw8fnnz5cP3L+ClmI2bcVlaF3MX0nQsX/7mtkxPf/zjX7shooXf/e1v//znfyaM5Xz64w8fDGzshkhe6uRg333/4fnjY5sWrTUgnY9zK9XN0MDMxUG8uXk9Lynn3d3t7s1bBOyPL6eHh+llH6Oi1BjSuakDE1JkTCknQPVWxBFNxXKtKcV+GFpbmrSllmHc5aicTKNxjsfTS87RFCl6yDxuthFl2v/4/PQ8DH2rre/CfkbXSwfaxfv2ap5bTXMrGGsFu6yC5qo1fkbGIFzyQwj0OnOvWulPqg+sbiMEMycgh/WawFdlgNbX8+UnQP/58P9aV+WrCgqrIW4NEYE7GIRmgePOmIlTxIcqi7TGPpCZlBLTQBiPKP3mSpqZQOJOPAq0t/fv67ndDEGJuxD302Nm7LpNCLHOM1kr0345nPvUlaIUc+BwePokywTi57nVqtJ0KRLC8NzmH5sVxVnhk2p2GBFLI0UYyKB4tNbU1Egwn8QK8HoYaOqZQB2rGSJVdTUAJlwrvBVdVUGm6h7guwkOlpTjp/156Lo5xAX8cVlywtwPEYOYhDCa0tPj8fY6iT6ziHTb8frddKrH09Ltru+4f/jrXzh8gRCm08tXX96Lvvzw6fF0nLpx8Ly0pmMXnk6y7UcOrVm57tPSnvvhFiS5kzUHUXY8PvzY9g/U32t3dfuL37elbHl8nv8KMpOKqYY4rosFf/I+0ueJ9Kf9zfyyDvwiW+NPr3iFsF1Eos/vQbyEaC7b3+uG9qoorm/8HF+7wG7cncBhJc6taxl+9hVTTwBuLtKIKAQiokv5MriBMwUCBGA0FxciIuIYo5tba4oAQH5Zxati4wgrLsbMFZGZmJHB1YEvsg8EYkAXWmk/gKQEZNLEzcBZpZm4W1FBcHZVUEN1EIkA7i0wKbiZUgB2qmgKZO5Igd1NLEEAAVBF4iJKyC5mUC3GWltowk1DbZQKRIKC6hpEJCyGbHY5RJlbC0tFZo6REJcQQggxpZC6GJm4oy5mTpxK6jpRaVVKqWoOLqBqxVRzc6Xcc+5jwozkqirSmoi7E7mKm5g1UV1Tr8ABQ3REIvqcFrwci1zC1hRCgJS8tarojiFG4kgUwA0RkQNydArrbgNusFpUrF1CJ+YA4j9R1gg9MCMRItPFluYKAAyo5uarS1Td1FTdxNYujEv+nFzNXE1FazNdT2EAIHA/EqA7GDA5xhQxBUMC4FBmL2dQAHIgpMiUgjMUIDPCJtaaaNNL7YBT4hB7TD3GRESAYGC1tKW02lTNVXW9KMyBmNduwcv+6S4rCev1gkKmn18Gr1IRYa1z7jIgMgUpkmNKAZnC+gbugpkShVI
bHtrxIXUVUER7y9sxhYEtciBGdglW66mcRb3OxdTAxKW5WV0mcDHQZZrdXVpbyhmBwayUouIhJDVvIiqttsYhAMJSBSkQAFLMOW/v3tzc3QMDBd4OQ6tFpDE6oJnCef887q7zZhdTEG0hpd31fatVahvGq5A6QJjPxy53gL4s0/2brz59+ma83o7jeNofUhxjyo4hEgKASAsc+zEQDmWZW4mIHnOnBoiIpGrKiTJFcGcOFDhSAkTAQMRulnJwMChJ3IFSHq+ZCNzUvUuDI4WQQojSDDkAYuw6EceUqBukVuKu2+R5OhAIcZznEyIurR2O5yH0MaVu2IqausaUzZU7RmsGrMjD9ZvSvg9d53URq9F0Or1wYsfgCPNRux42t5s2L0/f/3Fz+85jUrIehseP396963/7t3/47k9//PYvf+o329z1pZXNsIOmyNb1PREx2Ol8HDFtbjfLvDwfn7/61funx8Ptu/u3X/7y8enj7TZttpuUu93t7cenpyJm2sZxjFG1NGkwnU5I56s3766/+NIA6ty0VCchsOl4OOyPy3ye5r2HZOt128zBBaGWVquuB7xEl6cKIlqjLmCmqivXjZFUdb1frFWH6gaqAECM4GSmqwYDAF/+cssQammH/Xx6Ov7uD798u31/nmfj2N923k7BnESX/SNQITSg8PLpO5cWM8XYt3lRFQeLgev0FBDarCLhdHg5H/bmc3f1hXlMt2/6lB5/+Db3MY/jePv+6qtfYejnqU3HwgnVZqLJykdHWlv2nOBCEwY2QANDTjEPwDHkwSEgRA95BTk5I3VghG5KnF0LmIIKdRuKo7fF6kLdYPXgNEQfXRZ0ud69X9zDbgdq0/mhzacQs6d0/cu/XaaFwcq8p7xLOSCBgeWenDu1Tj1zZHWJuTNTjkOwVOYTQHCSGEEdOWFTtcCpz6UeGxTKFkOsrY7b3XmepBQJzjlx8xAHVVI34IDEFHITMVSgtJSSxx437G4PX/8pDzdh+/bp4+P921/8uy/uDg/Pxkygsd+iQ7fr+BAPp2MzwDTMzW++GPS0P54fVUvu4rk8B+rGeCOwvCz7WduwfcsWAw4AUmoJxOJYoS31EIeNiOkiN1dbE5iXp8hsYKKqgGYOOp2mJroY5KrS1Wampm2Z52Zzv+kjJ6tORCnHpq1CBQhgGTRAEHB1WOZ6ghCvuxv0XW0lRgxxSwRMrC2qVq+tutV65nxVIJhBlTL7VEIr5q67iKxWvJkulaoxb90MkE5l3saBMYrH3fXfxfDu449/7se46HN/3QuJzPLTeOBgakgrqxQ5BFVVcHBbpnOKCdfe4oCBo14IfyGgm8HU5swhrGcXxCn3igXc1IGYOcTSZkcgQCRGJlUghEWsQYixyynWWs7ffdw/HkspBvDjw+Pjx5cfvnvkGMS867sQUJoOY7+7vUMAUzgfTgsQYaizEeY+Upe6fnMfg7rO+/18NW6vrm6uxtHaAo7bq9ARleVZvAtprMdFpiMhT4dT07YZh6HP81IBwjSdW6vTfKIYMYTH55ebK+yTffzu43g9UOAQg7kty1Lr4qBMxsQhBkDlGPvcufs8HRF9qtN8nIAA3YcuzaUcDqf9y7Efuq4fk+rz4w/zuecQv//Ld+4xd/H67ialrhY9z+Xd3/z2/3Z39Z/+4//4pw8f/t2//8Pflvr046fJ2neHqPweobsdQeq5S6lKba1ejent7Zv9y+PD/piu0q/vhm8Op2mZoSgHdnOpwkQhRhVLnAKHcj4hRyQosrTSlqVySrvx6s3bt48fP3R9ypHGzeawPz4eXiKnGHMRlaJM/PbN+8fjgzTvuiF1m9PpGQCI4lJEqp6nvQMWASRCwhTSVObWliH3IYWQYqvN3d3kpSxA5KbmljGu7KFmZGhYFQyZkcl3/XUXhiL72qrA7OjElDkbZ0OIAKaVpEmrRAGcxr7HWeq0P8qRmLrYIaG516U4eAxBm3RdJvpZ99N6j79EEsgR1QwcwtpTaY4r+HlNygABOAKJKf3scMIdPj/NEpGavY4sTsTu7m7rHHIRldzw0ne2BhXc7fPJ+Osc9Vkoeg3u4CVZgev56CsdaT0MWcct+6wLua+ZILjE5Nz9dcKiVTN65Wa/Ps/iZdBBcMLEFMyuu+zeun4IXsZNhxwFwrDdbserl1KR0vPhoBAU6HieCuKC4eVUamsd0zYSuMSQyCEAEoKYA10Qm4F82/WuXtWd6Dy13CWHKFW3286XKtUA0YkopJY3lPu7u4HzeN/n0+nZTZ6fn/7y/bdgFEPXb3pX3226L79697yfS5s4j0td7obd7f3txx8/RN5I0y9/9cvjy1LFYhop5GWZWmvkriJ1Wm42w/F5zqHrbvpPD88Ph8fc932kx48f0gbTEJOHf/iHf3CVHDtEkibjbouAf/rz19pqLVOtS0QidiQrcyGkruuRVocZKjg2W20Nqu3x0ydOL8ZxM467t19Q6E77R2IoajFw7iKrgKEBAjKwR2J0QARKBIE5BKJ+MwzKuYF7W0optWkHmSm4Qzd06HY6HLvb3rDrNzciWsq8nGbu+rHvgcrPR+V1HdnPsI6rO3Its1+1zp/QQhddiS6v+8yevYhIq+ltTUwCXprN3Py1rupSeGY/TSmvPK6fHHDr91s/5idX0+pLWiP6yCFpU3RiZ22NI2M3nmmOWY4/fMpdOpz2yAa4lNqAcWlGzNvt9ngo797eEMwEtc5HdAghxzSU0rzWLuNc4TQvDhGwC3Es52pV3eF8PhNGqyZCmNOx6uMke+HGyMznauh4FeHQfDZ7EyEHoGiMVkzIkzSjmMW1j7TUysilqUI09F1EXA8HEQMFVl1mpYAhhJfWTsSV++PUOA43b+8+fPcRmFKPFlUB7jdbk2aqMfZOdDpPoUtoHmupKtubX0CjaX+myFfv3x7P5y6ndpoSpfdvulK6uSxV9M3VAOfT7cgTWZUWEy/zgTf3Y9ygu9oU8xZDSja2Ni/tZXr5cfPFHyB1+e7dy9d/vcLURMt0GrWEGNWJiX/aufCiTP/066uZaMW2wc9daj8bXn/CXDt+dhr551dcuvFeIwP4ugb980t+bs2kVzXUX0Nqn9cWEEcAMNSAl2+EuJpT3AAiRzVdo1cOhkgxZjBRFERBJlwz+Q6MyBzU2uonXXtCkIL7alphB1wLnTlgYmTQxIEJV28ZXHoR3BTMQMVbFVAzdTCSWtG0SXNVQIjkaJKJmiMHQjNCqqCILIpuwI6OF2ePAabIDg4GgdDU61zGLTJCjCygiBBiBA6uaODi1QAvvCMnF0FfaT6rURaZmAPnPnOklDtCxtBFChExppI7rdLaUsREymyq5tZEinlOue+GGDOBBzUxUxNGcJdaFi9FwYEcmJ0Imf0iZZu7OAR3M1PGgIhIkU1CSroUCICIvNaIOSEzMq+0oXWZgKO9loFeqD2fb7SIjkgxINNFpEayS1zSHFzVvIkDGaipmoiKuKmqrndcRFdQANBWa63SmjZbj3fUBIibk8aBR4h97+jAxIGxNnZ1a0ZoZhCiBzYCA2wOYgwK3oyR0JUgIC
akBJwB2YFFXaQ11WUpp6mcq05Fmujn/0B3B1BGNLU1DWpu4MAhrIsbLhrX5Wq5SEVuxpddGJmy4EIc0dVlBkcP0UHLsuQcdUxEaT5XpvB8OpyjT/Gwvd70YwcIoA4O0opKBSQmDymvj0yUAvejSfHWKJAbsrcowdTUFgR10CZiZq201lRNS/WyVHWMCQCIk2+utufZ/HGP6H0fZJnNXWsNzONmM09zirEb+mF31W+upGHKA6Uc0Hfb3fPT3luNKcU4hBiAfJf7aT6raAobaTpub1I/AgcFDsxaKzsFdBUBsNj1ISkhudWmayylrUxxVwcgaZJS74RIHGOn4qZtjVGmkNwhcAI3DoEoRkcO0Q2ZckodgToogBIyo6YYGQECAgRRoDhY2QdGYRyHDc+zeyFvIKi1mUOtU9gGxkvtIcZuuN5Cq2DelmXcjm6Azi7iwK3WlDhAQFcrBRXqPE3HQ7+5+fD9N9vd7v7L3x1fHrsc3v/yN8tSHx4+YgATHaK1WsZxcCnneaF+bCrT2cABiPfPR+bnuzfj/Pw0pO6b/cwWFG3+9O3t/Zu/+/1vp6U+H57LUsWIY5jOyzyfU+KllH77Mty8GTY38e0XCuLNhjt7YyJ1Kud6Oszn895tOR4P01LmpUbMRivFzFQbEvLlYcLMVUV8FXbNTV3MfvoDEVNBMFVjJ2Qgip/drf/9f///8ALHU7l5c3M1jH/99sNj+Ke3X/z69MN382mJUoNZNNsM3fUmqtTaamAksPJyNo5I6FYDQZs1pj6GgRFaeej60PfvTseX7TA6SRgGsPSLu/8Guz4Nm8X7E90N1zf5XceU0BTMyNRlAVivYZNW11ueu2urZT6LyPnhB0DM3YihQ0Ag45CIgxtRYOJATA7OBBCSYzZToIAhAAXULqQNxIW6d6BzW2YwDVbREEqlwN2YnLmpUb8NFsmXcn4JDuP1LVCQ2lzOMW4YURFDihSjLoKaGDrn2aIrCkcYh91+eSLKIhi6beh4gUNkAsqRgig1aMASxqTQIqTQdaqQ+mEuVd3cRD30w3VdHlM/mmXkrp2PIWM30vN3f/71/+7W++Hh8ePd2zdI4/mw9DC7Ueoyig3x6uXwMJeXqer3L9+Fq/7pfBA4V5nYumGzlZcFqw/5neQMtK9Lc4A0dESVdTGE/fGpgoQumwpRXxWmyglil0eps3gRNSfqh9Eda92P47Y5ishU9rJw7DN3GZ2K1CJzxBRSP+vcRMzd5gUBcoRm83k5GSoGDYRe9946xCAWqlutp64br6/e9toIYS4zciIMZZlOdYkhXr/54tNRokixWtocAkuzUBFhB5zU9qzcxxvQIOZ1XjKf7q5/udt89fW3fxw23eHpkSGEkD8/Y5kIhxUCeCEFIAZYSXghAjKCp5AcQbVE5kBhXirHiIiRKUaGViOzr1sTcy3aXIkQUwyU3QCRzbXUulaen6sSs/i87I/LXF6eXo7H8zy3Hx4/LmVaytxt+uPhVJbFFcXo5vr6atwcJ8t9DpHBMXXd9c3WHPqh2w65zGW//2FIcDVkUNM6a53BHjMnR3JXSGnoeZEspY097IarVsvTk0CF82lSqRzITGtpTcyaqnoVPT632G2kuQKe5oY0C6CUucq8tCLSiN0UsSxIAG7TdK7LxIguzUHNGiIb4PPpSOCJkAO32tDOYey2u+tFzm+27+amu6s3u9uNgUfy3d3VSeLD95/A4Xb39sPXH/67//v/8OVXb+7f3jz8+Hj8+tvjj8+x79NXb3IKAmZEfbd15j9//7h/3gNCm6Zjab/owzTwk3hZKgdSQBWJfQ+Ai7YeUiu1G7g19VcST9cN43YzL3O/2S7LZGrnMqt7ClmagbW2FATquu5w/nSelxR6FdmfnlVaDjmFMNdzIGN2dY95yHGotTg6Uej7wd3KsnBMqhpzdFOwRjFHirUJx0hgoA0QA4TWWoyBOYDbUk6H6QncmFllbYLGCMoc1ZA5uGvX7SoexcDVqkgMWdHUXUxIlYExMBBFJnBjxlpmlYtgag7gq2GI1uAVmOuaRHuNz6zwFTUDd0ZwA2TE1ba96j3w02hhDmqOhKbGvNrR3S7hHrQLyHPVnXD9dIXLP6+qjbmvZF+CdeC56DuvMw3a57Ho1UTkF8PH6khyQlo/ZDXZrw/Cr5alz3aiy1yGiGbqgIYeCCEEdIeATDRmTKwcI0RSTYsQIPZDV6V9evk4bN6MaXg+PI9X18RQylSbPf34UA9PI0rPzFYTQ2Je4wLmsGpwfUoGsCyy3uxEpQowwVQWDNwA5VSReNh0YdhQztvddX91bVLRbH88f//wYNZAdDNuf/n+l5vtbc7htDzGrkcEqf43f/8+5/Dhrx//+sd/rqUEyrd371KKTaWVl1bmPtKbd+8+fPfDmzdvoGofod+y8BT55vxyzGmcZZpOx4Y83NzeXF//+P13s8zv4i84pER89/ZWmxORgr48Pp1PB5Oq1bx4H/vEPs2TtpI7jpyWpThAimzmQa3vmBPFyPNSVSAiijVQqQLjbvvVL+7nZXp+fKzTeS2BYUJ3m0ycsI+5C4HBDVwQp1oiI3MKHBHUoWOMkcJSjbhAGEprg0Mt7Rkex35o8zRsOktgtVZtZZLWfjo2wNcM2CoT4QUwQ0xAr8HGdawXN3rNASESXSZ4+2wH+mlgx88BCCdEBHQnR1AweF2rBsBEaxrydTWbX5rS8LPB76Is2PrT0upCgLUM2pk5o1dwjTlNU0t9guXMDmykS+vSoN5i8un51EUOkXUpy3Qcd1+aHNRmrRrYKCCt9zxTm+fDy8PaQyhuOi9troBQ5iVxNIqnWj8c6kGkKkFO4FbFP7WmgOZYFCrzqUhReDtQUNh0OWJx8Sgwua1Yym2fwAAI1LACzkDXRA6goIvVPpArzpg+TvSdBx82h4P/+v4r5/DHhw8NMTFF8n05i3ZSypvt6JzAKzKRd6psHuq8YP0WpG7ffNFd7x5e9uQgItal3dWbl8dHHuOX78aPT/Dtw8ucO6Z4PVKdDz1lRHdmdxu77KZkaHjyuB1v3i6HD33szh+/8V99jLv7YbsJaQCk7XhTlwm9oCsTgdOl5QteOVX+6sDEV7X7snmuYsBnAcgvmxNc5OyfFJ3XvdPXne/VOnTxEL06ky5K/esG/uqNA/xJOfpf96SFyOYIGFYTg6oxBw7hIuszBbpo/ABChM68tpqHlN3BvCCzmYGqWMULLRhWxtf6fRUAmEOIkTkT9JFy5IAc2GnNBKkRmKqEQNZghXYRuHhTExMws/UoHB1ExdwZyV3WkwBiJrSOsSEEJjMPzGIQAnDgqZmLGbEaGBJz4tCVpfJxoXDsaGut2qruEaoZRkAEZ7rgj8ncFNhWWpmoenMqMBfmmGJaOMUUQ2CKIYUUQ5TooimWWmXtLyiTSalaLPWuJfWbGDOnEJGiKboAhJwDl1zKgoArlggBeK09IwIHMyMzUEGNEAgAiEMMUYOiKsdExITkYISEFIBXUNHFLEnooOLaLrdlZAcENwRgDEhAjK+QNTRwA1ARUyVUJtNWXQVs7UozcGe63
LgJsElzUxV1ba5VRVZFSVQAsLUG7iklxMyE7gqgQE2kIgLGEIgsJAhxNTxoE6kLNEVtgCsJ0VdB0VUJwUEdoNV2msvT88t+P7/sp/PSDMkJwHQ94iJYY/QKQICE7kigtqIZ0aQFZnu9Fi5SUQiMFBCbNDGlgMGbImjuB0c0A61T7kJrbZ40ZRdVlUqMKXrajef9odW5iZCDtBpSDIlS1zkRJWbOKXWQQhVPtCW3Npf5uGjZozNjVKzmXlpr9bLWTcSNQozQh6pggOYeYp6PJWRw8G7o+5DqMoeY89Brs8Nhyd1w9+X7q7dv3RGMQsq7mxtiaIrPL/uYYoyJYyLytXKuiZrpsLlO4+gOgZkJ5+PBgCBnjjmmHhHJkYCWaU9gTMHNCd1MrJxjYJNSS+EQOJkjh5CcmOIgthAzJUrI2tQRQohQIxOjQUhDzkNtlWOMqQvJW1soICIHZuTgl4xitFYRAwdu4LHrKHgEaEVUa0jJwB0p5G4pi7Sa4jAtSx62OXTf/fm7UpYcB3BO3Zj7DlSlyXB1dz7tDSDlXhXGq9vTeXr47l9//fv//Zu7X+z3j5SCKSxT8Wa/+dvfnc/LaToSEoTUank+TopeZR5z1/WdzgsZDV3XhXk6nvoR3fDq6vrq5vp4ON5cbzFuPn37eHp62dzefPnlF8u5nY8TaJuPh6otQJpPTcrLfDg+pzTe/6p/814dy1ysnuflyFVF1KSmDt9/9f6HHz+KoqsTgJsigq+1wIiigsZquj5ZGLg5iK4WfFTT1pqZm4q7IjCspxH4+bwK/vinH1EhjaMfDj9++wHR76+u/vjNh8PT4Xya2lKYmR2utjm3ct/F92/urrf91c0WzLA1QARXDyRVVRSiaa3os8pCMec+nQ5PSqCHechvhvfv8+5tvtpt7+7NCZlNmvkSw4pRahjXrlACSBT7tQYS3INL1hs31VJMRUUAEdFNrMl5WaqJ4LpVBeDQx5ix28XxCiiaMQJRSrqcwDD2G5EaAsjpAFozA3prS4l5jCk9fPpuuN4BJZEaiWI3VpFlOse0UXUkiqkbN4OV57UHIOS+zhpzbjI5YuhSNtCmjIFDzJik1WaNOXRpOLWTISBGE6/TtLkKdZmaB8o9EZm0HAmQIRiINjl0Y2/zid0Dhf7q2rUphlLsh6+/f/c3qR9vapPN3U1dprpMoA7IOffDVbx9f66PJxV9v7395x++eTo8DX221uqMbnmbtqOxn+0qb7ZdPJ+fxGTG05CQuLnWFDMZ7Ta/IJdzqa08szJhIIqpFOtq5wABAABJREFUh1okxqFZUxfVer15Z1qrzMOYF29iGVLnMi+nOeaYuo6N5toay1zOA+eEwdSYoalUr6ELLy+Pb3ZfMG8C7iDm0/ISAhlijk7QCCwgh+1uaslBwfl8mhJ0y9NDX1Ozs/DMmFPYODsDVRGZF9MpurGDiMY+u0qMaX7+p6UVkJPNlHM37RfvLn4KDkSEbi6tMTOs/C9wRFDwRFG0EQcENG3rw9rSakwZwNx8jFmsEocU8lRnDtF9jXcDAFyioi4uKq2m1JmKGQxDKstyPp6b1nOZzstLo/L119/MUsUaMKlpKaXL2YmBuhjzMkvKHYUwDOnN/a2amTQOMTPsPz31XfrizZW0MzHeX10RAIowRuI6DGNprZ1PxjkQMniwiqSbIc1nfjkuSymqzQBUNXBeas3dWE3ZvC1L4FAWPU3zVXf76eHjOF4t8+RgRCCtBcAmrrWYawhhOuxdxCOjS6sFAbU2AHCCcRxz4O223/WdNIkotpyutkOOyj2fTg/AizcC9rAs2+sv3767m6fz9t/9YeyGP/31Ly8vHwhK7lI3wjxNHYXn40uAcN2n3TDuZXmZ5nmq89H65LXVmOMv3g2c6F+/fvxQzwgeCGPuCWCpZX34DjFwIDWVJg50dXX//t3fLHJ8fHruc6hF2C1HDoGlaSslxoiOgVLidD4fQuqur6+W6eyAmBNjQLAcuecsUmLMImhtQilq6sTrA19MwcniVVdqM7VhGEQR3ceYqmltMwdiiqaec2+uU62M3ufMGsAxxNDaXFtpTWbRFMzVpDUnCSmGEAOAKZhZay2mmFNsttTz2eyUug1TRBdfSeyB8TMRaPUorAGZi5QDQLzeLejVGAGvdhtEdDS5FN6vnqPL8bWaEwCCE6AbBF7NRP4TMMBfQzdu6oCA6q9hnIuJwl4FpIs345WNDQBg9kp5AQAgM6fXMf41fHEZwF5pRK9SF6B99ir5aty/DF0Xm8jqqHLfdFmIF/EG6G67bff2bihuszBWcoXSJNhCJJhCCuHh+aHrsnk97Y/opstcj4ekvonYRYpMDGCqTMQczR0xiNLStKpjSCnSUmYISKJDjMgxDcN5qSkxx8ghpHFEgunl5fnhCVTBtB8Gb2Wz2+3ejf0wPO/P3/7w9fVuHLd56GPX9ynk5+dTtf63f/j9F1/cvTwdn58fh3GbQgb06VDB4rf/+pdf//3ffvf1tyGEC9KcHRPmqyvKOXbx8KwItN1sxnF8+fjRy/mXX315e/eLD3/5etgOHEKtskzLcdqX81lqQ4SuSxDCspzPp/M0zQAgTWprqODgzZ0poOOaFIHI290NE4PZUipD6MeUAiPh9maTN7vluJ+fX6xOy7xshwGZKCVw1lI4haFjAiLTwOiOTaS0pc9pGPvY33+czwAGhERsCAB2Ph1z5Kub68OnDwiauoQKqbm9XgUXI8bnaRoBAGiNDCEQXvDsqzATXkd/v0Cr/bJ68TPQ6ELdeh3qLzY3ACS6ILfWMuTPs//rXP+TmcRedYLPbPnPV6LbCn+39TutWoQZcH9t816cPFDVag6ttb7fnqdJVGuRujQV7L2Zap9vyaTVQgApB/GmK/cXVdo8nx/rXBnzXFrfD3PVKgVAOKXW4C8/7F+O+nJSjdwgztaKWFNLvGoDhkTFzNSPzKMxVgGyPnYcoGidRFOI5iRi2xwIUAsaEhNOtY1dIAB0CZGZcGm+dzpAHIDe3e2+/OLLH87npJttRV3a4Xz+cjtEDqkjRzTwcdwAKLqX2uZ57jvSVtvDh1qW+y9++eb26vB4UIrnwxF317ubq8PhhTjc7Lql3pxO5apjSMgpFJUuoDETxhVPFTgM3fbTXLnfxG7ot5syHZeX5+6dhDSELiGvgjsBqqmGEAFeRZ/L+nldYK+S0U9LBsFfeeqvy2AVDNfPuODPX1fKz1Qk+JxNW3fZixHtsxwEq8SItH7q61K82I5+8h6tyx+RQ1hNpCsO1d0jRwFxMCBAI0CNHMFE1ZAopoTuxtLl2ESagJrCKtZfKHUGCGrCIQGzIzEyYwhIES0zBfIYiAIBkmtDQUW1QARWwMAMXV2xiaiiqloTV1nLddARQJhIVta2iIOBIXNgciaMjObURENADhAFZrWAISAiBjNQ1Xk+UQTBtokkSNDc2JGdHCglcHcwNwhEzmwA4munvIsIulupHBrFAkxDP/Q5WaDMFGKMHGJInESk1VrKUs1MStGmUltYlmG7zSmn3IWARBnAAdIQnTkuy7z2NnCIyLyq
Hu5O6011LSo1BmbgiNyICRCZmYlX2DmGAMyACIQX2i2Ym4DJ5UDGEYgREQ0RGYBfb8YrKd3BV+6U26oAiawenKZmpoQMhO7mYG6u4ECEhEzEkbASBmmlNlUz1zJLW5gJMSAHUyA0LQ1FnRgQOYIjKbMagJiX1pbFtKG5QVEidCJ0VOcKDM0AgKM4HBd5Op6eXvb7c5nmqYiIqoqsGT50f605Xvfjn7xGyKymgen1uAvgs1QEYKYthehM4JwiIzYiUm+BcoxdsRkRzbEubTpoTOrTHBItKDqdOUIKIeQUGSgFSrxUlYDjZhC2+7d3zDGk1GqBVqaXZ5TJ6rksh9ZakSIyq4GCYnCvGmKXu91SW6mqDWrT/npz/+UbQsjE47iT82xNng9HcI0s3e5qLi3H8d2vfjVc7QzD9vraGgzX14xwOh60tZs3d65aa2NENFtaGdOIIMy43Yxaz8u8QG3aatcNakDSWZganojYTDGkELMsdaonYu43V8iRKDMHAAu9mFRtJYQYu6GJY0gJs1tF9BhJ02guqpbCJg/9epPlkBMnXpHxBCmPIadWCiAzJ2kNTBVQyyHlEbSBQ4hZ60IeVNt0nMFS6HpVDTG2Kogx5rRLoG1++O7rl0/fjMM29J2aIVkMudnSDcwdUYllqSlC09Z1Epje3t8fn/+C/e3V2+uPP37Y7LYpd4fHB0r+q9/94o//5b9I0TJDrbLMhzfvvjzPJ1k0JJym2QFyl37xxbs//eu3B5yai17D/fv7f/2nf8r8S04x5M7BHn/82Orx+up293ZXi21ubx4/PtfzKXSJutzavDy8eMPkZXt72zNajlbCcT5YmUOHtZTj88vVdrPNw6eHZ+LUxJZlYWZbOzReQ6dmDay5RjN7veVAE/ULzF7RQV2QmIzXyPv6muF65whIzh20hghhcmdiTP3ui13MrKUkxp6wr3UT0nh9zUMu2BPTZrsJnFKX6zIHIJJST4+mS78dpgnqtKDHkPJ4+268+XWg0Tb3tB2M7PT4glLXIC2YtjZTYNOKKQEiITl1gAmQANnMVlnITc1XcjVbK4jmLgQtRQCEVkog16r18CzuwGnY7jiP3fVbSFvDSPnas4Ibc0TC1Hs5HUI3AHhSooDU9+MVmbq10m2StdalTX150daY6rjNyyzWptxluL9v04QIiEoRKXOEDoJ7WLQRAW12V0raBWrnEsFTGJEg5WC1juPttD8O/ZXalLqtVmhWa9Uu5RSTgRInUgNTFCbDWqvIfnd7Zx7T1Xj75R8ib+YlhW3/8P2HcdvdvrkJgZB7qW2pjZi31++O88fH7//c+LwZU+BrEpvVtbXGriFW9dvN+PXjP2yv70Ek5j5GSsmXaT7PDzF01/kGpGv1GAjvdpukCuxL+yRoir7tx/OxqVbSRQWhuVcI3RU7DN27FPHw8n2UIecUPC7zY8gJEJEhk4O1ob8DQTDou2tif3/3PkFEB7NIHpn6HscUedNt29S01diNYq0qnqdzn3yTrzM0UpBac459l5d5RsMQslkFsCo19R2kzlozMBgoRSjLyU2HYbvQSLrUau6NuFuvAlVfqX2qup7akpOaxpgCICMWkKolhdh1nbSGFBUnBM8xi9RAGjlNtZ3bBIjm6uqMGGI0NZUKgGbKnGPIZbHz+VRaU7PlvBz3R3N9eto/PPz4fHwudTns99aMc9JmZgSM13d3u+vbbLbZ9sBRSmnLfNy/aFVUzTFDV4YuQq37Tz8QwkLBpSXQGIP3Q1vMa3XwFDhxdVMHP5/OUpeiggybzeZ4ek4p7s9lKSUnraWpqiEBBC+lvHycJ2zL8vj4aMZACXwBdZE619OAGwQyMAAMDqribvNcXMVV1aTLOXEMOdxdjwQCXtRIAU2snuZS5Piy7N5IFzKJLRVPxzJP7fr9szhqgzdf3L351U28754f7v7xf/qPyIpAqvby9OSmX737BQ7jXuUwHbOUt7d38Xb8+Onh4el8eIbDqQBgzv0X74apzPv9kYkDJyznbthwiG6uYk2UQ95utiHEqT2U89wlQLaY+f3tLXo4lvNcFmRH5ggMDk+HY5/z7e76PJ1BZew38zIDqaoFxqZVtZmaGy7l0PfDdkzVsdYWOcWAOWVpNaaeeibEw3mRZg5oSAF6b1JbdWMNEDiqCzGLKQASBhVg6nI/ODe35iaIFjsU5XWcFFF0yt3o0E7HFwwp55EgTMvcWvWgXUyi1soSIWOgn4ZkuIwGF8sCOK9N85d02GXOIQTzy/iCZoroZkiMaGaGQBd2ESICqr2enjsAopnhOvc4rn21jIiIK/hyHVsIL2fhr/rO52lnLST7LBmtqIb1mQ9WBisS/mzURnN3+KnHSv01hHbBZ1yyQuaXRiIHN1jRrBxjrNaIeOj87qaLiXI/bhqy4qf96SqmGEjUcuq6nAFB3F9e5pRDm+fHTw/QWg6UAwSwAISIKfcEa20sAtDcwNTdrCjMIiqIkYeUSN2JltooUDNTNVmW9njshsiEIUUOgQMxGIRwmks1CKeacrq9uZF6+uHDi/7wlFMYh57DBqfWDG5vNl/ubjiGH3/YpxiLTLvt1az248N3X8nffPn+F63IsAnQqJ3q1XjbvHWbocspHCGlzMxyPn/z9Pzb3/yaNH/zx3++ur3e3V21UqbDcjy+DF0KXV9yHrr86btPJgXNSm2GZq31/RBjBLXSNOS0HUeE1XPIBnY6LcOmG292v373y+P++enpKeYQA386HbfjeNd/dUSQEtLQmUgOMTISQ+jHqtKFgMBaPVAgYnNxApNZsVDE290dUbeoHI4vsc8UKadc51Plbnt7//DhL9bUwMfMZTq/jsiXX/F1YazRMyb4aegGQATGlYG1cq9e/Rt44cH4Z/faqvU4EHyWnhDWploAx9UKAWsHFQHQq6sOABjRYK3WsJ8wYHiZ+y/ygAMTgxthAFQiVNUQOoXA3HGiSSZG4Bioeaunvtvt9y9iRsxSFyAar3dMFBSIQ2Y2mFMMhOwyt3rSVh1xqjX2A4dYj/ucE7s/npdvP51+PNbTbBjj1OxTK48Gi0NxcCAyrw6qSMQpwMn8SQE4k1iXQ61lO3StejMAjNyWIVMMbGYsbSBKgBQiIZgHjqwoJnCuLeUQRFVPP/z4x4/HU3XYZRIXUvmS0kg4pBgiACcA5JwISZHdXWQJKaiGcjg+2bfD3d31zd0B/Xiww34/DvH69vbl5dHq/P7+/q8/SFXbpNSNw2l/uN1dLR5mD7suq86tTk7tanOjHrvtnegUuvz043e73xYfmYcxdo6iRkIO3hpk//lO5p+l6te/vXga1/+xtjoj7SIbrZ5Hh9Xuga8mtNfFipdEMFyE/tdn+5+Wyquu+Hmbp1fr0s/tSf/2CyEwq1mIkYikIrqrWCBkCo4AboRkoMBEHpA6pIAg5ASmAmuLpAGhGqzIYFCDtTSA0F3N0dxNBBTBnJERjAPGxI60RvWRNRBqLWSG7t7EAylT4KiqBGAOlynHSdSrgoI2d0NsBgTAxJGd0RNCQIyRmBF4bRZ0jqxOiZMCY+pD33sMBSNjkmZ
dcAOJgIDEkNCJMBgQIQaKyGxo6C7SLlZWEzQ3UFcAo+IzVhM2iaHrQogYUwwpUOpC38eutNLqspi5y9JcZlBLo4l0OafchZiZmAwDdRQ6NQm8dp1dpCJcq+61OTkYA/B6MwwxuGZVQSJEYFqDs2tPG7q9bi/uKFWlAQAhOQckdgdAAjRCAiIgXMWfFafjrblUFzERMHUVV7mEwBEBkCiYKdIlr4XgwdxdOSZRDakAYHWYlwlNXVuTGplMiiOikasiIYWEIaz7oKlCVauKamQrLp0V3KCxo9VCDGYCGJvV06IvRZ6naX+aTstSTYro52VPlxOv1X+5GpwJEQCZCRGBiRDB1eA1kv9ZKoIYA7qaamRUbePYNykAhOp1OVEgE0V0CrG2KSRGBnOJGY3MmUMMYezGcRg2W+I1ztdy3499tuVkQOdPs2uTMpVpmY5HN2TyRmJutTbGwI4xD41lmdtUz80txX4cu3EzxD4YWohx3PTPLydYVFspUN+8uUkhTYdjt93+9g+/297egEHfDVIhd93Q5/PxBcj7cXDAUhsx1nkGpzyMTZY6nTmSihyen6WqSg0xOtUQclWVioyUc6cuCTMGRRICNZmXUw15QMJWKXZjCKOmPiK4NDGnGGM3Spu1ChKDAzIHjjJNKY+hGxEDmIWuR3NwpRABUGoxJAq+IrvIrZYjYKKQHMwJQ9/H1BXDMp1CDAaa+q65nuvcQYchq6hInfYP5+PD8eUpRY45qlns+s2waU3N0L2UuQx9CsPg6P1u83Le58BA2JrL/MNN+HJMnVkLEVLfTecDevjN3/zhT//4X7Qu/ZhO++l4eLRqz48vt29uMQQiPBcxLDdvbs/nAxC+PL28fXP3/v370+mwuxkaFDQOIdrSjuUZjxPkLg/Xt1/9rdbzy9OPx3LKkfvt5jydpn/e99sx5G7sr4m688tyOu9NNIX8/+fqP5otybL1QGyJLVwccVWoFJWVWVUAHkCw2aDRjG1GckL+Xk444qxJGmk0awMbgDXee3j1SmRlRmaoK45w963WWhz4uZEJxCDCLM89Iu/Zvn1/3/qE9/HdT/ehG1Uh19YMlZzampVipLZm8apIrQWc91IMZe2Xaa2qainVVNYtRhVqbeTos27/eJp9pFbL8SEP3bbWvHjr+t68IyM7637c7SJx011vd1e7zbgZr27G8QY5EILnruYZcbZaTLOYoffHc4IWN7u7rhuX5ZRLPnz6ieIeMSy5bofeqpBO4nm8ecnbLRIZMFnVS0OokiEigVUEbfC5gETBrLUmTcxg1bWAKeI6wUZpTVvxnQOprc71OCkzLj/F/RuijbotDyP6AZoBKHnqhmFtbfaIUhZVdF1v1gzNhz5NVs367V1ppbRG3gMCekLnHI4myoRFMzpPjtmCWlNQ9thKGzbjspyAzPWIQuz7kufgOxGKPmY6Vqnbzf74eGB2iLYZh1wWEVrSY4ybQB1wRIzmVa0q8pxOLgakwUXPPO5evuBu8B6ePnz68S8/EtYvfvOd864pJ0D126u7P7z/6QOfT5GsglOyXCdzGCnOrYqIzPbi5oslte3mVfVSpJ7nooYI4253461zdjWBALRh05fT09P0NOy2u6txOefp8LbUFGKY2xQMh7AbMcynx9YW9o/T/TT6Pqkcz4/fXf0uk727/2F7dU3MrRYA3bAL0HPcFpClHJk8MChUaRjYj24vTp7O0ykZY+zHXqwJlLpao5nj0LfTA6ArjjhE30etn5Zl7mlCqA6d96SQx34zl6XKmVOV1jqO51nicDfG/vDxByL0Pnh3KcVkIjUspTrHZmagnr1zrklTFWVEQBMgTyFElZprWgXfqWVHLAq11aYKDJ49mqq2NZyUEBUJmU1YRKTWktI0nzbjbrPdHPyjI9Mqjx8OXejQKNeCiLv9mCuMQxe7LoTQRT/NyyHnaTqfztMQXEmp67p+HLb7ftON0zSJqvfBgMfN2PVD572UMwfvfJRWTmkpaXFsfQy1ZDToBx87nB8nNDocn0Ta+ZTUCI1Krgp4Tq0UI9cawNt3H8bN9XGeR0fErAYEri7CzB6d5NJU+64vuSwt1TKLtBg8sctZo++GLsQYRNvx9ATSnHNSlNi56AiAFJrA8ZTJ8rJ8DMO42e37ze40P55OSRv8+Ne/uM1uePWi29787l/+2/t3Pz08vCeOPlJdSk3z95/efvvF5l//4TfUyvFxPpzzp8N0P9WnSc5Vcm3OJlUL0fvgRZXWVGYDU6211SrjbtPF3bjZzMtU82LQYvAottlfORfe/fxJQMFojP0w7H9696P3PQF734NaF3vnnYp1/TYvc21CqgxoRIDODEJ3VYEe0wIGaGxix2NxrjJhVSEmtLb6vEQEHak0ptXssAEAs7b1jpwjIwMyxZKLQzQHAA6wLWVypj5EAJYKRcVMvXelNWbe72+R6jLNiNT53sxKStAaIPd9ZwbSLoG+ZkYI2gSJ15hTz05+0eVc8Mgl1cAu+dGXCKA14Oei5kFUVP0MjFcGBgnQ1HgNCHiGKbjSP6ifjRW4QqNnugpgNaAhrqQVrKHXSoAAxkQiAr+IQZ5VRfbsuUD69VT9M1Nk67D0MnqHdapOSAIGgNIaGLAxaZNaN9fRsRExgK81nVOtqtZaM7fZbV+9en08L6m2VJvrxlLLp4fHvMy9g23vAzSHGDwDEZN3iKqCYKo1sG28BceUtFUz5yuCNosA53Met4OoGbra1Ac/Xu8MtJUK3lc1P2ymeRErm3FrZksuPnhpKo2ur1++uPu2tPnqes+DD9GdDrNUK7V5x2++/YqBvv/jPz0+PfS31/3tzcfH09VufPh4710sHKRJ7OK8TNvd1fHxAZFD9AiaakUXM3A9pps3N9ur/adPT1oWJdlebaLzaghsh/vH2FEt+HiciKlz3bC7ag3Ik4qFPnjnszUmJ8y3uzfTcv7y6x1offf08PbDz/WciGi7uwYp3+33x4dDOp+1kYknwN1myHnxBCVn7yGopuUM5BFaztn5QGAhBue8mC3TPB9T3O7YuzGGWhp7h9KgtcdPn3ZDt9/ul9OUpVRtu134FU5eHWXwjMcuLCc92x4/ww9E0rX+D/Ez0l+h2Fr59Cvv2WVJXiD+c8D6hTnCy4K8GDGfgb9dyKb1ArpcHXZhbe3zu6kZ2loLborGTOS8rLwTmtRKRAYE1qLrzJwqIXpCds4J1HEzSKvrXpRLNi2xZybN89lU+n6TpoUdINHh/HEc+9L08aH8+H5+e18mdhiQAJdzbkLOsANjMqciikJgiEuVjce5tQhBixG6neEmdmp2FWM5p8C4GcYuQinUcUVGD1gzIGOtogjOBSa+7f1LlrdzGxiwqNRyx6BNX8ZoQeM4voq0TMuBkbfBj8aBW6vR+27ozEgytHpWMB+66TSnXPcvte9Hq/FU6ZyKQL66voJPqdbTzcadp3o6zUN06ogBOmJrjahHZDPW2jyXrtvGuL1/f+oG0Dq3ujjvXeyn6RHJUaCq4okUjQku9t5fJGOfacPPW+cv7sb/9ptGvIRhmV64ws/PXiUmgKuN6P
m5z+zQZxboeRTw/AGeyUa7/I2/LOyLynJNESLEVXOA2pxzROzYr6fxQFG1EXWgUiyrokoF4pU8vUQcwxq8DmrKyM+1A6hiKuvlqGisqipr670jx7CC9zVlAwgMVSzE3kydk+aApTQpBgZIZlhNpma5WVJbRAXAkNcobUfgSHvHHiU2YULHTLxSI2BGhhJC35wDH7jrXQzBj4GjQ3UhMnvFNdbHzCpQUCLB1ZfsIqP3QaWWnBXBrJmBVEHUIqathWBVSxb2JN0w+tiT9+wiU+ddCT7mUkqpzXSZUyvWxEqVAblDitG7ENgH8L7mBUyebXh4uWuZijQgQ/VojMhGDM6Ra2JyCe4jQmJgNkJAWNkfNAOtpmprFKJjQwdI68AEjNZf3ucwNEMQtVVMdDGSG6jKKuXFS2sS4JpHSMjk4HluY9ZEGzQBdkDkomfPeZkkZWittialoZm1Fpyvao54BYKtVWkNW9GWkdboc77o/9GRAVoTq2lJZq0KT1XnKqm2OZUlt8fDnNul7gmRxEzNiOkinCIk5KaNwBySqjC553kW/FdUkXPOMQMYoSkIATQpACK1RKbggqB2Q6+1OFYVZEIXzEfYbGMcve+73fXV/u52t9mGMGhZRPT49CS5TGXRnDU3q40J0zKrArGrJsfpXGtxvhuHq1YsFz2dWm4ZTDeb/bAZQyBCY7Ique+7ZZre3z8ixrK0/c1V5zpE1wyR4Lff/Yurm5dqdbvbBt814Zu7l9P503x63OyuA4fpfCpz2m62fexLXZyn09OTA27zfF6knGfnnKqkqUkV8sn5iOTRUSonAljWiw3Qx05bMS0tnYmdiljpXBzZBxe3jSI0JSbTRNSUgGJP6lyZmE0lUujD5roZgxqGSK2pVPAeAQkcmnAXpDVUgZbzfBy31yat5sWkoiEAIaJ3gbCepimnmWMYYgzOLcuplfzx/Q81nVXEBLzzOZe42TAHIh43Yei4lnI+n5jNO5dyJarXm/h4/2kY74KPoKCn4+3Vqx9+/F5a02rjsC+5ubD94tt/+dNf/9EEgPHDp/dj2BoUkCVEbEVR5NP9z/vxlshtNsP5fDrcf3rz+tV//Pf/3hFWaHmZup53u9t0WvoeypTOD/fj9VPfD19/8+VpeZrvH5Z5JrKKls9zWPLh4WTk0Nrt9fbTaf7padrGbbD4+Hjsu0gIKg0QtAoRMDpCVsUmaOgbEBkIAIsYgiHBWsOoIiIE5hjNzHlaZ7eX24dZPidBenX7RhOSs2F7c3h6evPyyy+ur+C0OIbffv365atXfRedd845a8XUwGpLs6hwiMFHBtLsnKPl8QPpMi1zyonsSYmITKCqzVdu/8VX+27jEQcMPfgIfmvgVQ0xmCKyQyso2SRLzRdHgJpdgk6FANFQSnbOiVHJjZ2Z1loTMiK7pqLsmX03RlMkpmbSzjORcG/MFcAQvSJR1wMMkjKZ0rYP2mpJ1A4GZlZ9IG0ehBC41XsERHNgIss0XL9wsbOq0NowdLmaEvi+r2VGg+BYusBhhDQ5h30fylTZO1nAc8c9AhXygBVaFc8xp+T9YMLS+JyKc0PJEgZmz/My9YEMsCl6NESraV4eH/pdN52XGDebV2+cH+7f/u3wePrLP/2X33z3m77fWZF0TgzdN7/7d/vju3/++CNaSi3ttoGgCRkaKMBpnoljaTZ0cdNvj4cn0+YoBCcPh6eXu5dGjdi1nPK5IscukmV598PbIWyG2DnvntLjZrtzwIpYUBuohlAh7e+Gh48/VZFNt5+nB7D6xatv5qUgUTeGlA+gp1azmqqLjcQon/LTttsNYSj50VgJNPbbBHzKc422ib6UVLH6jk/1hE36buzH/UB6/+mxnE4gCuKz1H/9d3/4//xP/4+b629FbJ4XIncbb1VKajze/D702yTtxc2g9cMyTf1+k55TWta5CxgxErLLUtB5bdkxgvPrmSkQN8mpXQIhxAyQRVspqfOdonnnW0vM65QFa2vrrddMGRAdF80hchf347iNnoXhzVcvPL5++vh0td8fDo9//rH/6eMnM5geEhe5uhq9w/PhMM0nDkHNrNa7m2E6nD277fbq9uXdyy9fOGDfx9jFNBdgjhSsJIY83l0DkDXLdcGygDGqNmnT3Gophxm1Zkkpp9RUU9GUW27KIQKoiIqiIjviVIoxUi0C5jpXU/KMuuY7MiJyNSlWPIWGzaSpISIJYFMBQuc9OJpbBhU0HoZuE4NU9czb4Xp/9bqWeZGGCMDw4subbrOtNXT9fvPi5TeDC6bv//r+f/6Pfz58/MTdNu423/7dv+p+7N5+/9euD5txmK389//2d/+733/5D//853cfPiwZUoP7U2Hnb2/HodTH03FZalWzausxu+msomoC7AybKXvXMWGpxXeeUIfoNtu+ZTmfp5/ef2AL5BjQarP7wzt1TR1v+2vHllvOc2X2xm4VIbgYvScyTGla8gxqSM7AITGIqVUffexDq0ohOkkGZsjB+9VHwt45VMfkXGetHs+nNBd2rlJZtRRVm2NmhiKptNq5sN+80CaICNrIKTg0UBMlIlNVVSLXjf78dAqsu/31NGNrKddKDglAnmMqGJGICK2umy5cepXXmfZ6tjIDIGtqjGvqBBAiGZiqrEEUAE3lEq1C2ET4Wfbza0MFXuRL6xsBf47WXrf857Ma4loKdEEx9Lm+bAU6Ckq26t4RyfQSbISXmrTLUXcVmNsFk382XfyiVtLPI3a7MEyMXJp0Azlx0WFwNPYDqE6HQylVK5hYbjpuYm3w8Hhqiqk21/WqcP/w6XQ6cq1dxw7NIXUhMKKhoimbqRRDJAAmCtEbud7HoWsQvDGkJbcqu5ubDAgxQDeCISi4GH3nd9d74JCXWs9l2Od+JCnl8emkAktKT9MjULSH+fsfPvTjiM2Gq+3rNzfB9ze3V/1mbKyn6SH04//wf/k//0//r/97Tdkg/u3tn1/82/8+dP3V7c37t+9+/7tvPvz8vh9GC7nlSo7iGHvPP3z4ePvqBTvbjHx7s3n744dySux42OzQ5HR4lFIVxJsuOd3f3+elAfMwdEWgiQDYMA4eUU1ayalp9P79p7/GrvvrD/fX29s3r17cP9x7Dh7th7+9dQ5ijGk6BfSbq835CWo+PT08mehigKILTACtj87FAdBCN4Dzjlml1SzAzscYquTTU9dFZs8+IkHX9ZK0peV0nsfeAyGI1VLhmTC9LI1fQrc+A+xfAWl4hmrPi8s+O4OedR2fJXj2jO1/sbVdZHHPteXw7Ia4vPLnl7p4gggvhCk+Zx9fuCe8PAC29q8xoBIYExiggCeOVpJINjQADd4tAK21WvKa1rGUevPqarfbWn4yURQzAOaOfSAEKcXAMTDCHGOXat1s9g7k3fvTn79/fDjXueKxipo6UUewZ1zFpcoOiRtgyrWpRITBe+cRyZTgrK1UwDEGMhW57bzkercbxl1vajUnKXnsunlpqQg4d0oC7ABb8LinJUS4Cm3cepblZsveyFGJsV+KEmBO+TzjLhLA0jnZ9r2K4Hr6p6FreJ4WIHSBa6uP9
592u7LZjm1x86TLnI3q/vbF8WnuoDbmCoREIfAiy7YfluksNjKjaxC6kOcJwzZcvR7yFCIKaJuW+NIFF2w2rTaEKDV5BGJnqgh0CYi5cDkX8vCX1WGfdUcXngfxWUB5WW+r7fczSfS8HNbXeQ6zWg3Cz3utfZZ4XF76eW2j6UXP9Eyx/yLlRGAmQ1IzRmTnWXRNs2O8NAEQMRqiqUAztDWeWm1tYWB0xBa1NSJQaCAGAAICBiZmoM75lJeZulQseYxswXlTkZIQgJkN5LmNAIEch+hbVVVtoLqoNjKSKvissyImMBXAJihmYqqGTdExmNBTa564YwtMwZlHYwTPsIr9xIRQDSpaDI49imdzbMRCkRQQGI3YAJjRCBWgmTIQs2dajexUS82woELVyqCMaGqiRdCVah1ZPc+DQjSj2AGSD533wYXml5Rak1YRpLUZSZfJSAZG8h0T++B6z9RavZB/CkTryjEERSNUQWlIDpAAiJx3ermx4uVmi89SNkUwlGq1mAoarhlGBIx4GaKtiwovbrsqTbRUbRVUCEGkqVRVVVPVdQq0FrFdViAiAdLlNIEEEBAEnToXPZKG2HfjPC+Ss6Rclzn2QVvRmhCEDEWKpoomKgXFUAUZCVbvGAIbo0OMaNrEpAqyF0FwRMC975RYGy7tYJSbaakVDPCi0UO155g3s3Wga2BNhRBsTWrnX1o+nhvQALU1RAkheHIKq2I6xNizIHmvklsVa61ojZ3XUosTZgYzRvaOYvRsenp41PYJVFptTM7A4tABxKVMFKKYWsSWci2p5NyHIfphqTSXej6dng7H/Waz2+62uz4Gdz4vOZkBmel5yqoHqRBi3Gz74WrkjlvVMmc1+ub33969edVUOxcZsdS2v74Wyct87vuND/F8nFx043YXurG1LNpMdeg20tIyLWuSmEiWVp3zxApap8ejGjOv7VrWdb4pIpKPUVV99MTczpVCICo1PXnHdPUKaEQIRNDy2UyQO0MGZy7GJgmZyHslT8ggzUwNCV0wIgAEJlNVBDWrbW1Wsnk5qQgi1ZK64QqY0XtsJS+l1qoqEa20fHj8uJyfxKTVWbXlpWyvdt5HotjtdtBIFdh1wB1SjRVFFzFsVfNpao5c8Oen4zBeYejU4PHxoe/HJsLexT7eevr//c//8V98/Yfp4f7h07vgXN91reQ5l+6AXR9ZYBO6XKsaINHhcNAiyWr3auv6calKgZH4eJ+s3Y+9r7mFrt/FzXI6LofH+fCp2+2+ePPV4/3H0+FJnSISCR7P5zBskCFGfeWu56oqtWmJvSsttyoIyHAZWyEAIxMAGlYRNmgGdW1XIhQprba1m5KRV/eWqTLYqtJZr4LNttcWsrSBGR1vbl6g3/7+mz90eZH7h7/77VevfvvbMI4ubkqaCbQeHury0GrN8yF2g6dtCExWpBbAQsFtX7x00as/17m1pZ7P6fbV/ubm2qibnv7yt/t/MqM47l//9vfYBfJHYiTnAIxUpBWj0LSBj2CGLiIQekZiMyUEEzGR3keVQk5ccAjVRBA2wB7Id+zNkEzJwMyYcV0GZkY+CKhkAXbATsRciNQFRFsjjV3IFKKUk5WzthoDSTby3OO1NGkCIXalpppnN1yF4apMj+gdgxEGaY2B0QeMrplU09B1pqogxuB8DBFK1q7ry3LAQME6IKTO77qhijQxQBWT/e52SQdwaIjeR+fQcZmXaXO19SE4jJIXtHnT2/n8gHTjwvDmt99FF9+/+9v3f/6nm9tXuxdfWWQpAgY31y/fSOnk/P0P/7whtZat1T7uzjX5GBDEO9fmCVG4JXZMvi81qRoKTPnIkYN4hzZuu6aHliuRj+G2i4yeFDkwaqoFwcWN4zzn4hzlUtHHXT/uxts5J9A4xJst4Zwfp7KY+izFMaIlAHBUqyzRUYV8aiUioTABi7jj8tRHOJ0fanJ97F6OO8t63W0IIC2n03LPDp366KmqbHZXeZre/nTfhRelGA1sDpYpR7i66t7U/gWKHT7+zbl2X4GCcqDpfIibcb0KWhMkQCQx1VKUZEonzyGyE6k++FyyigBSy2ai7J20poZIFCgSOTYAUE+8uvqRIPi4TooJGQxKzaaoBArNyMQgz4JIC1YhVIcU+99+94cPD6I1v7jbbbe9Bz2cDy9+92UpuRX98NNDS4od3V29fPPFy1dffAWOfO+s6TKlKTcDaC2nOmFVz61JUVVtLVcdes8cap00z55RwE6HqZS6TGfn0cWoDE1zbW1tbSu1ldJcCEBAplKRhxCHwcfQd7GWRB5c75c8k0PI5oC11lrbEENrys5pa0zkI6tISdL3PbGQmAmZkmNblrmIfDo9TvPcx03fdbnlb8ZvZTbnXU1a57yhYbi5/vpfXHfjiz/+w98/nJ+OD0udIgtebbY5lft0+ld/910Yxv/vf/7zuw9Pr66vQg/14TE6Ox2mXM9AJDkzmXeEALmoY261OoIhxnlJUmTcXXXDtuSEWQbXObSx7969+9haNVPnPLuoYtvRn05nUfEYxn5TW6l5WUfFaSnkA4CCiXMkhTwxGzjvOLicMzEiMHUUu835dDIjYk7LRCTOu6YqTa1qq+giNGwpz+yiIzAR33MqCcAJsBmCw9Rm0aZig+tKmYScs64pb4cXKb2TWmPfKdWcawhO1v54xt31NTQ5HE8h+sEPrqq2kquE3j/D3xWi2AoFcLWVGXji9oyMV+sYEYIpAa1zRXqehDNRE7kQOqaqBIi6empW+cMaZW2AaHpJ2bgolS4ZG2ZwcY1dYPQzYEczfUZNv8Lv8LnwxwDWOIzL3P45seii0nh+8edMIrgMTHEFcJ9/SO3ZwaYpJVEkYsG4NEapS0qO2MB670PsvA+mLS9nBfLAOi/H8/np/XtqdfTcO44IwTkDa+ssXZtpBVVyzsyOc348p1NVI+iYd9thHCMG5/tuqth1Q9zv5ir5lMfN/u7ly9x06K+epuOSa8dKYCmnq+HKx7GoEijdWc4Vpc2t9v1m2w3Q+SKylPnhfAKx3/3u26u4kVLr9Ok3X373tx9+dKNXcodzUvap1n4c53TiLjYCVTahru+WlJ6Oh971ToGIuq5/99f36ZS7odvudy72p4dHBIohmjVz/unpBID90PXDsDrkN5t+u9shoUidl3Rz9eJ0nHbb0Tk+Tdmbk6VOT6f9dojE58fDbgy1wovhRXJOJbtgL283D1IlaFqWPCciaK0BVDaQMm/HIQ6juQGou9ptvKTSprkWJWCGtCxVy2bHloV92G9GI5um43lKmisiYPtllIz4XGgOz3Dp82O/inS5INh1Ka8RGJ8rpZ6lG3bxjF2ifC880ArF7PO74UX/YetFB88ZW8/UACDaZWRKK3VKq9XoEiYGBGTrrGNFkWKozcQQqoqUycgZeQNEx4KUSilSDMGTz7VdbXYgSVpxjsggRF8lI3Etk5oCulSqWDPJfRhiDH/5yw9/+enp/lwSkKBaUwJAs0DQBaZqc7Oz4FLVHKPh1tEXY3jREWtbfwWRyURbzePNdhPCfMq8G3Z3+65bmb7aalHB
TayDCAWK51aWPE11nsuotvdcz4uGdrtxQRRNaxFoIGZzqYC0lPa308PrFxtIE1/V7c1tquodU0dUiNGfajGQEPE4TShGIMNmE8wfZ6hLm0R3+21tx2YwpxaB2QWjpiCxD1kwGhq4mpWpC8MGu6G7upN0UHDSUIsQwuryaQIuDoZsSOs2ue6Nn1nzX1GIF4LxmUCC50eehWi//JeLKBIJ4EIJPUOCi1kNf1k6n7fGiwfsskx+FVN0iUa6POH5vcygifLKNNhqwSJFFVWHhghEDAZNFZERVK0hgqE670xZhJ13ogVlfVVTa5cLBAAQVa21AoRZXBafWo0FmBoogXNMztBg7We/xPIoICIzsWPnnGdXCY1jNEDBagAga6E7oSOrAsUMEJuaARTDBlgMkhCp9kYRZSBSAwPxvu+iM8cYnIsuOL6EkDOG4Ciy8+ES8UNM3hsRGAEQMgMiOUfI6IidI8ZUGxGqiJohUVvDgICBzDOaTuvXH2JHzEgRSdlFqqWmpWmVpg0KGiyiqz7HO8/M1nVWqLUGtuqKPnPKl2GISiNWI0MiIL4QzIRAtPLNgGCmaAAi0JqpggE6hz4AOwCyNduKn1eg6ir9slZXTyGAijTVtY7TCIjI1JDIAYA+63ltzRU3FBECBUI1RUTP7Lu+1gIUIkXpq7bq0oLpUJcZGktJ1kSlWK2MANIIyPko5gEaAjMzEAJwratv3TGTAIoa+hA8tmbB1PeNZ1YAMTNYfXgg2hDZEE3ViMzAESGiWvPIiIDEZmK/uv6eqSJmRkaSVsWRASCoIiA5j1hASwiuSWNPCEQOOBKROnTadDnPxG55nPKpbIY9+a7vIw/IZA0VABRr2HOap3lK0pR8R65D0FTy43GeUkKg3f7N17/5PWMSSTktp+UEHGqW+4/neREGN2w2FNxmv/ee+t6rNedQKrz66pvf/ov/dU7ncbvd9Jvj+en61Zf90H96/2cmDj6cjgdpcnV1taSclyl0YzdstUxpmVI6mcp8nERayQValQvZKLgmwTOroQEti5mR98FOQMzO0VoBQZm1RQQozEXMhZHJ+922lVnU4qZbiUsVJGSAAlpQChCDZBPEdX5YDH1kbMhaS0a04H0+nlqa/TiWeYnd4ClaM5UU2SZJSIImZTloO87zeXo4OQcCKrmI6jCOKqDMoR+7bgfEho5916rEzrrdnbRcSwrdzNDycqbgnAsl58BunuZ+2O52+/tPn1TNMCCFL168yZh+829+M/3Hp/mYtIrzXpd0eki5E/LQ77wPXqFejd37T8emukxz6Pj65fVf/vFPd3d3Hqg2Los6LLm1riqPQg6laZuX05IhLdub6/3N3fdvf5jnOSIOV1sjzkv58f0PfXfVgZ/qjCjSEBDYkTVr2pjBwNSyqKKohw5FQJWkalHxHpRETBuoGgoCEICqqncMqtYa+stV0A8eqP/q7tX086fh+mpzvdt2V9Onx82me/1vfne7v/IxWk15Ps6P75b5sMynYdzs7r64/vLLko2NIT1pLQAJzM6nBShK9ZvtS9xAmad4Q6fDzx/+9HPobyLt9l/8bnv3JnRDGEKVbOW8LNl5R8wcIofOKLKL6HpERPbPsiIkaabNNCETEBDH9WCl6rzvQNdUjGRapVWUKmU2aQYNVCEMhA5s4LUKDRu4oEQrqYzMgE4IgbwLvbNNOz0hiFljrNpKN/Y117rkGDsAafXscYzbvZhBS6jJMfh+U9OZmRCJ1ypfvzs/PoQ4qCjy4B20lkOIaX4iACQadtvT00mIx93mdPr45ps3P//lx1LOBKEKxRiHrWvLIxruxyuVpk14DNrqcniYRj/evq7Tw1Rpf3v14ruv4z6eT/c//vDXV4be4bC/IsOf/vZjrafazrd3V7Ic53NCYKsNNIslkdCEiMf5dCJ2I7skJbeJHR+XRx/2/TCCiQN4ePgJMV5vX4h2ffcaYT6d7lGDltK5F/v9b98/fW+lSk1WCUCGGMFy06eaC1F/mhcD8rGzMvWuq232vXhcqkyynJC8EbOP1eaaK3EXXU/ob/pR5QTCjoYd7+syUbHt9ouakwpm5e04fHj6gENclsJ0jjT02u8DJZv6AD6COV6q/+qLf/X+/lSXt6OXCvX0dLp5fcsOuo77bvh8LyhNVSV4T2TBDbVVAJvyRIhS0YzUNDjfRJAJiZBYm7BjAZNaiMCkoDlyhCYApKaOXa2XQdqlg9ug1MZEpTak0MVwTlMYRt/3Kvbh8fGLr99c7Ry22lH31z99X1JbWknTMvTDt//q23FzG4ex8z5gXUpLi9ok47Dpuu08T4RtG13VLIAMuMyzgTiw/bDJ6Xg8PmqTmidtbZ7nbhwBC1qtBWorgFxUFVHN8rT44Mn5phCADGCa27hz22FgRRdcldr18ZTLtr95+FhU2tAP0uoQNqq1qUo1BNCmaCBVnONai0kjBUHMNQVGdNwhhOBvbr8O3C8Zvnr9qjYDiy9evB42L6f5IFZPn6Zc89Xr7f/p6//D9z/89f/5//732nJ0LvSxio3ef/n6q3T8+eePDy52P394ent8OJ4mra1kZfbk3LaLBvrq6ou/vn+rHppc5iFiUHJGciHGXCYA6GIfiM7T+ePhEZoi0bDdksHjIYXgU8qtSegHNKtpMrVuiGANzLgLSBGtEBIAgWJbi9yRSHt2zvvY2txU2vkQ46YLW61aRIKDqoWkqRo6IjJRAcZAHaJjQiHJVi2ycz4tCbQROATMtYGKtKZmrZ7RaCn54fAXR+xDWMoUXGDfd8MNqkgrTcpSqmfvAkpLucwhdiogoqU814Sv0ol1krsqE9aeEYTVH7ceTsEMjVa5BK3HebjwPmrGRIisqgZ6Abqr4GI9tCFdOsLhUs681oFd1PRwqQNfgzb1mbR6Rhj0C/NzQRyoa/XUZ/roV7DqEpL9WUn0rJtH/JzdsabDfA6aWUG+EaKA1VIZyfmYDc4Z/vrjx+uN9+qKGrOjVQKL1aw5jg2A0R4OTz//9B4UutXZihC8YwJTNTC3UlLIWej+WJ7O8yJUxBrTwBiI0PC8tDjuQ4wPD6eadXk4sPOvXtw+nKc//fVPLnb46aebF69ur18MWD9+/LHvNqKQcq0quebaCpnttpuvv/p2e31XD+8zyqvX14dPT4QeLCxPx3M+jvvxIz3e3L25W8r7t99jGB8ePr5+/XVrqyIAAqmCEXtmvx3Gw3Hi0N29euFD/+LupuRiasNuc/3y9ubu6uOHp37XkzfJ84d39w+fPqqK92FzfQ0GZrLbDDHE8/k8n5ZqGvvu4XgcQn+YEgDc3FxvxnD/4eF8Os9HITJm3my3ZSk//vTPky7OOC2niNyFru+GAblMqZSKTLHbqfcqmrKZHsjPDYPV89gPcRhev379cDwo2PRwnJeq0kygpiSxbTZe1JdkLYHzHIawnE+/4PHn0GBYNe0X5w/8ku2Cl54gBVDVyxTePksz8MIw/SIeuXCRa/LRukzpV0a2X0RzF1LpV8oSRIJLWvazOGRlA9aPhaDKhGIASohIQISOALWJzYmbODRt0m93LROiE02
OvVMjJPS03d9G9qnmwBhCVFPn3Dj0+XyPAOzddJqRHZFzCD///P7Pb++fEh0Fz9KkGRESGK+fOrde4YYxGx9MShN2jB7vc0XAa4+kct2HLYOXFp2PIYxdN0ATCq7rXd9rE0I/xKsq4tnIgdbaxXZ+Ok7HOSL3HUYApR5dbET3U/JIg3Onc1GTx7R4H4MnJPen96dvv7rSU1V3vnpxZ2ZgxP3Wuxq5luXkqA3dYOrvH9PO6Hq/MYDjCc/nhQJsdxtc0lmWUux6v1vS03aMj08Pm81Nmz+21oABhE2pv7rFbp/vf8BkZZrJEzGlvAAY8+D8DrgD4F/TPc8+l2dD4zOzc3ncPlONzxT7hea+dJwh4mpE+jXXdFkrz1sgPjNEzzKlX7FJz5/hvxLKrXbIz59h5QAcIZM2YfaAUHNSayaMTEiutWbPG7j3HUBjUfRmItIyOK/NBIWB1uICNQEwBEZbdaDIiIIk5ItCMxCBUtFBaAUZkTyvyTIOEdGKCl6ytp045wKhsXkBY9KGitqUiTvmkbUZFoUqYp6AOAMmsSJNDQ2xSmPgxAboQuiFXAbXDZs4brpuM3Zh6DoKvt8PfuwpDA2BGFXWSxKBSIHByFSlqoIykSNmwuBQrVYz9CQKCGhqDZTQyWUA4VJuBobE607BIXgI3HXahVpSzivHBWZQUiIER+T60chBiIikIqCy7j+XezYYmIChSUMiQwYwcg5VLzpHYlt3EQQQRakgFVUBHXlWckAODA2UEAEaiKGqtWzStFRtgiag1VTW3QkBGAmYxMCTgzWzHNBAQJ+HNGBrpJGKrSHbq8qIyRMLUBXhJt4IjQRNIKvVpqDI7lKETYYmBgxgTEyuY++UQJWCM2it1rk0qwAKHpSQyHFtUg/H03lazICYTZoZiAqRW2EzIRG5tYFUTVdX5/ozTPirycAzVbSmNjlAJKC13F4ECasVkgpsCFpb6UIk75olAfTAVSSQ78JA5Nl1YRxuXn0B5FttgYFJzVrJaVpaLRmRyA+KIKrn8/J0OkGpQxy//OpLH6hJTtN5Xk5gq+Q6/Pz+oeVas4buugt7IwjRbeKY05wshchd34Wr8fU3b8RSCDGGeDh/2u+v+xAOx/cAFMetlByDc5vxdD6bYYiDuZillbQ001pby0UVXNyipVQO5/ORQU0aaGLnCEGVjJ2qAHIXPDP14y4nMSAgJmKemZm6PnLJ/aZ4F+RxUWnsneqA1iE5clyW4mMH2LQcXAgqSUXDOEpTlUaU63xwoWtLQnRicjo8tFqYiVCX+WiGm6vOSk5lPh8fVQXAHu4/mRVVMbOSJaUFFbqhd96D+m68jsPOgDlEQwRUciaizAEAEXjotkzg5qmWc80p5yoEu9vt6ThzC91uCK4/nh6tlc02Hk6P/sX19tUXiA+H+59KTkPXm2BeqhUFmsWg1VrqVFq5erk30E/3p3HXkeO0JAX0DN5z1YYqXeD5fHS+c52XCrW089P58fG8efHi9vWb9tP7Jc2kFgnRzOlOltCWqfMoSClVRoI1BdSaAhqaoa1psqLNmUep0AA4NtG2jrMMRUTExATRCAgMCRkRPquKci0m7efvf/KGx/mjH3a7YG/u7vZ9t7u7QeKcTprupeRxe7V/9YW4DtCl87wkcD5AbUggNSOCKviwn6ez5FLynJdpPs8FQt/3X3/73e7NN+xDyZbPx3R4irtttxnjZhf3ToGZUAXIO0AUaYCiqqaN2CExgFL0ZmgCYA1qM+zNdUAOzYk0agvUE5SHNj22PFtNgNbSzKSIikyg4kKn3il3cXOj/UsIVxCvmjnSFdIxAKg6pOB2A1ir6Wx24uC1ZXLCXtQWZLKqIgvi3vVjPRWODpkElbtoYoDgPSOoSmWP5Mh30SQTWSAQKaEb+72fT3U+Ta+++OJ4OMx17vfbbjvevLou57ykitQ1VJEZ0RTUrAXktCTfV4ddrvnp0+OU6nZ3hQUP6Qih7/rhpuvB8OHjD6Y2nB921zcYlZuU08SbmIwqgomJAoah1LTkqbTS9wbsifC0TOxo6McswjzE/jq3XNuytDnEPi8l56QCpvcln5vM476vJQCMh+PTkk7L8rDdXVcB0LIb9k2SgPmORM28liQuDpvhpaSHpZ2tgmMVa00rglY1KKQg7IAcC0FbFrTkmYZxPxs9KTBb8G4q1XPsNttc0/2y7L7++uNPf85VHAwY9t311/Xw91kaK02Pc7COrP31+39PwYvOrczLkrphPD4qQ+zCtcPPuXVGhM4Fz2wAc5oYEZmj61VaaQUUCK20YgBoLK0RUPCMhFobOz/EbslnUwACdK7U6mOMXY9UlunE5AiR2dXWyDnvmACLWm45xlBKXSfGbHZztRl6bmn+65/f/vD+55evXqLD/+7f/HfLtCiTNsvz8f7p7Bi311chBsedVDyfDgwQvZZl8R6vxu18ekTvN5ttOn18evixpTydnkLXgbWUZ1XNeWZsRK0pHg4n5n4pWpoiITsmJhGNIYq0yBEjOedEG1bd7IdEicFR1S523ncJFwMEcIaOicHmWmrfhdURa0QCiKKkpqa15qbqx/Fmu1exAORUtzt/fbtTxGG/QYCffvpb7B4Qod/srvZX8zIdjnOJ9erFq3/37/63f/nz92//9m4cyDCAD3/809/a48/OawRrOd9tr95cX4cQTqm+ezy9/3hIc+uCW+4/FQUgrLkFJh/jspyRbehH751oC10kAjWpklutQz+YUm0QzXZjFMCmLfYDsWfklDPAiqSg5AbEgWPNmRwxx6yLiCCYJcxWUpuJkRRCNwzdZp7ydJpqLuRjCAEQQvCEFmKHxIoorVCkpjotx5STUUtp8UQApKrkIqqSJkNk35MxM/kQr6+Hx9MHaUW0tibLcgZwh+MDUxw2u+3mVvGouai02HfklFbBpgrhc/cT4jourqtcwdQURdewbVijfIjQmjKgmq6FtaamsE6ALl0wTKhrQgVexOxrhe5KyOhFQgErJ/U5UAPXTCLENZTuV6acX6NuBAPC1SgDa9j2Z/PYL2KoCwQyBJS1FeEX3PUZrz0nFl/CO8guBSkgCIAYmE1MsbFz81IolV1wrZTrq52LrtRctYFg8M77fs7607t30+mk0vzaz2IQiKVW75kRa2tmtuT2MKVjg6loEQREcrAN/OVuEz0pIUb/eJp+fP/g+p4DtKre9O27nx/Pp5v99Vdfvrm5u15Oy/sP7xaWzX6XllrK7Bw/PZ7H7Xi9v/3f/Mv/7t3992/fvf3jf/4PXEq33R7ebbS23U3/5Te/6Ycg1ZMfMo2n6r79/Xfz+VNR9+nd2+2wi3ErUltrzoV5WURTvwm5Jgba767vXr3u+75JdY5wE69u7q5fvDo+PFlrCCC15VJKK0Va1w8evVZ0Hq5vrn0Xfvj+5zzPwzB45662u9OSBJXJLSl9//ZtZIxD//rr3+b5NKdpnvPx4ei0+ki3mztSfz52jmGay/GYBqbN/mqeTlWtgK9Zd57RMzBoU/LA2g5PTzRRaYVib0bj9g7gXLU0k5TT6Uhu3EADaO1qt0n5gB6Fn1
fGCqwvxCEgAuEq5vk1lAZYi//gV2N9MzBgwlUjp/rrkKLVILQisWd24LN46VevCfDfLNRnexE8swW/MAz2GQeuLJEprNpxj6R5gjpZTgi1SVWp5J02KXkmBFDVphTs+ma73QRpRZpgxwCosuacaq2JDAOHGYj9EEJ/vL//8ce3KWkqpFo7ROfAEQQfPICKMhKb3Bq6ZJ74KJZMnaJnrkpTw5EIAftAnWoM0aG32hjdZr/DYQg+VkgeKThfMihDCINSsXp++eL6eHwUgbGj0YFDVOdLVbGNVe2IVWnOi3GXm7UqQOjj5qf389ev9qfjFAJtt1ujaIhx6ME7rnMtCwO6wS9i908TEodh3AA9nWtKLSCMfR/OefVTD64jwnG7nyQNHk1QWgpx0296jIP63e12//Z/+Q+OAK0hNUA11LDZNwieA4Dhs6/nmT9//sI/I+vP3/Ov6Jv1hz4bw1Zqxj5b0i6yo18tGHuWtMHl8AjP2+sz2fiZUEL4Zff91fp7/iCIWGt14Jk9IDpE8Cra1KxKYyBkYiAEJQ1gQGTOxapCzjGxYiVERw4YVbQBIIBIZQQRJWYzUAUEVCDnwhpYqloVyABE1qwtMtG1zhNUVNTWGFwfog0VEQxUMjCAqgrVVRzC5Az6wMCuiCpSMOqRirRSm6iaEAMCcUNy5LyLvtv4fjfsbwYf+y52fReGwXVEntF5BVJTAyVQA6ytAulFLoUAamIE5hkYCH1wzKRWWA2BVZSkGUITVQFBasrYClYXiJhWe6IPTBaC73peltZaa00BalNYErEzBN9tEZlW+brKhW7mFc2tukYBqQaAPhghXrhotEtxI+CqODRVrQgKDoGcMtvFtmYX0tIATFSa1iq1aJO1QVtbUTNVRSJEJFNRwc9KNGsAgNoQ3XNXHyCaopmZaDNthMxoAOKYiFxtYqmiaRNoQioE4MwqsyOHDGbCjLRyYgSqa5UeBOdCrq01O8x6XjIyho6JaJnnWdrD0/HpdJ5STaWVWgFR7cLmIwKYohmarG1QROSITI2YVBVsTdK//PmcVUQoxkRogciaCrhVccoCBdFYm3drO7LWWhnVjWvCF5amkFLXxX4/TOlJkWsVh2TWamstV8bILhCH0qa8TMfjicn+8N3f9RtmK3k+fPj4XmqGUpBR1Q6nfDzNuZUY/H7cxM31w8M8bLvbN6+kLRwh9l1prRm8ennXjwMAbjZjqQuDu7p9NZ8f8/kh9ps0L1pld7UvpRLysL8h78Gk5YUs+UDNBQ9dyZPkBdrsGfbbjdZa8qLi8NKXVVGiNTWAXJBQrC3sWIUw9ArggneOpM4+RoBa0SF572NZ0IT7nUc/SEo1LQBKQLVUi15LNjQhk1alFiFC0LYUEzVDcC3nyaqe0tQNfauVyUs61zTldC55JrJScimt78I8z02bqmkVJg/CLnbd/np/9dIUWmuaUi2JHRGBVi1itWZ2nombixSG3e5mmQ6hW9LpcTnN0Ox4/zjsdsaNyajjEGNa4vK45EV313v6zv2Xv//nrpGo9tuw5MUtftz094+LzqTevXv3ZES1liUB9/3hMA8UvIPl4XzzYrBaztNp7MmFplpjt7Em2iqrHX78Aev+69c3f35b6zzXqZABVBHM0fulLUUbIIspyGoEB2AyUEQ2JAFpVrmha9hQkZlpTQlWUmvSAHgtyFQFNWitsXOfY63HPozd+PRwvru5RXAdwujwy5fjqze3ZU5QE0MdhwH2e0bM6VRhRqA+YDs/pWlBJWtPiLWWJFJNPQlIObW8OPZDF7ZhMGyH9+/OD48hxM3tV/3NK97s/ebKkEsTNAdEpiaqMFdmRVCpjZhNDAIDBWQPBkYIFA17i3skx2DWCpQz1mObPsl0IihrJL4BAUIXR5TccgLJgCY5SWUxgHaC070b7nj/jeu2zXaEBiKrNteUgANgcH6QuEFdvCQ5f7QQtLVuGKlinU4x3mLsZZmgKEjlzqP3BuoQlWtJZ/ZkYGotDl2Zzt45ocoY1DG46Lb2+vVvJS2ECZfK2L39y7tdGHZXG18WYG7L4tjAYYEWXIxd8F3QqsINgUvO5L3JPI6DVHj/7uOjC26Ir7/6vbHPaZme3n18/+7qdkylqbrTKVXAEEcTaTllwGMVcl4Hz86BmAdTs1aqiMTNPsA4uE2uRZ07TGfwW3HurMmgWhN0ZlRP09T3L12/mw8/Jkxx7By3VBOALblIkYrShQ36WFIdsJPj2bgn6z3dinHNEOMQ/biUs1uLT0wYPbSk0EApZYXYA/hza1XzNnhx1Lgu08Ju7IfNJJ14vv7CL4dPrZ3eHf8XwbT58gVO2ujYhZAOZz900ooRbW7D8Qnvrl+fJz1nuLv76ur6KyuP61UgejlAibQmLTgX2KW6ABKYBfKILJcxnK4SCARTbZ56B1BrqewQnVjFZmYWvEOrbIVAvI8AWFs1beSY0DGCQ2dSkRwYgufcchi2NxwPT4d0Or396/fnOf/rf/1v3v78Hir80z/9KZcW44DNrm/Hq92261xw/jgviyVQJ7VWzdH1SzpP51TSpFWrQF4Kt1RyckjOx/vHg5bcBw6ExykvuZalhL4jjmrYxe54fmwYwds1e7ECtYnIZhfJzGTebMd5SuP25pzfd2Hrw+SdddvN6XQutbGjKaer3YZzNIRipqAqplUJzVIjsCG6kXrHKBWfnmYG0w2pnINzNmejIB8/hnG4ur3O7Xw6TOfjOS3z3ZuXYYjH4/Hh5w8O8fXrlxUApBx+vu8Bl+PR+3j71fXY+04lRP8Pf/rxH/789sPhXCsSgCFC0SLJezZZXRFA3qf57H1gpibFDLVocelpzgS23+/7bpxrHfurNn+qaclVapbgvAsKgFIqAGa01DKBM4D5MK2N6GYV2WJ07HzgfpqXIfjAfD4/zvN5mY+IGLs+9B4Ic8lNQJuxtbLMzQQdgREoG2CIod+MKZ+j57QsZshM0koMIwOrIUDwTiUtJRW11HWBXZ/mxZuCWc6JCWqqH08Pp+HTy9dfd/1mTk9VMjsPJmPs0uWE8xn+mqGu/IkprUDZcG0jAVBVBEYgRNML4DEzJNT1FnM5NeqqRVrH3qprGgd+5oTsorqAtW7ws7zCzJDInkU+BvaZPIILcYSfM1718/B89aCZrVyXPQ/p7fJWF0Ctn5/8bPl4tmLYJWB4LWRDQLJ1bN+agIgLTA672Bex/aYPm03VHHzvoyfgtJTvf/709uMD1eqJPRAYOEfbPgKYiCZVU9Omk8C5tnOWLCBNPRGReUZP8OFwUm0h9rttj4Cx99yHftOb2GlZFO2733/37Tdfffo0ffz4Hpo6B8Nm4x05F4P3p9PpzZcvllTunx7+b//j/7Xk/PLq6uvffEUEeSl3X74JLIePy7u3S7fPu+3tl29evD+1tx9+3vSvX7z+6v7p3md5+Olvr7/8d4a8Du/n5dh3AT1baWC63V9t99emYiZXV6+aWLftUymtlu12Ox2fWk3Hx6ea8/X1DtFpse3dro/u9HT86cefkN3rN3fvPx2h6k/zOzB1IY790Hfuy9e7h/vTx
w/vnx4+Krjr/Wbotz1AQLl/+PDubz/0boxh9OSDh6vdME3n4Mdx686Hk/Mh19Q76iOxa9ost6mkFFwfHc8PBz8qj31p2oik2NhHQ5aal7TUfM5tqcKesTXpumcbJl60bc8hMoBov+g3nsuo7KL1WPuhf0ke/oX7WeOwL/41W+v9xJSel9/l9X5Jonm2r32OM7oQSs+L/9lQ8qs1fDF7AOjzB2UwkJokHQmltmkceiaPwN77+ZRKyxBG85zmvOM4BnDWcmpq6FyoZUIE5B0xtLL0fscoqpXD1szeffyZ0JMkFtkyglpw6FCDs1bFe8dMDLYz3fT8urZG/pzAWu1H3vYopY3eXQ/cR9j4kX2/3fS1Vofex46ZSRqBodY2J2uqALUWFZEyVakxoiqUmgfXMdFmjCGGIlKT5NOskjek2y02xeMpT0sTqcD+z3/7+OblYG4iZ3cvXp5LM+IwRJQNzUoVRUt0IYT+eJxHdd0YNtQty1LyHON+vx0fHs6tQB8CkbvevyqoJbXd/npaFkBg57s4TGnYf/Ht0/dvXb9BUgIg1KYaupFcBLBVbYq/xArBJfF3tYvhf60i+7yvXVbTs7Lokuhvz3zQL+v1eWf9LK4EAjWV5597FmE+r+FVn4RIq4nyIjf6vBoBnHOlVMQVoYOqIgKRMzMVIaaVQjUiRqciJo2ZEczUixQXoplJM6cKZo2RAFWRyK8MAiIBQtNKCNH5oeu3HZKzwB261aRPcKFjEdREnv3KTGaMROy9qmgEVgQsQCqgKpB0bbZynpl96NiR81kgN6nNEuY18csAgR2zC76L3dDvbsftbjOO26Hvxo59MB8MoDWRWqtZlbVaWg1AV8EsISIxI1IzJKUGLiISETN3ZqQq0FQNHFJrAkQKICK1VUTgWmPwBIJmiITkmBhd8L4rtZWcpYlpA8SSExMQEruIxOgdmqoqiKzfGzHDSnaIECCxM6I1V59oTQnEtd4BTUAEZR1pGrqL9ex5C2ogAlqhJmtiLZuugLOBCZKRweeZCiEis4qsrUqwmnBBANs6mFm7iFTXe7OpQTNlNCJzDAgaGM1xM6tiBGRG7DtAlLIg0BryT+xCDGCttdpqY3QMVBrOSaapTkWnpqw417m0lprcT+nT+XCal/tTymJMLCrr2WPdewEvjmBARCJCkEuMF6186rPr/VdUESCy51azqaGRWHXeSc1m4HxYY66AkRlSPvWD98HIaz90TAy2/upJiiRbUi3d5gqdVzEDa9IUqaV2Xg6tpU7hD9/9Ztj2x9PT4elgZTk9PKFjUfOOHx7PKWupRhRu7/a9dx9+fnw4/Xxzd/Xd71/l85znedh2iC44uLm5e/nmjY9u7PuUZ6nt9auvl2U6HZ8c92gORWPs51wNeLPbuOAkH0+f3i7He3YwTZOUbGotzzUnABVVU2siCMC8Op7REGuaANBA0dDYplNxPnjfk+9qEzUh5VZLS7GVwgAE1G03xE5bcy6EnlvK0kSmMyEDOGdba2IKJU9mVbSyY4QKgDUXdrGlJZ9PTK6WzG4NRLd6POXzk0gq6Qwg02kC4FZqScmkIlHX9cH1PsR+s+k3OzUtaZmPh37oPEPLLUTf6gSor15fffjpgw/DclDX7chGRB43O8eQ0slhc+oOn+67cSyt9ENwLoQ4aNNvvnj59s9/7KN989XLD++Pjw8T+UjOp2mptaDTL7559ae/vp0OyYClpu1u3O83x4cTok1L9qjpZJ7dcU7+bmAE09ZaGbebZZqhQc3y9PNHLfnNzd1PT6ciBT07BwoK6EkcWEMwJJa16IJ4rfozNAQFItVm5sxMDUUESl2LB6qKqgIawuqWVzNEZDN7trlDW7Kh2wSfDof9cONa7a5Q53z6+eN8PEhukk4dmaFAnVsr85RAzTmM7G9ffw1GRMrRE4W0LGiQ8wPIPGwdmE2tpuO9G+PV1TXxMN68cbsX8fYldhsFb2bIazmUaCsmzZF5z9LMrY2aiGZAIRoHs5V5AWRGaSgJZYH51KZHkEQiRhz8xmJvptKyigEjQnU+rw2y1rLkwiAqC7RF9FHbo9vc+v0fxF8heRMDArAKTIYIwBRGa0EgQjTnk+RFTHx00hJoYwyx3xarKs07IiYtsOKMMIyiBb0DMCLPLnLE/TDmVMEoXt/0fdzfXpeff9J6+u0ffnt//9DVzYubV9//8e/7rVctKo3JueD61y+JnZo4dsreD6OCcXBSluO9HPF8e/f67m6rnu6Px+9/+Cdo8OaLNyHU4/efjqXuv3zzbjohWitZQdFkavMs6ZRK1w3O90CappO5frfpa0om2Nqy6V2d3rU6Z0kh9CXnVFLX3fgYRBtRm5YjIqjh8fz+6fADEO2HWJfqMZBjT14gL2VCC8Rgy1ksSMv99g6IC3VNGpH3xrk8MSqB5umEjs31c3oC4V3/YhhHx15M0ZGZ5adPMYTu2pqXlipb65HkMWueApSuC2Ik6bE5QhGnzN53+74JLPmBKzNcB+xRoY/9GvGuVS/3SAB2BOsYQ6WpeaYq4lxYj+kOHREs6RTDYKi1LYyefQSFlMt69MvLoojMZCq1iUMGhNM0IzCtCMI5NSNiUW2qYmXtyVnvtV2I2orNi5ZSlsW7bjd07356f3o6b8a+mH7xxSvQ1sXOBX8+J2NDUEaIXd/7zTw/Bh+mw30f/bi7O53PtdZSa2ko+WRoh+VwPp2UeHD9UvPf/9NfhpsbbLofO0QsVVJrqc4CYKBgfJrnMQYi8j4M4/bjw1OTTDi64AFNxHJrfT+qFAYauq5ZldaCY8eMYFq1mhRDUQ1MDrmpEGLD8DC3SI5a3m38y7vN/qqLsStLrSDjPpDzBjxNLZUau+79Dx8fTvezJsLdb3/z22hhmo7z+bTZ9q2E3XXdjz2b1TSlx9qZ/9O7T+/Ph/un83RcUBBEOQIZmun1/nZJyVoxUfScSzK12A2qWtIc47AddikvnkMfgrR2OhyY3FI/peVUa0H2xFhbVQImYgdMJFIFpA9+XhZRa1PrhtjFrorVBmYq+Wwi0mo1DXHcuqtajs6zotaWyYRIGKi0hVCZGcicc/O0eA7eu7Q8uebRqgcSwlwqWFOBjMxmnkORVLSxKWCbT4tz0Qcmw1YECJlczZWI+95bm9/99Md+2G/GkYFTWojQrHnv27PCdMWmTVSBPKOB2nOk7po2gcxNFQwEVQ3QdMUYqAoAemlPwRUaP4eprDTNc0rL50k4/IJ2CFH04lZbb0yEoCtD9Kz5WSe4q4tnVVf8yq0Dl7dY0ZbpyhgBGCJd3lYvOcH4/PEu/8vP5qBV9YQXMHYJpHGEROAYiGjcbCwdQmRpyzhGIK7opjm/f/fpeJp8s4jmnTFyEmGEKs0UpdXoWFVKLUlNqwyMG0Df+RjCOSUXWACoczc31xU8AFlpQxe2u6tPD09LaerD9vrN7euvPz2cQ3Dj0Md+UAwiVVvOpZ5Op+U877Z9DOZOZcdd9+q68/E8p1evb2+uXy8Ft1f9t7/fv/3xXZmX6sr56f6L17/Z9hst2sVeatoM14/H85IkV0vzIqUS
Yuz7w8OBfHf78ou712/YBQR7/eplzVTL1FKJQ+xiKEuZT6fj4TRNM3NgYmk6bnrH7unpdH44bIexIs9L28QYuphzNjBGPDw8UcdWKyj/7pvvTtPT41OazukI5enx8MXN3kH48ubVzx8+PpyW+ZzubvbLiUTy9Revzw9JhXIrMXJw3Pl4e7t1MZymmpb5PKWSc9dtpWonsBm379s5w5xqJSpSciW37e6QJslJWk6pQa2fofczQ/SLsee/pYFwjc6Ci9LoOelqhSGX0bz9KmprXYSA8BzdZXDxGdmvpRwXkd3qsLxcKusHUbhcDfaZDTB4NsYhIBmIAhqTNCmlIrB6xI4Fml4EfkxIsdtOzQhc57vOUVvOKrW56EN03udlZsZu3DZR9F7YpFYkJgin40NrxuSdZa4tMIRAnXdg6hhiCK1pUV0vwo03Z2C19PuwGVyudeiQHIGZ1USNOs8hOg7sh44RgKC1rDmh1Y61pOQQ0pIsDLHb5lpPx8mqIUDHoWQRg6WVGBSIYohXt31dZk3L+XSKwd/saL+xw9JKM0X6dD+nUmpKTvHq9aslJQx9v9mSCmNdRHIpm91GcmODtiQF612s5nIqYxcODtDxuIla09X17cenB8TBjAmqi30IgybdbK/Yd357y8NehKGJI4LtBplUxdEva+eibnhu1bvQkKaXre6zvugz5fO85J7ZSrwsvue8o2d+8fNW+Lyvmf3qlX6Rrv2qXAANFD5/smcOav3j2IlbSxFUVIgZLklwIKog6hwwoagC8Vo8c2HriZxzKg7FkWMSBAQiQmVEM1h1AWtyEpKxwdqEyjGgi+hYmdAjEhqaIohIRW0qqp/3fSZrzsiMKjJzcKoCaqxkIExOBXWVr4rGwCHEnkNtqytNl1JE1ZBc7JBcCJvY9cPmuuu6Td93vY/9gDEUhVpbqVrEDEFUePUrOVyd2OtcSJqw8ytDay0jonOeiZkDAACrtKpopAqiramJtVIYsBJVx8zRSEHBRCkEJjBQ9EDOl5JbzmatlgaWAF3ogalHIHLOajWA54g0ADMTMSRTMRVdeUBDQ7q0zpsR2Zpdu8JBdA6YL0Z5EwUjM5BqLWspreT1dS43VcS1bRuYEIjQwBREwFRBjUDVCJ4ZIhFVNSQDaioGoM+6JwFbJZCEiCYi6p23vq8IlUlrBlobM1SkhUjEzlwHUFGq89paa6LNxJjMIQ+9I225lpwOp+mUys/H5eM01VKqmQFqUyBYpZ90qa2A9UBAyABgKs779SFGIHZrZtzlKlj/0daMjZhqq2LUxNDMhNYqHNVWVCXXENEzsVN0Yt64Q2VhH/q+V+XTY4odAfeHD7PBUcAcR1PK6ZhrZobffPvN69344end/eEtZOkcz0W988xYanv3NC+Lkfpxt0GHaSkffn6iSrevXvRdPH06RKLtdjtsNh/vP755dffi9Ssk55kBVUSGYU9Ex+M9ioa+Z++R0dAPYzdEBiiP7//L8vAJpBnUdM5tSYYqJauaj0FaU6lmimatNRAwE9Cm1hCcqLaWldB7p8Bipq05AgW2qrVeED7ZKgxTsWLIIWQQGnbYNEnNAA3RLUuJtWqTJtrHrrXZmMxTXs6IrokRXCrPVBappSXLObMfoC4qZT4fQMt8mtfll6ekYG41DFkTaGDiujClc57PvVHnCMp0mpfWypOJd569+7g8lCk3zt1uRzbptNQqbrtrLftu6Ppwejzu9hsARHRtTnMVEBFp3WYTxusPH34Y9v4GRmWXl1Tr4pgwq3Xtp/M97OP1uPvj33/fD/ruxw/bTV8kSVUOrGZPp3kMfoh+PjUxU6iKadSGAAp12MVa4XQ8uD6++fL2z2/fApIBlDwHF8EsQshyqefQi2sdTZGYQQmNCdDWTHAUIgNb07KwqVzaEBHJ1o5ABcdm1trzJBn5eJjKUrdX+yWlN19+2TsHxY6nI2Ol0ubDVKB0Y7DWfNy7zW7/4qvu5rXFwXWja5NNH+v5AciYZZk+5WVuUqaStBUH0fUjd7fh9vf93Uu/u2nGi7GeC2oOIUCbayuQZ8I1PKRJDMjOKImych+2W2MH66CPHWslK1DOmmari5aC5il0oEaeFA00qVXXXamJiiI20CxtMVX0wQdVLcjIBFabLI9ixVHgjTZ/x8zAjkylLYBErgNkYw/sFYyg537TpjNoIhc0zUR7H3spvraqrXjn1amZOCZ0TqsQO+cIwJCcC965zgIO2w13vWk7ffyxHD5tbvYLmRs3Tuo0PW53PUDjGHofY7epUp3npjJs92Dc0AAMS8LaoJacqiIdo3eBvBCm3PtwOM7/eP/j9e1wOh9ont8/3sfN1en+5waijg6ne25aEbyqlVRlGXa3Q4jSpKmqlVonElbjmkFUydPgYl6WSN7QOu/z/DjPj4xucPvo/Hk5xn5PBIpCjgOMzTQvi5FXjVprh6w2g2nXIdgTILvgrYKq7/2o4XDKDz0G7uJcz1VKR+zjgCxZC/GYc5IsCAUqCOh0OnkzRpZWUtWSF0YOvS85OUWUXA73rRJFINerCIJ0oWf0pCQV749vd5vX0fDww38JL+YXX++fT1aqaiZqiN6H51heU7NSW3REiH3s13Nz8P06kzLE4FG0+TAwYNMCSNWQCR1S1ebYg4KIGAIAOXbrmVFUHEcFYyJQUBWo1qrUMt9/+nh1u23Hp6fz6fXN1bfffn2e5irVB+tiuL4a5lPBqyF4BtU+0LKc3x8/gCqj0yw5p6XM3lMM+vLmzV++/xtwUNUltafjtNlev318/OHTY4zbJF5LOtczM3756vX58bDm+om00Q0iCTAOu+taC5Lr+10/9Aayu9oA4N3N6+PToetDFbkZr6bzgcQqAAB2vptDaPMSnNcmgJSbVlM0BLBU5i56Yo2eht1YDD49nm5vYjfu7t58cfvV63lp2rgsGoD2L4a7r7XM5z//8U8qn07Hh6thc/tq9+7h/uE0gejrF7fvfn63ifF3/+K3Hdenp3NRGv1m98Xu/GI6PJw+fjqZai7NMdXSPo9PvXPahH0gdq1VH2I1OueiVdn5tZnde0/k83KW2swQTQSkaO6ll6qMqApKDFrNpEpDinH0zjW1ZArpnJ0PZhB8jG4oLdfSajmYVC9sBHNaQgjEWHISqbVk3zl7zhAtRsjkPae5Mrkuxk0/7Edf2/l0PgmIaalSslAMfhy2OZ8odiW1MhcwaiocgouBnM9pid6hj6W04+EJG3T9EPtIZrlkzSL/FagANTO0KspmRGBoosrEF8EDIhFVVUVCU0ZCMFNFJAYDQAETvfSEI62KcfxvnDa/nn6v3OSz8AIAUURXO9uagKBghtjM6DIQV8Q1BuL53Hc5+V0QOl3cFRdhyMUJZwYE9hxdDYAKQEQE0J4DvFcm65dAD0MBEwGqqlifzqfewbnKm/14dXd7PJ6mx+PbH99blY0nYGRTU8tSnGdHKFVTKd77ViXVpk13g//qZptTyQV+eDrPS0IirMV57vvhVCxJDcguhmnKj9NHcq6Zffdv/s2Lly/e/vgeJG/j1fHx9PTDuxVutdpidA6
gD+F8mg7Hh/1u1xRqRSKIPnzxxW9f3H0XdrvD+WAYv3vz2+jyX//x7z+8fduahe2LqZT9/ur29jWZ+9vbf348fJDW1LUquhlGEEtlvt7f9t3gN9uh6wL7YRj+9vg3An1x95tPh0cwePj06enpUVrbjBvvKaVy9eLaM6WUltOy2ex87Fygq/04oHt4uH/16qa1VpYyjttmNp9P2trD4cAIr9+8nE6nXOR3X3/z00/fD0MX++7u5e0fXn3VEf3Df/5Pu/2+KP98/GSmX79+8fjTz3fXd9vdRrCzYXsyV/Hcx8QBjZbg/ZytSF5SZR98CA7ANDHhPM2oNUaeltKFwEHPLX9en+vu/4u846L4sc/6jdVYucqJGFcDJ4IaIuAlmhoY8CKEwF/4J/wM6G0tQl8fuQg9Vs7JLj6hX1NV+Csj2+VHP19QuIrpjMBAABBdq370uzT/6AjZEKoQeKQICg4RW2u59sGzx+ACxOhd305FqzAhaOjiVU4fPfkYqTQh7rqw/enxr4i+1rYOH01BGhRQp1ZJnWdTcN4zsUkxsN0Y2FTQCHCz2w599J45Dog0zy1gG2+vtesMkXMp55OLniTP83mu9TSVXPJms92ZHObHXIo2Y6MYXSBspaiJM6mnFDyWPJkPPvrdi6thv/n48aloZsLRUwwoAEsqp6R40JJ//k1Or77+qllp1YXtnmKux6WzKsu86ccs9cXN3bsPH5Go78bT8WE3dkMXVYTYBb/z+xuXS5nPrYEKUN/RZmOA/dCB3wy7r9FH7xirOgTw0XcbaY1xrbEn+JXzDAB/IWeeq8suXsZLPd4v3689n/6ffWSfNWWfn/1LQhE8rxVcJ2GfKfXLovrlqetqvUjSLhKmy1uxDw6olSKtIgCt5AIREa98Q6sJwROHVptqWfuuAAAFkYidqwWRmF1AUkJhYiUzUTEhQANVIwKQWmpLZgbsvXOelEMgYAMwqyaCBKCARKgkUkDWcDBC9ERB0QBBUQ0B2JwyKRBSFVAz1LWYyYV+7JwX4Ga2c66KKRA470LHru+c86EbN2O360J05JwFdkhWBapiM1UxUASzNX+IcT0xMgMzI5Fd4p3X39EqGCNCQkZCMhRnxiIViiCooZlpayUXpoXQm2vIwRDBMYIRQGRk5yqS1CxSVaGWQoSIjOwQ0ZgJzFbuZdUV2mq0FhDPSAZ60WUZrRIoEEVdb3YATMDegJ/lbEZSUZrVJDlpq2CyWs4VbOUEmWgNRl8zq0UFQZkRlVRNwNQUVFe3GwGKgZpcpJFABKa2io+gSWNmM0FACsEzkmeqvmSPOlgTzdWDohQFqIAIDA7BIZggsLUWwCg6t5RuCKfDU2klqzzM8/00T7WKKQCJKDEx0rNCdI0CXzvQjHkNnqKV3lrv+CrmTN2zBe05q8gxAKsWZFJT8rGpRR4UmtaTavHBm/MIhkxVkzcDc7UYqKBWtQWtlly7OFLfoaOmSsRsreTJe3v5xatvfvPVdHx89/4H1Xq926Dh/cNTaarq5jkdznMtNIy73aY/z085tWWSrh821/319da4OeLtzX5ZyjnP2/32+sVd7HpQcMytFR/j/uZuXh5qyZtxBLBatGHcXO+Ds8PDX8+f/qalmEorUnICUzVhBxQ4pcVUCJppQgACDc5UmqmKVFh5CPRCimjSpLbqvCMmWSx0W9NWskpdmC/DR3QEoApUUpUG5jYCUpcTgqz8Y9ZWczFErVGlACNylNbEKrFDKdPjx1qOQE1qA5N5mkKfnfNS03K6d461ZTAyhVYyIJHz3rE1wUBdF01arWW776enx8PTzEQ5FUO4vXvZJJ+nE7UueHd+ehRgQLi6vSWANC8lZXKtiexubg8Pn1Qljj4vYq3FsZeUqrYXX7z89OHD27+9H8ft9cvd2+/fffvtb/7hn/7xd3/47Tmd/9Pf//HVy69+/vAuDnx1t/v+hx+urvdXt1f3Px8BDU2muU6u3Ww7IGceYkBmqqWwGZAYY4jRCn94+/H27vXt7YuPP7/nwN0QNLfoPCADYNGqIoYgqibGyOsBW1q7xLu0asYAxfv1V26tVSQidK2JZ0NCApRWAYi6cLkYmAu0OHZX19to7urF9RfXd+V0bJHH7pqUdq9+GzZ93G6AoQmjj0QoTQVAWyklkRmGIU3nJuC70Uw1Td7HWua7l9+N19/Q9kWqbimSH88ueuqCQ0RsOp8tT86ByUwmiKimaaFq1F9dCXoeQhJhUeTLUQglazq1NFvJjMRdL6ICyt5LKSVPKAt5RCtaCwLbpWeHG3ptzMyECrDWKqsbvbWDnH7C9sjX/yuzPWBv5NhHlQRSgBDBATP1O5VsLWNHZI4ATJSdGDgkx8EDKrqglhHJdbHW7HzoRkBT74OLxuQAOPRd2PZAUg6ncnjyMdTSZEpD7FBlOifvGJF8DJvra/ZDFZVaRFWZUA2lmFQtKU+JPVHoRPHjz3+Lw+DJl9JEJFXtb3ZPy/nq6xf/4T/8pzJOm3Fp5VDMHO+QfavnzWZ/XxZpzTMDZgq+QatSEYHYIcdDEd/vS07VSo8sejIzabLklGshP5aaSi02n8QasjFRdL5OKcSQ54eEaUEwNrC8nI+7cY/Gx/rEwU3L7PyNo3i1vV6e3pd22O+2ZcnMIfih850u9Xic9/supXKc5s3VnbXKUgTpXCYD3vmAKsA9emZGpOFhPqd8AMIX1zcegLAzS0taljZxp4yFCUK4LkK7eNfFDWEZOv70+OP98uEZHICKEuGaWoWEOWXHDGQxdKiCiMENS5vVLDDW2kRLk+p9dMGlMsWwFsd4JCQyAUVCIAY0IkXiS7soKKELrl8DWcyEGDm42nRaliT63R/+8M///MeU8t/9698fPz3OS0bEN1+8jg4f758+vT8v09KYN/04LTk456hZSabAgVTbKS233TUDYMuf3v3J6jSVMi+5LWm73bsYPp1PCGEcxzgM0rmfP3x0IXz/4YFE+7HvvZuXBUCCY+9dadLFsVYJIYzjtlUZ4n4p9eXdy6eHh3EYZMpjPwT2S8ohOKllWRYE9jFIldqktiaqbBSYmckR3u663rn9GO5uN4Tcb3fb69tWOCf49FMar25vXl4FxIfHU00LAW524//wf/zf//DXH3JpHz+9+8c//qMIfffmuz99/0dP3jgg+3fvPmwGdAZffvO65fzDn/7mO2LG/dU4z8sqiJnmeY0QQ4NWKiEF71otWpuRi5FbSaLGyKataSNiIlCKYQyO+pIfPGkfPCJpa63WnEq/3brYM/M4bs+nuSynxuZcRGTXD6BYa2t5qgWMjIjEBABXrfVm3KnW0iR0neM9SFWs5KDkwsSglFttql3fS8V5EUIDrE1z6AORywnmlAXrlI/LufnYcbdRYgRiwoixakupQROHLGJmzXl23td2zsdlHMfOd4QCjpz/ZT5tanSxOLOJEPFzYKYSUdMGgHohcfAXAAKAALoKe1aBxYpwzAhAL9U7v7JKrHNKgFVasRI3gGvWERCRwsXBtiKgC5RBIIRVMPsZc63aIv0ll/o5fcNWyZM9f6A1d3MFSOYZkb
Hp6oBDxWdFiAEh0OVALG6ti24iyI/HRNfjuH3lu/h4Pn/4+f3x4XQzDGlK0QM5LBlSrkQYnXPkxVoSeMrplGpW+v9z9R9PlqRZlid2yUeUPGbMaZCMSF6kq5oNAAHWwB5YQLDHCjv8S1hgidVABCIQCDboaWCmGjNdNZXFMjMyiFNjj6rqR+69WOgzj2j4IsTC3PxZeNin+vSee87vqBIcann3KDNqlBlRL/vwer149mwDPh7HOp1OU1HQ0URc0yq7rulOD/fbNz+YFB+aHx4PIsk1jnlOV/i2bU77x2F/AoRusRzGXIVzEeuMevuHv/+7f6J/vLy+Qt8CcKkKlj97dTOeYBjH5kI06+l40koUXNdd1NOx7Kd2tdrd3a9WHQTqlpsXr764e//+xcUFM5vYUJMgrS6u7o+nlLINp/3+3qgaqPOeCJbrVb9anbYPUst6tbq8uTmOw8W6HU/pYTyBi2ORMmVC6taNb5qreoGsx93psD8dj4cyTIow1eP1i5vt7vDxw14d/+Hd/+AlRecu1guZ0pucTpV+eNh99cXr4bTteGHMEzFvlmHd5duPdT82bUuI5GouyTQWUwQSs+ViM56OBlZU0/HkAxcRhEx03sadzxA+oawYP6WCPk3iZ/TVp4DY/C+ENF8ATwP8HNucSer203zQrBLgk+MIn1SDH0NuPwqrSDCH7/WsQc2JEjirUE/JNTBAImIwA+caIyYSgGTsTKtzQSq7GAyxVkm5hAWVlK83N8UoAqU8XF+v86TeN/PGwAByLblmiv2Yp1wTo5oWIyDnssIgsmRCE1EuU3UojjE4XLQREJx3MlV1eHVzffX6C44xNvHh7r3lU6mlccHU1Agdqw4gadjtT6f04f60m/BuMkG8GfVlP7GmMee+6RpHrePOOQ2zqAa8jA5NFBApa9keM2O4evZs+/gwHffnwjjCvmsPkx1GhEp/+PZjQXz+7JkPm1yV2y6q+UzDNAFRqXl3eNxsLj7cb4lgtb6otRCR9z4lWVxfmQuh7TM5MGJwod/4xfIcNQTG0DMB1IxWnY9TKuSj5JNK4WDnr/oECMInVvmT9DOPsWeZ+uxRe8qm4flcPOlACKazsD4P+p9MS2dH5GweQrSfSE5Px9vO4KKzx/OT+mn2E5w7ETrnQJXAFECkgAGiI2LVqmpCRmcinSHx7FFCZCLPLCbC5ISqoiCRneVUQHQ8++/QZvJ6LnWYytDyMQGSsa9O4UxxAzJCZEUzwDJ/o1qqCoiamYmhAZ//UojE4E3MDEWQWRGJScmU0Nhx27sQWx+ESICB2IDIBXLNwjvnKTSRmkAxchOEkQHJqxMNYlJqlTpfbaIGYPPKxAfPjEQIhKWaaDURs7OW5bwnQIc8Y5bYNPpQas1Vaq5SRUrJnICO6JkckrGKutAQkTETOVIo85uZSSkCmAzIx8g+ICEAmRQAUinEzgAB5ubnYmLEbrbGzodhjnIDgSkaETIb2Nm8aAAqKFXzqGnSMs1tv3NIEAwVbPZLEgAgqs6bVTpHcOHHsKCAmZqYIpKqGqKCkRFonTmGKmcjW6oZTGfONbJjZnBkzkspWsUFIDWQCcBATU0ABOeWDWOmyYEwCCpmhNGjsR7StB2GU5oqgpiizQnBWa83RlKT+UZrAESMZ5OVmSkQITEoMFMfcL3q/gupCFRLLkZqRo5oXqsTIaAQFufPbmgx6mITF77vIThlRM+egEFsmk4h9CL19PjIbfS+AUST/OrF5me/fOUZto8/jFPul10MayQ8nfZNz+QW02jv/3DMA6wvrqlzu9Md1YpFmfDy5mK5aLXIXNY0aZ7yGCg+++rz9dVlCC4EIrBSpG2bnHbjcd/2C3JNGnO3Wi7avqTHuzffT/vbJjATTXVCR1RZqhmAFJUqaF5KzXkyyaUkZiQzstmAwoBUSgE0UBEtBIhG4NBEIE0VwEwYEVREymQnF7wJas7EGNq+TPs0PkBoS0qSJh+8mUqZpFQxNW0MVEZhblS1iroYNOc8nBRHEyHgPB5NsUwVOE3TAVFUyzjsvevUJq2GzHOtc2yWXdNyxo9/emt4wlVfsoVmEUPTNuV0Onz8+H3TLZ1fsV+mKV29+HpKY2yCGgbvUirdsteaT8fHfZ4c22k4Otc1MUityNC1cfd4MJWvv/7aJr29e+C+YKB/+fYPf/4Xv/1P/8N/+6//7X91tZyOD8euafaPu+1dWTbdh7cfL28u3373QKpVpFQokeU4TdVtxK2WHp16zuBYRU1SiA4VO3L7h/ubz16F18+/v78lAvMOgaUWImObOwcVEcGxGoqqQzovr8xEhOcbhcweRCGiUisjMhF+CtvM7zVPm2TPvHpxc3o8pmPpnJPtcJIHT/nmxWe+CcNUkRfIcZqOiuC8K8eJdYQ66Hjimn0gdlRMGuelXYMSA3l3KcbLTSOGHz/e0d2W0K8vLgLn8W6rUsdTkjQQEaOZ1FKKix65SUUqL91y44ML/RoNiB2CITmq2Ya9lEFrBRFTp8SgpIZqZMQUQwzOpKkl1VoIvampCPuWfAygGioamhRCmt3c5hvwHcAkaaDDO7dEdQHU0AUyb1KtVCRHGBQCuWDICoQC/aYZDjuwxI5d09TDZLUCIzGCARE59qqV0RMH4mhYm35RBSlG9rL/8IM3vX52tdvurl8/J6wP335rqeiptJtVv9lQ0zbrS/ANpmSa6rjL07Ee7iENBIV1bBpXRafjHpDIoGLCpm/bbkritJqM+/37D3cfh8OxW11USE0TW/NTpYDIoU3jYAgxLMjrPpXQxEo0jsNl69vQlCoVi4AWEEJD8N5vfEWxVvNUshWuGScUoqpJSsYhkEMNwVHJuWsvpWaxKcbclAK5Q3FD3g0wdLAWU6uVmI/DbdfYcMSpnKrAeCxtd+G5Hcv++eXzWqaW0XHto52m/d3tD8vuwrkW4zMkPQ07HxZt0P39h37ZUNeCJzZ3Ess2erdGcKJlnOKq94gPp/G2o1WVZ1+9/q++fXfbh6PKARlCjJ8uCWLy7KqqioKid56ADATPM3oyFZFaRAgDAEqt3odayqxVljS3hwuSC8FPucxvlKrC7HFGzJsSsYKpVUAUKSCA6Ewl5dy2fpjcD++2x9P46sWr2493qzau1z1V+PDu4ZQHj9Q0oVu2se1u7x9LUU/gfYj9ZtEHs3w6YBPb1q17V47jpDV1kcGFYRjbtk05//M3f4guLC4WTcu//9N3Psa+6QQoVfFEKYsBd8s1AC8XjfMdGsd+GaPP09i0PXTedytIU+jp4nrhMTw87NrIXbvUokY85pOnoMKqrAixaYIqojnmNgaZchfd5bp1jkuqD4+nzcVStDpHy+Uqm+9X/TAMpw9pJGCEcSqn00SU7zytLxe/+fL1979/c/m4/t3f/su33/xT8H673WsuuzERLXa39//uX339w4cPZsbBfff243Z/IuemVAjNMQmYqqFjkwpEyI6dY+ZUq7EjcmiERLFZ5rQTMZEKUAUsIAlKzRUAkFGqqCFj6JdtqXWmwWWanm6oXKqailczYUWK0asUAHM+RtfnPIlIl
couglnwwVARdZLShOA5UJOYg1ZAZ2oFpEjJqlQNa62IOddaSnUuAFPjehOHvh4ORx1HUo6xabsm+EYreER1ojnVUqsUMwjBEaKaHo6HkYsPHByWfB6SRZQMz8toM2JSU5nXi2cjBSKxzdMHopme5+SnjM5PNts4z9B05h6dv8o+vQM9JXPwrPXM28+nZ7snKwYA2plLbZ9cP+dF6Plb4Bx8mymjnyp+ZoqsPOlWYABqCOgIHGMgQIRAZgCVoIjO0zsR6lwhjAgz2AEUCCCYMPbL2PVut7u/+3jb+p4h7sc8pjGovlxeeg9VxIpWtWkaSi6a1YXovSxCvx2mMQsTeyapyoTe+TKVezgepjSJFQUlEFEza9vYu7g97vpFJ7elDdH7sLm6QbLj8dj6OHcDHQ+72w+PNaeZPe4u3PXN5eE4LYi6PnhHqkpkh+1dSVJLbhd9Lfq7Dz84ZuAgpp99+eVhPy1W67dv3l48e3E43aUx5Xo1ytBjG4JbX2ycc7HtFqt1EQHV4/Z+tVywc9PxFH148/GP0zgYmtYqYptnz10bDuORCZYXKw7N3f0DGa6lV8fNsu+abspVXT4ej7UUkApGYx6X/ZocBn9xeDhs97v9w5aj72M4nE5a4LPnn+/u3hyH9J//8MfLZfOb3/7lf/iP/9+Wm3/4/ferZReW+vWvn4/TUMqewFZLXS/646kSYutCEY/m9sOkpahW6Ls2hEGnoiJjVgaMhBG++tnP4L+9g59kd+ZJaJYQzwduPrVPPIvZKvR06Ge68MxwIINzcch5PLOzL+PTyxjqkyQFM4LrkyBwfjyb7Uznz9sZAH92nPwEXEN4lgU+qU6ExAjAAOR8YwZAhkQ159A0VSaz3PXOGKOLzkc0ZsM2BJVSynG5eWZUSi0MHmpFdUhwHO6cA6uVQEquanw0OBgdU7lCC8EDWNf44G3VYx845Vo0hVW8eHbdLVbg0jQ+HB9PWgvV0jsD0XookKYkBSTnMX97u38c8H7Ax6wSoqFzU2ysrDwrMMyPVJALJk9ExNE7IHBY2Ef0nRlZgWEY0IbPP7tets8f7vcPj8P2/pHIxybkKtFzqe7NDw825VdfUAjtlCp6blxQ1dNwvLi63I2P3WLRN2EaB+ebZtHBYSLgpmnV993F9fb+brFe2jQphLZbO++hegeoRtwsg48yTUzIbZfEM7di0xOdCmAWxfEsMaL9KNOA6ZNZyJ5UoZ/kweAnLzGflqdc2qcn+qfj8/SZOXQ432afnERPrPYnCd3ONjZ4uj3bk4FJqqgaExkRiNhcFM9oCo6DmqIZk59buAgdgRUpTzlMAjQko9noRkjegc7WIDOkGdCkpo4xV30cpsYhEZKYa6vVEsnYeTAz8qBmioQOtGitoFUVAKzWSavO1m8gdMxFCpNWQO9acr05x7EBJIwtNp2FlkJDwWMIDDxLczAbpIqZESGZqqp5NAiB2QMY0RztZOdIigDjjOIJMTRd45sGAcHUgLyqqJqIlmRaAeecFc8LDgJwxMrGjl21iavWYojVlE1KOjEaEzrwYDLbsRSYotI59FfMDBRrKY7njeZcTnpG+yEAmJ7faGtBN3c3PmXTVAgA6NxqikQABMhmgKagamWCmrRkkzLTjxDIZj+S6ixFIiIiIZKYELPjYCrzklVqYYKqhqACSPjJdymoYgo8u+WQiN0cNmcgBZ2Dc/PpQmBP8mSYrDajlJBYFbSqZK1Jq4ARVBXLaNWkDKdhtx0ed+NQ6lTnRBwSkHNOVefLQZ/mXTOd2Ys675J+cs9Fsxj58mL166+//NUvf/F//q//3z9KRaVWJibGlGqVVERNlVwVKcQJGQmsVIPYpEmMKxEUVxbUNLQgRnDaLELKExE1q0hGy7ZHg6vnyxcveinj9nHvwd3cfNb2C6tpu7uXlCPxKZ/efntHSq9evjKs+2Hbede0y/v9w2K5aBpXc0axpmsXq/6Uj87BzYur5cUKHJYsTevVNDZd3y5TOnjfqFFO0i2Wwdnp7vs8PUAZFl1Xah0OB8UKQGbGzrfNUoukcTQrwISUaxIwJ7XmWtBUDZgYEWnWwNGJVZsfC6UQs5bqnBlArSl4l7MAVpg952jsnRGTWJgOUqVOo+WstYoVZtKSRYtKJEKZeYAKAFym0WoFLGk8AUjX9lNKhK3oKKlOaVTNSMDoaioCJrV6jAR+rgETwTQdsdZF33uNw7AfTve35dQvVoFCJH+1eb26eaUEp8OenPFIOefddktA/WJ1OBzWq413cjzsY/RMNO6OWdQ3vqRUki4WzTBkNb64fPbmhweT6ep6+fH9+HB3/9tf/vZwf/ficvE//tP7L3/xZ6MWTZmRawEdC6sQEjo2MCQrCmMqjm0ah9WqFU+uC8wsanXMaoTGZvbx7ZtnL559/ux6d38Y0JAoNDHnyccAk9QKVaCoIdHczIEIJmJz8YaqiVQoVURNkVDADJHNmSqGUEo1RGQSOfMp3n77w9L3WNX89NnnX764ul40EXXIx7Gc9n6x8g2pjOqLgSPKAY8kD45K5lG1YLU6WR5Pjhp2vtbCBKfTw5gr75mJ1s9eOJ7G4/Ht7/6+TsNw2NUsUkWmsUgpQ1YVZEfBu9iXnDNw2KwvL2+uXr589qt/Gy9fYMtoqjJJHmspoWnARzUGnin0QMSipqAErJKoFkQiVYJMetR0smmvcjQrJtmKKiCxRx9lTEYhhJZxkfd7LZk3DM1FKYAYmMjm1YVkQ0BiIKbYQ3Vm1UWsOaNHcgEAAVkN2LeMpjUhMzGFvikJFH3bO3bBajGrWqRrr6BODx8/9MvVtJ/S4YGMuO+bZdtdXvmuB9eAb1Mqit550KRs2WSo0zbnseoEpSsi01Qw+ErYNo3kcshDjA278uaHNw/H+4STUc3prmuuAxJBKFVEaNFf+ZRJWvJqoQx5jxiZAzhSxZKmKmKsZbz35k1LktJgZPaD6lQkg1atjfOrblWnyqCt52F6QG6Cu15sPhvrkKd3jbdFDLv7fYCFE2669ThVGWvrO7GYzXmIY9qd0oOHhi1Ebhwt7h4/rrt2s1jc3h0JMHh6ePjBkVtf3DgMUy6O/VSOITQm5bTfPV+/AAVP7jhuY+iqyG54f7NZzUjdVd9hGgC0azcuxj4uDvvDi4vPn99sfnj3D0B3wT1tkhFFodRKiI5ZRZHOJmEkZGYih+wckdVaZh7KDBmeERCGJU/ROySqtRYxA/UhAJEjVJVcpZbMzjsOaqI6Vy8RAQFy0cIOj7tt3o/j9nB1eZFruby4WHfu3Zv7mqbY959dPScgZrvfPaY6LlYxcGxde9jfVUkfHneOsKFmvVze3t6PmglTVR3GMU3iBDzz3eEYYq8VxpR2u4EAgWy1WtYC5N32cZ9LzQWMm9A0rlmuulVN6lzgYBeLZcm1XSyatpWc5waT5WblH3fK0C263eMHprhaLNq2o9JUMEgTqlAABEACJACHWfX2YWiauF40FWB3TKdTOR3KenOzvLpc9IvNJoJxVTUk2Q2n24d+3T+8vT0u999+82ZzcdEu26vnF6d3D4vVWiV9efX8
u3/5IZX65c9/fr/db57feIeEfphyzfUwTd4zgaUpOXYhxipChITEQCIiasgsUk/DAUyA8TA8eA5Ns2hCLybEHrXUYjF4733OVUFTzo4RAJAdO1KpZqpijqBposi8+LeiVQVVgBEANA0nsRGhqIn3bSmKqsiCxOakbVupdRxHQ/UO8pgUnA8dk2saP5VE5CwJAE2pIppKUdUpFUZrm7jolgpQ0iRpHGpKfvRNZ4CMFvu25FIqG6NKHYYJEJsmVjFnLWBAevI6EICiGhCaqjJx1QpE82MiIc2RbJsprSpohkRPzopz7AsA6NPAc6Znni0/Om/L53H5R7fFU+Li7BtChadXA3hKlsGMQbIz1gPOutP5DyrAky8IYF4hPqXqZlAfAeo88TNAIAiMMz9EzVjUMYiaGIAqADJTETVANUtqKloHW/S+Jzs9vK+l9GGxO44f9oNDaBluVosuhjf7XTVGwqmUMiOuY/x4nBRxqoOpdG6O2VkBBQE1NdUjqKVMjpMoMoMaEB1PaZwqNawlxNXCx/bFqy/ff7hlp4SUpkmOpeQ8DccqulottFYXIjfN4/7QutB3rWq92Gx8jGbIjgHsuN9rEYw4jgVRH4/HP/7jP0qulxfPxKT14XQ8Ncu4TdthPK6XK2Ts+xWaOxz2Xb9EYsgVAZvYHo/HklNgvv345uHxHkxrkej8xcWF77r9dHj14uV4v70/7E7b281y1bdtMWgXXevC+3e37F0X4sXF5njYnR6PWi2VfHic2sYXX4IPN5cXq7Yf0mBWugX//pvv7m8fffQ//+xrjul0OL3/45vrpvPc/PbPvvzv/tP/UMuH73/4+MvPnut0bGKHDXETybQm8V1vCkWzI8HICnwcjmy17ZfDdBLyYCpiLtBv//rfwP/pbz6N47MORGffjuJZF7JPCCKbxcmzl+184D+ZgT4N6U+ikv04lpyRW58MRj9CaH7iOjoLpfYJ7XXORc6TlM0OuP9CSJjRrIiIjORNHEIAdABSpSgoMdaU06FIseCcInZtYwqmCKi5JsUAwYMjIqg5+0gEbhLIaZBpMiOpJmo0u6dUa4VKeDALVlcR2uibQMFxqmKqMfDL5zfr58+y6ZSLk0pSpVZCjG1MpZTjVPN03B8nsQ+P8u1BDuYG4JMCKiybeBrL1aurpc/DeIKcV60jB03wpXIpSobOk1BWgRA8OUW2hYu15tP4mDK3m/XPX7+qu8Pt29v9fvDeRGyxXp2G8v33e6K761dXIS7GUs1BiFFVc0qNX6rU6Fxhkaqi1MSujpnIh24N6ticiwv0AWLHcVUTenZklIZx8fzF/uFjHzvuIlYAT+CCcbCnOC2cK6k+ZQptnvDhbM74RB36EZd19mn+hFEFn9Jjnw7Mk+Pt/A3ONrcfM4/nXO1PZaKzOcnOX/jk9nz6aIbgPFmdVHGWeQCY52iYIYCKuhjMZH5JIn/m1iGCAbMXp4ATMbMAYzHAakbEs6gCs7oENia8P2pwGIAdiDVATsUKgQE1qGilSs4zJBkM5toEAlQEBRArBgJYmLWKGQAyudCIj9yskImQRECnRKJeo1T1oXXeE7mKAABVLCfJkoDZF4mmpOCdeSbnHQIDEzMhUhXReRggBmYgMgATNDXHjp0hBAkEOre1gQKoCSogsKoREbFjT8alFNRapRYtKAhpltIAOEQ0A2IkIOcdsYGUAlLETLVCyRkAyXtEAsIzH8jU9OwyQ1MzQXBgCqBnbwDorP7ATC8CmOdvUgEpUJLmSXIxned9nAFpqgKIxAQzwZt4fpqbjVMzF0SrqohIpZJQMlXVSlaVGEl1xgQqGLFTNUBCmysxiMHN51DNEG1esRNglipSa62Gjs4J98woQIoEKpWh1jQSaR7H4Xg8Hk/7U9qNZahm8xu7KZyj6MrEen7CRzt3yCEgVKmeyDEhoCN0ZC8v+t/88vXXr59/9eWL+Sr4hLUmUZuNcVWBXATLqGpABkGlGIkPba1SitVTYsc+ejU/ThOitNBxdIsL55vYNFddXNbhOBVxHU55yil3zXJ1cdkurqZUhmEMsaE+fvOnb999vGe/eP3sxeP+Yxmnvmt8hHcfHkPTdl0kUB999BGEtx/y4+HhV3/5q37Rs0PnnOfAjlMal7FXKSUL+4Z9DIGDx8e77zTVEJj8Ik1jHgdiimE1DictOcQFQVTIgF5KkjrVPGgtUjIRAc77UkNVIjQRMwIMKDNMX1AVxAOxpMnIqWrSrFqgTADBDNWECGvJpDAeHoWSpRSYRKpgNSSpSWoyLcyIyM75VMQgkOPjcad1zCmDFlOTJIyVY6g1iSgo4czQqsjOAfLsVUcAy7y7O0AtjHBIWlQ4xlpq163AQuwutJxu3//p9u5bZHbE7WJhVc3QamLfqJbNejFNA5FuLjclF0KX5FQOB62hVhv2x3a9aLvFMaeLlxe/yD/73d//y4c3900TD1M6DdP1dRda+Oxm+e7NPz5/8frtu3cBQ5Fpue42l6vd9sToEKqJjEUhOGfkyGPGHpwcc4iIDG2zsFrREQhKLXd3H6+vFqvrxdvtcapa1cC5qsocmJCLWhFA1qdNAICJqkkBdPNDhYIZgoogOmCuIo5J1ZBA1YzOCFIA6JvXX1794vOXrz/71S9fvrzRaUTLRMoKknZ1OEAa0cwFMijp4baMWwdVAKuYVCSM7Nu2W03HbToKBQeG3eVV5CYY5sPu/v33eTylnKRimZKZSs2l1mKWCxR1ZhRCRGYnRuQWy0Vo++mU8iQIQOwkZ8uDiVBoQ3dp6IxoTqshGIgwIZaCYKjoQNSOOh1Vcx53ZBOamZQ6HV0IaJ4cEM4h+gJpJBe1Hgh903Saj7r7zoFA88KAwDFhC1CtZqICamgsSEZRjV1L4DIAOo7sSKqEwEOpRF7Yex8MjF0wMATHzKUMgErsm6Ydc0Zqrm5e1DSddvdsHJu1Xy/DYqO+USTkAEAhOJCa91vd3svhzk6POE5YMqMTRDAhNQRunKvDVACGaVhd+pSKazoop6Zpeqi3j/+I3jxf9s3N4+N3uQ3OJLB2HT1OD+v2WeedSTWj1jmt6TQeg4tM4Lgt49YRioBCdbGfhkKBe7fY799HYsAEeLDpGMPCWCMXleMx3YpJ8JFlfPz4gIZ3u90KadP3OSMH6n3D5HOhSG4/HYPbhOhq5aLSL93Srcvp+HH/XdPFPFYEamhtKMtlu9ttn1/fjGaEDtVNUrx/7q2dhse26T7ffHX38Kd+vaD2Guzkozd1ZZjApIJrVmvkUtLHHlMe7sx++fzlq+3YyulxvgqIiM+8B0QAZgbknEckdsQ5p+Bdqck570LIRQ3ZRZfS4HxgJNE6c0MNkDmedztSAZz3QUpSNTKnRYWyoUnJnoNDZOdqLWg1hNDEUJbYD/XycnncP9aT/t2//GFzuWYflv1qmrJkSVosw7Cd+k1z3B9yVAKuUyXvYnTpeCo1uyhW8DTVcRir1CklMPiwu5tqbdoOiqgoYxyzDYP+cfzQhma17JfrpYD6pgsumBYo2aTGGMygprK8ubh/v+vJVdAYnfdd8FFEmja
E4KL0zeKSyaqMuVRAXK0X193Nx48fRDXVmnLJqRKomK3Wi7btHu53q1W/WC0I8f6Qqh+OBe7uxqurBflw+fymafmzXz+7fLU47jNwI3lMh+O+HoZ8bKIH0cfbh6YNoWsoxHqS03YgsA/bjxeX61zrz37xGj3/8fsfhqmqgvcREEVrLWIIwXkkEpEitW07IDCwENsYeimmVg67u4Fva56IfM2VvCcwIuTQOueRyIDRjIgJnJoReyLz5HIakGIInejk2pZdV/IJQXyI0ziSGbNH1zvypqCQ0UCKoKHpVLOQIyQ0qc5RLpqmcahHIrU5Z2EEVp1zSIZgklVrBqAktdYa2qbr2mlKzDgOY5VCSCFE530IrffxeBqlqvN+rjdC9FPNGkLomvkqEJvnFkRknHvQiA3JTM1AzBRRDBjpk7NHDeg8qxghVVMBUEC2H5eaP8W12k8/sB/HbzSgOcgx8wsQ9Gm9bmdD0VlngZ/gPuat8Cd8h5iRAT8xhkFshg6oPrVLA8wt5oazpR5NDRHIsKoJgCEVMVUTwNkxD0zGlFSufd873y9YIbx5d3jzuDMX+sBXrV8Ff9yfyEwN5v5gAhoFdkOehEEVpSKaY0QQMIuenCc0a0Ik5465LPq2R+1CPAw1m80VdFeX15G4JCm1fvz43kSapmm7kE6DKh2HoebEocm1OsRu0bq23YR174JzoVmvoVqeKhIt1ksg3Ww2sQmC+HD7qCD9OE3b0+9/9/f/9n+27hYbN41lm9pA4zRoTYR+tV4zOAXUis3VhbEjFO8wj2RqDlnKdPf+3W63XzTesb+8vuz75Sj288+/eP/+h912D4Cbdnmzvvn4eBu7BpCB42pzIYYqdTw8OsLNxUpBkfDx4QHRto+7i2Xr20BMUm0Ya99d/tlvFn/45m4q8h//9j//L/79n4/jxCbLVfuwO26L/6t/8xfff3v34cNjdPvXF00uuRSlUsV7yXBKpyaSd9qu/MOhEjVoWoYqkgMBBccit9vt9esvjZc/0W4AAM6Hew5eoMEse35ybwA+barnMNl5GKenVNGTgDM/oMEn9BAQnYf5H+M+Z1nznKT5VHJ21ozgSSQ6c0bmz9HZrvckESChqiGSc1ANrTWI3jV12jMxsYoiII45MUeRMta6ulinNF33jdXMBJ4cALL3UsWh8+RqmqCqpokApNSUynztqJgTXRmgMSLF6C+X1EZi5JI1l7JadK+/fO3amGvRmnpiFzlVAO9TKlMaxYgZpmES1Xf76fvJvTFvwVU18+zbSF1DhNt1N2Rqln1Tq7hU8yGAOAQBAg0mZsAgtR5PLpJacc2yWV6qseZy2N3td6eL9ebrf/UbLdO7b98+3D3oaVwvL+sU371/INarz30MoeSE3vdxsztlFJ2GcbW8OYxT0y9Qq2PfrNoivttcMTumBmsxgubZ5/2zr8ePu9CRA8op+82a0HnfkUFOU9suyAWAQMDn7C2eKYg//tx/PAlzzd6clT3rhp8k9zNlDT5Rhz4pPk+3Q8OfqvBniXH2apyDbvbpez7Bj+zpHAOo/uQ0AgCoyJxRIqJZ3DEQAJ7NRWwoUhBnJDICkpkysVhFBEQmDiSKmJ1jqRVRwPTTBYBo1bJjNjOUIhVOye6O7DQ4VRDmKMTmQBArGli1WpLUqlrAlBCISYxFDbEwkkEGFLPC7MEcoKlNhGQ6AXpwEU0lJ62ZdDIg4QDOo/NGztjLmXDsmSwLWqoEtbJhEwA1NsE1kYIj5xsXCNlUUbWK5FJqSjklVCMG75h9cOQNSFRU1QylFGYQy2ACxIhGasEHQsiYzrcHVUWtOWUg5yLRzL1ho2C1+OCRsECGmQhkIKXSvKWcM9tnFuR5WYImaAhSgBzMMRSdm+Q+edV4PiakCipQi9ZiVVQqGM6EwtlKOVvEiAnZATtAQnLnU4mzAAocTKWwVMrOaSgpg6tSaq0iWgmeGigAEEysnnORdk5NzvY6BSEMM1e6bdvqaRpTrjXlkQ0dKhEwBcFqVkyr95RyrlJLlSGV+8O4H8tUBc4arAkIgBGYoaGBmfKPfmQzQD7H05SIvKNIsIi8aLx3MOwf/gupSFWlqkq2GQsHMMtdgDTLeVUmUUGPzrPvQteH1XrhHSLpoluAB0Vp++b66mo60ru3f1ht2l//9V91ng/bx259sd6snY+nMeVhIqIx8w8/3N/fJ0eb1eri8XiPVFdX6yHT48M28CICYzbB4mI8lbLm9vHh9vnrV+uLjQ8MMwjJu1KlX666RTNNJ0AjJGZXUjrtd1arj4HIVxnm0H+tmsYjIYa2R/KGzM3Csy+WcoVaAYCFXBGZnRHzY5uqmInUqioA5r1ndqVMamJiJoLODNQ5rypZJwVFYGY2EVQ0hJImRbVaoJoBKhsBi6qqoqGJmg6IqIImRQpqmUqe0FREy1RU0EjydEw5S4XWx5zGUiYURhQCQgOHvqakOgEKOfTRH3bHVGsUaaIzraj+/Q9/apumXTYuMLIHqWk4OhfYuz52alBkzKeTc7HW5Dn66NKUfNO0uswpO8+Li/7h9gEf96ubq8PD4erVxYvts7dvPrx797jYLOpUU0rNqvmrf/+Xf/M3v4MKp6HQql/crG+3t88ue2emDCtqSqrVEE0PY4kt12op5y5SRAsxyDRFIoVKSAicBrmzcd3Hq1WzP6bHnBVcLVaqmemMNkZTmr33YgDGTGamZhVUSzZEAjJVRDEFYhZRJCOa2aA/Mt7/9/+H/2NnG0Y7nY7DLrEYATivUgaTGpolYa2nh+GwVx3KdJAsWdWHhrs2LJdQXBk/5mnv4uby4rPDbu8CgtbDcLp/vD8dj4AwnpLzvjpIkNGhmN8exmlMMTZu3Q2pjKV4oOC0b1yMru+71fWL1//q3/D6Sk2kJheDUYvNsgKJgYIRAqowqIMMZaJpK8NOcq7Djh0BmuuW1LYEvWBAcPHGIzgAhDoSEZQRIHOdrE6IqDlrBgTBOpTxvbv5FfUvq6yAvEMEFKnJoQK1jKSAYIxMaGAiBkpMtYqKEAK6EEJEADVhJoxmCuhJqzZdQy6ehsExxkD5JJZSs+xDE0LsjBzGjn1E76wqmOmU62krp910f4968uSmIkwOmKc85Tyw7zxGACyEWVPo+6kcck3APq42o0lc49qfXOwEJMHu5mL13fDwOI4rz310a7ykol5tHPddfz0d9r3nGPrTtN84F6gxIrOJiKqKaVgsmjqdakqNc6d8VPSE6rquSnFkCJZLYhiYXBN8Hg5SchM6XODRUSo5bC5AU9bC5ETrVEbfgtYKUrrwbNn5aTqoVteHMU8hYAHUXIrG9nL1cXu/6l+Waii1DzFDmoYffPvCMC7Xz1M1IF72z7SeuArpRID9uj1tH0T1ZMOV9KxQaw2bxiXLhx+qxpZb6FfzVVCqAJIjAoRSBcyQFJkBQEHZOQVgF4EgTZk4gJmUGl1UtTxN3gdCVJ2HS2xCFBMpgiAVCyA2TZNyAYCq2fvgQ1QxNSs1G0Do2o+328OhVNPlOn5898Os1F8+v764voykU8rDeCBz5Ng3/rNXN8e0jy
6CY8XmetlPqTqurieraRFohHT74cGkAjH7ppSiGPvF8uriYr+9a/uOGt+83fq22x13ucrbu4eL1RUzAdUGSEWO+1PXLVaXVzVh112WCQM3l8ur28fHTRfzkILzp/0xOpKSpFZ0IaWUBmmiv75ZHofjm9u7dJwEZMppXkvFJjBTTeWoh77varWP7x6ubzbrTYss7ZJE6+3jY8q2PY0k2t1slqula+L6mZ+S27y8nIZpg2u2kou8ebcdxiwP9fJy2TGn4VhkXF8vb9++L1PG6LvV5dX6KLqf8mQADj0CzNWKjqiUDAB90wffGFO/uJryfjzuRSo57lc9OyJYq2qeMqCVlKQmqzYckpnRDD2D85MDeS9izgUwE5mcPyHMjy3bGIMjssmc7zy3asX5mHLxLvR+meUUOJRa2KGPHolSGQwFCaNrTE1KNVMfYhU5Hh4IRbXWWoh4fnxBdFXBtZzTMBe+EnNoI4AiWJpOaUohds7HplsA+OF0AqtEnNIUQiiKlp/GAzMCMoQ6T8VqgGCqs0yjcm7iJARRferNfUJr4DkONs8lZybw0ypcFZB+nJDmP3FuHf9xov7UT3Iev5826rNONBvan7I35wQFAp6ZlPY0U89FzvNoLqqz2ESAoqaEKpDFBoGnfSYYopiJgiKqgRja/P9uHsjNrJoiXr/+7Je/efXDH/5wuz1MqXRdMyq0nd8sOQSux4mRRUtWiy6MIocio6GaLhz5gMRIhGrAaowQHDfeJ6lAcLnuyiSMXExL0alIu4x901xerd798K5Ue/X5Rdf6KqRWH++Pp9NxfzwxUfBhdXnVNp6RnGudC6vVCmpt2uViccmOHDMyIfJwPKKCVSbGdbc5HQ5VysXrV9x1/+Pf/+2v/uLfrNaXd2/umxCJw1RsuWzJh9NpfPbs6h8+/uPzEEDFB6cieRwDMxO9+/jmdHzsutA2nRLHfsGhWRPtd9vxNCzWi65blFFvd2OM69cvX5KD7XYb+75WODx+uLq5Pg2nw+5QS0m1tr3vV4tu3U7bg1lYX764fk4fH+52t7eAdnnZ3O3G15tXv3//8PnnnzVo43b34tULr6JT/Yu/+vNvfvhWTsOHYei70JOWUz5C6ZvNquuO+3dI5qIH5TENgZ2Ljal4ZiDXh+7utPvFX/77D7enH88mnHM1P/EJfRJ0fvrxOeNjc+k5fDrOTxrPWXjFH1WdWQA465SfdJ7/wjMCT3mSs1bw9K0+Oe9mLfWTqvB0BeH5QlJDJAMH3CI7kRF5HuchLiN4i7E5HoojAK3gHTiuqfrY1CqEGEK0kpEJAEuZ0rhnbhCKaTWz4L0RVFAk6pnVmFTWbWiDLRZNKZaG02bTvXz2bLVZVLJxGBkMzMZ0IkRCtnMzoBwP4zHJruAfH+tH8Af2jePIzofG+5DF5W7xTw9w1T5b+TX46SDb637d0Tufd6tuaYSMDhwAiVMvZfKtLyKSJuUQQnP14nPVlE7Td+8+NC1f/OyzV1+/+vYff59Ewqo9Heq793fYtauLqxCasWRk3zTdOA2mkEtqm2C1hjYwJeeC+QU266IgFReLzWk6dTdf0+Imvd/fXN3sD8fm+kURoLYXHwFLIUVyM7oDmebOrKfud3giCp3lQbAZun/ONM53ODC0J7vQ+TR8YljZjzrRJ7va08GxJ2FpnsXPFrj5q+cDNZ/Vp9jZj50D9uSCm++etVRmZucQsFSYn3K9JySaZ301Ey1EdMZpASOSIQLR7LiZrShEjFAdOYUsVgHQVAFhfl4iwpQmNSGiYBqQREWKRFcjCSGAqIhJFVWRKnNrPAKhCwoOAQkKGmhFY1SbWRuopigFKc0t0NWszsProRBzbBrnG/QRiDFE9aHpeiAE58w5mS16BGKKamc6HjkXO2RH7NEU0bhUGCeaE1illFzMdG50Y6ZZiZhTYyp5/lFWEwLyzpuYkfMBVYqZ1VoRsNYCSJSSEjVtCwZARN4bGhMCgJRiUk1VEaUW9mddcPZ+gRqwh3MqfHYGC84Xv83rKAAkmBk9yHMwDWuxkkHETHD+2c3MMzRyDGaGhI6QHZCDp1vJ7B7+dOpYPWpFxzVnMAZJevYvGTOdfcaAVSuKiWhVRJwL1ma+lUOwIoaGCMSuceQiRcwjomkpU0oZAQE9ArMrIrnAYai3u/HD4/DxND2OOUklpNmxBDONmxnMDGg++AioarO1ionRgPBsQkKEtnWEoAal2GGq89/sLBWllAgInEdhNGYSASipAgYFAzUmAoexiU1HAllNh/HY9O755bWhUet9XFHxf/qHd2Mef/ObX3/565/lYXj/5o2Py963Vn0RYUurruzu7374w/dv39x2zWW/2gx52wS/vFjc3R2HEzroGsdN45Cq8xSjH9M2g/vlb/4M2lFkivEiNpGZVUylxHCZ0jhNx1V3GZuQZZiOj46o7xfjOExpmg7bECMQITIRaM4hNqWMaKWqAlCMzsVL8aGkAQ1UUxpPQDDvxVSQyLsmTKmYlpQm7wMCgqpnbwooBGDEEasyAYgaGDKrlloyYnDAUjKBilQR8DGoJjACpTopc0GCXDNBRGKpGVHVyhwbKSUjOlDJ6cDYVGFjYoMpD4EXtRYAJAPnHLFHR86R6HQY90ULsTdMRcu0H2O36VpnUKfTiUZX1cCkWy5P48k3TQheDJ0PeSzYQAjtNEwGudTS9+16tRmmtH/cG/HyYvXhu7eziHs8HW9ercThdx9u9f70+Zcv7m+3u+N+efG4eeYD+ZvHxTCmZrHONlxcXJjZVEpsHBscx2JFBL0xNt6TWJHs1ZUMRsXQiLwhhiaq4DDkIdf1umsXbTKbRmTyuQ5iCmBVK50PPyKzzAxPwyp1puCfWXAGCAKo89U+xwhUVWpFx+fxIO+PwxG970LkWrtFi1mm7YPaIbaUh8N02stpD2ahjaZG3ChBKnX47v1p/Oemv+yXm3j9i4L9w1QBwjSe6vgwjoOJsW9KKiF2AibjaFW0iNS66pr1cjGNWap59GBUD9Ni2YLWcdheP/v85a9/a8ubZC2HloKjvq8VAQ21BAY0cZBK2pbjdtrfq1bP7H10/bVbvSYX5hU1s0crJIkk5enEONk0IYg6rzUhoVrH7dJUkAqSaRlBJwarj98FzWH1dZXOiIA8M5tkxAIACF4QEJjZl5zNLDStgZgC1ULeODZaBQ1CiApWSxEdmq7h2JoKa2Wyab8DyQ4JOMZuyV2H5LiJVRENNU8yncpxnx/fp3HryFRgGCYDmXKi2KsZWOd4NZ2qOcI+NLE1spKOAYgx7gcBqJryZbcWHJ3jw/EHxt4rJhADRJ28gVUKyOCWSIsmLD3KIT1E3ylqrgczQrdxTTdNOzNwWqBOqUwcmiQmEltUqRWoSjn0bsPk2fF2f+fbheXM6A/pdFuGr7/4oghIGa1k0ewsxG4heZ/yEWmajod+/QXkiWtNYzmYdssNuBA7yXny1pj65fLaKWM9OSvj9Mgxegpqmm2M7obqKQ17ZudDj1NsoA0UTh+3VCJ0jYMuuquSH4zk/fbDghekA8o6pd3ozt46ZhLDuZJGT
QnIAKtUx46IpFbRqgCELrSNydzYrcG7KSVH5BwiOiZfpNTZPFsEAb2LaoXIATGxEHFkLyU554uqVqmlqsGYE6mAYN/Hfd4hV+fweBqfX74ITbj94U+tb189e35Io4rVPEo9tgG6ZeM8bx/2VoAw7B4PZBaojLtdFW2DzwVThXe7e3bsQhubZj8OQM6AapV+2az65Z/9+hdXm+v3jw/vPz7c3t4Ph11C6VfLxjW15ppz6+Pu8eOzV893WqYy9n232x0ubnpAc0Btv3jz/n1Kk2PIVtqla2huFJXLi4u66A6H7WVcowuHw1BKmYp0MeZSBh2988tlv53S8941gXb3D+vr67ZfXHcrSXraHk73x4fvb9V4cbFulwEDIdXhOLJzr7548e3b+2nUWqs1Htty/cXF7ZvaNcvmBb/74cO/fP9xUrm+vrjw4cO7D4BQS0UCIiImAyPnxQzY7YfTenNxGnZVpuVyowq1zo/LREwKFtqWGRZLSONRVL1zMz0nT1OpMjfXEnt0KFK8D+TRTJnJRA201jpMJ0cekZ33SOTZi2GBsbjzks3FphrlNBA5F9kxnU6TaQFEqEmsjOVohtzE6HgcDg6x5IxEKKWUUQWcEBpUEUBMBZq2dc4FH2up01iGcY8TxiZG3yCjGZpC0zVgVsZx7qcHADwDUUDn500DPvMJzr4JMEN8qvrGT1ygT7PPLPCw2twNBWBGSDa7zp8mXVObdxX4BBGYAZOzgUOfiqPP3qB57z57SJGQUGcsNJw3k2e1zj7xjEBt3iIaAsz2clAlQjUzhIJYBUBBbA67zf/Z+CmNMf+ic1ka4WznAPybf/onq9MC1LkmTTmSrVr39csVj8Nhd8rZDrWcchXDUy2j6FjUEa167hxZlarqHStyzRUVAWisxcBYUYv0fXeq9f40VrGmCV+8+qLrw7Ecr1++IuybwGqGiNvtftgPomLg++Xi888/H6uaQd9EYLe42MS2bUNYLDZN2xtK03U1ZzC6ePYKKjw+fthctp6hTHx/++Zh++H1V7+AP6T333zT/PovQ9cOaXAO99vH65vn7DxY9RySHIPjeQQqUgCNmQ/D9vu3f9JcLjfrbLy+ulpeXtdxOu53w2lYdMsp1SpoIT57vlnG9njcp9Nx0bZNaB+G3WW8xCJhuXi5WaHS424aj/f3H09E4IM7HHd3d/fLi0277Bfr17dvvvn65y/g998Mx4fjQ/5uTM8+e/bbv/rXf/Mf/8Ovfvb88cPH0+5D25JftdtbA3UvXnS5gk91/3iAMjlypyw6iUo9pWnRN1VrDFgJF22/+/hm0fWbzUan7dMPH58sQp/CXj9O4/hpop5HkLOvDeAMBUb6lPoBsyfGFj4N+09/eg6g/diB9WQ3+eQfmX8Tnxwi+MkWdzYu2Vy19vTZ+UWInnJMABSBN+SWoLDaXJ7evI0AJVdiElNEW/c9qkYfPOgxn1Z9ByDeRx9aK0pAzJDrQVHVMGtiVmYcq6ppUVHHTBQUrjv34tK3DtI47Q/D5ap/9uxqcbUYyyQ1QxViHoa9d2RGRS2EMIxjHsp4KI8Jvkuwg3gyV4AgwWKzCaFNKaWM7HS1bMeJx+MWfIVVK4p1il+0VyK7wMChz2ClTqrmSKdpFEMwRormAqwv2tXSdytf6vb+h8PpbWD48i9+dvthd9yfXn++2T3A+zcfPfn19U1oujIlBOjadsrV1Lp+cRqGIK7xIae8uL50/cq2WxBDDt3Fav3sZwTMjtgTTuR9U4twiBxaqSMTU2gQiZmRGZCenJEA880K8Un+O+fFnthX+KNa+FOJ8NMP+qwuffrRf7K52ZOodD4/Px7YH7WkJy7SJ93+jLOGs9xJ/PSFynSOFxPjHMF0iGTnP65gaFpFo2uRHM0tgKjOBTEFcsae2QNLZQFMM3IHZ7uoyfkCAVA1Qii5TpS2AJ5ZFaHqwpmyMACYqKgZiFQT80QwV2QSEzE6wLmXitm7Kgq1CjIxoVI1yARWq1aFqehQdd6u0FibKCHWxaJnrYQ+1cn7COgQNbJ3hA7Bz6kzMFCxNBkRhjiTA9QA2MW2Zw6OY05jGk9qWktFAFQgJkdBzchjJQIi06xa53A0M9NcsYmIoKaViJgQzKRmzayOHToAVkAiAvLIRoAVFM98MgQRJCRT01lzOXeEkjFqhZlJpAIGgArASGd35PzWjCYoRWu1olYVzvsvImJDY3YwHwZ0s34ESOdOPZhZ57MWpPONzYiNAgdC1VyrCIEZu+AozrUWZoDqHFuRykS15Ccq+gwKn9dSygzV1ACNHYfGIxgn1ZrSqKKRGK2mUg/HaTukD4f0bjvcD2kotYjNbgkAQyJ6MgCdwexqOD/7Ic4tdSpA7JiRwJxjAOxXfYwxxCaG9r+Qitgxz8jSUi0nMyUWJmACABPJ1bIHX6bTZOBbRSUzM/G7w4Fd8ErpVE7brUr57V/9+evPnj+8+bDb362Xm4vLm67vEOR43BHlYdx/880f7++HGJdNvzwNxxjNh3B3vz/sR6Lec2y8M81N6xFBcuqbxYvnX7z7+O76YtX2i9A2RM4U2FEbY/A8TNK1S1U5Hu5LSXMN4/E0aE2m5ptYpaQxS5HYRmwcki0XXTodmwAoOo0P2/tHq5JOO6mGWJkDqCExGqpUJDCCEJxWTTrVaojEjsWqSSUQdk4kqRVQBpsNlmYmAIqgVcRUxepc02WqqtmMkT2AljLF1iGAIZlZSYPUSeqoUFVEagYgQiJDMyEglVTTiIDsneSRnAuhyTl5z65Mw2FbyzRMQ9v1IWrje+cj+bDfH7UP45hj2xpkVXTeyT65EJqwKAroG9ctfW9oomVsl8s8jmkad487JOyWFxc3zf2HD123eP31V7fv3texLNeLP33/p8v1zZefvX73w9vvv3+37Du5S9//7dt2Q9kPf/7nP/+7v/s9OQ0QTexyvXnc33piE+3att/E45hKKZazIwYkUgzeiWRqGzOoalaEHIGYFj3shtx476I5yJKjpyxWVEMTail4foQ4Z0elFuT5gZ8QWaziDOWZxV0kMlWxmTJudn5L+H/+3/8vq+KvXr18/ezaqYzsAkMZ7sZ0HE6nOpZp2nfLYKblLqPhcZ+OQ479+ubys80Xf3Zxc0OoSqBKgMk4gbqEnIuRbyN7qCKlTsPoQPs+GnApmYDZRVEgdmnITdt5rJiTC3z5+udXv/3XfPUiWWQf0QepWNQMgDRFTHI6lN0Pp8MdkQvdhrtrCK1rFoZArtM5s2sVainDwAQqidhh6BWq6xaSJwMG7Y0A1KljQGAQLSP4HusAkixP6e73QYXXX1VbggGDgalVAYdGnhHBKgBR7E2SGnvnpnEk5+Z3FmYHyApz0M9ibKtCLYagoQlyGrWY49isW+KAPgI/3dykap4gjR6rypCPe8I6HfamycfATmxSISURZp9qVeImxFqVuB6HU6kTUAPRL31syJ00lTQk0InAh2cyVmeGnEvFk4kTXDWXNU8oJFk6t4w4dZHACQYUqZ477zamMuRjDQvSdLW5zA9bArfqlhPjYXyEUqMjTxd9+yqP
Qyp3XW8uMFrHXSPH3euLz1xRB+Z8A1ChEvqFGg3TQIhMneuW1eKQblMti8Um58rejeMBxbxDBjmdDqGJtYyRPPhOwcXg1xWtOAKfTntGhFxc3xG6brmQqQTvnHZmnFwEH4exymGKfYRymuTw/Jd/cfd2yBiai+7pEQrArIo4x965OVXOFFQt52wqzjtVMTMAcmgIIIRVBRBSTaRE6NSEGJyRgpJzACpQmObtjSIAWK0CgDxNyQAVzrH3UqFrFitrvv32jx7rark5HPbPr67H3WE4HS6X14buYX84jMfLptVUH9N+c7V692GruarUkqVdrAlpGk7dOmbinKs56nw43u3RqHdNCDGEuNvvHfDV6nJ33PJot9uP97utd9+8fPn5L7/66stXn2tKbz5+V01lquqb4/7oV9gvOiJYLPrbx/fPnr+sXIUotutxd9+2cX64aFv22FXVq/Xm3fv7mlwy/fDx/svrm/vDbjedSk6qFZGOpwkJfCCTdJxSIH/cHZ7fXPaL5vHDPYfjapObZhWCS+PUBN+uwsf377/7x8fnrz/zq/7V81f/+A//fHG9vrlcVNk732wfd6Zh/3DqnP/2j9+EhteX/euUv/vw8e3335vM9pgzHAfICLnmDIih6UxtsVgQceSg1ji3yuWIUOskwihEU6rOeR+4WK2TqtVpGhGBmNG7xaKvJZVcaz4BOGKUMhE5AyiiYKYCHKFtGgBjdobiHNeSZiVKAMxMiirUaTgRgIHjyTOKqSI772OBSiBoakhpGioRArngkJlo3juqSq21iiohMGMMIY+DOm+iANh2DQLmMhGJkrVtX9JUJUutiOjbRupZKjIFPSszZJ8qw9SI5iZedIh6hhQA4syinmdc+KQOPT1EzsKrnT1AMxjhJ4GbeR3+NO3inKw5L9yJzkMMoj1Brs+9ZPO725MeRACqpmZEoPLp+yoCMvPMw1AwQdAnwlHVMxh7nsTnANqniBydh/ZPySJABDQhIKrwd//0x8uuf7nqAuhl51cLz1D3Q96f5CQ2TBXAHPGYK4FtPC2CawJaLU0XTWnKqWjtIhOymAFxaONmsT5N+XEaHw4HZXfz/Nlvf/ur+4/3b9+8b1ZNtwTv7DScYJLo/apvTFLJvNxcPnu22W0fycXgmmfPn21e3hxPE1Fc9AsRu398ZCZD7jo/HCZTcME/e3V93G4h+lLpt7/+i//m//X9hz/83Yub5x8/7IOPxDgOx2W/2D+OMpkKEeLpeOj7S0MSMdVSBYn9NJ0ePn7IadysVgau6dpF19c07rcPiLBYr7e77bPnz7vNxtgtl4vD7UMTfd+uEeDx4bbU2rX9mEoWicvIAK+fX++85tXqsNt6Z127GI+nUoaH7+6NXTry45uP681VDKeLFXy8P/3pX75r/PXr1589fLz72ecvx91hOg6ZRhjq/eGudcsXN5vPn6+Pvd69uwcXu8vN8Th4qS2amDgEy5W7pl81x4N89vVfVasp55+KQTOG6ynRAZ8OyZON59OcDWdVEp5QM5+IMwaINhNe8Cez/1koetJ5ntSjT2M/PoXVZoTN05h/lmbPV+Uc+5xnsPPO/szAMaK504eBWuTI7Cs5ZK/GYAHAT6lWVWDwwZtoQJen0W9WaicIKCxaBAiL5CQjhJCGolKYmMmpioKhY0LGLBc9f/6sW1/4h7vD6TB1Ib54+Xx1dZUlK2jTBM0pHYd1u2TF/eFuklHJNKmVGpgQ8JBVQ2vFAocutJ77YarOh4uuQ5KmbUn9YTd1naOLz37Y3QuY33287hHhdDodfbfxLhB0ROQxd7HTKmkYvdXD7cfDfr+4umlWNy+7/nR/N27vbt/v+kXAMgwPd5v18v5heLx7cDF2F88w+CkNzpMzrCoNd8gO2XW+lTI1y2fATalG3mWBi+vXvrs0FHGNUBQyQDJFYC9ApvMuu1EBctGIzz9H/JTcOZOJ7PyJp7OB+OOPfD5Mn2obn/xET/dG+PGXfdLUn36XnkTIs0Ppp18Ln7xvPxH65zDuJxsomBkyAiIzwzl1KAggWh0BIBHyHMIREY/nhB0ykRmYEylIxOxmF6qhIREy48zvQQZVETGYDXJEgCKaSj5OzgM7VaM64eTRHCgAEDCYITDMbzkmZkxECIwI1LZSKxjUKrFBRavA1cxwFkbBzIghOm9Z1MCAVKzmOo6nAOqZgw8ixkCeGcAIFRHnAEOVagUBAHMhVZ7hHbN6AkzEHCMTBs+1llKKmqoqIDKZ8w4AUBUKqiCqmz05zJ6CJxUzq7WACoCpGmFVqSVNjADmXWwQyQCRZjOXsnOmFWcmtBGK/pgqQzWrTxwpQobZTHR+UyZ4cgHPAWwBragVtQICMSnMSjedw7TMAAQIMKfzzvFvQQM4p8PhbJw0NYT57ddslmUMEMizIc5h8hmLRKCgSsLGXAhVXS6TmllVFVUtqiJmSO7TNQAGSC40nQHkKWXJdRpz0f1peP+4f3843qc8iNm53tRU5wCvIeLMw1IFBAJEmqnes1Ym5p0jOqtGaoJogYiZY9t8MpA+Ya2LCqKhAZKxEQdzglwEANWADJHRg4AoEhOhQQCPYjmVjtpywu3uw2qz+vNf//l6s/rw4V1J06Jb9/2GwI77hzTtieywf/z+7fvt48HH5XpzWUvadNQu1t9/+277OKC5dtMGx2jVOer75VQn0bS8WPzx/R8u+nW/6AMFEwAOKZUQgw9xGA6i1UTnhrLoA4iq1ppTdGwsaSqMtWk9BI9Sp7RP4/iQh3TciyYAmP8fWdWmWYK5qkrEkrOUPLfjKqqWiZCb2BD0JUMtVVXYkSmIZSJCxjPGrBoQEgQDJmBkbwKSC4ECzy8mqgamBgXOqGASTRSiCNSqJgZqKgJmZKBqQFBqUStMVlKuUxazMQ2K6oVFqpnlaYQgeTjE0N1srkuRdChsksIpRl3drCl2q5drKYVc0/SL2HfkY7tao7FHNdHp9Li9u695aqKP3iPBYrUeTsPDw/tap9Xq4vLZ5XG7LVN+/vrLuzfvclJU//7dN7/+s6+2u937u9uL6xfcjlPKrSynPH10737xm2ePD7uuX92/uVtsLi43r1FlGgbJpZbaeFx2oSad/fa1qmhpQlBFYmOirMAi89LTRzelihXUzAdYR7+fTNJcDMmms8GeAEwMyQjNRIoCMoGBAZFoIXIqld3M0J3PPxqcgb6/+8ff3bTLf/7mn0mnRRP6ECnVftn4COubi2F3mmTSQ5IiAclpuPn8X//8N/9udfOCEctwwPQo6ShjgpTJiuRDzsdhOLXdoiQoQ5Kqbb9Zrl+rIwFn5JHZO8jHkwvkvItE4/YxOjpth8tffB1e/kK6i4IekBEYRX095u2O2UMeh+GDojm/CBe/8t2GfZCqwGTspRZFMq2SR0IjRO5XYEa2QEJC0pJECzIbADoALTBvnGtVIgWGsODQWRmpjChT3b0lFX/5ZwLOkMhHM9RS0Lv5ZmRIRGTooLJr+5omahmZXPBS6/npTbIPHfqgSWJAmE75uIeSGHxsF6714AKFRrXYDHcjHyOoTMPDx/3De44
I0IBfB0bTI1EFS+k0pOnk2yvXIBCQDlRSTchggE5MJA8QgkcmDIArq/k4vF8vQx+dVxqULSwO42O0MspWUZHqOKTFRQ9Yl6vm7e2HDT0n8m27HPep68P14uJ+OjJCg9wsVyx4ShKMB4vZwJk1FjA3mHLVYs73y02ebstUL9uXyfyKbbe/j/21QWfWYW0L1cjrrDtCpNDNMbc2Lj24RewJuE6F2IlUYgcgNSk7ypBMhD1gYD2CQXXcZK8lT00bGYuJKPJejs/iS899PT2s45q5WhlvM4b1YtVaHaYPD+93uwNYy8uX81WgYgrgnVMRRRCtjp2CVS3zDoKQCKyKgBVTc+xryYRI7NpACFyrxshN045jVhFEQPZTHhGMgxcRQmLyqZzAkJDZeUWrpbBnF1fH/TGdxo6iAu0OByIyJN/y8mKDRe8e75oYGEN0Vgy6rnv74a6W1MfFpECtO057S7Vt4v64Pw7T6TiaFGBIkvuuI0Cr+ViTCz748H5/e9EvLy9f/u6P3/gQy1T++M9/fPfuY9+tX7148Ytf/vYwjduP9xw8AgEjEC7Wyz+9/e6r158fD4ciqdRyOJyGNMZ1470/jCcDswLeN9/86Z13oVt1ftl9+Wdff/zhbZlGEiAVVMQZAIJGCrXmUmzKaRrdccpffX79/MUlgJ4OuzTklz//as1y++ZhTLW7vn75xc/++C9/2L/7cNoflpeLb7/5Pjb9cXtcXCzZu+OpBB9WbfCh3Z8OKediJQS3GydQJQJTdc7DfMkyiwF5732LRAaW0lh0AqWcikFlYiQKLphK37czx4E4GNeSdbW5LHk67Y8pZxdGMgOEKR8QHCEROkDnvCcmZh9iyCURzr5naWJjWhAF2QFaShmAfPCO/XJ1oVWlqpFpFTADk+Nhbyqh9QAqKg4xl1pLRTR6Sh5432QbECEEpyIlZauCFJxvbfb0oiPk0PQ5TWM69E3fNYvCSbSmnKRqLk9w97NT235UgtTQTBTBDOisoKgC0+zQPz8s0pOtiBABcX5cmxMU9QmKBzTXc54fZef2lvmDp1qnp2FHbe4Mml1+iDC3ks0jzPxJwJlAhHj+pkCMs+9ezdQszdSkeT9pYHCudZu/zUwvsnmna2ce0iedaP5AQc/yIiCjWQUlOEhpp+F/8vPLTbDtcXx4OEyjnLKRZ2JRsVOqGWAR/XXjynFoOCh5QgaTRXCAFAIex9yEsFh1Q9L77eF2nA7jtF63P/v8i9B0v//9H4L5y9Xl6uY5NaxWVqtYazk+TPe7x/3puFrdVMEPHw59u3j+8mch9mk63N/uyOw0PNyq9m3bNovD8ajjcY9y+ez5ar148+a7/sufb9av9/v7Uac9Xfy7/+X/+r/5v/3XJR2Hw9047F69evY3/+kffvaz32zv93nauXCx1XHle0YoaWy61sDQbJzGXMb7D+9X/arv+5TK8y9e6SkdH3dV6vNnL97f33/9i5+H2KhnNJh2B8uTianp7rg/Hk6r1fLD92/6rmWO41H7fmXsl9cvdoeDHcvj/qHKcLlZLT1fLvXbP73bXG4A5Hg6HfYjaVkvmno3/dPf/u3/9N//q6HgNx/fRNf++i/+/OPbP0YHx4Hvdvbw4ftf/Ozq5avrL6+f323HD7uHScW5SL5N0wgIjj0Q3B23Lz7/1bOvfgGAF9c3T+M64MykmCNo9DSHw9xhBnPwdD7KeLbinZUgwrMScA5iAsBTYdoTKGQe/89pyvOUDj9qUE/+o7O36Wmy/4RDwqdXmf14TzrSrFDBjOaYwx+MrgO/CM1CR4cIjtm5WLMiso+BiXPJixBNhCggBrQcfBOcS1I8Ri2F1JORjnsWZWDHXrWMiuZcS7Bp6FdfrLoGHk+H7TF7Cjevb7rrZSEAwIhe6zSNJ6nTNNVSB0J24QIQSto6B4y1Gt3m/EBQ0C+7ztDdnnax78gZYG479/b227bpO/bL4E/Dtunbg6y/HxLc/7BaAFIwyQ4zUa5oBmncAwPlaSqeLbZOxuHdbn/37vL1F5tffEVvF7t33+aHaXN15ZbL7e398vpi2h+3t7eh6/xi43ihUhxArlIlN40zY+aGVzFcvfTspzSxD836pnv5FXfLfDjEsKrGHAOS05p9287xJ1QiYgOk0NknPPqTTPNkDZqpVDo7Mn+Uf/BppIVPQ+sn+Wh+HQM9C5NPB2a+qT7ZlM7koyfr0JxC+4m6hP9/L37Wjp4O3HwRKAgoIjh2jNEsm1QiNwcf57eAamImVYr3kYDQVGolAmY2x+IDlAqYPccimRAJsVqdk84AoKIE5yS31FIQJqV9QaimDA1YRfCQGY3RGMkwKKPBuakT0M+XIxFRiATAgVQUAMjAEyuwmFYljxocJeBIToBFgdkbM3JTlK2CZQhddC42IQRPMbTESMwuOOcZiR0TaqE68wHRDPCMV43ESJ697xRMRNWk1kJAAObOvCcgcBXMyDsiOq+FjcFUhNGpVjBRFRMTqGCYSwEyIiLn7MkuhmCOSZHhrNcAzT2k8zFQnXWemUwEKvMP5GwnUAUQQEI1QANTqNWknhlSSIhETGp4XtUgntdAOvtv51OqZvapyUJlFsXOmyJAMKmSE5h6x8Cks9KkczzOzFRBSMHIfIimYoSmqCyCVaWUUrGWovmshSMiumKqhhzaSKGWEYnK6cSMMFdca821plLUFJCQgZ4WXXbGoBsygKEa8JNkRGY8319NcF6weUImQF+UYgzzVfDkKmJSgVpyDIFcMIXZklRycSahYUVV0DY0zikwn6aS67BYdREg6TSlY7/u/vzf/tky+If7D2Ky3iwvL68J/TgOw27HLLmUt+8eTidt24uu72oaYiAf3Pff3+62Y98u2q7zTShpIig+dMmqkq1XV4Fj68tisWRyLoS2bceUu27tKFatJU/OUZXUd61T52Nby8CsBm4aBpWqUpvG7R4+7h9vyZlaXSxv+tUruH4uZTLR0/7APhBhrbnWbLXSTFMnJBHnUOoECmaapgGRnWurzPlUmbsXqlYoSoZMTk0NQEzOvaCiyOTY52kQrUazyYJqVTAgAkYPxqA2K0REVAVUVKvAXEMiAuZVimp13okU710+jT56NDLAXAp5bJo4TYfQBgQaxgTgXLOK3fXMA+gXz6d8Cq7rry6Nqe/XY04Ym3HUdLyH4X7/8MGkWNVSYQzBx+BicDE2i+VNoOPu9sOb7zbXL9ebi5I/puPW0FznvvqLn/93/91///d/+/f/+t/99f/nP/zN3ft3LNmQfdsfHnP+cPLKwXHnFS6XY5VtsbX3wG69Cma1qsSWYuuZGIhTqbvDXhAliw8Yu8VwHM5+ZoOUEzGXnNgFIAIiF13EnKesqgg8q+sEaKJAoLUgOUNQKABAGABBTb2Lc3crAs3oMsTzVfD6y4vxBNvpuGngIQ2ZlBzudYRhoD++ic5FDsEvf/vX/6veh+X1y+XNV6o5nXZajnI64bgHwhhcXPSHx0dFFmzb1cp5dB5io+iD44ViSAoGPnYt1MxON6+6Ohymx71rWMWsaa7+/Ld086I0K0OPBAjKmKkWGe
KzL8MWN1IR4+v1j3NWkALoI3vIB5WpuJVm8JAJsCpk3YfOl0G+L99vUvFurqMAVpkBanS+qNOCmgMFcHQ3Kkpult8Zz9eFyskrCrQwUflxpFgJECV1M3jZImtdNixXyNzAWSpVoxREZQb5gu6biAhFBU+ZqQYuDmhEgEboYKkBAZ0Q2YkNyYIAgGpjZwn9itsDiLgCoDEMKoMJqDEAM54KzVHZCpqCPiGs5fnICIHJa5pia4mVkB54ufdX0/EUg4RrE8eR4lsBkjAKhLAFB1rWDKMTgCO5Gb5hJit8bfXu9bAG7XpfFCE/rVL3iVoL0wfy8c4do8hdd14Xile34sOILPHNCFxsQXMdxlxfglB+661K7008X5uMqePpdNvryQl6OKBCRkEVwHB+t03ayqAgIjEbKDrxnBBgYISMaCIomNRdI0L8zmprXM6KxWHFDVL4vUFVwRVp+QwcqhrXttd3Bwcw6MTMiCDIQciUSIyUhLrTpnLdWLujnUimpSAIFEDRFRs7MYCxmUwCAUolC0FbZL1wZCaFKMbeKQAGhtwALz9SOZEREDuDMGBADUVZpDYAgKVt2wQKJVFwnkBMGRiPj6Tl8CzleuDWGtYAbQSmCAvF4LgARe3QDMGC7V9GvW1SUQDQlhDVdj8MrgdJXksCO4oSuYg65hCSvNTQ5OsJJUl+Sy1ba9rhcEQ0J3RSMwA3OAuj7q1ikQuoJl8AoOqO4OXqsXtaq+8l+XX5CICJkcANYkQ0TElZlfNyRrOKSqmqmt2ilYk73NEIKjObgQISKwsWB1YzTRGjg4mLtnq8rBDQwDti25yWarp+ndYf/4eBzG8zDOi+nTWAzXnC5EEEBYB1TmeIloQjC7SJBXlSaBq679fR6Eg3BqmthcOmGvDWgSAIAIBFmEzd3cgAQjkiuhE612fzKDze0mdJFEhIlywWJQDR04sKqRobob1HUnsfaFMBK7L9MULHbtbigzA9U6g9Nc9Tid39y88loJYdNtybHa0sdAZTqen9gwBC5aDPx8Ok/PRzALgQjZjBxM3VWrGdY5o1d3arvG1aTn07DEtj8/nfrIoYnH06HkWkzzWppO9XbbV3BzbBLvNsIMithubw+n4e39bphGVQNoVN0BtTqw1loQSmz6qc6xIQNiZGBp+o6YY9P0d3fcbRWwqMauz8vU9F0uFYCAQmrbqoVYUhsppPZm55ZdYhkHRlpvDUiUYq+mq63QqmGtrtr2m9TG6TxTYEkASwYy14JWnWSY5rrkpXgIPM+uiHPVRbODBUJFrKBguhQ9Oby6eQNUPc9NK40EzcbR/+R/+ONxOHaxGaZcxmk415vd9mZ7s18OWDK47fp2KouVEVqaT4c6FVjo/Om0efiiafjs3jBvuuDmXkmLokFect83AuAE222c52nJ5zbdhTYSEqDGhgPFOdv5wxCj3HS74/FsVoTYK5BDrRUQmYODLzlv29R5k4uO5m2MzFiqE6EpAl7KE6uZA5asTARA2aq5msFad1a1mioiVDPVl0EyTHk5Ph6BIoVYl9puQuJe5/b+1dvE/un733CA2y9+qWGDbuX4sZx+SH3HsTNIqID5vBzfI2QJIk3jzo4dUEtWsAzGBKFtN68cgqGXcqpLDTGYzgRVQlOXhQCQm2whhAhQSIcwPx++/TurUwi33H45QZNzDV1g1ZTHT7/9+8PpdzHBT//o5839a9WkHLMBOKAr1AlYLrIhYYAA3jkwALqDIhIs4Ao8ryNylARmKxHgrlAn0jMBaB7cFLRqqQoAGMAdJSA3xMkWNZ1AhdudjnvELJyxnOx0xDzM49FB81IRqFb3cW+qSEIMbsDM59Oy34/jMOVcumY3DFViExs4PH+/jMeUiIjHaWBmdBMK0jTmUFXnYWIkCRGqOk7zdFZQDok5RupMMbTb1G+LYq1qC2nmsepxWIa5TmNZQDNCCG9ub77Z7//adHZfFMrTfh+a11kfA1ot6kxd1y/l4PUMwgULxKbUMukyzc994oB9S682aflu/K5a6Roni07OqRNftM7Vud8+JO3bKA1ls0wh7Oczc+h3v9x1X2Gfl/lQdRr3H2E+iQTEHCKZzWnXB72tlc2wlqXffHX38POPp8fH4V27JUVt2u22u/n46TcfH78VkI+fTtvd61EKWC7TYF5321vRru3paWm1AppG5dftBkzPy/OQh6/7Pw4sEq9z5cumW80QGWKIZqauaioUAwkCGGKxioCByR3WdBUijhLNFN3JiZnzUhDQGVZXENCq5lA1ZWFksmpIWGo2dZaQGnI3Z0aHWguHGJrGvbabzTAMWuuyLK8e3haAWmZipBBr8VxmB9hsN1H1+OFDRFDX+TR2fQfoglU2XZ6XZVDhAO4EVmslEEQBd/csHIAI3YCRKfyw338849mb5aBD5SAbNYSpKhgpbRIFLByi1kGc8jK6ghAUoxiCoU3jyJHb1LNaYopC6iZRmH0dJ52H2RCDRM91fHpOfc8odc7ufjqfNGNdTlpK0wkwUPCf/6s/evftx9Pz/vjumCL89Mu3h+NTbBm3XdxIJXrcnx7ud0+P+0+Pj6lJc64QYt/vlnlW1VqzVBRIrOZu1XIUNlcAaJo+zy3U7ODCoWqllQUxIxQtxWyNrkYSJharqoaBZZ0+tbGv5nleavFAITZNUQsU+s0uL3POs5Awxs32zuvCHF8/vCbT8XxapgmkTaltm8iIudZaCzgGkiAswqaFVQHs4shgcnAiJEDiywYaEZjDRdLuDgC8TszWtBEzWJnIAABka4/uWtfFDA5aS8kLkfkKhdqAQnmcmzb+COReI6mviasXp4uD+0ohrQIgvEov/MITXQT7a8cuwDVN2t2J2S745yUt4yoregHUuFbav2THvIzJrz/ysvsmAGBc60yAEFl4zY9nYYkhxBRjTE3Tdl1Kqeu6vt90Xde2F1VR07YxxtikECIz/1hGtFoiCK9F16soAsjBrjDvSh4BEjGLp76/+/J16mJ+fvrrv/qr1uz+1f1u9/Bf//q/fnz/cVkWBbyNsU+BEaepTMNxf35mgE3XI3Ofkmq1ZRFqRPDT+x+aGJuQmiZtN7cK2m92lDbCEtstxUjVjscPn/a/cStYhjyemgbqw0+2u7fDYilupmk/nJ4xtqUqq7rOXnLD8Tzul/Nei0Z2rapqdjgtLizdchpzydiEyGJ5Mbdm0yOQG07jiPT/5+rPeiXLsjQxbE177zOY2b3X3cMjIiMzq7KqsqrYTaKaalKAIAgS9aRfoTc+6t8JgkCIehFEQRPY3eim2DVkVkZkRPh0JzM7wx7WWno4Ztc9GR5wXLs2utna2/b61jdAXUCQ12XZccrnEwlE1KdPP+X53PepetkdxppnbRWE5/VcS1F1AjW3eVmFwFVzK0I8DodBUl3XXmA6vr9/RonD2N/GfiQMhFSWDGBlzWWdc14JtLVS10pMCMRCr2/H1nxZ1sgyvHoVf/sX3//wR3JhpG4caqsCOH9c89frzdc3wkISrh3z1XILPyM3G57yP6H8vPB03C+m7H6lueGViARXVt2Xd4ErzPTyYBe
Y4EuWx5/QP/xzs//yAwICmdn1y+nKYwK8AqlXyMBejJL/lFACL0rJLU0QtqE/EoEzpJvu9hvszbudN27GKSTT2TeWgqEaGDIIeTE1c6E49Mfp9Hxa1EjVEAxMibFUTQJEyITaGoHn6sesCsS07V0yV23bkzsIgLhvjMXt+zEKN3cFRKBqCkyAQLCl1WLHl7Z2FOrEm1Y1n4uFADFIYDQzNs7NGvhwOV6pkiCgAkSn0pTQEiMCMgABCJirG3gB0+TNWqkzS0JhA1MrCAHQkCNJaJdW2Z2wanUiJAattWbpd05OgGjmW6cUk6Ftb/7Gw9w+tj+xXbuUz6UaXwSLlypx/+Liy0f/AvDAC/XtCptfa+5LetrLE1xvgVsI1pU7tFXJZUVc5WaAXzwgAFwCCpxFLnXk5uAsjBsdFxzQTR3AGdnIDExIEEkbgDsabccbDeZuoUpeV6qkTQHZHZoBkiCyAyJSMb3YIZsrAJkSeghkjrRRCi7HdXTy5mCGtfpadV2qGtqFnoZK3FBNmyMCIyuKWQwioCGFvuMkKCBRyL0RCQlTCE6szWpT18q02R67N6PISIE4hG7oQhdixxJZAgmLRGIG4o0pZIiG25kQkZGErxgQACJYQwM3BQC/QiiIdiH9kWxey4jg16yIq3TxkuN5US+CXbcUhIsywgAczEAbtHK1wuKr/ZQD8kWtiIqAtllt0zbPaUAC6AAKoI4M7gC8IXfkitbACph7MzDwLb7Udfs4kBmRgRgAHDblu8EVDQUDU93ioNXcDVTVHUwbuJubuTmgat3SVt0NGcm45cLNiKwiZqpCTMwsyZmM+lKyuSqKSye7Xdjv6Hh6OtXH47qanta2xZUQkrbt3YbmbTv/bKtlO6IQUXXHzRaJEMxjkMDCWy7e/8SrKHbCLFvuJG8aKHQkQDACNjPArSRCSpHZCR29YoNSsmtztSAEKk5ohs0aixAjEyJtgryaWybmIe3XsgpDEF5NEbiUtht25iBBQuwRwzQtY0ddurFWtVQJEdDOp2NIcTpP6MAE6zobKNQ8xDREPJ9nahao0zYRYt8f/uZXf/bp049D6dQghX45rg9P87KU5mAsi7fTDAVkWdtS9HA7vrrdk9UuhWk535+PUfqnAklSB4guqZM1KwZOSRA9SCJiQmpNo4whMjnhRQBF0o0OCZHcgDiIqPsM5t3Qha4bb26n6WzmaOBqXdfP09qltJxPqUvEvJmrIXhTX6tNxxxj5Jrnp3s3dkNT61IKXZHcqFHq+uVMRbEWZYmu2piyzcfTbI61FgXoIjvAWs4MqAV1z//48PC3v/kK8/PT+TimIG7zcbUB7nbjw9PTq7e/4taQp2maVerwan8+PjIlqK1UJXbwYc3aIRKIZZ2eP0G8SV33wz//sR/lN3/5y8f7uRa3XNBiVrjZD0+n6XV/+O5t79jW3NxdAbwpBxgOO64IVNc196nb3962hycDMYeqbgbNVQhBPevKkhiTNV9zNoAudkh1XVbXl7mVq5upq3tpDaHpht76JUGx1qbaTK2ZuXu9RqAtedGqyKSogUNIA2F6/fbmcLM7/vzP56eHX/+LvzMKXo5QJl+eumHH/R4wAKDV7MsDtLPRZhaZMX7l41u3AOWM2igMSBHjoa6rkE3T0o2/JJc6f+IQzZhlcCflGPtOMMPyNL3/w/x8787p5pd088sahqbQDQMsy+nn7z99+v5wePv1X/8X1Wq62VdTRQVthIKE5B67nemCjqoEhu4AoUNggAa2YJtJJ28raAEDaCt4Ayuo1esC67ProiUjC+ome2dUdFOOBoxgxYhKNYp7IIEQa0gUBfJsx0+en32ZSl5UtZZiDmUtMY4A5ubaWnOdp9Lcno/rfK5uVoo/np6A0s4Z7JSnc2RJwrWciTCFjjavD2tEpG0F8waOMYRArRUSFAz9OPS7nkPflB2HUggo7G5eLXkabmIwezh+csA0DN5mbbOglvwDRXazuTzf9m+EyOB8O0SvZI4Y+2YIliniebp3QQIDIyIe0t1tx6EkBlbvGKKq6bqS6qzt1av9u4d5Xef97U1kToHX87Pa7A0j73udUMI8rWQPN13y6bHhNNxRRHN3VJym1cBSTynw82ktHl+/+cvjcs7r76w8dwlMkVgl8Lo8iaSbw+vn472kG8Nwmh4kFcPGGuYSurvDw7ufLVCuC2jZj98BeEjjcnx+lW5pXTZlVSvtesaGzQZ3c65AFEYAqNu0YevFYTtLm290A+ZQWkVCYimtkoKjxpS0mSMICyBVtSTRGNEZieZ5EokOSuBIW1k4cbDWXHUYUp8S6mrN1lzrUpFk6G+AseVlTPtFrdQaIwVJp/PUal3mBVBPDx+L1pjS08fnfhjyOknfozkDGXNrS+pSJ4ERXae6pY3EAczLOruk57V9avzjeTGJ62rnVcce6UJKBnPXCsIyLfqmDxUQkA0bMwHh2koMkZnRGZFj4nl5yIrj7lVTQxlDl6CpmUofXEs+LeV0fv/zx5u7u7S/vX1zc/fmsK4A9vr8fH7+8JEQqvrz+hxj/Oabr358/zirxrG/ibel1GUp3djF0AJOzj50ab9P426o5zW33IoiggQGhFJKFFmXpUsc+k6IxEFrbg27ftfqXGtR021qq1pDTNWUAVmCWwPE1hpv07Pt8K+AIUbZezsD+v7QA4JqFZC8VIKIIEKgJWctkjo0MnMJ0dF3h/40Hx+PT2MXAks3jFLatNZpmfajhNjN8xMxSpcS9bk2cNdWwMh10/87ALCItoYAYOYAqoZIRFs3AoTk6NthdBumbawdNOu6QW1zB1AElNAhoqvVeSEkBpxyfmkYaDOmAFC79qZXkctmPnBJi/vc5eLlcHvxt94eBz/bwryQkgA3l7hrv33BfT43yHgV4cBmq3ChWWz2vvCnU9qNSCQixCwhbKFmqe+6Yei6bhiGYRz7YRh3u6EfUkp938cYQwwhRmEhYREmYiJGRGImvAxELzK8i1fxhdR1df+4wAgOiGCC1NxjiM9z+fju3W4Mv/jt35w+fvwf/8f/4enh+WYcHrIVrWgtUOSAhGE6r69vDvshlOo5l3VdkEiQwIEQhy6N412XYgjQtPX7HbPteim1zM8/uYM4DmN39+rbfjjU3Ei4rMfp6eHp4VMueXbI8wTQmIMrPN1/MseyVvPiWuuyqHoTzrUCKoeQj899t7SmpRmWpta0FGaCgChizQC6IEMpq6rNp5yiTtP9zZt9zkvLc6uTBtNzI68i1Nal2x3GIO8ePlirQG6tbTms67KwBIhoZfn04+/N2rNbN46xG4fxVRhGtVbK1OZGhFrWjfUzpNi0oDH3wayq2ml6HLWEMBzGrmpbpyUM+7/6m7/+p3/642kqAXE37ndxT47i/aeH43e/PAQJBI5IpnZFKsnBLtKszV0S0e0KYW7NHDraZ64QXp1n4LLaNt8ZBL/G831GWF9a9uszXBr3rfe/EvO+aOtflt7lsTeeAZJfnujymvClx7+YzfoVPXAA3HzBXhgB7gjeALdwNbveyZyoAmG6i1Fh2NVzYUoADEplWbowEk
RTDqF3LRLScnwmZwAqtZXWxt3weFwBWoqk4EwCAObeWguAqoCE5wZTc4NN4wcNobhv7TeCNwdiCkSuLRIyuIBvOd1CpOiAsImRmEAQ1ta2UEV1CJGZqKkDs4IPhE7YsdRm2blY7YK3ZGtr6KLuBBiQCUwRKoAZJAJhAHAlMZbVoBMxIHdEj4SRISOzg6uDKgShPJ3BgYhrqSEkUIVWrK7d4VbNUQg3EgoRIuAmaHmhBG07xWeHqQved9nyPltiveA1eP2o/eUSfHGbF9XjC7h0la99Bic3nPBKDXohsX1Rbv7CdLkgntdt/Xo1AMDGgCYiZqbNgY55i80BADBVYEZQwkAojMho2+veiDjk4KoAKrSJkRIxWQtA0BwaKi6zGbSmvrEInYqZO+RqZltCgl+WLIBAS5GIQWRzCVR1yOareXZ05HaVODUDYiBiRWBmJCSJsQ9JJHWcEga0SOxgLEwxUuqcWM3MAQzJycwMTFKIKaVhSKmL4z7GlCTG1ElILIFYkFBYkJyYKQiJIAsSb3q6bQkDMhC5b5S1SgibExGAojuYXj4JzRuS6WAITtuyNUUg1crgZpUQnAhAt7y/jaO0fTeibhZGW1K9gTUnAZDN5Agujmxlg11JHHHDvQ0RwRQ2a20zhIbgDpczBnqBlkENHEB946FfbF6RSMRpg4pQAM0U0GlT0+l10OOXXQfUTc3V3fSiZLRrch8iCRs6sGyOak5i4GQA1hitYROkEBqGQhLJTKvWbC23TuLrm307nz98eKzm59xmdSBUdSRHJFUjugTRqxu4MwHRhkCDaYnMiE5EIYULjdrBDOhqZ3uBikKKxAS+qWNJzYgQUAkpkDg6sYAhCQKBI7Wm1kpgDkwhdeYGrqXWaXlOIaZwgyQsQbV4M5GgUDGEAg1NQ9ep6fN8ZpZyXj2bJDSrqd8BYCtTrsfXb75dpueqSERdFwixqqqVlFihkVZt2Z0Cdwow57IuKyoz8qz1zeuvn386/rt/+z9QWt/+8hetWD7WZV1a9Vy5sPxwf34629PUUorLogqxzn6eH+4O/V+//jZEbsjH3DDnPVEfU/ECGobx4OZVHatFSUJJLe/GURXEkKIgC0lADsjMgRBZUyqn2ZqCI8UIxGaQUgfOxKJqY4qPHz+JbKXrVd2BgYXJrK21LmGxuZT4+vVyej70aWkldikvuayutYKaKiwLLquf5lYMx13ggB+fT6WgOtRaQyBChEDg5Gocg6Mxw7qU73+8//VtN1DC1vaHWBvbBM91QZQfv//pdtc5WvVGlTnXFJI2Y449hefnZ6UpjpS5uOdcTrJa+9jGrhv3+1Kmrg9zzrvYvfpqN09zzX70SUge7s/nJ4y9DLv0+vXNaT4O+77MdW1r7GLsu5ZhPi/IHqKcTjmkKIHbxUoeiQiBatPqMxMl9pbzVFFbQ0AzrK2VWs1BkWpVBzQHVTUEIWqtbv4obZslqipuvdtlMZi5BHGGbugSpTzXGtqru30pj4/nj9/82V97PLTliWFibJkAKDl1rRZC0/zc8iOxpd1tW88UR9y/ckLzM3p2jhRjK1pLMXCXQ7r5leasnjEm5AAG2qxqkBAJFaYfzz/9x/X5GMfX6fDLlu4eV5Ig4vnT3//74/t/uL3d/+a/+C/raeIUWvWaq4PHcafNUdhrUVUFIUZt5oAiDJqhzOgFdfG6opubIgMgN28ASmxMpLUiEaaeYMQewdmtEjiYwprRK0hQVSLx7saAXTpGNjeG6FqYuUKiuCcMmgsxessAFKRzFHfNJU/HokjzWs7LudQGFGIMrpXA2Pzru8NxeYg3SZshqgTGCxeairfaqiAAByIJ3T6kA8KautgN3dbMu4G2pq00d+UIaVyPH9WNhuBM/dC7qEYtT5MoLS23qTGHjl/djndk4fT8eNgPYjzN63jYNa/T88eGDXeh2IzN+4pD30+Pk7Cz9OtUWfPNq1+faF3qY9XiXLjRPOf9/jY13R2+cYUkbYaaLSthxJtD153KI0HT5ePS2KVmW+7uvvMCp8eZQ0COIbDTUBkkpVZ8Od0r5rNOUViE1aPx8DwVBkfucoa8Vujivnv9ePzdIHXcd+zweH/ce1pr7YZwfHwKjiUgthy5RD/veLdMP413v2n5MqPZjmUkvLFB1QzQCYgckRABwIApIEKzaqqIaOBNq3oTwEBJiZnZwEBYtboBMZfWYANWWnMrBAEQ1RTcYuwRvLWKtGnfmIXHvrdWN9QXmLv9Pkqcl3NeJjRdJl1nXecWhLQ1LYURLUhM+36XirZx3E2PD+fTmYhdvesSO5pRv+uIzEFjEADDogjsrZm7hOBMz+fl3/z+09QAuSVi5n7KWxzoHsWWfD6VKqGP0FG5P5fjN30UtBAFwREYAJmFGVueV0dm6UMvJDdvvjnWel5Nzfs4AsVuCHdfp3VdulN5Pj8/fHi32Lw/HFIYht2rbvdm9/rrdcklK8N6fP8TR/vt3/7lf/inP/7w7qdv3766e70r+bHNGYqxy5JVAWszchKkXMuyLCFFUyOgVpuIEXm1IrBzACJkDqomIaEzbLM9N6bIzBfSOJEDbOzpbZzHzG6gtSDQuh5LPiGhGbTaxZTcYeg6FFjzqoYpILNQCuDYWnVzza167YYudulNel3nUymlVfTmfRoArJVVBYZ+V/Ks7ggUOLkpkHIUQN8sjRzBwYIwETlALVUCM7G5mRq4GyhsoR+42aW5xADm5q66aXpaiKF5dUBhcTQ31mbdbrgK8y88IcRtaAabBMDgApwg4ibh2RjcsEErF87GtYm9GG4jApg5IRDSppEDJEej7U5/kvHsdHUXvjCRru3StZ3ZQtiQEC8jxo1DJMIiMXUxdV3fdX0/jGM/jkN/+ZP6fuj7GFOIIYSAREzMQS6efrg5+21MKbpgQkTudmm4LuG+m6X9Zi18kSEZOCATeowJqkXEIEzj+Hz/9PMPP6YgX799K5KOc21LjSjPUxHhtZz6lGqBo1V0qjWnEMTh5jDshxFJ337zTVE8nh4PNoS+YwqtzPfvfjRtiGiACLSWMz59Mq0kgbq9Gu7GXQAcbzuy9vzhp9Pzp8fHB0TsYlxzAbMYwpLP5k2RWm6l1aYaIhi2uq4A0BzJGeEiQZyXFTbRdVMSXtd1rSo3fWm1KKxZ1zWrrimilrWWxjM0ITOr8/PH54e8LtC05CLMqm3JWUu9uU1d4OP9+5xLN4xpvB36Oyd+PJ3K8RhFxn6QIODWHW6t5Xk6T+dnBIwsIYiqI+iQBqs1l8ncUtfd3IxWCwD85V/+2n/8+ZRbVtx9dfPVYYeJUGxDN93cvG2GVhta87mv3upvaxG+ZJhuDDe3ywD/M5JzZb1tt7l25l+0/XBlh1x8ha4hZi/PtD3gtk5fGCQXs+OrIOiFnORfLIINtL1I3r4gRm3EhStn6fqcL3wWulKktmLG0HF36201dQIjdNemWgHYjbwBArlCSNKy15ql6wzg3fsnpxgSmzcFFwStTszqaubqAGgDUXWoTkZkqmBWmikiEUZEBGczZq4ACJaEOgLGLZEImlsgBOLqim6OE
Ak7wZFJCBkBmo5BgqAaufm01OAgZCHQvicvoBVJuJeG7mTe9CKD2XzammIzQ4I+kpCpWeQ4rdMrjgpYch1ir7W4Y+DowObMzHU9g1ZQ3XyPAEFL9jpj7FwYNsMZddfKMRnaphn6E4Lada+7fEbuVzMpvKLmfiFaXqhgV0DyM/xzpYhd4PKXsr1s2Ns+jS9MMngpn035uJkm+9Xq6KLkRbzkWQJ+ZqV95icBbEjR5Qvg8lDX14uwhZohiYOpKzhtcbDNKjjRNnIwQgetVQIzi7sbUTTvFBt4OE0IR2vWqua11ObNoTZt5g6k5sBYmvrqI4qSB5HdKGNkMmtNczVAcanErdSt565EnAJLEAJAZpYAACGFLkhE74XQqpE3ByIJIXVxJErqVGt1cyFmAg4iQWLXp35I/a4bBo69SIwsEjuWwCEiIm36cDBkIgnA7CRbDAISAhLwxrhhcEcmoIs41BXRwbyhg1u9fIBkcLENuoxnNmoaoiEYEOAlct4RbMuwcHAgBkMnJ9vMhAzc4MJduu5giEAIm9bKFLRtPtdb8wcgbgbWXPGSGLZVhRpYgVZNFRxczWELdiOHLW9UAchBNzcCcNgOxpsu3cw3U9wLvOSGm206A24maNdtyZCcZIugRXNX3eS0TSsROqijV815qSzsnNycwYPXXOdyPpXzU16XVetqbbG2tsbkYK4GhhBErsxQJLxo9Zs5mAEAojdXAQT3QCSXz45MvdT6J1BRTBcca1PcCwnzxuu6fGLEAoAOBgzmKkwSBnaqdVlbra6ATTB0/beBEwcxXavWjUpdVNHJUVB8kE6bTevSisd9anWpJZOkpvkuSN8PqHXku87lbPcSDl6h5KabrDHnus55nQlQC/QhDfFmzs+aFzbJSzHOgySvJ62n28PXS5unB82tnpdKElD03cN08vrh4+pEHENZm2Mg9GWuLcD5Pj9Mf/hPfvn69d3N0/KeJEnqgF1VzYUgZiuBYkhRm2mz2MUQY+wCE8YYu2GQKMPdXUwjWTNvhGbWYtfJslqpTAwAHMK436cYNLcQE6LH2LkWJtCSiZkjazFhRIdEWnPxdreeZ2RGodTJk1XiRMiIWRiP05qtjbej5tXJ81J2w/A4n9KQbLH9rs+1Zs9uDCiuLIKlNAB8eFrgnN/uQm2ZmBLRpw8fdof93W3/219+/eH+j+PdjTZeS7On89DFEESzmmPkqGvu9n1eHuNhPHy1v78/j2GHiKXlcZ+6wNoW5Ti34lCDhKbeRelCmo9nInzKc80qkVqbUxpC5FqquzGH7f1smVJHTZsDIqM6mhqqAzO4gReWhGpaIStaM3cvpW2hEk11A4jUwR1as+ZWELxpa+oAzUzNtWlpzb4YojVzrapAhxAFBZ3GXbo59H/8x3849OOwf1U1e5lDjzVnDoKxA4leGzEjldxmFHRvgBGGX1bvxZW1tDxjGh1QDcBIEERi1dXykVMCScw71dUIYgrcTn7//cfv/21Iu93rX9vwZvFU13q7g+Xx9z/8x39zk3Z/8dv/LOzvNM9anxkLcaLUmSN4JJvakgnJ1USAvLoX9OKnM2gGVTdHZuSEEoGSaUZreIljMG0Nws5IkKMBQqtgqrWgN9SGkMGLmkEaQJKEwcsj5Sevi57vjRGaKgagWImzdnzzG8pnzg8h9Dmv8zKrFlXlGJZcSlkEkRhPy2QcwHCU7vbwdl4e2AtzyDqDQ5Dors0bGFTLahC6vYRoRBKjagZbGCVn8yC5rhVbSCOFLu73Mu5WI2RRa1POTa20xZmatWp+XhfiYJpGHHP1GIJnfSO91L60KTLW5mWdCYVjyI1AdinBPJtWVRgAcKEddIVyVv3Y+WpG95q+ff3L9pR9OcfOQhdJ4/r00HhptcAIHPjjw08xpWLT6XTqAhEPi+Yi8cO8z6fzXff25vZ1xp8I8v2np5ubkEtupVZbm823rw+lwLSezN/E4Y3CM6B0/Y3DftRvbm7+9vj0h0P8qtvDfLy3nEf+6uv+V+vHc71/uJW/+Itf/+aHn87jbSz5/nx8fzceSHpoa4w3G3t26w0cbdPzEyBzcG0bQZSJEbHkouaAKBKYRFtBvHjnV8sNGkmfQmQSYzNzFlTVZrbmZ6EYmImZMaJjqUW1bGcCVzd3kTCw5OXsTfvYA4NIMDNkcPMgFDlMrSC0FGMKsugydj2i4ZjWnMECqD2dFuQQUy+BAlM/8HpeDYnkEARVbWMHE3cECEzn6ckdcivnaWolA6ZmqEqIrcwzUQCYRJCDc6QorMta0XpJS87CWood+o4ACB3QtVWhOHQjIsY0CHXr4wdr9au7u353+3haY4zHeV3NUupefff2Tfpzy+vp6fn06bl1NT+fqAv9fhd7Dyl5od/8zW9//sPvptPpF2/fPt/fawMilw7zcb296ZuX9ax1gseH55ub/XktMYWutbZZ1yIAkrmGBFaWlp+R2A2N3KwxUBoOTlDyigilekxCFE3drKk1Jg4S1Nr1sAWSGBxMTd3JCQxqWWqeWcK6zG4cQuyGLqY0T89996oaICNS6sJY5+dS5tbaJsbLpodx12qb1nPfhcUW9Qxhn2isWt0B1IAphuCmTS2mbmMpArq51dKIkFls61WBiAERzQB5I+qQuW3emu4GoE2NgADBiWLXlVKaawwpphS6oZQ6dunLVmGzjng5sF66360hIbweQ+GFAeTXMfW1Db4MzokuUgWEq9jCLwwtunSvvrU89NLqvPTbflWwOYgwIggzBw4hEFEIQUJIKaWU+mHsx2EYx2EYhmHohyF1Xb9l3scUU2RmRNogNiTasCG+HAuB6HLi882sFWAz033Rhrg7X9yLXlo0vArzAADMod/vv/nlt1qWH//pnyPF/Zub4+P9/dN7Fv36zeF4WtUhMez7FIRbrdiARbphJPSxH4IQMYS+X1sFh7vbuy4EIDwd76NgYEGhrh92h13LFcxM0RyQeDofYwrLw6ks2cBFuO+7YT/Wqnmdsi7TfD6dTyy9gBeFogoG2kCdVMFMTQBUzQGDFLfIwc2rrhsm51ZxXkptEgc0r2UVpuVpOT4eoS7jiCVXRhDkda0BZZlgzauj11JbLi1y1fp8XAikPzCsi+YqQcxL1fMf3x0FZdiPaRy5Vc84TY0Ic2wACDSM+72aWZlPy7IuZzBoZWnrOs+5Net3h93N2I0JGyx5fvN6f2j47mG6uxv/4re/MahmKBKQaTOngIs5y6XMNlEhIl2BnC8bZv9M6bjiRNucHC9l//mqC8SEL9ZdW6m/2BK/SCfhhZX2Bc6DL0DAlSeCV4BpA4muHeP2JC/8lOtA/jMd5YVscrnBtbO/vFx0QCCDZmoS+m8w11xz4LBoFQDhqLiAAaiCKwGSI1rzVvquq9qej2eEA6oJc4jARoBetXXMSABAVc0Jl9JWAweoVSMjItoGHKCje6Rt3VkkDKBJyMAIUJirmoMr+hZw0wAYEUlvh2CtEoB0Ua2poZkRxdJ8bX7oBa115Cp8qroqRhYxzW6CiCjq2vTSKzqCs2MAc5MtEwZZzRGsGQLIRicDdFMbxujm
RKTa6nqEfrRWEHpsVuc1Dq+QGUHdlIGaaiCxK15Hn/EWvH5al98QvuyWG5S3wYP+md7j1yL9TMvEK6XoIii6cokQvlALw8u2+SXiudHh/E/KdwPBL5s3voBGbq5f3BXAL9QeRHQzYVHTbTjxuW7B3bx5ARAWAQZEBiQARQBXc1NiQWRVB6DaqqpLCKkbu343pb4ZzqtlxWWtzbxtsCOAqTte7htjGMbY9aHv425I7Npqi9UkhK61sq4512cCI2QOsUshbDs4bm4wEkJgCgimxohWlWKQEEUSMaupqkKzLT9UQuiHoR+HOO7isA/ShU1xhhRFUAJxIBJiQQDePCcvZCJGpKtLNAAKbOKsDSBUJzQEc1CE7Q1ncMVN0QWGBnQROvsVdCZEYyY3J2YE3+LkLpAfwoXOe2G7OroBqJuCGVxoNRfzcrxYYyK4grcXti4AAiiSbBlqYIpX9SRqA6vQGmi7bHFmmxURIME2brfgAEACyBu+gw7k22sAMNygMQYiImB0ByffLJO2XDYHNAQjMQA0ZzS1pqrgBq6OIBzQq7kBWMmllhOaorVlWR4/fHj38PjDu4cPj9PjeT6uuZiTbIEel8gSvS582swWyBGwqUYic2ciM99M6N1RHcy9NkVSui6Eq7t1EGI2MwCj7a0l2uAu2vZ+RkAmCZsZlBAXK9UbkpB5H0cKCAQErE3XuqI3EbHaADyG5OC1tT5IJLxfjt5siKNqq14psAOm2I273ZZZEkU+Hd/FPhFgUY2UHAwISs7rPG20KCYhsqJP7qsIGWIaUhccKf/00++i3+5f3fhzQ6nrNOW1nU71D+9Px+JzaxCx1kYKXeDd3Xh7s/unP74jwHmu5wX+/o8fv5nz7UFSJAUVil3oAEAJw9BRR+qWYuAQUpdqq50lClFSdPA49t1hR6l3x7rMtVQ3rc2JKA0dqHb9uJ6PqUsGjoRNa6llf7Or1QBAYnRijn0uC1gLbGZ1WddPn97vD4Mdn4f9GHsZ98P5uQUGsBLZy3IexrsKNQU1qzVnlF6IWrXb3eBND+Oru6///D/9l/+b/+f/7b/5+O4f+0GWeQEE7mIxW1u72Y8EiqC//uV3D0+nMtd//uGP415qrmPfmVmUEZrWqiHFnOfQd62V54en27evtKyABt7KmificT9Mx/v9GG7H/ZoBPXIAIAL30zSNg6ZRmGxIfZ5ywq6uPrfzeNd99fp2Xp6YJdc19CkEWLIK87wsYA4OtTYw4wi5VYlcqwMgErJRc625lqpFm7urNlXbjOxLVTMzgMvEB8HcSlVzU91g+886eUTsul76lCiQeWAc+8DQ5vPy61/8q7wWZEPwUkqrtRvuQFLLs4hZm/LpIyLHMLS1oNx6eMPmVk6ILn1v3KkyxRGl0/pseQKX0EUFJOLWZmuFhQOu5fF306fvyYd+/03TYZksdPUQ7emf/83zp49v3/ziF7/9l6bc2oyeU58wxNYaeEZnW1XLihIJEdsC+RP6SfPCCOiAccA4OkRkvhjnATgPLgS4vUFu3IgubxSCITawClrASssLmSOaV4OwgC/1/BPoWk5PzOKqwBRSqqUAtVpyrY0x5fksBDnPZV29zgja8qRGdVq8NQBE00FYKKrDzX5cyj2BmzdACELMASm6e4ECACzCQoysraZhECEzDjKGxNUMGG9vbmPYIY/d4fWi0ByGfii1OGEgJgBQ/PT4VGntJIXhFTC1hR4/vP/q9a91Lbt+/+r2sNS8PhwlESQm2T89PlCLRrgWJzfG0W2MQUIstSy1NRds06eO46vbr3WtHL7J9oGsuC+n52NHdNunlPz9u4/ZNe72bm3ErqBxVzWmJyvD7hd7fpssBHjG9cfp4amDGCSm4avm566XPoKQPz09LWsFb1YeD/3rLo6fyjMgTMfHxH5IISy/j/Wh4NTaGPobb1atnBS+/au/+3/8d//n//w//Z+X0yOuz9ODW7Rf/+a3zw+nIQ7PT+9vXg/M/LIKmplI2Ewlm+nlO9OUmKu1ZmoOgYWIS13MjEViDK02DjyQbM2RcHQ3BytF3QFZEAiRUuiWvBBtQLDZNplRDTEgk9ZW3RCpjynX1VC7fjD1WosIqcJynrQWDmKt5DoxQ8ultlo0L8u8PD/v928CM0K3/+rmfH5Us1KsG/Zdv3eHIEjEuRRgLUVbLVpXYDJzYDzNp4enTyEN4GzAabg5dL3qUsqEhHXO7ex+fOyCyNC6joLEus5kNBftmJa2cuAhdSkMSLLbjSGk+ZyX5QQhfHj/2C3fHG6/jakf715Va0QBPdQZDru7m5s367fTdDrOD/cff/rjfti5aX94BQbPx4f9btglOc7LB81zUTnVTjq5vXk6nr/6xavnf15BGBB3Q6fNmDAEamsjxMDsaGa1rpsJeYnxgA1zVUAUEdwyskLYTsSmnrW4YeqCWwM3MyWg1pSZzI2AAoshMbGDSwohJa2q6hIk51zqXE/ntUjk+Hj/Q+puTZ1EM0gMfeQ++9O6zKGL6nCazmPshz4GAde4lnWdjyn2MaWcKwYyta7rGUlbReJaWwxKBMt0BGjIaE2JmIRMVdU3AxCwLegEtvGgmyFBCKE1R6IuijUlli6EpsboVqpBRoflPL90Ni9tDQGoGV75QXhpY2AbD27HUP/cMfjWAdPFDN6vj4bbHYnI/BLpcgWILutuG3Hjhg0huDkxIVwkHSJMhCISYiSilFJMcUOC+r4fx3Hc7/thHHZj3/cpdTGlEGOMkTbiUWC4CnY2qOjyki4dFr1QOXBD07ZE9EvPjV+8vkt/5xcj0g1lcAwJVR04crp//GRNY7f78HSmguLMrnXVkQNGPIzjnCcrtWNMKRERMZKb1xa6XdcP474nCq0BUUTiJU9dt0fH2PUApbT88eM7RgpC83luTVVdWOzs4N7W1RE4dNNpmuZTywUACTxyEq6nqaBrqauaD6l3gKLmTaOwgbmpSACH2rRWj4HMG1PsYwLgnHMF6JC0tfn8HAU15zqtTJynqtt52Fd0fszKPOSmgVGtAvq8rqWWUjSmdJpnb0tA6tyx6DzXZsaAdTnHmFKKOe663U0uZ6CzhMShRyFhoS6GLt7eHkxhmU7mOuY6nZ7mtX44PqU5DCx3d6/md++GJP/ir/8iCs3rHIKkmMy8lAxIzIJEbhdjnWuP7VdBmH/B1tgWwgYXXdrzy8/wwuK4LAu/gjPb5av5zGcpz8uCurKEXtrw619X2SVegadrcPYLUw9eKEtX+ccLkHUhiMClcOGKxdKGCGw8Ile9rFdwBESOnPa5iNqCTCgETuBooGrVoCGDEbiptsXAAoeHTx8NJISoVsyUHMyU0MBdEAuA+nZcRUMEBlATQgQQosXAwck8IIgDmPZMHVJAjwxI1MyZt7cPi5mZCyH4Z2MXDmQK2V0Bm7oryiYMLqBgr/YhiQ8Bq9pUHMyJOWgzAHNjBANzBwFmwIAIaimxqpHXyOCqqhWYnL2qcRebOrICuYPlWkYCrTOEhd2gqSkg99T1hmpuuJEgJDjDNmq6Uoe2Dv4znAjXD8w/f/iftWPXrLTrhop
/WkZXAtGFbPKZd4ZX9Ol612tdvBhRX/fZL/b3zxfxaiYNW7Cj4xeqt0tLD6bGzOYGm6HM9pYCIDjDhUFj7lUbORCHq6cNAhGKbF9NLNLyCghq1lqz2gAAiYravJZ5XUvVjcJn7oguzOQgSOPQRaHINPYpBAJ3Yu4loDRiznklawDeu4euRwosRAhMoOZMhIhEyEjMGAIzG6mRcEpd6HqQsLkruzUh6bo4DmM/7lPXQ4xMEYDc3Ft1YkVnJANwMzdDJNfGtCXZG7pdaDub9im8vOPklw3FLpxEBHNza+AK1jZ0zwnB8aLIvvgWbVlzG/akl73E/JJbv6045OuesLHV3LVC2/ys1QkAnFhcHYm8GpCAAZICOCADM3gDI1DaMtE2EAfcXBtqdlPwzWAILhvhtssYeQMQBeSL8nBTKV4CHwCZEUyEL3vLRs8nNoSNnGS+6TTdwYkZbItZUTUvtWqtCg7oSiao3gozWat1XdZ5Ws/T8/H87qf333+4//BwPhU/tkaBWW1r6BAINqf6jbpnRgbuRgjuLkiIBNZAHQGJQETMjIjdXd0M0S8Zai9QkVwgQOFwQVsJzbYDfmTCzZ/DsDr4ZqPDEhpSDAFAmzu6t80cqRkTEoDVRkwAYsaqVdAR4Hk65pxT3BPicp611CTdPE1fffftzeEwDMP88CF1oZZ53N24VYUzhhFc13XyUoMIM5R1FYe1rBJYQicx1HkhaiWv5+V5ONyO6evT+VxrOz+dz+s63ux/ON7/06fnu8N3EdfnaTWDdV0PXfzmF28+PH/69rvXz5+O4N5clgbTUm96MjVlzLXuu7i0lZB3/S7T2u9vggcOhKCp7yUEYgESpyDdDqTj1KOhmzGRS8jLusFs1rSVbGb97rBM07Drnz49IEDsUmsrAqRAaM6AAigxtLw+z9N8WlO3Cym2zQTBLIbO2nNecxr6+3fPT6fpTbohaG1di7XQ90C9uuSsqnY3dLXW+w+//2/e/30vw2E3kCigzedlXXV12PU4Gnw4PfexV5IuQhSc58qBHHSenlBIpM9lAndOMuw7NeMGdZ7ztMMYAEPOlYxXNRmHea63hzvBj3laF7RhCA/H+6/u3oSIyIoC87wYGsdAMdpiWsv5aTar464LKUrV83lSZWauzRF4c543AwCCZiFxa+qgF8knUs7ZDcxBzVW1qauaGbi7maqaIai66tb6WtWml8UIRJ8PRjGmRMIYyH2ZFhnGUvLvv//nP/vlX3Qjzfk83t5pcc05xtHgsiNwYJ2fxL3ZyPLK84P33zr2aEcR9+YUO7WuNUIZsFUrlRMJ47rMsn8jIuX4QWIEnfPTz/OHnyiE8Rd/dl6sWru52S/3P3z49P06nX/1t/95uPllNm3zp156TkFtsaLc37V5Bo0OMXEH5Xl9/tF1CsIVgIbXir1Lj9Ihbed7Q94QfAJCsLZFoZj5dhuwAi17yd6KMGPXQ24oQloAzDzX81Nj1FylG+X2u3qepRdEmMtCRCQkpl5bmR7z+Zi6nUpKQ6zLY6kZrLZi3kqtmSj0qSPs56WFgEueSaAfhzTczNMZKa3LzK5qjrqmNCCF6kAUQuiZBJ274YZQSRS0emCQfikYqLX7ewVwcWizNTPE1O85Hp4nftPdlvr8+PDeSWuesPGrw3Cz2wu2ZvbT089rWVLiss4MUqztdkMFXKb29e3f/tm3v/3p5z9WP3bcQsLpPKGBj6hNVRv6I+SP9fh+L0O6HVpo65wPMowp/fz8qE5euce7E3yqesRSDjLIMHz6+D7Aa8P3pIZ2dCrT+YQwvH7zq4f1Oeu6u7sVonXJHX/L8DpFH3BXW5nm9ylQCOOa1xjLsOPjw0/9IZjJdHoUZlATCI/P/xZL+u3ffdXv33/4/ndv3r758DS7h76//Wn5cHPzCyAFbaVdm2QkJkAEZq61mSm4JolEUNZ1Y0TjlqnsetErgbfWCLE13ZjIpVUMFxcVYHI3YWEiMC2tAG6sJAooW/O89SpgLsyt5US0tiwiu/1NjOLmuBRGLhXyuk6nJ+FBy5rL2byh13WaUhxe9a/C/m0upYUuidz//H3J07i7JSAAzHWN/S67QVUDipKC12ruDoyuZdXs1tRLbqra1Nzq+lQkoKOje2kIFCRqXmJ/eH03gC6ulkgQoSNk0C4NRMHATuv5bni7FlWmm+9+tcxLv7vJeVnN1rxO87qTt5Li4e4AIJudcG2YxhHTKx5uD9/++ud//N388LFWvb29YYnn+awT3B5evb17lfo9IpfyNAxdimpU+th5O+ZqZdWmQMQhxLU6OIgEApyX2VH7GJf52KWeObRlZSEkiLFTXUsuLESoDqjaEMhA3AwRVBuzCItIBLDWtGBzcxPsuqFVtbkiSjfu+27o+6WWUi2XWpB3SOBmAB5E87qGMDY1cMYQVFUAHT2vC4vX2lLXR8BSy7quycC1SAwEUMvUkIlIS9FqSKBqqR9qLa0WNyw1E6GZxxSIuLXWWnM1MGcmFgRiB0CO5Jv9A7ib1hokEIpwBAQgCMz4Ek2PF3cLAjC4lOjn/pkuMrRLRs+lJaWXGfdLq+3udE3/3ToncwOAzQ3TNjgGAAB5W1O2ZbQAAlw4/LiRiSimwMwppdhtXtVd3/XDOI673TiO4zj245j6IXUphchblpkws2xT5Qtk5eAAqgobcMu8uR4h4kY0AnjR+VzlGVdwAK+qJbzM5L8EFJDAg4TDYb/WswC+efP6e32/725x6QVpHIujnp4nQ8hlikRp17Witeq4T2MXEgNL6ro9MQS1VpdarK7nAmCg2ASRTs8PLMhBrE4GuOQ8xIG8sTZXZEZvJtBqraXl3NQdmeh0XpdS81qmUoEDq7eC7r64Ilozs+biWE0djEjAqRREAhH2DcoGnfNqkLTf397ctedjqa3b9U/niZBcWyurkImgVVcrpeQQA7qbtpKzBCnFz0uVMMQg65wRYC55ngttYjpCAhv7MYWyTIB05seHKDgM3eLPqpD6DiRoayjirWmpCA5IIcQxdbv9jlJ698efPzw8rWbD4QYAYkgxxHUu+ze7WorVGoK4owTBy5c0bXHRxLzZY+FL6/65SUazTXxo1wL4fJOLqdb2e79gMBc1BbwQLsC3kMIX5c7VytjdN03zhVJndqUbbeSfbfHBVRO3yUfw4v1xNfS64ldw5ZvAC9RwYTRtNXuJT8ILfAC2PSSFHUBPmM0qEaIhOLkqUWBOCkQUwNVaCymFFJ/u53mF1wlzbYBYTYcQc9YxxqYNWZoqI1qzXLEab2M4JDI1R6wOg0MSIjJnDORJnM3BLWAQoUbMAAEtIBo0IUwUFnVEL+rRAQyIYGmKxEDY1EV4NYcKaQUaJTAOsc0VDIQAGS0xNPNmxnQNsnRAwCgcE5WskRiAaxO1EDt2MwnSwN1NQtq8krSAtdLqzNBQmxDV0kg6kqioaMrMqEC0RTlvxcPbvndhD131hn8K1Wxlc9UuXv66qswuIKR/SbK8FiAAODh9cfGyEfkLl+mFx/nCTXqpXryqIb+g1221R4gO9qdhA5t1/2biBkysqhdE1Q
3ACXmrva3qVC9Ku+03hATiptSsqoG6q7qpaillXebpPJ2n87yupS6tPi+5ti22XNRULQ8xjPvhsAuHUVIIwpyLMmHHBE4kklAIqzCSA3OAYBVRFYlkE4+LoAgiwua3lEQCE1lRLCFJ6gMKqGtpbq0SYQwhpD6k3jk0IGhgWhibM7sIkbqyKSAxMysUQGRmRWBRbkgihOTMjkjMgIZOCAmI/LLqN5zQ0BVNHdCakm8J7oZECArESAjIeHmTAcmd6Lq0HaGBNtAKTozqKI6M0MAatuZm6Khqm0/ZJogE38AfvnxROwEpADgamgLQFhIFvn36DoBuhtZc6xZRd2EsOaAbmJuBkzgRqiATGLKwGwFsOxMBuCNxDIAbBw3UHBDMDN0JgQCbuevFCdLdNt95h+a6ZXRDM9TqWQtBBmtEblrNbM7l4el8PJ6mWrMaBtacHVDdCUHd3cyE7PLvBne48oEc/QL9b9AVXE8p7ighIIC5l9qIi8hlhHyBiljIHIRkOwQQbWsjbgDexYocnUkYxU2dzQiEWLU5+MYI23ZiYb6EyJkKSmAGdyKPIZnpssyEsgUqF21qpuDmMN7cpb4HbcJExEw7aGZmXbdHg9ZUMCkqBZTkgMGLokOMcYPMa8u7oSMLcznfvbk9vj8+fXhOsdeGsb99OK1//7tPb+6+HmPM82knYkgfy/lf/91fYjf8h3+8/+7rb4Z+OD/eO9ru9rDbD0vO+7t9lzowNMPI4z7dWFn6ITFILUitUZzH/SiSWEJ/uEv7fdq/5u6gFbRVlMAAeS1p2OV5zqVwCK01vES5EDGYlmmZgckKqKo2ZwA0R1cicy3WqnsO0Ze6eOxFk681xC7wFGNXivfdXd/Pdzevfn74vrnVosDetBJzlEgIrSG4ItU2rT5oDP1aK5hy5OKWAlGgwyFRGYj7x7lyK2NHw7CLDIy0mrPR8/k5RSDC0zxByV0/IFEMu9ogLxN3few6VeIQai0xdE/P89D3H3wtufUd3Qy71rICipC5gVBRRfd2NEJB8lb1dISiMFfbDx1V8o0MhEFCrGamlYmauTpW2wwdwEw3bN5AVa1UvSytdv1Btba2aQ+aqjmaWbsEajjgNWP4+pWQQmJgzVWZ+vEOpH+YSsRyTjXNR0lS61JrHcbbuhZhRhKA4vXJytEcxte/LudPSD2Nb2pbsE0GOXaHVlvRpRvfNoWWV0SJqSvrypIcaJlPSVBCmd59nx/fE4n043lup6mMu/HTH/+xnL4f9v2v/pP/FaRDrqqnaTeO2lpds1nF9Kbp3hyszt6e6voRbQICiR3EHjF53Dl15oSA6GbaCA2E3dzBGJFIra5WMyBy6DalLjBBGoGD1oagSBWwNG9lfgIQDMnUadibcWvkoctW0FuePETviUgEkCikuLuteU3jDrWAGxg6C4oxt92wA+BaW60Th9h1vaHsDjfV9JRXVzQrjlZrQYlBooNqMzckSmO/V0IQAfJ5Xpi5291YN1IaapvneRZcMJBpW+tJuq4ZrSVrDCZhbhqlj+OrnqjYejP28/x0Wp5rLSGlxbVG0cBmHYF1YX837qf1HDTj8umPv3uvRJI4L2cyxDJDK0pAwIf92/unHwR8eX6uvixRfv3mu3wWpP5UWnMYUjfn5eH4c+i7KZ/BMa+z2yP4bPlhP9x5W0PPRWv2wgAP5Yn23Qgjxy6fns5L67obLRWoLWWZ6txhCSWRYVmWsj4ffT3NTwF2a15aeRz6V0OfkhAzPN2/R4Z8PoPz5PVhWZvSd/Tdt18PeXpXgaHWvJTr+coA0JoauKqyCDo5gJoCIjMhAxKpKXPcpm9mDYHU1AG3g1TXdebmiGtZ+zRwkFwrI7VWUuyJyBGbq4MLcalViGtrm5uLoFEQt5b6gAzVvVZfVk3M53k5T/cpRVJ3IcbOahvDeLu/W+c2TecPx+cUSafnk1ZvLcR+t3/lNjfVQEwiCFS0AeCSM7mZmaoTMTiSwHffvR3+6Q9bJKJVRW+qZrW1lvuuU0UwY9RD2At7a8pEfddHIVDtu3QYds1WCby7uxUSIQOwtSzSiVrhlA67PuccgGtb3epTW1IaQ+oA+7h7W7Q5B5e2TE+vf/mb7jA+vf/xw8d3Mfa73a4e2/n5ibSclue7N39GyZfzkQC9+i6GQwrH0/nxuDDLeV43p2pEWPISOY3DoXlTNyKZ5/Mw3MXAzeq6NmFOYWhS1au2tiEWxOhqEgK4tWbE6I5NTSgwM6Kpt6oOpRKKqroWh7nmVZhDTLvh7bzMpWQiWepZOLFL6obj8RSDNGtb6yDEzugsMQS1Uksz1ZiCG1StCFqzooETqNaYRCQQwppXEYlRAGAcdufTsVRHdEdrrSI5OvRd7w6lFBEGcjR3QG0ZzMmBkZSx1cYMISZGUccuxtSlUsvnfgQv7ioEYG4vZinmTi+kCdwS6x0ut9maVdzYTZdwZ7y2q7CBS765AV2EOUSEF2ecrcnaxr9IyLwZAeBmQR1T7LuLD9E4jLvdOA7DsNv149h3Q0yp63uJMUggIha5tm1IRNcX6VezpAv0g5vqbIs8uwhGrjYNF0I6wvX3BpvHzWc53oXyAYBbAh2bx8gxpWG8fzyHyCHGOS8sRIAcpNQ4TdM4DLvDXtCsAKeRA3WBUWvoutTtzvPpNC9ZaxeGFFJui7tVPYMTuTZtdzd/FsObp3n16IjBpveubZ7OSI5My7quy4rItboaAlI1fzyeS7Ns0LxEgF5YTasZgjc1QgT0jeSIRg5WzcAZaovo4Npa6SQ8ZwhhcPfpfBbx54e5risJulp0beid9K7uCuKkZa1VWSAFWYveP8xqdncnT8fjOCRwbq1o0/2u89rEoO8iuZlpqVXEQCxXmD4+c4hd6pmgLhMhSIymDQA+vP8oEBCNmKmPEtOuE7GY16Xr+t1hfyp6K5GRz+czkccQalUzrxUAUSSEmIw5hOBIBI6EiEiA9oWDEbxwLz5H+130QpdrHV7a8Stcc+n4X7ps+JND1ufCuUh//oQncrner8DTZn27cVKuDLwXpol/ARtc6/Jim3R5zI1MsoWXfwEWIIAjOQCT9Bxu5vmZwCRFnQEcW20baQHgkiedc+67Ic/r8bgAhdLqulY36gMLcR94syPL1b05sxNSbWbq5NDRxi0xR2IicgsX11gMRGCGBClIJ6xm7hoZxhCqb37SDu6BqSEAQatlCMIScNOwGCIDM1UzBqhqS3ZMPPQhtVobXLzzARiAzInJEKNDIugFmKGqciAAt9bQSAgEUNsKsde29v3oaObWDMDJ21rWp/GuWputZWLhbjTaBFtGHBzdcauSDQS8SsTcr7DQtSwuhJ6tl76Ig+AzVPkntfTFpQsn8guM+ppe/AXQhPDZ3+1iX/SiP7wSQv1yzRcbmMPnp/+i2i4vlPCiK9o2eWIzu5Lhri/O3N1JAACRCBA27ujGPN3oSE2trKWWvM6n5+djzut5Op/m8/Pp/HyeT8t6/3xuhkE4CRO6EKIbM3RdiDFqtQwNakaVDpVjRDBAjiKJsQ9hyRWmspgrAyCDu9dGxClIEDZ0Z
k5BBAAdh9CnFImjOa651ooAuNuNMQ7EXXOpFcgU0SOrE7oHYGJgQHYAQfQtYYsIiBERiLdP1sDBDIm2PDICAPSLnBkJ0DcZ6FWK5r55V7ihKjg6NHJ2BSTZEnUcAZwdcUsxu5JhHbSCOlhFSY4MaOZGiBs3B6x5a05OTJcItqZI4tyAxMk2E05kdmO05hXBDAidZGOZuZm7uzUAAzW/uBg52Ra9vtkVAZC5GTG4G9AVZxJEYCB2ZAcCR9iYaOa+5ftsQUsXriZfChtRzbTqulYzaAaleVOyWgmaaVFv4FrKnEvObhUgjf1rUHoidfdsuqgiEpmHYO7GvBlo40V+B0QbAwuRtq0ZEbGqCouZMiEgWFN3b7XRNRnwAhVtroaXswHAhfx1mRJcPyUEB1W/MKdwc75mtI1w7dRczW07RV2icjAgkZkFCeY+LYtVj5FV16qltRqC5LK8evPN7vV3sZN1mbsulTIDOqASGrt4KW05d3EPGOe8IkXNKg777oaFSlkl8Kubu3Kc/vDjj3GI509zPq/jbpQUw05ODR5/nu4/5P/Fv/7Vu8cfFeswBpf4aOWT6v/hv/7fv/q//OW//f/8v609/dlv/5ysffjx45Tw1WEsjYbEWQ2qCnPT2nfBqs6PR2/U3fQGssyLqh9evULirh8pdiiJQnI3LerogUVQshUi7GJqpZopIoQUwPT88Cl2PQuTsBo5EAuzkJvmpeSpuGOK49P9h2/673reMUArfnd3tw6npdjjTw+RYxdBgjasTpT6bsletTY3RQT34/MsQaIDA5SlZi3AWHKJozhikvg8rade0a3rACCP+7io5blUgxRt6MJS1/3NkDpe1iYoOStTYeKUBCyTt8ihT3Q8m2vd78d8XlXPv/7Lb/75/gjkSyl9Hx2UWI7r2qdw2PWb0/xacooYCNqqqG6Ky0nlK0wBuzGe1qM2uqhOzQDAXNURGzE6eQMANcjaNn+iplZrM/O6RYkCql3YRaqm5o5oZmoGhHCx54TtTL6tgnG384rF1368Y6TS2vH87M/rt3e/ZAkhktaZXa1VAKQwIJHlCfKky0SQAFeAAsM3GHd+XgRBc/XQWplDfGXleZ2OYXwVJJQ8Q4M43p2X3HUsrdSn9+X8XltL401dtdXzzXBz/9OPel5+9Vd/293cmofl4ccgIWKCbG6Lc+Jw51ls+uN0/gMTQl1EBARjPxr22SOGHULY8EK0GRGjRAD3lrWZkIBW1RVxA3cEKW6JQW7iWr0xm7b1DHV2rW6otKMwhpTKfNZm2opjisN+uBnbeor9Qct5XZ4hn009hNRaQUZzt7xKSMWgzOuyls2Fblkrhi704eu3Xzk4CZ9Pp1zrOhfQyqxqlUkQEJBZYnACTrJ7zeMdWiYmgyZRSPpxfIUdVWjcDSZAbrELxaoSQqBAbK0el6WKNOfzVJHs+Xha1Z5qY/AuSezHVutc5hLLQW6I9292r9t0Pp8fj6cH2ziHXVerHfZjH/r747uxH2AKXlOQTi2saOjQp2g0rtp+vD83CGs+fXv7y9qmVk3x/Hz6sNv/a6G7/DgnGkp9DrhL/DbJMGP9sJ6r6v7mjSpM5Qy2xG635HJzs18b3d692u/4d7//d6qrMWSrY0+n8w9n2zAQ9v1hAQ5pZOD9/luw0zSfjNistOZpN3CUwpaGXhqfHn642R90d/eHH+5ZYj+MlzOXAwlvSRCBGJHdq6ulGNWaOYgEMDBrbl5bERYkiRJLW4VjbTmIEEstM2CMMTUzbYqAphZCVGtMRCgGWOtKiIFFmM2VBKABKuRcAkIXWSIT0f3pBAZPz/fHT/cd7W6Gw/3HH7WWbtiPQbTWDx/flyWb4n7X3d3G5XScVsJu6Pv9/dPPh924P9w6okAAgBB9Ws7YJkDM81I2EL/qfF61tK9f7X/4eDSS0EktrbZG6iIcxrQPwynXX377i1e7OC+fOjPu03F+fnWzlyDNdSqTiLA6QTArtawdoc4AoaMES56SAGMIXRe6rswLqLU8eVlqfrc+vS9V+2H31eubBeK8ltIfvv5N16bn+fQoiV+96T+dHr/71df/4R/efZQPu5v+1es3nz4+E0Y5ZqgtdHF9PjVQdSu1bKcaaLVidYdhPxxPD2O/5xiUtHm55PWkXcK+1Eq2WKso1Nom362ovJnaNlPXSsTF67gfdsPO3Yp6XjNgk06EWVsxh/NUOturs7k0lf3NG1l5nicsa/Mau24b/KtBKTkKmKmZMaH7FrVsQGRq1fzu9lZr1VZEeFkKuNdatTQiNLWcCwDUVlPXUZBcVnDdeh41b1qZhAhQMIWhttUdVMGsobATkqEIu2rWVYSrQ64NmGIKn1vaTbew5Zy9jMA3rYI5Alx8ey7Db0ciR9hAostNATfS/QYDvbSp29QSEfjSk1yIPFuKGzNtifVBJKQQQui61HVdP/TDMO7Gcbffj+M47sa+H/qhT10fYgwhShDizamakOgiY7u2PQ7ojmiKcOENvtAv8DN1yK//VLz829wcYDNsohdu0Zc+I3h1iwUgAiSk1AHS9HyCsvq00JzFW4xJTW/S7u3uNnWpITDBeBOQWa0hYSsUANv0hNiQKVAcusO8HmMf9+M+gDw83U9zbtZ+f/p7b4W4N6esjazq+ai5QQhLXXNRSX0IoVR7OK4cQoyh4eAMUazOc3EruTKhW4mRqwKREUHfJTUzQwIzaAG51qrknLo51yFKRvvq66+9zqEL63I2sNxaTxIlbFSB2tS8agN1V6u5mbg01/un9Vzo7Zu71iqjt+xVl2nKgDBVWJYyxJiiDX2Moe6GSOw5FwCPzE5wzquFEPpOYtS51KWCyM3dDszzks/HuT09O+nd6ztb17W4ux/evrrZDxKpS6JWWjOtxbS5q6sBQWBJw9ClDn3gGC/h8Zug4Eu22LV04GIHdKX0vLA//pRAhy++VlejdsStehxgE5JsdjlwQSzhKmeEK/nkM2j0Qj+5ltfLL3GTeV7IIJc7XUhIF4LT1ebEXxRtCHDFf+CKfTkgkgzE4lrQ1Vsjs8CEpozGlOY6gTdiDBGXx2U+r5F7li1kxcVMCDCQOk2lObEIaqtAF37NgIDacvMgwuYOHgk2U14gcABiAtcglAICsdeWiCUAIzUTU0vuWIqSBEJEAkYFZfQYpKjVpoTaJR4CbSk3CBAiheAFDYzaJb3OEN23GbVaJBuTMFGuuYtihIaO5MyMjEWXwQcCBickCsLlnF16plDXJ9OFwMEdUUiYEFWBUcANzBgF0GmjUV4EZS8F5S81gRfI7wWTxC9wnquj1fU+L/ZGF2bY52sAwOCLG2/Ms62Nve5PAO74QlOCL9lFnyHNFyzz5fIXjwmEuBkV46YP3KxWtuyzbYu9xgL6VY+L25YJCBdQwDZjeDertS3r+vh0vH94eHx4Oi/rus7PD8/3x+nT0/m0rlWBkVJgIYqMakYsjFKzo1lE7W4GBopCIRILEwszM3qr5swWhKpOa7EG6Cw9B+EYiQiAKYgk4UDB2hoChy44Qi0NGpoCS3TunJM5NnXQRgFYQJhTkNh10o0c
IrEQsbAQIRIRMyASAAISXb7p8EqJvXwPqoErICPoZ4B4A+3Mr/8raEM1QndshODQmBkMCRFIgGXzMXJTtM1qTje3dTAgYqerkNYdWvOmWisxqilJvFAdN5sr1o0UhhLAHMhBFQARDNS2yQq6k6rDVXW2xVxcqTjgFzP3KzZtbgoIaHVTcwMCMENIQGKOUJu2sr1m38xhL7AkEyFs5B6EZmZqVdVc15JzWavCmpu1hlBzXRRgXpa8zmVZ87xaVmbp0rgfASi0p8mRitpK4gC5VQVoqgjILxs4Ign7RnTWzRHLkQjIQxBiQQ7cJaLPMyH4EirakHskAncEZEIDBUTaUuABEUEvGzWi8/YE5opAmxsMACAxAruWzQAViYDMUZVizeu6FpGUUso1r/OCZgB6c9N/+91rji1XLbV2KagZISGywgbMV8Cg1IB97G5F1driBsDeagZv7DQdT0/vnsd4+O5X3316/0/jzY5YSinLcT4e8zSt+7vXu5G7GZEA1FotPfLz/f1/99/+t/+7/+3/+l/91Tf//t//w//9//v7vh9ffUUJFBugwlraCjh2HQKttWKU2lQQwVTRglDz1l3oZoBIjhSH0YzUrJWKoNrauk5lLU7cWjPwppq62JaWi3b9WL3l+bxNcppaM62qLFxcW7NlmoPEeHc4PjzXrOON9+NQ2uIMIpi6rjpE4Xl+hNIC9jFKqRXNWFxda65YzQFqrePQRQo5Zze/uRmcPZ9WrxSTlMV2KR6fl9t9t+/HskyKhE7e8NPHp24Xy5K1onDEqlGCN6gtn7KmfXIirZmDtzIFuZlPc6uNnBFy4obV+12/rvNh7CR0XokNp2MVoZjEHbUWdUxdVHOvFRCPD+d+lH6kPqVTadoM3Ju2DdNnAPCNmgeEWM2rNmtWa2tVzaG2tuG0WyO07UZmhtchMW++YnrJsuHtVA0AAKW06PHu8BWGdDo/TudpP9xKv0tDKmuLiQnBhR089L1eAFP1hshJuhu1ajHx7qumTmxtXUIa3GXT6KzzJzdHbIBC5oa4LBMRIWiZnp5/+sFVu92rkMJ8/6S1PTzdM4bb775LhzetPeX1mU2FD2qVOBoEprv1+VTmR20f+8PB0dSbAZGTeqpNPEYhQa/ozkGIgjWzZoBA3HF03yB5Z4oRKJqRmZlVcEYkRmqeATNgrW2NjEh9w640WB+O8+PH88MHQs3zalWHN3f9eAAIqd9JGCgCtBUDBog4dK1pMy9VwaHVZgqGoC27h5v9NxKptjafnwG95gwSwE1CcEd35S6lMLSmYNyNO+r23u2Lr1bOCG1ezl3qgOrHn+5ZRAt2t3ehD6UWrSQphBAjj1NbsNY+BQPrIxFCl2KtCE3VFiUP0oSpTk83PQ+H3pYqcHht3aM/8W5Y9JxNkwSDZFnWknJZGF5BaWBETI39/fR+ac20RAmhO7D7z09/uA230qAeP+y4/zh9iKF/i9/gspZst8Or6fwEPMbOAc73p8djO58wJOy6yraczeBmuPFay3JU8kMfltPH9VxqhgrgIQ7jL0o+Bi53h5v7p/elnLrd0HMf/K1Db1MizsfTu+J26Hb9MOSqdZ6CIGP86tXNenqwdg5d36d9EJHuOnIlUFVrGkScvLQ5cmRmNc2tsYjXykTMDAgkQdXRdWozgjtUNQdoaNbHzoGQpbYCZMJBtSFR4FBqJjRE7LvezRjBvPVdZ67LMrkqp9TtBmee53VZznVVV5unIxFx5J/f/WEIsjscgOLD/YeHj+9NdezTuDs8nZ9/fHpvqt3hoC3nBQ/jjpyWZRGRgnp+/IlpDbIvpZKwgwKhI+bSzNquo//Zv/gV/Lvff/9QzAAAYuKEeLPbLdOKkL/dDfvEJU8DigRotbw+3DFCihG8xiAphkBUy9r1vbfqqnc3h9O6Ysv7oUNnRwro4o2jOIAbeMC0G2s563q8f/qHf/r3T19/91c3r3/RsOXSWJJIms9TCj4MN4/3p9e7V4jkTvNa+7FDpHE3DLf7eTrnUk/nybUxmJuFEJoqoalmhGEc9lpKmwNh1Jw5bDYI0AyHfjydFkRiZHUFMAJ2MzMjIiG+qqu85uWkhaUb928iLTnPW2uEaCyho868lXoedrfEUuYFkGNMta13+5tlqaYrIxNFI1UtRI6utVlea9ePIYbNhWFzkRFilrQsMyGaeW01dVFVAZCECag1T30XADl0bqYtuzZtVc20VhauawFzRGitdX1njdTBN62ybdMszHn95qtfrGVep2WZ4E/+247+6GZwJb9/7jzhKo0BcCI0ty3NGnELXL4k3hO+qHRgc4Z+6cdxg4jcRQgBtzO/hBCCxBi7voup67tuHIdhHIaL0mw3jGPX913fpdRJCJs6kJmJeWtdaLNa2JRiL43aNtC7OCttHfllMPgFVLQxAsjANhncFVDaXi3BFTzYuFYbEnbFu5yJLHY8HqQfbm5v60z5tDr6/u2bw82grZ3uz4HdyW+/+RasLaczOCAnSgEGN0QzT8S7OK6lNseb/nX1cpyWeT6FGOPYRfCSazcOguHT/SPpZgDDkvj++SgCr2/f/OP75f3xuVnDGFsrrj4MSRAjmalhVWJnJgzMItXanGskktJiDMu6xiApCCG48eYBighrXg3TKPT4eHTXmgt1IfWDl0ZBGK25rbk0LSEO0zJtw41c6rSW51yrdM0QrQ273f3TXIueG8zFz0/z0EU8rUlo6HMkHsMqBLELBBAFYx8lpZIbD92rt79Iu8N+f/vx3Q8ErYEOu4jGOS+Oyg5jiFUbIs2n081XI7qtyxKE3XVdc54n1WpaWajvOtVMtmdGIgCJSIwv3IrP4ptLwV/9p6990UtNXZbIy2q5rgj/gsaDfwr4IMDFTWTzcfkChv3cqOMX5BHfCg6/zDD/TFPZhMtOG91vq8UvWv0vkYHPSjU3J6jVXd0xKrpq01ZrKewKFNS3FCVTdXVrACnCT58eCjKSc6AtO4kJ+iGeTospgLnRhTrY1BxIANwsgVUHAotI6gCmUcgBmRm2GETA2qwyHfouRLHWQsfCbA7naRIMyFgAkzA6NYcQuNWmtTJT1wdBd3MRMEBVLwaDo7iBORFvciwGD44VEBQS+8CeEqtVVxeiKNJMY+DScvBImCIM6q7VEhN7UGvOaES1zlrPicANUYKrgjNe1be66YYuLDJHpM8IzWcbrOvPF2hoq4zLz/6iGbxwfRC+wJPgql387C66VcIXSNCFOvQCDMEXWOZLdX0BC71c+JLXdr32M5C07XFmG2gBm4j4ymNCv7A2NrWhu5sjAAU3ByKzBhu5DdERcqvP5/OPP7/7/g8/vP/0kAGqNlumac3nZc2lqHsD0HZJGZgWOS/5vKxvdv0v39zc3Yxvbvrb293d7RhjCCE6MxOhtlYtrgXXta2lmVcEBAxEXQwhYEwiISBe8t6idMwEhG4bwQUZKYYYYgwxADOJxJiGLqUUUt93XSexo5BIwmcaImyEEXLfen+/GEsT4eZisOG2AL7lqLr6y17xEoBH7rjFF+tF52XNwGCTAysCCiBA7ADNN891cDd
Dc9AKtUCrgAWYMcjGbNqobu4NrG1cpC2EFNwA3c3IGcBAwlYPboaoaAqKgFcLW7fNVXpjGNEFFyJ0cCRAAQBwBUdw3fhAl+3yQiQjFDIhIwIDMCc1s2ZbmBrSZk7kAEiXUFFmciJVZ2ZEIAKz1lorea0511zO87o0naZ5WZfTcSpVWTAwCFhetRkyhxQJmgKA+oWVycLaFC+rbWM/g22hH9voy01IhFGYYkjCgUiQhBBV/9SriJgu3w9+iR5AIEYCvKTPbrv1JtWELfTt8pyOuNE7CQA2QgKYASNsztjk7tDaui4rYSBmRKylCfFcZiI9HF7f3Q0O1YyiBHNzq0zoYEQE7oQYQp8SNa9E1NZnJkoSwLLETQ5Ynp4eYtoP4+7j/Q/LWsBtN4x5XaqV/evh/X//I4c4n4++5OBwOOwfjqd9F9ep/Z/+j//Xru/+q//lv/xX/+V/9nf/1dN////6d9////7jr97enN//qCjLarFPFKIwBkdh1lpDoG4XHStLL11/ePOa0zDc3nAMBAiq2gwQQ4xgdXk+lrUgBzfSpmYFVOuqWovEQIK+5i7QNM3MRCE020Yzrq3UvEaOjEFLq6bzNEcZOPLh7au69M9TvXs7vv/H99/84u2nh3vhoSnq5j4Lrm5VWwxk5urKLFmhtpqYKLAjIcEu7jHX735x14VTW1o523HR4/ppf3NzPp6Gbn88T+OYwOV8LEEEYBr249ANwrDm1hrEkau2+Xntul2Ke0RyMw4BytJ1vB+Gp/eP80QphtNSesK+72pxIV5zi7GLUhXbaT45DQRszZipOZ/OpTbrD7vqC5E4OHNwNfe2wfdugIR6EV2TalN1A9INvTcz96rW9OK85hebCEAgAtjI8IAbd5K28z0ApCFyHSX1a821VmbUPPm4CxLKXFBjnp6Qobt5o0rEbOtZQEEc1Im7ujYevsZwo3nBthAKQrRqbqB5LstZwl7XyrueGFotcRCy2Y6fpk8/llzHw522Oi2ncp7meR3f/OL1t79Sa7l+yE8/3r5569i3smLoqYtJ6f7DP+SzhS6Fbq+ttbaYlnT4GmUoLoCRvIflyXXibhREMyIm2zijWk2re0XuqH/lHLZJOJSFCaitVhbTFerSWnUTia+0LFaX+Typ+fnh8eHdu7yeta55XoncPz2QY4zp2z//81ff/VplZ8AKtWYkUK3ZFRCYwBLLcT03lITx27e/qZqnp6fWzilEcxQkQoqHvpl6pRgjErqZSJ/2NzH23O2UwGqZNQ9DiHF8+viwfHpqpd29vk39aOV4zrB78waQ11KoKZiH1I9dFxjm9en4+G6UYMC/+OrVcVnfn07TOqHhRIgC4OAldZb28evpfN/K4o5ROjVP4w6kpxVErREc9r3p87mdY4LsDcMuGlY7kXAn3VqWMaZcjkSYlxVxAOcYuYKfz/eH7lXqujoDsu/33fF8dmzelk4sEUBtr3bB1ibU1lLVaykLeOaY8jwNO5nUlauuZzvfi4ivJza96YY8Lbks0Me3w1fT07NHEtzvXh/qdFwNXFEczNxgqe2sUJu1OpXA49hxw3Y9JVGtLQojUeDI1hFQrhOxDP1uLUskBiQkUDeW2LzVUgMLEtRcY9+7NgJ0g1yXECI4EIC2YgoN3NwDB9MaJJpboLiLu7U9m0NTQ+QuhSgETA8PzwS8Lpb6tJYTEXW7fp2PYx+F6Ph4P52XaZli4Jvb4fHh+fcf3yMKOBCHPR/GfSekSH4+nUvR6TR13S7gIJDc0DSfznOQQCzzMlFgzfj+3XPo05+//crt4WHOz9lZWZ1I0u1Nl0S+++7rvJ6FNgbiKg4xdERaWxUi5n6IXWuLmUVCiMFqXsoDOuXZIxoCVaWysvRdSN0wdEQyzecGLXb7/atxrK+X6Xef3v3x8f277/7ir8ZX+6yQ0u7hp59Xba+/eW0H//73PzdPu1c3nfC6LlF83PVDCuvHrOoIXtqFVGqATAIArZZpnmNIanxe1m9uXksI4Apm1mqQPoSd6lLLisQoVJomSe6GYMBotrnA4uaUqAZlXlp7vxv2b15/O+fFtC3LktcSQ6BIWhZVYekY+XRcOTEa5jwjUy0LITFGIWvmIsmgttZilFYWQA5xTCGgWwgBiVUbsiChubWmzLyJOVI/WC0E5s0N1BtU9dSNBFZzNrDVFxJw1ZxziEGYSy5MEgKXWsw9dWkLxFRrT88PXd8Ruv7pPHlj2bxoWi6SMaTt3G+2kVJRL5QKvE7ON4TmSrtAQH/J7LlYebyANMREiCIiwpsRdTcMXZeGfhh2uy3ObBz7ru+HYez7vu/71KUYk4QgIsS8nalwIx1sHRi+zOivgBHgZiYKX4RPXab7Lz3XpcEiR8TtNIgAV4MYBLAtWJfo4jMKtJEzLjdEdCQAczD3dtgPxzLxIX37679Z1HPNJPXrPz/EQNo0RSlLDQfaxXGa7VSyuitSGPYOabbcBLVlaDZ2+7m1w92vYuqXdc51ubvr8/zw/PD/J+u/fi3bsvRObJhpltnmmDDXZlZmZWVVkcWmabLZLUgUQDSoh4bQgtBA/xX6T/Qk6UHPetSLBAlQS2oKDZItkewiWawqVqXPa8Meu80y04wx9LD2PhEpBZC4ceOes8/O2HPNtcY3f9/37RDLmKcypTRPOZXtxSbn9Po4/OrusC8QENjynJM3KqQ6l+0mXmz71vnD3X3JErtGqjTRGeCsGIGRAgexpVAZwDtfTYhQgUut3WYDU5Fp2B8PlmS12orqjAXVRIupgBBjYwUdRdGKDlXK/WEoxJveX6ydanz17g6Mfes263Y45t3joMjMtQkMiEPK02TO0dqYSaNQsbwCBeA85fdjbVfbTz7/8id/6x/ev/7mZvfu/cMtTIKGgDbpsc7ZtV0ahnoYu0/ZeWZgFSm5lDTneZKazUSFHZOXKKqixgZqhiqAiMQfM0WndXGiAE6LH5+0GjvxPHgWjc7Qz0k+QltK0/W03k5Gxg9L7TSTP03viCen2/kFThrR08t+8Ah9IJrszKksCMfpdc5fsrwFtLPwgKxaARydOk4AUp0evu/WXGpx4Ig0q2GtTIbopBqYxbhiksMwGpAjLgUMjEmJGJGRuWYRM0Wsqh7RO8KyMAzonAsqi4KFgJGZUJfsGDEQtcCcqiBBJ9hEUiZiI1JHcdWtQA1yBhPvfCCXRACUGUOICBQCWy2IPM4pFShKaTTP2Lgm+TlnadxpCmfPi0WmcXDZxVQyIATn2LHzgAACSuxNhVxrzkupwTdEaLUgoAub4PtiKDKbZqviAtIiLp4IA0U1YDyhaYs0tEgwH8NfT0tq+djtRD191IgG56UAv7Omnr77vBg+/PkHVeejTx4XnmhZAXa2t330FvCsXyKc3Ywf0KPfvQsAnQOYFsx2MRKDmagxM5gtDeSEJLU6x2AgVXDxK9iyVZJKEZGa8363v717+Oqbb9/f3o21mmoTXE5zKlUWf5xproJMYFhEp1pD42LYvHx+9dmL7fOrVdM2TXRNDN55JQQjLZldNaKplICuopJbAss4Rhe988F7tyTGIJgFF0
wEmHOpRITAsYld37d9G5vYhrbr2raNbdfEtuPQuBCQHLpwOvNYKulNcaHDFrVoqQBExFPJJi7zP+Kp2N5MTjFhCqCAUkEFpaIuPWFipiAVrMKir6EoALJfODFyztgBAmgGrSAFVEGrSgFEVETyANEAcamCUFGppkiGS0ISGhkqIqgAnnyQS/e7h1rg5FSVRdwCFTNBqx9V6SHAUvHmjBQXSxUYaEU0EAIGNDZTJA/EumQzaQFTq7OU2WoGPe2EiIzkmJjZLY8GasBATATeNW3DjhAVx5ExTJAXq12d0nQ4PDzsplqPWeZSgoPGeVAxNRGda0Em750D9MEtxeBKy6djqgKGZli1VgFgNFVekiUAmBwQI7sq1gAQsw8nzto9/RXYKTRqOehaTKS6iEZu2QCWhwskM4VTIt3JH7SkeKupqjlkxwGXtG8gMyMFrIIqCMzMaZ7zXAAUDVfddrO6MrMyzaFv1LRI8T4wqapIkdg0jghBiRVrUSmqGZDFrJbatYwI+4fdar3xbj0eD3MWpDa2m3lK97cHJdGMpWiuVhDW69U3b29X3dozz3PtV62K/F//T//NX/27f/Nf/C/+iy//3n/y+7//5c/+/Wd/9d//sxjyl3/4pSmNuzGVkidBF6TkEBFJveda8jTn0G8EqQksMou2Mg8+deg737Sl5GG/A4RcC5j40Drnc0q1ZE2CCCnrPI1t36c8H3cHMc6lLKq6SkW0EFqtNo0FGE2l6VZFLQAJOBdbwqMjcgTiMGVdNsea5ERG1kSgiERsWsQ5dg6hmvfOBdf27f64K0JX2yB+TibrdUTzwzR3bVumsu3W4zjWnB1jGeerq36aU9e3ScBS8WSiNs6jDOrb6HxjyGpFSwXmIqCK81wuLpp0DN45IuyCJ8e7w76NHSAB65B3CBYbf3lxNY0TOacktYpkAdMqFZgZKKdURRFRQKqKIvCCAiGoahU1gyX+SxVEDABFVEQZyRhFjQjNkJkRkHAxDiAAOM+LcZb96SowRUAdx904TSXn2Lh5P15fPjfVpnPMOk4Pm6uXVQRDo5olZ8cmc4LYYreWeRfatsqRrFidkRygP+4fQn9hkjhGH1r0nQEeD0PsW6MC6SE/fFfGY7/eOsd5GsbHh8Pu8fKzn1z+4KcAk+4ftJbQXiYJx3HcXD6Tio/3h9u7N12/4U3Dzh3396TGzsXmQqBNFRS5cT54q5KIUaXWkgw8+ghAooKowMj+UjFUanTZISHTUkRpSeqAUBFqbK/SnE2LiNY85/FoiHMaD8MRQOaxpKkqmCTtO5/L8euf/3J/GNbPnjddDI7Q+ZqmnAsxFa0553EuZh5cw+QPh7e5HAGsW60ch5xrbGIpWqXiwkc6im3HSqHduH5jSMUkT5mx9n0/Hfbf/Pa7cZq5azeb5uHhPe2974Jr+vpoobuKXYfqBIAVEDAwb31LLpvCbj9zVSTXcLh8vn7//qu4uvLB16NNtSN1s1RtGgiX94dbNVaF6bEkq22MoBBNqYh6smB341vv1pvYQ4QGMadx1m9KFaVaXF1tL4eRXvZfYtrd7v96/fxFlmlS512Ertd5//iwn+ooVERrRHx8/J43v1d5o2U61pGbdePaIaWM4BnRqiOrNRM0DiO3myTjcTpM8wAxJOEqBepQcQWci85qVUsq07R7fP83fvr3hrvCvi1ms+H+cH+x6YE5Dcc596vLq/O9wELwS9YKnfzHepL4a2H0BlaleHKErlZRM2YuKmhGxAYQfMzzrKiePSpULV3TpZQQdRFtUy0q1aET0QJlrg+qxZETBfaRnS9W82ECMWSraFqmNCUTzCUH9lrTPB3H8fi4OzDzKHZ3d384HlVss123TfS+SXneHx6DN5W5cRG5iY0rltZ9c5gOX3//7bPtxXKQXFWnaXa+fThMxXnlBoI+f06rOb/nvZgaYS25JHn2+aeH4yOqGWqaLTKQc0lKMBBBFUgpT2DEJiaHw86hikq6fQxt13ZtymPb9FByqUqNTxXSMTctKwYmUHCg1Vz84qd/Mh8eHm5v7t6/W1+aW1/E9cWLL7u7m9f3u916s/nsy0++vxt/8/3bv/3THzeGdd6hAgGvN5v01esq6rxb2n6lCpixYzOcx5l6UjNEqiqxafI8IVkt1TuPC5mOLFIRuPFB9NTwAUoGKFIYHAKAEXn2nqvIfv+Ya/G+aULvVq3UUiWXkqXWx7uHpltfXjwvxYlMoV0OVxWJ0bBIllKIsBbBxRLPy7GU1TTVSYkRk7eisY0GVouw86BachZVYh73NTTBRW8qhAQIltW05pRqFSDwziMBsrOEZihq7F0uGogdenWmAIRkhqlUjHB1efnu9evfGZTPh9hPo8N5DlE40RaLvwXsHGb9dAp+hoZOrd52HjYQgJcnJ0J2TIjeu6WnrGmXHKKu71d937dd1/frtuvarm1ijE0TY/QheuddOItEZ3b7/MMWYOI8ip3eE51n8ZNVYjlVtqfHvCUx5jxBLYMXfSCnQE8ZrR8mKDU98/ILC2AnuAPMAJ1z7ap/e/Mag++7DnyMyKt4FSOn3VE115o5CRgyxCHJq/s3SfLzT364WT2fp+N4fEySQliHsA0O0zQCoZbj/f5WzAPxQxrTcJz3U67zNB4a4G69Glubwe1NfvPVzWOCq+urBu3y+mKapsaHNE8jzqPiYaRA3aq//KKH737zFy+vu+PusL56eXuYZYKt1lUbq+aSimNWxyJVDUURDana+HA3j1PXrzTwmASdxK4hpONeHXCpNtXsYyDDlECKHOeK1Fyvupfbq/vH+6pCGJ4/v3r17iYV1zWrxuXjcXQIxHR5fYXHXRpHEzgmdI6OyWjEcXbRUdNFp1Qm+P77cnPYrpvu8tkXWMDa6ogR8jQdUcg56vsVlHK8u+k2lyE0peo8jbXMBlU0E/EHDsPOH53pYoiy5bL43UnZnpgigKfYd9Mn2uykgixzNCEAnmunlsvlCWQ7TeRo+lRgtkAZgB+q+T4OuPlgNwIw/eBFOlvVzpqRATDh01qFj6SF04S/yJ1mYHW5RkTABDzJ17/8l+/f/fd/9Lf+vlQtWfo+VoFcZkCdy8SRSjL2NOVjLjmEJnBIc229U8Ka1Kqyggk00Q8qRSyConEg6IjGWiNRhkoGEaEABESvtnhVdQkuQW08g9icMiGF4PrWSTUx7RuPhuwWBwuJaNs4hToMasBE/lhKKTDnavWksE2z5sYaz+jYGXg0FQGySdWTmepFCAvdwUBtoG3rp5QQdNU1Sx85mqS8dyFUrZ6joE11ul6/EDNFQMI6D+JrdKwqaAymQEu9lyF/xH7ZiSg5fRpnlfCJxHzSfgwAQM/4GXz4Qztxjeffw8lb9KR3P23P598+CYi/ywXBE0f5YZ87C5xP2W3LQvldxWpZoYsOgkjnRHYABRNTBUUDZgZAM1miNJHIVAjO5xhaFxJJStFSa04ppWkeU56H6TjOEwCOA6maLlV3eoq5W3wPhto24eXF6sefP/vkctu3rSFXgTQX7x2bmABRNGAFVSPHkUmbEBuiGCMxd32MIZiaI6pIV
cQFR1gFrdSqgEqOfYht263bto3r9apr+7ZrQxND36EL6OIiAMHJhWwAYLIkJgOd1AM2MEQGQkRG4gWxPe0fVk4fglRcTGIKWheGSFUq6AkpAq2mBVTOy0NB5OQyqoguABqAmBQAOYlKWlQNGbGYIQF5OxnbZGGHTzYxZlNFVTNFopOGKQIKgAXAABcQp8ISTSgVTGARsE6N9gy4gEu6CM/LQgBDMFqaChEZFYwIFIEQNJtWLVVzqXlWMVUFdIRIjpGc4cICEzECsBl5x66yIQDxwucldqgA4ErFUiF6T4iqteYiUpJCyQVMHZGZAtOyjRI7QgOiRctf/ICiSoi25PvigmctWS4EYMzMhAbmgltYp2VNwpNUtHDQiKQmiHjC8k81dbrIgwAqSxqRfaA8zQCMAM+9HrgcOiIo+OCJ2aTWXGtVUCAEH9xUU87FREF03TZ919WqgVu2UMsOqDARGoGSCzE2LYKATKJlno4s1vgoDGrg21YtzcPRijHjeNxVqTlL430e87u3tyWVtncIdd3hMeesCb2023Y/HzVL6zzm6aJvlPk337393/7v/w/PP/t//M//y//qj//kD198evX2F3+5Xvv7+93FyudpuPzyi1/84rvj/YiZQaRQKiUzeZ1KHXNxrKnwGmIXtSYVdLFBCi40xOBTTqPUnAqxlmSanfPzPKuF2HVS5O7NPTEbqEolx4w2pzwMBzY2tfWmIU8+8NWz66QmCmnMBhi7rs7p4vr5+++PTB4dROJhmseclA0hIxQptBQ0zjlRrYHdfhJfalHJJfnIf/DHn02HW52gVMjz3DVumMc+4nGaXmwvqG/vdsWHZpAQWvQBXIMpH+daV902tmEJZydnbI4ATW2eU63Wb9r94fD5ly/G3X46VsbA5Oacuy6C1Ri9M7+f5oa9VMxqjWunlIg5+phFlrK8NBcAMiLJQmRyiilDA0FDMBURg1NVoqrVqlUVzxeJ6JJ+wyqKTI5p6aRzS+UHwjnVgZ9qwq3YnA4pJ1OIMfgQrj958ezyc5XKHo7D4JjYBfQBJENJPnaWdpozuVaFqb0CjFYykzMXUYvU0TuPWuZ5321exNClkqRMznPbehnflsdvjndvfLwk4jTsddod7t9eff4Hl7/306pZh9dMrr1+MWUp2HTPXpS6v3t3NxzSav1JF+Kc73eHnQfH1CJ7115PFT1DbDdQdjLcglSKa+AGOZBWlFSzGBDFxuLGwoWhgRmbksxWBkxHVDWpFFZALSqbVpN3eT6kw6PVyVhv39ze3R2T4HSY9nc7MHAtBU9KFQ1EyuOrb6f9Td9w34XYNDXNRSSb1pSPu2PNwMRMVuSQ5l0be0BGA1VhH4CcUCbHTQhaJDSta7uuWWNcqWtqGn3kWJr5Yf/q9fuvv/v2+1c3w5gORbeb+MWnV13jVOerZ8+7YVptJvfymWEj3BA4EQXPF5vu08tn37x/O/tuqJCOR1U5yMH7MKV5sgzcaICLzVWo/n483gyHx7mw66f5Xiqs11fScJmhU4KcrEJDsZCauJJGT6rARC1Zg0TiSgWL/lOM3TyJ1hrdBg2zDLUedQ7blh3AoWRuHBvWo4+0ftH20S7ZnpsdiRxhr2yS75gtxvVuvO9jv+6/ZPCqWiQXKcCxaYgIBcr+eFOKYpnM6OrlpR7kbsgu67q93O/fouQYtyruYX93cfUFIfveTfMDoq9zehoMgJCQzOw47gk1hNh6DuynUs1QavUuMDECqggxcfCpZu+8VMkpKRM78IsDG0JEJlTveeHzc6nOB3ZOVdDQoIpY8H4Z+AFM2XLKDrjpNg+H+81qU8eBGNFLg5RTvn1/m+aU5ik2vOri96/fPTweYtN1a4eBCtVx2OFRCWGUsl11VXMp43oVt9vtq1dfATSOV9Xc4zj0LR2P42q9+e71/fvj/DCXYjxMJTYxgDjvLjf9/XEcsnz58urHP/7y1dtXwZGkPA4jtoHMiqlq7fo2Rk+oc5lXbWycm4c01eScH3ZjzFJJBDSbMSg7nw5zXD03x1ALs2PyZlpFxIpzznWrT36vGXZ30/hw/ewqldxdtP32x+/e3t3e7rq+7dfr3757/f273U9/9IMyhcdjQnSlAiB678REqgIQOVIVxCVdkYFPLVHT8TG2TVFhH9KUay3kHYXW972VWQQZHVgxBXKe2ddaiN0isWsVgaJQ0QUDLSWXlCXWpt/22+fDsO87N+zvp3lWyykffbPxwofDPTE1fYvMJc0EgJ5rFa0pl+Rj1Fq6bqVVVOs0Ds55pixFUh5qrUAgKiDLASC56E1sHol8DI1v2zalbHpqGfaBhuHoQ6jVHFNoOiaa01hVl76OZ5fP9sdjqVlNfYjkajUF9t12K/XE1p2HhY8Ekie3zQkXOhu2wE7lZaanyQZRl8PyU04lAp6esQmRl7xq51xwwfsQY9u2bdeuVqu+71erVbfUmXV913WxaUIMwUfnnfN+ab1fdFs6W6eX96F61nf4ZHw7v3uEk62D4JytDWdPEQIttZgfUkSevtkMTqfET9PVqUL6FEBxnueWFzRQNDS1OteH2wdQuNw+f/H5pw+HRzQkJE8mYDWlzlHX9Xc38zCM9/uhXT273Gx9t7q7f8SaOtf2YU3s98N+X2apVbWiaQhNH7uSh93j45wSu9Cyv7jeujn/9tXD61lf3z88+/RHP/6bfyC//VVKY9+6yK1gqfMMQM9fXKJrX93txzQdZQC3/s/+y//61S/+zeqy3j3supZjswLNc0lX283jMAASOd96byjMjM6FtU8yhJbaVfPq/f3zy6s0a9d2pIOLkQ2HNAiSimkpxzkj01BRjJsE37+9TTUnQWb/9bvDfsaZ4B//g//xxTd/+W//wy9q0cOQv357t3H5i6uLnLKiOeYqYka5mmrhhlpITMYcJc83h92mCat1N6ZU5rkL8PJi+/71DQCkmlbbjSqMh6PEUmupJdU8my3GcnLOex+InCNeHFsESHA2jHzw95zQC8SlYdngHAoMyy0Cn2C6s86zzNn6ZG48XTRLUeCp3u+sotqJSLJzwvvy3eeUI1gMIGc85Hyy/zTF21kRXWA9UztfnOfJ8KQWnV4TgU4GOQSTKmIl6avXf/3NV/9i23IZHlGxKiN0hBGIxBDMIS8Fx5YnAYCGsY3+YT9u1r4NfD8OXRePKQEhkTl0YyqVQBEahwHqDFDUmDESOQVVBFLnSFVaz6B1RkCExvFS3puyKyJd633wDbNIKbU2TZAkCAaOXrx88TANs0wqMqbxWAQFnbEDMlHH2kbz0YraUiWvqtE7BaAqDPqsdRFtyOKdV61t4xjVkaJ3q8ZXlYLOo1NRZmRG59iqGGJsInsvImTi/endsgtgZ6la9QRlPOk354/1PFQ/bSWL1nO2lp2QxcVveP7E4VQQAE971kkFX4SfjzREPKFN55/8sczzpHB++MqPdCT4CIADeKKYfve7AYCZAJGI6NSbtkQXEQDSUtG1/JRThZ+ZKOFJiZBaDRQIRQ2QapGSsmoFMmJDVO9omgsC2xm1euJARYwRYuRPnm0+v95cdY1n
PBzH4TAy47qNeUx970Ng5wWAtAiIMlHXRuyIHMUYQtc0XUfkcjJJyZCAlVDVUNTMmNkTMYW262LftuvNdrNZxSaGdrW0VAM5REIArAKoS2HmCUrVRRxAQAFYrNa6RPABLvHTJ3EYAEAUJYNWUIFa0GxBhU0qabGlzmxpFDUjMLRztBCI2Vnk06XNS0AynFjORZYQMCACqm4R884SI6hUYj4tD/yo2gwJakbmxT4PCHjykdaFewEpaNUWgIqWPRKR1GxRiNRUDAWZzBTNQA1BQTIYI6hpQSFDAxXJ1aqgCMpCXTEigSriEupDiAyARA7Ioakhd52rqTJ4x43PCTGI7tq21lrmFNabLj0WT0mlkmEBAIIKAGjonIGhLnUWiA6IEQFV7AMcZ0ZIFZQWD65ZrmBQQ2R0zpAVQNSItORTcvwTT2HEBOfQIsCl650ZWKGq2klaQ1rOHxChSlpsaIsLzbQuEDQQmqn3ARdCQXUJFSbnm66ZaznmWcGQ4PLZ9vrlpSI0MUYOANl79exU6mIaavsL8njc3dp0aJrI0BJJVlFVF4NWg2LzoOtwUWotZe+ia12gam/e3k5TaaJror9/eLho6c37eX+3i04vfBxTesy57+M02zQPjmy96rzq8Ouv/o//m/81NfyP/sk//qM/+ePNuq/usUM/3L2NfQsAmsGtw2ZFTYO5+ufPLxjZEVxdbShwyolzBDZyiMJ5GtM4LYFfyGYEpU5pGtCqipZSS7XYtiWlkub19hKdk1rzOGpOCMYcmbDopA4NOHKY5rntN33wkMd+1c1zOe6HZy1/fffeYUCpaoWshEBC6JyL7MGgqizTQq3iGrec1uQ0X6z99WWXD3eN1GbTB2xLlmkcg6PognNOABXxcr05HKfX729eXHRYLZoxYQierDIioctJyWpJE4IXFTBhZFQIPqTDUcWIwjwXJRUrFjwBzpJj02xiY4Y5FSNsmh6tgIEsLAmiiDpF5/xRZlh8iAC4VGvjKW6LCE2sVDEDJiY0ZgIzz7xkW8jy8Ox4SRUldLxcNMTMyMRI6L0jd+4BZJgkGxgzr/tVnqe+94yybpvh+Gh1WG/Wi9RqYJKlaVupSZnd+kLJY9A0733sNBuI5TQ2qx5Qcp5if4WuFQadM5B3gev0kO+/z4+PSKFZrdDZ9PA4724vn728/MFPFU3Gh0CNUTgMpH4d/CZNj3ev35iGdvOCfdjt30gdHXlHTc6567dzRSLXegO9L+UBZEbeQvscsTM9aprIAfkGuMVmA+wNqol4R5onrCNCAcfIAbIrasRr1KHOt2gjN8yZ9+8Ou/0+lYKeWB2HuNpu52msIimn4ShOtXHUd56wPNwPR+eYmMGogbDqpsNsYkS+zDOU5L2DuGIfU851nH2ISK7WDN6hi0quv+ia2AM21Kyt7UMXO+28s5//xW++++7Nv/rT//D97b1rgqRcjSYt7A4/+mK9XTVQ53yUUZPatLr61DUQ44oRSq3leESwbbsWmdareHs/QsW3N3dVZ+67+TigxsZqxpoHFMIVU2j6rnFv64AxzPXucXDPmmcXF8+m4yOJDYdkvivoDG0u4p2P6OeDXFxcehyiVtg/drFFEOriu/t5vx8xNn3nc02uv3i4mwbJl5u+C5efNJ+/e/Xt5SdXzrVzmkPTgJrZUKdHkgrCRe491DwdNu11zXmow3G+Ezi0q2stwC5uVmvMxwAtRu+c/+bb72s18Ljpu7ky0AXRyAA17TdtM8+zi51kW622aLq/fzg/IaHUioDOkXNhmTKrmIEwMwAxkoiqioGZVVBSAEY0EURzjgkMEaNvhmkkLMRYqpZSHRMiBu8BkSlqFXDgXKgiRFRrAVVmZnLeN2B4SGPTtSKlzLOWtF116fjyC62aAAEAAElEQVR4f3t7HIbogkpRoq+/eX9z+xhcw2tv4KZjIrTonKmA53MWhCHabne4ubutYlqH+ZBvb/Sq67Ic6sS/uXv/mPX949HF9vbhqEiHqWAtCjgUUUAyyErv7qf7hxGZ0nDcNkSO0KpnV6Aej8dSPAGo1DQ5Ry7NY9PGro3zcCiF+vXLcciP06OhbC+2JWPl3ea6S8413bZZbYAbQmiaZsnoYCLn43Dc7W9vPvvsi2mc1xeX8IKZ6fb1e2oDzPL6m/eX7ebzL6/b9qHk6jwxghBBqd75UupyupdTaptGpS5FqQjmPKOB1kpEgQk1o1BwPrADdVqzSmmbaMa1FDVlZlV1zoOJiACoGDQ+QGSt2gQ0leGwW5pwcsHVesvAaU7DmDbPt1ikM6s1lVydC1JHQCNPWouaGRBj6Pqu5CQivvH9ep1TVjAXT9azVDICsnMqVqsqZO+dSqlzFWEpCQykFGA0UxXoV1sAzTk7piIVKV5ur3aPjwAwT+kB7zcXF2mmaRhrTsF7Qn7/7s16s30yIz+NC0uQ5dPos0ybfLbJwJndWQIaiBBUz4+C5yw8XFpWiBCdd85x8D40MYTQNm3Xd+v1uutX6/V61a+XHKK2a0NsfAzOee9PANGiDS1tZUQfgoQWUoh5cZ+dDt/PItHyDuEcNbWMMh9JSKdzeGJEezKDLDP88h/P09EJHHl6lXMAtp3ZFDBzxMXyw5vXaRovn2+bEFKeHZNDnI/D/uFBjkOMzrftd99+n0v2ji4vX0C7mpmLiHcOzI7TY7VsgLlomnL0vu87RHbIJR1zeojB+n4VIT483OUyC9pxzIcJ42Z98fLisJ/MdBV913XPrr74q9t/982b7y8vrml3fH71kg1q2f/4h59+++2rP/0LiUIX8fqTLy7vbr5mTavY1iIK0LS9LZkVJiEEADQjM9jNh8v1tmnavgvbpnu3e5Q4M0PTtEyYVaxUUJxK7WOoatVJu2kj8nFKsemutte/+varw34moqzl4c2vd2/faU5taJiAmDy4YRiLmIK1QVfRCSAAIfA4aUpzXOEK0/PLzwJNAep0nC6eXaU4vv7+1y/42jUXjttsisShCU1oSqmoNQYfPEutREjsQozOx7brvQ9EvGScL8/yJ+rsafUv2MXy8S6xFst6ORM8cF5OZx0HP8I64Dz7whm0w4+G8Q8r6iN7x0fBNKdh8yR52scvCR+b3j6EES2zoZ54pxPWYqoIZKqL1mAGpgpQa02vvvvVP//n/7ufPItmtLu983Hj2GUp4Lww5IpLEBYAqFGuAEhMhqaC6IMvaSSvVbN3zB7UTKsAICKnakTYkCbnSxUF9ASN2uJUZUBiQtSAWAALYlFZt65WcWBSZRpT22PXR0R3HAY12XT+eDyYo9W6v0/TUKQxi7SMhYpqLaM4JJRVi01Lc9YYUEGhLGOZMXEEvQjuIdVZOQD3LTqPRVMbQzIi8Hme2ZPzXlRFIVIg8pOUEDp2HpljuwYMxIxoiCeJcRFwDM7x/7gM+0/m2/N6+iDLPAk2+LFxcZEDDGyRGpclc46bPrGP+ISKPWlSZ2vh+ZXxw4p6gpFOgNuTQPQR5fRh6Zy+8olP+vAfn9hJRF5cNbQQdLwYmZ5OEQzUAIoAE5tVEyklIwMaq6IZ5JSHcR7neZ6yKiC
wSHanCnNYYKLFywaw5GNYw753vglhP8yGj+yIEE7wB0p0GkN0DKLqHQF5cBjVq5hvY7/qQhNcbKpQ1WwiWqqpKUEuYmoCxMghxn6z7ttwcbFuu3UI0QdHbIBiUlQU0KkaIxqY825xC572C1BTQ0IwBV4O3wloSXzWRSBAUNACUqwWkIKLccwUVFEVtYBWfPr8T5UPuBCOoAJoS6MZAsCpPlXRZCmZRwA0QZMl7Ai4AqOCEhg5wgIIJxzsdNqB53evCkgmdfl8F+HipNWJgFTUClbBljdAQAjoT5om8qKqg6pBBUKFirRo2nry1wnoecNZRBDRqkbAAUAX7cXMnlr8nPfog5E3MCmlGoCiWatIbBZDk9zkXXQu+dhimH1s/JRLzSJqREwUmAAInSNiOIckGi4x7ESBl0vu1OuKKKpMLKoA5kqVao3j4B07qrWKVDBwfJqO3dPVoKKL9wxO/bAk1YyXuCZWqwiERmomUuEk9C8nAqqqYHgqjzN0S6Q3mCEtQc6gQIhWteaUhqFxTWD65JPLbtP50DgfRLLKRGjAJDU75rZtLe2P+5ynsaNekxkk9lxyZXaIZpbzmKX4SUuuQ66FPWSZ390c0syrNmzX7c37Nx64d81UxsdZA1RnFNkxHD1Y3/W7cacmzuq6jZvG++De3+7+9P/+L97/6qvf/+kf/eRv/p2A8dlPtoMO/+if/E/ffH333/0//2+X69/zxN3ltl93NRs5J2p5HLvYoplpzcd7yWPNGdGmMZe5ErBWSWXWKmqi82SEitpSNCsOCEqZa1GrzNh0HcrFeByhVpFZs7RtUHBTmptuMx4f13yB2JuyQch6JAceXSxYpTJJ17pxHtkqI5CjyGGCWYEBdEpVxNqONh1fXK+kpDLpuo+IZi65llz2RFiK+siCMByPndMuUtdcm9TDMFeEtiULXEzLXL03I2KkGLjUwp4iuHnU3cPIXdxs46efw89/8ebqYnOYdm3jXQxSxYHLKbNH5zxRLAVmSbFrzapKXZTIIjbPKTbL0ayKLmVYSwC1yeJsPO8ohKigROYAVZSWZ+YlPJQAwJCY6RQJQUhG5BwRMRM678/JCzDOqeQCZty4nCbNFmpz3V588/XXV9trGcVdBfQeEEArOwQ9luN78h2FHgxknljNtU2pB0lH770UlTwZ9xTWCoilkGHwKPm2DO/K8KCIcXtBwQ2378b9g4+r7e/9jer8vLtZBQz9ajhkDrHbXN+9+/bx5j7ahV9fAMk83edh38bIAIJzd/0J+DWia3wAe8yHe6Lg+s+BVoIOy9HLZCLYrCGuzHXgG6sVoBKg5QnLCCLoO+CkZQJk7i9AFMvgdE6laBrT4TCPc82ap2waxjGBxuNhr6roYZqzStXDsG6jJs4HRTIOntmVOXEgvz84DohRalVVR2RaFLAwA6p3njDOqTD5GEPXrp1vrz753Mxn4/6zTzG4JvB08/bP/+2f/tm//9mvv3q7A7v4yWfpWPCQeo+RtWolV4mj865fb7OWrDZNDy3kxGZI6Jtu1czjfHH1ZXl8+Pr19xR0KnNcr3Dyq9C7ogrsZTw+1nW8QCBSvmjcN2///Pnlp7WCjUOMTX58N7MXNQftTz7/wdfjIHBkTg+7G6EA4bJBVp3B9gEdFG4uV9CWb+++14YAzYGf56nvtvv9PfvOgQO3LXk9ZXdxda1yLFyAYcjFO17113UfK4Tgm3G8dw05CjIXKTbn7OKWwFuSMo4Fc8f8rL9OB0VxNWOt8dllh2n+9PL69c0RpOm61hmsXFAUh+lwd+v7lY8bQobzLcF7hqIGygjAIKrMXkQBKgJ5dhVUVAgZkNkxmIotN9latMbQ1pLJYcrZM4Oh9yHlmcgTntILELGUmWxxR3swNlBiZnJmWkoxU6vY+Kbk0aBqLWxkqu9u3uVathdXb797Exjev7m73e/YRXBhLtnX6arvVVPr/TBPiNhuNsN4DIQMpgret+2Kxv1h1cfvv3vfOffHf/jFn//s+5u7h/XVy0+u1gDorJsU9sdUjJlQl0cTtdu724f7e6jJFNropMDj/thHvOwdVGlCIExqhiaekYjYsY0C5ShSwdOr+98+u7hgBEWch3nMZt7aOeynw2oNfTXns3NEDrxfqaEwrK8+I17leXx4/+7i+sV+fyDvr3/w/HE3IdaXn22+/c2bb7+Lf/BHX15dX662zXR3W3NRMOfcknEMBGZiCgLGjsfjMbSNKIzTfP1sE0TKPHl2UiYJDrmpQqeiDEREdkjkoahIWTZVVTUml+ZE6BLMbduVmrBfN21nAjIVLTZNFdZ++/xZmuX2/e3+/W/a4EPgfnVxOEwxelCe5uM87k0rO2a/dq4BUwSRImbmvFMQZIxdW3PiEDofOQQwrTk3S+gymYmaYWjiPExd3zsfvY+7x3sBgQIhBNMTmJ5z6dv28voKAfe7/TAlw/1JxyFkxzJXIhp3hzNg+mEqPc8gZz7oPLg8ERhIpHpqu1kQDDCgJVmaiQiZ2Xv2wTnnQxOD923TNH3Xtd2CEfXrdb9a96u+a7sQmxCiD94557wjYmRahKFzPgA8sUsnCYuejDrnehfkxaNhp7N5BFieSJ/MFvh09Lpk0J6GeVvIqOWxGs+ONYCz1+xp0sfzmIYnLQlUdRqn29vX5mroV0kriNap5Mcdk+Yya53Wq/j+7u5Xr77xftu028vL1S6Ns6Y6FQbGkktORRJ5qFmd730Tm+CyFrDycHhkpFpmdi0hvz28j8FHjndvXntHLhdcX8icN6F5rThVm+7GV+//RRa5fPZl3zT3j3f7+TjP0zzuv38tn37y2Q9+8J+wuL/+y//uh5+228uLu3c3DoqqtJWDc5vrZ/sp7Y+PVxfPh+MhH9P2so9XndRiajE0phIjuSY49nPaqUho2rEcPJNnLqZTlrHU7QomGSaBWuvXd78eqxq6OmZw/q+++k0eh9W6PRwzuRDBA4KYxjaanbBpQWVGAvTOZRFNkneH1+lnF9vNs5fPnKcpzyH6L378t2+/e6tmsedPf/BlqnNsm+jb2HDN0wn5XzxaQC447xsfG+ed957YnXusPxzFf7gIDPFshznnzizaqD7xQWcp6dR+/IG3O9sPzuP7WWyEhTP6//m1vObTUobzrH/SCZZnuo/QkNOrnWSC03uws0BAS8aFAZgqgtWSQeqS1DAOxzdvvv+3//r/Appbv9IxpSF7v+R9sfdtrY+aRkZEgCIVzU3DQI4DNYQx0aTkVqtVycUToVQPiM6PVavVycAbbBk3bMV0ZJqViGDNlk0QkZhABU1JgZAmYGd1jeoDsDMyr+jnJE3Jq81qzTxOeRxq07XU9GoAta49YSptG4ecxjJ2sWkDFZWucWQwzwWJHCNGt4gCxZQNWsep1lnBMXlv3hujdn0vRZ24/VQQGc15v6p1NADkVtRN82G9vSAQ4rbtrhVapg1ywz6elbgTjYgLmvZBs/ngCvvAisFHW9XTx4iIJ//jkm+lp63sycn40XaMT0sDn5ilDwto2eXOXNPHUj9++MplWzuLByeBCj/8kMUe9aRbEu
[binary/base64-encoded asset content omitted — not recoverable as text]
Y82lbzqRejyPwbsYfLWqQvunY2xD49rDcTTG2y++mkU3t9s0jiTSePUOEVXUziUx4ip4FDmn4kIgsADQOVPJp6QxFkCoKgyMUL3zVSX4MKGrKgauKqIRoPo2RELPjpBVdZrPRKii8zgxk/eNEQpqjJEMSp67th3T7BiQfRNYS5F5cC0YRhd8ORc0a5p17Nrj+XBze5uyTOPBha7b7PJ0cM6BQVVBZiQClAULihZmz45kTiVLmsairSFUEWIGUCkVAXz0Lro2xjSOqjUlAVPPfJxOwVEtpVv1oW/OwyRgfddJPTsfRCVQQDM0cSGG9Urm5AiLyJhHUK1C7IPWcbPeZMFpKkCcUnYhMPo0mimwb4g8MypgHsfxPPSr7nJj5HhJ/l22U/HFsYNw6dA0gyWOegFFxFdcuyAiXTxlV2yC193wSzYwLlXI121z/NRu9omOuaD9z3D3VYB0Ad0LLWSmF6hvF5HSYtO54CeDq7eDzNRUL+/1Iit6kW8YfF4ydRU8ffp+uPwMF6oJ1GqVlPJwPn98eHj//uOHj4+H01BUxFBV55zWm7X3/p/97F/EcqRUutj/8Pgx1puWuWnD42E+DUlkiMHFtn212/TBHZ4/8M4/j491HgzL7brr+269e41aHUoFNw2nnGfnIwFJmr+4287pfS61X63qnLPmj4d923ZV5Pf7D8cMH89CWret6xjXa+5ftzlNYDDmqevbFGzKY3TogWMXPfNchYmnYayUm36NKqVa531Fd57ObWwdcdvw4+ljG1zTca7ZQKd0itB2sSHSrFByIZW2DaScpjHEZtOtcp0NAdWOx6xAzi890NiG4N3yy9bTeU8cfNsYYi3FingAYCq5FJPqoUFjRz5QaMiRW283RC6n7KPSy7wt8PnSTbUkYS28H9JSMo8Ay+70dRIW+g8vxsnP3TiG14G64PXrCF40a9fnvPCUV3LTAFFe1CLw4k6DT2/3AtXtOtYXHgEvccZXgfjyy7lSSWigqllUtUx/95d/+ed/9b8djvv945Nn1zb9OA+AdSlTLCoMpKZi5pDZBSYa54kASrHzNN+t22xySmmac7eLtWYjBHbI1O+6VI93N/fn87HrIxrP6TCMvNvdTHOqMt/dbx/3ewMBicHTdJ7e3DaHYcxKihy8e9Wtn58fgmNpV4pOqzU+DmPy5IhNrTJfKtyYsWlDCM75hkOPrgFvxDhOSi7EEBE1OGdm3sfMU1BvKg4NmBkckkdyKsIO2oZZs0wn2myMUJFW69s5mTH1NzdklQxNZB5nBSeSaq59bGOIxOoiG6Aa9mE9Hx+63TpGmycLceVCgwgXmSUIIC55bS/U4YvxdaGQrnfm13G6Mkmfzxa8KNBeBJoLOfSpoM+uwkh88c9eViO9aoJevpb6vU+J0fAHS+cntSUAAC6+gz8cO7jSX5+znYaqalp0OA+PHx+//fZ3Hx73QC4LzdXOY8qlLLQXAaouqBfN7JJ4ZMtKDmZQrX5+mixnyqVJE8DMZElqsaUGAdkx89JvxjE4RBDV5/0AhhEgeOo97qJrou8ib6Nbd/H+btOv46prmxgJVEoGcyXPVcWBEylsQUWhgndERt4Fig2gI1U0UKmL0K9gVRBTRTAmYvIqVcRMwETUqkpVBVWF5TyrqRRJRXORKlCLFdGcRMTmXIaiY7VRJJssTrzA1jKuGDuHvYPGYxNqGyhLMkOiLMCAwQBR1RGKlouwSlHUnCCIIlQDhSVE3cC5aK4B10DoIK7VL37Jpd+DLgF/S4atgSEZO/MRfWtlNgwKQc2JkAgqsJqqwUU1JSK1FtFUIReoQnOBoeBpsMekD4XPyNW1FJoQY3B+MYKTc7NijJzmVKyqFZnKVPIscLdZNwZIDgAcMaIxMYJJlfF8AucgRAAOsTV2hiTkq6hCLbVWQDZAJoe8RLcROwAk9mpGKmRAZqhSi5WC7JhAFYmY6Xohd44BfIwg88zOmRVTBYNFZiEies31Xb6uWUWLHGu57Vi6EBEQTKsiIbNTFQZEZAVBQCBWKUDo0C0XDVBRXZ5Zl0uOijAhM1epTBQb38aQa8klB4el1saFVJIHT0jO+TJPiJXIOx8UFVBN87vffzsPad34MqfomxibVJOZEIauXce+KfPZzLJKGSoa+SYSw2q3Cr7Js5gMDrHtu/VNe3u3PR6mOZ2bGNIsUxoUqN/spAwcu1dfbm9udr//9nfIDFKZ4mm/R0Cx5FdtOp/VjEIDSI6JkGotZrWqxL5bbJlqGmNIOTchzHOZaq2leAIfmCqFJhZEdWrZpnFoG0/sainMTRU6HOdh+OB8d7u9JW5ESWot87nvbxajJTkkiliqpLlU6ftNbLiUcW1uOh23d29+9c/K89OjC3F7H8s84Ji33foff/pdv7H1avX6y1/c9t3+48PT4cEHjwjGSEAOoIqG4JnJBHPKMYZxPjWhbZp+u/V5GO832zRPkYOKzlPumzbXHAMH4HFOb17djMNYqjYhDCUF38xDSqWs+3Z/enYZocYpg6/57Zv2zU3zdBy/+dUX0zADM3sf2RWpbErOl1IQQFSJSEWIwaEhki2RXwjVCpMDABNBR0RoagJqqsi45C4zM6guAsIqldkDXkl6AAQC4KqCCsrABIiE153kWlO/aQ/nqXOhAjyenl/f3np1jrgPzXw+B9Z9Przf//S/e/Nr9Os01zTOnkWlAjAyS5aUR3BU8lyzMAgB6TzFbidAoEVBzLyIw65XYAWM3dbEhuNz0zU3t3dQJ6jFOzfO51rmEKLnVWhWhtVMNA+glTmgTc/H34VY2d9M89B2K7Japz1KIbcq1qgWSkc5P6ZhH+9+Aesbpey4onfYdGJA6VyHg2vISeuDS+NQs4CUeThbrtOQTcUHXzN2jQ+vnWg97keXSvDcUZqMbM0lWUTardq2bTer9fn5iTmTwf0Xq7ub9Yefnr9/t7c321Xs9w+PQVzwSDIhcWgDOWDvY9dP4yySpvEI/ba7baang/ft7b3tz2M1MRER7NqdlVJBcklqafXqPk5BoLY9IyH5EB2jWeM6JGbjdtMXCl0TB5vP8/ezJvCxWbXg0KFLVh6fno5pDx7efvHq8fl8SiU0oRI0bTtNQ83ZiXCe53Nx/d3Xd7/Yl1kgHccfKXoSns/DatebwcbHfv3qw/vH29Wu8vDxsN+GZkeru83rLd9xoB+P7/p2Hdtu94XbkXXj/NXuq/j2/t/+zf86znJKdc1WTlOc85TzartddbfclWwVyc6P37lXX/z81Tc/7H/iuP7TX/7Rv/uz/zUif72+TcMx5aGp7KLfbW62nnSwYgFpdT5NaT75biOo5LtV0yCzQWT261VXZ51OZbXin3/9MyctPM7D82/7uT68/3C5JjCSITMjgmMSKUsMyzXgjjw7RFLLU56XuxwyUERHXjSbqIpOmpEJiZoYU5oQgAmFbEyjdwEQigghVcmpzI674H2tmZmkJtDqQphLbkKfa2ICMAEkJgjBqUEuyYwRgRwLACI7BwRUag3Ba99KycyMgAzYdPHX/8Uv//2f/c+ooVlvvvz6dRNCiB1v2ft
QUzkPmQgdMc4WqfmTf/1fH375/EeHw+nh42/+8dun4/mYj6JSUvbBsek//MNfa9XovAvddvs6bgdilunsEaZU1ustGcTIFKtWYO+MhNSapn/1+lVONisEU09mKN5jeTpSKi35saojIlEG8uw8B++5ZXZKBXAwmXJWAgErWj0xmDFCEyLubkSEgZjjnKb1ppeaAKHWMgyzVK25sPdM3DWRyIAAK4qC5NrEVsDEIMS2i63IGUyRaZrnddMO+6d+s5qGMI5nMWUfO/Ypa/TRY+Wa1v3qkM5IVrVUKQzMiGKECDnNjGw1x83KN2EaRyA474+1VlVTUSRkh20TpKjmUswcc9P3tVZTM5VpTnHT3q23++Pp5mYFwQPZcTys1usQW60CCF9/ff/ttx9MSnBhn8+I5l3MdV513el8ct4BQa662dyVIrVmhtg0/fH43MZeTKc5ty0uURshRjWbxnRFGJfd4Gtd5gUCX8JIEe3SMY8vcSt23Q6/4iR4eexC+FyNFpfwn0vGC16RMixZFp+sG5+Jgi4Q5pOL7bov/9n+OC6RLvDZ7v2iygdUs8se/SVuCS7dafDpG+zSbbvAqGuQyCeNwEIh2GIBWQJlSq2n8/nDw+OHjw+P++fTMAgsd4yIhuum9xB7jHY+ns9P0eW396+++PKr03H/9P7h4eH0eJx396v71eZ4PnbRPTw8DcHfrLsyDlzKq93u9qu2pDHN6d3HH3JOUOp29crmIbR+3XcljY1r6jwdjsNqtWW/ZhhgHm66zZRzzak16Lfd3ZZMNZJNpzPUzNVHR8DYttuUS+d5tWoRVJRyWeI4WUUCe2Z/3A+IS1GbxNUmkhuOZ9VhvW4ZkRoSrUWKCHjXlozHUpMUMS3D1CNIrYGpaRoxPU1nxxCDP84zRxaRXGoTI6EzlZy1ia5xEVVCbNr1bhjPaHI+DsM0ee83va/gckqSE/nWxBBdv1mvNzfkIiISUS3Z+fiifVs+pevt/XWE4EpUXjUbcGWD4KqC+wSSryD7ZaQucBqXLTzAZSAv43eB2YT0wk0pKKjCRbt2yZKBz0yXFzPOZwqPBV3rxaC0PKZ24Y6WnGBBUEOdx/3f/dX//Nu//w/byP7+lY7z3e3dw/PpN9/tS77cTDqmKqpijOR9QICUkor56Byx1hzZnedJkFMuAq0CgQKxU4V+c/P4049vv/4jOf6YZShVDOPpnFwofbedieZyulmvS5I5le16s38auxgGwypYgQiNovNNk0E5oEOfp6ImMTg2YcSqpQkxCUdzBLX1LnrnEIFdWG+qVCk1tBpiVBurADEjIZJznsGWxcdUDR2yZwRrulYkNS5IntCErDrnM4Ennmyi0GAIkgTZzWlC9mBYNCspBeeCExUAcqEH8A1zGqbNpqdSHbfkvSEgkeE1GNoMQF9myhDAEBfcvsTuwKe4oRfW+rNlEeGl/A4uAesXp+FVnPTyxych0Of6tOsrLzzPMuALqW8AQBdW8xM7dWU5L2a3ZdHV64u+HNs/obIApJYiOs/56enww08P3/7ux9N4qlX3p/k8jnMponUJdDZTQliifggupl57oeM/o8wWvR8h4LWbfDGILVsMy2LO7Jx3zD54t8QMAwAgKbMWaRBaxjfbbrOJm1XcrGPjXL/qb7Z97EIfW6Sl3aIoEzCRAjkHQLWq9843TWia0HeubYwCACNWkwoEimBoRojOMymYgSwhNqZVak5Wq1pFNCRGVGZQBnboFFPSWjVnScWqgqpVVSAEdirVCKWaFC2mCPCM0CFGhs5hF7j3sAq269WRc6xGSqxqyzwpIoEqmZgZipgoSgEQA0V2sPhj2AMH8BFcY75RdsvlFS8uwKWnAQ0UiOFiiUVFMnTKXoGrQBGsxWqxqrZUeJWapErKSRRLwZwsVx0nGQTOQifBxJ5CE11UdtEzgEbnDUjM5egT4bmek4jNRQQsD9txLkXudj0BeAcQIzlGR+ScgtZS0jzbcPZIhJwtGzlBylLrIh0nIuYQG+8CmohektMREQmdYwJCJaigKiIiRasKM3nwzFhrJXQGhEjsvPNaqgCYcx6JVCo7XlyWiP8ZVUSEyyeyRJyLGNiF0YTL0gxIi4NRiQCBBc0hiFQwQWMBQzAiAkE0BjRkQmIiAhZHFr0rdZYqBASmROg8JgNyQbSSEiLWOrddF7tgMhLY+x+/+90//na3ugvBG5qLUcFqTbXmNrRopeRsVmKMquAgOtchSpGE7BSs28aq9vXbf3l6/qiW3j0eA/sudF23m3PNmm9u7ueS9/uhCZuuD4rw6uuvBEgFPbsQicmBIyBAIBBqulVoYqmqat7xdNprrloNkXKqea4p53GYyGHTt6Xo9Hg0RgIWUyKoWhUMsKLzRbTK/OrurmQ5P599JHPGVKc0jaViYwqEuCKiXXNb5+cQqwLEGAsnKcpOj6cBgG6/+MX78W9cA/+n/+bLH7+Xv/yrfUTnCCE6bPC/++/+j3/7d789z/n7f/xw1zX3q82bt28R9PB8ylIJ1MBiiI6dopkLw/jcxN2r129++un9in23jnnMVWop8zBbu10fHo8OyQX/fHzyoRvOp3Pjv/jizbvf/xCavnexpuIIzufUrzah6a0Se1eH/P5hGK388qtXTdP88P6Zgq+B/No9Pg04gzdhprEURFy6BhA5i3rnwJakT0PCiGHJovPkFYwAEUkBjAgQdaE6RS5XLSZEWsyQYKpal5v+Sz8sLTQrI8BLH42anIfzOM+7u9tpHPub25IEwIZ0gqIAglJJ43rzmtsbxZDzOeXJBVQwM0vTbNnImaJn7LWo1Vl56903Wteua6GMAIAYIGwsrh1aPk6m6FyYjuNNu1m3IMchBgcmC+qOzSY0nZhYFRcbBnGmdX6SeXLmvLuryqYKJVmZ8vgU2g32O4PqytnlQ52P6FewuUUWSGcr4uPKmijnUadzOTzkeRiOs0geTk+a5hD7Jsas2vQxdqGJ7TmMyr7UuWYJHFqB4MXu7d1R5ufh5n795v7+dBiDQRmPd7d+u9vmwm3ntaY2UBe7nPJ6+/N8UigqQje3m2E+EYcsWOdaC3vXjs+HfHxSfnf/zevtph/OmapuNg0WOx8+sDkGMAZhr+TGOQXvpQap0LTr4qTvGh7PUFJY+aowzYXbJgdOjg7TVObqfXfKqim1cXsqQ8ofQhtZyKiM0wBYHEofWW0Q5hLMgBC532whJU3Kwrtu9f75+ZvXvzykjJabyPPx2HEb3Uaz+/WbXwx2fKrS0iS55DiXME3+9OrVDlJe+7bM6bZr4XSI3abm8Xj8Ybda1eHwcJhnL6+6NQlmOp90ePPlz3//7j3pQCK3t98Q437/TNwA2TGlX/2zX4rM+4enJPDmzTdf3K9/+u7HTbNqXfvh9C40/o/+1b+aHt59eJzZzWk8ssMq1nabVXMbvHe+qaP+4te729XmZrWeTto0DbZvPv744OPlWhBDmKeaS3HETDSl7L2rOali40KRLOQIsNbCxM55RFOVlEfBNjpH7BCRgNRgyrOhmRiASCmqyMAIIKZm6qgRyYEbUCo1ixT20d
SYgIn7doVAZuxcM6cJKqDjnIv3kZwLTfBEpVSpFWFpECEickudOUDw0QCw43Ec59F+9uarx8OBAG7vbwFonqZtu3OMwLTbdACy3t6t/Ob5+Dwep+enofXh7u7N9tUb8/Dh+enP/t2fn5+OAFkQj2PqV6vT/qjV3pef4npNjP3tveYEHTrHNedcC7rQr7sCtl2vnz883WzXYoxEt1tqbX447fP51BLiJKGAC5zMCImd01z7tp2qkHFOebOKSqDzCZlPUvs2ApJVkVzEtCgAURvjXdfnPM7BpXnMc62gRTWGoEFd41UhzcV756KfphnNdW2vplkkOC9owN7A9f324fF9jIGcM8MQPCDGvlcVqVmKAsV+czMenjxZzYWoAVBVYXSCwIRqIqKeHQq4xlewnAuYqgogINF6symiUquIVFlyFYGRtcqsqWn8pu/avh2HIWstkqbEX355X+Z51TalVk1CLTVdrFlysu2mFfgRDNB78JGsLluzIBq5maeZGvfwfIrdqmmbw3GcaupX27ZpQJEp6lyrqIB2fTdO03gasLuEOP5hquoF0171OHSR/ixczxWOX41mF7vPVQyhy9779bUuMOiacApXcLRctT4HQReuR+0THrtQTvZyPJfd8Ct6WuRIV2D+8novAH3Z78al2uzyXnRJUbJPx/mS1nGB/3p50wXpX7bzdcnl2R/2T/v9437//uFxHEepxQC8Y0IEBcmpaxgU3v7s7bnou2OqTx881Vrxzf0riMchl+8/7m+7DlSblmIbXAxf3N73nj58/P79fly1kZTa9e3d7ReBm2EceLUWKx+fH9fBgeL+dGCIdXYiwzS+R3Gxva3k1xxcl573z01sTERmCewQqBbMVhXBoSUhIguRmJTZUc1TmolbBBuTDFrAx5xzE5q+a97tP/roaylmmJKL3s1zmubJuaqiXbMCoCnPUy7BsUMaplGtth4cqCGnnD0zWEpF2EHfxaIKxqIGIrlkBCx5bIPXVDnMjlCLrldts4qn01SKEVrT9gDU9ut+d++bOw5tVR+oiX0LV9ckAtoCz/DFJHkhjK6BQguqts/poeujn6XJXFnChQha0ow/DdqVvET8RFle8P4iO0NQAFB4oQwQgAwAdAHLn6HnC9W5VBktD1woK1uITvyD56qo2TxNp/P+eTp3u1uA+hf/4X/yjk3L8+FBNANeTkO1JXjdBAiJzGR5D1GgQLWW9w+PX7959Xw8vN310zgFx9QEqKVp29PpvNndSctp2t7dxJ++//Ob7c3+eBqPAwfutr1qqefTahPSIQHV9aoZUl21zWGSCuwcE7vNelUkd20E52pRrMyky7ZHqUpsqIYGbRcUK5Br+l5d50PvtBSais2+YeNuKlOIjCi+8WVCHxtRyXmKoa1AStisWxgqS0hpDs0KuakZXOPa1c5pyc8f0bXOkaqvVU/TuWhtmtaFJs/WrDokZwi1mu/dXKaWGvYEaFWE+xWEeJVTvnDKfDGaLAY/WBajF3/rZwuZXb7txVJ1XUiu1PfLyrjQ7P9UEPSJPXrJUYMXpnN5M3qZKHyZZIDPZvmFqbq63F6WuIvgCK9BchfO6dNZ8HQYx2k+H08/ftj/8PHpeRiPh9M0TmMuVWvVYlJV1GQRBC0Om+sAX1d1xCWZCwhhSdFARDFzi54OAQlRrqv3chxMSPwi7DAzUa2lMmjn3V3v395vfvbFbnu76pvQNsER+RCbtiMCI0eIIkrskMiHBkkQnKo5ii52vu/8agsxqg+EAbWiWdGkNalUAAJiCmyGVEU1aU41DTVVyQUvWx2qpqAKpmSCNUOtWmvNmpNOuWYwFTSAqloMhUhRHTDWix62ms5iSXEs1mTtA6YCasXRBOxWzlvOTh0YBAQAIVTTjGag1QxUMlycj9exQTZC+5RGRJeF60ILKpgZXT9XuvLQCgqkRqJQFUqFXM2Mi+miWKkGRawIlyq5aFXLCglgNhyNM4fCQZwjH9FQQAkgA2hFIHr7L//0V3/6L5+en/783/7Zwz/+7fD0ngm1lJC1Qb+KXfAITEDgY3QxOo+uaYF8rWKpGBX0UYzmWtXYDJi9j4EIY3DEiOBqVTUQFR+Cc+CQRIQIInk1V6pI0SoqFaSW4An5cpEwk5oTgtnFcqYG5Lxnx6aCSMTuqkO6UkXOu4uzGdQMAdAxK0CVwkhgi5cSPdMyzIrCRIDmCAFYRJmdAZuBcw6NAMy0Liym1eyYu9jN6TFQJCS1SkxzzsFFQGSO7FyV4jAiWSkTaall/uG775y5+93NcthiIHnKee66zkGoNfnotBIgbjZ3JZWSc79q7vptaGIpM7e0vX17OOxFM1YFx81q612YqlSA0Macxul8Wq9Xu9v7kqfYhlXbpgLRdR+/+7GJfWiac05NZGZXK9Q0ljRUtVLnJrYihZmqLkK7DMhqSOTGcd4fx1T1F7/81d/8zX9kaue5kPdSSqkFTKTmVPLt3e3j6XA+5d1mM5fSrcO4f5aUkcLtXfPFmy+G83A+j9Ncu+gOh4+7mzst4bQ/WSWiqvOs1T4Ov//qm5//5i//I1m5WTf/+3/9x8fz/Lu/+y6nPKvqt3Wz23QG61Usc3ncj2X/uFn3Dbc+iA84jklMwdV8LszWdOvzNHBcbVb3p2lSq8TYdajY1VrHedq9vjs+Hzyz965fUexux1RxmDc3d/vhbDk3HBBxqDKWGmIYxiOa3NytywzvP54OT7//1c/evn51Mw7p8XAiwrYNplCBShViJ1UQCUHFwPtYS/ZEiEbkRCowXe6+VZDIFrr7sqQzIVa99NSYWdW63FibKrFbtrcWrYGYOaBle4rwEyooJG0MjYXT4bnzIbLLdTzNs0S869eu34mImdu0a1U8nw50enbE7JrkQi4jN0wVM6R2HS1ahRwjmwi5FgyljITGMdRSqYmLLs+I2MUpjUYQY7ScrBQXwnAelCi2O3Y9ONApobEJmhYpZ9GZWmhoU2tIc+5vbxnGKpMLjfIWMeB0gOEhDT+BSPP2VxXR8kCo7J0a1VSkVu+Cj93Thx+mcfLN+u6bf7bd3eacyTQPz03nCeHw8NxtVuNp2D8mRerXK8ua0+jZ3fZOIWx3cXvjbjc3ndv9/sff3bz+4m67/fj9sy8e0XavQ394jl9s3p2Gzc12t91JmQvV1c0auQWwXI1DH8j1fSrHSUp++uEHoKZb33j150Pyri1lYAnOD3Od2Pem0MSuliESrpvVPCpsAyKBCKABmSnGppnFqqp5abvQmJNC9ZyCxcPxlKtublZDTs75pJJVQ+PUsJzPKD7Ncn/7ejom5+LzMGnR6FelNne3Ow74+HB24szo6XjqDZvt3YdT/fK239323//mNyfLOatBXW3iQDjO+x66r37283m/fzzsq64hdHjTjjPcvNl9+GE2g6oQmhWv78vw1DRb4fDxtFeyJjZ1qOziWEq33jVNX8r44fG5XcW7V/dksV+/MaPhgKv1r822jqZf/Pp2dXOzCvmncu6b1fb2tefydDgj+lr18Phszm7e3PS71c+/vvvpNz8+POx3r3Y/+/WX//7f/
8V2d3//y9eXsyCX5SY7l5pKQabgYjEBWmo/vYFWUHbBEaqpiiIiccBFBgwotSJ7MWCkMick7Nqm5MpEMca5TIjogFULEhKQgbJjQIeIbddMoyoYABKamJqUru0lapVSFYtWIFv8/N6RiPoQRASJpVSp1RMriJmqGRE3bes9fv3V23fvH9Z9W2ppmj40rlaxCqrChN55CkSeVzdvT/tz4DgMp+l8OHw4GCoY/eIXf7LfPJ6e3x3ODwbI5BGx5kQI9Wzm28GJc9T1K6mSJOeKjfdTrdWqO0lEndPJNW1/e9/38f1v/pOOqau0ie0kxya2z/PkGmKPz7WuHakVIyOKgbgKtbF9+7p/nIb5cFYA1KqWqxgC9KGbU9WSH08lpxpbbPpmzMdSANnHbgWaT6eB2Xm3dPfqZrUepynXyftA3tWqm9V2zHkcT23XrFdrkdp2XS5VzUOWbnVjwrmMc94zSZ5d07TgoBgTmiFpSUjgHZtWBiyqFWS32pBDgMoEKWUkUrtUvDOA856boCJSSzUhxl1/g05KlnmcgqPW8U3bksPhMJRVWa+2ava83+esec4ixs7plKGUu+3N8XjWKp6dVak5g+o4j0yOPS6hjOfDoYnBE4fgtJYYm/E4cqCuawy0lpyrGLJvo/NXFIEvqhu1C2VCcNkNBkN7oWSuXoel1JjUBOBS2AREV7kPGOgFDF9poqsIQy/bm0tw9fL8q67nk7/j6v26gAZ8MZC9wJsL9kHA5Rg+aZHgusV/cRHhFRpdEJfZ5/hpkR695MtcfnpDXfZj7RommufpsH9+eHx6PBw+PO3HVMg5yxkMc8lN9If96f713d3bVx8fHk/7dBpOa5Io8POv7s9W9s8HMYuB+76ZTmMVXa/7N6+/vNmEn96/+zhNfcud3/3sy190XchT3j+8m8jYsRQzg24Td+v+PBzSaXpz87UPq0rmo6FgqtKSHZ6fIWvXBnDsG19rqeaCj4x8Kmd2YR6neRYFnHJxRCG42Da+MVWdxqmUMsyogQGppoK+cdERUGibw3FKQ7678d6M0KkAIh5Ph2XbHYBTRispMjpg7yjPU2zam1XTxn4YJ4aETNE3NefjMHvnItntZiWQiFEDKNspzWlMgRx7h47W6/XxOFZDNWNANd3d7drVOja+CZGY7WJbvDrCiD5JJy4DeNWyXYKEEQz1qqm46Iw+idE+Y4vsUkv+iTT9nAj4TClhZgpG+E+ESXYVy72g8Es2+wW5v6DpTzO8OHbgGghsnz2+gGxQlVLmAqm53R3fPf/l//Jnk81hs/3djx+ej0dDULOSxdCAwEwdsWMPqqVk5z2iOqK+iWWeTnMy16QKU5Euq1ZZtQEpurieDt/df/nllBM2bcYWuJlKxoiCWuaqaF13Vwo4TG0MVaTv2mJzSrq72ZZSpNrtNnrH06kCuNY7bGnISgq5JmXw6MEkBg8zeqSubXyIznloWkNSM3LBNwgcUMv65ibGUFImH0LbzePZecfqQuMkw5Tm2PTTdA7orVZRwCLsvIIjxWpJQNd9r0C5ioj50JOLBEqENze3MThVckSAVEr2zicdV7udYa2mDpDYLdaTqw5n0VS+sC94nbdP5OJl+Vl4EHyR9CzM9cILXRfQ6+f7B4N1eQpehmWRRep1EcPLQXyu4vzEQP0h1/NptF70SkuC+nUdBPysROA6cS/f//7j/uHD4+Pjw29/9+7p+WmcTynlXEuVUmqRkk0VCQlwOTq9Tve1Wf6anH11bH46nwzUlnRiQDMVu+YyLd1nIGKIUEGNyBEhcK3WBWyCu91udutV36/62HoiWnqrwKGiVBUSQxRRZN80rdUKVGlJP+4636190zrfIDlCB4YmWktVMUJGBjVShapWxTSJzpNOxzoPIACKl9wlFDOoNVfRkqVkG84lZatFTA1NSU0UKmBSregseCsEJhgURU0BVJBRDbJqLpZUS1EwZFRih84LoBMFASF0BIyXEjSQWpcmL1zMesbsxIiRjZwRGyyJOYJIagqiCmZLtj0qEtKiT1zoN7FFRiTVSoVctVQzEQVchDxVrVRLVUuFMWsVmKsV4KSGITpoG24QWQBV1QGZ1nw8q8IXf/SL/+b//N/++k/+9HQ6O23/LA9l2KOMRNQ0Tbfe+L5FNEfoCZxj55wP3oUAzplhqbWm2ZFDT+iYCIGdI+fcIj40JGRioGWYvREgIRISmBYFIjRg5wAEEVQEzKoqmAoJIi59eSIitS40JjMhoQI4F9gx2Kfz4EIVtV0jalWEkJbTiRBRFZbUCgRbroeAjhEQVdlMEczIoZljQwAxAQPVJfPMHDIYqUHb9I0HwVlAkEhqBaresRZi5OBdKZk9eib0LQVnaBS73/zmbz8+PH31+j62EVwI0bOjmo2QAbhIRgJysdaMqmOaGGi12zn2RlRrUQOndPjwkDKs16/Ycy1FjcZUSpH17j5GGsYhOHezXQMZskPXEzcEVNnffPllOg2n03FMs1YhREVk9nkaQtOxp5SOVbIUAW5NrW27w9OD78wV0Tlvdv3/9rd/PWn++S//5HAaTj/+ziEL6jBP3kXNNfiw6rbfv39HIVSoDqWiYdQQWyv0/PDuH+kUwG223K9ZBLxz59O5s855qgDrVds2zXd//10I8HQY73/+djocD787xEC9s5///E3o+Le/e6xztlJQy23futW6rOFx/3Tcn05yavu4WbfRd1lGNd1s25RKVcoZhtN0/+qVWDKDwzCsXdOsu9PhrKWmeY5tnE6npu3meQpxPafy5lW7P51X6yafwYrdbLs8TGOaNze7Kc05qdXn292ujN3hcPqbv//xqy9363VsUswprxvfODoPuRTOpoiGwGNOoFBK9c6pCioQATm3LKAXCplQREEFiZFQVQzR0aW2saosy4GCERBeNoEVF2caIoEtQUeLHGE5C1SXBklCJY4ewVaxXeFqyKfifFWckyqkSHbTBjWJsbWUxzwbc05FUYpNTbOtIpZGdgGslHz2zSsmglo4OnSoQORbMTSVEOOUy3g+hNi1/RrrEZlrSVYrWWD0zpFqDT6imtVjGR/IZu99SlPoAhG4wCAT4FjnMYQbXt8WUVdnm/bp9NTsvoT2hgBUMntg50QCOedhkPP+8fe/y8dT2/ab11/Wqml4noezDaltV9U3c8rFrZuu6UNIUsKsogDnrNx7YKfSbbvVyjvId1+8HlI9v2mb26/B4eu3vvV4OjyWcWysNJbzacb1boaMLI6beZi9mwGdjzeCcDy+N6oU3HQ6PTyNqM1265F9BXTrZnP7+sOP3wFR022U+pqyIq66po5DylPc7HxoW0tAMqepzFQ0EIX21Zd8d78/PsbQlfx4ztP25o6QzvO5mlQjF2MBacK9jEedAbNfrVpDTHooSZAq6FGy+W7LPYXUT6c0z+obNx5HDn51e2vHUtiv77tTOvzFb/8aHehsPjjPbrPZrILDAZ9/eKBWVhTub78AwufhfH6Y3rSrbRuPUwlx9arzFeJxqNPTvL5dt6GZh1kOqX97KxsaDtPNr76xBL/5zY+7m800K2p5GI7e
bV6//qJfvwouWql93+V8Oj693zY3x6fxV3/8Lw55PD48R8avXr9+evjAbfPmm3uo7stvfp7n8ru/2d+tvhZXfvxu/8PDX3Yh/PTd908/+ev9lLJzzrlJsgnUWhNNCKqqji9nj4owEiGlPIExETESGUjOWsWHMOZz45vGNVKLaDapqrWKKSoghhimKWE1UwvBi9VVszkPxzLNaNaEUMXSPLexaWIvNaMtaXy0XndTKZ5brSJVmJ1j8sy11lpTDJGYSkre+SK15Np1TVx3Tx+nX/3RH314//CLr7/omxBXLSpZFecYmHLOTKSoUoXQ+lWz2XSnaXM6bVe3xzKNxyEJefbw9uev/3//3//nVOaY3HbbnI/nJuBUZgI/Z0W0ogZgXeP6xq1iczrvVY3KKA7WN+3uy+3jw9OHp8cyHiKwAyzzvO68Se2Jkgdu3JSNmSNYH31BVfIhth9PRzVDprYNClYKsGt9CFKy905F5iJFQdhBlo8PH60IUkCR4fAcmtA2nQDebHZPTw/R8TSfuzYq4jglQDJ2s2TvcRxNpKgKuNg0Td5/rBUIl882NB3XMquWNB+U+yZuoeLpfGibMElWU4CqIuw8EQNSBWQTEBGFru9yWZTQknKSKp4ZgCNzMa5VhrmoHd6+vTEyqCAmpRbyFii66MlsGs7r283rtzePj2cxqaU2fdus2mkeY4hMU5kGVA3tapwfYvClFB8aE0k5u+BTSeyo6TpR1Vwcu7bvq1QCBUStkKaRyAFALdfqqCUiY2mAvZjDLnc3CEjkXnrAlx1uVSVeQosILw6Nl8gMXWSK1zBruML2iwthueVFoEtCywKJl5d+2TO1T8zQJ8vbJ1/DCy/wSUkCV4JJVZc7Zrxuc19jiq5aqIX7uhqxDfRzoA+faKOlUwJVJac8DMPpeHx6enr4+DCcT1VqScLOheBLyYb05TdfT2p/8+6jpSxFCepuFzXLdz+9AxUmItNcVKB8cd93fXz7+s3Dhw/vzsPtpu92rza3t6fh9P6nH+Y09RyagG3vb3ernHUuif36+fnRNL26uZcyl5rWd1+gX0+HA2M1yV/sdiKWy0yOYuNyxZRNZK6MgQGs3N6ErWEVGEYFBUERIAUAVGRrQlg7/zxnJhonm/M5OBJUMSVm51zOuQkx1zoXZWYtmRAdO89uzmKASW3VeQCIbet8+zylH/ePgQhViCDlWatGcqL5rGJJ2cGuXXVtp6Ai02YV5rmSd+BcFbl7s1Ww83EKXbPermPj2XOMEYFUqzfPxFoLO39Bw2BL4cxV9kPLgLzoiOwCnC9ijk/T/BnENlVA/kP8jqbX0wOv4391BS0mmiUKYAHuTKj6h695pR6vkP8PMmyuT70mHi0WkcXvCMu+t6ma1Fqm6bu/+4f/5a//QzYf160lXcf404efBHTJFlZYoCAAArLz3lcpsWtKUgQg5lyLKubZftyfVz6+f9hv+zcMrhZh75Caea5qEBocSubwNXJLKmzcr/x0Gk3K6XxCt4pt0ymehqwmt7vN8P7hrguH4ySGihGRm6arkLGe2xhnycg++r7UOYK0q8ZpoyJt28YmEjsFIse+CToWJo6Oganxa5EQg4thVYHZ+dWqT9PoHHt2GaWa3uzunx++b1wbgjMMhtp0QR2lw1HK2Pf3fdenCvN0aH3Ybe7mUlUSk93e7sbhHEOzWsdhOjXYheBKnpCBkckYzEwKsANTNLt0DVtdGBpEtgtbpFc542dEC35O1FzdYS+qn+v/L+ulXRVDnxFHL2lEn02R2acXoBf95WXBxE9M1aehu8jl9EIhXQRIS5DNlcX8Q+sZvjBMf/XXf/Xx3dN5PE8pVS0+hHmeDKSWpJdOJ7Alb91A9IUqspdXWpLnVAHNgAg+EVaKRoCyMKOi+hnFhQCmtVQARSTva9VkRZjHRAj5w/GMaM6xGUW2NgTPwm6o3ndddKEh5xZbhVgl0goFcEnCajg0TI6kEExsHoBVlS/XA4fMUDLUoklKmup01vmMNWMVBAIjAxPJAEmBpIpUnOc8zVYERcETB4eN46I2FJkMSzExUmUjJGBD8GwA6MxQTE1BQLRWkbMVnCozsS/kZwNofAQRRWaUwMpQyRRUFksqEgIQGSljAFJAARI1WvLiRYHQlkK35SKtskTXmiiwUzMttZZSlhRopWIuC1dlqKhiYqGIiroqIsJzqbNgESsAFRGJEByTi74B4LxY1YpKLQzgWIPDJjiqcxB58+qm7/t1H8bjSIguBgwBwtIDI+yI0AjF+QaZOEYgV3FJElcgMwLvAiI757z3YIKEZigiCMAXP52ZmamhIV5uZqiKEGNooplKLaKqYoCgprVKVRlTOg+TlBJjJHYG1977C6VzOQuuVFHbyrKxBQqAUuWzc2VpqyRCVjVCWgg7WBzrUhFBTcGUwBGiqqmimTIiGooqAxIaAkXfEbNqRVjOi4oKqoWJGDk4VzQDGXF4ePfx+9///mazvXt17wIDAAHNc661mmEt1TG2TTuXCmoGGGPjfYyxuZSxIK3XXQjh6fG8Xt04r6fnfb9aixTXtN122692Tx9+yHN69eVrDOBjG8ghRY6NR1ZDqOW0P642K2Rc/Hh+FaTmknNoWrNaS9GaEfl0ONUy63T+8dt/cL1LJc2nqelv/+SP/8v37z7+/T/83d3tvQeWVLzDJjbMvqTSb2++/eEnKNq0Np5Onv//XP1Jk5xZliWI3eEN36CDDYDB4e7h4ZFREZk1dVWRrGoKlxRpilCEC+4o5J/ihisuuOaG3FOEwgWlqoXdzS52ZWZlZuTgEeHucAdgk07f8IZ7LxefqgHZgLvAYGqm+qnhvve9c+455wpiiCGc0n3jmjzg9ByhcSc8Nc1GkVzL6MBA2i4+Px0eHw6+Wb/59ps8TMM4XL1ar29amQTr6Xh/HzGkMX/11SuoYAWczN//8Q/qPCI7wrevX41TKkWOx5mgCk/IGjk0TSvo1LDUvN/vu9iIgWpTZwRvjnwtZRzm29tbKzrNCSgMo+52ira/3gSb5+h8lhIjhYF1quP+1LVtzalkTYexX0Wwfnec3r177lrfrRpEryKGBEyOEMDnUyGA6H2ulQAXH+NS2ia6JOpdxKzAS6Qogpnxsl+LCRog0dK5QF7UB6qCYEhkqmKVmRFMVZn5ku8AACBKT89jE7gInKptxG278HS433arh8cn7zeNio4DxkLcIxR1lObcWtBi7ep2mLOSI9cDr4A78J1KLimXcggtA7kQ22IzMIgJMmIlI8jplObabG4ARXJyjlMayXvnW8Bzm847xJpUTgbiQmtInglEoUxEnUoyLYwMwFKzh4TzQ50OvtmE199WI6iz1VkhgA8CTAqQT+P9u9PDB3Akczn++Ac28I1b3910d69M3azRgm02raX08+9+ztPcdisDBUeBvKXabbXfdsxcirR9k9L45vbu9vV299OPq8YR2DwMhul6u17Ftl29Gh6G7U2PdU77w6pfe8+GlMukOkNJUhMQoHMYGtPm/buPTd921z0VutluHn4S00nFs1sHtWmeLEQTATGstTGUaZ9
Oz+Qw5eJD1622Q2hn9AXKdPj4dPzhavOrYT9ZTV/+yZun6enw8JFcPxPOQ7rtWxYMYZ2HxJsIQfbTvFo3DpXBz2ncNG9++8/+Z3/5n/8DxwatoklQt8EtxgqKPGd2DV/fvPvxnfPrTc+taTykrmcpnlzrXLgKm6fhdJJpPuUyw8P0lKY5MI859ZtuGOoq+Ju3X8xSPHn03N12dZj6tr++uUv3Gptwc3unea6HU9O9uQ5XX/zyn3716z+bhkLMkTCPBa9ev7n9OtcU3IAmcMgrpHk3FBIy0lGb9opd+/Pvh+f748cHPrwN//A3f+2suIBV83q7nctpWQXMbk5FdTlhm3euijCD97GUmmpl70QEVL1jx47J51KZWEGqiqGpUgytIzdMR0JGpClVZiaSXLKpdQVfgQABAABJREFUiSqzY8LFkG8KQx6WM0PNVqUiYXABkUWKqhmj97FOEymxAVi1WtquL1UAMZVcVZlIzTx6ddU7XwdzkRTRRLtVC727+mLT9G3T9KZoYlWEnPPedd6BnbsbkgsyKZBn37dd27rDI6QpbwMXLfsPH16vNjnEGINq7VdtrbnkFJv45vr1PI3KViTBPLFYHQ5R1Yfgm/ZUUphL+v7nNtd8GGIF55Gq1pKl84TYxNh0bqwIBAIIxC4EERlTOZXqGHMpTWgi8ZyyR2JET0zOpzwxc4uUVUeRVDMZIEGMRIguOHZuHpMZ7MvcxNiFEL0rmkzNE5CAiz4dj967pok+BNc0+8MxVOhig+iR/DLlqvE+Nu04FKs114E8E3ckCkrLgHZipsA1i6kS2zgdmuhjaKqoo2aYnkUKe+6aftbZVIm461qpZbVu97sJwcbTGGJ0DoNz3ao97g9atWvbWuWbt2/fP3348uvX41T3hxMihsA1FRPLRYygaCk5G4L3HsAInSl45lOdWued8875MY3bVV+FEEFE+65/fHpomgDOoSkgNU1Hl5750iBfYAAhGixTXS+pdxdN0DLDHuzs7jHQ5WvAzpOgXrrIn4benO9NaBcGasE453lnL9D58uALjrHPrgzgZXrZAtSXR/CTQwhQF0y/IKIXggpxybI5Q6gLbn8hl8600TnG4yxNOqN4RAMTUamSat2fho+Pj4/Pz6kkci56CH4Z6VYV+e7q9v3joyO/DHEfSjocpwAarbakkeOq85oO26v47S9eMwoWGZ+ftk0k39xur46nw49/+N4DodY+xlXbetZSy/3Dgwu+7zfr/mYkVc2xacqYqtQyJ6u56brgIA/HcXdKZfbBiUoqoFVjCIROwYoWU1gGWpOVPnrHPOVaqk255JxLrmbOMTWe+yZOTg9zPU0VENXUewYtJeFOSsklG3lvaKq1huBr1ZSr1cKOXJJimuckOAQXukieWLM1rV934XA4Rg7g3JiyGZ2mOs2DwZGtNoH7vmV0Vq3pV56cTLVdNbgCbuPVzTaElrghigiqJmpiCsHHRZ3wmVkQLzIPvNQswIWDQbyMpDnDabiQlZ+B7LNv6AXdngvYXgRGyxizT/Vpy1N95mTDC/uJZ9vmgqXxBZ8vrqRLrAxe9BZ2ERshLFkpgCAiJYtKrVpd0Hk4PB6HcTg5T3/7h+9OKVWpKecqVVXPRBVS9KEsMxaMA1OR4h1r1SyViH68//C67w+zP82T26znufS+MeKu2UxjWjnuIRx3z+vNbT59IPKpVB+cc1GKPTw/dc0qtq2Re96dRMqr6+1+2PWrflbcK1w7Zwohdrkk8n7Vc60yziWGLtBEIKvepVzBoSIuIXRisOo3poJgBLVbdaIVk4SmMfAmhoRN7NUUi6sivEQtEMW2B0GmwCGSp3nct12LMoQYgDoAspq85831Va0ZRY/HuW87AYldC8IVgF2UOpu27DhPO6QN6eD8nWplElAAAtF6ZsFNERnMluAkOM9JhAt5cyF9PtttXhQ1/4g+xM8qDS51e36uc7TE8t+njY5enueTLumiV3sRwOHlk5eHL4K5pcpe5GqffTsQwD9eAXD/9HA4HqtpbFiFppRrSfM0qQgudKTqYnBYsizOW7raC0kEcL5eQFS1i9Rq+YRpFUAkhEUUsvyUFlh9nteMqCpiOiuyaq5yGODj7vj3ke4+PL6+Wd+smttV1wfoA277ZltX2Z1CIM/BMYOIAsGSvAdIiAyCKqhkmkUE0JkRkQNklawiZUplOpVpKvOgZbYyMbpl+KABqYqpqCZRqkVqRilWC+QqgBgcOXZCxrXOIGWSueokkKQIIiMqnAN/0Tn2ULSioFUGEhEYq+yGzAiejMEgqhAoQiADErLKCKCy5CThwmAQokIVJQBdMklMsQoggJGKADo1NammVc0QkQWRnQHWVEpKeU6l1GqQhSuEasu0CixVRbFWKxWLgBqKmZhlhWqI4IKLGvoCDMaqWItoKSgFpHSrWJ8//qf/x//t6Te/hWN9un/0D+97sLJMKFMFsyqKoi2TVV08WEiO2DMHjg2CGXshJMdtiI6cgjGBgTjPeuY6QW0ZOGaMLCKmKlVqybUqAJ7TqdkTMRJizaVqSVpqmXOd5nnOWbS64NgxMwMwEisoABGC8xfn2fJHv+pFDcxUqqqpN3ux+ZiZCp8jtZCRVAUMAElNCT0YiKrhmVY1MUAyFQAFAFYiJAYlJGRvVkL0JgpazRDAl2p925liFakyNmFd0/zjD3+IyF//4iuBUlXa0NWqJRWptW0akUl1daopdi2F6Ml7cjGQDwaqIQZGLDkfh6fQ+NN4Ctnf3L1WM9fG1Xpdkv744+89cL9ap5zZfFytiUmNgL0HKtPIDOzgdNqZ1vkwSpW1XU9TTmOSmkODw/EUfAhdyHV0DFPdrW7Xj0/Hknkc4PnwU+hbH2m7alKautVqmMYqamIpJ08BKucJurBlENFUFWsyslznA6yvbrpfRm7a0DYhzIfpVA/p1HAMV6/bcTesr19L2qec+qsop+SgyJSHw4RN7133Z//q+u/+4q8Pj6dZ96++uHs4PF01/qtf/qKqHYZxPM21zm3bXG3iPE4IFNvmlE6omPIE6PqG00ylquiMzF3XSNbjPK63fZ7c4TQMw2mz3oz1/XzMZrje+N1pF9rV7VU7JZvnxGnuIk+pRiI1dY3z2Pz804emb5Fsc9Ued8PTU1LwxNK1UXINPoxzEoR+s5pOk9bqmJaD6cWiggBcanVMArLEri3cJyB655admphVdRlwrGpmBRCIX0YOIwCrARkue7qoEhi5s56iifH4eHBthyqp6ng6rrcN6YTVB8BxPMXtWsGoo92wy5o31M6let80/dVpPq6u13VQs4jgHRMgiAowKho4AtcohJon9ivzKyYnWZBBSgU1F4OZMjEaaMo+9Nw2UgtxAEk1HQMVq8UHr5DzvHPudRHP/bUDb/khnXbO966PyID7J9l9tJLp6gvobkCAZNKagRy4YEoEReqQTs9m6szX09hu1uvXt3R17W6/FPRlSmKwdjbu3h/f/+ChvH59o4r7/XOkGKMjB0rQrz1yrODj+rpbrw73H47v/iECl7nG4ETL5mblJOJU+jXcP93Hb16Lgw
BsoNyscjECbRoXyT3d3x/302FMmSFGavpgkkTjeJQ01girMgwc2eoHLNopl+Op1JGZPRYnxzQcSI25qbWSj5k0Sx2e7m0+Nk1o3AZK/mpzHWjz84en7XbjMAyDISogtG+uxcHd5jfb1v/Hf/hP3dVVLSMzTtMhOpZsOk8/Pb/jvhOYPt5/6H2wLPm4610buP/my29/uN8J8ubmq5wtRJf2x8Y1DlaOaDyVb77+03w66ihtfz23RTPu98V57l69Kj/ez4/5ZrOiWsb91G3XnMnm7Fz44u23N6+/PX6cX9++Cd3qGzQtp/1h+vabr+Zx13SvbDYUqykH52AEaanp16t4Te4wTsPVNZ2eH5SNzb74+tXzUz7uT3/53/8NaPz6V2++f/fDX/79X3759joP6sQOx9HHN/thejkbee+dIywKBE3Tz/NIZrVWRmpcQHJJimgBRUMrWqpIFWCyuCQaAGipMxRCNjXnnaix833oxmkoRdKUmpa890gIiMG1qRybJqjqNM2t6xHRsKoKMRBQKdUAmhjRlIBEKxLlUoEwldx4z2RgVmsBRFOzKo0PonU6DdT1pO75dPr2T/7s9uY2jUNs2MdQl/sSwAJdGMhAfRtzkfk0iKqUgigo3HddWEWV8XR87NuGVNb9+ng8CkHTBeUwT4dyql9e3T7uD1sGLGKFGYAdaRnK6fm6X2suajofZ1ZgDr1vUzlFHzm4VOahTo3eis6eGdDNAJgLMxpJJswKs4rWElxALaUWQyKnBBiZO++YLBc9Pg0G4LvYBpfmIiKgnOYZTUutsW0FVUHuXt887Xcmekh5HhOHSOi0KEA1cM6turViLT40YFgkE7i235oM7BuxoyM0qdPwHGOJoQUDqZUIECIBFxh9CEiUqxi6YZwJ0eQEKERgZrmkEN2UypSKijjCV7dbvObjcTjsxvWGiPGYDm17e/36NosEH+bp+LB7fvvFm2meXt1clWR1rpoUQVU0TXm16na7fUU97A/Re278PE/srG271rZSjcycC1yg694cDjsFLVqmKrFdOY+i1YXIwdU55XyeA4h4dliY6gVpL6W9NIjPA4AQCfElwwgvYOXMxdh5osJLUvUZKl9s/ucevJ0hOJ6PfgafMqUXKGP4eeDLC+/z2ScQwFRf0L7a5ckXq8eCq2w5Q3+CLede/Odt+Atr8PJe7Bzvt8hUzMxMTKdxen56fnh6PpxOc8o5Fe9d1/fDMGJov3n7p8/3T9e3199+89Xf/ue/OhzTfphcE8SQmV/dba/6q9P9+2/f3sQ2pppVNIhQLtREVPf48DBOowEYoVFlctOsmbSJse865rBZd3n+UKqAC1VRQZENYdaSxtM8zNXAWteuIsfgpmmqgE2Iy6EhggNLRc2TL1VAZDEBMpCgoWFoerF5GvKpTqOgaGbvZ1FR8IGA3DCn1hM5Lwa5ADCLABuYoS7HaBGpAoCzVae66nsg3a5XN13z/sNHWrcq8v5xH2LIxWRODtHAtm1LyOOcwAiYd6fJIWWpfRHHbrvp57mgd12/8rEPTUtNQ8yEwS8NYABRu2gs7Dwi/lwn5wIgZLhwPi//2mf65uyW+ceKDMSlF3epyE9moaUs9UJ0XjRteFE0LevkgtMvPiOFixnnPL+KPtFFZ8EcnINF4DxbcFlv5/oGNZFletd+f7p/2t8/3Qv60IaffvqpQAVCWMYVLTBeVcRC07BztWZybtEnqVrgAFa1FFAZT8P3736623x9f9i5xkXCzq0NWhf7uRCeZLXt9vfHbn2zf/quW1+LZDPx3LLidr3e7fdtt45XNyGDZEGom3WXkgL6Uy23zbbp8jQfnGPF6hw13pFCLoquFy2bpm16Cz7WgoExBA+xZSLvHQJoFeeZKYKL5FiBYmTt1wEg54mQRAQdm1R2db3qx93Jed823STz1jEoKPir7eshJ/bepil2bdj0VDxWI1Y2JqIQHOhCVVBwAKoUnFpxPkgerZ6MOylMFEFelPlsRmcqzxSQX2KIPidfPqsoPN9rX4SRnz/4shv9Y00QXiy3nzasFznbRTJ3mQ7wknF9kdGd9XNwUWEuOyGC2cUIhnbe2uHT/MiX4r68jSyyuloxuZSn0/H4/Pw4pklNwKRKXZj55XvOtKbaeYs30POaufxUzgzZi20ZzIxe1o7p4iw+L0uiZco7ES1LTKSqYjKsRVQrM37YjfSH+z7gbR+vWr5ZxbtNd73d3Kz71vs+xKaJiMSeYhNiUO8VpUBBPMfNE7KpFkNToipWapnHWcdJpoPkZDURVER3DoNaJH2qooJKICxZpZJWk1odKDBHz8w4qaWsu4Tv9mksUk3FAMyYnSFFH2LsCQP7gMwesZZZ0lizlVSnKvtJGsoBGUqNjsCzoAiqA/HEuORsI5AZEZM5MSVjAVcNGRDVUGQJ3FFVW8pIxURqzUjEBOS8ikkppeRSSlWtqspOOSg6W7RLSlVRDHSZvWYV0MS0isuGYgzoDaB1bp6K1QJQQQpadQ40l/x8/PnP/3r/h5+4Qi4z5pFwUTJYyZLGqW/YQA0cMAIgu4jkOAQKXlljaIQ8kPfeo0dPaIBV7RLvhYSMqGRABAtdqVZrBRGTagAotZSaBcnEIQKg5ippKmmu0zjPNdeazTDG4J0PwSGRgDlHjsi7IKp0icq6UEVdV9VMdVH7LPcSPW+rn9R0uDhU1ZbdXWXR8ICeZwHqRSK6cKkCBiJqAI6MDRG05hOhqpqA1JrYe3IEhOy45LRaXdWSjrtnz/KLX3zFzplY8B2yAy3EzK7xMVCSNqyfh4frfsVIVs0RSRbn1AWOXTztjqfdrlm3wD5E3lzdLHqZrtucxvH0sE+zbL+469cBgeLq2sCLADFrhZoTGjjvD/tHrRz7MEuRWuE4DFOWMQ/HcfMq1JrLfHp6LmqUpnGeT+xZQBTZNVRGt7vfhy5UteM011m6vk0lLdGfzvHHh/djrc756XmOHtabDqwGDtBA17THwz6nSSCBA4Y5Ng0yq9Vh5O3VqyoGKjnn/eMekdqrRjXdfnl12O13Hx5bT2//5BftKpz2I4bVP/kX3+5//mEYxte317453X1FZvD994/TOELJFJvWNWvEWgSVFKSqucAuBpByGmY0XPqmaZo8x816/fTwWObxetsPSg/P4+b1+ljo43EwhK5p130nKa365jA+K7jYxcf9Q9utrm9uStXn551xcU2oJk+7IxKehkqOQ/QhggKUIsDOeZ1TYgZVY2Y1WQK3/KIqYmcmqsBMy6EilQpgiExkgFBFmfGcwWBGfPHkmy5zBREIzJiWFuinVttwGg1tdzxWb77pHobT11+sv7j7YndMKOiIxzJ3sR/mHNuo0jB1FP1sYNhw7/fHct3fprl6Z2ZacmZkQ+TYKHty0YgdegDSlA1HsinPIzt0ITKRlqq1IojvVsQBDAidiaEYOkbnrI5IFdVcE9FfM0QAAUhgJ6RKzRrZy/GJ8kFkov7KXb2tddaUVLL3HsghseViMkCZ2en6tiXzsGr625vm9rbEq6mSFOnawJrr8UPZfZBx8K71LuZcX79+A8jED
lA4+sAujVoxcNNJlqf7d5s3fbtqj4dxGPdo2jXN/FxOz+M33zSOddx/EJjbpqtSOSCg05KF2pRyrbMnV6rt59zP5itdXW3Mu5TIVd/2ryelnLKrO1SDglqcCxSZ67BL4mstQL5M5puV76+Tp7GemlWTkQ+nwa1et/2mCVeaxrvuLpnz2+316/Dd+/erDu/3O0Q3P//lVequvnq9G56LQp7nw/7D7ea1N396+rj9KkSHz4fDF1+9HU6paSyVNEtu+22WoWsoVbje9u/vn56G8YtXr/tmw40xsMebIydrkfvwNI8hdKpYms327S+GY+7aVxuvbcOg4e3d9vruzarfSvGrZkPO2EW5KdurVZ7yj99//PLLL3uX3r8/vrm7Svu5DLabdl/98sumgaZzylaK1qq14P4peefa9g1AedgP//V//PB33323XW+wWQdu//1//Ivvfv9hvW3neRqfD4Kw3V5//9/+5/H0BOfNHIj4fPAxnNJQa/HOGVJwnokFsCH2/cqkVMsxdlW0lFLTBKi5luBikVpr9aEh54i8Qq1FRgN2TYwoakyUSwEB5zDJBMiGxMF1zLUqGpY8OtdE59k7gWIIBnQaB9+ENnTTMItK8M6TJ2CrdRm1ZiA+tCnn5QQW2qaWLEU0g/ch1+pDY4BKCEwKBkglZSZKs6hZaFlFXQxWS5k0l/q0nx7vH4d0fLq/P+0OsQldGw7PA1SToshOquVZ9mXo1fOQTMrV1TaZhOCQqQoH7bi0cx1KLpGw8U5FLc+esUgd87InRUftBDMRTNPcOkYfFDR66rrmOOZ17EvVXCo5F72zUhelZNM2oQ2H50Pr4qu2HUVPUgp5dVYESLSURACEXKtVUTW7M7hq+zQNvG0GrwUEGBrfDNNcjrlbh66/lmGX54zspvkQm2uP/jQXIFyvtyUPhJimcRr3hafYdt57IpxT9j548q0PRTKpzPMUyEUfcp1rLY7ZVESglsLMTGSI4PjhYR+b9vp6PRwHgdJ0q5zgNE7bLTNR33fiwQCnKRF7UmqbmCTNObZ9c5oyIYIAI0ot7ChrRWHyXgEZfUNNgTmGAKDB03F4jG1TqgDVOSdSrYoE3rSAEpFHqmcsfDn705lPNIDzfQSJAEwB6BzqixfDwVlIQQsfdHGhfdJbfGp006W5fbFS4HlYMiItB6dLSMfnzfZzpixeUMkl9WJBNvg5jn+RZyxZMXZWaLxc2Ms7tEWHAsut8CwhQdPFZXTBNgvyV1tSzEueD/vd8353OAxFIQuSi0i4RAfPY/nd3/w9qm3W8c//4u/qkPM8m9W+6Tbd6qqlKuXDw8M2BER6fNz1m66L8aqPJe+r1GEc85TZub5voc5A1K/6436UojH6dnVNSO/f38fIsd0Ct00A15NCSUM6Hed5Hh6fBmVX5lxK6aPvmtitWnSUS606R2JQIXMI3Pgwm9WSa4FSRRSQQ1XzPrhQG+SiNs0iguv1OqUy5wSigETe1SJMDplU61zEUFdNQEStpY+hEhph9AGkEGIXfWB82O184+ckw2kM0TXeccMq1fuYxOa5zPOMas4ZGjTet4Hbdvs8pCHlUtOrt3fXt9fXb97EZu1845jQTDWxb+CCKi+UJC2Duuk8fIrOkrDLTDQ4a+UWC6R9Vjb/o49ehBef0UR6mU59hsJwUbrBS5INLQKK89nKLllIC0101sMtGGJxdJ6TBS5BL3oJMzJQtaXvp4BgWmutaZ6Ph927H378+f2Hr375i6fT9Ifvfl9BBKCqlSqLLQiZTJUdE1AqGZkcs5QcXBRtalXT2oYYgp+Pp/18nM0+PA1du9q04XiaQtc1m7t0Oj4dju22S5Cv+yvigGqOKYR2SsUZNk0sJQzDHDnGdp3KibF2bQs497z+i/c/vWm6ltlQ0UdDLSmxg3XbWNdMqVSjfSrNZpuzMLrYr7LU1jETxdghmmRl7wHQgIP3tYKLXmLwoDySVm2abi7ZEYsmjrHbgNaaSrJFvY5+tX2Vcl313XgYaoXV9S1wREOQaXt9c9yfVL0BE6OpNetWdc5F2+DZuZoLuVzzg48dcqNWTSphQ+wMyWixjix730Iu62d6InjZvs4sIF2+6sWnBgAvZOHyl4ss6VyU+tlXLrWKL2Il+Fy1ZC900Estfirmy453/n1RtJ0Z1XMRg1423PM1nN/C7e2t1nLYnz7cP77/cH86jaJVazGRM5d+7gEoE4jIOdcC4EXGp4vdc3ndM2cKn2/CCGhGCyKxM81khPgSKmxqWWVJzgakxeMmoqkUBBgmfD5lT9hF3oT91Wq3aeO6bTdt7Lq43azeXK9fbcAA2w7MxKRU1ZrVOccGBARkWjTNaS6apwzTZPNEIIyKl5w+IAARUASg5X5oZgikClIziBEokzeESXQ35Xe7/N1OdrmK1oDqCVlUJBcgLQXBRWpdt8GmQ1C/Ui7jeNwn8HkehjnvIXtCpx4DoZAjVbJAICCMAIjOOanGbEYV2SMQG6mhmlqti7/vXC4EYgaqVpKIABoRoCio1py11qrV0MRUCZVI0Smyai1aFaCoVgAhrAZJtRqpsUHI4E2JkBvADjBpZmfJKlv15IhdmguUKkkMrJpV1JRVFIhR1KTkmlIkVFPm6JiJGJARGdmRc84FIG/AjpyBOecUAAxEQc2qVYfnCCNTVVMwK6VkkVIqqpqo1FyrqFkuS968DVMZxpxmS9PIgWJw3oUYwxK1CEwMAIQhBgR03r2g4zNV1HSdGICqmcjitly6R0uT1vSyoM/RmAuHK8voKAPRM/EPYKoKRqqqWhHQAYhoIGQDU0GMJorop3Qg51SqAapZKQUAitRax+eHH/u2iZGLSONbBldLqSrALnqHDHdffHv//l0XoyNErORstd4AswtBpY7Hk2O6e/sLv+5DIOcbNdMiwPjx+x9NJbarb765O55ORanrrlyzrYrk0MegopITMSYtt1998fx+V0tltlUfD4dHYmckzDyP+zqlPKZcpQrsHk/zlNA3VXV3uO/alrCsVpjmKc+KFXIqTWznSWou5NGvXBp2IUR2WbxmxZSlIavz6BhLnuZp1PXVlE8AedtFCNFg5aEZnsrh4en27kuZjhwbIgCiD9//2LVoK7l685YwHD9+ANTQxlWZH58ffv7xp6/vbl1/9fzxHqMbJwVw/+Zf/Cvv7cP7P3x8Gg6HYdPGJriATCHMUkutJWXV2nedggqCI5ZayZL3TRv7Oc8+S1x1cByO+4MVnOb5MZtu6dXd6pSz1cqq4zA3m67vu1rKqo/HVCw0Ust4OpIL/boZj3l/mA20XzVLhXpHUgRdaIByzaIKS/PfYDGWqVVQIWJ2tJxx1ZYRZ0AAVc57MSEpgtk5uwsI6DLh5dzWuLRVDUzlnE9RiniiuQq5Jo+1qH9/SOsMGFd5v//Vr96M8/NVtzncj6e54LpJ3gcXTikVwzKVu23HsZmOKSI5DElBxawaAgJTMddQAziN4+zc7D3P5ZBFpeZ+vTZTzZkV1GAZUkixy+PAjM5HmYeUM0Ioqfh2y9hAXIlqnUdPAMYcN9S9MqlcDmV6Aqvk
G4zRpKJWNHShKQWlyJI0lodDTXOZpnbT9a/e8u1r7a4AfBTgUGn6OHz4MR8eQakNK9dvYxv6UgyNna8CtUyhbYC5aRsOK+c6lry+vYs9skxy2u12h+317TgOQMLRIWixncMr9ug8GoT5lJxXYlsC1YigX/frPEEyD6DliAqvum8f07ECgPMUmyqzIyxS5/HY9hvLoFJD147HodTEsRECCGxKY8nslOSEdXIe9qqgImmc7n+6vlt/2I9JqXt95V8BRdJ5rjoVxz/vfmxvvgrOzaeRguv8lqDt12s50K9e//O/ePhvGuox4F4tH46xa37xq29Pz/nx467ZrEqeWr/tyb++Xver7vh8EgnNKjRd9+HjwzzVyHB4/ul/+q/+58MjDge84Tdffu338MevfvurfvX6/Yf0zd0rNVFwnj3mOk+TMNcsBzq1Lf/yt1fzDCGunMLrt69+3H/A6v/sn/zJu4fHVbMWgxAQGZC5jyG1LXs6zPXH+/1f///+ardL4+S38Spr+U9/+f/tA3/5djPMwzTXilqy7B4fQT4llRBhyWXpJjO5Uot3TgFUagZCEO+dSsq68KEJDaTOLkTfBABm9EwECgQOwOZ5xKbXBXoWdcSOnKmwC2o1OOcdlZzQuVoqmzeDUnMMbWw3eUrqTGsGxK7ZDuMzEJW5MEAtmckLCSNXLaBacnEUPDMjEplIjU0jIiWXftWD9wbmI4OqVvXkhaVK9eTAiYnLUuc0lf04H8dSppTn0+64OxyGYXp+ejoe93NJAFCmHJlvrq8fd88BTFQQjRuXBH/cH+7WK6r4nCYwLKIISOxSVc1DTidSaKI7Ds+rZjUOlQkoIDKfVETt6bjrIrgq0UETwEjHNK3bTeR2IvGOHWHKqYKZD4A2TTkQpJzJ0fXtpqb05WY1jGk3G6JhDCnQlAsvhw9DJgQiEX0+nX719et5cg/PpyhQci1ZmbPzzgymcSA0z03RGkMb4hWYFDk574AIwaZpCDGazWCKnqd5cOwAnZkhkUqe5tkAiFyu2q+bEOJURmZ23udSqlQQIzBgV+aKym3bDMdDcJ0DE5H9MH715nWRlFN2zu8Pu69fX6lZaLQkA5DXb24fnk5qqEhZTFRkLgYw59mHiEaqSoQppyI1NO3xeGAnq+sgKee5tE0jZCYy5aFv2iK27jYlJ2TwnuESzEKEl9iWCzJZMlgvuAPsQg6Z0QLUl7BpokuLGvCT6+dMBF3gBy4h1i/+GjgH9gKCfiYfeiF84IKdP7uCS4jAS0D2GX68xCKdVUEXNH/WCZ2ZrZe8oxcktbBIy/fQIm+ysyvphe0S0ZLLOE3DcJrSmEp2oaEkYHnMaZjG4GMTg4uGwlBSTjN4LIUB5Gq1aZru4+P93c367evt2tvw/HiziUR0Oh6fH94FT2ru6vqLts0PTw/P7x9b7wPRcNpvun69aZur2zkXSMdNtyUHq75PplKLFhmGEyj217fUrr9co5Zaxuk0Dh/uH4+ztUle3axjCARKoF1oksBUKmhWK86xKSNwKrWWPItI1Vxr8D6oVHJjkiJD50NwCy6q82wExlgUTdRcCHMtc0WPFUGnUkDNkzOwEFzX+dZD0/j1xkUOT6cc+gBKr9braqlxvgluNwwHkrZpTseUSyojiGDwFIa8uV7FtWu6rhq66IkJ0dVszIqEzns1YXAX3Gz0UiP26Q9TAUJ4kRrBIq1YDvAIF2UcXCxfZ4C6QOeF2rEXScSlb/xpwJq9OIEIlkBIuwzDWgR3gMgGZ2vjRTeE5xekJScSPuFpALNlYonRWVmyCFtLlZLLuNs9jvMOWT5+uP/x/qGCGKpUm+ZUa5FayPGyZrx33vlUkg/eOW8iQIykzEGhLvEHBWyX0n/zu+//7W9/u9kN21V72B2vXQjNmqXkYedEQIZ53jVxM04zRA0NgXDOOUTX92Ecpjzs1tu3MmdB6Dftbtg3Tl712/1hFzcxNq2WGdmtujimh67tEdC7ZswZmLrGA1TnMEQGT2TFrPjoa8nRhyaEImiQQ4zsFB1hMEfON8HMLXM/TRHV2radFF2wmjOh1jl7Ftf20zCaAjERC6EiUQy9SWl8yIHRXGhayWOV5JsmzU4koRIR1pqa2JPNrHuwXrUlA8ACYOjcZaLXJRoaED7FVL3IhvCyw5x5aQA6a4Veos5ftrrLPvW5SGfhTs6FfCa74dOu9aJCsk/j+C7yoReq+x9tvpet1S675gsgOEuTPhHwAAAQ2+b5/ng47R6enofTJGoqqnrhqvCsmThTTHQx0X3OW8H5/ZwVQ3Ym9pfyX8KZz9PjDAGRiJAIP2s26NLnVjVbwuJkWW6LSMoMxIAMx4pKfDxKV2qX5luFO/Zb7533zBhbT86QkfCc3KMiohnJGZjUIjlrKZImrJmXVCEiYl4WrdSKJI4ZBAlZhFRqRZm1zLXkokAoVYrgoegPu/mHfXrMygwxhIjQMYZapdbZTKFIHQX69uauv3pFZKXOMO2QIkEUYy3HscyHqXgwKaiBvZPGsYIGRCFAdmpEvHROCA0FWMgJetFF/KQLI6MqgCJAIIK1GJioMCMamogW0SpaRUUMUNSAGH3EGg3FsArAQsEroFIQosyQBWflbM57121cRIPYyKS7eeSkrGxk1bTxzgBESjVTgimXiqDnQF0AU63ZPBMGJAA2IEJySB7QAbABIzITGRjosrKAkBQFgNiYzrMMzDuXi1ZRU5NcVQwVRGotWQFrVUGromnOh+Mw1xp82226ELx3jhiIUEyRmT0zMSA4x4wERPg/UhUFH+RsoTRRPY8PN1lG0SyrAAEBXoZZgorw5U4gdjlOmZlWMzQ1EV5KHJkWNxEqIjVVBUhjXKsgWCX2SlBzaWJI4zCf9g5r3y62Okb0VVTAmnYVnA/s0jyO43F11cauY8cO2QG54DnGZaBrcNGHzrmems5MUpJhv0ctcdP0q7ZbrQ1xGPer69XmeuvjajlZSU3lNJmp1VoNapqiJ0ad57mmcXeY53FoulZyOY7T+qbTUrSMq1W3e54Ite/W7z4enae79avnw+N0Gn3HTQyn06mJAagVq23Dk5GIpjFpLtE7oBJatmJmOBxn1wXjus+7vu9BUhN9HoaH41PTNpvyatVuXNiYCmHd3r1KJU/T2Pft17/86un9u3k4NuOm6YJ7czuPh76P9GqLPz6srpCxKIdfvv3nZTg8fPz48PH+bw7vTsP+7d23X7y6S30dn/dpmpwj9uzJiAGID6c82ASGTVw7s5ILeVDC1XajRzrsxi7G63W/251WnlZxNZzqONbDYexX65IKOp+EkEIIgS2XMm+vVu/vDzkVUBzHLAZMLng01XF/8DGQ98zL9HorqgaISGIGakS06OtNwUDNQLUSkogSISjiWVMECKjnqHvFl+aGoZoRng/RKpWZ1ayKGeilbwybrmfU15vm6eFgetpebRrm1WY1VPziN79+//ixa9paFZCpa2dClAq1eoOpJEt5VRpfRu4cBC6qfrUed3vfNkaChERQyknqJFat3NfnXRlnCivmGJp2SrmJnLMomG96Rq4AGAK
xmWkxJLd2MUA6Ciq5Rs1QxTOhqFRljirFacrjc02p315bu1FRAAmRShJDADIwYecYDbk4D6vrm/buF7S6xtARgp2eKQ31+Gx1ZLMQtnG98U1TVc2qoUk1YvMEIXbgQkb2m2vkyAKH4Tn0bnu7efjuYdgf29C9en1z//4HBum7cL/7+c1XX5U6rVcbQuMQEYOBciSIXdi4SrZ/fE9QW8LoXMJuP0xP3/3OqsW+3VxdM9p82A/zqGogVsdJTbOOZZrUM3uyxcQnOjx+gDYI12mfRc2FdtW1U8pF6j/502//h7/9b9evvrQRhjJxF0pJbPl5/+H27otuEzEfnK1XvHbOJFaPwVzgnv7w81/d3F77gryllWtSbJUZR9zyavPFrf+i/0JfHX9++vafffvzzx/WRg1Fx65ZxZ9/+OOrblMCUoDf/Jt/Pd/PW7r7L/71f8G9pbR/+6e/SUotN19tI6fkGjenvN8d7u5eQSRhRwMa8ePT+Op2NTw9v396ev2L6+PTU7+KK7d2YHfb1f556LcrqaYZ0Kxh9+b1Omv9D//xr//z3//027dvEY//5b/7zX/4f/375zRedX1J8/750K9XFcHY+m2nqmkqbezOG/syWIG9Yx7Go4J5Qma37O+OCdFExEwBiDimKbNz01RUKEYOrjmOe2YyUEMXm76qOs/eORVTqSIafDRUR1RyLsVc8I4pFzMw75rGN+M8eu9DZDABJBM5Tc8lz2bom85IXPBoZGA5zwiMYEvSi6qmdNRLaB8AOO9cdBtqc5F5To5JROecAJCR8jwxMpL93T/819v1N413UOu834MTpCQ2xTWuIY6Ttt49PRxKNYiYKy7gqMxFSiHVOWvsmr3M85A2TXfVBkIQtaKlVquWoyNSrVK6rpVamSB4mrUOxQa1qnXduJUjApKiZmLgtv0KEIfxRGZWFIHevnm1O57uj4mZFaioVrOaCjq2qrFBbAIRitqccqrZk/ZNO6YC1aDWNvoicNgNx83qdtvPQy3TsW+CWUUzM2lDN04pD8d2c7deXc85aS1MknJi4xA3Brrq16Ky2V6Np5P3Mc3JahXvHJFJEcm15NA2RNCgA4VpOlmtgJjyHEM0VSDzznkfTKyWUhA2mzYGt+ma45imuQzH03bdpSKvNtthPp32p83VuuuaCVRF++Cb2E4pT0MuSbxzw3DyzjfNCsxC46ZpCk3jGAHEudi0XZEipSKid05EtQgDNuyXjI9asw+t1Nk7DsFdQPKirzFYQnXPHrSzBuesA1qO+ASXoKIz/7IAHvyMYblMnbKLwuKS/3JGxfqC6D8hFDNcPPyfwBTa5xTPuRmPF4i0mHngEo7xArZePD4ILy35C0m0PAECIKKq4uKmOzNZdD75Le1IVVVV0yJ1SvPD8/P79/f73T4pzvMcosMiN9utoR6HwRj+9Ff/8m/++j+xY1CsqRLjx4eHvXvetjGGxowPw+ntmze7xw/DkA0wUMvAN9v1w8P9fNrFGLro2yZgLU10tzfb2y9uqO3zNLp1b0TsYE7peDpRrp4RVFT1sDvmPJdqUoABnJOv3t6kIuOcDvtD9NxErGAWvJkxQNEMqIRMRIigSt6xsTvOYy1wTGXIIszOO3acq6ZUvXcxuFwFCdpVcxpmUzAxVgJU37IIAELjGquac7q+6l/frq2mWrUqoJVhfwJ0XRNO03DIc8ex5pxqDuzaLmz6OM3pNNvzbhSp1XzIlc2Cq4g4n6Z1W9g7JnLeSS1ojOREhV0AXGoAFQzMCC96MTDEi+DobMHRc5G+1O3LULxzJ/5ScZ/q8QW748Vodhar0QUHnx2LZ4R+iaRZXvrs73nxlNlFcIIXmuBMIOmSdWsG54tkXbrYoKKS6nQcn94//Pj+4af73cN3v/++W/cCMM2lLi3rZeKtgVYhpuBDFcEFiqnVKuxM5CIiUM2zAqCI3X98fre6v4s0nKboPRjnWtu+46m5PxzIN967aU5MLpf5cBpW6+thzkXMRWcT1KzzMDUh1prrXL9689XhNF1dNcN+3A9puw6GRWotYMRdAWqb4Ixc68Z5CmQYO/bOmBx5RgvBVbG276ZpMHKMWE3QMROiBx8DAnAITnRxhTrfhLA2hJyAUNhT1cpIWpKJdv1qTkPTtqoSuK1pbldbkaZUCbE3cNT0ppmt5lrYdWBuv79/Fd+CmlYFgJoOAJHj2qhdZgGQVEAzAnRn3eVZNoYLToSXcP9PosgX9uSTiuhCpVyYx08PXTKGPk0oO3Oc8JnE7ULGXMwBn5tpF3YdXzxxdv7Up9e+fPaFab+QmBdSCwAAfvjx54cPHx4fHne7fS1FVKVmpkuc9Vm8uSQKGV5K+5Me7yzWPF8YwaVxcL5eBAMiWvZbIjyP4SEEM1UgWCxqi1LponciAgBVQ7TzslMjAjUgtdZxG5u76/W3b7bf3G2+vN5cd37dc9MtAjUl8iagKma6cFRVqs6T1VzTiLWwGSMCCC3LnQgMGMOiT3QMJABZK4IyZuBZYTJg4FzscZo/zPLzKR+qhOgbhOi5ZWoQmuBqnmkas0HV2bTkWr66ul33/TjP87EDCypQVHQoueYxa4OFKpCwOcWggVEW8wiqmDAgI4BYQK/kFJ0iiqqKqAGKqoGqAkI1RBUSVRPVCoqgZqJWTaupoV5oa2U2H7UEcxWA1ZISKZoYqJEaVsAMMJtWKJ486Pj6yy9s4sefE/vWB8WqZlrOG6OCUhHVZVgbEgI4RNJiecaCjkJkdAhMwJ5D9IZUVVGXlCV2ISwtKBFVVCTHjlWMmNTUVLz3y+WXWqoaGKkKKIBgqSqitWqVOs8ll9lMVl1suy4ET0sVEhMRITCSc46Yl3sHEQGcyxJeqCIXPOmiLDVe5geA4eLzAdCX0RjLuI5lLbnFcG9m6i7rS1XR3KJJIhEzXexsbACiywWhJS1Ozhw0Bcdm6jybQS0l5dFRA6K1lOC76L2AaSls6hwKGTjXrVpGISYXo3Pe+UaRpFYz27666derKtH55nQ8TKdTPp2iD6vNZj8M5pskrlQzaWoOu4eROZOjAJZSavqVC2gl7+736NxptzscT45RplxKmY4JRec0p5y11JqLSXl63qtAyWBYAerxNEHBmrIjdzwmwVyypDrFth+nqVYqpaJzOQlhkGwHOXxx+0pByBAx5rF2K2zbaFKn095Ba7JwuohHLeV4c7e6fXN1PB2KAnJjxT3eH+7url99+fV3f/sPZf591/eu32Zo0yE5g6sv74rK4XG3exxAHOah7dyv/+lvCtA01nF3/Lvf/e71zfX6uovmpuMpj2NNuVutPdPrt9sxD49P91JHIGpDrACnYWoiBM95hjlPm9v1Bun5cbfuwzjCeEoxYAi+gjjmp+f9F99+2a27CWH3cbhpuq9f3zzQfnecyzirZYTskPqui2F9mMeSZu8ckwc4S9mXflI1QEPCcwUjuSqVmU3N8XJKQZG6fOAdA0BVxbPE2hCJiauB6kJYvrTFCIkWB8Lya92vaUjX3R2uyOR0c3trA278akinuVSJPrF7nveG0nTBMM5IJYsT64JdXV
0dS2lAWceWh5wLFiALCBSaa8YA5rTktl8f75+KE51RT+n29jV7MUVWUU0qhZsG48pEQarzHkqWPDvfALfqGjazcpKSip2apkewNO2lDM6vA/F8eCST0HYQeo0rchHyyaqZglUkA0PDOtZ5oNiv3v4K4hVvr6wUnEY7fMTDEQAsFeQY1z1eN1U0awXXOYfYiEOP7LUKMxtGBMdmbjxYGnx5vun9x+/+9vT05Nlvrq5NpzIfr67e9Kvu4eHDN7/65cMP38skAqnZvAVeqaGLPGUTEXLd5vZOFJ6ed7vDQVXnWhsO5TQR2BxirYTUqBxVK6qgFRPIRQwQ0YlUj+Cjx+kUWBGCIoTmKgOFbuNvt1AmONrf/uG7pu2HZM/TqeMcoEnj0WG9Wq2yzF2/Ls/JhGL8Ovb9U/q7PE6u6dvtKrauDtUpddL1ABJcu92sV1d9u/7uuz/6Q4QZNvNVA/6bdkXSjjad7net+d/2v8xl/nAamu0v6vinrml++Se/gLLfP55Wq6vXb978+R8/rnSz//DherVyAZmgj9FBNYLGo0U+HBIghuB97H/769VpHu1Yrzfrv/zvfvfLP3tjgfpu+9//xd/+mz/7kzLJ+3f3X39zhWhB8bZt/8lXb/m0e/rjd98NZlPtVqvWw/cfdiE283E2ClKogjKzWS21nO8FTLmKai1nuKhqClXMNISAaOM81SKAIDk5T95517TecJhSNZWcXPCEhorsHQOrFGJSMEdcRHIpudbgmTnknL13tVYEx+SHefAtFjNmmOeZgJgQQH1gQAAfwAxRpIhD17RhmpNVpMCRmiKz91RLNQV0DrXmOSOhqSGgdwFFog9m5p0aYq01hgbYtFqV9Jtv/pULG9+waRX99vl5f//xw2ni/fPT8fFEirdv7lLSp8fjNNcqj6A1BI9gCpBzKZKGhH1sNIQTQCAX0KRmdmiBcqree81pHFMTGwJABzPYXmQnuSKtu9YTkpoLxMSbbn2cZ1VjT1VLCKFvQ9uGX3795m//IY1jUcL22h8Ox6dT6Z1eNau+WR/HqZjkZH3bKkEbLVdpmH1L05gRkZZWDvHT87HzfrNdpZSScS46zskMCTKBocmUj853SMIg0bs5j7lM0XGV2VELxMTY9j6VOYSQUyI1dOAD1QpEDIg5peCiSDEtzjsRuTRMEdRESp0rATtmUCMHpZauidt1DzamWpo2OMrz+PTV3WsR2O/mvtu0gffD4zjgqtsg6VS1SPWeS84huBCiIQIC+8zsONCcU7fufYw1p+E4hfWqkmKlru8MbD5NBgCiaT5Vyv1q5T2mOX2GNAzMkC7tBtWFRaGLisfO7NElOtrMTPGc3vsZT7Qg5mWG2aUrfn4Cu7SyX9CR6tmvYZdWPJxx1oIRzOxiX/iUKHRpjCMgLB6jc1//IqG96Jjos7SPFz3TRQWFeEYyy6Oq55cwhMVsoGYGOZfd8/4wzLv9yOzLeJzHE/E6tu2sVfJ89+Zu1fu/+t2fjzlLlXF3JNBxLBJl9eb6zddvaoVjtlb89+8e6rgnbU3lpMk5N+fZmet8P+YMrIBl0zdvfvUL3/YHpOHjfe8D1KRWas2dj9fdutqxpsFqUQBQauKqlr0pzXW2ktu2jZ6D7w6HQy0a/bqeE9Cx7VYhcCp5zllARKGIVoNaK5qJQKqQlSe1guKqRXKCy/EZBNCQpoJ9fzVNo4kCGChuQu/IHaadWYmer7ZrRi05B6QuRkIJwX+98mkqeZiF62+/+TJPFRSf9s9jlsfdtHiuPLu7q22SVBFTVpxr493Nm+t+vWUOTQyGuoz1UQNQYecQliDyJeP1AlbxxWB/GXL3Iqu+wM6XUr8wj5/Q9Ishh4gW0G6fSvksvFj4pjNJBGBwTtB9eV6AS2TR+X80ADvHvOulvQdoqLCEVy9Dc4CQVdV0ccyIok2n9OHjh8fde2EZZH73+GG1Wc9Tej4cShUDXDI0lsFPgEDOMXuFAoAAJKqLssNHr6iGGENDIs55ACtSnna7Q776w8eHt3e3+eHhV3/y7fj8odtsHt7/tF1fK7nt7eun9z8Fj3lM1mrbtARG6IjATI/H55vr22bdDOO8bTd3bzp7PlCNTmtVDL6TksicqCU0Md00DUtdNys1QCYXvSiCMPtowXtuLI3BBVBCdrFxhszOiZWuu5qGvXfRuAqI5wg+IBlz6PstWE7zIcQmTQMycCB0kU0cs/fETmu2EJqSZ0PwzgszBtf77XCoauo8IYdpP4CobzfVOFJHSJqfvF+ZbZFXamhWmAmhkOmSD3PmXs4exIvg7KJ+PG9Q51rAyy5kFwWcAXxK0jd7mWt/URBd6vOTwc0+PS/AJZNILw6YF5XSubbhH7NSsOhAAejCDdH5evBCv19ayD/99O64Px4OJxU1k8WFIKIL+MUXUHFZRxfy6kVbSkCGZ9Px2QGKly39Qv2f4TA7Z7g0ts/sLQCIiIqa2mI5fXktovOtxxGyY2bysbnqujdXm998+eqXb66/er25u4595C744AFJmBxzMDWtxbSS84CSa8mp1GHUaQTJCBUQdYkEomW8EAEQUUTwsMywYAXQmhWgVJGsOCtlw73oh6m8O9VUMVLTsosoLXPDtI6hJQvbVZHy8HzcnWYbnuvT7/MH3v76X7568/qp4UW9blqrTTqmbGmuEMAcGC60hSh7MDRENVLEeP6nR0L0dWGxFM7D2E1UQVUATZSWyQMqoqDn27aC1LIYvs1MRI3I2FvoqVYtgJocm5AiGnCjpGYiCmJE5InQALu2X62up+i3k50+PAM6REEVZjICtYWcJ9OqarUKEjtn7NjMoKprkEDJYQwRmM058N7YoQ9JzSMqoGMyM0RWA0/MgMZmSKJQTUupiljERNRUTKuVYlJrzSIianORacrznNvoV+tVbCI5ZkIRw7MYCAnRecfE55o1QCJCR/yPqaKFC2JmMzWCRZ0GCIwEJqBLNPt5zxfTcxPhPP7D7DIfUFBMlQxUlxjLBWnYkqNIKoBI7BwFkVjnHFwrAgDqPUmZ5vG5pOF6/SqlITZtLTWViVwbm1Vswmn/6ELsVh0gmIKq+j4CYqnViFdd60OT5jye7of9KfpArOvb61e3b4fnoWTsr29v/+TX01h87Im8qQGIGqLq8ec/HncHOswIoiY+NnMpEHxdJGI1q+TQWCmnUudutT4dpzpJCJhmSUnHKZ3mfS2kZqcCRSTGxmFREcc0zNl0Tnlu/fZpnK9eX6sikfMcUGEecp5PYoerphOpKRMlw5oA1KHaBKt13/Qes5CP++dT166NwmEYv/xySw1OMw/7o/Pu62+/GfYf2203AYar6369Pn18OA37PJ2219ebq838PBVz81ym6Vg9ZqWrL26vv3lT52Gejo8//bRpe9fybPz+/cOcs3kIntroCfE07drmRoG887kmjtxoM8zTx6fjtl/3fbM7zsaB0R2OI3ttmuijx6Ecn582V00IvN1uRayqGCE77PqQs6jUAvDwOHXruF6tj8dnrXIuXVUjUDUgxMtpnYmrFjBBNJMCiEyuSAVTxyyqSHQemwFMC
IxctZperMiEIsrMS793MZ4RfTqo95t+s73eH2bVxjcrjl8en34iSUS2O+4Hy8HKdeAiddj9vO1uyHeHw9NX61fRh5IymFZ0u90juM36+rZdrdOwC94bkhRAAvZ+nsdhykeFLOGLbmtAtdYAiiilFIretb1SY3WSkrqurzULgO82zE2eR9EUY0xSGVtEy3kg3xBF5JCnR7PE3sB1FdvQrnNNDgU5xGYttaIDgwJoAg67O0AVMx2POB9wHCXNwYWSarNdu2ZTalEX0QMCQ9wUldD4c/JlOpkIQmWpXIfh4QMAMFmeJ51Obd+0m9frvj0+f7h79ard9Ai2WvdSs5bsnUcX0DCXyceOiRwjGeWKx2Muiv16g0rgMNbRM00OwPn9WJBCd/tqvJ/qeBQopjClyRSRyaaMKFqspAqWM6lET7EjSdz10zTJ4Le3nXkuT0+krTIqq1t5tZoxVZXQXTV3rw6PH29Wm02IwzQSSb9pyyHXXPubrg1dE+Dx/Wk1wTrcudXq+s2XZUpz4d98+8shUWz01Tdvxv20ubqLXVczyDjW4TgeHpN7/nd/+mcfn+fQ3Z48QOfLgFevtrF1H592nfetg9evt1Xp6lX74WFs191+nkuxEGjaT+vtJph8/3NCkfVV3xb78NO72Mbn4fQvb//08bR/f9j9u3/1m0BwMrt7e9Vv2lQmz+1/9V/9u//j/+n/junhf/2/+181bv3//vfz3338+Xic33zz5bQbUpl9aDNNNee5ppo1xPMqqFkWjlVMjNChJyRVrVWZqxlIzc57BAK12ERVLaUwU2wYACXX4IJ3nPLsAEvJyIiAkguiMqMpGaBzcRhH0dq6FshP40jsHDtRIaY+rABGZjbV4zCsfYuG0ceqNcRmHAYzmHMmdKtt/P/87v/5L778twxulmRmq36dlyMgCYB65wBoPM3VsF81c5prqr6NjesOp6foOLSBhd/84ouUqkhF8C6uY7dVx6/fvvr9737nYjW9+v2P98OcfONQjRkBiYiQsO9bU+2baGBzyQDkAk2gFcSkOqbDlMn7WhNqteizKYC2Lo5pmsEM0QE2CB27q9XqYdi3TVeRzcehJq61aRvN4qLvVs2cp3/5z79N/8P3H3ZDZQRq1NVRy0/73XXbbVbxOJWKZRzHXKoLqIalFOf4at0A0qrr7nfH0yiHIU+l3K63q74dHw7RuURkhqVkUEPHY5o7H2MbTdtpPhJC6FfzNAXvb1ZfvXv8XqiaQBObSjBPIwI1TXs8Dktkg5W5b1eKlMvsCGOIUsRMg4sAVaUgwpLH0zUtVOlCCMFPp/nLr25icE+748Pu+E9/+/Vht6tanA/Xb7ZTTRGob9ehbYe5OMclpaenQxODC01VXTqfq64HKSDCzqFHFdleX5/2u5IrWVWjOaVNs/Xs2xjQhNlqquaw1updpMvNgHBBsajnpNFPgh3+TGG06DXODy0fL81mWwwC9NJM/jSmBy4d7BeMhJ/8ZZ87LD7ZOBYRyAISXpIjPvM2XPrU59Rg+OylLmDNlkUNiOdoyU/g6ZPnCF/YgAv5BWhLdBEimkma5+NhOE4FN1u3Wn348edSCgCmOUuBftuuVlehWf/97/8eq65imCxh36ScyfDu7pbR/fTheL3pgK1lYuL11W0fN9P4WMEZ0zQO03wOWLi7fXX9+upq3YvafBjbdXj79ZeWrUd33L3vtpthf//w4f64e05znieoyrFrFa3vwu325jQ9OIql4jhWIl6t1zE2Oc/ONTWXOedZd00gz81mtbl//hExKJCiEzBgZEdkpKJJ8VA1MmSsYJblnCSFZimn0pAppqk6z1nru+fDqo1fvnkzzScdTmAZQUFCu71abzZFpv3TExIT+3bja50/PDxJLsxBUFbrpm+jIY5Jx2NaJAWrddN3jYlubjbXr2+56396+LHbXrV8rWCOmYhgSVi/wO+lls7d4HNSlcJFPnapiU9Mzwt8Xmidsw7p5SsNznKJMztEL3KPizDtBfyCXRQkF57ULpK35UUWmskAwOjyvWCquLh5DMAMRQXPZA8hkqkhsVHNeRrG/ZRPx3n/08f3v//huzmNuepxmnKtixpOROsi6zIgZu+CVBFRJgbVVHPjQ66ljbGW4pCbGNM4Tin76NtNW6juT8cmQBLJQx4TF6Gu75ibYU5X1nBwofEEkksteeqabh6Hpum70HiCD4+HcXhe395WdAosyfrgrt72h+ddzjKksmoa74LMmdmZoIGLXQDmWr2l2bFn8t4FF0JRaxu2SqgcYshSAZGcMwEk5qaj8UDEIqPzQYCM1AenBr7xoCAWs4hrV2qYqnjPsVvn046d4+g8+SKVfSRw83Tqt68PU+5XGzee8jAAldC0sbke5znEnpt1VgRjZknzR3Yj2hdAawIDSY69ASxRg5fd5rMCwUUQ9BnnfSm2l48/UxLhxZJ2iZS2z2Vtn1fkuS4/7VvnydcvE8fwQld+VqkvAVlw2SRfusQvn3tZIvppf/3w4T6PKc+ZGKzqwqEv74KQlxI+G29e3j2ifXKoLQMB9dO6wM+WyznEGvDsO0NEwouhDc7wWs/YG4DovHLw8gsMmJmI+ia8enX1i9vbX7169du311/c9lebponoPYboHZpJWiSFoqBVgNiMitRxmubTIKfJ0sSkBAKMzIHYAJyBAUQkJgwAgmxGzrSQCjGZipZaqqZUB6v3U/441X3GSD6ALQ5JU2HHqOZdvH375dXr1+9++OPv//53IiLHD/t38q4eV7df+6uvutDNcTPF2fJR65RznmqJIJF9qbT8gIoCKyIZE4saEDKSIRl7dM4A1cC0iugSp2wmAFrECAmlgoGYkAJetFoGi4ONFFCRlJxwA75aUFCznAydAGXQhJAgz6YFsQALUUUrKgTWsw+KWEQKZIGGEQkNUc+eQVSDaiIiBsbE3hGbRUJWYRCPQAjeBwNAdhSjIbFjYiI0IqqigMDsq4gQNL4xU13abirZRHWJUVdAXViLnEsVE4NpzkVqv4p928bokEDknKbFzEwOER274DzgkhNnxI6Imfll8z5TRbQMlVy0c0uiIaCp4NnZA+ebjS3/MA7O2lOyTwF0ywfL3z5t/MsqejkZVTEEFkXi1ofovCMKoEZQTWet4jnOORESI86WTLgNJHk6yQgKWDC61gdkQAYmZEJQK9fb1XDc/fTjOxFqooshBGIT2X94+JB/TnMGCf22l5JjuxLy6D0SM1maipoh1f5Vj5VPu72UXMfdsN/XUklt97Svp4Ec1TKZFK1J0yiZaoJ6gjHV0SZEQFCsEn2Yi8ypjnMKzq26brc/xRhyVQbXr1r4WDAXRZ1SbkIXfCQ079kDNF2f0klBp2m66hrvfPBRxepEkyuAUEZob5sfvv/xq1/cbvtop/exWVV2FLvj4279atXGVTqchvFw+/ZP+pXbtK9ybQ8Ph3HK0+nUtk3sgxviPA6r1idN9+/+vtveMPN6u7p982/ff//D+3d/DL51rTOVVGqVDABQlH07zhMQMnPb+pwn76nT8OF55yi+/eorvzu9e/e42fTjWFVA
xczMatk/7qOj2HlwpeniWA0Rg3epFOcxK6qCAp7GPOSnq/WqJJnTREzoGcGsKhohI4CpCpgiopgSEiCWWgCEmM/9XkBEEBVEJFBVq2BLE9hUF+vv0rkQKcTMzAaCxC+3BFWL162vqcw5V9PTvWtMWBE5WjyW2q363fN9Ub/pV8+Hh6vNF7/+zW/15wFSd3PzJqWn45Syaw6nFLZYSvZNh25JgLOSZ2qc+jADDNOsVuLN9TQO66trRko11Zo9O+fXcp70HFE0TwegaLQCIsMpND0DsDMDpzKSmXNYwBlxHWdSKaLIMa5eWzWYBnDA3cowqNbQ+EX1R90aR8K0t9PPkE6WEhgH3wCjW7UYmooOuxWubgDJK5Ga5iPMB5QZJDsQzZOVChXSnNCU2EOgQOjBXFy12zWTdLEBAB9imqdmswYfY7tSUwYGK1axmlpFpB6RDDWuumxCiC3H43gcJnMG7c0tIHXd5vDhaZpTyqZqx/lIsTF0VYFFmMgUS0FEERAIXgvVWh3ltuvA7Pj+sNuP0Pl/8W/+ze//9v7p6Sn4m/c/P61eNVdf/uLj+8fjobS8h4lpHNq7sOrx/v3fv737qjYhhv63X3wx/jxT9t+8+V/86tt/Pj7/8f27D3/4735oaevXV7/8s1+sr68fPz6TNf2r2/1+cJznOQWDm/VdPNDzwwx2fMUni6EYrONrXG1yqu8+vv/Nn3yDx+KwhJX//e8f1jeEpQw5315fHQ6ZRLc3q5ym1bYTj/OeI3FFbTu33a6++eoLTWmz9cECoOasMTjvnKTqAgrVYdD//f/hf/v7v/0+aPd/+T//XzUO1clUKgwnKzW2YayplkEFDchH1s/0C8isUgEARIBMTJg5Or8Ez8UQVbTWKqqmkaCmUsJisTFTM1lGgJqKkg++1kpGbWhTyZ4DEKiWWjMRRh9TmdgFABe4IS8lp5qLI/JM7LBk3a7XZkqIAKo5F6I2RhWrWmutnuK//vX/UqdpCV0MsVHVkpMhtN6d5yFiMVAE8p4AffRNrVp1ZueQWYqaQJlSzULMx/3J+3w8nqDIkJ9BSu/dw/uHfDwEAA7eimyvVjG4p92p5vOZcen2qaqaRPDjNK+b2K2uH4d98b5KlVya6LGqqCGCaZlVh1qJeRXcqgkx+kLl7eurUmCuMzIz8PaqbWOUIlebTd/7Wub9aVqt6fb129//+ADBnRAY+H6cpzJc33y5Aim6H3JpuhgiWpWUMoiFyFlFsby+3ko9TEXvn46bvm/atmlKNgstz6kymJm5GGoFkRybCK5Hj/P07Al9bLXokJ8Uc8nqCEGdaD1n0xozQhFF5G59ZVKgChBU0VKrmRECEuSUXfCrph/LSXWJnjUDU5P1ptFS+obpqq1Cz89D7Dplx9756A7jiNRnMSjqvE/TSKan49H5a2EKbWwBT8PIoSHXpHFmpxxYSgEfEkK76qILx/F41d2chlFNyIecJvYewGqq02mQVEL7KUZ0OfMTLZV9hsVnuHHx4+NCDi0T6PECQeyFXVKwyxiyBTycIcoLkjk/7QsawgshBRcAdfkLvFh1LqqO86P4mYLkpdFu/yib45I5tEwOXXJVLxE0Lz1qvAAqgHOe0WJOOs89A6tSU0ml5t3+8O7nn3ePD+i0iU2aCYkFsO+v6jz/xX/681dvbmJDJadvf/31X/7571C47ULs1vvdM6Z8dX3lVr6/6uaP9Tge9uPHTQTJMKRxEx1uYtN111d3TQzHYX98PjaOtuvY1fn5r/98Hk5FFKiKzoF819/cffELU8iT5FLVbHfY5TT9OP0RQLvgQlh7x7lkwzJlXbUdmAHyplsbNR8ff2RKIcjt67f73WNAX4XmlOZcazWpklNWdKZYlwwptSVMmoPLpTrCrKWWyg6FEdgPVU5DrR/3vaNt0ynJ+nr1anudM5zyNKfSxE1J6ThMPlDnQtd1se2fHp7Ir7qu9Yy+gef9UK6c8/7peUqpHp/316+u0lSPz6fbJl7dXimaY8KFJLrwPXYO9j2XJVzO4xe92Qsgtpcygxfpxj+C75+V4yJ7wIsW6IUGugBzAD3Toxfi8+U3fva6utgRPofg58vTCx+KZ9Wa6WKGPR/2pCKQESpIrnPOQ7bxH777h5/ffyT0283N4+4wzs8CagIiSzwtIICoovOITkEWi81Zb8LeyuyCT2kGJDE1MmBQhDGVq9j+/OHx5mpNCOPp+NO7n764uzIau76VXM1qWK3p8SGAui4iWKrVdethzk3YIBVH+2Hax9w714bYNI7zIER4dX399DSUSgXMg7XtigJKrSiGht53CF7yzAht2+UijSOD6hDUOUJyHEQVsDBFIC+IDM5zEJiZiDw5Y2TniYWcgUNxhqbDsWnbOc0wn0LsKDTctHWeo/OmKlrIB9DimExK17S1Ugzrmop3JDX1/UYsybALPrjG15TiemOEYCcHo9IKkFXPc6dAFXkR5BCCXJj0i14Hllnxn6mLzqV4KT3EzzfA8zakFzXQeWbaZb+0T1+OL5zlS1ybvhSnXZ6L4EW3hC8JRgsPtcQEXzLozs9hC1n1cqGnw4kUAFXFDIyI1GyhGJaLUl225wvDCZ8xYHhR8RFcJFKoi23t8vOgxQHkHLvFnWZqy4jM8+CuJVPMdNH4LDIsQsKFJiYi510T/Kub9Td3r3771dtvrlZf363XnesixYaYFeosWhEqskfDWlWkkAU1yWkox2M+7i0JSmYGg6pGpsKGxopqZIbojBfpFSs7ZVclK8znH6OZmaVSUi5zKkVcCB4YgfSSv+MYPfmOr+42X/0phtXPP/7xdBiaVTfO8vP7e//4vH39zO2btumntpXaamlUMxgplQrIhAxUjXJVR4iCwMZMAugoGkUjb+h0EQ+CmZ4906oVAM3YoC7RJAZSqzlcLH4CRotqdvnXNwBgb8hAhMzoWCtnrUn1KDKbU8fqHEJw3oGm++c5px9j3AzPB6ulQk2ghIvijBYZPIClUkWFAAJSdIQigQhr8uSC847d4iV0jnwgjB7IFVVi8o6WoRzLcE0AMEXRCipSReoy7RGqVFCVWqvWqqWYpiopyZgmMNmsVm0bg3dFlCqYATExIzOzY1wckHjmdZeBCEhk8AILLlTRksul53bEJeIUCcx0yRsiMlVEWgjOhctcVgIugU9mgKCAYqqiqnK2mC4Lw2wZxIjI5CMwmFT2DpkUhEhF5pxH731WcSGS2piORD40HREDljzN69Xq+uZGrJRZmnVrgCVNUisyvHv3YxkHFV5tbzxzaJs0nQhqyTV4123WPq7ZQT7spsePdZxC1xlhncdpGJEDMgJQHo7ex9BGtJye9yUXMZDjyAYBmuNxdAaAYkh3N1ff/cMP864+nSr2TiynqTAwqDoigsrohzklqbWoEDoX9sOBhhMu2kK2pnHTcIpO1ZxqiZ4Ph/tAxEjrtk3TiZoWAgQO7Am0NE1wpOvGAVUZjmOZRQq/8uhbVF3frol1c3sjeXYOn374w0OeOTY+BsdN0xADkmdmNcCSy/7xEKlbd7cm/ur2ak7PoPDLX/4aIT/f78h
pu402cR6n02ky4FXfueDHPLVNN5wmAzOpbWher69OU/r4cP/N198Mh6P31nFQsWlK4Jzzfpjyacr9VQ+ncT7Nq7ZNfbV60lSBSepZZoRGeS57HRsfg3MpJUSkJX0EKoM73yqQzNSzFxUAcOxgkR8hlbpsB0BIaEYEuhxQkJwLVQUQRcQQiD1cEhzxPBrmvBjGcX5y+/3zEzvOVobxtGpj2K6m+59pOHzZhx4z3l1/f78bD/dftWvbnxKfOkSSXIZjyqc5l7Zrn4bnlXyBSrnUCkIer3oXiBGlpJKmqes6UC7TadVfGUuWVKSoYujWolxFCcAFzmlQkND16GLJg6kie7NkWA1UtLrQqkE1i6SxoemQQ7+ldk0+mmZiQN9XCGpC0YkmKBWRSYsO7+b3f8iHh+3tHTUrDJ0oKwC1K2pXGNfAXoqyZJwO9fik84HZTMyFQMGDFvCNIlui9npNIE/vf5Kart9+2d2+EQDIg9usfRNTqVwdEhFGDo2o5uOkGJqrV6ogxmg2Hnc1ZfPGwT88HCVPp8MwH+ecE9ERPK2vC0hhAECY5lnRH05zdKuGo2qOrlPRIQlSYaeOQURdDAT89H5Ht+vm1RW1IQM9ff9hPJ6ihzRMb7c3QFT2Kc7wy7svT6n4vu+Qt+224/b1L1+30O72AxVs0nXbu5uvf/H+Qd99Nxx+OF59+et/87/5dTp8/PnHdzY+vn/8YxPWv//rv/7VP/svr19fQwBtmBSS6uY3rzc38eFv/nPr9q+/6Drh7/7j75svX1XEGK6OJ0vFHczkNGfh467uhoSOgyvj0d68bQw1NLHU5F2jzt1/HGPg6DanfYaa0jA167UjRYdawXmWJMdTWm19GosKMtv/5L/85eGn/ObLm/0A3//wfbdehRaTyXE4oQ+lJlQiHz27+WK9QYRaspk5H9hB1cpMIQapVkpS0dj4KoLozKppMRRPSEpTGqNvovfsGMgUQUQ9+0Bca5WSFUBUVE2k4jIlBMk5770vuaY8RaLoG6llTtVEiZXYi0gMTmqdpzmEBo2qqYqYWWwdYvWIYbuaUmL2hMAIDXgz8C7MRce5IIJWK9XSw+jZM0IVEzEffGy8iZLjnFKai+hQi+ZS5zQNh3GahiZ4XK/+7m9PTYjXG79eNVo1zVlNgmO36YuYiBQppZQYW1FwSOgYAQ9lGiSlWjdtD6aBw1xGZPCOkcw7CsieKZBd9d0wJr9qg49IdZbcd9FxbGPwzl3dbAHAciahWufrruuv2uvb3/7h+5//9od7UQ0+Vs2//+l9E+Nm1RqgmVSBQK5KbmJoY+wYEZDQffP1zfcf90Y2pnR93X/4+OQMAsJQa/DerGrObYhWcxmOrV+zthKHYlUIlaROR+e4ZvWxASjjMBDSarUlBLEcfQwUgo+naai5xL5bDuDsGNSkluvNZpxPqjWyL1ql1MY7QgNVBNxsVnlITds8HWvgbrVu03GCov12pRAY0KzMx0m55pLSWAJzSckBMMK67abhhJLbEEDUx2Akjj2ZURVklZpQS86ndrX9+PAQQ/Ac8zwFzynP1PA0T9169TliRlooHzJQU1v6DS8o53Lavxj0P8c+cJYcnWVAiw3i4jx7EffAGYCf2+kXVcZFG/KJMHr50guyvvThX77zDKTO9oSz6Q3hjLFe7Bh2oYouwdWLXW0Jk3zp1iNckjoUDAhVrOaS5nQ4nv7qr/7mu+9/2B2Op2FwjhWqFfENucaPp30ex75tEXjMerVev/vxvYIw26aNz/cfRNQHv9/tRfppN70O/vb1VzfXmz/+9V+AhPW6/8UXr56HnSH98MOPSNL5IGP54u5q9+F4Qo2BYvCd94jm6IqZXfCKiuSbts15FslvXt8N42kYdJ5rG1mY1o1Ha4xlzGrmpc5ZCyMglNfXd6fjYR7GUkYGS/MsQGCCCOycVSECMmuW/FCFGLxnqqU4xBBDLnmeZnasaCJFAPqmKUWGMZ+ypBX99pu7adwnl5qwYoSbVzcEOkzj+vY6Onh+3KWUTuOQ5jw8Tk/HdLNZtb0nR6vOq9oXrzeN758Ox/V2pchlyiC2Wa9ubt9OKQUWAF4sZqJKS/rp2f4DcAmxsgsaPnNApgh4hsf4WYFdDkXwj9QaL9W1yM0MQT9pkfDzIXkX1hM/GSjhUwWj4QsZ+cIAwEX/sXT20UxsgdK2wApAR1LVap3TVLVkzH/5l39+ODw2LcWm//7dj/dPT0QAsjh/VE0IQU0JwTkHgKKKCN5zLUqGtWZPTlIBgdAGR2xE16uVguUpo9RKdJqmj7tnD+53f/fuzZdfzfXILlqFIqzOU7Ot81O38pIzqWPnsyQlrKBt3w0zffyw++rr9cPu4c9+/U+r1fF4avvYXfUHUSCby3zdt+SQAIidASJw1/cVBmRfDDD4WnJsOyhzE+KUk0AlhioAgOxdqWJMgECeyJOKEDn2wcBExTU9FHWmXU9qEtrOainjsWsaamKpSRUYCZ1PpbQhaGjMUHJpulZdA86Ba2qti4xQq+Zp8E3Txq5KDaFTqUVGdCNCf7ZKgS7zG8+WrqWe/pFy6MJBnz953mk+qdfs08dLjaJdOOyl7i6WFoDPSc3P5JBmFwpyYWcuX/mS7fbCQb2Qo58UPi+PAMB5FuVlRBkALJk+aEvajF48lXp5q5+GTxp8xrC/XOOLpOqseQI7GyXO4UREROwcEiGyqVaVhSviRSwIZ1r3rDk665kvfmFmJPTM2659u736st9c+7BuG0YGRUJiVM3JbNHPVG+ETgEBkEVM8pzGk8wnkEyXCXUASy6SSjEWY7bzqAREIjYzMzF0SM4QmIDI0BRNrBYU0VzVUJyokQEWEU+ktZZa3apXUPCBa3QWRXMCz96r2nCY5ul71z42/3+u/qPZti07E8OGmWattd0x1z6XBolMJFAAgSJQjqRYZJByVIhUSxHq61+orR+gBntSSy0qQmooglQEK1QUWUUWiwSL8EBmvsyXz1x73N57uWnGGGqsvc+94IkX7957tj37jDnXHN/4zPbSk1TiSsHYGYiSCJmCKpka6PJWkHQJD2Gv7JTY2C0mlaqmWpfiUBERWRhDJ/unM8pxTnpHMD1xfxZRGgF6h8Fb9SYERGpFtSaxbCjOC8RivttddE3Q/nb/cPP6YXS0r4Bk4kkUYPHDYWImBD2depkQPZtpQIukkSAwOEYiIwIfmsVbzTGZCQApATOXWpGJkMCg1mqASFxyVlNCB1DFFA2LqJS8HNGlZFMoKeVUG++Cj110jkGtLi0yGzhm790JcARABAVjxOU0oFIrIBEy/U8EaGB6SgrUhZS1xG2etnwD0CUSUexRLb8QU03NYDESNoOqoqchIQAAiMDJA9JUajU1U2AwigsV3SSDVe9cmieQbFq8A+ewzAUxMgbHvuQxkFtfPnERk/QE3nNbBEqaHSAjlKqiGnxDnqOTcT4ej1py6datgTlC1fn+9sgxegiS524dAZV8h6qOnYnd3+7VLLh69+p1CO08z4CkANVgvWnu39zM/T6YmVpKNQ8y918T5DZGHmxKCwzdTKnanC
g658g7X82nkphcfywFhV0HCk1s9+Ow23VN206a2eEwTd2aOXAdc5bMhMdh3q5WSDSNI/myDc6kbjYb9vzQ3zpg0FW79evNhWpcM+wP+1mo5nI/Tp8+e7KOnaxEvf/6m9fby2539eyTz58fh/T+fT8dDzVp2Gy3zy+Gw6ED9/bV3etX+epi+4uvfvXie5/+xm//zle//LPh4bA/5mPNbYhSeM42TRWrcgjVgBXU8DjOuGEgtAqHh/Sl/PKL33hyf7d/eDM36DcX7TTPjnHKen97WK3Dert6uD8yGDeepsAullLAsNbCZKZsRilXQOyC98GKZDnto8vBRRfavwKIqp333yKCy755Hk3UWpnOpGxkAChST0F9uOBIqsYIpqJIaAboHmmoMqV5FinFds8uB82HfkR67TuB+xxKnA+9X0fI9VnbbZvtQ9LgocFi02DsS52KYtUJZVyvVzdv3gTGYZhdbC9NRRK7xkS7phPCnAokba6bXFOVnHMJIUJoq4L3HtUQak6DAnGzNmIEQ/NmmGtPLjCFOk1WpQr6Zodl0DKqAXPnQmc21nkgWmPcgGPKM3tvqrVmMLX9m+m7nyHh5fPP1YWqgWCF3TpcXosLiB61kBWqB5seII+QRhOjdsPkdC6SvIWN61YKMbRqtZ8fXoem883T6tYYGuh7E3NtSyGwzcF7Co0psXM++lJUlUQNwHOIYOK85SIPd7f3+1uzJob26bPned2+/fZ11ewNy8Prtm2mYZ6OfR6LAnmLOVelumLJlcB1ftNJ4w2zIoTQuBizw9Vmw7vde9Hpvr9+vvFU37775uUXX9Qaaw8u2WZ3+ezyGUywXV9c7Z6///I7pheF1p9//0cXq09ePvObq0Zy7RwUo4vnQfP4/PPLPNS//vr1/e23z1atM3v46tUPfvLTf/Rv/Ns/+/OfH77+q9/8oz9y226cUj8UC8QXzYs/+rv9X/7ll//ZH7/88Wffg6t8P5e2XT3ZjdVcE7Yd9mJPP7kKG7joApPTuRJByTblERx3XTPu56YNvgt+5ZIiuPjii6fguapKlQwks3qEd2/20zhePX9JhK7iN9/dR25e3bz+n/1v/83/x//rP/v+b/74eH+wTCqGSo66wgnZSplB6wK/AoCI1mqOSUpRM0NaNW2WmmTJnfCLoyL7eNFsRQszkqEBxRCJsZRsSt1qFZtunKdqoKXUKs4HVEEANUWj4P3QH8kcgYWI1buSKiimXNCEEND5JrbH/qAiHhkMffQLl5XRVSmlZLNAzqxKVtBSAre1ZmS3zP1SnWspiBhCyDA1HsGacchNG5CUgbSqlFqKOpY5Dwgcm01cu2O/d5U2V50f9Jtf3f7Fz/8GzF3tVpuG286P/Ujg+zGjQXDkXchT7rrN0cYCxswpV8+kzIfjfa2ZiVJJWiXVQkxTKk0IJmoGkZ2BIVFR3WzbJoS28Y3zFBoyDc5tmqbx9HSzPgxz7DbHQ4/suQsKpvPwd370SdeEn3/1JqtYcllt6qe2aa43lzf793OqVmEVm6lmm/PVbluqzvP89NnFU9OHu8PD/X7Vxd3lbujHxeQpqbFhmmqD0aoaSfOkUbVVsz4O+3kYmfni6mr/cBdjzClJHdAAKFDo8nxw5ENYBxcf7m6Z0ccWDYtUM2vWbakZAQB9jMGzxtD0/Ricc8zPLp/e3NyrQwxx060utqv5569zmjt34dboVY79fnVxPfTH2K2Oh1v03G66u/5hFln5ULXUWgnYB8/etu3mOM3bbqtQ+35+tr5sg5Y0omnbrqaULB1W21XuZw4ulYSua1btNI6O+Xjszx3D+fh/njwvo4tzM3Jy1Dgr1OAjt+lz76Fnes9y//Nk/DHtHj+ChvADWePEDzq3WOd259TQ22Nn8zgd/6gvf3y+M5Vk6aA/NPz26KP0yBw5+yYZ4WOnc57VL9MURJVSTfspv3p3+26/zyqx9cOMLjpQcME1bfviyfNff/1VbFahRVMuh6l6rBk9+p/+1m999aufzXNy0U1p+tUv+s+/ePnp808ryO1gr4/3L774cYPu/c3tv/rld22gzlN0sFmtGcA1jQ/gyEwltHEcE7gAYoAui+G8fEiJqSBKTdPDKP00kdusN+uhvxNXd6GJsS0ykNZV3NbiXA6yNAe1Bt8oahvDw+FGgYd+qMBEARwAqwsxGtVqRsTesXNzSmYqVZigSkViYiopKwAyAYBzTq1C40e1b9/d/s4X22paQAxoKEZYhMmTm6a+AAmCamay6ycX1FVR7udS86yzjXlerYNr8ubi2gI2sXn5/OVYEP3q6zdfvXz+qXONSjEERDhlenxEjjijOHjmwen5F7rUhZ2rDk5jZHtsaOExDRNxaaXsI5bEmeDx2Lk/8o7Oj15a5nN5L65DC6YKZmSP2WaLMYCeqg4f24aTKk3IMSCJZqkziOwP+3/5x//dzeH+0PfOu1zLYToQq1RTURERrbZAF6rsmBGr1sVzQBWWDkiBHFAVE+PFKdaxI0YzcAEAmIN7+/bhybNtSvOv3x5v9ne7RtbX619/fWwogNn24untu8GFTuTeEL1rrObFuHW7Wc85AahO926Ww82b2Dm8uMrj3HbNMGRnGqNLMl1utzlrbH2BisHFTbt2VzUpkYZV23RdznMIDZBHB8xh+Y3ikrSI5pxDU0fKjDkJhSgGDgnQGRMRM8YQwjT3bdvO/T7Nx9auKETfdYvRc/Qu5YrE3rcILteZCLhd03zMOTVdkxFASuBmGo6xJe99VQRRZy3WAfGVC63hipb0ugXrUf2YSnYCZxBPkPXfvuEEID3CinY2/PkIT7fHKv4YVzpzJ89P9Oj1fILvTzcsfyE875aPW6Wd0grwDGiZne9xYkCcive8DaoICADCghibnd7jSemmp7VECzNOlwk0fGSqdFoSp1V2AnARzBZeB7Ijx8t4YMExAICX642e1wPAKRYNAOjE42MiYiIkR+QBnBkTNJGJKrKo2TikeRTVbEDRUYxEuARGqQGJYp7mMo2aMqsxMzITKRgRoJkgkKgSsgogqebszTE4QqkAqsaODckW2EVNRciMDEykVkkOI/vlx6iqFWHo79v9G7nZlpt3UETIY2ypiWbIrFnKMD4kmIm9lKLGzkdd7A8ZiBUJlxQJAQBjU0QXCJ2iQ9+Ai0ZOEVVN0UwX52o1W1hZgsgIqGp0pr6piAkg8JnEpohAhEsmgjlv1GadK0hFEER2sZgpurje+PV203SxC8NwTPNgUKsYMpsImlRVyZWgAi6hF2qEzjMRNZ5aqutAzmGM3kVHnlxw6Bw7D4BmWKuqFiGulsmFqoi2yB6XY8USN89VVKuoqKrWUrRWVMjZ0iRTqioamJrYeE+OQUSMeGH+BeeW0gEERHTkljkWI5sKIhAuQwh6BD1PUJGoLmlny5JRUFzmTSpgSABqgkCAoKpEvGBziKgn32oQOVl+naSVemKTLtsHwmJxLUSoZmjoiHOdUCuC5WlStdh0swzOuVqlqtaq3a7xHjarXUlTiKSgKuJ9rCWnbEghNqsqe5XqgzPQw2EappHJDIwc9/045
xqC90yqwlkPc+9QpzQ553M6QK1AYKbDcdSc2xU13frw8DAcU+yarPU4F+dTHSUP+83FhpmhsWEQnlUF1aprYE6jhyClEoLzNOWaEvkgQyrsARCzQgH0zu/3U/S+QURgkQoA1azUWjKDCoF58sS+CsyjNY0aQuNCyiAEN++P4DN27fc/ef7XX/789/7g7w9T/tO/+OdfPL988vLF008/GYepZbYxfffda5HUOm63XU7zt7/6Zr8/bD774vlv/e749ru3b76t8zj1AwOlsf/sR19E3h4Pb37rkx/t74Y3r960TaP+CNsgBuOxqKpVy6YgZKVScQHNs1vHrmY14Kuri29f31fVr+UAam1cYdVS6nbXvhn7dtXmLPe3/fWz1XrVPfSTVW27GNpQTWs9JZGZCbMTg5LrUMU7duxKWZxQSc9ZHqZCTItvPREuKORCrl82ByReLjOyXA+0EnlTUITFEM4hmxrQgpzScpV4BIpyyQ/7h26zlmxZcB4TzGkOef1s1e12oDEnPKr/7Ivf9vu7uS/k2vu5Hue+Y3h389XKxdXm+jjNl0+efv3uGwappQCgUDtmQxPf4jSOLrgqKhjG6sa5gGYgQ7SSpyLFu5asWE2CihR8XCt4qwlAfGxBxQCdJ9MMpAJq2BAwk85l8t21667Uksw9UkNxA+StJrBaUzU1Y0dazIrrVmHd5lkAI2+fYXsJ7baEYEDMRGWEOYEJNmtaXfKV6ZhLylbNX15Tuy61FAUFsAZcJmpaTxeuXaOB5IkdUNyoLK73zM6ZqZWZPagImgKyiZaUGueI65yO+/6ulhQcrXbP02EGzQ6FIMs8TMcsNW92rYqF4Eaivk+CNAM4r+yw399cPfnExcjdukpSK1OaWm8Xm6cDkSG23SqNI2scp/rT3/uDw/3hYntRj+Xls2c6O5z81e557J7u/PUf/L1/Z7Xt1IfQRC0wQ7mIfDjev/3Fr1/85Pt/89e/+q3f+UlY01zqJz94fvnpthyPLrD+8urnf3YT3O3F08/S/fuH2xTU2quWo0jR4yi0acPf+f1nz77/8Nf/ioZXLzfrb372V44+37z84m++uw+fXwF6IaDGpWHwWJzjZseFFGN4OM7Bx+urZh4yODKCsArjMLQRb96/X/FV9Xq53QmUKrVt/dXuqQosUvcXL7exsd/86feP2vwfuv/j//3/8h/n/VwVORB6N83HXAuYsnO5nMwalx6XiAAQCKFq8A6JVKCq7rq1aY3BmeGQUwwrBERyIlpqBgJ0wYOf50mnaUpz8LHxvs+ZnAPCEJp56mPTpTlNeQxdAwZGOM4zMfuGRVVMvePYuH5MkswFn2eb6xx8CE0Yh1mlet8Aoo8eEJ1zqYgkIeaUCjpyIQz9aIAitdTcNq2UzExd106ztpvoPBLQPCbHrpY6zdN6vVlv1sOxn3NfM4zjZEWZaOrnb199vV1trj9ZNY7zNKRcD4eijGOqQ8rtQtnDuF1vpgJTTmbWBW9quaSF0ht9AAUBWzebcT400SMhM69jk3MpAkaURVY+Fqlzxi625BCmedM13//kE6CcchJJgq7Zxq5dHY598GG7u3QM3/vkMqf8zc1+LsXQzTW9fTikrq5Wm6aWh30/5zKXWmptY1yvuxDcNCRWhEqTyP6hR+esCANd7i6Pw8BoRGhSiL2qHI/3Ia671W6eegheCQ772xh42B/IU63ZULwLJU15zl236rptPzwYABMjkamoVDAq0+CdM63TNLSdD96r1HXXSKnMZowcnCGUWdaXXRH94vuf6JTSNIBDF+PxsK9wWLVdKvnystv3MxmrgQseib3DNBy7dtW0IiBotrvciSQiAtJpHgixiNUs61UnOnoHJeWaR8dd13ZpGtsYu7ZR1XmYH1GSUxt8ksYsSsYTzgKAy1gZTj0zntVnH/hEduJinL/szEVaQqhOGjQ8Uys+9PinbuUknbBz0/TYVj0qy+z8Ts/EkQ8N1Afe+JlgBIj0iBcsPZqdKEZmpgaoenapQTM1RCLEKiIqojKndHvY/9lf/PW7mxt0bprn1WalVYxgu1kB0pubV92mia5t19spjVdP12JyMzx0m4vXtzfv73tmGg/j0rrf9Edu+89/73ukkCscp/Hrb99cbv2Ti61z+Oz5phyO727uWkeN8/v307px11eXVWmzvRZy4Bh5afWrM2VHDiqr9KLjOLXtGp1ntIsnF65py1zvHvYlTRyi0JxSD2jRh5t979UnsSHN43x07AldbH3fj7nqlBKgWz5UVauihMCMyAumJmjYBF7C0zwFA0q5TlMBBWBrYsy1zAVe3Y2/+fkPXn766TQ9GHmHLqd5f3/MfU+x3T25KNPh8LAf5j4XMBcvn11Hlidt9/7mZrZpmDLFSUfddnT92frJ1bW34soMorVUpFMbpAb0mHH2AZ9cOnAzeyQ+nFrjhUL2iE4uVAf4YFf9t+gfj894RjJPtbhU79nGCOAMRj4W9KnHP/fny+vIycwCllWwVBqCKRjhQijCKoJmKtUq1FoU9O371//Vv/wnX/36Z2EV1xe7vh//5pffVCtqOk1pCTMRESBYHMIYuZQC7AAJgcygiqABA6SSXPBIwOTath36fXSU88TIxLikP0cXHg5DsTynbP3BLlvmLoRmHqcmRKSsmUE9OlOdHTupuRq62Di/GseHKnkV4+uvv9w+ufzkix+9HQePngEYwXuvUhQZve92V6mKUBAXXLN1XGLnK0YhjwQKJCpEzvsmWyVQFSEPzF6kuNjl1BMCgToiQVSVuNomqS60UlFN0bEqdruLh/vbedhzuPZxJTlVqarStHHp0prgC0iWyhjb7qK/+2q17by6UsCImUIaj+ii9y9ECZADR7WkdougCq0hIyhBXaLulnEsIhmc0vfsXC7norLT9vcIDJ4se+yxJtHO/KKPyUQfsYoQ6ITlfFR+j0+/AKSPcoHHjfWEhZ72S4TFQB0/3njPvu4frxh9dJJeHm5y2jrPDt0IZrBQK4g+cOqWGj+T9RYt5wlFOoP9C3ILp44GzgjXGdxafH4XJhESLXu4mS0BBWYGsti5BlUuAuTIBwCym/0xTTnXkuaZnV1t1s93TWxaZkakxTGwzKmkXmpm5kXQhnDKVQAEq7Y0UCoFgVQNOOiSGW2FHDii6pi8Y+eJs3cWmBrWBoFNVaoK5ZQbx6UmYs4e+kn061+nw8P+9f3t8UirS6ag5FWETYhQVY79UIoCMSA36Mw1xOo8OlRDFCIlXIhdaASKgZyRA3bAjohVtWrRk7e+nihgqMs/lm+YgaIyKACKViJVEFVdzsBIgMbgo4WKFdBlBSGytvFmXnOZBUWlSr25ezcd7w7jVEoepwnJm2tzhVRzkROyU81ixBB907Zt1xkq5blVbQgis3fLlyfnkRySr4BFVAgYfQFz5KpaNUUyIkYkXALszURVTAQMEEWr1CpVSi7TVPtjj0hNcCF6XADOhZCGSI6d84uD52mREhstLuogJ7W5IbE7cfVOX2cB2olTBCc55sLPW1B/k4VnuABxCnBiay0ltcynFqMs1WUsoHJGapdf
qFmtFcyYoJQKkj1JrROZOcYqlR1YdSknNfDcemdgc9f4LnrTkuc+eIdoZLVtPZJmmVdhPY7TQzqKzMH5WmvO8/Iez/JWy0VBfN+PbSTvrEAvuSL7eQD0jW8ac1SrEvrN7ll//34cD7km7znEDoqhOe8AbFxf7CaWm9tDE6MPQaq2Lc0i4zw5769csIKHaZ6LONc454YMpmykx2m/7dbrHR1nncbDJ0+fHPZH75jJappWTSTCEFsGI8PWu8YHhVpKXj58djGGbdfwMA5pGtfBM9v7m/d/9If/sM7T+mrzxaefyDD/0//8n/yDv/f7u9Xu/uF2nkaoRrH77s3rEOMnT66bFUzD8c1//8eXL75++uTZy89fWsl3799CLip4e3NgvB+O+y09bXyY83Rx+SIG/+uvv27aZjpmAENQMiCEuVjOkth1DXaxYdZjvxdfVh24GMchicD2Yqs4zSkLlsVVLBd58+bQriORdk1IaawFXHBUSCdhh7UYgJ1wewUFEyOPzjGKGiKSLUtjcWhEhGVXXcIszEAF6HwMXw7xKCJMfLZ6tGV31PN2jqIEKGQE4JjOR35omtZK8oRW69raz774wZ/+j/8SGv/weljHF/vvjt3maZnq4a6/0HntgyfeS79Pw+RwddHdP5T9w0N1jVM/TdN4d//icnUZd6Jcqkbv+37Y748QG1GdJzn2x6cvn5skMiu5IIJV4wCQc06zAwY1QAfsrGRPHshbRQAGKVYOwXNOiGFFViQ/oBQXIyLWMhOhX2+FIwBgzZoGYocmUovlsfR3Kc3mHDUXfP0D6K40ROQIgFyLpVlLJb+h2GitdU6sScD8ZmcUlB2EQOo0GzNB2jOCu9iNQ1UENmubkGRE53NNJQEIhOBLySXPKti264fbV67bonNd29Rq8zSXqfi4Wj35DKql2dorG/bD+4fbGMmF5tXr+eEgbw4DMcQY02xzwWo2AbKikq2aVSrV48RN40NLNRgYGk0lKzcdr56//KJXqseSpwShfbF7fvdeyv30kz/4d55cf377dvzRT15ycGnMHgCsVrMyjhwd+fHm1auH25vt9eXVs6sffNK3MP7x/+e/uXz++fVnfxCfrH723d0vXr3excvPP3n5Z3/+Vz/+nRcreDEO4/27h9WqsVJUoLv0k2QL2ny2bf3vv/kv/4S//PqCLP/FL4dfTZ/98Av77qB5FLF52qWxTlq2F223dSGAAUfbYK7AiIjTmAIqMPZTWm82zz97dvdwt3v2FIoyWa62u9o4hFSqKZRSa5Uyw1jgZ1+++/Iv3/87/+Dfiq75T/+L/yrbnhDVVLOqGALklPnUaEAVrbWS4+hbh2BWD+MotTK4OY/Rc6qgQMhurllSbpbsLhHmIDkDgCGKWppncVh08N578ilPIHqx2xyHnoBUwZGzqiKqplAqM4pACE0pGTIBk2qN7UprmVKeU935nfeNkilos2qq1JoyiDlmR+ximKdRiuzzEdSQnXctgmdytU41p0xOsiiiZFKV4NkHVysCUCk5VxGTXMT5eLnpypR/+eUvvv7ml8+ePvPerVuYhqEmBNRuGx+mVAld0zKx1qpWsZpnzIRM7EIEEzFNWgUh1cLsjbXY3ESfSqlSneddF7NHFSyGDgnUCGgqtSN0JNunFy+vtrudO8yzD45ndFjXV1e73bWxmdjFbpXHedOsf/xDNxY7qGSzqy7mNN4OBw4XrQsIVqswESJMY2bH3jlGNrGmjSXVOdVt17k2pOMAnSeAXGpNVcXihilgjLQ/7GMTd8+fv3/7reTiPeecBZWQqlmWtFrvvMPsGImP+/eCSo67VVckS9Gma1KaRQojkqOxzlQ0RkrT2HQrY/KOah3azl9sujz0q5dPi4pB9QFiBCBsN91chSlolZKlbchUbK75OGtWrZk8+6apaKbmmohmXRfzlLq4nkqpNQNy27SA2g9HM0NywGWejyFQ07YppWyitWoRH9yHpuM0hliGjUvvoWf45rENOXUAS2/xiN6cOml8HFGfssmXLsaWw9WZBXIK6jx17Y8NxmnqfdKvnQGkx6bmdMzBc8PySDx6nJ//LfXFCVMAODdotkQD6XI3fJSwLT+lnU52KiImaZ7fv7397utvTfJ63T7cD/3DEJuG0eWSYggvXjzpB3z64rPp4fBwfOAYONKr797strvri+uf/eyvfAyqhqSliKIehsn0u1c3F6x11cb7+/vdk1VOY5qnyPzrXx2iEJjrri4CwVwVQvP25n2gGJDGPDVNcEjrXecaJKTjQwVBmKZ5nIyQ28jGtWabyjzOhBAdN7xmF/bDHGJEFRW7XO8kFyxWpU5z6iXlkqs4U1UpJoKMUpVD3K1jseWYjldPLvOcEAkVcs7zPDtmx5xrFcM2xiyaRUzMFClDfj9P46+G4di1oV1fZ0kE9mS3HYjujnm6e7Cat7v1E7f67tX9mOo3v/r65ZN1ORyfPLl8874c3vZ1mJ59/uLJsytQrGLr3baB9cP+3oW87tYEi+fGR7DOI1NMT3SMDyWy1IfJI4nixGVYGik8H9o/QpUWePFRUrl0ubogUHAq3w+r4UMJLyKiM7h5piGdKG6Pkcq6VD6aLTjRqVDp9L4kpZxr7vPdP/tX//Svf/VX15eXu+uLX/zsb27e3nGgWqjOyXkHaCVXUSHiLOIcI3tQXDAINSu5ABg7h6ZqAui8JzQVzYAKGJq2m0uhwKFtaoFjn6sBQHn16qvvP386HrTfV/okgOYq/PTZi/evv77afV6lFAPnmJCgKpNru5WopToiGkU/PhxuwlfBh6bbILuS503TsQuInpGJm+CcgEdaAUyxa9FDE9epaNPsPIXF08ZA2XGeZgcLfuvJDAwdRXUmMnoz71G0aMnOAag5MgDowkWaynr3tH/oa98Xds31k1yZJDEuaA7noszelWK1qG+b9eV4/53ME4AjsDSm9cVF0bt5eHBN61wwrRwjAEu6dw6YgxqCiVVBDoAMBqBohGfc+W8BOXAmG330LzuB2Wagizbywz51fsAJwPkAN53KF5c0g8et6zTi+lv79+PT4AmyfCQu4fmxejY2OhczfrQI4ISjQjXlZQE9etmdPXxPfM8P7Dok5EeNGhKd+uTHxDc4dTN2uiQs7ke22LyQIzNDQtIznvSIPSGegbDFgEvMINUyZptTTjkN03jo9/tDf3N7nGtFRy8uN0Tx+a49BcIrgIKVUstcJZNzDESATGSa1byJoBphtfNYA5CA2FR0gbawEBGgMSM59o33wSXR1tFcakNCWudizqFzDmYxgKJpQmmj76f89v2tVUsUHTuoYkUBMPhQJQNxrWUJsAZDYCXW4JBImcA7j0BMaAiGHtirC+BbowAckBwgEaJWUVAmYiLRSoSEXEzO3SMCATMjKJgaFFMPvFBoAABVyUyRPcYVCvtOfRYPPmWxOdeSpVIFyuWujvP+/q6WuZYpCUgpOie1iqSlmgIBMzBRu47b9Wa9bpisTo51U8sm4CqwJ3OEzMhojAZkwIjegfPmmBYljRkjATpGMhMwMFlA9qKiYAoiZMpoc63Hfuq
HyaHF6JmByIBMFRavfyR0yIsJ01KlDO4cibC4VJ9qmnGx/f6wmj5ARWoLFwMWWH3hgJ7PHgoLswNUTwbjgIS6JFMu1KvlS8QATPTsNKwqsvjJgVkVO117SkZVJpSSvedSChKi8nrzDNFN40PbNME7A62ijj27tqQ5BCKjnAs797B/ZRWJWjRQLVmKmkTfgcFwPLLjqjlNg2EbfJyPY5+nGIMPDoDyILnObt2GVXd4uEc1VIZc8jSAofhwc/faV1D0btvutt2333wZKKy9B6HpfsxlPs5KyFix5sSOQG0dOYQ4lYoBCdLU13a93l48u72/bzYrBxY95jpXKQTYUQjEVqTP08WuqzmXSeLK3/f77cVms1kTIDlbbbqHh5s0UdeESO6zF1ev3rytYq//4k9I7JsyXn3yzDn+nR/88M1ff/vN8HNkcY0HBNd2Ly6242F8//V3vmua0F6vPaf+7c9voYntZsvOYfTbpuuCOz7ctd1mPvZzntFg3OcQ1m27TqkPDQE7tWxSq1QQjRxVcR5lSn3jEQAUYb3pHu4PKs1qvd739xdt24+ZnSfm9/dD2zZV5fbmYXex2W1WYcpVhIEcR7DeREEBiUQEbRFGYikLsEhMQICL+pSJdDEqg/MwVBeBpKgoAjpiXarOkBENjIwMBE9XghO4+jghq1Uck8gSgAsAoLWsN13OhaaRm+NnF7/zl4i5am1C2zXX3+umvrfcT/3D9fU27ediybc0og1aWW3CedWsaupvX43BMDRB0PphankckHLjQohAsRQhD6XK3XAQAgSdp7HMc7MKpnPuAfNIAOQgH8em3RIaIAJ5VRUtHDqdb02xiqkwMqpMtSq3V351kWXM+dh2zxQQbJI5EQp7JtQ6HGGe5vu7Og+u2fj1FV98UptL9A0zW8lExFAVFUIjBqqCzoEXFaG4MvbYrJbBkdaK7QaJNCcpo5ZKzco5B5LSMNSUHHpR8N225kKc0Sp3sesckl9fPmt3lxxclgygBqVpI1Uk8HMap8MBLe3v3xGjsMxjRiZzfpxrzqrjiKqOfLUKAPNU71LBXQw0Ob+xJM2qZWeRyFaIoXvx4ovjKLJPjpr9t/Pl1XPpm81q8/f+3h+VQ/r0e58I1eurCx/h/v6wudzub/aEVLM+PMxth1b6P/ln//yL7/8UL6+++2p8eJ93u6d/9w//8c//4s//r/+n//Mf/rv/wTjIb/2jP8KSQhd//zf/fl9lLnVz4Tu/BiGPzTSO7ObQetjSXKz5/tX3nvwb6Zdf1+++vohN0137bj0f3jQbDU8/ef/N3ZOnV/t57Lr24fYALY9TrdlfXAZCaCISMhKAxydPLwK7w/HwySfPD2NVJ2A2perZPJ9OTZtdm6YqVv6f//l/8enzv/vjH37v+z968t/81392OxxdyA5rLVMMrhYVE99ErSc+hWMyQ3JcarFaERURu7aZ0kxGgVoknLVE50UUHPvocxlC64h4kaa7Nnj2TFhKCqs2jbWkAdCkSNWiKqiASFVqriW2DQPVnJq4zqXOeeyHIcZVEwOKHh7uPbpV0xYVzeWUz8pYyAyoVpE6mYn3vgo2MaacRbFbtUNKagJWh9R3vkOMZth27VxqzdXEAnPfT1WMCKY5O+cYVxfr1eFwc5zv93ev83z34nrbxFjKlI99HhMY7lbt4Xg3pwJiq7Aap2O3alQlpbzpYpLCyFJFJDvvArMQASKTmVHNqWkbQg9VA4A36Vr/9PoiF0tzJTTfht2TjWRIdVq17dWTS0SxYnOaTaQJTYxRwLab7f72WCtdXV3mObuevvds/V0dv3uYPJoDRhCVYux32+2+HypZrTKkuSlOSuna9WHfq3PoaZoS+BEQ15v2dpiYialhyOM4gsfgXNXxarOaxtkyO3KKGdGQ0IcIVgO50F44iGIafMNESYsL3nunNXvPY3/YbLegDtScQx+CjFNJM3ZN221C4+Z5XrWtzKXbbgSFPGaAruvK2BuQj00FzEV2q64qV9FacgZu27aU3K4D3qGaWrXYtlZRVAGg1LxrtlYMGEIIjhz5dn9378kuN9ubh7t5mNA5Dn6eJ/Sua9bTMCBjLtU1/rE9eBx7Lyd7/ZDffEJh6BReQwqL7fpiLYGPDz7hR48txEf6tWW28VGP8/jXpalZSEenGJ0TpePEaPpo0ndq/m3huNvHfft5bH/mHJmJ4tnjwgDtrJs4oQyPOVlmgKeJ/WL7UKUeh+Fuf//2/e0wzXPKaLpady5EKbLpVi+ePR3H2YdVmcr7d7cvPn1WBMuUt916HTfvX7+ehjklQTRCaKJT0c7Zi+v1u6+/6x+Oaqgiz5+sOafp7v43vvfsB589/fJvvm7a9nh7SzI/e/oUUMNutV1f3b15u2lj66jW+vDuJq7aGNg7T6Fz3q/WKwDbz+luOG59dOqGOW13ayUsOdcyh9ajVEAFoyLFrPbDMaUSfSQDyaVIjZ5AWYOBwnYTE/hcWQyGaWbH4ByYV6W5FgcMFMD5Q9+v1q2QVkQ1M5V5rkxk3jPDOBwe3pfVi2fbzrG7PhzuxjSnkqTOaRadcn+nRkgILy42nz/ZtE0QdL7hT15errv25v7h2B8s+GZ1ETarOeXg6frJ0zQXIlYTtJOLyVIT+ohanjlCS1+qjwqYj2vjZEuKAHQq8Y/5GOfhm51UM49EutMwGRFOAVdw4mif58fnFQB2pll8VON0Qj0RlsSdxbgXDUERraqaqcHxeF9y3vd3/90f/9Of/+LP15vV/Zu7n/38b/I8pizquD+OhACgKooGjlhUCYGYEUlAai7Ou6WxZiIizLmqGRKyERNHHxkM0aILjBbcaS33Y29Z2mDjMM4ic9XDOD304qg7TsdPn13pm/u7OXfNqmtR556Yu/WqAGLA2LImP8yp7doyyP722LRtG9uLxg1JGTG2ASp656VoWK0Pc96GrRpLGdiRZ15cUpHBDE8KGkQmJgIwBa1EgIAi4tgRUkmj9x2z88RzHtBx8L6KIDgXvKJrNrt8vJunBPujd0G1VinkCbEhT1Mt5HwuoqSefYibVOe28djESUVUwdo0T3bzbvPEO7+WSTC2aAoylHJkd3lSEi4e/sCG8ijDtTNGCWciDXxwSH8s0kfF7Pl4rh8JdB9hmDMUemL0PJIrHwGXRVT2qFk7IzELyHSiBJ2A+FNZnwGh0+s+qsr+9pctQOpJiQBG+Mj4RDWlc3Amnl8ZwPQ8Hlhu0jOIC/ZI+lysXgBrxbO/0oLr4iOwRXgmOKHq4ioNaKBnJ21A88xSijpCdgn8L94c7u/vj+NxyMKOrjZNqQUlsRWiBtkZOsAiVhTEmIkcLtEISEQRQZUEDbXW5bNGNCBUq8QOSRVRDU2qoTMwx+gYfXA8F8cUmRq0oDJqKplHwGowpUxWEYHm7AgBjFzj27ClYKKaErOrjKXoNJdSxQxQEmL1TiNbQxgcM5ljBkAkY/bsfAUEx+CYfXA+IvHj1U9N0AAViNhMkJe+RdWMmA0X0bgBw8mybwELiUwEiQGJmNUxIkOplEYGlZKP6bgfdRJCQ6iKSNxs1blqVk
GJyLIwEjmHgYCcGIbAm/XOtV3bbr3Mtc4OYBW5CeSd8yFy8C4Ech6Igdj7Rn1UZly8pgFATU0dGp0UigpmuGgFDQC0iKhYlZKmeU4TmPgYfFiy0xBxoSIRMxG7BR1VMyImJCQ2MxUFBKJlXZBjQmZVc8uYEQA+gopMln0ddNGdGZz5paKLBRQuSCqdDmQoi1/1woCxEyp6csc+oa2nkL/Fosh0gY4iubgwFiF559gHZiwJHPkYN31/R2TOcckzInbNysAoMIdNgyGN+2ncGyot4WekUjMTi9WaxPKccpYKvoihgpFIzrAYgsqsaZomxoHRBQIde8ncVAED0GjAFhiR5nG6unoWK3/5y282xPfTsaUmzdmsiFCuSM4b6JRNiokqGAJISbWKiGLX4Itdcz/W2ebpqABuGiWVCqAFqmsaqWJKgXicS66Yp9I0USwnmdeXawQ9zMPGt5BlHHvHhOiq+MO+/9lf//m66XYXF3fvb5omaoBvf/ar3cUqlwoFDS0QBo+HfUm39/f+HlRD48YppVX2wTf+wjUOtNx89XVcdaFtKtnchs2uu3/7vm29a/z2cvPmzeuS5flnn9X6KqdbMWCHuWo1m0pujJF8DN0h7cnQOSsl393PDnlOkzm9eNrKJMCOwYERMFfQEGicZmLyDbedm4vUUhTQxVVNmbQioaqpLvZEDheDITvV8onqr6epARGrWq0JEVUrnV0k9LTHn0qbgEQrASKymQkqLsd9IsfObEliRRV9FKAR0nobyzSzksB0mL6+fBIKlveHY3bh6Xq3fzi6zurcC3m/8ft3rzft9tnK5iGtqL45vo9NRwjzdKxMNV6/nvNnm87Y7vv+yWo9qIxQ98Px6dMnbsUlZfSKSfMwSJXxOILbr1rMx+N620oRJscuQM2oauwAlRECgWgSmUGJHRGJzZOV6lY7ZTPD2D4Bv0Ezq7NH1ToSqZQZtBhYuHjq8BO/fYJdp8CIHqcRCJg9CEkZEQ0xUS2ICq49XQG7DcQWfEuCpkgBuCYdb1AHCIHCjqCx8U6nO5mnZrNNhXzcGHnftPnwnYI470QdQNg8+QxcyLkgcVxv0EGzvRiOur+9L3kUKUS1aWPK5e3749zX+/v5UAyaldY8T5OBBUZR1VLnec4eq87zkIJfP7va5cNtvLicDIJbK63m0drm6d0r99kP//DZb3e//Ye/tc/HJy+uppvsrg2cjYM4qGVAKPTw3X4aphefPfFPnHF4dt32D3T97OVnP/y77tLY8+XzH+dDz675o3/8b0X+zIWrn/z+duTSbX0/jm9ePTzvVm/f3v70e5/e/erW78LT32hqWNU8pYcUd02d0xEm27Xyg5cBjvs//XlEqewe9t9GSk/aI716TZ99vt5FzrvyuoeOw3r18OZwsb7cH2W3i64h8qSzQdUv/+JbCkhZR5SnV5dzhZVraiqOEIlTqkJKAayE//Df/w9Sgi9/8av/+P/2n/7yy58jskM0E61JEFWgluK8k1JOxyIzdo4Qs1RiFtGAXsWImMHllH1c/PudIy5oU567ZlVzXmbEjY9zySUNzBRDCOjVVfTcxGYapoXozMyqCoTOe1JABO8CmCJC9JE3Ps25DX4cs2NGQ+epzrXmYggI1Lguzb0pmoiPDYIvNVsljrxc/U01uJjmCsirdlPm5InJoOS8DDtTSUgqUlMqhIZAec79/nauc56maTwEz+v1BZOazJJks2rBIE3l4XDYD8dS0RH2452itN3VNC26Lfa+ISAykGJMMXgrJVcTMM/EsQk+xqnvPcJ61TVtCDE2bdM0irtNbJopzdG5cZraJjx5stldrR5e3a5dm6bStd3u4qIK6HBkgUDomavKatMNw/CTH3xysY39z1+b1JzFDNM8S5XL1Qah90ih8YoQu9Vw7Pth7Fbt67sDokDRbrPqDxOQ6/tD03Srri1kFZYguVJzjqvWN75KXe+eDsebeRwJSWquMjN55xpiO47jdrVzDtgRmJoIsg++220RQJbplNaSzRCM2A3DfHG5Q+dCE7PAZ5+83B/uri+uxPzb/e2Pdp85XM1T8mFlte6n40WzIcdEeHm5G/vBAENw5MkHdjFIyo13Ahica8JqFiAITaApz6vVOg2zadls4zwMTAEwFNHtpp3bNvV9nmdAF5qGHJUsUgUel8GZ9rPkJz2KKM6txMLGIDjFigGcD/cn8YDpaQhxHhwj0hks+tCdnDg+5/H3GQ6y5VXtrBI7adnO3KPznNkATuSgD/ZJpyH3h27/1FMhgYGBntPQTq2Nnu6vp57LTnQUBKyiOeUxp/v94e5+X8UoxJtvXgXPp5mLwmp1OUwzMzexLblcPb0axrxddTc3fT8O8TLe398SAai161ZK0mlOU4pdOAz9NOSuCXHVOXI1QPUOdff0xadf3399Z8nvdfVs0wT/7bd3FG23ae+m22a1vthd3Pz6SxhTNn/oBwezX3WrS7dqO2Qo89C2TbvbkFZN2Vk0BbHcrbtaITaXabwXAXQxHfts6JpWwA59rwA5WVUzYjWpKkUQnWe/ntJcTSsis5uzoGIu5TiOl7sntLLDlDZPP615zqkf5wlBGU+z4iEnqbXbxer9XGoux5TuxyKQjNl3G37SrW5fvzsch4fDrOyS9S8uGshWTMd3fQjBt+3FxdPjPHzz5c/nMf/wp3+n7ToVAoXgPTHbEhR16nFPksnlV25mhKBq5xCmpWz1o7+cq9nATE6aso/thh9b+rN255EGcrr9rLBZKh7hESA6FSJ+cJY5LZpTEu0ZAXi81VTRlImKYkpCHquUN2/fvH7zyz/+0//er0Jw8Hb/mgifPnn+5v37++mgVquAJ1RTAzU0PVnJLs6ishiswunnWvhQAoS1CDOSAyWNMaAZg658IDOtdbVamSibPXt29e5+jLt503RqrlQUSZG5GptQ6o+X3dY7N0hyYYWoHsgbdJsnN8MrD40jYmdq0A/Thczdivqxdut1cDqW1K0vcxF2rcjInkJoprEisWPvWAysanKhw4qAyMbMpJJAEvv1OA5t0x7vUtu0zqFVZDHRQq135sk4OJ6GQ7MKzWqTJYe22ayefffNt+1qrXnywedcuUjsoslMLoKid5xlRKT15dW7N79ggOAis+YyrTZbmUTSlI9v/MUnNQUXWnKtzLPHAcCjdwZ4doQ5A+ynGkGwBeNYsoDPWLl9dJ/HfvRcuPj4QDjBJx/JzE5bHZ6creAccf+BC/eh3vAMEemJt4mPdzijnh8V4RmQP93xnIZpZnoKedKTiMwIcXFsOaE5cGbU4UmOvIgV/ifyzBMWdvoPaeFViOJiygq4+FAvPfyyHsV0eeeE+MhOAkADXfggugTSIx+n/Pb9vpZpnFMSo+BDCKsYN6uw6byPTskruNMlQ8RUHBHBAqQIngRHCMAKhC6cPfJPXa4hGaEBqTEs0CuRD75pSkyBx8xIAWnl/M7XlAQkmzkBZFpyQCGpFApKPsQtx60Aq1THAVSqSslVBHItRM47jMGvV7ZmC1ib4N3ygoDh5KQNPjTWrMy36BtwDYeWfaPsQVWkP
nq1EZIttjmgAIIGxE6tMjkEVXALXKSS0ayagQE7j44RAT2gZ2ASs1Ll/jAcJYAPjhiRnQ/RNQwrWG+pVM3ZFTGqITASoXoR5IAUgnM+5VnyGLQ0hI3DdXTBuyYG5x0tDChmJCeA1cyQVMSUiJCAFh8sNHGOmXnBLo3ImVQwc9TnPE1pHEcV9TGw46XcHHsgAjNiZO+Y3HJ4YMSTNOe0ImSR1qvYUmSgxsQqWf8nXkW60NIQ5WQ9hKYKS0gNgIieBAtmDMt1fzlbIICpKCyr1RTwEdNFA12CZAHQFNgFQkN0HgDqzL5tXAQtiFxrYs9tWB9uH0zq5mIjUlGd8+1s1LZNIUNVK/M8jQsu5ZoAVoZ5D1Lycikkp2q5zAQBiefpiGiAYRxGSzNqYh+qWgDnvD3cvG1cULP1xW6aD1JltbqqkEPbhlbv7t5ECs8/vb7rDw2CAjbd7vXrV01cK2I/TDGQKqzbdSrjWBMjIjKx37bNXNOQctddBdcc9gcyq2YpzdeXF2icJTfR9/1I1VLJTePUZM6TY1qvuzkNaJSGhCzbFSWrm3abc+q6lYoa0qEf7+4OXdtlKV3TuJb29w95VkACAiCzOyXvmD1XyVXidoXM0zSlfto/9LFhZvaNBxgcyNAX6d34/sY7mqsMpfSlRyIOdDwMwYU2tmnsGWzVxikVcq0IVEli++AAyQyha5o8jA5Jrd4fD269jcirLlqth34y83Otqdbtyt/cH9HRdtNER9HTvp8BidhDqSLChEQOcAGMkN2iNcXzyBPNlMipVa1ywigBiJyqnJB61UXHKWZkACAEQAgMKECLMF4MyEBFl4GDfZhiAABsL7Z3b28udmsl7cF/9fZdqTblORCO/f7WNE9HR6pa397v29Bunz0Z59HypKJy7C+uPjHTYej3x+Nqt5Jh/5OrDTA8TGNOfe2LCJbCu0+fjv00pCpWp2kMKujdYvYhsS9FGyI1lJIFKoDVaTJynoPJCJpEelU1ZCSGkkEOhtmtNm61keW8x96Q0jCwI2StU181S5qci0DRrZ4oRVHCUpnVakYUQjSZiNg1wYhMAXwDgMCBgiEkRTYkESfmiYwtgY2WD1oLr54CbamkvB/Qc2yuDn0fu2tF8z7WcbDoGX2I4fCQnPdZ1AT9+qoID7PWmXM6prGAldh67uD9dzfAgL6JcTcPPaLMJfU6IVAqMANA0QacVcGwygBplpzy+qG/3PbteuUdN5dXN9N8/eTqeG9/9O/9++vn3zvcD6snW3NlfhgP9x0CJik6ltXlWuf8cHfc7DZdE7rLVkCkB08wDaLVXz199ud//Vc/+de/t27X1cy8e/Ptq/gbn//o738/pcy7+vqbu03Y2tS/+5u//MG/9g9/63vf36zo1+plSmlsasoxRg8cY1istaSKrNv4u7+d3++5j+GL67C9pga+/C9+/eOf/F7/7bfNpvPbzaehkZihaa8/2RzSHn13P1StFgK0XeANf/qT5+N02M99QtoWBSNiZEYkElUjTHNRRad0/374z/7z//r3fvrpdJOeP3n57evXdzf777/8zALV1FueggeQWTQ9nsRqToCQS1qtdkSN1jJnEQTvQzUF0RDDNM6IKKZLXql3UazWIo5AitQqEbGUknJBJgRIRURlEVpP84DM67DLKYGpCADBOM0p5dVmDYhiOclEgaMP9/eHZBrYNU1AojTlw7D33hUVQPQMphabJQpQgH30EcBKmVMdovNoznnPiCkrsyslsfebuJumGX1chXa/fzcceqsFDLuuiQ66JqKzaf8w9oMnQHJzzWO2fT+/uznkDKvVBqWmPjVtdzzeoxoA9sNoStUs17pbdSnPZxwAas6bdccARLC52EDKbRN3F1tAqFWJuWnbzXrD3jEbdCFK2AZ/fHg4jkN0PlfptlsfopUsVadh2uw27AHUcrUQ2pTGi9X1F8/h7bt3hcruausYhyE/HIdSTFN5+Xw71TkgSohpSuxCqVZMHcJhnNWAEbu2G9OYpsl738YOCGrOUub9vl+tL8ahbC82jg6oEzpy3pvWtl1N46xS15cv1u1lnt9cXF2Nw1iLVDAopY2d1ASqc5qEqGtDyTn6UKYk09z4NqAjw5IrU+AKq65tnM9jwmJd141jatt1Fw0McslFrYtdrX0/TduLzgEtQXhIKCmZ0aZrYxtqzVkltsGFYKGt83s06TZx6G9K8V304zwfD4PzEVuhGMd+iG07DfN6var1cRV8aBNO/1AzQjMl+NAbmFZi5sUixQCXFCp7vLZ83Hh8wG/spERA+9DfI/6tbv+kV7BTh/PhGR7n6ktpLfeiEwlpcZj80DCdWvfHgfZyGFvI4yd3bTu7ZuMi3bAz2lRrAVCRfOz379/f3O8H1zQwHbquA6sG6hw8f/lSBMzg+tlz5+PXv/y627Rf/OD517/41TymH37xxVfffo2e18Ex4ZyTgjrvgoKau3mY/t4f/u5f/umfAsGqa7765c3lk6s1t3/zq1e5HDbPnkZsq42zVHT0/e9/OjzcXT5/Imrv3r/fXV7zKg/qr3dPrNwlHQvkV6/e5pwb4vXljmqMjsEHAqc5ewNQ0FLfP3xXaip10mpSZari/BqpiR1MUzaruZqADLNM2bJRyXKv/ajKzhp2RBxjN6fMnp89fTZmzEmr39yrwzJrrp5h3XTTdFzoNCJaUEuqh16e7LbH+wcOcRMvpjxN8/2h728f7kyUVv6HL56+fnuXC3z1fn/9bPf5Zy8un24P/ZRmC138yfeePdzfjvc3v/7Zn62unj97+X0wEhNPp5HxuSs+V8iiMjtRFR51O+dIcTuzzU4EuL/tY2QncsjHbfejeA1PLjLn/51vW255JNPpGWlVOCV+I8Bj7w2wCH/krFtY6E3LuFBFlT2mPL5+881xOv7L//GfP/R7Su79u7vtarvZbL7+7rv9eEhTYofeOTCbprQ8DRE5clKlakUwJibEUgoTN8Ev9e4IQSE03rMDA0VYRT9P/YpDzsU8+hiQ2Bw0q3b8+jD3A5td7OJU8pNtW4bxMNXQrLjm9w93n/7G7/bHdxWEzJiQTSIKQS3GplRqbTYeEfsph7bZXb8o2DTMTCZFvIuOMXpXp0PYruJqlXKdSkJmVRUxRwvZS0AV0IhRSkYuiGjMxZQlY/BVpRQBp6X2Rg6Zi4JrWiDC4Jz3+/5hfbVbbYeUEoC2TQdWU52AGmZX55nZx7ap81TrEBxvNrs8DaWIY59S5abG0MzDcNjv2cXQXuo8hfUauSvz4Bo0F4HaR0XWGdBeqovgkWvzCDXCI/J42sbwca+yU6U+Vt1ZDPYBijkz0s5WQY+I0PIMcLZgN7SP6tceX+3DY+zxMXDWpsGJ0fNR7YPhYhv/IeENFq7XqT2hk6qYgMQUF+KHnUGtEzCrZyLThxsW8c2JhARkYB4JiZndgkCZKp0/qoVggouPNi4RloslqxEbM6Zab+73VSp5JvbrJm6D3/h40XZd1/rQAQU1qmJVACkQC0ghMKkzGlQzABCpzG7p35dPycAtnzMZogqAEDECiRoie4/O5xglxpBz
rrl2ntfMB6hzyYW8a5qmbVsHBFYUkDySJ9c4QquTIamagStVUkkA4AK1TVx1vg18HfXawQV5RgNFMSNEYiLnkRlCozFyaIEDOk8+kAvkvEplBFjyEE8fN8N52rIUBxGfpYGsKmhGiNVscekhJvbBCKUohYZ8ZzYTk/c+z8WBp2gheCBy3iMxNE0DYDmZyBLvGzyzcUqiaIqgYqXUMjy0LJuOOyZGCN65ENh5IjZcOA1QdTHnNwBCUzM8oUhIgR0SeseIhloQrICNczXN4zD0x1HVmrZx3nkiMOCzbxY7R+yA0EBAlZnBqi059UQAKLUs+6dUVYAQPDELkqjiedM+Q0Wiogu+czrKiC2e1kBnup6qIaCqgCkh29LJm4LqaaZhhgaEaMQKwuQWE3UixMVJ2CojMRp4h6BF0FRrmRxQG/n25hsQ3zZtUSklrUJUshg7YMSaMA8gqfG1EopgmnuViuAMvWpFIJUCWshM8jRVNIOcZ4PqeaUUcqlAgOxUcer72K4QODifCgM0TDr1Yyrj9DArWOdp6vveUq11LMAIs+n24ooQh1o8+mkWrcqsx5x225VnvLMxzxWs+BinA+Zs4ziI4ZwzkhfgOWtgV5J4diWVrg2k4CI6oLmfKpBWBckx+mdPr9PQbzerMR/bGObj/vbt6+dXtGtXd3lYb9cgpYr1D8mqUhVAZsexCfvDiMQccdVqOvY6w7dfvWnWm5ef/3YebxhQLE9H8d7nnMcurnadlswAIbihZCiSxgJIm22D1UDh+nKbEkoZRJWACS1JASQwQTV2TlXmadptN8f9DEJNaEtfyCsidT5oSYaK0c1T4tFiE4vCcTi+ePZiHMvdfhLFYgJIViswCwgCLnSx5fCiYEWFAE5Zvo+7rCHiYsMJCiAiDkmX2YUpGCqaA1pMExVVwUwRCM1M5NSyEoGJsGM+qzGrare5FCOAit4bBSCOvll3sZdSxnu2uu5aIT/WsJ+MyBdYr9ersj9MvayudquVTfv5avfD4q2NOSLM/aDYueDHAnkoUisM03iY3r+fXUOIhgY+eEMzhilXm8cQ12VO1VKVRKhGCx0SDVUlm2YthZo1KkgqahXAs99CWGFVY8feg2VHo9Us0yw5g3lVZ7FljlZVdUQTxoCFtAIHMrGiznUr00jkgNHQg85IS4IAilsZR8OA4kn2OL6v/Wv0a25fWthp3+vDd4yFunYayG9fuNCgZpsPOt6BVGw3Yi1H59quCWQlIcr0cHPcz46dWg2RXev3d2/v930uOg6Siknh4Pzl1fYAx2EqtcI0icWWXHczHlQCO0oKpnIVwuHbXuz2p7+9sZLGfvJPP2uff/8n/+gPD9N8/+bPQtMN371tzG+fX998+U3w8Ye/+0V/TB4gFVs37crb/mEYxofDw93VxW+2G/ezL7/58U8///R3fg+/PH5yub7re7ddYaRnP/j0MA4PN28hQfp1+sFnPwyi3K7/wb/2jw5fjfZyfvvt3fd+8gNF1w/HdtsRmiSoRUjANV4VH+73LrjN9vL4L/45j6/b//m/Gz7dbv6978Gd0Dw+PCj+ya/dlYR1KHh4cz9efHrpUOM67u9HTJjrmDOYyvWLK+8wFwGDQpqzpVQoRA7sPIlKTrlKWa/tP/oP//Hd3Zs/+Ld/9LOffxneweV66wCCb4fDQ9Nsm9bt714tRvsAYAo1V+99GzpQNaSqpW1XVYsLDsxKnVQNF7330j2XosRg5skxGaGGEKKPVmZCRKVxmiFajA2YEnllyprnafJEzvuaMzsuRZvYajUEbVxIQ1ZDWmHXtMzMBCZiUpk5+jY2K0zHOU+HqWeCjb9QrVoVCeaclHC320aGeZxrKUTkQyuamJlcLFXBwDGPczIRR/zi+VOVPA2TMZhnMz+n0rTBu3Y4HB/68dAP72/7nOY8m3ctYL3p71ftqp+n0JEjZMexcSWrqqGaYg2RIS/ANoQQY2wYpGviIU1d21ysO+9YwFabBhQiq4PcRTflSuA+e/k9qsdUSvTu/vYhKa6wLcWmYWBHYCXEMM5T261UtVtH9OKm8qMvrhoPX2ZiFINSRc3EN00/pX6aiWCaEzter9usHhwxeqzFimzW3XcP49MXP35z/wvNUqZa+15Rt9cXkHjoj5tVd3XZDem4uXpZlFUmqgJAZsqMSqTIQLUKhNi2LU04mGIuOQYWhNA27Lnvx1QNkU9ejA6Dc86Ft+/eP39+yY2fpbZOWhesGhEVNdd0VYQseB9ynhB0020OvAeVdRu3K1qteiAiH9n7nEpoGyJTsFLr7no3zfPlxfZ4+9aq1GKbzVXqZ+84Nl5VmyZIKUyOAfM4mUKqGNxZgHbuhk+SLqKFP31qUgAQT0Gxp/Z6MUj90OAgnoEgAINzxtTj1wcXIXxs3T9WP9gpbu1M6oBTA4Xn+J0Tk+g8Yf7AFlme1Jbk2ceG7TxDf+yNHkUgeBJlPPZZgICiamq5ln6Y3r+7eegP3755s9ls2uhccKoARJury6ury2nO26ur4X1/Px98jJvV9ubu8PbuYchQEjl0sQnTYVhO7jmLR9ztOgEa+/FXX34zj3rz7s4YLrbNzf3AO4vPdvldM94P7+f7fOG46ieXu34awYV3r94FlItuTWpV5X5//+rNVw7qKnbrdhOB1t1Wa4Is4zhMaG23HvMQQwBLQ+rRPBmh6KrZSdGMiRiGrMc0EVkTIhhDpOM4qgqCBcfDnARcEVS1LIVggv3RAImpCQ2G9pPPPi9GN3fv+uHQIPTTnGthU6niWaLj4J035zk61+RSqM5T/9aqlDyCypwlV5g0zXNeBb/brr99m7/51dvDu/2mDdcvN9vL7ng4PjwUxwGc7+/7fqxNXF9fvwDCmotz3k4iMFx0iGAnY9qlwTtjmoqICopnIePZWgsN7EwwOtfSB7zxsaROZX4q5w/dq52/f/7j/B06l/cZ/DyP5T6I43hRG5gu0cwAhKpVrOZc3r3+bpwO6uWbV7+g4GLgm8Oxa1bvbt7dD3sxZcdqAIQp5/M7MCQ2IJGCRMSel8AjdgaE5FOeqooj75mVHTetSQ2eV6tGNbXc2JIZxOFwPP7mjz6zSBgJDMs0uc06TZO7vhjm/s3hnnX2kX/+q69e3v120z6xuifyoFq1r6BZJ+cu2UKGGEOgmso0ReTVbgchso9NqyKFfFROMbphfFhvO/aBwDWxKTmXkjyvQI3BQNSqeOcUseZKOHhigOD9qszHJjQ5HdRqsGB58purKc3btnXmCA2kUggxrKqE1fpK5nmaD6Wg81utyXI2XwmN0YEpmYzTrM2Gwm68ffAu40pSyX6GuFqv10/u7t7397fXsZM8SFozt4hoZXY+KTpEZ8bLzgigi8UomC4bKKqeiulDSZ3LyeBEcFjSABbv5zNeaYtY6Gyo9sHm+iO8/BELOvHn7MO38KPK/WhrP4NJ8Agy4ek57RQc8JExEp5MThca5mL1BYuHD+pJa/kIAtGjdgwB1fQEUCCdmHQfJbgRoegS2rNImnnJb0VmBCMkhUU3erJkOjtnL0or46W3NkNERSuwuEc0sW0
9w66LV118+WTz7Ol2e7nxbcfBA7FKVfBqBAKgZlpMzEyklvOYohowEgMIABB6ADAlMAAyNQFadhwDJgMIMfoMsS0iUaW2OV+07iHnSbTUyoZNiNQ1LdnKUMFZEUAzqIyKyHMSA1fEgNmHpm1D2/Cuw4tAT326pLAKDFXUMBVFESQjJg7R2PvYgI++bV2MPgTHqERkZIpmyweOtICtthAw1VQZnUlhdqqGAGSkVtFk0T8piOMIzKQVTTxzt9kYgHi3OZb3w+A4tjFwQNGFLUeMDSOgc6qC7A00MKBoDGWa85iGuQyg80bHzkHHIXhsm+i9BzJiR9wwh2WXllwUsyoyMxMHIu+InTGjZyAEJGFQg5rynKfpcOjvhnwcU63KzjvvgvcLRsTslwBAJiRSWDK81ES1ViNSVRG1WrXKUgGyZEfOuQIAMy2xcEutfsQqAjAzVQEDBVs4GssaI6RlSnYqpBO+KScm8ymn09AUANXUdFnXBACqlZDMVMFAQbUKSvBILpSUU64eabPe3t59pWbdehODT2X0rs0GgRtiZzLD3Hu2oiVNCYklZzCRUoGY2ZVaPLmcZq0CQmjAEJCisU4ps1bHxE2by+QcMrMhcIzoMVerOZOCj22MTANP/aRWD2M2QWr8brNK8yilqmqpdto0al6tNsMox5TZdcc+s0MfAngSBXTQds2Y8zRk10RgRIctNWZa8rTqQq2lW6+mNFUFUPAeQxsQMbTt3GsDOB/vG8fTcFSt97cPbROocVrk7v2ARDIXLTL2ubJrY4Q8c/BZytRTzSJVh707Yq05N80qcgMHvP3VV5ut95FInXdccg4Q92+PeUq77Wqch5waqbjrLo91Hufhbr6/vL7GBlMa1itfM+dEpVTn2OZca2ZwWitZEFBAwZBW6444j6WWlJ7u1lNSBdtddHfHWVSaGMFExdJQLNLxmDabZtVgGUoImA0BHMBilE4GQM6LJiYCJEYyNUI20FwLnRXzhGQo50MRwuLvSbRQ3TyxmrqT6zvQKWxORNU5NgVU48BqRmD1LDqYp+TAC8rqcnX/MD7cHZsYPMM0F44RbQpx1a5Xt/fv59Rvr6/vxrfr7hpQ2MOm2w5pr7U41CzH66snzuJ4nERcYm+sa+6GeVSj/l4DcIEMqW6ClyrTNIDUnIbYBjNNkhvu8lhW293cP7imYwDJ2cpIKEiMrjOKCAmwGhh1F0aNCIoIqkBJlve1DFLzdDwQt6G55Lgh13HozACo+BgrkEGg1aYIAHnApnqvwOw9gqAWgFnnySyYb8A8qjk74ny06Y2VETGqvwByBInw6FrIOUzZiV/7rrVyLP29piOQBd9mpWrg244Z5uPtPB5TGvNkPl7E2Bz2k0c8jIdxHuecctUhi0JYXTw9ptd9Sq7pGrNxym0Hx5yzJWSvprlqIcrGkmqD/HU/fjrnp9vtk2dX7Re/UWf3y7/4k1zz5ir+wX/wv/j5n91vuH3xm0/v7w7fvX17/cmTecyt2bdfffXnf/oXf//v/xt//qtv/8P/3R9+94s4PEyry+7Hv/0iYp2O85/9D//002f/uLtoxoc0TOnZ801seLUK85vj1Wb3L/6Tf/KDv/d3rn/4xfSQ2q559r21/mp+983Xfndx6Ge/bmuuiE5nIHN5EN/gsydrBna/8Rur3+27ld39f//Z3X8LjUDz4mX3w983Ro7AIP74utbKV8/Qta+/eZ+dbi926t3usjWCkuD2bX/z7btpOv70937QPG3BGTsPYiYGaFqlDVRANpdNiP7NL8t4TLfffffw/tt1dzFYKUIM2h8f+sk8sgvhdDBCjU0UFUBKaeIQuhgJ6mrVTVNCJFNQUjCl0zhY1EyImBnBpjSbGpOBmWqNoRFTckCMXdsN42Ag7IHLsjhBVIrW4JqiIkWDcw8P903o2rYpVUqpIQRA6tar437PxMxUraZ8qCVF57xvqlQVmecUXfR8gojznHNO3jtDMrNxHkUqUkQEx8jEjojZgVrfVzE1JGMuOXsO7AjGuVYdsry5edgfjkVyrTkGblycppSqrruuTLlpQ+tZNFPAEEKtEzILeRUptZqK904BPaIDcABcwRJQR+xDnovzjAIqkhXQsNTEsfPOldqXOipoyTWl1G4u2ujHfry7ve9ijMHXKW82K0BMRS8v1iK52rhmeLHbrH776puvfz2mfa51nKeL7cZ75uDRdDjOsYsOWbHinIGMGFLOl+5yPL6unchkgWm13eQ5jWkcjgMJNO363du3u4uLdrXSOm9W6/3tSMihaaqoaDVFBsllurh6UsbZATlGH+M8p5qziKxCW8Gc4zRPjOidb5oARMM8h1CePL0kghib0AZqnEduXCTCfhpA1LmQUpI6LnT9Y/8+BOuaaEW2rescjUVMwW9aUODIuZS26XKVWvI2MJX95Sbsj1gLHA7DJ0+u3968d8HN4+yoc9zkOYUYCWmaJ61V+NGr6CO+DZKqnEIzT8SNkywMT8NAMzPGkzb/FCB4lq/B2T7m1GucXIHMQD/qmj4Mus+Uj3M7dRabATx2Q2bweIbDvxWzdmrF9ZEbgh+6qvPznPAlOGmHTlapJ7jLTtwlKFL7vr+7u3377v3xOFxfP53G8eb2JqeZmWqtrWvv3/ef//jzh9vjPE7O4eX19W61+erLXxDi05fPybk55zlNWeYXT65vb47aBB/inMv+4UgizrlPv/fpv/rjP21W3Zzn7a7lLlq7Ij/QNF5x/PFvfL9tAuXx7u3bMszX22672vz69au1j5DzOrYXT9Y5jSD2+t3NbNWHGAi3LbFjz4S1boKrlkLwhA1BQxhSqVmtwDzmQY2cxw0HOR8mshZy1LUBc0FP1TswV4ZqCElkTAUBtUjXRUWQWh5uX6dpHo57KLmigdk458DEQI4xerKUxKkWfbh/v+68oyYlCeyP/YhEV6vt7b5XA6s251J1v25cw13K8v7dcH8/rleNao1tu77Y+TU+ffkyNqv9zWsA3FxcMj/2n8thW+BEZNNlnGZ2Cp85e7vgUhcfSBMnmPAMLC519VhRCxB5Qnkem3xDxJMH6gJFnYFGMwNQXRKaYBF2nOoVAc58k6V6F3MKRYRqVclMVNVEpdQidb69ffXlV3/SraMYpqGUuXYxpmnop0OeswtOF+qGFNVqp4BCInJqoqqOGNSQqdaCSEtbWKWoKJA5z2SmtXgCRi6CrWt3q+tazZEEH2saIlAuslq1tebONcOULzabh4fh4WH0zkIwH9zls5f/zZ/8j//mT35QZo0NmGKIW2Xz7VYLZhHyMfhIbCmrVFm1zd047FarEDb392+7hqsUIBqnKeW62rVzmQ0oNmsFFBCtOYQ1IMhJviq1lsCK5KXYbrN9/fCd63aBw5JPobVSyQEdiZBhzbNv1ySmc4nb9VBuunXHAd/f3j65/NwqTnUfGxfbployocXuoJRqxWKz6g93U54ChWnoCXC1ulq1V/v+jT8+bJ+0UgtxywygSepALgBEO5UHnEPOHhGdR2bk4552Yr+dDh1nHeOpID8W0OIZDP9oMzs/mSEAEH649wce0S
Ow+Yh1Pj7g/LRn+tzZEAnPGL6d6UqPz7D4xKiZEhJ8GAUsx5gPFKOzQOzE33sEoh5vflQ1L2g/LUuFkInYO8e8TMGZCJEQjYzsZMx6EociLFjNCQJDQJOqVdyKvQ/e+c7jtnGX0b3YrV88udjtNrHtIARBM4AKUJTE6JRZXoqIEYiaICGaEZCaMSmRAqCqELIZmigQGiJhAhEiZAhAjpCCd20TVKpKnUvZqF5mN40ymalYVihAJNq4xmMs0qtpkYE5FEFAAtMQA/sWiaP3ji06bojXAVvPDoG8pVwJ1BTUlAwB2DnHzmHwoYk+eHboPMtipwRqBGimWsB48fs5Ex/NTJacPiA0kaqCcEpvRwQmR0Coykjee0ByPjB5c+5F1kMipBAYHWFFYnMJiNkH7wAqOWfEpSQCMcieTKDUMrJV1nLhbeu5IQ3OkyPy6EJE9saoZipitSghGgek6KgJ7BC9B+eJUBkFTVW1TOOU693D8aGf7w/TkMVCDDF474nJMcNCuIATMQ3AxKCqAVDNYnJyAlOpBljFZEEw1RBNVJ0pESIwqJ2vCx95FemZOIqAixf7AsGR4aJmP68NAwWkhY+2jNPOavfTqQYRAVRVZFl5p7AP01ormDLoXMxsKmVYde06djd339R5buMaCadSwDwgdasWVaDOUPpIJmWap1ELAEIq/Sp2IprSnnyQqtUyaGVHVUrJWUwJjRDWTUDBnLOhVUmll8iEKnfv32yev2jXG2Zm1P1+BvKAGFZts2n6/tg/DIc5ZTWplcCAQkqZFMAjArWhkVqO48B+1Tbd/W3fdXG1bd7dHCSlKsaxieCZcRYap0SI7EPKk2NnxqWIJAEBLTBL3nbtnNIwlpVr98eH3SoKyAz1omtUNFuNyH1f5zG5SCp9iM4xMZLjtXCeSg2easlEbKgoLrqIglZh8/QCRCPitB9L5Hmam/UWQ/TOM3ibscepW3fznEsRlVumgKgEeOz33ociGaF2HacixD5N2SGTowqaa63IbeDFFKpioY7gKKR4GHNsglFtNq7uZ5UYfJAk4yBtE/JUbvX+kx98evVkd+xfizCTKyBk6DnIcsZd5ISguLhiISkuNpygBieKMi27rxI51apaid0S0ilqgIoAYksGhhHR6fGn5bFciRbZpyItVsCQazr0dy9ePFMCkTwrrrfPUn8zjn1Au949HftUFDabFm208T7dv9r6YB7cyr++fddungYjtcJci8zjWKjb+hjQIOVkvvqdl5Egshl0q/A8XuSHXgqQRM3M3OWqDAZQp7n3IdSsTIiRwcjmfnG7kCTgG/aNDb2pAgdqt6IOTB2o1gFhrvOhv3sPzIRBjatCQNKSBFQNXROrmrnWcGXWkk6MpvVAoEggU0aqVsWIqX2OFBEKlHsoB017yxl9A+1z4At0ESVh7ZlKAZNwyb71jDgey+EtpKq5tNcXwivI1jbRWz2+++bh7bsiSE0TwoZD0w/33lmR6dXb14f9UcDnudzNJcmwcjE5f6xTFRrzeJgnNSuA/TQrOlSdk1BgqZI9Dknh/fTi29vu4spSbXMd3n9rAL//D//uu5v5L//LLw/3uv3+D//Jf/I//PR3fyNqKPf9v/hn/+X/6n/zvyRw//Y//rdW4TLq3ZuvpvdfP7x/0Lh9Eeby3Xdvr549+Y/+9//rt9+9/+TZ2lW78D6JTrfT8836F3/238Luh7/7D/9Rvbh62KdhvHm1f/fqf1jvtl1BvHyywnXL7CjilGW7CmAwT2ZiBjbuezZ/HFI3jZfX37/68W/f/9N/BQ8PD//i/43/4Lfl8xc+wXf/v//6+z/+LWAoT/PFJmrXNFfdONe5KAdSgvXFytVPsqQ/+Yuf/eE//NcIEQNNc/HMINYf8mbjpPLbt8O//OM//f7lk/030+ef/eThqIf7Q+fjZtP1AMkObbvu798DnaQ3q27V96NoDTGsd7tjPyC3UOc8z24BU8gTc5GSa1bTJrSq8phfqoqOAoBM0+CdL0mK5rZrci5393chBBMoOQfvfQglJ1Nk5HEYfQjAOqd8eXGVp4kYSTFlrXVmdJ5AIIN4gOCdL5KbVTONyXJVMXKwWndzPxtIqTXE1jGyC2DKTPM4S9WmjaYgtarWytqEGJiqlfV6k3LRWttA29WqViCo4WrzJk3zXRKxqsU52qzaUnKpRbReba6O/QhRN42/2u7evn9fZh2SEXEVEBElrgqND0kEkbxzVSQ0ATE2QT05q6XpPIiWXNgxMuesxerT1VZTOtzvAZOijdMw6/jZs8+qDvuHGzaax9S0rYB5F6SK1mQlM3gQBinr4HdPnrx/9/b+9r33gURzUZFasnhGM0N245SmVNiRSE0FRindcexiNwyvf/qDH/3V3/y5ipFzu9XVNE6a6zSOwYfjw549+HbTtC3o9jj2ZH6eegBkpjoNWNzMZIirbmuJEbQNVksuavM8N6uWeAKtSJhzBYfR++gYSYMjM2ubBpxv/Grs920XQ6BtFxmoatpG109vV90zlTI83BDHcRibQHW2rnGHUQP7i/UqsVy/vPryq1+3znu3nK/gyZPrNAyC8fZhXxX3w9g0XVZRKymnbnvZH29A0mKnq6J5zo8Nwklzs+SBIRKSgp5YrSbnuZmCEcApuxgAzok8H3qNs4sQGC7C50V8QHAGZU75P39r1H5uJ07AFCwmEggfNTrL2e4DY+jxjZ9f+SywPo/QT+CXnX+uj7qxj5okABVV01zq4XjcH/qp1nku6Pn9+/dVKrFzwZHAbnXZtfHu3X44HrbrzTBOV5ebPE1qmRm9w5u7G5Aa2Pn1Nrbts2cuq719c+M8XVxshofBN7sYENGIbTjOZT54gr+6u7+O4V//8U8aD3ev3hyPU8P16Xp9cbEC6A/pcPn0aYNMNb+/vz0WrnmgpIRu3azabsWGzuFqFaY0HY77qtSt1oQE6Mc5qSZCLhmA/Hbz3DktZXh397Cf63AYUdvRAIAIo5jNSZyLHXFPekhlzmXxr3WRFXQcRwCe87yUgYGMYwZGVTCBGBiJyIw9j2pPuy5XRdcUAYvumHNom3lK4zygSs11LLJdNWWYgmNEjl3HLXqzkvrL67af6vEwNOgeDsPTza7xUbUSGUI9cXMUlkP5gkjaoh5ZmBDLr3nxyzUAEDuxLE58IzxHkz9WyUeF+Nidnu71cTv+t+58Rh9twT+XrMATCe/UFp9ULY/6ylOXgQAES+bB0lgmu3m///rbb1wTmi6u2g5zVpHYhFJLKtWFMM9Ja2HCLEVqRQRVA0A+MZoIAEClmKiYc+idM8tqsuRNIyIjzf3c7tYlJ88Uo9vrnoKG4JuGcw1dt9q/f3DIq+j7Ydhdv8RmC41/9fDz33j+cp7G4zzF7eZXv/55+uEXXXdRoeRct6unw/A+Oi8ANZfL6yeeOeWRfAyrNXH0SGaVCFbrNSGx94gkgv1hCG3Ttu3Qj+vNbrXeptqrVEQihgpzLej9/5+t/3yyJMvuA8EjrnDxRERkpCzR1d1oAN0ASACcIYdySAyHu8MZ467th7X9T1fYkjtcKiyGxBiaQ
AONBrqrukSqEE+5uOKcsx/8RWQ1Z9LKMisj3nP3l3H8+j2/8xPRM1uewWku4GMXm4tp2Ie4Gkt1YFqSluwCp3nm0ImMaIpo5JCCc02rBOjZe181udic9rdSgJw3TSISu+eRKecZgdr1s3nKp7uvJYbVujvc3AE3bb+N+ZgPN9IyNmyhM/aoBmVEjoABqLFHMiY8qlfw0UoCHogGC/6Cj2X2LWgHv4UT4YfF+Iwx4iN0/l9kpT2q1R40mB8Q9seX4cO6/ghT2Rk5fbiW/81y+sBUOhfwGYJ/OM3DJzB7BI/sEdZ60KA9iLkeAKbHEcID7xNNgejRbs7MTAEVBBGZSU3FTJaEtGU8cR6Cg4gyERKTd8AOfGhW/ap1Vyv/fLN6uu0vLnoXyDknhmaouVYxA1ouUKSKai7GaEiOdNFlEKIDEFNTVUfuMSH9g1vfoo02IFMm7KIHCSgipa77Ple77rRIvZ0l5ykxTsERObSSIYNkNTTTVEVNibhpVugbdQ2BlCxkih6ZPCiKGnnviCCwSFJCqaZF0KkjdqFxbcueXfALrIOOUY3Uac0ACEaLdTijqdQzw8wqAZsKACmoIaraEsYFsMigkJkMoW1bMgK1VdfpLZ324/P1sRRzDkpNkUOIrUcGQkJB57BpjZ0Xr/Ocp9mmMdbBQzIwT3DhXB9ddBhCZM/kiHzg4JEZHKNjYGJHsXE+xhBj45mRmBDApEoqJacyz+k4DPsx3x6nYUrVkL13zhMzEi3xkiIqaICkVbTWBSMragi0RD+Z6RJ7dl4MiQkQ0MjRUk3EBAbOMz10x+7xZhAx1TOoc2ZtmD7An/QgrQdAVFQz1TNh+gFCMgPAKgpmDAvLFVTlbHatlcgtqn4rqaTZbOpaulyFw/2NltqvtuyCGZViwhzbhghRMupkJRXSnLIUi64/TSOiH+YklonIKjjwKU2OQE1FlV1blECr6Oy915TZNQAU46XzIQ07NLu+/rSIS3cjM7gInY8yzJhJah0tATh2HmfLk+Qk/dp77xlziHEoMzp/c79DoODj+92wueiyYDnK7em4uVgByOt3d21sjOg0FzDWQuyoYa++Ki4WAOCjIyNmZvLjlKwqB3CM/bpvGu4aPo1jJWtC3B2HCaXzwXWhaVwVQlRFy3U83Y2asxpvV23bxGE8xdj07Wo83XcrN1V5e3PrkVbsu6sn/bajOY8qVXW8e7eN8XiaaCbRyg7JoYAO03EZK4LgVLP3jUdXREseHXlqAkKaclmM0FS1VgCkomKzNjE+udrs96dpqtWgbdB5brt4cyjVo0dCslRzH8NU5/1x7Nbduu9vd6MCcPA1JcKzt5VoZTB2aCpLlduS0ghgy4dfrL8WmqvK8i4m0gcLRxUhIiYWESRUg6oqqgYosjhX8hKtGsz58Cih1+jdMA5zkjzPs8kwNxfr9Ty9S2k/zJwRbm4OW8eofeMvP3r+EXhWOQ7jBBDAmlXviuwquGRlEiAjalzZj9XmLbUZ6IS1NXO5uuCV4f6UutCQp9DGkpWdR+PTaVh3bZoG9v7py1eAiow1lbi5qMNRldg3KkrIhsTNRYVGRDwg5pTu3o41i2VVhwDcRMOILhg5sFrLDK4TiCoOworIk044f13nW5WT857jmiwItdReAzVaktP3kO5Qk4kCBfStxSfmOgAAnbQcsQyllCqBmy3UvdMx3X8NAtx0oX9WQpMqoiMo8/715/v7W9+toLim35yO8+5wH6I7jYebm29KFYamIqmzm/2NqLuTUwzOha6y6WFiH9kBSo1tmDMS+qbTomZpTjVn432BX74//TCEy+urebobTod+s/rilz9P2m0//exHf+P76Vie7K6+erP/6q/+82/88Ed/5x//vcL46uMnN3e3f/pnP/313/qR2fid33rx3dhMY+n6zkEcj3sC8I7ZcNztr15cFM2+ccdx+uHf+0d3f/mmvXLenchJQ+Ef/IO/td8JscxjEkkll/tUuzb4EJCNCCkDMCgSNi2Qf/FP/u7xP/yr9a9dzZ/S7rMnl+H5BU3wW5+WTZBRv/P3/5vTF3dvvvzi6d/8yBHPmnIDPpCIWcZu09AKu7YBiFef/NBHmEWLWuyjU9AK68sGwRjk6snqb//tH37nef/X33z9y/c759evXnVlOIzD6TQOjmg83s7jcX25WJjDNGe0RYmjjlzjfE2T986IvA/MPueUckaCKoaIRSogTnluQlNrFbBV1+c8RRcVJHKACk2MeZYi4gkUMDQx52KlTHPuutZ5hyreN/e7vaFWc9y0cy4K2PZdnqc0DTFA8I1jOhxG571ZZXaAVERLqZrICQG7qiKAtRQTKiqg4gKwd0AKREjsHddqapRrKbkQgTzsvoDDosMQkVwAkIj4ydVlE8kkT2MZhxzRtdRNx2HlkTr/4vrZN2/ehegc4lC1Cg01q9bWdd5RqVUAq4jzPnpXSp1pIsZiOpbamjOTeRjXq7VzTtC6di1SVPM4DW0Xci2I9uTqOqLd3b473B1W6+00zWHyfrWpyGIpeFJV5mWX2Dp11PD3fvDZ3btvlHBKPE6zVNFZP/rOc1WdpixgvG7ykPTDMLQ6puN4+ObO+dUKTEtVEGAO6EFKRY9VbH8cg7jtpunW2ymXNB89IXsfmkaLIOBpt2v7FfutM6jzSUw4eFZT0zQlRy62QVRzzkZSc2mILi83phYcgWodJwHufEtAVs1j7Nv+3eFtcD6GTcrqCbWaFvFENZfbd7eEToFd8LnWy4s+egoc66xt35ScMDpmakM3pGHV+yJumE6k0DTt1cWT/XEMZV73q9MxL3PhKjV4/7gjeuiC8bElRoBzxoI9KnMeSEbnruHRPviBJfTQWH9gbcC3dWZ2VkycuUO/0rA/vhrPsc9oAI/BOvCQFn0OxHmQr52J3w9kI4OHgTk+MD8e5EVL87Zwxs8ZOAu7yLSUdDoe94fj/XD6/Bdf7U+jqqhKs2qOd6dxPP7ge9/v2mbI6eVHL3Z3TfDN048/IpLX37yZkyA6huoI2qZpo785DO/v59D0P/jB7zTtX7kgucx/9u7Pj6fd6+N+e31ZS3VsAtVHd7l90hj94Y//zEH5ZNvG6J68uLYsX9/dPdm2BGTocp1BUrtdr0Kv9ck2xjlPYxoYKU1pzJolI8GSgTPlkvLgPF5tXt7v36NWAhrnJKBpGqpUR81HT9buOe/2aZdkTKMkIzUHxOSyoSeOHodpdA6JoOTsufFNMIU5F3Yuz5kZjamqOnZMCGbOjEoJngKzKTRdO6V5TtUMpMimaZoYpyToY+s8ZvGNj76tKaVZkAsgzqWK2X5MHOL1sy2HNRqNh8k711x1oqKIJMJIwI8t7jldzB6cWhDJQJcB8GKotYgP8bElX0Q3Z1bZMlx7rJDHOtVzf21wHlafK16/FXC1tPKPBI2lBzYAg0WVA3i+vLOVy0IlEhEVraJaDcfT6Y/+4//yV5//eEy3vvdf/dEXCuo8nqZSTWu1lKVqlVppEWPoeS+39NlFykLRWKgZtqh1jFSl1Axgnjk4n0vtQtv3HQOKWkrT08sn0zQCQNPE
02lsmoa9Ox5Pq/V2s2k9c9+FGPDu7v1Xt+9fPf90e/XsJz/7yXe65rOPv/vm7t13Xl1W4WNNLbQ5V7HqPCOx71hrLVJi0wGjksW2TzlZqJuri93d7YouYgjOuVRTySlsO9/EaTr1q7ULa1NMag6XzCMQrYiAjkVKTQU4uHZd8jinAdlVFVVzpmr1bJJLAQGtZEdoWpi9SPbOxW49pplBm66bh/vj/r5bdcShlllNkWAa57Zr+25VpzjLlCqsmm539zU578NKSh0PR67BtZfkLsi1JsVsNpkMA7JTq7SQbfBhlfs2LvQrYIzBo+0JPv7xAa08s9oA/jfvPQOgZ47R48sfzrhU8ocz2QMn6dv1vFQp/srx//fAoiXRCcCMHk7CROelfMnOOhuF6RmBRXhEiJasSXzQhOK3rv18Z5w/v6kpGhIiEwOAqAIiLH2NKj6MCnhxfFRV0yXk3jnn2aORKXp2bWgutqvVKvRtNJOacS9HYoyOGQmBSExEay05pyrVgAGB0REHMHHk1OxBN8eG57Mt/EEDI2IgAkRVA1AEDo6xCVYl5yqC61xzrrUhtbIrUmuZThP5YN4zmqeFiqtSigutj41rWmSe8kAC4Mk3gaR4DoQoYOTYuUDqq1oWqVkNSE05eu5a13XBMy2Obcu/JSEROw4igmhaZyI+j2VM4YyVLOEND1SxWhYauIE54GUSE9vIiB5dTaUUartVCF1EdlwcyYZ9ARWnE2YBVw3MFs5VQGShTIApTdGycxIZ0awN1Dr0joiI2aFzgLgkM3IIGALHxnVN27XsAzlPTKZaRERsntI8zcfDuDueDsdhN+epWlULsemDR0NbxqRmCFKkEBkQm5iZMTGgemIiQnIAqKaOUM2IyOyB/6mGCETogl9C0wiY+L/wKtKHycOy7VgcXx42MYsFsKouVbL8P+Aija6P2yQ5j69UzUAVwXiZL6gwMwBVqaoVTNjBxWobvO13b8qco2/Jh1KFkX3TsGNmZ2XEkogEUKf5lOdM6venAxJ45lpr5MYArahkgcxAVDHnnB0Sks9lZkA1MdEq2TgAOVLxMc5TcjT7rhlPp2rmMPreqaX19rIzGsZDSrOT/PzJxqF7e3OYkh6GIxvdn05PnvTMvDvk1frykIYWIgD3m5WKv9sPx8lcpNXFleOoUJyUOakPDEBjSVlrpGBMVYC9Ny3gUKtZLZuuU9a+wU1sh+G0vyuGuNPZUyUD4ywaD6cp3xf0vOoD5OQZRCCgQ/bjOE+jEmOR+Xh87UCz19VF74FjiDLC3fs7gbm/7BGTFexEHOZ23QxTOh2Sc9RtGgNVySEycTjuEpi22xUFXV1s+hOncQJADhxBDuOkgCaWzYXoGbkaDuMQTdAIVJtIOeWG3Wbb7EdBk9C085DSnAIDIBx3u09ePHnx6uIwzpJNgJaVU0GZyBbUHAEIz0ZxSLXKAtAvpm62BB8ulbnsSKosuDwtNkagFRAXDLTWarocYdFCPqZtmlmtZ5eWaZp98Pf7E7vW+xXXcc7j3TSY0XYTu97fv7vj0EZ3veWLOVmRk4NkopAL1+KkTsPgCJ2jm7vXapdp2GNgqjbXgQaXy4RdnKehjuWyu2ixauMVDbQCQa3JZiMXQyCL7rA75nG6fP6JlmJTDcAgYAAWNxhbmw6mpjU51wJHkgxlqmma5ymV2cVGjVfNOpfkYgixF1ME5tCBW5HvGVDzAcpO08AMrunFvYBma9QAkUOPkmT+BssAmsBIkbHdQriuGNBFSDOWAzD4thEs7Dp0hGko779MdZRq4fIZd52FTU3qMUVNx8//8nBzS77jEKvQ8TgexyNxSDndH+7U+9VqOx2H92/fAMGzvrs/ys00cdi8ux8UmPx1KYd5LiLggxPAVLKxq6BF61gKB1KyQTKjYRmm/fuu7Q63701x8/Tjv/yLnw2z/e7v/vZpGHPL3/u9f3h6O7iG37z+5t27t3/vn/xd656sLi83QQ73u8DIgPO+/Lt/9dM/+Ke/8x///Y8/+f73fvmL3S9++uXf217GjZ/ThI2jhmHby1yGN1/0m06P490h5ZTB+7/48S9e/ui7T7578XZ3vLp4bminobJzRNS2KExAkqu114227dv/+f/Z/t7f6a4//ebzH1///q99/cVPXn7nBz761BX/vdWLi4thLFDL2/2bi+/z+llz/fEmlTLfDiXD4Sad3u98a822vfr0CbfuuBs7H7Rot22m0xQic5pePOdpuPk//19+52d/vfvx//rjXT40VFOS4XDo+yY2jY8up2G5C5CMPBPgNA/RcwhN8AJAY5oBAxIqICA6diaADCqVXfDsiMAzE0guMzmSnM00QwWt86AA1rXRB5Qqq6Y/6FyrOcJpTpTh8qIv6RSY2belJjAoWZIkh4QGq/VGSlUt4KPz1MSYsn39+t3N7vjs6nq97mPwwzQSQnBtcEggquKJ5ppRHBGZ6jwnU2i7GGJMWaRIjBHAUDI1AcHmnJDYVZsPqRZzPjYxFrMAWLJgna4uelA47vdd74LnYZ7evvtaBQl9hVpK8sF16FHYEyUTdo4NiRCIjFlVKgixFwTybi6ZwULbAnhEl2q+Wl1jmbLW6IjJ3e7262a1Xb04Hu8lla5thuFEPsxJurmiaB9jmgoq5um43a5T1XHOUEoXowGVOac5TaV48vtxvkiYkuSaDdEZnXL1VX0kqbobjllVazmdRgVxBl1sUpUxpS7G4Jmc9b0vRdOYJh4vLq76zZVqmWth56SKlqIguahv3DDvtlfPrGzv33/NTpzVeRw37UbMwCx4l7X6GI9TAaXNheuiM1EA6ryPiGbCmAO30zzNJH3TMbjgLsX4uL/ZrDel1P3pvmmv51KfPrv+y9ubz65fbFc+BLo73jar9n44eS2xcU3bIdDV5cWb90cfKHg67qdV05eU16srmjSX1PSbpvamlfLs0LE/b4wQH9Q6S/wrnrsAOgfYP/bAC2hz7tLP8mh7iPR5kHfZYzLaQlN6dLf4MMvWb5uwwgfixwfIZ8F58Jyu9tDF6/kU5zd9oHh8aHMe2zU7G3J8a8p9PtGZu7TElajWUlIp6Xg8vv767dv3b3LRnFLbtzjaXMauWz159uzi6QWnOa6bJ/hkGKsqzlNer7epKPV0dX3xevwCAUrS8TiULELH/fEUrL765OnrL99try5++zd/9K/+9b/sNn2HcNoPpcrbm93ucHp1fdVu24+vrj97tp6n+8srf/PN/snTdRcvUfPN/U3n3WV3SSQUmiOU22GSmufDYABNZAdMgMEH5oAGwFgyoJXd6a67WBHpnMuKN8ddGWosDIfxtN8NBQUUVlfXbYLxOBcN81CmlCrz+nJdh/q8jV3TpFIBdTicBHSek2MqOUvNSMEAQvBMtDjLbFe9zPsY3WbThsYrmENjMDD07MYkIlkM53nuurYLfk7TOEMXOKyDc7TumnGsU9IpTVjr+5v7/oJevHwevdusLlQqmiHQBzkLgumZs0NIZ7QIVM/ojT0APecO91FfuXA8zq3sY+V9qEN4/N65RVgGyksBPzJBHmPNEBAIjBZ
G3LLJOuNIuLTDgIhqAKYiIlJEJJvNZbq/v/3DP/p/neb7jz7t//jPdm+/vHGARJBrLrVaLlOqqRQgIIZaxawAgogAADmnukjPSFUWBeYi6yHmWpNKFVFwXs3YU7uJw3x8dv3R8XBAJmQfvSfHqLi9vLofhq9vxk23vr5cg8HFtg/ed/3q/dv3nvsv3r97qa7vn077qZYyOJtzL8WuLp9ZLqfTHKOfp2l98dR5JrDG+7aJsetiYAMfmE2OBLjcaG1s+j5qqdNhoGbFLnRNNx7uYte1q8vTJFZmAkZEJC06etchCMnRUkZ2xG2ZbsgbN42KYi1IRDGawhJVGWNM8w7kMsQ4jiWGNnpIp70whhDqLmVJXe+1opbEzCpCDLlO3Pj+8tnhm7/WfPBXgUM43b297LdSKxRiEx1vjdjCBQBCHti1qhk0wOJYsqx4ywq5gN0fSES4tJ2PdQbfMg361V/2QH97BCThAYf81vGWJfrhr98S7X5YDB8g/4fV/fxKWLjQZ1ctOMNP3yJ4ntlBhAh0vp+IeQGHCMnQ9MMHOfOnEEzPaNSyjuMjJPaImi3noHO4G5LzvITTGgJCFVWthFhkobQSAgiYdw4XHIxQKyICMhmhi8HF0HbdarXabmPwXItMc06lggqqeaYu8CpydMwIOU9VJJUls1w9B0IPYJ49LjENqmDe7GzGh94hsYkwApNDBANlJLCqiJ6ZQpCek7iKYzXLtSjVygazniSnVE+lnpx3PrTO1SIIiOg635OPCZByxlKcoSP0Aq3zaBXIeR+JefnQTmPJgk4FQICUA3cr37RuSZUwXRLmlc2E0IxUzBTBmaGqEACaCRgBqxiSMwMwsVq0lgf+LgARsWOA4Dx79uSd9/UgPsZ+u95crms6elTHUIFmLESYBeaqGdAJgVQ01Xpg2XV0jFYaJ433jEyO2TkkAyJDPDtQmQF7blpu+9B2IfomOiBQVBWQUqUUqTWllEpOkpQMGNlxQAUBJtSaJdWKwIAmlf2SdYZIwDEY4ELKc8yIcM43A6KzIxgudkGEoCLLxh5woRbhoideyvgDq+ghIBXBEBDVhJexgInZMkszQxCpy05IVRUWZpwt8jJVOCeSIJgKqBiRmfISyqNKACYWgmuDRyeH00ELNf0lEhooO/ZxheRzTVZG1moqVUueT4RKBClnM2V0hIiGhJxrmafUuJ4dGhQAcYwPVNbqfB9Dm8qA5MD7Wk2kRO/77RqN47pbPdtGb+l4zw1fPX9x93rwsY3kY4t5zMM4JrX1tnO5piyqEjkWtaqlmt7c3xqZRzdPM6IzlKwyHwUH8t4TlXGaa5XNZjvNUxUgZjXdnSY0S2qziUNg0a6Nvo2MsFl1p+Nw2s+RESo7T8GzVgVkIjiejlKlFK0VuKQIGjfNqvMlzWaVCIl80/mKtu0vbUpVEzGNucicN22/9eGLn//8xbNn4NEQAtGYx9BY34TdabZq2RUxVUmKHph736SpDLuTuby9ftJvu1xSKSIGITSx1lJrLovEvqBK1/azgiaRZKIwjWW9aeeU27YJNKS5zlhiCE10aoqCp7u8a+c2+mfX69fvDgpmSoSAsiA/iLAg1guWr2DgiQxIRKXKkrSw7JJElB+c5KqIc+6selc8M6RMDazK4oFtoqJqxIYIDkAqfUuVjM3FOqk4iLmOTWigYpH501ffff36S8QUnTMXYty01h3efPnko4sS6pvb48tPr7/88iuGEpqwPwwk02l413WBIEhOjptV8xKslnpyCk10uRo6vT3cvGvWz9dXYHUck0nVSQrXZtveH+6/evt2tXn6/nboGtpeXNp8qjWT84pca2GyIhXDGnwwMLNap+O8u6+1kHEpKFnupjv23GIo8ySG7KJvPZDoeGNytDKDFt9dqXlwPXJnSiojSCKrIIkcQNMA9MiNKguzYTAiJkGeAL34mCAqOc/g8t34/q80ZfLRXawhriE2RYEkcbo/vf3i+Po1+PU8BQvusNsnKRSRWEtKYCYFK1lOc+NJqghjH/1cIXQXK/XjpPtTwmZVT4M4r86lOh1Pp1KFPNWSgaBWSSq2cfvDMYLd78an37148YNXP/jR7/z8p6+fXK5wOt5++fbt559//MNP5a78x3/9H/7gn/+TT3/tu8d3h6/+7IuuWx3+8ssf/+LP/9Y//O0vf/Hu3bvb7372/e9+76li+bW/+d0YfTX7737zb/zp//J6e92G1hfWXSrdxxe9QD3eIdrTV8/2Xx/SMMDq6rs/+NH62QoJri42scEKoITeUx3hdMzFzDNrrbNz23/2T7Y/+0n+4uvVi+f0ox/Ji+76SbC3h5rw9Hq/fvaMr9fPPt2cDref1Ks58f03J1TSKs3aSc2XF2s96eqSobEQMaP26wjZuGE18dGDiflw/bT58u3p9oubf/3/+ZM8K4Ctrlfzu/3l9YV3bnd74x3SQ/cIZiKKBE3sSpUiyTEDViaSkooBqATnSskPRnY0zkOMfZ5zSjMAOuZ+swZnUsz5pljNc1KAYoU4SNZDPaph17SO+P54dORKFkPOeYhs0btldOFpZWDI1PRtGoZ5nnwTSajkrEW33WbdX8XgqpaSU2Amx1KLlOI9i4iKNH2LzFahafoqZbc/eQ6lFgNMcy5e0aTWikVMKyGFNgoIeR/ZyGtw17/8xc/7rh9MGlTRqjW3gZCwSLUKJIwGUioBbLr2UKojh6pEJmhtjGWeHBig5Tw7IERKKTXez1MKkYAt+MbAcpG+vwCx/XEPVsVknnfOuX7Vj/Nx/+7GeZZSycCBBpLpeH/qnfYROaRa0AUBc5477k6D9M3q6vr6/vi5QSXGy8uLrz7/4u7+7Tp2beCkqOIb1xjMzE6KplyaVTOXst082d2+GU9TCnV1cakKIKK1KmAM0WqZ01yYj4D9uvfb652pSDWRQccIsW28swrzbAW69vqAN2yjI26bjtA1TTwdjzG0DCo5rTdrqfnm/ZuLzWa17vOcQ9uAqdZcrKhUH5p5PjbNmghUKqESQpGSUsKqu92uDW2Z88cvLp5dNC5AjPEw7K/W/eF+WK+7YZocIqkFZlTJ0xhBG2aTXAXG02nVtaClzInJ55Ids4qoPDYY54fHefuP32LlnGVo9ID70Fkadja9PvODHufcdp7BnWfddu5a0OyB9/HQkn8b4sFHXOmRu/QgBDqb0Zx5RA8j6XOPc+5yHshLDz5EZxhJbYkigTMVysDQxADpnN0GplqrpJzHeRpTGVPeXFy+e/12verb1Wo6nT796MXLT75/eXE5jVNwLh2SlnK13QCSNexA55Sy6mF/yFM57E/TNKeaAeqrZ88PU9Javvzylwb1o5ef/Js/+g8+xGmu83FEE0IY7qZMbk8Ut60R/fQXb7YNvvvqF5vYYM+S71HnGNiqTUnytDvOo1HwRE3wMUYjXDXBuYDs5zxO6WSi7H0XI1IAYseBPVbFoorBQk+SSm/slKtANpkOo+Sc5rRahWQos+Va6zDKrKHvunbVr8KcyuGYJY2qio6YyLWtmTEymEYmMPFk4zytvSc21ZqnuQJdrjty4B
FrUbDKiH3fbVeQhkSBgg+5yKpt76bJC5MlT37bd0+eXKQ0Si2k4ME8upoqB5dTim33sGYrGCAwANA5X0zPgOJ/CWg+VOvDQP0DqvlBsPOBVfRIc/vAQlq+tkBDD/qyRx7H+UrONDhdYIFF97MUqxKqqpqp1KpVUI/zbj/svvryF2/efDEOd+Ddf/7JX757eyvVfHTTOKWcDExMi5TlxptSBkRiKjWbWnCeiJIUPn9SJSBiRnBMrFJqycQUm+i9Q4BV25Y0Pbl+Mk0zIbbez6fJE7Lz264f5nJKhV3bXDTg2BM7F9rYp1x4FTnSYThedBclh2Ma+j42q1ABry6f3J/e9NiJOcDYxNCvLruuSaeTqLroV5ttEYxdE1BPx2PJVbJlnUNchdgWTBzcPJ5if2mAsetLmoAHxBhjNxwHDwTqkJpShFCIQEqyDMEFAaciWisRiFaoREzkKjMBaKkze19z8r43GFNVdoE8qdk0TgY013oc5iYGLAmNTVP00ciXPIdme7G6TuPdOA19bKfT4EptOx5Oe+63tc6YJnBb5xxAEUmEM1ijyuTDw9L5bcTlbJC2rIsPDkELk5HO0eyGZyXBUjUAD4CjntdNeGxUwcAe/PkfcfYPrKMHdOZhKf1WocPDmvyoV3tYk+Ec03QmWnzrkHZ+L/MZuvo2VXMhWJy1lR9odd8ihJ5vEHjUhD7ehkTEzhEhmKGhqJiogRE+EJQIHTsEY4Mlg2chGREBITJi17ZtEzfr1fX2Yt1Gz6gKs9RpTIfTNM2zmm16/+pyfRnpouXoULWoGKBTE0WsRiAlclA0AjQgJMblMbc8L5xTdIxCVhGVEAAUSAiR0BFFYvRWYwONimgxWBkNAsoe3Sz7ZLPVlCWp5OTZnENuu/BADaHIkRwFwOCgCw4BTFREEAkIkBEVgg/W0Fhsie5iHzm2oWl5kYqogSkuRmnEJgKE7L1CMUMzES1MDyMbQDs7rus5H8kEedFjATP56N2iaOPAqM0a1Ycp5zTtT3ezlSlG9Cqos4lpwqBoqs4DQwJRngebD2w5MgSmwOQcISN7ZkfIjMTMnsk5H11o2UXno3NMBAsRrdasQCYKVpmha2LTNavNJpUyT/PhlMacpqlW1SoGCILq2ZNvnCMfPDkGIGQWNVVFA3YLtROIEB60jEQIujClF1XuAmBRVUH05/3Or0BFALZ4rS9oPKCpySI1Pj91zsWtS+afoZnAmX9nIgIiZ7PGZft0RogMFHUpRdMmdLHxjaulnE6nIwPFZovBFS2BvWNvwKYZ6+iYCK2i5ZxMFdDmPKK5rl0hwjjurWJOE6gyhlq15GyYnV+gBQ0c2RClpJSRPTMqVKgCtcxzDt47R+n2hKGz6wbj5t3705MnCK49DSNTRjAtQkjTmEOboaacBueatrtIeeLARpDmqWnaY87c9VY053m9aZTiaSxFQKooOed5zHPKCSxANWcgBs65gGwAtZRKkNVEQdIAgmmevQMXQ3E0lcTQKJR5HNarNgRtWw7VORef9OvdzethnDFPXonJrS82yK6UcUzjcTf6AmBKrYPA6mQyX0R+87d/5+3rWxYQKX7buMDTlGCao2+KWa4qYB58zZLqceU7JioJg+u/+ebL7eXLSKsvvnjvnKu1kKPO9apVFbzjeRrMpG3jOFePXq2OkwoSo/QXfP108+71cRiK7zl4X2uuVYn8/jBfbPnJ04vDONdTAk9VxBAc8fkuRgYQAUXihcNpproYwdniiA/0uH1ftiWwgJILkR5FxNSYeLHTUrVaRQGI0BSQl2cSMJ/VmKu+O94eSMDsuLv52Sef/gjR3ef65nY3zZTq6Ltwdzjo6v3LTy7bgZLON4dD5vbt7ZFYCI+b9iJNRr69fvLRNCjL7MXldOpWm0MuqZw2OeQ0O1wIQe6Xb7/axi6YSoWuXR9Px27VoME8zO9v9lcf/zr1EUl98OMpW9KmXYvMAMUhlDSS74GClOrBUh6qZXQOBXwI++FGoVn3z8JqI1Ack2Or066WGdTIkbVb4A34LQKREygHSBM6R8zIZMBGjL5VjAYOvKOF/WQKOZsZxS35XlNxnOzwWo63ZMLrS3ANrFZwcSWGtrvrgsy3t8P7d2acMdB2UzRXq6nML59+fH+8vT/ctOurq/by/uaGg++1qzWfpvGYUoHmOKarZ68uuy2+PcQ27Hb7r2/fv3v7VnNCgn7dzqUunPdadU4luG30USC8+v4ncbMRbP7kz7765NPf+dHv/Z1/+//4/5bTN1eXT9ObcV+gd9t3X8673Z+4fPqz//jNj/7e7wPD7divL191O/qbv/WqW23advzm/Xj1LHz9+vazz57vbubg7OnTi1oTNnh7O1QJHmM214dAAdxl+Ph3P705lOffvfz656emuN3d0UX2nY8tOQJNwOq6nlDh7v1OUc1v+Q6aXYabP7n/+c/W/+gfuR9cK2L7wxehX9l+/8UX3zTr3HloNl1PvRzXeZrNAQW3vW6R6Oqjaxn1zdv76Vh46wlxGYBJURAdh/L6rZ0OguaBw+XVs//qv1398b/5968/vwmRXUAtSdXSlLbXq+UuUNGaim8COULmlDIDotNVt04pW1VAICCO7TCcGna1Vs/RjFKapzk/ubgyrZKLSF0sYXLJhshMy84ImUGti+0hnfoYuzYC2DSM5EIIEZFSnmvRputAocwFwdI0VVEwnqa577o0TT5A08RZNXocT7nxrSIiOrMkBiqVkZkxTyNhLKPUdu4v+mu+HI9HmcV3jQuuVAmBFwOmPM2Owjweai4umI9+PKRpOvZ9V6ZZculCYMJhmojd7jiOU6nFQuOij6KKUolwPOliUVG1MIdaCoKlkoPrclUfGcmhSC2q3oB9hcroj1PxkNq+lzJJqW3bnuSeiC5WV4w2DlMRV9TSmEJLmqrWEnoN+cp88QbrdW9zsaKhb2RMqLVtm/XF5TD+OSqWVOacuouL4DozVpOSrU5zcDRXy6mwY1S0SpqVKIIgEBYpKU3sfPBBclGoU5oRsF9HEE3T6JvYbbcup4ClTCWKEhOoEEKd0zTsXFg/+/jl3Zu/bl07q4mUtokppC40tapV884hGqJ+/e7999ZbHxqpk+vYKklVCojBg3UKoFZSLsQeGQ1od7+L7YoIx3EfOF5smj7ANBUK3eX6yd1x2PRt38UqmlJetRVCgDYEwv279130p3FE5DQPq66vlZrQDIfjql+TIy0Kj4ApnIe/H9RhZ2HP8rUzvHNmDD10PGdxAT5mPJ3fYKpnouyZygr2QIh/aLK/1Z//KmBkH64FzhKgs0xNzx3T2YXvrB87N1hL7/Sh/z83KA855mdI4LxrhvNRzUBVS84p5dNpGtPsmnjRXJnVkjJHfn7x4pOXn3brtRCg8223ch7TMCKiEbx9c6cohiClvH/99ng4eAdI1bMS4nE45qy16M3t/e/9rd8/3d2m/W51ceGQZziqiKIRsZje7iYAPB3ys5XftGtqkNo+NGSQvWcfV6nUcc7k/aa5QvbbTXvY36aTSLVpRsNKzN5TCEwUlsAgrVmNTCoSg4vTa
RebprmIzWwK5Prtbj/v99N+qsiBqCg4ZqwymvE8ZzM6naacRKSKKamwd4wqtaph8A4M1NSTWwXvGyZAkdyuu86r1dqvL9KcjofxVOanFxerpr9q7HQ6TWVkcN77u9Px4mKFGEj5Sb9RMzFL1RDqKjRPr1+oVLd5ks03rq1AbbMOoTWVZQdEZzqDLGSaZVP0QMN4pArheSe/0DwAF1fpsxzmjDb+qtzngc9hCg8FDw+V/QhLPnTv8NAjL4VFZ40knkfNhugWpNLMRKoiVJXd6f2X3/z167dv3rz52kVD1P/0x/8JSR2zmeacqomheodlrrlMBgCqzCSmagIGxGxAVRQMBEwViDxzcBRyzXOeiIwcq2n0DpHZh9A0llMaJ/Iup7n1DObmceJ+Rc7fvvn85Wff/+ij519//VVjvF2v27Z1IRJYLhr7ho3HcZxH7QIcJ/5u/+z127+66Ncvnjz58hdfbVdXUx5WF5fNkxdoSfTo47rvNwLsfPQxoszsiJBi159Oh6bmKprKfPXq6Vwo16IGfRfZNA0nCjPFPkY3DadIHRHXkoEUCVhxTCfvHblGZEqnoVmtTEvOELqVilJspVRUIKI0jw02KFbmRESr6GfR/cFieHK6f39I93ztvQ9VTWRmX2O8lEwW4ub6k/3rlNO+pNE3UaCmnLo2jLv3MbRCE7cVXWuiUAvAETES9w/EnAe45xGu+cD8Oa+OD1w3/YDjLOvYtwS0DyvqAzYJCA88oA/L2Hmy/MFIixC/dREPytxH7OoBIkJEAP4ASeEHxfHDcwDOiM8HLp2hqgEu7iqL69bDNZw5ng8o0sMa+8HVSB9YRUiPjDtEMJNa5ewuDIvZMxEhEC5pP8xLBJwZECx31yLHQB9cjGHVx3XvW0daZD/M4zgejqfDnIaU2LvnF42VSpeRjWdahMdC4BCJnRMtnkjADJ0Bq4GBEbIjZsIFDjI1BwiqqNVUiAlNmFhREAsC++C6rqkGJgqAVUCUuapj6QImIzEAQgBkisSBGFEHLC5AWEWOaG307Cg455kMkdghVM/ekTvDGNGzkpi4xjVd23RtbDuulXNxAFgyIxsyPGZGECn7Wg2gqlYmjwhn1RUtCV9ABKpGBOQYAIjIeR+8d84bM3BE0+i9IK4vt/PxUsajgjlQ0RpUVEFKhlKRGpIQogMAraOhoCNHFrxrouewZJg55zzTwhNjRGYXiBlNSCsKKmC1xa8FabGnjY1j54I3JAWqNUvN4zBPOU9TTqXOpc5iRY2XD4zEzi2ID5AzrIsxIpJbqEMmy57EFq4fIS0/5OVlRIRILngARMMFu4JHqGixunLMC1YEKstAQk0QGcBUynL7qpzZqqay3EuLElNlcYYXNJVlxwFmgCbGhH0TPZNHEi2pZFDz3DrHS1Bm8A0AVVGHBrW0zCq11FzKvMzehmFw5F0IiDCejiWXJvamWDS50JRUDBHAaRUEdt7llEFNdEEKDUSVrPGhVCVGY+MWTatgalafQGh//dOn+5u31uae0nj/FQ6i5tOcGdkyIPDT7SfF4uvb++jc8XTs+7692gxJ2IfonTCOonnxDWPvQjOPp7YJrIqkpLpqr+/2dz7G6HAaxpqNiEyQiUoRQbt++uR02JHT6yebeZ6fXF4Op1OaLY3pSdvvDsc2BE9Wq9o8fbPf9dE5xqZpHeCU5Ob+TjVLma6unxj72Lfrpn19f/vxRy+61u/fTSWXb16/cc7H6NMENzc3Xd8xc4hBCaapnE5TNX666j1zhqzsYtMc010eBGk1HGew1HeYsomhZGDmJnKaxyVdIlXVpKZGJG0bd9M4jrlp/GF/uui6ewYOnFSHLKs2lvmEzFLpcMzbi/5ys7G6O40ZiZMA0JLRh4gkqmCIyAbnRFU1ACARNQXBxWEBgEAFwIyJzUxk2UOriaIBMKhZWeIA1YyQkADPYYS8qHcBAGC/P+RUYxPZ0Ucff4aOaq7O4dv3X3XwrG3bYZ59aIZ8GOs71+LudF8Qs0WuNOZhu7m8vcuptOyaseRxGPJhvO6azSbuDu/X22tyfUrZSlbDdc/EeDMfn+fhxeqiWVktp7ZttNYhpZzy6vJJJT6eRteGogDcuHihmjTtnW8UjOIaOKgZqUiahv29mfq2O+5uqc7snI9b4DClFFBFc3VWy5DluHryPQxb7K8QGplGzHfoZnYtogPqIF6ZzUaTESsGUxID9g40L/H1BhEoVOxFDKB6mqsNgnP76nmFTgyt6YV8PQ1cax13h9fvchVsV2G1vr3fsUF3sW3dtaBNeW7XF09ffKeOU51T9DGuujQd1egwDc/W690M0zSZXzfbbfBhvh/69cUT0TzcpUyiRcFyFVNhciHQk85fb7dXL58r9c366erqM4ivVs+ff/1+96O//3uXz7f3b+/I9Bc//umP/sHv74+wevHxx5+smi/ed5edfx7+wacvb/bzaZfsUI9taS/t6zd/8cPf/4fesZrth6l/sppKNq2t8XoTtJoFvvztT8pP/0K/mXCqaRu8xZtfviejIhMTjhnWG3e/T13k8T5tescEwHDxso/Rlz2cVqvm+98Ht91CTfc7wu/nMOe77GCClr7zd3+IV3D6/PU3f/Hzdv1ynvJ3/sZLd4FzzqfdfHcrnK3t8XCfqfVhE9G7cT80wc/T/PTZGgG+9/14c3v43t+4yunqr7+8/8nX39zsdt9/fnG3vyuzGLJwvLi6Pp5uzhsjghiDLnMyBce+b/okp7lMUx4btzbCIpXAmiYScmDvgzsNJ3Sw2q6TiWdWsCIS2CHiXMa2XXsfpnFGT0jA7LwPl+ECTMUATMQ8O0LDZboQnLN65nh0fStgzIwdn8ZJAZ2qqDUN56lqMediBcsped8wEYXoQxjnoyiI4mbVA45ieYnp4oCRfZEUg+/bOOc8jlPTtH3Tj8NAhN2mnccpl0TMWtXH/nSc1tttno/jnHMGM8iCoel8pK7vkpZ0OAWOyKlt/DxXC96BxbDaH3eoFrwPPtSSTbXWYqJGUAVUkYCOx4mIiTCV2cy7EMGoFOuDb1H3h51UA8+SRQ1TrX10aRo9cWdy+/Wdj7FrWynCZiWVvm0qijBePnu2uriYp2msZc6jc9H57v5wF4Jr+/XpMGotNc9oSE1UMTLybb8/vNtcbe7vcjXbH+5W2wvngiI2oa+Sc8nMsLABTof7qtauL4bDrlh1TSMlVynjJF0IjmpJ993Vms5pvgBQHcaGHSJ6ImEtWp1Iv12DdffHyYf2qsU551Jkte6VFMCMoKIiU+jieBrneRAzahoKLCmvLtbiw9OXT9Gw446axgO5QfI0bGKb0iypzNO0fdIAqWfvnauK7ENVqWXO85GcN6Su36R0JHDMPqfxA0gDj0QdA8QFYqElrgbATB777aUjYGR48HPEh67g3Ck8mAAt26tHWc/DWP2hpT/bvdpjv4NI8KB+eOjDH5oYfEiyog+sJFzS1h7a/2/bgyyaIz2TPtDOf5zfssQm25J9a1JKPg7T+7vD/jgAyjQWNOxcfPbs5dNXr27f39aSfWgHmVKZnON3N/fv371P
KcUuDuPYtmF9eWmepRxt0pJkGCY/qhRBT5eXl2/fvSvzEREZ3cWzZ7v7O/D85Gqzuz+AQQbQbqXO1h+9+IvPf/7q+vKqibvT3bMnF5Kn+/vD/enu6XbTejqcdgG9ll5EQtes+jaf5tM8X19twcwzNU2XSnXOSU5Trs4775r9VNeb61qGr798E1xIuZpPU65ZYc61phTYLVYQ21U7JMkqCABStOo8zs47U62lMJPIQsoSMmCAq4tV6+M379+uQmBTEClWV5vet4EcqtZN9FNKw2naXKxW68vnm9Xrb96ZwKuPX+ac9rvhPtUQ4+WmdY7ZsaIXs9Mw9v3Kx8a3bbtqkV2plVXZoZki0CPSAw/MMcNHtPJceI8d7wN6ZI+goZkB0rlcvwUvPrKIkM4Y6KPlCzy+9vzb+TsGgEBnRtu5OWcww2UPpyZWDVWo5Dr96V/86dfvfnbY74/DKEUOd8NXX/zlNM3Oe5EiNRloLbWWIozjtOQpmXdcq5iZytm3a9EcEBEzqxhzQEOpk9QEYN6FmiXGxmNEJCtapxlB0jjGvg/BB+9UxIXYr9a76dBtuk+frufdu56x8Y0PbREMHDTlfMiRYxOb1uCYdm7dW5Gv3uz75mqEXJIlrcFxoNBfXZoLWqs6H9YOOKgBe++YahYC17TtMB7Eyu40bJ89vf/8bT1Nvl2Zo1o1pQkUXWwEpuG487HzoZnHoWk6BFIpIsKM7EA0AxEpz2XwEj2gi02t4n0AYyhzyTk0IaUh+JV3AUSlDGbCwKdjstVG1ecyn04DB9f3rRjnOls6tbEdc2XXbJ+8fP9NspLBU7u+TMe3NRs365KGbv28lNk1a1QCKeBUdUT0yO5bQPdSgufyeCigZZldFsAPsCaCwQeOwrdJQt9SWsIDug4f2EMLa+hBHPOt6sXHMn2gGT2ujfbgjETnKzv78z6usY+v1TPDDh+EwPUh9n6ZYOOvnArOh8Zv2yA9SufORz6DvMiIBGZqYsuSf35gABiCGntmZkM0Q8+8wF8L7dpEGTH4EHwMixWyDyaaC4yTvLk97U6nYgImkWyccU/WuVIq9sH1XeuJiNi0Lg+tCpoUGBAR2XlbICpmNSVcDO1UJbOBKjAHBCRCRAUoBoAUEb2PfmWKNQKoVjPwOE5M0kaqguQcMqkBOF8UgZEdcXDBuY4pMreNY+eYORA2MbrI3s9EiKAE1sUYPGkXzRActatVjMFH70Jgn1XlrCEhJYeMvlYBQGRyiNmqw2BamZ1oRXJIrGZElRgZ/dnGDcB75/wi4DJBAVQmBoXgAwJX4KyEBlqKSSFVKObLDDk5rh5bECEGsKpO0DlTc84RU4iB28bQISIsEWwLQ5iA0IhAJYsQoiNEJmQkckTsXPCEbvG/N0UpXCt5dm1O0nfzPI9VZrEpi1ZNZXEbx7PwWOQRr1zGAshsCx5kDomYGQDZ+eXRwA8yeXILSEj0sIdwj1ULCKLyMLAywsWQWs80bDM1tYUdp+fRASHK8hAFRgDTiqaIwATEbKZG5AITknfEKFWSVSFQpoCsxEoMhGxAtWr0HlQUFEAQNOWRTKXWWqpWaJvGoO6Pb0k8YxCDKqUqEqtjADVCV4rUkoQSGrBnRp+yMkNwTmqGImRODMssCHk+pTa627964z3d6098dMVys+kt5XQcyHTbxTHPRjadhtMwdv6p2shu3bVdLQAMuRSVcDrkZJiMCajkekolZvNMKVVUCavYP3kWyHewklrBuwaoXzsVnOehXa2mYc+IkzjwUUvdHVN0vLs7qkrwfbNCwELRIUVEiixktlo/UU0c3TSNWGuq9cnlZRefnY5D4xhiyLPcnPahj7fvb6aAr179WsrT+93tMGYF7ZtGIZpJzmIIBhKcc9BUxZTmVISbbqhD0tpswjxVh11JJ6S6verfv74jCGA0leydb3uf06yG3reOcS7ZQJjUO5fFpiyHMc0ddJt4+/6o4CHb8xdPven9/QhEOSm72TexW3W5CgogcpGznpEMmJwaqEERIyJFqyKLX+EybBBVRJSqCERIqrqYPsiZBAdaF8dEWXbGhqgAYuCZnHfec/Dex7jcBX23alpNac4KF08/Snm+f/embcJlexXwyeV2u//yx6v1E+O0v71lbdp2bSJQldhtV58S9Bxmp7lU7cJ6tb5ooQnVbt+fTmkS2FHTd7E7Dm8oeHLUreg45a/u3n589dGwz2C02lzmPKqluVYwRKmlZOsiAjneuLjB8lbLrNzYzIQ9sAN0qnMZjmmcjCDbXFRryn13Ebcb9q4OM3hAp6fh2Hbtpvsodluwkm5+lo+3rgqyxYsnBCSWyQWt90YNNhcCHgltEUBqIs/oWlFFajG2VhVFXBnk9B6yGK2tuUSD0LSVeinCKt5k9/rr493dXKTZch1SbLvgu83V5jiM035fTvr01ZO7m3eN8xfbtZlMqPsZhyE9vViPMuUhvXu/H+ndXZrvdwcVAIda5j7EmuuUppQSRTbkUkp09bd+6/tNbE/HzG1rEH74X/83PjS/fDMGtlT13Wn/n378h3/rt37/7/4Pfz8n6Z/yVz+/3d3bv/p3/+l/+If/dWxDbcu237z64Xecwr/8n//kX/xPf/NvUnd4PXz+xS9/73d/KBuKnf/qr981ntuwjQ5LkAqiCse7w5OLTWzczc++7J4+++mf/tFv/8M/6LZPxtt3fUWe9WLlAWD2wIjjOKuo95SFsaPLH33y9f/9X67GL7cvt+2r63LB/Xpz+nzoZhhvditcyRfvXC3Pr67bV9fZdKg5f5VZrX+xevmK9l+PLOn7v3VdTWYRQNpc9FJk5VelWMlKlCWnNOTPf3F3PO3/T//j/2E4nYb7z/e3JyxQci6laD46emgP1Ji5Sg0ugmEq826qjEiEDbeBIctcpYL4xjliHKdkVgMjGTddn0sp8+yapu/XOc0lS9tsUGEeExiUuRDSbNk5jjHWovOwbAWIhKY5heAuNldpGJznouJXgSzPqVjVquYIpepq1d/f3hkwEMwle/ZLi15ScQ7VpOUYQyT2hJjLCFyp2HycShEXeZxnrZZLnacdsWOmcRr2WqJrCMgUhvGkJTUhrq6e5VLAZDrcnEatBiJYxJzvvPOE1IRoFderDRkD2coHg+mY5lWMQCAEJWlgZ4gKVFTrNCOiBT4V4WluvSuSLi4upc5IcCwZa3VcSwXXhKnuCQuFwATTknAk2UEAdjXn4Xgjxec6n3b3FxfXp+E2OueD92JFZ8fUNE2pCQ2nMXm2e7kJbSxFcU5zHZUMmGspkhKTNzPnomiZUgltDIQGajKfDjNRiLFzwRlRkUJITefmMc3DcROv19vt61q7wHBUyUhEVQqjgM7TyKvLp9Pu3vtA6AlRQUpNyC7lSU/D9Wo9lxzaZruJw/D21eaFR97nuy2vlFlFpBqha5oGckppBkdlKjlXUO1DMxUaRYMjAp+Hctk2X+0PuZaPP36BUC9Wzb5Wj0RUthfr929um9Dc3d+DaPBUwdJw6PpuKLnrNkCMSP3Fhk4fZs6PCgh6sLE4y3gWH/Tlywh4NgB90PA
jPQh/Hofg+K0OwT6c4MOZlrGcPnA7HjuJb13MwkQ6O3osk/BHJcdj13W2vwWkB8jqLBpaNoDyMPl+aMO+9b3zEURESq7jNN3d35/GU54mZlit1mkuwXda6bA/hSb6GOdxPAyn03GYTqfheGTHLrjD3by5erK96A73+9fDaT7sT6dkYKFpmH2F8vzj5w7w7bubWrIUOx7uxzIaqlU7HocpVzJDpq9f3zRsb17fvbzs37wfbCxXPbzfHcq4X/nNRxefhc6TTU8uO1QcB0ljgaCOqm/XT5+8BNBpOBFzMZQK83wMwV9tV1Mtp+mESONpJtSry4uczHu7Ox4JKBgFoKunT/I8IFGxmRwb8anOpdQ0zkbsvUO2xQyBkBaVhvMsuSLDcThlPyMjee4Ya0p95wj1ON45x2i86RqwIMXuj9P9cRjnCYlM6uE0gEj0bMZGdBqTUXlyvW1bKiIxduN+JjfXWfpI/WajxKLiyS8kUjNDUDzziR4xInjw33j4sZ81h0tUPZ4ZEA/kuQc2xAdeET5aFZ3pErC8yx5Nsc7VhI8UDzyX9mOjvOhxEBFVRVHBapFye//mJ3/5n7/65osEcn9ze7FufvbzX3zx5hvUHHwntS7AEJiBqZhItTM3XGoBExEzpYUwbpKlAiogiygzV0lqlc7+uyxmPrbORwPL8xwd11yItdn0uZaAWFWQ0HEY83B/uP/ux68ut/3+9r4JTdetFZkJHdNYJiIJkXKdmSO3vt3GcVfe3Q4vnm/f3I6XDW22V/W079dtu4rGaEmCj4ACgO3qCk1joHyYkBBRW6Jdka9vXm9fviTiw93t9cu2lGLqXBvm8YiETeuHcfAhEpBlJSdApsqagaN6T2nKRWqIUfOp5OzaIJa8u0h56nwragRE6EVmlQmZAMwjHXfvjd32ojmkjA7BaL+7a6NuWvLcpCFBunedtO3laX+M7ebJi4/r8FbmA6xXrulz3q/9paQRdWYZtKycW1sVBGdqgGJqSABi+Lj+Pa5rDwvQOQIAwYAeKDlnH2ikxWT3UaCLCLC01B+IbwYPYpfzerksdg8l+AGRefj9V67ifCn6cCh44C09YqkPr6UHoc7CzVu4EMuvZfRM+Ai80pl8tFzO4qHxIM58vJRvAbTL60DF2DEhMtHC7kRCM3PePzx4iBiJCUyZiAnVlJCZ2cU2NGsij2Ipz20MVeFU0lQKEbYU2DQQBCQRO01VlWyhbwaPSAhsWQkBHYqBQ3LswDkkFjMAY1PNyaGYVFI1IkQ2MwEwFUJi5409OOfUgVZgtOgAAgGBq+SDzzkvkeyL9TQ7RVByChYjO+9jiF1sIhMhOkchuq4N7FyL0COijAu1yjO0XUTfEAd0njZX3HYcAyh571WrqgHzQ4x7dUSmQGbA6L23UlUrEQEywCKxUzFjYPKOyQGjKTZNQ4zeOQDw7IGYDEBrzSmdht1+3A/J5SlyJc01TaZIUpwJaPUqxEyARmYMSATADsmRek/kGF0kIjQh59E7Cg5ZAaopoVjNxIimAGTs2TcNxxZ9gxwWPbFqNRTgEFzDoSklxzaGlJNalzXNZS6aBIoqnMMrEMkC42KwRYTMbnG4QyIgWuhlwERmj+a/iECLMT4y/5e21g9a+EXVtojLVOoSGvWwZJs8ZAqoCjEtfjGEJIs9OyIhLy+mpb6ZPbvF4r1WIOTQBtKqIqKZF1WxKpgwskg2rWSmoirVM1dJNWcp4miVcyo1aUVyrKI5zYTknJc8WU6MaGaSsmMHoAIFDVUyoaU8TfNpnmYpCODBoO3a1Wb76jd+EMkdbo6EdbcrbcukqrsjjON0v2tjX0vOdeSu6b0DjAbTR88vx2lWg6brbw779WqdRx3Gkk1d9IGDlrwKDlAdYa4wpZod5zKddu/ZETuGAghYpYomFT3d7hrvcqpjnrpIAfC661iLRwL1PuA45FKmpo0dNfNpahw+216+u7txzks1pigmz549O56O4zA74ru7UyXXhFBSuV5fxcZv1v3bt7fzfOzW/TzP6Ziqy33vZZnanPI8T92mceS8sSqCo0BQxYkUH4JzOI9Dreob17Rd2yc7SfFcFEXUew4hHqeBtXHmHWVDLaVIRXQBxUh4HJKANQ7UbM71lLL3aATTlGPbpjnHJrTR5yZMUxVR79hUbTFO0zO5nh732YQL93MxWTcVdo4QTUxB4WHJJnYGJqKEoA8A6BKCaEzOe8fs2CGic949pN7UqheXTdlp1Zhu66kUIn7x7PrLb3aznFJun2wv3t1+3V9eCPnoutZaGA6BpZQSQtBUT3W8uN4cdrOou372qk00Dzc5G0S3G+6iVg7Oq2fwmu14d8yzQBeGQUNYE2lRmXPOqne7u9Bd1+lUJNq6rbkCOpUZ8gxE6CJ4Ay1mVuepzlPOsyKUMvf9WqVj7ENsZTrWCfM8V6JxGLu+laTHw/3dm3egM1pm5m61dtyIsqm5xotOBE5dyJVsiZjSxQpJLY06ESMAAQAASURBVDhVB8CGC6tLHQjYbJKwuwz9hYJwTcBsUiXNVOeyvxnu3s9ZwuqS201Gd/nsajppqg4EVe3jH3z/dDpdXGxqKbCKb1+/KxTSIHXSCjkpOlREmafb/fE0jfNmuzqdEihAs8gKhRyZmdQaUX7nO09/47NPEOV4Oj3bXIUYTse5aeKK0XM4zSeY4q+9+I2bLw+3b8tv/sart9/cb4Nttu7/9n/9F3/+n/797/7tv1WYT3dHEl31/T/+u79Tk6Vhthqv1y/e/vL07t3xt37nVedjcMzkUknk4LQbN+16/d3v6ukmdBer3PaXzz/9AfrcffPHP+s321/8/C+/+3s/CsxAcHkVnQFyk1JpgvvZH//85dMrcf7J93/YhNX07/51Mwu6rX73Ol7HEPvw6SXcjONPv1z94NPw8sJakJJX2wbmyArjOPmuXcWw6rrj29uMpf/s+c3dGJomOmawWmrTOgV98XIz7aaG+W//7g/+/Kc//+av/trg/nKzHo+DA+zW6/mYkB+7C5hTYsRSMpEDtFSlC42qoelpnrq+8VqqoPdBrQbPzMzsVdN42hNzCN5M55wXzlAqpQtRqqCRDx7AWCHnIqoIFmLwPkzToFq9Y1Hdnw6B0BBrLZ6pKpRcQ2w0ZxdcCM08T5uLVZa8WTfTXI/7sW26EIMqhOBFZJxnQozBM5OUuW1WEouIenRdt5a6zzkjGSESURO91vLq8jqnejhOtcpmu1bpCWC8v/fkpWnev0tqEBoHYJBRiDx7FsllRnIxBk9eQdLpGNA2rQ+ep2H0hOBo03X3wwBLaIkIOUaPWWoSqnm62HbHcbCcXNcoeayVzHW+n/J8rGMA18WmSmIH2Q6R/DCPiG5Oxd3vZ4kvP/vs9ZtvYgxEjghEjcmL8XQcWx/3pcToiqhqnVKl4CWnMifRigTRhySCYMwwHfchtE0byLDUAgZN8F3XzcOUcx6OOx9CbCL7cNjtCRmJ58NgVa+eXFy23XjcBQ6uBYO6hNU6tWF3uthu9/U9mlJw7KlaXblIRsfByLCKtev1eBptntdNOw5HdKEJDb
vvziT8KYQhqQsGnr1GlQ/PWru6pdtW5GAK52dQV1hVjcUOCTT579+ps/PD09/+ard6vZiWtsLM5aj+RmTYXWCBRn0CJCwTHm07m73odkEyxWP312vn57Vdk6dfur19/OoVqdn4fSu4rvN5fO4OmnL+LQkaQcZb48dd4Yl2IBb/w4dFBGpiCCalQUsnBBw4adq4pSQRYmJFexNeKdaZMMMWVGzkjV6cVscfHqbrvZ3i1aqmeNqecJnKuqqjoq7JHZeFfINL4O++38xcmJaX7z+9f/8Bcvzk5vX3z2WWX43/zzv+70fllf3L66nC1nmcLV7eb//t/99f/yZz/64o+f++PU7YSBV2dnZBWNKSn84dvvm9WXZl4Pu3FmWxp5ve/7IVTsxm3OkKSQDnkXc+rLspnvw9WRrEqIN9+8uvjypT1tn312PF7vaSyb224I4eXRs6b2Lz45ds7mQWLoSNBWvLt5ODpbUQ1d352d+OHVg8SYosTd3s+ram73u51/cSJYpS1WR8233/7u4iefxiRHiwZZ0doxiZ9bLaBqT89WJYyL+fzhft3tOkV88vR46MNs3tJDf32XX/1652c/5TB8cfZFfPLtzdU9k+y3u6Y6bAn39xskqusqpYKEBsCA9db1Q6fAMY9EQAgqGEO0jkWyAteu6vvOsjGGu26o20pBjYoWEVH2NuRgyYnKbnujkkJMB8NL0ZJThsIeyTalFJEwhsySK2MZMI4js+y3D6ujo7ZphzHEIeRc6sqSuhQgx3EvHTIuZlWMPTPdr7fL+UyzGnL7sffkfOPDOBLaYdNbhPVm2zSzHIOKNKau/SLJnhijyv1mXznjnBXJbI0lFiFQTTG7yrb1TKRQzM6YVJKiAJmuGy3OKEO331e2GfajZJ23C8opjAOxoJJPev6k9ra6OG7/+m+/PT9+8nB/b2duNmuN5zrledMUzMlm1HS0aCDsc++ttaEbTD3rt+uq8UDGQa5czP0AyaqCcQa0EJJB2A6p75KfO0mDZLCu+ve/+6uT0xPN5nj55Or2bhj6ytoUxPhqCCMBimrKGnoExQLobAU6DGPvbVX72RgHBcgp5rwFNLZuUtEh9NbZnILztnZGQ6idnXmjaPZ9jDEYq8uFv3v/7uRkMS5Phj4g68mzo6xy3w2umgctx6fnmWG73j/9dHFSz371t3/5/t3NvJ0x+xJ6FgI2BZOijnk4Xl08dA9DTkftMsZuP3RpyJ+/eFI5yDEvjk9iH+tV68hvQz+r2+16fVzX79fvX9oLhRJznK/abtxKKi+fLX/z1baUxNycPTvedsNuv189OXkyW51fLu/33fy4fawN9OAlNAEsk5BfJwHFwfNlkl89li7TCWxqjukjbvSo3nlsaB/Y1nLAdJAYf6iz+UCweDzRAYDqYx75gd/xGCUE+Njn0B8mpSNODKaP5kdT9AjqIdNHtKSk2/1wfXO/3/chjLZq6oZCGrKkqmpur++bpkHiMQwx5rppZ/Nmvmj7Ep+ePk3bsOv2J0+ehC72+x2zemdmbYtIktPpi4vlsvnVX/62FNht+ycvL774oy/3m13dNpLSw3o9X7R9N4qKMuWckLhydT8MyDqd/q03o0bLlFISsWQYDNXO55xT0pg0a0bkusZff/eVNe7s+PTm+k4D3N5tZK4pxCHcO4Pz5eris5fff/P7bre3wCq0G4JlQ5aylpBzyhkAfOVDli7mZlbzfLb85Nn97XroAigBad/tMWMRvVnvZmMWTdfX17tuF8ZUzerduCXrXnzy6V//9V+2dX11d422evfm9v5huC5DieaPvngRS7p8GG53m9MnZ3Xb/u6rb0EEEMuUITYRFQ4jZaKl4ST+fWyzooCmUhQBYhZVQBIYAWkKGCJiMlGJmJmmxOGD5wQeKEqPBfihwIZH03Mi+Uj1UJyMRkXwQ/zYhyH5QccDAIRw0LUdTvwxQyrld1//brO/i7kbugcyZdsPDw/XwzACSlV78m4ch6HvYoop50chDggqHLKVP+ICBzRW4QdMq8fPBcpBwaAIj2LPw9tDQSYpklIhAWe5T3migNDjjFCcDK1JVUsu1jATlVyePX1SeY8lrE6Xlq247LiM+zUToKSr25vvL68E2bH98kfnJ8tKhsE6k8OgSKnkGPq6XdX1vB+HpmYkvb592O+2M+/CfkhJ6nqeID05PWEmZqtqfF2jSr8eGEFF5rNF7MdFM0OjTeUx4rypNlf3bzbm67cPP7uYD0OvUmazJaDGoWckNk60SI7oXeV4GAZHPIS8mK/241qySpGcU0qjljEN61wKEo59nK8axJJTsmzYMpnWFCy5zznk0LmqVpJUer84K3uqDJTd/v7h6uLiRwlkHHq2tfUooCUFAmZTiZIaNmhJgsqI1iEhlMJE8EF0ddDj6vSF45SODgDEQPS4oH0Ych9ZZfg4QuFg+g+P1/zY4kUg+EEUPYBCkY86tR9cGB9/HFZp0I/I0sQBUkU6IKEfr3BAIEBVixR8ZDox0wT5EwAyMx9AfJhAIpEyRVEd5vjUpkcgmt4BmcnRZXqsME9+xsJseQr/RmUA0YIIgIUnNxhQQ+AZa4cW0TA5S5qTCmZVQ6i5gGQsCdFM1gTsTAkpapLAUFljDJH1zgGbHOOYBpRsCE3bIDJokZiQWEsCQFIgMoBkDdmauWLiSQcqmIkVGJH48KESG0AgdmwMsEViwxURG2PRtegacp7YEbIgwsGOFkBFU6QCBAI5as4oEUGwAIEQMBUx7BEIFVSVlAG1aAJAFdBSUIAUUEGKFilZSi754HxTJGQVBDBoCIgiQMwlT9WhZtEiEcUgQ8mTalKICCkqWGIpmUDwAP1YQCHIxpCQKqAAomPDdc7RJjAMLpmUgpY85XcTGWWvQCgRsBCBKjIZ1AmZFwbSlFRAcxRQZQfAhIf/CEiAaApLnSYLoJQCIhIFjQWRLBmKsHVoayADxCpUQBSAESxhIRxzTlmT4mQyDdNWg1AmAOeRbcxMlPMUADXlRxUo9FgdP0JFNAF1j6LOafkFEFAVmfSWgARaUB/zMiez98f21/RbD4ccERVAEJEyNeVERSGXhEQpjiDIKs4ZRElhgIl/pJolsDMKJDmjrYoCWrZ1m2KUlOazxdz72/vXgAQkYYxgDrqj2s3iEOezpvI55c7Y2XC/lz43bVVX1RD3SUssWRM4pKr2WeXqzeWw6Zu6OT0/engY3735drPZgDGL+cnb168yUFP5fcoXF0+w5BALsjLZDNE5N6Ld3Pd1W48j7LvNy8+Pv33YbFMw3pQQDBERqKFUAhEUyVKgdj7nGGNoKm/Y5ljGbr//w6/7O1/FsJzNhXW/6ZmbgnR2fHH9h9/MK3N0tJCHu8bWAFmUQ4m2sZg1R9zsxnkD1llyhhtzfvzk7uoOrHE1Q9RuPzyMZbPrDWNbGTBs6zpEHEYpqMvWsavHENjQEErGrMZdPD/tx+12P4zD2DTe+1rTZHEldW1CBCUDMeUgwARk1tthuWqGUHYpvr68/PTTi/T66uEuGstsebfdKpTtIH/9uzeDyBcvTj97cfGrr15p0Mp
xziwIZVojCUUE2KKiFM0lMbP3jKCiBRUmW+tDp3QSAYMiIKMS0aRxJMIsmQ0eiKaqiETIStMaRh+Yo49nEp5MAeCg3gQA2O0HUzlIpcXm21ffzk6ec1Uunp2Nql9f39mT5R3zjxfLBcF2u1+HvqmWfnk6i12KpY9SzRv2vM/4Znj4x59/Mb67bFcLP7N3u+HhepO60C7mx0dHcRzHOKZUassGkK3l6mgzlK7PqHF33XPl60V1cfrk4sWn3e7h17//d5++eNk471en6GT1ZFUVWs1fzNrz26uvQirOVyJCTMbAetx2GI4XK41REjjjOJYcxqapUZJ1kSTc3t3/6ItPQhhTTsd+FcZYHZ+AoVRKUzekklIKJfSxd2hCnx7uNt+Zut+Gh7eX+/XaGLdmNEerGLXfFHDLPu5iQhBb3Grx6ScqkIYh4+hal1JIGlIW42dYJIxr3ywUrEoREGTWAqZpd0NvfG2sLQOmPCATKaOj5fKZcjtAtTprl/OTyzd/UGvt4oltFzmE+ep06KJB7IcA1Qy4dVX9sIPl+ZPVmR37FNW9v0/Xb9//29/9/vnFZzdv8x//0XmM20Spsav/7f/mP6UydrH3rZUiZFgSPWz6h/1DZasXn58nKI01x6ezkRMB1RZGoqautvvh+8u7Lz+9OHu6CONgrQ99XtJTX7t2sRjWOwkFBYce0nqzfdieffHl13/71bd/+93q5bn3Jo5589CvzpZSim388tmT0PV0Gaq5I6/DK2meXfhZO4z78Sb53NbsypiyBvUF/Pz8s5fgQUb8+jfX508b22Q7a1xtx13cPKzJZO/c5f1+tWqL5n4Y/NzlLENKi4Wrsvnn11//g//4T//XZ7/4P/9f/pvzo71zdsxDDHE5t9Ms8JUTmA40qa4rYh72QUQWy1W/HyWrmGycYfJZZciBCxuCQbskKUp06tiwqlhnU4lJi3NOEHKWyXmlkEVEX1stMOachhDH0TceLaaYNBRXUUHNY2gXTcakqGC4Ws6HOPqqYkeWXUzB+jqCFc6Na4YwWGuSlvlq0XcDEcessUDOqaQYSnLtsqrqnPIwxqZpamnR+Kipdi7HtOvXUvKiqivrcxcq3+yGXhWa+azb9yqS4mitL1LiEJnQWLPveiZCIEFoj5YSB4lDOzuSFNFaIFM3DFKNox9zKCq/+Mknu91uuWyGYdvWtXE+QHlxcgSIoli3s1zEeruPvXdMKCnkVrRqqs22z6mUkOq22W73KYdmNN7Y7WbDzhw3p/frLSAMaSS0KScnbrcLdTsbh1GTTX05O1nut/cAZJwPKRMyE1ljQXVeN/3YhXH0VW0tpTR6y4BGwfimyUUFo0gahp23dVM3jTP73VqtVQAsEoe49JVns1gsFvP5+8vbLgzdvgtDdmxff/u2OXouzgbJZ+cncQzO+xjy4siFIY9DWrZNbfDf/uX/uN7ffvb8Re0akSJjtpaGOKQwsuembm8fHq4fLo8X593ddrvZ1E1tyTbzuu+iJcusJWfFEnJCwyrFGrpbPzRV0+97UMNMoGY+W91c3VRV/vFnF6GPQxTCcnK8uHx30xdYOXdystp2cd8dzN1FQFEOBzZ4ZFYAPBYQE4tHHouJ6RD76D896fQPNU5RxalqUoDJ+QgPQNOjwaNOvezD6zwSTPRw5UPcDwCoFvkgv8CDfmNKrZoEGAfY4YNw6QNDBB/vULKIQN+F95d376/usmLTzvf78f5+ve93J2en/TaUDFXl1+t9iMEZ27TziydP7x+2x6eLsYsMUs99u2jHrhtSOTk774bQ9TFLPn92slo2b79/vd+PIjJvmufPLvpu/+7tq6qyIeWjk6NSZLPeEyEzR0nzpjk5Pn/z7hUKhlyyCJOxZGaz5vb2NkRiYsvOWqeClkrSiABGyTlLaMcuXYU7b9yz5Xy93sYQfFPlbry9vOn78OmPP3v24pPvv/tWpdiKmsqBSIq5apzz1qrZD3E/jiBwfHLy8sdfcFV9/fb9+nbjAH78/NNXb76dL6qxi0Fpux81Zmvx/vq2bqwF9mBiFxThcnNtrBXR3WZEthkwChTC31/ebmNk63/xRz9a3/e3l/fdOALZkkMpWUUOkWSHMlkP5fQPiBUH/EhBFLRMzj4KACqFEB6F9mgYrTHics5gjNGMPAUTfbjMo5hnarNNg3giGcnhPE8AcDDGUvhga/2BiqGHKCs40JImlEsmZhO+uXz33dvvdsNu/fBQN267u7+5fhtDz9631ayp691+J6LW2e3YT2U1AiRJU73/QS80vQ4cyviPt4AfOXyTm9PkSPv4VD2ATio6JU+xYUe0DamQMEzkA0IFBiQkUgUpY7c/Ws11HBHh808uxs396qhZLef3N/fE6CTDKMh22z+8vXlfvIk9rGr32cUZiwpgDgkJU4r9sCVjq7oNIUJRb9z7N2/7Pnhbtx7f9XvveC+Dn9v5wmUREMy5GGuHbh9LHLveCVWn7XdvLz9pZo11I5Ta0E5gOW/HkP+w6Z+f1DNvs+Bmv7EGi4Qwkq/EWK+mDrEgoWHa77cEMKbkbN3DYIhSjF3fzxfL7eaW7ZKYx6Fr5s4QdtvdytVACJYxomZxxkopmkbmltCgMoIBlbqttQ/DuOX2GMpUpyYRMga9q8YwIjuiRktRzUoMxsiBBiaMfFgH9eBcAnAIXz8gRKSKjxJEPBDKDnPg4yooP1gV4VEj9gMg59HwBD7Qk/ARINJDCMHH0fwf/A0fMc8DdD+9yvTnP/RE0kdaneojlwIBnTWMJKApF0SQLFMW18FqGWgqWUSADSOqEhpCQnTWMjOIFBU9RAtON0sqyMis4IydMqum6lyLMKCx3LStq2pjK+u9994yUylQimYpoCkPKoFALTkEtdZw0YzFGWJLzhpjHBvPBEqCgrZypq2c98b5IhiHLuw3OuwhIEkmAGJDTL5y1lfEVhBVS0nJ6NQ0ZERhAgVFVjaODZExaJ2xFZuKTcXM5GdqGzJuQrFRixR+tD6eVrYCFBHTAQREZcNkABnRWkUAyROvS1EmRsqUL4+CkrOUIrnklGOWLJoFiuIBjykpAeY0WiuGGFRTTjRRoZSCqKIaZNQ87asTZYbB5qyMgkCMcmADFwEpePhaAYwFUMiJgQEsKDJbJs45QooEOsFqRVJJg+RMyFPuGSKAFgBJWQAIckFUZtKSFIGZAAQIBFEmIrMqSAHMioRoVAsAljLGEdkQGwNMppqpYeMaQUbjBIhgipaTUnIYYwJFZlCYSBUiIiAKoHIYecyEkYjNI1SEhgybR4xo+sVMk+DzsCpPkuDHbQIBDk5fKoBcSiZEhUf1zmESCSjqpDUTAZWJ74wwOfRJKTIpSmnKryIuEhAyIZIxKQxh3CMSMamS8bXzdZKCjo2vShiRmKwvIYFAM3f7+9gNPQFEGef1saQyhiEiFXQl22G9rwBt6+raX16/N2Qc0+nxYr8fLODuenM97BtnK+P3Xd6/exh2+zDEkgq76ts33zeW+k20UKHw9f2OCYxx0sUIo3Um9ONmH9nY1CtFiGENvsyJuiHGmJA5i5acJxjC+y
rnAkqAZBDBMiDWdR0xVFTSWBAVSHbr0RLCEMS6um22l6+a1qLYu00CdAW0RNjuHmprrXfD0Fd1c7w6KSlJkf1DV3LZD1v0lGPZd3titrVxxpSUS8S+H2ezuqqwG/Ztuwxp3OxvmVCltM18HIuMqQsboNLMzKfPLm6u7jXrth/quiqlSM7EwIb6ftQMvvJJZAwhxGJN5TyDNu9vN5++fPHZH325+ctft219lGi7voohIfF6D3/3q/dY4MXLs+OHVf/2pqpMyHmIpYigFmRWVqXC1pSsQEzOIbBBkpJQCxJaIgCd3NMFgImmoYXA067ASOXQ91VREVFmyFq0iCGezBdpirpEZqZpsZn4yB9srVMUv7S4H2o4mzty2mw2rxQ7N2+Pj2fO8jAOrT1ODCVR5ZZjv//yx/+L9evfA+TFCd3eXoUcxqIXR09v7m44jl5dEgrJnhw91fnondnFrm6qEO3t+ur05HRW1e3Zha8u/IBg56dH89u7brFcOZRmdra/u9893P30j35GhS/fXRYif7S6j+FnZ59/+vLPYp81EykY5/qHrVrM3cgxrLfbZTOf22p3fXd8fPb26rt55b2vN9ebi5Nn5/PTcfzdqll+vx6HACUlARHNhuu6XRWNTTtP9zdVOy+m7Labyh1Jmt29fyj9zvjEbIjodi8vn/xjufpW9vtmboC7q+3GccrZzOcvMma7AB9DGN4B7TQV62cIVuLouclFu3FvSYFpyAnQAVmslxGUiExLItH5RcEdWp2v5g+bUEGul7PvX33/5Y8+1QJd13tytpnn4lOAxs6Wx2cha7tamFm8e3/929/85Z+ZL7dBf/zF58vj5r/77//FP/2v/uNv/t3bX3z+8pd/urjcXH/yyfP1t1KkNCfVfbcl4DAE73W3jZL1i5+/uHr9fjU/ebjru4CnJ+1m3RmSs09ORSJoXh7Vv5y/hFJu395Zy2bpEZCZ15dbJ4hd9CtjlL75+ruf/+SzXSzG5c//5DMVeHO1/fE/vIhdtpVDQ3koMSXXGI91fL91kMfvtg3Ptr/7ZvZnG/98hUKyKfvfXR99/tSQrz9bXf72m6qux0zVyZPTZ1BVZjuMGMfVEY3DqCgEtqraKONsaTXzOAQZSonl4XLTrurFjP6jf/ByHC7/+b/6mwC6X+vDdtvMjW/90MdpFhBxScU4bpZH280GEXOIlglzgjw640xdhxCHsWNvgL0BZwlEi3VcSrG2GoYeJ4azodpVREZyIVDvfZ/2+369auY5FQFomrrrOmOcNTNCQU7FMBF6R8570dF5s14PjZsXVS3Yd/HoeAmlkLMKMQzb06OTklPOVLLc322bWZNTYmTLPEJhaxft0cPdutsH1VJbZ8hY5sa7WLIK+KZBDEMIJWj2UlXWGpA0lBBVNBoiwlJwPlsab2/XG1IwxADQNjUA2tru93syJEJxFOtsYWJnq7YhzSDijA1RA4ha8tZjkfVmO1vOXz1cKZXT5VIxsfWTKwCjqeuWEUxlUhLDxXpTLY9SjBxKisFaL8zrXe52u8ZYVujHKFpU1dcmAy+P6yBJpBjnNYzGGzLsjFkt2+udL30Cg2OOVIxomZRPqhAlUWZrmzEMiE6llBIhYV3X9+td09ZxTKIoachGmdg6z4Zj6Ouqbo+PmhqHvrcRVst6zq2vnl++v/aN995dvv76x7/8ZeXsdtvN2nYcCyAIIFk6Ws7Svvs3f/E/EeVnz37y8sXF6+9+H3NfWWsYQwiEQkJS4vZ2XYN9fv7i21dfbbveVn7RNtZwScTWOWuSiDGEJSmqdcZbO4Y8O1493K+X7eLd5fXKts7ays9mrev7uycXJ2UoMA6Lptq1dc7grbt49uTdu7uT5eoDIIQHB43JKfpDB/uQXDYdeA7uQnDoT4scJDzTkw4Ue1BR0YO946O058AbmlRCNJUpcoiuenzwAUJ6tPCYnveIEj1aayAcJGyT2APgENKG8IFeNP1CFFABiSlvNg83NzfXN3fb/U5RN5tt1++IbAy5H/anZ0expLHEEELT+tmiWe+31rJV3u5unr94Xq/mSBLz2CyaGNOEbxDaWdN4LXfXV+iYEp+eHoUYx3HQkve7iEyztllv+ylbFxCByVdtTiNosdbFJO28DTG0VZXGsfJV5et+jKAaQkQF9tYDoHH9mNoUnpyd9Nh341hAqqa2Q88kMWbDCALr9Xb86psXz579+Kc/e/3tN3EYa0X0ZrZc5Ji7IQMIVfNPv3z2ycWz2cw/9Nu//KtvwjDkLHXT5Jy4MW07e3i4Eed3Yx+jpjD2Yw/4/yPrT3ply7IzQWw1e+/TWne717s/d48IjzZZZLJIZhazEqjKIiQIVUBpUBqoppoI0O/QSDPVUENBQGkgJFADpSRAophZzCywS0YEGeH9c3/N7a077W7W0uCY3edMXfgD/No1O9fsnt2tb32NTeIlmt39JsudIVTEqiwE027fDaJjCsxm8PHd3ZbzLHzx9afPn/zmt1+OYfTJTwSF471UQpqK4aOl83GATKnc+L3CWTUJJNGY1IfoYhLRGGNKEoOPwThjVBISowrIgYFNSEfkBQFoGlRTtQMKBAhTsT4d5BGnE/tDSa1H6xaY8kEm7JIO5/qksN02X3791f366vbqSjVutt3lzaWIXjx6bjMX/PD27TvBFGMcQ48ECBglAujU+n7vbPpQ5B/+OA+yIdWHR+Ew5GkiqOiBLSQICKSSEgABhBQWecljHw+vx4nKQnTIHEwAlinLTNd2s8X8yelCUsqrYhjalEZ0Ze5s33Sdb6/vN6FtSgsR9Wc/+cGydBL2KCGzbgyd77qhGS6efWisCe1uvjy/vnyzuduU9UmyEFJrFfKqeru+/J0f/yTpCIwiRKYSU3ftzTB2i7JcX90tHxXlslrvxycvztb3rz94efbd37/KLSQ1r+73H57kp/Mz1jT6oShmQ/Aao00AVpIKc+73ewKyTAH6ENE6w8wsCZFSiH3XY1aAjhADKUnU6COzGbttMZ9lVbnd73J0IGHfttVyYS2F4GUc87xsmru6zgush+F6npcaAnCpEMmUIHEYe7SlUg5J0beqHoiZ7OQ99N5OSAVUSPW9ygtpEm4pkB5JR/CeIQbTKvaA1RzZZd9jBb2HffABDj8OmIefHliheISHjsa6Dw88XG4CKnH69z7b4PBGBBXTFBwPoHog/uTO5ZkzRD4Jc4whKoHENMnijHFwuKgqqypMiVHOMZM5+GSDxJSYaPrLoJKIGAYAZCLHRlWMcc4SiteUnGG2VBZ5XuRFXRWODAOJp+g1ifigKqBJNQHJtCgbIjbgyjwvsjIrsqxk65BJY5QkRDSbr8x8bss5EYVhGJsNJz/6IeiIRARkmW3uTOGsy4mZQTSJqpIqYDpsLaDWEhpky2zMJFYwtmKbsSvYOXRVsiUCEyhqPOC+CpAiaiIJEkcIHcpAMJn8JQYiBjQIdAgVBUlH3zQEURCBlEREJYr3Mml5YpTgJYQp23raCiXGlBRUlUlSiilNtuMpRTCZAkRVRjDWToIrJEQiEoQEzMhCCIoaIQqpiETQi
NaipsksTaZ8VLaEbIAAiYhVZQKSIIwgCZEmEyjEqBoRSUQgGURkYoGgKQKrIGACxIlORKQJIakIyqH9kw4zCCfxXRoVjUVrfAzWuTh2bHO1hVIGyClBCkl8gphEEtrDuqkConJYF6fCWQVlKpEjMSOQKESUlOI0C8zDxjTZYD/MPjnuZwAomiZ8biKKTnfrsLfIkRaoOBkSKZJMpknAIkFkEidHJEoSQIlBAZOoEIGmiCSqOva9iDpXBhmMYUJKGsmwqyqEg91SVVZGjYCmlLzvBSKpyU1d5atB+iIvZ/UqkPfcn9ZVSXR/tx7GscgsJqIYQ+eHfRvRjWOCxAKmrEu/30iUIs+MJcPFQPh4fh67tlpQDEHG4O+7k1XtnB2iZzZPTs9/+/UrQ1mRFxiCaHj6dLHrZZ6bLeBoTBDN82wEjSEaa2h69ykOElCSMSQptU1jCLyneVUbQFBPAMZgD4kArJq23SqPhtzQ4ryi3e16UeTnq8Wua8oqs47CCF0XJEbnGCX5Nraxe/HseSdNkrFrR5ukLrK+94BAhd10fbwfDRNoAxBDUCaLBPfrXcZZnVeWZ+O6p9Ea4kdn83FM227M8rxv29H73JQg3uWZcRBjsmzD6NvN+OKTJ5uu16iD6K8+f/Uv/vkfwKc/+OLrV6WjxbwK+27fDqIQo3z26m2QNK+zk9Xs7eVaUkLUlCIZGKNnJoky6cJAUKNOSWfGZIwikqZR9GB6KKKT/DlJBEA67EqKQJY4HcicqEBMCEAiB+N4PVg6KB7cIt5vPgAgURezOcCMePZoZd5tvsxKGwLzYJahjN22NHl7tyfNT85WQ9jvhv6z3/6WUr+aubbd7zZr58qMqtR0683lqjjJFqWPoYv9jx599O7yWzHU916TBdWnZ4+r2eLu8jYrRr/+arftFudn9dmqurzxgcaA49AbFz1qg3bcb/Z+u5jNS8y73fji5y856e363icBAh9SBLaZrari4nxxfbvO7aK4WM3mddCwOjmvC+dTr0RhbL67uX/69BFxrOsMyJrcFlk9jL4yJQkZylKCwhVB2phSVc7bZp+XhaB0sWX1ZWmH0b9+dfWL/3SB9ccAe0t8+e1Xb/v46LS4uPigCeTOaH+3zaQ1rmr723o+b7Z7UEWynXaD3w+xFxzZ5nleK1YJjJkvUJKoDiFV9elscT7Gb42NwPliebJcvdi3w9PHHzbtFoVE3BiMcRlTXc3rsd8Nw201P9mur7PKGeP/4B//oN11T19+8Ju//O3dV/R7L56Vsvtv/9t/+m//L3+xvnlalvb26/t/9S9/m9nZH/2zT+3MoIOqMFmZhx4cypsvb4zQ3e72yYsVlCQqq8ezwtqmGbpdF/xYVuXEwT95fDp2frcem344Oa3LepYnurl8W85nbp5/+pOXcd/1d3eIbGxOpV1au3s39l27ejJDk5R1c3t/8uiEknbXt3Wk+9d3L/7on919+3dps8+erfajR2Py04VXbXab4nx58ewjtamBuyyT8lHhfV8XmQpFCWrM6encKOx37XazdRZj0LrOyzobfSyXs3Ec1pv+xcuaitk3f/i4K07+yfPf+T/8d7/KcodkxnA4GBGgIQ4+tu0ICsZiUZfOmJjGEEZ2hMhVUXpOtii872OI226zKJegulqsdt1orQVNIYqKWlbD5MdRUhKJbNmZLCqMPhwLIc1yG9Lgmx4Jgve1qVU1+gCSOMPTk5PBp1ld3A33Fm3bdmlIeemqxSzPy6Hv231r2Bpn1OLQ9YgkMe73O+sMgcbgi8IkUWTHZFRj2zZJRQkNm7ZpnTFFnnf90PVeFUEZmLLChRiTpHpWNbtm9P2+2+679mR+RobCOLo8A1QVMWR8185mcxAcxzaRGpfJ6MUnUrNYLNANY2aEFEjmq5nf4d1277vmk+fPVb1xhg2nqD55Ap6ahMIqmjQkG9O8zO/HvqgLw1nfNfeX96WzDMlmhS3yCGmMwTrHLmNyrhcd+/m8urxrYgjIFo1LAuvtsCjrYbcrMxuCRwBjTIwy+EFFlHQc+hTR5plEyQyHkCT2WW5X89XQN4wAGrtmt1g6MCR+gJiW9WoY/Nj4nPBssZSkxjIQVrO5K/I49MnD9XfvzpYX2VNjajv4EHw0xqCXlER27fXbb5zll598wrx8/ebNdtuWlfFjShglRYnJWRNjSGk8rU6263ffXX75+PTxoi5RYGw760riSYaMIpLn2e22McYUue27MURFm+WLUu8wKSEDG8hsbeIWEzHryXK+247L+Wy96S/OFvN59ei0bvZ7eH8kgqOnrx6EEHjAg6YdY2oFPxyH4NBlS0yHXvpxrzk6ED3owR5OWZNP8NRtP/Tdjv1zfZBK6INNzIGO9D206FDhH/fGB6BBjtjWdEk5MmpTjPvd7tWrb1+/edOPbdO21jITGmK0HPz4+OkpgG7v196nMnOny7moouGzi/O+2S8XF7NqZcn0/b7dbOZViRW++fqb1XI+jD56+fqbN3fbpiyrs7MVAtzf3DSbHRIsFssUw3bbAKjLTIqiKmVZ5kWGEp3LmMiYSdxEwxAfP1pdXl0nVULNcocIXdeXRQ4KIpIgbfb77utvnz99PM/m+83u6urGGGNNXpXm3dt3xJRZI33/9utXZ6fzT3/wI2txaNo2JrF2ZoqTk5PVsjJloXHcXF5+8/Xl6H2WAjsakcZhHDUUxWzX7ASEGW/v7lgDeqmdG9p2Ma9QZNftf3D6wX0zJoVoaYz69r4hZ5PAOA55ljPj2HdX74IOYRyk6weN6UBZwOnATw/3/HC2ng7TeKiaCYGIEJQQmZARaLLmPdBpplP6QWUWU7KiooIKhAYODXp5X0MfiugHZ6sHGtxEGjpQON4zelRVACYziwPzaWoOCypEBR/18ub229ffrNc3psCx9VeXl1VZzU5nJ4uTL7/+YtfeTXc8eI+CjBhC0CM2RhNydcQ99UjgOEID3//uPetq4jTBAWLQybQLEZOqKrA1SVI3DIV1YZwCkuAwRUlU6ZCKC3YcJSZ8/Oh5144E/nz1SXt934f2dD4ffeqH8fruvmkGQyi77lFZfnA+l3E/jmNVua7t/Nj2zQ45y0wWfFfmxnebd6+/Xc5OnTFjiG2/J4N+6M6ce1Jl/f3aZCUC2iyPPt1db0CgzOiLu+9+/rOLy8uQhmAUbeIqs/Pc6mZj0c2W89eb/sUiPJnb6EcNuYUspOTHkS0qQErRuHr0XsmJ9gyUQgAQUW/ZiWjT701VpWEXYgIyQ58wpaKsdu1tXlVoqZzN436PpKDQbjZ5lmemEgkEOMtrSYHASTBxGDLrANGPTZ4XhiGCmrxSRVAP6gEZuZCJ+gEw3ZRJ2whHD7VDgB4ZmWpy1GPdeXTmf1Ccva9Gj//wOE7+4XB+QBaPcDzAA2vzSKecHnzA+49cNfgHX/iAGen7VfqAXh4QEVRgQ5YoY17mblaVqrAfxkGSIBJAZq2qKIOxZvIFC6O3qEWZzXOX5Rkh+ST7MXpJKSkRqEREYgQCMMTWmMxaa5hQjTFEZJiMdZiEGarCzerSujxzXFi0mjBF
kKDRQ/SIqJqY0BkyCM6azLIx1pgst2XGxiKmMIKQApjMZpnLixnNVuQyECFNsRFLiNaYzKIoIbnMkWW2zKyoEVKClBAEUBGBjAFCYkPs2DhjS2JDtjCuZFuQseQcZgXYEtmiKGoASapxWlNEYhp78C34BmOP6gmQ2aAxaAxbp0wTAe2wjqmARBXUlKYMRQVNMYiklIJIAI0iPqWQksYooqxHNaYmSkiiKgIiQozGJohRARwyMCESGiQgFRSJKhPXEmTSySkgyIR76pT0mywga4jiPU7KXUBEYjYAmjQhCAohG55CSAmBjUqEJCpKyNPMUElAD6ufTMpI0KSSEARACadcvOkTT5LkSbnFAqAwkmUgJkM2K9F0plgE9GQyBVYBEEFQY4wyC6BBBkBUAjr8GeHQiTjyRwGSKNJEizswTM3DNJlklofDUDpK1I47zcQ1hSPvUxUmo2oEVU1TKoJMh5YJYhMRwaSKgEmSImgSUSHAAIFADRGRYcx830nwGgQSjhrROptXiBiTZi6zRKlrJIqdLWcnq7uvPlefZIQ4RMbJPEvbcM+5Oz3/MIV4u7v/4Mcva5t9+Te/GobY7bp6MUsAIOn2fueQ990+AAG4btDu+r60vO82Z+cnmSmbdbjb7EI/S3GMUXzo8iz75NnjnR/vNg0M3uTZl5dv1bCxLgTfbjazeVVQfd/fafIvPzz99bv75KUfParazIkkVYgpGWdiFGBSIsMsUYoir0uLISyXp3c3b0wBhs1i5qq6GNsuRH+2mr/44EfNtr/67suqrIyzHtDl1Xq3Kzh3WQ7Iu906z7OnZ4/aNuCW7+4aAW8A8jxnp0kTOyRGyrNd0xqXG4LBR2PkZLnablpNaIiT1+16b5xph+CqarsZEbks88g8Dm05L/LkRABB98NQ2Py2HdmSCHgfDMDpfPblF9/NF4ub2+3rd7cf/fDlfbOmzfj48WI/hB7GcfBszNu3uxjwhy8fn62qXds1g2cla2yCNHEPATAmJRRm0OSRBKyazCE7QG/YSEyTazuAMpGCTDnERCSoBIoEIqI0HbCI2ISDTdqhnxBVVRIRHVSTIMwMCA+n6nlZtduQWmW3+Ue/+MG3f/aXhSkfLU4323By8mxo6sw6Jry6upk/nl/vbhQxK+qPXr589erXI9Js9bQQO1us7q5e/ehn//G/+6tf5vnZbDH/xYsfr9ffog7bVkavcT+WkA1D18X48vGnktBL38ewu1/vw9aWFQzjftjNH2Xnj+svPr8sDS+Xp3Nb56u6tsuTwj07f3l3tem7HZJGBM4s+aiqy9WsHfde+G8+/+bJxbPMxCzPYTTsOFMTosvq/NtX3z5+cf7iJ8sv/+w6r+rZ2coPozVWQoQ4cuFcVovouGtU0IeR84QWIfqizMna3WaHMYR939+8s0/qq2Yj+3Tx7Hzf1hdPq9/8u78+WZ3/8OUvbvb3dzd3syymaMi6rCg29zfqw+D3Li8JnbNKGigrh4GtmanOkh9ctchyZ/NZ56Ve/LBeZb332eLcE4UoXNj5yZPdTeuWFyeny836bgh9VnPbNdVyUc0W6+3d/npLmclr+6tffvvsR88fPV1u7vePf/zJ5vLrX//pb1///euf/vHPxuFubMJ/9p/+fFHPsABeQe9j34em22VceA9lXho1V+t9iNGJZeIQ4pCImOfLcvT2ZF5u1vuE7vpmzIyWy3x+URKrBI2jxIwDWdnvZEjadOef/hjBfvvXX2bLKplUzOv1m92stDQjm9v5comR23Z/8uGZzQ10/v6v/szeXe3+7Z8u/M+yRy/47LxnKU6rOcN4N3T7dnEx78foMn3z5gqcLp/O1utdlVd1vfB9vNo0Ze7OFitn7NC2pHFkNABVzu1GQIBZb397ff31uqJP9lefG0wsaMg0/TjNAlfkWV6u7+6qMlOFhGgYm7apyqKer0IcUxCbsY8NDqgSmbTKF6AmyLDt9kR2Web7rsttrs6020YiqAKziTEaYwF06PqqqFL0EiEMkQtCBGCQBIbr3MyHuI+DV4GMMaSkoE3TXpycdF1vrQkJQ5Cr6xvHRkTYZhAFQNq+rYoKCVMUJlJRYmzbbvB9vVzs22FWWnSu27RVXiaG3X57Wi9SHBNiUeaGcehHa+1kX4YgCCZjOxCgM4hacmmYksRyVpdZvtmsQwyWTDlbCSQkE4MkCcY6tBzBSG5VoAQ7h1DZ4nrfictu9juUoqLZR0+f+OSROCMbLScFJnQmSyE4YzHFrunqck5FUdblsG2TCgqJQp+8B3M+WwmMgFiWM8oLZtWEdZFbJDHF3X13cXJyc7fnpESWyZyt5nd3N4xIxhEQCExHGzZGNQJhSAP4YNl4paRYlAtJowAW5eJ+d1cUJQEECQQQRm8M9t1omIeghaf7fX8yX+1329Xpahx7InN6fkZoLx69IOI8s3WZDT7Oq3kII4iObR9DV7jVD37wzNr95ZtXaWizojCEIn0/DOQDMXd937QNKZPY715/k4G15Ia2dc7l2WLbto9OT6y14xgmUYSkmOW5cEyiu3335NFKMdRVNoawXM5uts0u+Cfnz3SMbd8Pbapd4UvcbPt+8M65oq4sufdV6ntKxQGvgUOLWlEnfuv7opqOOWlHf+ujvwscuBoTaCQiRPzQ9piedDirPcj6Dy+SI/JzkCkdGU6TzwEe6nh8iBif3jF9T2XxcIXpWxXVMcRNs1vvttv9btvsttuWGQEgL83i/GzsBgBq22EYk2GTF0XX98W8XJ1Ury+/m5cVRLaC+/urIY3RczWrru/eVfO6b/elEnO6ur13zi3q+enq5PrqrYIgsTEmpTSGxIQhydB5tkaSWIbcuc26PTtZvHl7jYiZtf0YgTAryywvYoigyTLZrNjsGiMIgnEcJQaNdjf2N3f754/PF8tqv+8iaEgxJLM8mTe7TjTmeZ68tLvmOvoyd8+fP/3Bk4uiyl2UMDTXm6vtm40JyQIurQNnd7G533WbTmar2X4YZ/O5GOOv2kVN6seUBGIiVyBYBvIpxSTb/VaSCUpX24acy4q8j9E4w0xtPxbWxBBGCVeaZkVOBkgpxKiqk1MYH+P08Fgdf+9/EAAIkAGJppxotEyG2TBba6yzZJgMIzMSKU5eIfTeYV2RDow0mYhBdLASPhTK+KAsO2BC8r2hC/AAQR3ljQoPiBGkGEWoabqvvvzs9vrbEJrk5fbupqqrZ89f5tZ9+fXf+9DnRaWKyY/OGMvUDL0qsDEoB5bGBJQdbGImi+v3CcoPcxG/N4ofGE96iNZC0MmaSQQIQxSDuBuHPM8NYDrMwIOUBxE1RWQwxoYYfdSLpx+0XTvPkoZ21w1gchTbd+N2t/ejB9LoE6t89Oy8cLBu+sxZ3/Wh7yFKHNPs9IkCuiwf2tu3by+tm1eLCw77dhOIbDJu6MbF8txRtg9QLsooQhBjGNumlSSDxGK52A8tAKrN2iEsl4vLy92z56eXbbi+7eaZ67vh63e3Ly4+Adm13X5WrXzoBQCS5SzzSUpbeK/iPSGOQ1fkDsgIjCqCmjSm6HuD1iS679uynIt
GkMhkJHaZNeSw5yy3Voa+2ezyvDj54LFXHGLrkCGGNHprXZKRTTn4vjAFCaQQMXOSlCCgRpoyHY09uD+nSASoiDo5Gx7ok0AITIoMiCjyfqgdkZ5j9/b7C9fxH37/keMQOC6SR9olwHFMHNZMPI6fIxb1MPwfqGyHNwAA34Msv0ecmCifh+sbYmd5WWWrMs+sHbwHFZ+SANjMTSpIIkwxMSAbPClmT07qZ48vPnh85vK868Yvv3v75bubdTNMK7qKIgIQqqjJma3hLDPOZJlxxiIooSBAXjhnqSxsXmaZy6rMlZg4hqSj6pg0EiZGUhAwNiEaa03myKDLnHE5JBX1Po6ITNZwlruiKObzrFqoyRFU4khhyDEia3BswDIREjtrp78uSoQkUzUPKGSMYUJLxJasY0PMBtmyK9lV7HK2GVmDWQEuB3YMBDJAChr6SXkmMWgaKY7qW4wjqlhiADTWKRtgJ4DHrfJIIZSEkFBBU8SDhAQkHbAIPCbcEVKUONF5QZVUUSe5EwNATBEUo4KoCkVmo5AUDJJREGtQNEyXAZi8OAkBDkaEKhA9goAgpoFUOI2cksSkYKadPB3zJVGUCMFazJzqFEyaNCZRQUqik/pQUAV0MlYRVU06tYRQQRAiIgLEg1gdWUAkTjZPlCCpSgKWkJICEI8RjHOcCLMKQwKySVUFDJuEQGysdYZZBQQmt6YAh5bABLuhHjeECcx5qI7Nw2SYHpjSOqZex1EXfcDwJ1aqqh6QLZ0SPQ6q5UPn60G/PGkQhQhJAVMSJktH1MyyBZWQBkkjghjmgKQoNi/QELABJcY0EULYWNGYlbOmb5XFOu63oW9ClJRlZAh86Gd13jZ3SUO9mNV59fnf/Cq2IbMZLxwac3+3riDNFqe79i5ZUIBN1yqIneetRsyLd+vGoMkSV1k+dAMhIroPXj7uu927yyvlDJHr1WKza5gMQBLfliY3ixo4v77dDPvAzvZDOJkVCc39rhs6BcGUDv6EY4hMhMDdKPO5mc3qsWtO3EwhXK9fF85K1Pum+/FHZ/vrKwvud3/+4y+++fu3X/x67EPoO4fGZbP7TUNsq+psWSx/+91X56v5s+cXw9jfbW7m+Xw5r4Sr/diO3b1BMeSaYcQoIkl9EAGlWFflMAySzPXNBpkKZxCxG4eJZWhLd7e9n68WyJn3UJWLFNVmmR87lJBlqGgy4D2REpKhoHp3dfvBR09vvruGqFlW/Pbrt2dPnnz60x//1V/9Kk/05HyhSdZNj0Q+6ZurJsHVJy+fFFnuuBsHD0Qq6mOybBhBkyiSgCIlx5yiV1RQQwTMhCRRDpIJpUOnFPFgNqTME5GbiCbxq6YEqoBMxgJoEk0y+b0zISSJx3lwFOEDDL3XgC6rg/Sv3nxW5nmV5ynivFxmaMjNTM6DDDrTBjyYep7l99v+/v4eUvbt1fbTn/78HBJR7Gv32be/LirMirwuipvLV84OZPH+bnP65LGRbE75TXv9+NFPlPx3b79+/Hy1KLP1bbNanO/ufd+OSBxkvLnf545v3347y5eDRwmYo/v05Sf395vt5p4MSdSECM6AcyLR2jyMIZ/lmbq/+urNH/7wcYjBmsy4mcvr++13JsYPXn56fXvTj2oQDAqRGESJMpIv7CwJxgh1fTa0+4TRcM9lGQA0ABAQMQO6MquXbgzbwpTZrJw/nTkNX/zb3xaLFz/42Q9329uvvvlza+z8ce1bD7rY+/1+f5fVWYbLPJ2PoSVJRAHBDipmPjflKZhTclG4YOsEHRHkuYw+jMEZslgY6QgZut0Yg5w/XfTdpnQkUZfn1ehbzpwaIYaTJ0+VFSy5pd01++X5895+t3wx/9M//ft//OyPn774x/NZ8fo3fn6azZ/Ofvs3N2YWniyXymqdKV3Rt0msKedZt2kuns/zufa974dUkP3szc1PP72ABEPXbbkrlpmoa9fjyTKzTsd+YDBK6mb86KcXvh0BsqwyynbYD3kOs1WOS5vN6lHTx794OcqeLSlT28U848Wj0+bq3bDv72X/6OUnTXN18Yf/yUBAPRRoOqCkMYKU59W28XRuz4onY0qzs5mrXDbDoKIR3l3dni5LUazqMvowdH2eGwAkZ30fd614D82uzcvsyQfnf/CH7n//f/rbD38Hnj95HLpWYiqz4ywY/DAEY4xh6oIvinJW1RGiAkaNY0wQxWRlnpWk0I0+z3NnjIiURdUPo0pqh0GSiIlj0zOqdcYHtdYYsWR48IFdFjEOcchsXpZZSkGZzs8f3W924xiu92/LrJjVpYgoQhQxzmZI4+DDGKoqJ6QYg4plgK4PLqPoBxQUpZgAUjTGIFISIYsm49rVqljlRfCDqjIZMgQos6oGgno+2++bPDPEFEMEg2OIo/cut/3gN7t9TNB3w2JRl8k0+6bIbbvbb9OGCYu8KPLqbndvEIAgotq8wjGEeFOXhfb50A2SNEeTvJ9V5e5261N0efHi8UuPgmycyySqQEJCYyw7p8gxBpsZtbUCKKRiVsxUY98H1EdPnzR9s9vIXReqChlNOc/J5RSjyU05K64vbwc/KBEZKjLnx0AWjMvq2TzLimHsLKPGKBJi8NaVMY4TSSGJggCYjI1BNCklSZNilxbVUlL00fthPFktjc01xjwnl9vlapa6FqOOoa8XNeemXM5TTErkCl4tTro29H7wG3SuGkaZOkmzZTkMlDwh0d27fRzGvKrHIUkKQxhS9BYw46wZdsh0slyCpohxtVqdLmZvr949fzqLMTnrGCn4gIiSABQzkzFT5rLZfPbu8v7pxSp24WQ5f3t93+z2jq0fJRWMmE5P5773xCk3WBWWQVMMs5P557/97UPZcOTq6LHwnnQ3eHBQfWA7TBqYg9PGFOdEDx30A/9a9FAMI8Ex2kz04BUwiSTo4Gf0Hl2CQ63+vfBynMAqmZpz33sUpiS1Ken80HEXQeTpUKYAKaUYw/r+/u5+vWuapu32u7btejJMgEtXn83Ovt1+2/QtZ06ZI1ASmFXlfDbf3O+MIpP98MOX3373uVpxRVXVc0tpe39HAAgEye/3g4/+xfMPltV8327W++1sXtd1icxVWfj13dj7JFJVuYgOPjlmjSJRNrtGBYylqfRLKttdS0CzerbbbfsxmLw21o1jr6poSBOhMdaY9f1G/HhxNp/Nlsp4e3NnklirTIZRASRzPLPsgldN119/c/f2lYWYMzuLJrNP6xqdLaqZMVbT2NeuuuZwswOIfdsb61IIMaXY9YyS8ZTGExFxu28yYyVK3w5jslf3rcldL2FeFNakuOucIXPgo5ESdH3IDE+jZcqNUVUmQoWHRLIpNO9AJwKc3E0NIRMy45TibA1nmXWW8yzLs6zIsyxz1lprrWGDExRyGF06DTg4RM3QQSYgaaLKHXkaD03e97LHB/rFBOXoEQE9kucwpQSKKcm769fvbr8FlxzY7aZZLlbL5enp8vybLz5DgHpWI1pA2G3umVzXNZrS5K+kqkwsmkgVFOWhnv8eNnCs6aef/QPQcxr2jKgHsadIOnzmBOCMBY0pSWWMBx
nTZPfEqoogCJpZQ6TbtnlyeoEp7Pfrjz993DfrftDHLz7ou2GM+6bZk3E5uXUcnz87Pz8p+26vKSCbrmuYTNsP7Kq8KA1J26w3+xtT5OJmLud230kKyCRos5xOl8uuaceYyFgl5RyL0iCqZbu93QmZ222DbN9t7p4/Paty2/UdL2fzOlevQvHiYrFrhrfr7dM5sXEpibMZYArjwExCblBlwyqRmfzQmx5QjaFcY9SoxvLY9cLWlMWuuzpZrgyXQ98Za9q2O8lmLnO+oMH32WLW3fbN7n427iU7D9GLDmXufABQGfv1LC9ykxkiP4zOUPTeuSShm4YRZbkCTaj5pBE8VI4T+wsRkIBIj85r/8HNPjqyff9OHxynH54Ih3VXHx54AHuOTKMjX0gfXnKIXvoH1334+g8ufxiG/wCdNAh0yIGfeEM8z+x56Ra5C5K6GFKIBAiMKge/IVC1xKh66rIffPD404+ePn/+JHfctN11GFCjDyGmg1zCIDKTQbLMmTGTpTUzFc4YQp7sWZGrIreOM2eMtZlzOZNNHjXF4DUEAwzGGCJAVmMMG0smM1mRMROjREAJEpHU2ZzQZJaLLDdEBJLiKMmnvottozGAKDGZskAEJIOqACnGBBJBgQANGyRmQ8YaYDLWGeOYHZExNmdXcFawdciWXKbGoGEkBQ2ASSFOKusUo0pS79V3pFE1GUNESmSQABiEEiADKkxOZAd8IaEEkAQAB/yXDreWCIgR4jToBECTpJg0JUxJYhJRTZJAQVJCpKRRmAXVucwYR2gmOVbSBKjACIwPQ+lIrhHVhJMzkyiqQAogCUEQZDJrJyQmQctJWRIpWIQpghFFVEQ1RU2UUh9TCCFOYjoiBI2oTESqlAQEcLJNEhUiTpKIEIhh8o4WTaAKFFWSCgKJoECSMPAQOQMKguSALbAFsmwpt5ZdjtYaY6bgjSCCkE3nA1VRUUaSJIAaUzoInvXgQH+AilTk0Ew4CIAnL+pJmXzQd4IUQTW8AAEAAElEQVTiIVhTYHqiqKaUDrdQYWI4HxR6B6JXUlQFZeJpC1NVA4gpMejYbkvLEr1KEkjGWZflQuhjclNUwOEIF5B5dnIeuz3EMY7t0HUigGJQyHLZ+130AbAJBD/+4e81t9+R6Oxk3uy7fmxJbFaw9rJuN30agFL0qSyxH2HXdCBJRPM6kxRUwuKkjEAA7nbdvHp3GYYxc1mMmLzfqUTRoe+R6OLRcr++XyxPX1/eg0Kdu+goQXti3BAj57zxCMy2yppuIMdBwTFB0spJ6djg8OLpwhoecbdamhgyExGy0qK9WJUx2r/+zd9KPxTGM5nVrEoh+rE/XdTbdlflxejXTy+WYz/6PtsK18uzWV2tb+/avn1x8dHVddcP49gOgMbHmLksAViDojLE1AeW3rMDWzglcYxkiRkSpJPTk67dOIuX15fL5cnji8eQUZZVY9fstxvnKPa93/fOcg+iksaoN7v7X5z95OVHn/zm15+xodubzW8++/IP/vgfnz273796/eKjCz+GfvSjj5apa8K769190//kk2erk8UQ7zsfAbTIsjClAhKqYkhCABgDM6lACOKY5HA6kENEoigSqcrUNJs8uiYvRtRpPQcBNWySRI0CSMYYABJJImFKd3ygYT+YOK5OFl5DDFqURb/u6vxCNOw8LN0ZjrHKVjf7jZgkIpffflPOfvzxxe+ezwefLsdseP4M2Wrb7Idmy3U+Blmcv3j06MN33/0acNiN+37snbPjwGNLH/zgDz7+4GI/3t91X1w8OkWKXdcbHpwMs8rEQRcX537bmZDZbDGfnfoITbye19Uim0evmCMBocXQDiarkhdJmlkLRVXOT/oUYRy/u/7lj87KJyfzPM9SwG075lXpAMsiV6V5mX/6kX779asUYlFwWZQKYAyMPmC381BX1enN/RUoUWYsZX5ssqywjNHaEIdxbLb3V9XTp47yf/1n//r3f/jz/+V/8l+++eqXt9vLJuwuni5n1nz+5VfzgqXd5bmdn3/U7C+tcxqFwbb7Pi/MGMcsnyVTMufKmSTuVU8u5gy2vQ+zolqtisub2/Wtb3brs0enSuXubmNptrvvxrY/P1l99u2ri5eneVH2e68jd3v77PnL715/DsT7mxEf53/+y68//kflV7/87MOT8xcvn/1f/4//3T/9z/53xa7d9eMvf31fnzx/XBWv342rhWPGWcHdbhz7uLvtGMy63589qlzBaNQy/fiTx9EHQDTsWM04qmIoKw5BYlJCAqa+GZJJmHB9tz5bnrOlMcT+dkPFIq+z4vHi8ovvjFu8vtqcfrRURRn8m68+//Ds45tXzWyh+flF/KEL1fnT/+R/FQjqq5G++CZ8dbP4ybP4+lZDxLx2mvZf7XUxlGfVfr+9edu/+NGzwpWB/MmFy41RtaJiS87NDFPqutjtese8vd4D0YuX51++ulka84OfPj7/6NUf/s/+yPD+r/6nv/jV3/yWj2cjwxCGQNYkSTLGXVgbUmstA6eYLBtg64f+9Gyx3zZ5no+jdzk3w/757OPd/ltWb8ARsYQU2lgWtcaQOxvGEEc1mdMYMTOqagxbR8SkYja7TdtoXriT07P9djN2Q1BFpizLjMTlvGh2fTsMeeX2bRM1reqaooxDyHIDpOSsMU4ghjDJ1jiGGEV9CMayCr55/a7IZquTIoyegLqmV6N1UYzDmNZbQqs+jikRU+bcOI4GKTfOOEqiqFiVM0UqZmqdW1Szd7ebZginq/kwejK5NZxCsrbIipKUAOS+ffPk5B915jJ3Fkky54a2W54uv/ji1Um1KGZZMc8KSyhks8xTP4wCCELqNbChzFjCKCGm1FHkJEzIxmSOiUl5OY+opP3q5Ny3AYEFUNE6Y6yzRVUOoyfjxhCUkbM8ry1ET7FnYzFYZRn9gMyqDChJw8E6IYUEhKCWOfrohz0gJ1EgzaucJdNBAupu35RVUVaZy6yBCHEoCzaZq+rMsmFGAgFjjCVmO6bRFpnNTJSYIAqHxu+qrGbDmNnM6N3r71LsTV4gkQ9r8cNJteihCV0ffN/168zlCN2mb50lh2m3u53XuWoIIQlJlRuJwTme6lgikhRDSrOZu3wbxUesXFYad+8kRY7QhX6EvLTGGjKkxayIw6gpSoj77bpeLHI6BAEeVGbfpxQdfKQFQQ9xyu9rmam8OLTI9PjD96Y0AAioIogkIAg08dgRj0DRQaT2IE2CI/hzaOPh4V3gQYt2+IUPTzrUPUfJ2xHAkimCBwREVf0Ytne79f266xq0EsUnjZI0RcEdfPf6VT8Ow+hjP/gxWuvMyWoM8f7qrh9CVZWny0c329fbsH68ejb2fdu1i7o0aqpFllLQ3L7+4lsic37yKAxNSmm1WHiBk8Xs5n69qBcpSlI9OVne3d+PIbKlWV3lmVWQvhuAAFH7oatmddOOkKRr9kXhQDTEMAz7usw16d39HRm2zvWdr2tWpyH1t3dJ4+bR8yeL1ard76ImY5GIS2dIYm2hdiZjzoiJwBgqqyLPCjTWWY7ikWRqZi/PToBpSOFuN
4AmGjOxNKLGmACjMqlGwKL3XZVXTA4pJqQxRSKQJF07iGgCPTk77Zt9lqk1zFGiTl0plShIzMcT9yQmpANmcyAvwDGDGBGYJhoRGUPWsDFsjMkym2euyIsiz1yWOeessVN9aQxPDtXHMYQP1e+h6/se/ZxAz2NbVxWnQPl/yLWYwosnr+gjy26Kn1UBHcbu7eW37bj2MopI1/tnzz6YV/Xd3Y2SzOfzfvBVWb57+05FY0r7YZz8vUJMx49LQHQkF00I54QNTSE9etT+TJ/jIM2cbLVxcj1DEEIVFVBkPBozqQL4GFR1Vc/uhzFMtUwSY8lZtpZCDKOXF8+ehHEz9rss+2C/a/PZuctc37Rj2zvDXsfepyx35+ePy4yH3c4xxTAiYIxpt21PHj+zJCn5zd3tfHU2iEMsVFTjmOcmJIwKuaHTuurbbQIWxejHCiB0bRz3jtJuH5oxZaapl2cSPEWdL1yVw6YfffQmz765vl0s6zy397tmbl1Z2DGE2Xze7K81aemMyV0CdM4VZbXZrQmSHwebVaoJVGNUBI0aou1Ks6rKk3a/XaxOASSzbt+2GAdTFtm8Ti3jEBfzKvm2v7uqn14ou269zm1NBiXi2I1j2WXWoiKrVREDQ2qujGFml1RwqkuP8Awexhkd7hwxEukD0edACNLvkX6O3Mn3K9ohSvL769vxCUdoRx/W18OieFz94EhSO47/f/DC6Q0cLqLfG/LHzEB9eGiWWVKasFJLlBlalPnMmsKA+kgp5oQhalT1KaWkSRMLGIJFkf3wo8c/+/ST5y+eWGdj392t969eX7+53uy7IU4sGQU5pF2hzWxeFpzlWZ4VzmTOGkQQYTbWsDU2s5lhLDNXWSoNsGpKQWMEUUQgJmYiRpNZAnSMzohBzYwT8ArMRMTGWmutY2KVmKLXHkRB/RCGQcKQggcE5xwAIJkkMQWfQpCkKAKEbCayERIhMpDN2RRkHPEBMDJsjDFsDVqHWU5EikSaQKKigrVERlJgAxIS6MHdD4FBIiHBlDGPE8sqQUpICSUeIiFEIAWUCIqKZtLAPWgYmQwzMGvSpBgAEkiSlFJMAJQkhZgICQQkJSVNKQmKkhMUBZ6EWMwGiAEZYNJDEymxRMQwKblUhabVMA4ESTWJJCYSSYYBDKHJlKyQgaQgSWUiGU3jilWSJBuiCX5UihIFJKkAI6AkTDwhNwe8hRQwCQoCKoggJE0giOhUfAQQZVUkdAfHO6GYUDShBrRgMwZUZCQ25DKT58B2ctvEqY8kgkiTsTgBENIBNNLDhvX/BxUduaf0kGImkzE7IaoAaEoi6aCMP7CHkiqkpAAgEx1pSrA8AEdJ5VDYH+dsAlACtMYYVPEDMQMgsemGkcgI8IM+FWmKlMOYPJDU8yUb2u73cQht049jENHkExlsmj0TQErAUJhi9+by6tuv83KmIfb7wRnOazMS7ZuubwfKTAhp7HpDtnJ51w/NtkUyEcAkGAGVOpNRntGqzlVwlzAvitvrez9GtaQENrODT5c3O0m0vdpYMowYIZ2fVvvBpx6HfT8rzMUH51ebtsgtjH7w4WRZzTLru+FkUSWNCGBil0ZK3s9nq8a342Z78eTFm2/ezBjnC7co5t3IGmW9b4cq5c6Mvul0VxblcL8bwlhUpUU+OzmzOl5+d0mbvlxkZWavrr6JEiR5IiA280W1a7qqLjNDo/eEMK+dmVV9CrfNPmKRpx5iRESOEuNNxnCxPOsx08g3l9er0zmgnj5+7urq9u1byyag5pXpdoOIoHW7IVxfb59+9Pzzz7+SQdjS61dvnlw8+uiD55fvLlH12YePBOC7b2+GEIzjYUhDiJ99+aYu8sxyUonx0LBCACJUEUZCBFHRqJaJUVMKzIiITGbiEBEzgDAxIiVJTDxJKol4csYiIsUkD55Eh+NPmvaGJELHHGRinmxBAUC8N6j9dl9ePDF5/t31u/m5vW6+K88uzldnXbtr2/3TJwtKpjp7HPB8VZ31w5dtv57lELr4tM7u78fb++unsw9P67OQ5Ntv/zp0bbmomczcFd062ijPTi6e1Mv7/dVXV39RLTKjaeyG/a5ZLhdvvvvq7OL5bGYXNftYFMY9fvqDbu2b5ub3/tEP5vXpL//8lx/+/p9Ej0Gx6T07l+du7CbzeFKwjx6/GKJ/+dJI48+qsh9D142FLZqwr2bluO8hqauXbYLzD34yX3w8xDsJrXLIMy4rhK23xlibTFmb/S0Aj92YLTAvy5RCP0ZF8H5UCuXpKs/mY2//4Gf/Au+7v/vLv/vm3d//9Oc/qasnlnloNvPZ0yKTPhZqJDCVq1xTcAhjo9UqA/Qg1hQL5HoYqchz3955GGTMinpVPjrZXd4rpn5Iq7Nza9PXX3w2RPuTH/2YFIehq2c5Flw9Wm5DO2g6e/x8c719/tGjv/z3/27obx7j49//J/9x5er1/Wd1/nG+WBSDvw/df/2//V/DeudK9Zz//qcf/X/+33/18Yvf6T1KgiDi7zoC4kkGZTE31O2DmjhfrvZ37dh3ZHhMaXFSlIXpx87mLiFOKCYRqsRZZUhx2PuL1QmENA7eLIvlqozbsL/crT+7vHj6wfp2WF5UpgIkiJJ+/JMfSkPrr9a8NwawXLj7m28IH7MOr/+H/9tqq+bk5/aneYToTmaQ6+xpmVQu367VADv49Bcvvvj8zcmTZ2yEVZW07/qh56zIidCa0Da+qItylkegmMTNzayyY+P3/f3vf3jy67/41d/8u3c3922xyHdX62kWhCjIPBmfVnXR90O370Ehz3OJIkmtId/1+13TbtuyLFxVADAVi6ZbW2cMkrFWJBRVZkzW9N3QN4v5kq0LaTSWlJgI8qLs2iaGyIoEvCgWhrEfQ8TWKBTzOqQYRrXsJMn19Z0fQ9+PczOvylnfdf0Q+r4PcVyeLPfNHsQQ5ggEDMY6dgaNDt0wq+rtZpMgrmZz68rZbNHyJvrAnImCy1wSGTufFbao8rRvrbVh9CkGsjSMPo7BZsV8We32/X63y50hoH3brU7meekJdTmb7ZthVs02232MqXB517dtaFZnP+h8ICSfhsV8tt425ExZFo8fP6nmFWfgvXeceQCfBMgwqKLkLotJQBKhI8OYeNc1K3tSuKzd74IP1ezU5Xx1d7uYuUerpQ8C1iHhbDYLPjGCQGJrQxxXy2q9awJhAljUeehCZuH8bPXNVRLwUcWhMWhCGAlAU0oijIjMKuLH0ZlMJFlnLLPElIaBjT1bLtt+13ejSgqRd2NnGdshna/qnIHQEFpNqAmKKlvMCmvLIQRFToBVXSFYApkVc0aWoOpF4rhczYLP+8AEiakxlmI6+PZqTIayRb66ur5OIc2MrfJq29ybLI/e7LbNycmFJbNpGuTCshkGD6pEdui7vM5ms2yzXc9WL/phJCA0Zr4qN9/t15utK5fMJsSeHFnhzBo/jlmdGUuzWXUsVfR9DSEKhHqkoasqHmI+jv21Y2EDU/ULeiBxHHeWQyF0MFw4nq5A8HtFzMEt4L2CYqJ24BSUq4oKAqrTIfUYVDvBRtPZ
Ut8jA/rQt0d9YIMjCSAYE5L0Xd/s2uCjQURCURn8eHa6+vvPv+p2HRBawwipG5pu12zQzOaL07MVSNy3beGqws67fVMWzke/OFmNY2uteXd5oyk9f/GCEW73O2Yex1jX1W7fMkLX7sZhyFyGSiKQF/nQDSazXpKP3vuUOWetHUY/m83b9l2z3SBh13ZEhCAiQgKO3KKe7doGmYoiG5u+qvM0+qhQz8p3374qq5m1bI1LElRSHGNpUGLMqmxZ5xhTnjmbWXac58UQY+d7Z01Z5MYUIcYk0Rj35HyeW95ux7FteFaVho2KI3ZkwBGSrOqqD6kLHhi2w+CFV+eLq+t1XWYxxXHoc2Zn86oo9/tWE1R1jUgOY0QNceJGAx864od0VkSYMpKncCVmZkbDaCcykWVnjbXGWuuszfOsyPM8y/I8K4rcZc4YZ9jQJAh4kDEeo6JEBfXAxpjyzqZBdTDLPgzPQ3XyH2gWD0/GaTQLIGrSJJJSWm/u1s2NycAEHpL/4NmHi/mqa7fdvnWuGH2Xmbzd984aEuN9IkRBIqRJYyuH7KMDOAQHGGvCiRT06It8eEfvn6YPljQTwCQCCjjhWapMCBJABZEiysYPyEYTEKjV9Gy22Ox3k/bFAD6+WH73zdcfPb0YupCdVsVi3o1NP/YhBWZQHwzIk5PTeYZ+2DMDEWxuN87R+m6rlNkiD34Yuj2xcyaXhKqx3XWiXC1mzbYJKc7KSlG3TWe41EGbfnNy/hgMrNvGzla/fvVNXs9yCx9+vFqWN7v7zccX1cJaVbvI8t9ebwmK9XqUvrPsPnqUxXFEDClmIDZo9EMwJrm69Pu9yXIBsZnzyYMKomjyY2BWtGxiimnoSlf4vhE8AceKEKP0w1hnubWc52WIo3Pa7eLm7t4udkU599atb67ni4WrSkinw+gx82OCsix0d+ds5uoFJE6qaCtlBBREhkn7MzGFGKbF88hVe8gW04Nq7D2q+TAa4SD0fe/JdcR4HnD0I+ZzpH8eQiHhAUt/sOU6PvM9SHT8fcerTm/isMofpGkPHCeAZW6NEhMgqkFyztaZXRQuM2TZoWjmkyFsvccknpEQGFLF5qPHqx88ffzs8VNB3uz6u9v1b75+983rq8v1rps8sBV4CicHKKzLnXPOuswVee4METMBJFVrjWE2TICaWZuTyQ2wxpRCinFClpnZWGMIjMU8N4xoyRjCzLIhQJQpngvQEk2AQExxjGkkJFACERSZJjYCTXLUFFNMKQ4REiCiQSYCZoPMbJgYDFsyOZvSEBGQYSYABmXCKQRNyShNmyYpkk4QDEYCkDgSBDJCqJomsyacaLkT+VERUFElieAUEgIyGfv7ybIE0AIQkAVJCHz0E0SaiCnMbG0IkRAQBRVBJ1x+yp0TUEmKzJSCR2tTCmwNoSXkQzCFoJCKTkBVUoigQkhTuhIZAkAio4KsfFhsCcE4MA5sJWhRhVKU6BUOihZVkhhSIIoCoKIYJCYBFUiilBIkDyoiHtkBqJIIJiQGVQGNqglUBJVUyIhOPv4wYT0TwAPECUSjZ0S2iVitYbYWrGXrlCZ26/SXQoGESGzJWEY4sIIIIYmIJJUHYef3BGjHlAMBONi+oYIckX05xGNqknSgFglM/YVDbGZK06lp+thH3yMlUgBlRkZmFNTkSEDaLFNNAWMa2oHBxBCK+RKti6HPmEE8cqGiKSWgNJ/N/fodjK1BAo0xjRp9FHHo/OiddXGIo7RP6np7/6aaV+XsbHt7k2dFCAOIDcM6pQBgJJIfR8ZyGANpogRVVgBTFONjUonSYSXFftcawsU8Pz8puhCXy6LrwggwjF6II0gYxY+xzIqyctu29SmWbWqbMYm6yj599mi3a6qMHabTed4PRpIaEdaUMw2JrM0zE1E4dzONaIyUJ7kx3nKmSNudJ46Lk3rf9BkyIAYJzpIkBbbGEMaQRLdt/9nXXzx9dvqLT19+/vkr6VT8iNbImOarehh8PyZ0WV7VIaUQxiqzfvRAUSm6svhw9Wi9a2JKdZlpkhC09cNooHvz6snZE03JiuTOrZv7bL6arR6Dmtur12Vdth5kbDUKWBm78Pbt209+/IOXP3vxq796bcVsL/ef/d1nj//5H3zw/Okvf/3Fanby/MPzru+u7/YE1oMkH/b7rmn6WV064yRFQDSGY4xRVFQMT3mmgoyKKpoIMWmybI/uDCAiSDq14Aj54YhBQEnjtGQwW9V07JNhSklU6UCOxQlDBTx6TAAAABEDaVVVy1XVx7W1yYGb52ft/r43NiuLx2enFFs2TsQ+uTi5evOr7v7d2cvqq2++Tgmu3v4mxvGTHz0S7L764uuz1dP5rBjd/Pp+Xc9QLe7TTclwuvwh5xJjmi1PszxudrfMpqgyb/jk5MU8r9ebVvd22Gm2WN1e9U+WZRZnN1/fbavm5ZOPZkVNEXb3LecuKLAtADwbUMShTzZ3Dm2ZW1Mma+zYjJmzgppI+qGLUWIfssyllDbtPjPuxYsfDn2z2226/a5rfGZM1Dj40Rm7uljt1pd1no+xqWwOiM653f0mc3YxX/Tr3hfd2+++/Gd/8k/+/m/+5hf/+Hc/2X6wWbdf3b57Miu3r7vlyUVd5S4Tid1ufWfZuIxT6Gx1npIHDKHf+iQGgYucy7LOoMoyjcP68qaaxSy3tjh5Mb/YDveuyn766e82W6nL+s3bb+uqni9W3Rjnq/PLL19X9WmMEUA45/OLxcuPf/j23c2r797++JPi058+XubUrMOf/g9/9V/9b36B5/rt//fPymenbZXOGP7z/+L3rr57ffbkLJ/lb692Ls+yChRjXVfDbrw4n715+109XybvU8yABy54VZbjOO7GQAoWFVOSJEgmeSFCwzh2g2V2y3z/TRuCOlaxSStz/vsvu8sOEf72b/7m9/7kD22GwEyZCKA9yR//+OMq6p//9//nP/rjf/rE0t2//Ffd/f35hz8v/uSPegU5hdhETrD95tIVBc2q+aNH9WLx5Wev9s3VvF59+evLjz45a3dtdVKsVrMQQdTe3d4/fTRjgjCoVqAary5viuxx6Uy9zJum+cMfXvz1rz6/32R3G5dlJ/XiMAuKPB99jNHHILkz0+5tjUkxMLGzqKT1ok4xWFsysk9DCFCXVRe2jIZYm92uKmtjsNeQWeNsbYGbENiAks7yXFXGYV/mGSKPflBNgNCPqagqSZEAIEUQVB+H1Hd9N1/UzuXWlRmjD+PkUk/oCMh3PK8eg/ZDPyJQ37dlVTrOksQ8d6xpVmaAdlZVu33XNJsxhNls5kdf5q7ZtZKSYbPrurI+I2YkHAbf9f18PgMAssblBlCUkjFcukwNd92QI3MCP3SBKHd2Xs5DHOPQMzpEWW9vz+Znqb10NougIJBlWZfGJPH00ZJYQaXIagIrYxRFxy6aUQUVUpZlIImNiSmWWcaSwtAy+fnMtr34BMQ8W55KCDGkoQ9dP8xmNSo7AyGMhtChOV0u7rfNLDchYPDJICXQqrDzKhPEzLpgTIpRJIqKcbkEPynKSTVJ1BSYaXL1sJa9iKbkkwx
xWCxqEcOE4pOGVMyrvHKqAlFkTJCb2WJui5KsMSZjMkVu83weYiCwSdGgYc5Io2CsCqf9AC6/X6/DGC0ZFqqyuYUhJhm70RomcvtuA5LKzHmfmt1WVLebnRKxrW1eGiYFnvpvyARs/JiqukwyPDo7+fsvXp09/jCva4dtQPU+lAYF0n4cTpbzlMax91VuTxf13eXaGKJq8lo9VBKTLIdoalse8SBVVHxfuRxa2nooX0X0vZjsQeJzqMQnHsUBfnpgEh2qpXQsUA4VL9AkZAMAFIl4JIpMu5VIeqiwJlrI+268Tp3AyYgmqUISUYCYdAwxgjZNu2+bdtcE7wUhjl4VjMkub9b7fUOqBETWMsH+/p6sTYpks96Pg0afMLPZ/eZGGcqsJgy77W1WuM0Om66r6vrps+ftZs1MbMzqZJ6CECAhtcMAxMaaduhiDIzETEU9i2EUOYSweB8AiaKS+HH0WVkyAhWm2zZ5ciGMmMHZxUm4ij4qEWZl8ezJYzb026++0THkRTGMvYrLc6MKPsZIxpZZnbFh9cEXbC1nMRGIVcrYoAhEUQRSMmyIgGnflFnhTsg3Q7fvozF1kecSJEoUBKIMnWHQYUiIhLztwqgA1gPrMI7VbJ6AhjGEJGerxYsX8+vbuygxDMmVFoEIkJhFEiKCymRWjYhMUwUHPFEDaCLemQNClFlnjDXWZc4ak2fWuSzPnLU2y5xx1jlneIpVmpg1B3RlGkuIB3f2g84AjzS1g5wHjsgjAR3y844Fu8KBpSaKE+9bJ66aT+n67mq9vQ9hTGOCEU+fnN3dXavE5dni7m5TLRaQUkqxqGYxc9tuIxoJafRdZjNnzOB9msoNFZzyfhgRMIkA0AHbPCoe3tf6xxkFAKoJJv7VdM5DTKpTRq5lFgWv5AEkpaTJkM6c7f0wpohi+jGczecxtO3QJYF6viiruYpE7/3QG2sAIaYmI7uoHKMnlczAbrNF0MHHu9vbpy8+QpHofeh75zJmjG0ffYAxMVvRhApGta4XklLb7h+dPgWwApl1OSTT9zLmWcqXXRwu181PxljnvB6aPi3ni7N3310nn37yw2e//vIdEw9E74bxapPOn1Yh9V2/r8pF222TH3noaCacZTqy+EhOiJJoYnYpUhhFJdgsjyG0zV1dn950PkxpDH5wmbu5e1fNajRkCk6hNIjLM9jcbLu71848XZws3n1945sdagohSApZXtrCxnGr4AzOQXNJDhI5ZxFYAOhAGsBDMXqQnh3/O5KBHuDuQxv3sJpO4KUcCEf/kA6ED9Kw7zmvT2smHNBDOji6PMjW3r9cH+AoeA8SPTwwDe7j+vlQUQAAwMW8dMigyRmyzIYpz4vasSMYx5CjLQa0KgXajnjnoxexhpaL8unF2WJWYBq0x269/uarV9+8uXyzXo9R9NiZIUSGScXlnDXO0KwsrOPcZQqQEVGe5UVmGK1xYKjMc4eJ00CYFEFQnLOkhMYZa3NjsgwLq4Ri2LEKEyB4jUIGUUGJBNBHMWgBAyooGFDSlEQ0hkGSpkkLJDKlK2pkA2QsGFJj2VhGImQ0Njc2By6YeHrMMBIj49HVB0nZHFm3QminGHWQADJQGlA8QCICQYOcHZDElEATiJCqTv7ZGlC8ikBUVFGJqIIgqgMqIBomFDWohEpEaCxHRU6qItY6VU8JNSWEyZNkAhZFVVIKRAaIgIEYFVQgIjgVAQERwgnP0zRRgBEm9XciZGAGPOgSgRCZFBSZFQ2SFcqImCRiRDUIhMAmkUFFioFG4jFCDBIVIKAmBU0hakwxeQSBQ0CZJBRhVsKJFREA5RA7l2DqHcEEt7OqILMhTgrErIbJGiA8EKCn6YgTWdUw8mTsmwAQiQwbZiJSQFVhRFZBzCb+xjQLjqwixSOteQJ6CY/8QZWkMlGSQETlGCKV5KgdnN6tyIGWOnGRAETBEE+uw4jKhibkTZKH5AEFMalKSqrEJq+Ac4GkqACi4kEzIEbEql74fuiaxihkeXk5tClFRLaZYc5RMCURpbpcadT9bvv0wyc+jX30gWBU5MBBuZrNUmx631a56ftg87IPIwA4a0Qgd/UgfdREaGL0ZVUK4roJKXZFkRvjqpnLyJh2uNt2wxBs4UqX+873AbKsyMn5Ifox1KuTFmnTDY6gMnSxLDab4evb9cc/+NFutysXbrfr1cnLD37468/+/ZPTJyb219fXi0VWz4vtdm0Ms3Xr3SAS1VgsSu97g9wP3fm8SMko6L5tmIxIOj2ZAZm3l9uI+bNPfuT79s3VlSiOChIB2ICTdhhQ1BDGGOZlVc8zl9txlKbtkIUkxphi5rKsCMMIqsTY7fydbJ49exLDsNvfVvOZxGjLs6cfXfTRr998xYjOUQL2SYoia5q+a3YvPnz261++k6DW2ru7zW//7vMPfvx0/irf3N3PTk/mJ6t165vt4LLMAw/es6Fd6wGJmUAwSiK2IEKg05ENFZlRVYgMIdJxMxAVYpoyXgVEUiJCBk6SEEAEVBMhiQoIEJJoAlEBNcyTOTtP1nrHbp5qOlDCATbrrpov2dnb2x32PWjs223JTmPohtYu8iJ3kuLt7bAfwPObxcksN1XTjHm2CGC4npPx+53von/58e+9fftVUchmGDptKJo8K5b16aP5h6vZ6v7m3X68C7H1oe18E4JPFqwtHBlVrEze3Xfz+mQMOObhRx+eX/9Pb+bnF10TsK6G4FNQUzlhBc8hpaCeVFXAGnRkF8tls9sIZU0X2LqokqQXTT4AIdgs5yK3KGHcjOPu3Zvd4uTZ/OQHReHvt+8ijMbh1qvZtYu6TAmBCBGG4Jm4H4LlbGi3MCbttyqNYfPvf/PN85dPry6//bvf/PZ89WyzkZ9+/MiC9Jt2ux/2MRSZiVzl5bzpGjal47Rvrk3GARIEtkXF1jT7e8Zk0LTt8OmPfn5/v3FVHSPmFtBHIhcgdmHgAVePnhrEsWu//uK3L15+dLqqiuXiuzeX/+SPfrZvx7/4f/3V7/7Of/Pn//qvf/I7P59fVMW7d9dvX51/8GG7vhy/+fsP3Aevv/vl2T/9r7U8+4t/8xkq1g723eb8w7Oc7Pq+yWoma/ouBBGw6fnHLxiojymyPz2tej+SFTttgQIh6gQvxqSIZvRRYgxDcobiOkBuqpXBSgOksU0Dh1AFY+H3/8vfBRfA5rEdU4h3b9cG7On5+dDHn/6zf46nq9t/+2/P/vhfnF7f7q72eTX02LUDLp7Vuguc1zwrlSg3tSf54Bcv/DC0++7xR0u2XJ1WzqDN+Op6vTxZPXm+Qkg5snGubUaR9PzZWV5kV683xrnQyV/86d8uH5+u5qvq4se//dt/4/tumgX7/R6JqrJISZANIzLROPogcVZWFqCPfr5YxJS+ff3OM7vM9m0/K9CkDEnzqoxjCiBjAD/6LHOANsaoEk8vLoahA9C+b7KsTOKJWDGqMhurSF7FkEFO3TjU9UmeyTgMJ+eLcQyCISUJAxjLox+F1eWZD2HX7tXvz+eVCBBpXldRYte3y+XJOIy+b0OMTdf7AEjgnEHm7W4/9m
ORM5EtZwWCUMi6vk8xbJq7er6sqqLIMj9EtkZF1rcbznNEQmeVxGaubVpGU82WIQZBv27v/dijaITUR2+MYYR93+fGZLVNKmPwCq4LuKxL3zeGLJNTZDCacdn1O51E6CpJlZnZFD6NqELkvEb1mJeZtZlAIEDDzM7OTHa3eZ3XM8POjyHJkDlrjBHEsqp2uw6BVAOBDF1X5hYgPjlZ/OWvvuG5JYUxhMlVB4EBDU4ZrAoMSAgqSVSDaoheROqsZsSIDJAVeT52zcmydo6BeT5zlHzfj7OqcgRguK5WTdpHQSZmkytmyBAkjNEbQxQxxWgtI9lmDOPQB98XLmv2LTvMS3v39t3Y92We910YQ1TtYxSDcYzj4D2xIWYBFNC8dv3QpuTLctl1bTUrs9xtr9e2tKBoDC9ni7/8zec//uELAzCry5vb/vHpyRpls0/rfvjg0ZmmkZAX83JsB2Tqm/7kdHk8ER3IC4d8pWOmmB6kOvhe/6ATdYPex0mBytRJI5q4qwgIKEeQ6H2T+1gh4YETpKoHu8mpmSdwFAsdf/UxTO2BgvS+FjpUWQ+EKAU5FFGgktQPYbfd392t79fbMSVBEiRmnJhPlvHq8h2oIGGIKUfqmr4os7LMh73vu/3jD0/yWeGFK5vdrt+dXpy3bZdRNAC3m+1mfZdCePrs6dC2bdvMVrX3mplyv2vv1vfnp6uul4GjzcrdZnOymN/e75zh2DVj8CkEyyRH3KRtt0hGUZLIMIzlrKyyjBFtnu27gY15enF+fbvtfM/Wfrve/8Hv/rEU9btv3+z3TVkWvQ9h0y6LLDcOUEUgRbGFs0B1WZfVbBRFBlAIwzDdpZQghmCshaSGMlQFDPOZS8TXMYyii8y1w96Ra0bPgLkwRiWjxGY1K682bfIpz3JjcbtvHDMiZc40+y7L86cvnr97/dqnmCKyYQAB1RSngpZoqqomMxIiZ5iZnDXMbA075iyzzrk8d9Y6a43LHDM7eygsjTHWWjaGmJCmynriUk9GWUem2mHATNbUD/KcSc3zfmgdivMDiHQYYyKCdLBzTKIiCQCDwH4cr7cbtDSsowZ6+uLDXbMdgy8q13StJrDo+tQZmxtUHweRyAoxxcw6Z9wQQpL0HlBFUoWJJw4gBjFhiknkgHpNn+SIFbynqMA/+Cg62SxRSkoguXW99ykpADIxqszK5dX+HtGGyM7Scpa/ffOtyni2qqt6NsZokmdAUEiixCRIVV1WDinFOHjVEGPatbu2H7KyUpTt+r6uq2Y3PHp2qkE295uqyvMqG5pehyASy5ktKu58b6qcc2584/JKkYIfzs5Xn13dIIZEsh7Tm6u7p0/Or262m50vq/rFM/hu14pvPz51MYrLqi7Z7+7a5ws3y1gFU0psjGAMaeS+Rc7F5Dafd8MNApIoAo0JogbRSINXziF3Iepy9bjt+9IZbdt6UUnI+mZk4zkvsrqQxhPnRQ1JY9vs5tXy7MnL7dUrwY6AleztzV3p7qtVNVs+9+rBj9aQzebIJRAhJFAATUAOCOV9JNlRsQIHK/UH1Zh+b7g90MuOFkV6oBfpwQfuCPLoQapyhMoP4+E/SE/7/s/fI0ZH2uWDUE3xYB93iGcCIDgw7AAA4KzKHBkmMIy5YWuJjXEErFqwyQctLFQOdwPvhphaUJ+QlKwblXqg7b7Z3a9fvXr35dvrN+vdCAjEjpmYJqvlqQ1AzNa5oqxcVhAbYgMglk1ZFa7MijIzNifCAtSF3mmEmDQlIiJEZgY2ztnCuSIjy+lQ4ohqSjKZ1klgQaQAKRHYFJKGRMiWnEqaWCASRJVSwpimPmhUUQYmQkNgHBMpMhMzW0s2A+OIEDEBAKJBRjRMmUVmYJq8fpQMIirQhBZAiqgRUkRIkyUyIhNYIVYyognZcxwVBCbNGghogNhJEkmCklQTIouIKCVVwaioSoUoALoprxGIiA2mxIZYCAMiABOrJkAUBlXUKIYMAjIzEiiIkggIQJxwmZjQEDEZIlCCwyKIk5sSo7HAloxF5AMiA9PARgRgTKAJUdGQolMCJQvEIoqQIIICiIjGCClq6A5yLxXBCWFJADiBL5O1ikBS1kR80KCrqCRgBNBpuE423EhsiCZhLyEyMRKrTniXYEpM03HmwL9DBGMJifiAE6kIIAMZY5BUCY85GUdW0XsSNT6cP6bRowKgGpNMW0WUBKoymVcrTEjRdNY5OBUJAikiMDERISEhGMOMysQggRGRDMQ+RumbTidCblYXeR3TXkIqsiwlEtWkUYiL4sSPO8OGVIIfs8KNvW+aLstyQhxCKsocUJBl1+5BODTx7v6mHX1dLClw1zSgAM7mtZNmIJQEvvOjYxOVUhy99zEkR1hlMxDZjZvaQoqgys5YRN73vSJY6yqrkHN5fvH1zV1QVRKVqEiavMhYWzPL7aLIUaCwRfT317urMl+UBUjc1YVud5sXz56u95tu992j05qxVfJ1kfsh3QxrTYJR3UzrjAQppTFFPDlZ7va7ajZHS/3oIQyYxDqmKHEYiN04hHevrwFuz1fFxaqwubu+TRqjte7x+fxusx7bAQ2Tyd7drF3mVovSsFksch/8ybJs2nEck0pIUWMYy7I+qYv7+32xbrmQ8T48yYsQ95gtzj758QcZt9t3JmG/Hfo4ahTOeL3vLl9fXnz8w/r8YvvuGhXGJr3+9vLi+cWHL178j6/+UsmeXazGFO53r4euB0VnTZQkEtk4IBJRVSZi5oPYEVUQgNkCoiEDqoicVBDAGCNHW6ypeaAACYQOpyNlooM8UnTiNKoqE097C7M5JngqESMpk50gcACIaqqTZ5XJTytzte9Wq9NuXPdtoNhWpx8kP+zbER3NF1mE0GzfqWxJZNiHxeq0qk+W58uvP/sN7bLzxYu4bzT5sR26sZvPXvhwm5QZa/D1bktv3t4uP6r3m3VkHJETSJ0vy3J2efWutIs8z1PwL374u59/8be63f3Z/+P/KQN8NDs3zs7PTrLSbG/3SSMkMIpxCEDI1vjBF3lGQGnwJJZNFdBH9e3uDhirYg4KRKBWEqEqF/Wi3Qxju7ltNwjl2aMfnT954UzX7zd1RdD36vs8r9u2EYhZUShiGL1RqGbZrm/cfKQP3Mcff/LFV+3f//bt8mL+4z/4nWGL/9Hjiy/efCbkF3WVPIIxXLpZ7gyMbj7zfkzt3lQnYdhltnJuJokJKDPcN2MIaV5fvHm9OX30aHu/HXtvTV4vao+hD/3Zo5Pvvnx7fv7Bzq+fP3n27HnOhInyrjPvXrfb2/V6t/+T//l/PkJ69uL5kMx337ybl/yv/9Wf/snL/+a8sN/+9//Sf/f6ySlB/zVafPbxB6+/ujt7VJIzfj9qJqia24KZNXqIYezAFUmBUDErrU9RAL0PKpJZEkyiEJNUeWkMdINvBr8s66IoLOPmshubQTfJizz+4SOVttnui8olwKKo+873G48g8+Wi3yYYe9V2fXM1+u3669vi5KP7v/5198WfLbLluzf/49P/xX+1Ge+5fi7dmNcnorq7vjt9cjoMyc1tPi9daYLH5
n5cntR3680Y0sX5ygdh5tB7k1lJoSwdRkwoPobTx4uQpFjY3//jT3hxJlCVj3G3+2z9LkyzoCjyGA7Wk5KSQJKkCloWZZSEAppk394J0NnpTFICoEfnp6Hr2DAhppicM2zQkKmryo99XmWR1MfQ7vfeD6yUxNgyH3ynqlVRhTEWWb0fmjj6JGLI1NUsJN8NTWmsZYaMDWdN2/lxcNnsdLFomg5AFycn95tNkZkwjqopBrRsSGkcfd8P3X5dlPUQo7Gm63pjiQwSGtXkrA0htu3mo/rFbrdVI1VdJyrbbnu3b06KUyXXdruyLMqySEPuOL8P2/V6t1jNSZPLjYhaZ0ykXdftul1e5HmR931DhE/OT8Pmbe5mRvtow9AOqHpaLzf7vWMLWeF9hKRIqgACGuOoYAHFh1i5WZRo1KABxFTUdezuXJ6JRtWAonEEtkaT7vx+sZwpptyxECGQM84QRulQNc/yXRsQsZ5Xgx+IkC2ys8YZm+fYd8ABNaWYJIUpzhSRiA1IQgCmTDH6GA0ZY4z3oyUq6lm7vc5tfrLKKW1nxWIXwhBSkdVnyxPD0g59Nu77kM1Pzyxk1pRJdN+1ljnL3bIoVBFSrN08alhvbzeba0o4r+d90+e5dZbD2CaNYwxM2jb3bGvnihgaY0CBvLBAmpW1dWxru1gUbdsTOEPO+52GiEzMZK3rhsYZ8+FHj77481+OzYWxFkBS0EGjMxCHPo6lc6f9rrEqWV6WdSGQ2qarZuVDTXFwdZnCwyfuwpGdMXXUpkbdg62miCIqHrK4j3DNVNhMrXPRh73mKAiaLiVTIfxwXZwyg6bQhiOX9v1JbeJ5wDGhCmg6ecGhFa8HgZuCoopqjElBU/T7/fbu5nq/23Zd62MAhFk92+3bFKOKjN7HOOGDKQvBFS6JDmMUjOxouTjtN3v0JGp8N8yrxW7YxiB+lDDG27tNVZYvP3h+fX1dlOVivlrf7iSK92Ne5mwopZZRvQ+ZzULwiMoWu75JKZW5G3xAYiZkwBi8qlpLRJB8qqyLOEgMWZUZ5r7vCPFnP/zo81dfkLH7/fh3v/yLn/z0o4vF8rtvvkkSttuGWL0kS4KiUWIUVjTGYdKRuGKj/TAYyCV0Np+llFASMzJj13aCUlZ1s+ursvaxyVPY9v3I+Wo5s5QS2iCQsa2Wi957ZsjrvAths++bJqJlUdh3vXW2ntUIcLfZd8Evl/PS2uC9ijBRSgmZUBMq0AHhIURkRJpknEzWcuZsZl2RZ865LHcuy6bUM2PYWmOMYWYistYS0ZGhdmClKQCoCAnAFHcsBPQ9w5fD6UmPICY+UHWOGqHpKyVBZSBQkQmO0aTAmFLaN+v79Zswtuo9YAbi7u9fnT190rfbpmvrugbQMEZjXAg9oZb1bAwjRyyrOsUQhx4fsDJCnUw5AFIUZmZmiCAgogmJDu6yE3AK+sCJOqhE3tcuCABMGGNyTF5AARlRJCEgEt51O1FAID/GwqJzZn27e7GYvXz2VElGH2c2G8YkKgYojTGjospLA6LBS/TIZtt0by8vT1cXCWXo2pOz5fZ+4xMgAgrudl05m7NxSZrMuW3X5kWd2+Jufc/WWUsxBuuKmBRVlnXRf37NhW182A/6t1+8efHkFydVbLaNddkHL8/+7Wdf3a/3s+XyzZdX1TKP4xgK9rHKl2XTdJayPMvJpCGMMuztzAVns6Le79cG2BZVt98aa8ch5s6lGAl99B5wLOql324zImvzruuHkEIcXGzFq8mK5DONUs1nKY1haPbry/nqWdGe3Fy/evz0WWGLm+v1KLxSHnb3aE8QeypKV5ZgrCpgVIQEkgBk8mSHg9P/lK5+jPrTI0dIj5qz74tmDy85yiUPqM/hR99XrB1v+pEDdKSbHYrgw/P0ey9ROFA1j792mhcH3yJFeLChO+rkAM4XFSMZRsOQERlWZjaICMqgsaDBh7G0RTMUFonwrgtdCGFor26uUMXE2HfN26v7y13bxyRsDBAhW2ZknUKbXW7RMlt3kJGxIUQmmxVZUZZZXS2XtbXOgBjfs4+xH5JMCgo3rV3G2bKqZpnLLDJFnPINgiYFVGRiZCQShKCSEAzJBBhwpEAAE8xHqCFiCOCjN6CokQgNGyYkRmYio8REzhpXsMmZmEEZIpMxltlatgYnXQgZRD5SuSYYY1qhCIiBmDgjSDCRhlCIOE14SgSViJJAAkx3RkSTqkRISWIE8QkQ0Ag4FRVIgKI4CkTFCGBECJSYSdihgBgwrIYBVFBT0gikgsKGJkfCgwSQSAEsG0QR8VMoXYpBLIiCIishKhKTEiAzWQvGocmBWZGnSEtUxeApBpQEKMoGiImdEE20NU5B46BjK0Ofhi71Qwx+Wl3pkE4LoJokgCIIgQIKgqookjISACYCnQLYEAyQTYCAjDidA5VJiVEIURUkaYpKrCKkyqqcBDEhmYmUNLW66NjimqSAk7+Y0uRuRf8AKjoswIdvJnoz6sHGDkUBYbLvlpRkajUcEFtREcVJuaagoHQIIBciBhREMhNBDCWJMuhkO9P3MYn4mJyxmpSLjDJroyOsmFQRQBBUM1f5MI7jUBZljDpfrq7vTYgCxDFGooEcZot5GNrtvhvH4fGjD7yPzb4R5cAJlIrMxUmSmLEMgAkKazQGZnSMpijW24QCmSmarjEO8jJndmPX9e1+sahsnhkyAOSAq7og4FFiVbi286cXM9/7/W6MoLPSzWfldnd/6s66LvQSNMWcKj94JOmHLrdZWRQpRAem2w4CqQueEQ3bEFKR1wO0yfvrq3tburIs1LCEtG1uxnEUYiyyPC8mrLGuqu16Y9hhVf/s01/sri93d+vRh6bZk2VGnVVV242b9dYyuXnVND0pZbkNKbXbzpisnmWIOYhWZR0NRB/YAdsiiqR+gJTaprmYnSDC0A6ri5Oha+7u70/Pn330gx+/+/ozNJxn2dD3QmLQvnv1+vmnP/qP/ujn/+b//mepj9GHfeu/+/ztyx+8PD+5eHt9n7XNPDOPFvWby3sglsMIZEI0iOwsGlZAFSGYFrAEiJMBvSYxhhhJD4sfqB40mcyGAAUEVADIkEkSaWILIeqRhm3IJEiHVBo8kPydtYBAhMYwHCfDk2dnN9fXj17+MEPy++H8xXKzv8tsiSjsMooI41588n17cTLf3O9A0vnZ88t0b8qqi/3ff/7KN/2T4uJicfLZ69f14gSNV8hPZ+fjoGMMzW63+OgUjbl4Wm/am7C7b1MoXSnoi0jStB9ePPXdmFWFsXB/9aVs7ubz6vFHH91d9ndvt+cXF1bd0MYQAJF9HJjN0A8uL1DQGsvMEgQB5uXsdruzzhEyM2W2hEiiaiwljSAgCdiYrCzARkc6bvrt288C0/mjOvVDiIIhZVXx6OmTN29exSSCiohgTbttMiuWi+522H23BRqWUX/+B59+8+rbwlbffPUbo35+Mn+7ix988vT/x9Z/9Vq2ZWli2DDTLLfNceGvzbxpqyozq7pLZLdE1yAgEAKoFwJ8EAQC+hF60ZN+gPQoQe9qAQIhiRIo0El07cju
6q7q6syqzJvXRdwwx2+3zHRj6GHtE/c2oR0IIE6cffZe55y55hzjG5+xKt++uRr74CUViUnHurJjCtWi81WVDgckCyoxDYfY19V6uXC77dXVNvrF48Xy4nwNU3//5t3ri2cn/V1vFvzoxWNLdiimGAaDF08Xh20Ak37yg2euWX7+D//q57/66e27O2vkbO2/fXN4etL94Bd/LFXz7OmH/X/9jyk5a9fjf/8ufHZ29jee1hfPK+3LFLByg6a68qRl2I3dwp6ctFPqXe1jTs2qFgYAIUVCBpBwGKrOzClHMQUQVNG28UlkirGuDdp8/mzZ34ayy5vL4iohUVIQKQUzGWBrracUJGzS7vpNXX1w+uEzfPai7B0V95df/be//Nf+jnn1Nv3+C/gXv/MXipft/otvm5//TTi1Duj1r7+IYoeSH394GsbedytHdhzTyUmHINM0satvr3Z1be7u9k3nOePrb6+s94tV23RV2I79fmzr9j/6v/63Vzf7L17+xbi9nfbDQ+mFbDDFDAIhpaqu99Ngkb0iIRUtjW8YKMbJWjPGyBZQhewcalnSYWRgazBOYRwna2h/OBC5yvlpnJpVU2IextKnoeTQ+q4kkZKncDCEhj0xpxTHFOuqXpuzMO5z0WmKKQ+Vc23b5CK7POaUcylkQ5KpgaX3LXNIKXlr0LIpEEMExaEfXOObrr26un508mR32JecoEjTtL6t8Xbz1bdvL1aL2tury5tF23bt+qTqQgxM+dGjdS4SUjxMA+bJV06TxjGM49hUpm7qzf29t5VzntiIShZBYoKCGnOe2BhJBMhsrU7FkYM0hUOPlgyilpJzNs6VkpGMdz6nRGBQ2RubcshS2BkRaesuZiFEY72KMgJaO06BCLzvztaP7rZvyHoVFOCcEgMT6qKt91PW/WCd3457LSbkiDE+Wa2mElQUFWYhDOHscIIAWFKyhlQ15xFpnroKIrKxIpBidGQ0pTKqq8lb82hRWe8EbU6xssgkKU2H+6vFumuqSgHZe0U2rIaQULMkZEplfxg3w7CrfcNJpuHQD8NJV2OS2+ub3WZjqQDj7KgQ8ySax1As28ob533tKq5sVdv+sMPsASVnnVJGMjGMgmqJLNoYc2X0xx8+vdtc+vPHOOaTVXdzeVUAPZur29uPH68aX5GWYXcwyJnIGLu9331XEeH7TmXOM0Y5EnyOsc7vzTP0fRtzdIOUh5A0enDFhoehN7y3XT1Gmj9MxeH9Q48MpgfbjYdn4dEY+wEL0CNPRMuR0KSgs2f2nE8NIjrjvJBS2e72m+1me9iMcYpTssyA1A9DjCmlXIYJmeZRo2XKKRnCnHIqua5817b7+x1C6eqL+2H75PnT7W4DqM3SDwPmm8Eb+umPfnS7vRvDeHHxZByTs5zzsdwcx5hCAAVvzZjiMI4KGlPCopX3JRdGrJwB0SS5KOZcqsqLAhGmlL3hzWFfeTKIKef9/fZlTqumSkmqRT2Gwze/+2v29bKpAWoqEnJIIXhvjcEiEiOMQ6jZFYKUEnvf1J2ZyVsChkxK0VUViIKUGIJlw2ydQ9btScOpuH4aV41L0/Ds0eNtPyAjMi3qhkRDGk/XrRYsMg0l17WdjR6Gfly0bWWri7be3V43tcGmPeymkDITQoHZnPmBIFFACUTnUA6i2aKIDRtrrHe+8t55550jw8aQsWbGU+jIZpsXkwogqM7VDCHNDLjjCjquuveco9mrERUKKsDRYOv76i4AAGBFUDmmUhc0LCAppJDi0G9COEjJvjKrevn23TdElMd0c3m1PrloFt3u5r72dVVXY1+mCWIote+adZtKudtvGJCYj/O9WfJgWASsZWtnOQtIKgAgRWYD7ALlO8mQvlcvzR/Mfa8gAAsQqDc2vVd6AlhAVR1zISScuzKEErMp+NFHH9qm3Y5T07WSJYwToBHVGENlrSEAkGkcDUAM4+XV61Iog4nT/mTd4VRu316vHp0jw2G33+7vVlOHmod4cNYZbxf1Ih6GFLKtbcgxS2q9JaZSJEWIWfdj2E1hDPAuxa+/uf3Rs/PKyLvb2y5Wv/rs0//4H/3V5aRmvT4Mh5KL+upuPz09bSvfiWoqyXsrUXKcTOiZHBCiaixJ4iggWoozNpdkjQdRiVHJSUqVbQC0AALiZnfXVk27XmsWYWQ2kFkR/WKZN/uwuxlMsz57EsL0+vW35yePa9/t7252VVUviGFy9gKIC6iUyKae5epHVaAKEhXRo33W97gI7+GiB7jnuDniETr6jm30gOfgQysL3yGDD+RLfPjMvFfD8Snv3bkeOGc4M5HwATPS48UcXbFEQYGOUJZ+7zZY1paJCMEaskykxRpjCFTFEKhQXXFMpXK0GFLjcmt5HzmXQjHsb25KiGEK28OQRK01glzbY8oZgJrKMCEZ9s4gKh91dOiM67q2srxaLrvTk0VXUxGjRTVLoTz/mNgxkTFkDCyWTV1VtXPOAEIscVItcnwxAqI5ek6kKAhKBiQmnhkscrSQySogRSGX2dLZEjIig8zPFJSZk0LGI1eEyKAEMyJLwASGwVplUiyIRTUDoIigzsrVgkVkVk0RK6Mqqcy/lAJAdByU0JEZhAqIcoS7QSVrTppEypwff4z1nKkswgmIFEXQATgtAGiO3zMgGjYqIAJImgsoGiZUEAJkY9jALHsys/B+3mCkFDUGRAWYBY+bLZgZ0bfABo0FJiWa91yEAiVhySpZVZBJCJGNKKqIakYRzUmnoYxTmkIcYymiUhBJRYkMIIMmgFJASy5aRAFFCyDMys7ZpEm1KCAZVgEiYmJkEJxhHUJEQUBmmPPEcfZNB1UpOQGi5jmIVXGeVAAoaJaMDxXPTO48nh0P96h5uFfnIwL0iAGhzjGuoiJ5truaIaqjIaPCLEGbAbeiCEqigg9xDjOliIkI1aAyAapawpJyyimOMQdCNNYulJg9cdeY2snoDGSVMh+i1hhXuTj1vqpN21Q1g5jhgClASmCtUxEmBYAYc87i/UmJeH13u99N1rgc941vu669urmvsTHWLNdn43brAdma+0Nk5BzHqnEmQs4jcK7arrb+9nanpRhb9wn7u33tvGo2QGMsh2kEw7U1ruVpv7XkG+f8otlO4y5GVzd3d7sYi3PWkK0ru91tl21rvLVIHisLtN3v2kWnjJrR+rq2bnP7puJ1STKGsfV1CGkUkZSNMatlt6gX9ze7+xjIpvOT1eGwTXE0bEuC/f2ey1fTYcCSIQuhTiEUkH5IzlaplKZxdevP2uW0CxPF9aI63B5CkEbskPYAjJRTwW7RhTI5Y/p+8o79cv3mdnO6WscQpzy47gy9Pdxfr0+XH/3k59vrN+0iq4N+KszYts3m7lDi7uLxBTHmIReBMOVv31zWi+7nf/qLN//Zf9sPk6v56YuLw5i2Q4gxIxmHnEsiJjaADg0ZzVlyRiImFiBC1iLfNz08sutV51TE40BMhMiISC5y5LWqIiEiiSgqChYinIMHRcsx7xiE5rxHNu9R0qbh+nbf799i1zVnq/vrfaPtadUuT5syjVNKbVcpe99pn3Y
4ZWAdD1NruXF09frLktLCdHXj6pZXbfNm/7ar5dGylbjvQEuwHz77sfdmOLzEGm++/r13DWRanXXaYOxTbapDTLtNz+wXrX375rc//sFP/tlf/cUn5+Y+TI+6Z3WzaH1TDskwpTHXTaOKMQqTKTnQrME11vgmSb4ZXj2pPswl1b6TkhVCiCPDgoxzrgsp5hTjUCpnunUFgoY7cv7m9Stf+yK4Wp+OIe767NsVhLHEEEIkoHbVSJyAga2hSfKY9vswvDi8+vLLF+azX/zkF7///OtPP/4Rv7l8+dWrujWLdZP6gKmMvbr61JBWtaYS2Lj25BEAsUiWgSEtTtv7/bef/uLf+KhUX3zxBWL68NlZH/p2ddZ1FwjrnPfcUN/vpjLe3d1ut98uli/+xa8//9nPfnF/uPlAzutuxWq32/1nP/m0ID7PZ3EK02A3r6Q780/+1b9Vf/G5Wa384qK/31d3vVa6D4e0ySfP6+Vpt99L3ESEAsrjlGzjkYgtzekSMRVjWXKZNnnaZ8uVcgJSNMSM1iISTqHYysQp+dYp5+WLuh7o6t3u4lGrJoHAzfWhrqQf4/JkUWJxyDfj9Kt/62+++/ztGgO9vvv6P/yPP/6T/zntw9Vf7srf/0fnn3745q+/fvbxvwlyWvkeUgQ2dm3PmlOy7Tcvb3OgR48fZ5DtXbl51z//ZIElhSRa4vJk2VaYM60W9Rhz5dr2ZFlZZoW/+GevFlX34cfnN3fy9VX6/Ve9S/tle7TgHYdgvSMFAFzUnbFVmKJqGcPonZnrrDiFWEQl5hhAbFZKqQgU632CbAwzmaKkoqrkfaVZrDEGyDIXxsowISzOzu6uN5V1gAApVlUD1hTFkKeKXAm5lFhUFKnt2pzyPJSytko5ZkpEjIonfpHTMIhFVSajyuM4WOe1ZOOcRRqGlKHUprrd3rV1B6hUVfvdjjytzk+QSQmMb84WGFK0zqzXJ2+//UKSiwJFsxap67qkUNd+0qmkkS1NWXQS71pLvJv6kkNl3BTGXLSuV9NwZRwzt+NudMYiYpoAsFSV0ZzBOEElY7EIkhUQJCQGBk5pVA2qRkCqyu9jdFQ3VWVZSilMjJYBsmhoGxczA5q7/lZQWm+lwNQfJEYUJYNTigSl9Wy4XCw7LakyVRqmk3V3F0jvYeZwGmMBURWIKZdjUIBCSSXXtmYFIohxUG2sq5h54UlBK8cxTXe77RrLalGLq+MODDlTgbEIaML+sLaNWy5DnokToqCplJJjyWVzOIACkgOC/ebWcF62pnXl9evL+80GRY0109gb5qZyt4cbw0ZIs8TaV8tucRhGjWpyd/Pm/vzihWrJJe/3A6I1tmQZY8pE5rDf2Kq+WLV//fLbm3r5R5++ePPqG9vU4ZDqztxtxtvbzScfXEhO7bK5vtwsz09zeSCZwwPfZ3aeBpztM4jwqBfA4+BBpeiDGfF37qvvrTKkKCAcw2TnI0wBcEZ43kvb5uNHvmtO9ME4Uh/cPY6DuVn6f1R0zOqh45z+O13Rv9TniCJgKZJimobx+vr6+uZ2mkbnWYGITT+MCiigpRTvHRFIKgqYi04haSmsFCfxtlLJbO2ji/PL370+Xyxvr+/rrlJr+5Q2ff+3//bf+eL3vwaEtlktF6t9v7POhbhNMRJhTBmRrUXLGAhExc5pEqiVd8EaROjaKsZcpqIMde016yxaAUnOmFXdWaBEmZxVge1uRNONh0Au29qPU8JY+u2hXVRV7cI2zHEwZGyIUlmTko6h1G0FZBWEjXNoqtQKSM7g6lYKsbWak0iWnEqBuu3a/oAlB6/ZUD9ONTiLerJsN2Mo6CtTDbtDUGkW+uRpG9/E/q5kARRghRTCSJhcXgn/wS8+u3p9vet3XWtri4dhIFDVonOCx5GDJkhGpIiQlFwKWkOipWhRECSYB/vGsjHMxtDD4yEwmh5IGvKeN4EAAEdBosgsEjnG96kC6MPahuPOr6j4kGQ+P0IKmpEMIrNokSlMY4wx9tN4e31JyHXdDYcpl+Q9rs+efP7554bZWk8CghpLpjCEOO0OByJzsTyNJV/dvUScbXENqqhqloJsiBhQjbXOOFRNZdZvzqM/nu1uswgC6pwH9N1we9ZdzuIzYjZclJilZCQSBQPomGPJx65YwTJ7Z2IavYGPfvRJEAFRAo4h9/F2vX5SJlEobJxBKZJJCwLd3d/shr5uTrabjZWslG73hyFvPzz7eApjf9gsqibs97aAt1aTsjhbmTBMIs5jKyHWzYKdHcd+sVjud2MBvr0biteEOubyF1++Ol83Hz9fygYur65/9oOP/t5ffPFnl5e17U5WS0jx7Rjd1ebZafXoZJ1zHkICa6xdSMmSckbgqkHrh2lcOFIpCcQZzgKpRIe+srZIKTkwVqRaYjbGLKvTcRj73b47r5EIkAFdibmQeN8QlsPtG/cILj76RCm9e/P5H/3yb1UthXHvsjHGadmBVAqZ5u1OBMgQGQAATVKEnH/Y1d7vq99tkg9AzxHNge/QP/2O2/bdDnjcA7/DMvXIGjpuufqAIc1ub9+LQoPvwUqz+zUqHANtvttcH4yRj13s8QLaigwbAmBCRrRIzGRIEYlx1uxIMLnytPClq/NpK31MgFwQQSlbMzAV0DykEosSNcY676qmMcwK4C2RYTbceFdVtnKOjW2rqqrsomlX63W7WjaOKRdMUwTJJSsoE9vKM3Hlja9h0diqdt5WM6hCqpJC1miJgZkJiAohxyIADIBzUz6DywVnxTmUUkSJkSRnFGBApMKEImKMFQUQZjKogFJQBREImYgJmaxTdkIWySEaUFDJVBBUSWYRWYRSAArM4hEtCoDIOENXKiAJQEFmpySFkgAAYTZvxqNm7MgUKCpFJBadiUmSUQohkC2YFYsBK/i+IcSZg1mC4OwJDahoaFaq8ryfOsuOEQ2iI2JQgwQlIzIbmnkHhIR4BNZnkhUgAAqpYiogCUuElKGUI3MKmZBVEaFwyZCjxElikGks/UFCQDWEpGxmfZaKQsmqSSGnolJEiwoWBQHioxZd0gxrElspGdlkBSQ+as4YABjJCTCyA2I1jmyNzhM7YiOgpFpUZwQOj+dAQTIPhtakqloKICHCUR8NAN9jFR3xep1jPechQpEiAgo5Z1BQwCI6c47m6E8AFBUVIDAyz1RpdiBHnIlTx5ElzIJ5fbjnCzDXS2YI0ygAxhpmRMjMYF2dpigZQLNxxFwKSVX7mMri9Dxup9jHHAoz5SLWVWhA4qRZDod9U636/f7q6o6MZ2tvr7fdChaPniM7UfW2MkTY4G5z7SrnBwwhsMG28YUxpni+OL07DNMYtvvJIduKRIpljhkq66YphL73xpaAMUZf+Z/97Idv39zHTZ5isgbHQxCTVMQaN0W1BnCMoOicscbEMZJKzj1SCePga1/VtbUcwuHpk8f7YbJVs1h1w25gMl3X7TdbURxjmcK+O2nIYx+SettIYyBXXb1crG5utnnoc46WTVZdtXUephgiiI5lUlFiLKJk5NGzZ+82V9t+XD1dY9LhvjfeAVPlKxinUgpJri
snBXOcjG/rpvry3eUf/ezjb69+Yz74Qdvw7urqxtsnz9YffPyT/fY322nL3g/TgGNoKvf27dvnn5342u6UioiyPYzyuy++/tlq8eOf/uA3f/7r/WZXLLWrbj9mw1qkgJq5Gs4lWzBsDRGqASJIOc+lz0xKVcSiSkiGWVUM2SJlNmdEUCJ+mIoLAiGRSiYkRWVCQJijAYsWUGPIsiFFYWZEUoFShB+8ig7bLWm4unz77RU8e/LCWAMciJkX7u5qM426aNpDms66Ok7DJx/8ZKuHb2/enrTteP2tlugrZgFnzfX99fVhe/7sY83TiOHk2aN3v/vL09UPh2K+ffftx49P7vPt2adn128vHXWlTItu4VTut/35R48xwP5wd7J+enr66GazzVAPB+x8t+jaumtDSKQlS4k5LnwnGZwvzBhjQSLjDILJEnMOJ90ZkxSY5SoiIhVWREqkRaKxPio1q5NweHt/t6l9h8ZRtXryoy6Om5SUu7ppmnGcnD8xxh/SFaoAcdZSd5VX5ca6lTt98WiZcFfGxz/4aEjp/nB4/sPnv/nN5/e7ULNa77q6OYToTH3++Pk+pMN2F5IDbowoWw37O7ICJhaZ+u19Rc3Vq5fb+/2LFy+4gnpJhwGL0G6cQiiVq+M0soNnH663d2O3atpF89EPPwolPH60HPrdRx+fL8+rm80GiDZXh1cvb//4Fz/YftH+J/+v/+jf/jc/uf7t5z9euJtXv33xJ7/snj2R0ruczl6cHZyJu9Tf3znX1mCai+WQ+pQZCStv+80OTS1F5lJWFRNou14YTxmytUYAc1QChVzMLKOsa0Q4bHprqCQ+e7oaYk9Zqto1TYUE3cIjKhm+3/Qf/uj5btD6bFFZTlebON0k2n/0P/lb53z/Z//47z/+N//Vqn18k6V8db++OBvG0U0GRAXVtPSDnz0eQ7nZ7BcnNfvy7EUrEnxlBEJV8TRNKKapzWE7oMOTs6ZIubvar5ft0/PFYtn5Sp48OX+15z/6V/6t3/6Dv3u/vZ/vAl9ZAOradhgPoUx9OrAxhBbY1HXbDxuUrKjIqIS+q49EVyi2cnOkO6jEFMi4btGN4+RsTRbCME0hNpZSTMZYyBKGKYUJQM4vTvIQYo7DsDlbPVr4xhl76PcK2LTN7tCraFFdNM1+v0M4nKyX1DrD/n67IefI2JiyN0YYUymAlEuJqXRdk3Kecly4prDdjtuqXY5lqInPLk7fvL28uHjkyMQchikykxGKRS5vLp2vD4c+CrSLKoyJENn7McZ+GOrKNV2bUgEFYCRLRqCyVVM1u/3eWcO1Cb1aYwAkqSAioRFSX7mudCVEg1BKaapFwBhKFhUkzXkCJUYWyUUzEKG4Gi0gAVpEYUIRFSi5TERqgJ0BJFIQ45ZSSskJEWIOZIx1lmgiLKerekoFJTPh5ub6k4+efH3zbnfonbeFII6jMVxUc06ETMwlSynJOmuMKyJzHq53DkCaqkppJF8hFBCpnJmmqTRNGvOqXfjTdX+4LxIP29FWVdMsri7fXbgW3Lpt/djfDvs9MXWtG/qBiFWl7wcCRmDjjITx29ff7u633rr+0EfimKI1OITeoWt8pWxzTo5dmJTYL1arFEoYQ9v5cRz7w6at7OH+pl1Vzps0Dtb7tq4MGufhFz/9wX/xZ79+1la1q6dx0hzTkMfD9NWb66fPHpUpAyRrTZyGrm126SjDfJiCk8455zqneDDOSd2zUBrpobPBYxP6PgL9mODz0ALNhGukYywQzRZCc986e/c+1GBH4EdwnqwqPBD3j1yjOcxh9pSZLQvmuemsmJg9gGfqiKiWklUoTSFO036/u7m92273MRVVCDGenTYIfOhvCNlXhAre2ClJiMlYAEAUdc6dXVygoyjp+bMPDqm/eHx29mj1+y9+f3WdPvzxDwDrs4sXV7dXQxqfPX26aM4EIUJ5cfH4zbt3Krherm/vbxC1a9qcQhgHKGqcGcdcOa+5eMtdUyHKOARQOetW+92QIHtrRM0YppOug6S1oXFK1rh62aa83/eDNzbErBAZoPEuxTjuh50qI4JCTNlYY6pKAZQRQZghp6HynWsbLWhKPQ4HVHXMWUVyDFNPRLkUIB5TWS7XNB3GfopYbO0t210fTs/Ony5OrnYDIVWuGvthv0/emw9ePFbe3uyHqFEKEEIphVTf3Gx9U/3go2dxPHl7dYWtkKZ+n0RBABCRmN6rCVm15JRRDagwKqOKLSVJsaVYUACZ1WrEREf11tEp/QEnPHbQKioASkd/aHpgsM0DYDxaE+k8b35vVDp3ZO/bejhMfQ5pmMZhmIxjKRKmCAT7fvvm8g1Ze311qKulsorI4bDJcTp/9iGCvb+5DyF0yzbE8e5+i2Q//Ojx5ubm8vaSANlbZznnEqcMCN55RFRRb60x1jkXc0TiyjfTFJxBa1zKoUghwiJK85Uf79KZCwKgWhsbEYJoQ6xI/Tiptfwgg8EHbIIQK2tkilXDf/KHP63IjttxsWxJYbfbr9dPMGq/u6ra2nCQwiUGyCWU8s3bN93yTEbVELi2KZVD3/u6QZAQ02bYTZBN4n4YumW92w0p5FZjLpmcQWNAIxISGQQsiNVyOaa3RSQlZbYKdBvhL37/+sXT09Pl+vLd25vr7X/w7/5r8P/+e1/cpDfboV0vO9NCne+2cdGG1luJBkSJENmolAIaQkxFiakgonEWqEhARBRU5bnT64e+bry1lLNmyWxNyqkfJhcj69TWHojG/lBSURR07I3fXV1W53T+4udazDevv37y4iMRjBGRSr/f1bxEVUlh3r2ITC4FNZFBIJolLTPx/zu8CB/C+f4lNct3Wy882A69/8wDEvqeUzTjPe+/4girf2dq9L6rfZC1wfcfR8bDESWaiXlIdASo8HtXA1DNwOxD58yEZpY3EbLOCK8wsSgUJud51cBUPKBRpJw0hTzUzjoLNPCYg0JtuWuabrn0VW2MbSpmAlFhJiTjK2PY1t4ummaxXi5WS+ecMchIqoUMG2+YG6O1Md4gGJKq4rY1tqoISVMCZS0sgERWcVZwzLiYEEJRZbRH4TQiIGfNCCzv/YahMMyGM2IZ5zcFIiKDaEgNiWLJc4iXcTUaQ8YhWcE5i0FQBef8q1JIBXKAHKAELQFQcM46owqQgA1I0RmIkDIfboqgDCCoJQHI7CwNeJzjiAKIlpRFZzslKFoy2IIEmIQQWAtEISvKAAxMSMAqWEBKMYQy6w5FiVBQefbjJGQABkHN856KoArlmDwmpChwjJcss1ZRdGY6KpaEedIYpBRQRGsRWfHohw0lQc6Qo8ZQQpAUZttsYkJFIpNzUgKRJAIplSw5y2xZLkDCTAI6Yy3HmkBVRWaoSgGImJnIsKIqsiIDshKhMeQ8+YqNZWOIWJCLHn/OMNOkAVmxQCKeURicE8OlqEqxhlDwfwAVwYM9tYrobMMhRfUIDMEsPCtyTM18YEbjHN5a3ocUKCAqHVcn0jH7U1UKEpSSiRgZqHFsGuMs10Po90ClbZvWUZDojNEIWbOiZeTYD3VTG0NgF4unH9z0X
8yReagwjsE1jbF2SmOSTK7y7cm7t7+vq2oIpR+nqqp2+22/O+vWj24vv6rsRVZ0vmoWZ0TadCmrDqGkfSZAEBqnGCSHKXbtIgUghpCmaQzeZVouVm073N+6Zpkl5oxS8n4IfRwq30xDOj1db/ImpyBaVotTHkdinsYhl2C8maZx2Tb7wy4kbRqzrNqr233bgrG+n4amaeN0aLpVnIq1LRu9347WeltxTBMQ3R22Z9WKFPpD/2ixyvFw2I277cFZJoNnZ+1wmFRwO45FChCwAcvEiJagTDlRub1/+8kPP+nH+M23r087v153SNoPcZrS6cnp1fZ+VflYwpgnjbH1LRv3xbfXjx6li5MPry7fnUi2zRmWcZg6f/bk6Yf96zd/6YzdCwxDary5u7r54Y+n9YpvLaYgh8NovVEsv/v17z77yScvPnj05edv9tt0NyRlKoJKWDSzsQJISKmIxGgVKmuQwSrk2WaBkdkctemoRYVm0J+OFl6AmEsk+v7BgXOvODvTi2ZFQGQGQiYglFmujAYJARTfZ94AYJGf/MlnX/7Vt2ftQkN+d18++PDR5bvbe9gmTd6a6/39zRiWzcets5vNrjp/fr5aIR+meEeuWn10vnl1054+jWN5/MFPtKu//vKfcxXS3VcBdsHFR4tHnZ4cNq+v715/+AfPvvz9tYWprp+8fvf2zD+Lu0KxgnFbQSWDiaVdnqw+OnnGuGu7CmI5X9bjdlhU9TREQybFZI1zhg3bSFYJiFiykiUJxbMxFjRqigIq3rEhLCUDIGomsmyYuTFTzVDG/oCUrLhce98+1jHsdrfemDhuBZmtda5lzIBumu6HYdJCacjDpvcm/Po3v318dvLBDx779erv/+PrR8/dZz95sb0v93f7zeUV2MqQ222HrJlbj950iyeVrw83b6bQA8hw2K2fnHcnp9vrO1dWd7f62Q9/qWb89tUXF+kiBLM6PTXEaIMl2u8GYmqXzlcaD/pf/Cf/zb/77/3Pbt/eXb07UC/fvn3XdW0GVMJ25Z8+7gzzomk++fFPn/3s09/83X+8/ea1ubkLv/ndpLFsu+23t/Lhs+1d9qdnj3543g/9fn+N1QUaYrUlxKGPddN6i5kAEYYhMrGzPI3T7mZ4fLFUOfZwScQwxDFYrzmLgqRUSl+mwwRVOj2riyCzZy6g0qyqaQxpzI7o8796+cFHzy6edn/1X/5XT9fP/uB/8b8ad9X0L/67r/7iv/6jP/7Zmy//nKqPP/jFH76FN/4Xfzy92XDbDFdXu7vRj2KoeN+FKZzDWqgYk5FNnqSp293tYRJIlXSNQ+K6cqhlf4jLpa07fPJi+Q//yT//xS9++jf+lec/+9PP/nf/+//jo49/cLir4UsAgJKyapoYVcAaY7ECEUGNOQ/DnogNW1EpMpUsrvKixTB3bTfFMcYkkpEoS/IEqoSiOSRBAZS6rYkck7jKouY0jou2m0Iah5hCZjRW5nBliZpdbXLWlAKpEhsL2g/B+SqlIoqoJCS1rypnQ56aulKFlGPXLcnZmBMRpjCgcevThYbsDX189uLq+mrRdvvDsDsMZ0/OYz+2bX15dUhpe7pYsvPEcjgM7WKhKmnXU8nW2agCiiDSNg0CaEpQNMTU1LVoWa8XOeTdoScmVR23d1ByiUVysraSKYhkNlwI6npxff/1o2UHgCKxqbwMMQpUthGBVDIjW8NZCpFFIC1jyRPzwnJlbdVPO8BsDKhwkRRkqKqVc11lqhiDllhXHiWHKSJyU/k40np9/urt9fYw2sqHoTfGxJD6Q+8ZRLIiIjGKMs3ZSZC1EEEuhVABKElyZCvrARQwdU2LpCSQY6gXrTFcJFxdXVvnpWoWz56M2xscCxHEMlb1arvdEgYFi5hQlAD63VgiKOQYp4VvCSCltNvc7W8vy3BwbCRngGhsx5OJcewaqxmTZjBcooaQF11XGMHazWa/H4tjc9eP7N1m3y+q1tTsDJVIOWvOsYhLCnVXP+lOv3759uc/+siPLoaNAVytusu7zW+/eP3zHz4veUwxxJCWF3XdVA8V0YzPzLCOHM1+tTx4YDyI7meTh5lUD0hHpzydzV0IEYFm2bjqnFh+1GLMrzZzvPFhYA7f9cAACDOzu7yflavC+9C0Y+uLisfB+GxN9NCAPcAHCrlkQE05jNNhSlMsOcSExMSmbdqLi8Vtf5j2o3OMAId+RFAiLLlMSaKm1dMLQ3rYH87sY51gP+yWy8W3b94VCWyohOnN1184Z+4Ow7Onzxbd2clqfb+7d7Z6+fb1/ebWuzZnjSlapsrbm36bY2SGUrIxXNfeOya0j05Prm5vDJMDIBWLKloWVQPA2z4aKlXnUkrnp+u7zV7TcLpwm31vvFl1yyEcppD7vgBiSlLVLqasUlTQ5sSGsmbrF9bTbB3NiIYZjFPJYRzJoGgi62aPd82SNTVtddcPxjUV4nKRdrud5qlq/RDw+v7uo08/czmP20NX1WuQXUq3fTAhV9422aokMKYmF2NBEefc168uCelv/PEfLR9dvP3y85OK7m65H2KMOUuBY87Td6IAmJ0eSsoJwwQExZCiKmrx3tMs5wCjImQM6Cz6UXpv6qvfb3cfWos5KPrBTvi7SL+jCudoHH0s7B/a6t9/8bvDfhimPqWoWtAYzYAAqYSiOY0ZQrbNYrO/FYv91FtvFsvTb1++9NZUzjvL3767ilnbbt0f+qv7S2Jqqy6LCohIMc6oKLNREWZT+YoMIwghNFUVYm6q2hEBaAHCjAjHeg3lSKVDJJl1Z8idXd3G+0K0j0HRmNqFWIiogJJgESEmUCBCQ+wqffrk9MnTJ1IKI5esMeWQx7V/tt+8C9N+vV54xjT1UIrh+vr2dj/ExfJJ0FtiqZp6OMQ05rapvOLt9Q6jtK5iYwig9uZ6GNfLNZHNeds1bdv4cYpsLBsHKABS1VbzZBgyQJimDMDW/Pqbd2dd9z/+G5/c3N5/+fLtk2n6D/6dP/n1727/iz//3T72UhAbv4+YixprPWZgnaaxaepSomHDAADFOS4lESgzhTwZcMzKZBBdySLEKlkEiAlKZoKqdVhS7nduhUULN62jk2F3axiJ0Hr23vT7u1SkO/3g8uU/u7u9fPT4wzxEUDKeU+rD7tLapV2e2qoWAGACMGIMEB1JPN9BkbNMVt/vePg9LOg9Z+E77Oj/P9wzf4GCHC0l5i9F/G796+zbBQ/8hHmfPaaqPfjQwYwrzVeGAIRz1vf8pkcqKLg5JV6LM0CIBAVIEYSQCQWRyKA1rEpqixcoFXQyi25MSpItNbWdxyLUh20GY52rm6qq2sWyrptV47wlIlVjCqCiMcbX1i1P103X+K62zG52FixJAa1r/eKkrbwhpDIaLKayvrLMpEUUtSjlJIAKhATENNcmCIrmyDsVBEFVLABIDKylUMmSJywwpz0gMxMygzWgBGRmMRqgRCpo0KgUtl41IVmd4T9AkswSEVDJISCCoCTVDGlSSSqJGCUjGQarAn5m3QLSUYeFqDrzhARUURUlqhRQVS2ioIRFtGSVWc5VUlHIAFlL
me17kNSAAGeMwF6AEY2dpzqISohgCqBKIYOioIbYEjIAzMASIgqoEBCTMhmCgqCgGQooILI9nviSZ6MrzBHSpClqSoCAxs30RiBGVZAMOUrJUHIpokCKzK5SNJCyQIKSSRNolBJzTiUHEAIRJURkNhUygBZWLTCrz2YyT0EF1cTIiJmZkAmNFXRCDsAoMhmP1rG1xMwPtChBBKTjxArmvFSaKWCAAMwqDzfSzKF+AE2PUJEcKVAgRUopZUaFZtSuZMDj9SmAiMDsZCQqkOcxBZKB+Z3n5DhCnhGzIxGQyDCCsvEImcABO2MbjUENIYLz3rkGuCi7rMhV5QFLnowxOQbDLsXsly1X1X63EyhSMiFZYxGJAMM0aSmd8emwG3ZbazwSiGSCXIbw7Zcvf/zTT0ZToyARoUPIVHIO4dDWDjFnAW/tFEIpoqkslqthvwUq/ZCs90CqkPbjdpoKsd30d4joyDS+2by5s4qLVRNdGcNBJVaVKeDGsEMoOQQtual90RJCb7p2GnvvaskJQRvDtbXeGG99KtIt1siESo/Pnt5cXQfatk0HLp2/eHL5ZrOgBUzZZAlTvB0yW1mtFof+kGMRwSns5hseSVGl8RaRJOcQE9UVMS4XdU75q998/kd/+DfxHPrtrXFad75o7vdTP3hMPOXsGmrrJiimnBZt9fhssd/dG3URE1DTlaodQ4yqaNfnT89PX19eXp4ta8lIjDnru5fvPBoEQBHIUBDQN7fvdk193S3b5UkbZMJDyCHnUoylIgIimIuzVuMxUlczKCgZ6wh0BikRQLCIGMNzGZNLBkRGziIAymxFChPNy7hoBtX3huqMdvYiNWwBcdZGzgdSEZkHBe8nCBLlzcurCGPllnm0z58/Q5MgjXk73Ia7Tz94XhNY28h4tWhWznRTOLQyFiz7YVd3Zry96lBkdy8RnZXbu01lChg1Jf/80x/fXYbOTf3mq3dXl2D4/uXw4uLn+7ur89Wjb16X2Lif//IPby5f/fDTD9I0hgQU0pLasLt9/uFFf9fHlPMArqqwchnFOkfMxhoAKFNmZiEUxVLEGQKkWIrJlsghiOZUCJgZQUNKhskbMWR0UqOcc266Soo1iGE/otNFd1rSHgkIzRT3FTSaExMjGe99Crld+O6k08L32+kP/sYfSEo3170b9KJrKp5ur94turM8xsc/ezSxQVWb8fLd5frR2bLyw2E7TneYDyqpWa3X7lFMYxoUSvXssz9YaOz7q5v7rz764Ac5uovnq+HQpxgr752tOjLGU0khT3G9Wp+fvnj57fUXX7/6H/3Jr4aw/XD1wi7qDxqXcz5sD5DT/fX97z//sju/+Py//MuFP/3k7/wrt3/vPxPwWpvmoxcrdxanqV226DyKOIfnj0/R+N3hsM+he+Q8EzvMIIaplMIMxpBbsrFYCaesMQRXVdayCBOhqy2CMBRC7E4XMJq+PjSnPhXNQ9ruhzQVS6oAaSxIOIXps8+evL4ajeOzT3/pF/Xm66vXf/0P/uCP/vXd1bvNFp5++oPtq23/9uXpT54e3l73u7F9uuy6Lk98enH+8q9/Gxdd69eb67CP+fwxXb27r2x9XtnFuq1VYpEp6m5f7CEwpXFKvrUVKjs8X50Nuyzjbvvmyz/99OLr1/vu/MnxSDCmFJmmqAJtU41hMkzMaBmJyFX10B80i7EmxVxCSpLB0XY6pDg4V08xrNrOGA8IMYUxTjV3OWdjbdvUKcbtYct+rTFVlYsiC9/WdXU3DV3X5CxlNs9Q5VklCtQ0XoRUaT8c7MJZonEItXclF+9sTMEYgwg5wbI7XS7q66u32922qduL84ur+6uOuyAFBHLMq/YUsFhmJtrv+jKVWGC9XtbeTMO03fSLRSsRpz7FNO73u2EDSuyXbT8Gg7aujPeua9r73dZaLiWlCKIc49Q1NaAOuykO/WLR7sadc84Y04dbstj4EwEiSFxVRYHYoWFQcZa9d9v+rnLtOI5ka+U5o6ikUoxxpFiCGhf7qWdDoGxtVYoAKnHjyLGi5JBSn1Oy3olkoDyFMIZQd4tN3x+mHg0vl93gzX4ciyRHqJJnJ7cQBmaq2MWS2XLl3RQCQBFSYmAiEkUFa4ymCALdSWessdyGEDUlstC2DZZe1aPjernsGndz8zaHClo1lHM8FCEBZaJcJOZccmlr13qfUu73d/dXb1IOmmLjqxiGYei7rgphTCkumtb5Zhg2lryiJSbLVlRSLsPNze6Q1usnU38I4w4iDhK2YVrSKk0BlbpFG++n6TCS4em+/ODDp//dn/36/PyJdaZqvGhyrIjmty+/PV+tLlbO1x4A+8MuxwdW0WzrAqAix2YE50nYe7ULgep8zMicN6Xfa3aIjnIxKKqCQAoPMM4sZ0N8CEVTnVGnozWRzlLiWZj0vjybr+G7rLT51R8EFd8boD9YT8yRbUjECKIFIUvKElNOKRUgVYFv3nzrnbeiXDtEiCkRw7JrNrtJixpm723nZxMffPLkiUoxbAxRfziYygyH8avffa5FDNH5xePl6mS9PslTGA975/w+xMq5LGXbbySLWrrfbQ/jqEwEigKLRV0zWSnLVRemcf45OzYOgb1bNS5jQSIt0UjFhgrqclHtd72kcn66cKoCyBIvFu2NHMYpK1OREiYgAMuMoIyYYy45yGoJxsQkTW2ZGci4piPSMPSGKIZUN0akLFen9/f3KiWnUFmXU2ayTdOIFgYkxsYbkTztN2vfGCcp5BzTqmmn0EPWrnNxmjJSUkBC7+1hP3lrFlV9++7un/6zv/yjP/jxpx+9GLe3tcVxTMN+KiA55lxSUYxFAMxxrC8SYpwxRygZpGguIBk1IxQtDrwna3NWNmbOu4HvhJHfNdWiggjH+D480pfgwXmLEABJZW6mafbu+D6n4i9//esU0xgGppJTaFY1ouk3B+udqDhnqhrHw+H6zeXyvM1ZF4vV/d07oFS1dW3r16++NtatzlaU9NXrtxVX3jtm3I2jihIiKCKTnfObrDPWzhl8yCZlcWyZyTMniWMQRiSkrICoRYvK0YZmBlad9fdhU7SQkCIysRYFATBAcOSZGCJSRck2Exv9+INnqaRXr1598vw5ac5TtMbtD++mYUcFSTKTLTkZRER9+fqL5fKcBEIeK1cRYhjGrAVB0zjtN7t21UjSIoKMXDCMU/XIhjCqKhpQFmRAwhRDVTkVgJIdyaJiT7QZYpTsmnYK8g9+8/KjF6tF7ZX8F5+/QZFffbT8+ZM/vt7t//z3L0MofRquBgfenLVVlkJMBbQUoZwq9gCz8g5TnmOFjQAygWomUCZRhClMRM6wl6xs2DlKKY7joW69Micorl40KuGwnfrI1tRV3YSc0litFh99/LPt7q7f7mtbFyXVShXDNDLUJUZEVkNcWSCrzO8RHH1wsT4i7fiADj2MgQFm6eSDmfRxM33Q4sL7V5j/vl/h+p4Ed4SH3mNHhPp9ztIDSPSweQLA8TkPrzrfBg+gkj7EqAEwKmNmJkNAM9qggoSMR0WDQCZCkWyQ2JAIzCZBUYBJEgiqLr0RpYh
cxqLEhHPsGhhi7+qudr624E1WLALOVJ6prVzrrSf03rFKiikLOl9VVdWenLTVPD/ZQUnWO2NZVUsYgXAGvJiJFIgMIjAhKqqACEIpooXmk0cBsJAqipQSWVVRiAFBEMEwIyoREiOAGFSCjEJAWHIhwlKCYUAoMmOBkiQlmuVjlBGVNKskIIGSEApAUQFFUhBUArIwx7IjIwkAKhKQaiEgJqQCgppBEijP5IFSQAGLQhHJMeecsuock1YUGBFQBKOyAVsVBeCGFEATSGItGQoTgUDRQsjEphxHLiIlCaIAzdpFJAZUJKOgKhl51iYyoKqWGVEERBDFFCAFKUlF0VmdWX5ED7ZaqEjAToAQma1otlAKmcycDDOliJqzIiAQCBMJFoOATERGaWY+qUh6MOlVPKZdFgQByAR2No0nNoQ2ARFZZIdsgZiJ2FhiK4g4S9TYKKE8ZEMJgIIykhy5fiQqdPT2/Y5dd4SKShGZcVAVEc1zeN4slnsgGx0dp0SP/bsCAh0RKEQ6UomQiRAEUWbxGT2o0RRViQFm3qxByc6SoqOuRTKlJAKs6g5AjKXcm2mnqoWZFTSHdNY1aTxsrq+s4RhDVdfIpAhICOJCnKrG391d+7qVksIQhmnofFWKbG7vpvGxr7v9uGvbdVVxvy1xytZQ13K9Wn359VvJrTXVoR/CGKiQSqor9lQnQetdzoWYwhTbrslSGLD23kCJWUtW58zQbyomZV6sVveHQ4nJWweV39zfe2fCmM+WZxRTR5V3TeYpFhlTdlIyZGNZkq5P1l+9fblsT68P9z/+w89eflXHGA/760vYThhVchkSe3/2/Kzf71OCq12PkhtfSS6IysYCiDEmiVZVZQEnAAJ1xiRJY5qQnOby29/95ac//aQ78ZdvrrBo1XWu6cZdrlw9DhtMulwvxsOYY+gsWVuS9rZq+/1U2faw3cObd7i6IDKnZyef/uzj168+R/RxSu16dbeZ/FdXq/WJ0JXxJApxzANPKeqrby5/9jc/qTo/vr5edn4YA7NRVVIsSTQXwkwGERnQhijeYVXP5RznUkA1qzDjTOAHEFSejdIISEFAlI7+W8cDY9ZtAqqIsDEzIR8IFAoCMtn5qFIFJBAp8/0DACFmE+D80eMPnq3+yX/156vzDx+fPd24ZL11J+cYt/vbW1uf3w5pH2KzPGvX1evPv6gWi65ZZuhL1J9+8ON3r7fLs4XqMFxextTvhunjj0+GKTspw/3buj1XZOvNZrtbPXrx6GQ13r3uAE3hfrs7W66NxsvN1xfnL1YnF0DTelHdXe51mp48/qxyC1BMU2RCX3lrDAJVpo4mKnIhJSK0aCzbZEQEgetmkUzSwkRAAIWsSGBE1CxAvlvmMh7u3tRVJWUiK1pyPwWtya8vdrff2MoiNyAiJSIwWU9AORZjiEz1+MOP3r66FQ1vLl+frp/KBH0/vnt5DylffPRsN7zc7bZc11e3N8tq6RfOgLm/uqxdnMbdoj3z69O3b38HqzNfrdNh54h/++t/9OwPXpAeLlanjTvtc3TIh2lChCxzEEtOUQwLKZy9eKTOnj85cd5Nafz691+ddqfvppuLD1Zf//bN02dn7cn6//v/+e8+/dWH9/fDee6IO0g5jWLe3Z6d/51RmykNyuHpz38gCS+/fe0dZOy5Wz1+9BgPoW6068x233tnrTU5yxRCg6hF2toK+BiHpq00zyg6KYCSkmplfcm43+x3+2F9us6KYUiQhFl95WQKm3fbaYLT8zPXTUj46LQxVSliMtr2xx98uKplU/ZVVy1O3Se/zNv/Pp2f2pMlIMh+mg7DtNvHMRpa1p1bPWq50thLvynOt10Ly6b93W/ffvziAim3LcXEjXHIAQBTEprk+ttN3TjDVbU6Wwd48/Xbf/Vf++nt//Orr768mu8Ccqau2qk/6NwBM5WSVIyx1ltvaZG5WKeEmMioCBhDgDW7xdIVyW2zKinGaXLOWmvPmobAhlj6MB7CpFm6xYlEski7u62pXLVoxn6/bBosYpi99fv9bupDt6g1lZQjMrjKP3l8fnq2iNNIhjf3w3Y/+GYBkPrDVNe1MTwOAYQR0jAlb5Zxn27gMI4Fc+9t7SxM/YF97b1RdQpIbHZhn4bkuIBtDJvaeonUNd0wHIjg8aOLw2YIQU66s3u5sWiN4TjFHhGYa2+nkKbDmKMfB7Wn7vLwemE7RIfoq6qVMCDMAggEMjmJMdI1J0goSlIULZaSkci4pRTfuQpIRXOS0TICEjEbsgokCEQWoahoTAEZo4Rle45gVSEjpqKLbnnY3YOKM1xEg5hSyu3tHiI9Ol96lL4oowlj8JWbpgyATIToADHlQggpRucqa44W3aJsiBU0pDgbpjqDbLSufeWttdU0TCmNzgUGG6Yt941VI0CrkwtCinEKd1PbnRpfTyFVlUPBfBisB6tld3uzv7+PY++c966apCCjSG4q0/juvr8hxKIYS2JLJBjGKUddnCxiSkmlPV9fXr9tU490PoR84nzFeXe/GU7ruvLD0K/rVTe1N1eb5VkV43j++LSt3Mu3L//gJx/4mq6vRl+31hEw/vlvf/tv/ekv0XEJyRrX7/sHpEgfBCv6gAEdxQpHv6GZj3H0JzqGieB3sTn4MP37XlcDxw5EYQ5u+G6GrkfDbDjGiUieyydFggcHX5Uyc4jmZktEjhf5vbc40pdKBpj9kRAAShZVDVMMY4AihkmRQo550rEP3ruSMwAsu8UQxpRkdmYSxhcfPF20bRzT+fkZoGTlxerUURmn8fpm40B3h92LDz9YtcuitFqvjdFD6IuUReffvu1B0bPe3V+3vvHM0zB2zo9jJCR2dHGylBgha22MstxrEdVV4xtHYsk5O01xCLmtGwXypirTJAVikWVTpxSB8qru+mE4XZ5mULR5igkE5k0L59QMRABk24YMqhhzQmY0XDUN1xWiWO9KGq336EmCNN3qfrPJMpki7Jqo2jYdsQgWQKuqOvUZyuGw7ZbovJrKJOP7OCiUwxg307g+ObHOb/thexjYmPV6EUP03j97+vT65v6rz7/5+Lwdg6Jx3cK3dYVacsoxTDGVGEvMWQBSUZ5FVlJyBkIIYSpFcsklp1KSyz7naL2zzqkYZSYyyKzzqPihvnkQtgki0bwyjr6932dsyFHBBjM8OpsBHdflsLlDQMI0joOQbr+9BUbLzohhRt8u0tjvxzHISNoaMow49Id22SrCZtiQNwYVyO72N03bGttM0zSO45zqNzsL+co7644nDaLzRIT9MHrPDOycY8TtITETK4EqZAGiuZdFollnV2YjDJztNUAKIOJZt4z5Dgp4ZyQXJkYiqyo5dm2lyK8vDzKpo4rQ9H1vGlc11XAYS1RrlgQu9JMpgZy9311NQU9Puyn3Y+qbbum9P9zvkNA5U8oYct9xd7i969rGWiaiQxhc7YfdwVjrnbEWRUxWIGV2Tgjb1dLVbb6/DyhTyP00nZ002zDtwf7Dv373px+dnJ4294ezv/+bq2V7u/b4Bx+e/tu/+nG7XP2TP/vtu1243t796Wcvqs
YhFSli2PS7vasUGMcxVtZarlHRomFyRaUyPoQ4S+GGMNZNTa4yCDkVLAoJUonjbsdrnzFpleyitVLkcC+aE0dQSiFu4H61WrWnFyVkdI03nYgjaHzzGKkDPk0xWiIsqiUhoGphYhUFmCGW494ID2v0fTd6bOXgQa9yRHy+cyxCnJ+k33uFh398398a8YFxBEdZLgCofIdNKb6/R74DsmbGAyiozB9/X7DGBARqUC0ighjkedsFLQQzsZRQdQ5wZEZQEgABAwIAQoAaB4faWTprKnDYx5QRSEuehoQaDFRYOwPOW2ZGY0ChMsYgkIoBRCkgQkh1VfmqrZarqluYMlEsUNWWGpoD0YsQiuqkwEyGAQDFED8IfkiLMCkTz8yQnAsqqWguGaQwCOIcuIXMRqQQFkUgMIwEKqTZECEoqUEFOspaWQFJBXOkogiCwKgIlEGKSETGMhO2iFGFQIFVkeGhfZulSKDzTgXHGYmiKkMBKILzb0YZwCEkUJQipWie5fpSioiAiFIpRQmUVSBhAWVBi4gWNZUcVYoBKFKOunER0YRAGEfrPJSMzKUgsCUiIWRLIFmPWnFgaxShIBIAgVDJoAVzhpTmzHtgM+vBZCaKihxBd/aCCPY430EplIvJmeNEYUojgATEaqb8oErJEZCUDMxpqkVACpbEKgIFiQQUCMhZtUyG0DCyA/aKRomRnSoSEzChMbOiBhgNOSQmACRSREKF4+plBTXMgDCLeOa8yFnGjIb/h1BRyiIiqlJKAYSc86yfVwUCLiWrKoLI0alxvhFxRoEJ59TPOVUWZlEnIBytio7jOAIFRGPYGVCQwsRiHBlBICDnK6ugglAv60NKSBTDhICEBlDiNJQsQLQfQyrqREASAaQ4pRiZzRDzdj/YyoFySdkqlVwUMhi+uro5f3pKKeSSxiEbsn3okez9OD1+cvHTz374u7/6vaVWQY3DLKLKbF1bLQ/jUEoBVQ2JlA2yMbjoalF49/XV8mTparc79CmmOYj4sNt3tS+E3Xrx6uoaCUouMU8tL1iwsn4cAjlRLB6RFGNRY8x4GNK2f9KusnII8e3lm9v7W2d86zsRd7LsLlbL/f1+OwwTlWLUsh1TcVXjqjYdDsiwWK/vd5uUCxDFUgRBiOrlGQFrGac8OIN1XZcQX335sjt5vDx9fNjdkoVu7aZpU1naTTlOCjv2tpYUAdE3bpzS9X5njL/b3Fhjt/fXT1XY25DL4vHy0QdP3r3apJh39xvjbVlDmA6iGRRSKSkWGJMK7Tbj13/9xreVq/ztXWBLJWrJOq8egZJyICIoSgaKSi40TerBVI4dAzNFTnOdpKKAlHNB5NnngZlQNRchnrNglY2BeWZLdFyliLO8gpmZbS5HHjPDDPOa9wX76mLd70MLze9evglWbvvLbqjIuCmFEV27WIxyP46HumkOKd5eX5+eLJqTC4YsbAyvgOKbm2sx9YHCu6tXu9DnHKq22e5TieFkcWZX69dvrt2pefLi2T//3TfeNM8/eHHz5puqtvXpWZay6Xcr9tXq0Vhk2t6cPHH3++vzk2fBVNSucwFrjLdMdCDCOW0wR3GV1yAq2TIV1KJine8Wq5BGyHVTdylyDoERTO26uklpSikBScjJuOb04tMUb4ENWGlqPwYY00bZG3Oa8x7RCoQMUFJEGSxpt1zHIR+uD4hvEemwOdzd9598uGgWp6+HV89//vHhav/f/JP//j/8T/9v//7/9N97cfboueeYhh//6rPPf/Pq5Pn5esXX1/cxwO7+vmlPcrZh1PXpuYsx7icxtnu0fv3l3e3mGxD/YdV2ywaoSMwhDyFiknJ21lhXbi8vv/mrvx4PL+5v+6fPH3/6409Shtvr8Zzp/Nkptfz68q46WXzyhz/5y7/7f/nBj3/S++F2uj77238ybO7C539tn/8wR03O3L27MbauKlx/eL4f66pZvLm5EUOV2mESZigihyEgmbbpGHGMOQtM09TU7FiTKKrklBhcTsLOTmPQgrbyF+tKkIHRth5yMUDb6+1q2SxsJ0Y+f7P/+Fl9++o2U/3x02UqMcTMbEfj323vTv74j1DMb778pz/8O38YGzdxUU3tRass/nTlT+Hy/lDY3ey3pc+NWZyft3EIoClq/PSTp30/Xd/ef/Dx8vJ1D9I8e2ZNy2OKq0V7+SY6Ms2i++LLuzcvb37/De6+vHx96y438XgWZB0OQ4qJCVMuyNA0XQ4JVdLU55iLZMMGiUKY6rZer9eb21sCUZWYhtqukGixXJecRCXEcRjvmnZRe5YYkbhq6neXl2fLVb1oEsAwTURGsjJBmEbrMyCYyhRQYF513RjGFNKrl6/7cVovFsOUiJ3xTFZtbXzyOWe01C5qQpymsW5qQG9O+P7++uLiNE1TP/VN2zx+/uTy8j4HyVGnHLpl61pT102KcQypdr7pWkFKKZycrPbDVglcZ9cXzRh3dW0sWxVgdjGUtqtynErKz1+cb/b9UFJfNuxsu2qq2oxjn0Wsc3E4KJKvm7jvWbWEyVVGUTUmAQwxqWIcQlYIUx90sMYatNaaytkpZ0Ks6nYz3BsgYzwyxDEaMobI2EZKRkN9GJKyQz+NuaiM49TUFSikFA+xV6Nc2WZRb9/egBDko7ngMPSVtalkb62okjUxhrpq5kRsUGUkyaWwEnGJKTN3TWMrt1wsJEXJ5KxJGqaQJ5neDcPzjz/wMnbtqj/EdrlCoLvdXbdebXabplECs98dSkEpCfO4GTZx2KPoom1zyuN4EJVpFCKDRPuxTyGR4dvN/tHFuSjs9wf0vhTp+wEVuTFjiX1ObRpjib6ux32PYSxqhvt7e7JsWj/0AxKTsW/e3Z2e1v1m+6tf/vg//y//0Qfni7arxn01TWk69B//7ONvv373u2/e/vCDi36/W5y45Un7gN3AnC+G39mbokhBgvc+QbM7XtH80Ik/zKXh2AsDwPyfc3onIBKSzscY4QMX6Wj7+NAnyZGQfyy+HnJ74L0vEsy5KDM3hN5rN2ZSkgq8DwnSooplTh8RECRAViSdHTOL5JQRma0mUW9sSBl0ThpAUbDOsam243ixWpVc+jEsulVRffXt683tDZRYL1tfn7x49Ph+s+vWay3ldrN98+7ti8cvbt5exmHMaWBvF1W9XC8MW5DkGJ3osmvvN7ctwT72p2enJ8vuq1dvjfCjhT9frbe720VbSywFpKvskGJR0TieL9s+Q1O3Jefzx2cv345dUzvDOUxGYb1st9t+iMlal1MuRQgwptT4ZhoiLhsCrJxnMt46X3m0VpKp6qqPB2utd+2YJ5XS1YtecyloAQyZWFLXrSpv315et+3aWQMkl9db2h2Wq9Xd/tAuFmk8PF6th6/ejSG/u9wYA1VVF4EpxJzjYtX0w/T68vXJYjWNYyjN2dnZ3e1VmiZL4J0pjI60VCJFQ0yplCwgUkpRQJBSkqqUwpxLTmma4uSd93Vb2+Scr5xzxjpmVQVkxmOZf4QOH5hw33W+M7YCR7UbgB6JaqoFZhWb6ntWkW/ayjch70IavbdsKJWyWp1KUuJSU
rq9vsqo3rm6XoYSrt9enl2sIBNyGQ9bFVl2C0Vhg0VJc2EkQDTGgjCy9c5ba4gwhuKcs87OzmCVdc4ZZwwixZjnHqYzBkueICtIDwUQmEhSUUQkDDkzABEZ4qzShxhLstY4tHmavGUtCrl4a8hZAmWE/rD94vr64vRCPn2RpoFhWRl/PW0aMgYhTwlLrJtqyumvXn61OHuGWFRLtzit66UWLRKsNSL5dnOPQJRVQuIFkoEh9qqFwYiirbx1duynkKZ23c3CUsnC7M7PHv32260QWEO1p5VBaO2d1v/067sG6ZeMTy+6+6ncHfLX2/7PvvrivKG25o+fPnq+aPZ9uNvuTrnxVSuQRQHIFNCiGrIQqa1syhGO3BcKubSLRoDY+pBiLsJFnDVSCipZNFnLNEakQ/24TlmATb1YMKRx6lPOxtaNqccI4zAsTlbqoRRWJQ2j4woI1bIaQ8apQZl7vpIAQY4Cye9x1eD9R/hAW9DZUx1mVtDRRkgfSEQPy/m9NPcB6PluWR8t3h4QfTzqoWbI4eGdZxHmw/s96NGOC35+VZ3t3vThOucmWYgMSkZEnjkPigqCZEUEqQAQGaNAhCxQZpoUzecEIiF4ywWUDSfEyITWJJj5nkkSTiNZ0FyKL4WsdcYbtsJs2BFZoNn4JllvbG2btrJNQ7OGhIipPg7apaBGBCxFVZnYMjOBgBZEFBFAVmJGFElSRERZSYoCCB2ZWmIYVYSNQQAmo6pEfLQoAiEABEEgVWW2qkpAIoAFIUZSBVCRgmQYABlVCkjUQmoMkFMBRoNYAEiVREGl4MPZePyBl6SgqmV2+ZmltapFpcCDc5BKLjmnklORXErJOUsRKUVRpQiIYgYmyKqc2YHiRCoFhJGyFJjDHgRUBJBBFIFER0WKs4TcFOMrLaKECqKiMzEIRHAm0ACCFCwRS9SUtGQAmAm8yjPB82gDp2CAUJGVCJBEZ8GXKBfkiASGAA0YjznFEggD51xKYpE5+0wQtUgB1NnXyBgGYiUjaMAyOi+EYGrhSskKMhArEBiXEAwbNIzGCRGxASbAo6oNAYhJtJRSCBlwri8I5j9whHYJCL+7CwAAIKVcRBWwZAGdZzM6Q0dz8QIP866ju7zIPPJCVGPMrIbEY+9NCHpkE8G8jSsR0awUBARRwEyIOWfJQsY6XzvrZ4Fpd7quay79Pm5Rib13hEVLYsYUw353GA4TsmVX5RANuoxDLrFZLDe7yflmGhMQpJRJNKQsJeeS77eH6mTtXKc55lg0l7b21lXvbspmMz29eP7Zj9K7d9cu22XbbPdT1XhCiClIiczmfH1e5vvP2hyn+839arFYr5fU+LE/rFcnLS9iLlmwrvxq0X379vLQ7zpnQ4TaVQaz55xVq4oyJDZIkE9Omn47LZpztrAp25ZqRlw07bvNfRj7zlUhRm4bmbTf90Zku91534T7YZqyQZjCOFja9tOiMp5sCL3zJmVisgqlaBKArNrHQ9u0jUCMMWusvCv9sI2vq6776IPH33z7hYa6Yrnbvq277nazb1LwxviKmdUBlYTTYeyWZkyH5elZPw7D/dWj0x+M+9jyycX588s3Q5KpM9Zb3m/uzz446xbNOG5AgQ3OSX4icn+7PbfnzlVaJiQuGhVBj3DiMQCkSBnC6IzJBfJMMozZO+LWGWuMBQwxSQEQnlF/BNAydzbzUsNZdatATGwMzAQ2YgRAZmbOmkiVmRVkFm/ODMz3qTfouaKld80XX37NZJdP65ubL59+/EdfffP1NPWvwt3paWMNrM+6q3dTQxj7G4SuW61u3920i1VrWh1zRr29er0/9GLJLqt+f7B+pc6Wqr6dxp6HPGY/rOtuZTS/evW5M7Do7PWbL8HVYZqGAdDgetHupl3/pl+06yxTwiI6CrmciyHKudTItqqMqXKcrDMhH8W91nAKiZkMO0n5+vLbR4+eLRen0EIRiVNEnYgQpDirgEDoNdVOO9v4w2GqjSlpjGFo3Itm8eFm94bogKhsk2SVHDIpkcsS68qB4dXzx/WJBYDrb6/Wa0l92N7569dXz158+r/5X/9vX/3m9ZvX1+sl7m92Xw4v4/3U8zDunfXL5oSU6KR7Esb4+9dXALhu1hiv77/89l3IJxePlu4csBtCbGpYrRb7/T6PcnJ2QZiUhnf95u5N+NNf/cnXbzc/+PlPd/c3t9c3627x+NFZ2yw//80XT16cxAz3b4c/+3/802f5UStVfLS4+eY6Pz9//NOfI4LefgOenvzip5e/v00HVE2pj84TeBRvHj1bqY+llEXX9sPkvVVBFRCgOIE1olIkmzjlmIOv66applAM2rifdvfb+7vdD374CQgBowoYILJUUm6WlasxpVAb+vzrLz578ctHH52/erW9fTdpks+/+frHP/pQDSx/dAYh1Ytlu/X5rH396na58HXnvSPj/DQmIvG+unh8+vbN3dnji9/9i+uPH3eh75PQ08cuZNmH+Pz5er3w3077tfH71zfubBkPaRd361XjGlwt8O3bu7NF+/Wq+uJf3EeFs4tT+CsAAGdtyYW89cYNc0ASwpRGC2a5WE4BCOGwP9SLuvI2jGMsKccMaEjE2ibFHPMAGUS1kHhbV4YgiqtcVbW397tshkdna00JDa0X7c3VHahta2MYE6Ezatn0JQ3jBIrG+CxgfRVjqqqmblpX583t3bPHT/7y87/+6MMPioCI2gLsECQ1rRtDjmn0VVfXdX/Yq0qOMmAwZkNahj6Ro6qqShJrrJSSY2ybJk1BQNg6a7hIOIRxYSsFHca43+6WXQNOxiEYX93t7xfdxzkOOcrd/c425uNPnrz85joHyD4zAQCxqV3jSyEDlXWOddAcl111GEYAWjb1NoRcwFtvPKWcF22b8tKgG4b7nMtEQRViHAvgous0w5STs6bC0zgenOlmOuqQoyFjodGY+uleyuStAyjb/j6EvnLOVhYVKwey9Lv9VHREQxqlrmopBRFFSylCZMhw0bk+gdkrt5QCIFDQGM6lTCl6b6LIYrlAJMtMVEzgfoxA+le/ffvoIj95DMvzVZpiynnRdN52noQQShw0psN245njdICSThZdmHo2mmIEndaLR9v9thQkgZxzSqV2PucBwIaQpmHqLINofxgU+Wy1GIe8WrSr5TJOMU5JYhKBdlllLYchuBorBGsWy/Xi+vrKPz3Zb7ePPnj24ePz1y/fPn52HmPoui6mMWz3pyfrv/jL33z45O+0y0V/t3Gth4d+5mGE/Z2PCwIe6UNHOf9DYzNXlFpm40k9SingYQI+40YoooJHP2wVVDzaGsF7EQUgIh2DPo+Jn6ozXDUHn80BJSo413NH+YXMTPj5S1RRQY4TWhBmAkjjOI7DKEVyFlFIKaeQiFC1DIeRmKYsTmF9elJ5O4xvAOBkvZr6MY9ZF2dtu1yfnyCA0fLu9dtpPy6Xi7ZbffDhs+3Nvmmai0ePLt+82d1vpJ/KmEoaLcrpqgLR1dljg7rdDx+9eLS927aOS46fvngMgMZo4ynlaUrBerdetWRg0darpt3nndb+cjsAUsj5
rKrPztvXr95Z4spRZWHZdo1zlkWMtRmMoZOTTkpOMhug0pRzZ1xKKXNerNqssWhBUPaO2LDhSaRetHHcG4KSo6sYAqzWp4fhCsGXOBlyxris0C3W5xnRdjlsc0mr5cJbnoZdxdVmOyJRn8Pzj87fvNnebfZRMCk4a8+6+n6zi2laLivJmvK43Y9fvgs/++jFo7PzN2++lZxTAZrJ/1IsK9e2COYsqpSLFJGjDFGTlJJikcICOeWUcvBV5euQrLfOW2uNdcZaZUuIygxIRMcA8rmlFpiz1N+36wpKMx6qKoj8XtrzvnV7fH726s2NdfjZD/+oqpsQJCl55+/v3+7711MapxSITN2srPd3l/dd1Ti7GtPh7mZDbE/Wi7Z237x8NYx9VXcgqgUq55kIAciwM66kBIZaR94yAJYsIYTK1ouuKSU4a0FFSqmN8cZITpILGheKyMyaz4mYclFCRFIpeQZiRSGWQorWChnyxhTNipBRDaFxvD30a+mA6XbYXW/fLpomjjvnCMrk/IkpkTACFmD8+uWbcZDF0pJByrxoTpzxOe2QoFuuxuEQp6DCwzgq5EKSSk6HsnCLmnhboPGd8/725r7kVC1TbRrJmQFZ5HRZ/fCTx//ZP/1rj3hxunx7e/+3f/bJf/7nX5am+4e/f/303P3hD+pPNqh9v3i+2m3NcDe+u99Khk+fNU/WTQ6DhU5yKqqIJpWMGQiI0cysEDamHw8rb5MkgAKECIZRoQwpQO2NcZ6Iyziw95pzjAngYLZoFgpmERBM3ZmSh+GaSE1NDlhCnPY9m4psTbZWKEguhqG2XimoWwCzIs12+zij4Q9AOOgRDvoeZef7j6Nn1/dgmgdHI/2XsJsHqElnphE8RCTDkQp6BEBBBY/Mz/Ldc5COohh4UNjIexbRTEXChz35AdqSAnCURhEqSAG0DCA678JCwPNVqCZCIDREJABMZIooK3tUi1ZInEkBNJuxqGHDBEwlTHvUXGUX094yY91SXZNdajLgChSj6AjFGqxrb6sKVfM0Uo6W1PmKjSklgxYtQdKIklAyA1hGRQRlVTHMgE6QCIHUaDElF8KsRUrKSloKsCIiAdvZYlglz8gdEgEIaUHJgDw7BIDOWBfOpmmYg5Zxtr5+7z0EklAzCJqcQbMgIyIwqLKAohZimKmMIDK7WKtknD39RFVnkyAQKZozAKoUzUVEVDELpCySNRfNGbLM9jhFVQWSoJIVINEiiJyhKEIhTiAqQsiliAoAWymkpRCDIoJxSoAqgirgREkIlUFBEAAFETKJxVRQMkiE4/VkZIs86xnnkJeHdQgABEB0tAdCBBEsGWdNMZDaGrkC23JJWCWNCVPg0GvqcxpLzCBJJCsIGkR27CuwFtgLmkIsxpKxQk5MJURCDOwFUIGJLRrPrkLr2BhiQ/M1qKqqsYaJBNmYGUH9jg6tAPw9Hej7b+TBqwiOMrhZglZkNiGaCYDz4EtpztfA4/2JhHg0vzJIcAxjgCO+No/Ijqmcs6u2qmhBpFKKoli2CEjGkKX29IxEUIJzvF41mPtNmJCd86SUkub97v40y/Z6u7vZ5zTvP6xYYulLCggwDoOrq+EQkEvKSZEJMIyjYakclzSG/YGbtiTp6how5Zxjjl1VSZKbq3dPn50ctocy9obg7PTEGByng7Vm1TYiRAgCIZasQofdUDd1xfVNvFxX0lqKKT46bV+/fvPi+Wf7/jAeRlNEDun07Ow+H1Ia2saAapomkXxyur7bbRkMGUwybe/fOVO3datgwnQYxxvvyCtOY3r29DkY2MDd2elqe7WvqWnrjqRUlp3hkKBqTu62hycnbSwgErt2cb3ZpSkuWm8Ic4xGJfR961smG8KeDBMkEuyM2bx7e6PhYnWyv+8dVVgaNNZZ510z7nfNyVIhaRaJysQL317fv3r6/CLlfHd1d/bhx5YdKuQBcoiAMoTkaxeSlphPF9XdJcQi1tspJsmlbquQcOyLRNWcCMlbTrkgQpF8HGolRQHmmbGppZSoajxJhjQl6y0gOGukSE6CR6N0JSJjnEhSJCYWEcOOGAAFZ9YNAPEcCQkIYL/LSuOH2TCICD0koI37Q57S/eVr9rDd3eW78lG3fvPV5znHNJUxHRYn5yjm/nYMOTAkAJpSwH3uQ98sVmmS/W4/yp3rXOXbZV3tti8Xw/SDp8/QjDn1r774nbjm4tEFF1ky5cOhItes1/uwG/M0xaldtn1IYcjgzKiHj86eXb59w0xJ7DANy26FTFCyt5YBGVmLMGKK2bAVQDLGsFMlJKiMt9bGkq+uX7359uXFk+fL1Vm3bNMQY0QBLTmKqrV15c4guVSmuqkAhdg6Kf3m1q+7RXWyv73znqGqSGkapjBOOSUiPnm0sDVfvXzVHPjs1McdmFY/++mKsDx6fr69H2110sPu6ZN2Ufm4L9P9WDer1cIcwmAJ436v4zjpxNZ+9qOP1mfryy+/evbBkylst4OcP/mAkftDGsdxgjKNEyhWVZuG3tpDKMMf/OoP79717XJNN4dpOvzui7/++JPnlbG7m42I/PTnT0Ps91P41d/6Rfzmav3Bxf/h//x/+vf/nf/l6N6d/PTZ/fXb4e5bJds+Pdt++w7IPfqD890bf3u1Owy7j375k5PzBVKRXEA0xkxoUTGFkESVXbdocoyrtgIARiRjQEFyYRGBUC+c9Y+Wq4s5IAENgqgzPB1CKWKcvb25b9pWif+NX/5hv+1dhx99uMpjTkg//vgFiH71xfXJk5NnT5uXb+9O1nWM8ey0W6zqFGMOeRrl5Ve3H3xyPt6nm1fXjz5up/7w7PnKsNbL8/0uv3t3CEO+242fnK+mnBaVe3beff3NdY22qZUql3I+XI1Df3jx4cX//T/95x//ya+uSv713/urVO6PR4VkZ00IOabADGOZuBhkZuL9sEsAra+qtkIUIEQgymjZ3e02Tx+fhjAhGMSq84s3d9do0XlJJZyvzw7DNoTJmMqwc84dpoCi/WEkcr5qi+Rx6EVEhEBIAbx1Ywzvrt+B0mLRMltf0c3tjSI4695dX5+fXQCCq6oQoq/rfrebp8pERiXu7neSi69s3TrkYXfo62TZOtaEVnORw2aonHOO5pqSDJVcpMg4RN+Y09VZSEWSVt503cJ5K0WnEM9PVye6FJ3qzvPEIhoOSdKwbFtpC0gxzlkpzpv+sAMENCRQjjQU8mglTMEooSKplhLnOVvKsSgUSWiYhIgMEJSkYRiZoKmqQGk6BMZGAfuwL1gcVUW0oWqY9iUM47hbtAsDOo73tffaaL8f2kWzaJ23NORsjBnHAUmtoUyoitYYBADCkBIhZs0q4q3LKeZSrOG5sCilkEhOsR/RWnZtrSKQxbgKSC5fXdbOkyFB7fvD+vG67pr9vi+AMQYGmMIYD4OBsvLAOYehXy5WYRjDOJWciWFVN8O0TTlolmkYnSXj3X44TKm/2b0zAK6yjlFQFaBAOukWw/XOI8TDLrhqOEzLpSeF26sbLc3g8+MXjbFu6Mdm0SzX3fXlu9Xpent98/Enj//sH/9lE8+ati4
lWKS7q8362SMx/h/8k3/6t//kJ+wsWXq4C+Y+AFX0QTX20I6AzHNbOKq8VKTM7bgezTiO0jURmWul7yX9zJNvUVUFmQkgqkd60QMCBXPGyYwdwXEygkcDornnAoH3PdFcp70318CZWg9zDomIxBSHoT/sD4jovI2pkIpxJqcESGSIiQzxYtWC6m6zr2vfLVuRHMfcNovV+kKRnTXh0N/ebQzr42eP1+sTJO770LQ1kN7dXm2u32HOzx4/8pw5jwsqj06X3rbbfusren7x6M3rTaPYLmrJpqnd3Xb79Pz0dLG8229PFm1l3ePT9faw7xbN4TA0lR9DilkAsGl8VVfG2drZirzm5K2prSEFktL5Oh1SzBm0vDhb3m33gwpaJDbWoDWEYogJBXPKc9I8AjAQsyt5cq7OkihNq5N1H3tibZtujCIABOptNaZJsLp4+qO3h/HZ8+ff/P4v6sobA662tVuML6+HPg4xgyVfuVXX3G8PMUukqFC/ePp0e3erSSyhIbaG0yRfvXz3/KQ9P1nGsYecEYshTgoImrOKYv7/kfUnwZZtW3omNIo55yp3dSo/Xtz61fEiXhRPCmWEQEaKTDPMMIwEZGSSBgktOmQDM4wWDWhAmz5NOhgNDMNSJBhKUwokpJAU1Qu9iFfd0q+Xp9zFquacYwwaax+/NxPv+LnXzzl7+/G15przH////c5ENajZsYbYVGawfpask0qOSXLIcUpTEXwoysKHEIJ3PrhQOOfYeTxG0h56g2a+NRxpWQ/X4sMJfDZlfJt+BQAAi2pBevXJh99Z1s0wqeW82Zx0/R4YpjTtu/3MPXWO+8O+aSubdHd/O+VJAeqqZee/ev61qJxuTvpxUjMVRYPCewAiz56dI6emoQiOAJFGnZqyKkNhoE1dmcI4jm1Zl0SA0vVTICPmQJxVVYUBAMETMjFani14ohmQTBCZ8KFkxysllWSqAGOMCBA1b9pquNu/evli+cn7adjtRT04RkxxSoELwu3+7uXrLxbr91FNMzhXF0WpEvth8GVrSkOcuqEvuNjvO2MyxcN2DLVvl/Wuvw9FIOZD32+3XV03oBpKr5KZ3GIZWps2kH73vcvfvLx6ebcLnv/xn3+ORD7ABMW//MXrqY8//OjJNMmfvn2tistNWVC1Kh0YjuNYF83cf5glE0BMkrMisgqgHHURRw7ZBxeUCkMCwL7vC9dM05imjlrPRemQYhqnnEOglNL+9qZWbDfBhTpPwEVTQ4w5wzgQhdKH2I1QeGbPvvWlA2RyzgXH3swm0GCzQ+CoBMCDEP4N/eed9INzkhffHR3xWxjrB+fRu18PzqFvbEX47sN3K/RRWjomX44B4ofKG0RFQHi3Itsxj/YQRgN7sBR965VpfsOqggqmCMKIimCWCZkxIBKCU1MEoOP7J0AFyTxXzyl4IjaqkRZAg1oyADRmNzOLMuiUJ0PiwCIoSXJ240gz4qUoSsdUkNecc8ogqnEKCK7y85RCNGnsTUaQniyyQzI8FhRInnOZxATICGSKREws5pKmhKgqhuRMAY6IY0MgZAcz4hsFwRBtrkrAI6ucEBwggSbIooTAMzabcNZWTEwyghohCIJGQEJCy4peyTwgYIYZEwQPihGoqMm7f4I5lobHrtB5lgWmpoh6JCmDKEbBrJZVUPMcexJQJ2IW2RWGSI6NAJAF8FgLiqRzmsqAkZDAEEMQxwTOjFmjCDA6JkAkIwQjNBN0QqpgYnlUUzQ7RquQj0G1nFEUUQFIDcE5YzFBmHsHcrQULSeY81rmBZ3M7w29oAA60AwWUZNhBmY0I3DsCV2FZQ2hAAoKThERGTkAOkVnrhAEBadgCMzkiZwjZueMeHb26DtzBDMz83F3MXd02IMWdHxUzPygd5LXNwG0/I1WdNx1qCoiEJDYw2wDgJCNFACZHSA4QsLZWc34oE3N5itmmtvQEEzNjkLSLDGTV2BGCEXpHKMMMo11VRYhMPB46KeuNw3eFYdhcsFSBkTudt2h69OYy9LFmAC5KOvdNjKEOGoSMcsx98OwK6oTP1niyWEOxGN/mO5uA/icx6puXB1KlHF/ONksb262U0y3d7uzx6dFASa0mwSdVY0+e3K6fbXtp3jY7denJ8si/OaLNynbx48/evPq8ylPZbGuq/LL6zvfu+Cb/X6nllNOIbiiLI2oKAqV4yayWTSHXR+Kin0vCTMk8rpZLXd3h1AW6IWSls5j4OCxXhT7/dW2i+QwRHeYMpl8/eWXTzbrcRr2pjP7s3DYDQdwPqWJJ2IEZBLJ8zEXTDxVlCcqtK4qMJRkw9izg7Zq9rsRQk2hAtCLR+evX78KTNt+WLb1MI6OYN2cTocbMBrGFHixvd6lUe+vbqbDwYU6LFabR+fub6hum1c31wS0Wld3b28DFqhKQGLgnOu6KZQpid3c3S1OTm/3w9QPomqzA0iNjusHgqhIL6AYSiRS1SSATJYhqyAjAprxDCkSFaZZfMzk3GzHR0fAoKiO0BCJkJkNUdWQ5iikZ3ZmKpYNTdU8ExJ/y9gfHZpXrVent0N/8uiTm9sXjXMNBaHihxc/evHLT3PN0/ZthHRx9ujJxfkXX94T8ub0EjBMOpWLynpty/LF/urq/u13Lx9hiF755u2uWSG7doRQt5vh6kZS2pxt9oft/ZaGtF8s65YBFhVPFfRjN2w3y6YpyCHE4XD27PHh/q53y9ZjTLldtLeH28ebhWZOFonZUw3WM3lTI2DmIqZYVIuni4XadPv66sWLL7/87NeFd48enS8XK+dKdlS60sCrKlBThOU4dDlvfcFlWRx2h6F/vTh5UsXTNN0iGrIiU6ir8bBX1fu3L4tFYz28PcTiu0/++pef/d3/6r+V+l40MvrNuuy77le/+IvNJz+WennxyaXkfHsd1+dn+69+9eu/+dX3vvNDNUs51ku37e4G7hziJPzko+/K17fDbsxJpn7anLVQ4GGbQlhIL1evn19envQ9/vrPv3rx+W8++OiDxbotivz9H33w+HL1s3/92eXJ4+vr9OiRffXr62pRb+9v/uH/9f/0P/wf/Hf/W//Bf1R/8uyrfP/0g4/efPFZe7F+9vt/8It/8enqldNF/fpOUtJV/Zi4jgOkHA+7w8X7Z6BqDAgoIuRDPMjC+dhH5y14PHRjWTgHHMeoTva3hxACL5C4Ip8kxbu3+5OTUxmnX/7q84+/9wMOzMEvNxsADFDYIb19cygv2raQzz9/e3mxXJ8sTOjZI357b/0i7257suKsdYddjlNfFrS/6SHQ2fni9Lz61VfXd6/Gx++1Q9LtoZPKN4Sv377+8L3z9aLdj77vJ4eFDfGLX7+s63K86fto7dnCjK7e3g77IdmdLwLm+7/+l/+Xsxb+zRfX810wDdMEERFEBICcrxwwuiKmyXkmUySQMaOj7tA3TVOEkJAuzh/td9uUUlnRolnvDnsmLasGNJchdGPHrnTs+/1kSW53N1VZqAiLolpwvO9HdJ40SxJgY0dMbl1UQ6iGbjBQdrhYLDQpIXl24zhwoDxEzewQpmlkZlJIScUE1ECtqo
qsutsPDti7ahwysg3jsCoWDAjEwASBNcv17X0oiqopHRGOM98Vt7v7k+UGUNlBSqnr46pdMOYi0P3VtlmVrmBil/spOO7iCJiWy/Xt7XaKEzJOU6wJc855HDz5qGnsD0VVSwbLqfJBxEAFRcEUmQR8Nw7B+9K7KNmyek/z3D/HuKorCO7uri+C74bRFWE4ZEYT2o/DvQkXvpFsw7izpOwojrEuQhUKFkUHXT82yzNfRtK362Wzu98hGDMDQBIldsyIYMlUTck5lsTMc79MVkXv2RU++BgTpMyE5yfrq9ubuE+LxSIlWZ6fffCj78nQHcZxO06VK/KUdMqx70GzI+u6LQTebwcf/K7fj+PQeu8YppgPOU05BQ4ZwUzIV6JdjBK4ZuApDd6H7jCA58WmoayHwzaNY+2L999/7+f/5suM6emTZ189f7m9vf/w0d/75VdfXjx+RMQAIpLqqrh9veONkzgs1+vN2cn99fak9vtp/+zZ0z/52W9Onl6ePTp/9eL19f10eeKv3zx/dyx50G7wQSECeDh0IJCqgBkSfUsHmkfUMh/SHsbUD8fv44YM0AzoHWDI9KE97XhYMTQEmvHE83Z53sJ/kyLCh/c2/29DO6pXDzrSbHTCnCXFnES7fhhj5MJT8Eg8xWmWqObjnPNORcumQiKRJKIE3G0Hz2FzsX7vg2dq4jwE1Dc3V+PQlU34zg+++/bldQjVar3q+j7KuLu6Q9Fq0TDYfndTOmqWTeHCbnt/cXEePGcRBFssy4vLZtj2aNiWZeWdQ3KZWvTnqyVqrr2b+m6zrg/deLft4pjKIiy8b4pi0bjaUcHQrBZ1SU3D5FxbrquqGBuzKQ2DhALb0jNTzLpqG7Ukam3b5DR6EFc2LhTsA7BzvmDoFbGol2w5JQVBcxQTLFcX6f4KhKY8qExVWUGgRL3xDsh/97vfff36msCyjOjdj3/8/Z//6rO8HWJK07A/P1uVge53w/1hEAGHtNksskia4jD02fuqLOOUd/vDk9OiXbeGNMUxTdEZMoBjE81OQQ0kJ1BQh2ameuz9AYOsCmJxUhUWEXEp5+SDjyE4532YiuCdD8QuM7NzzDP3mueuvmPK7CGXNnuL5qtmpkXMuOj515ubKwfp4uxRU9aHsfMTNsv1OA2+CCmlvuumOJUB66rIU66aMEz9MO6c823bFoW7u74CABd8Pw6SRcGqunFMjrgsi5TNkUsxB4chYJxG511dVsUM4kVrmvJmfw+aNoulN5lyTM5z5TIoggKziVSFjypZ1Qi9Y0MUIFJBIiNTgGhKpuMUS6YM6lxhpqh00a5WwDBNMek4ooqJwbDtm8UqpRE0EQcgff36NUPZlAsCDnVQEV8W+RDZyLkiDbHvRhFqV5vrN6+o8J5c3+1X68VAMOTcNityxd3LrwvfgBmoaRYwU7QM/Pjy4stXV0+fPHl5u7tYnvzsl19sAjeF297sq6rZZ/qnv3ipvvjdT559uu0/vbqZKv7OB0+Ht29iVACCEg9TqsmFUB/utzBTdU1zVslTEQof3DCCd44MhJSdZTBJFhw7KigEM0RiYguuFjG0ETmr6HTYF+ypBVcu06iKAQHiOEnuVquToq5C2ZgvlWtkNIuIgp6ZgxqBKnNhM7XmIQr7DcLtG6/O0cH2jZlhljJhdgLZt4i63/r8bylH79Jr87fHY0rgnTFpXj+P+vsMA54/BjsCr+c6ZDg6ieYvIzQx/C++kAmRm/k+gIbAMxAewPToUzI2ZVQzAYSjgmBKx2+kZsJIIFoClgpBrTcwxZylqioFcOwINTDXZSiDr9saEVSy5Tz1naXMhBZDDqEskgNwgOQJsgKaicLUYRwgDZQjocwgZGKePWZqiOxn0weAqhkig2NkBnaenWaRJCaoOeH8d8rKnvHIKgIzcXOSbx5/zlYjQxIhAxNTZgKH7JC9oQObUAfQNKc+gDyAISiIIhpEQZx/bACz9REMUc3y8d/dMuDsLjqmA81As4iAKGQBiWJZQSynnMWOerqaSALVpGhIU8xEgCkCIhG4gGAgOheSmhAZYMac0EVDJHSOZkBjMGYYMVBCJHBZgcylrOjYnNM8gko2wHkDAEjew3yQBSBVmmc8NgECA4MgEAGygdn8XlVNMljWebXCYIgCqjpfKRlUTFSzATKAsg9ATFVtxcJCBb4AcnOf3THURoToFZ3NtU1zBH6uYZXsjwLbjNRCYsfMZAYqM/R6Fk/Z1FSZ2RAYcO5vhXkn8m2paJbs5qXs4Y6eayltzivOfHIkYkQ1PQpSgERMNG875p2RIjIB0zF1elwrCGd/EtJx0MEmSkSSE2qacgxEDsKiqRk1T5OZb1bnw/310I00THXZytjfX98e+qRAajp2Y7uuoooJFt7f7faStCyYNBcOHSbwgD6r5KyOyedxwDSUJXRdV9VFCE4rAkqnl8tDp4f9IWdftU3uInZ9XdeGzf11NwzZiIqmjZL73X51XkC10Ioz2tPLJz7wNMZN2Sx8HQPd3NxvzhpiTIMV6EVpFHVZIVqzalPqy4KnfleQtss1moSCQoE+EELOmct6VRThzfXrvkMzq+sqBGvq8rA7iEQDrOry/OTs1fUrAlBlyQIAaZSiKR0Hhx5TQgPniqHvg/Oq2RcWIUFSIE5ZeDZGqmbOSej1m7vzy0c5TwF1uWz7Se+3h8LPErsSIwKNUx7iyJ6GfjShnMc47Yu66KauOF+0F+vhettWbZqE2PXj4MjKJgx7SUkcYXDOse+nMQ2JiuSDcwNn0Jlu5dCjmGRhR0Azdk00jogUfMhqMqbCCkAygBCCGjF7VZ0dbPP0VlVDKICAZ5vfvGrh3EADRITuGI9XldnSH5yHozEQ7QGUBwACEOpisVrd7nZJ06G7rjkkwWoRut3+5dufP/noUTfR7X14cvksT3x3M4nYzf3bZ8+evPj6ZuT0nU8eyZuBGcoKd1E74OWySjYQOgt8cKnncNXFR+uNpnFzsbnvdtvd7clJeXvzuvLN0hVxP0777cX58v5+f3d73dSn/dR/9vlXlb9chLO2bkvmLFIWTZ4k5RhKL0kAEJ03gpwy+8L5ikNtIojmXXj64ebZxz+SGLf3bw+7+5vbKwNYrxdIWLiQpjGrEhcQQmF1P9yTs1AFxXTo3/gy5FxqJucJOSNJvarfvPi86589/v6jstlc3V+dXKz/4I9/LyW7upo4wNvfvPzgow0B/Xf+vX/31RfX//rXX//uuqqRyqb+6uuvHz26cISDQrW5MIO6rvtuu327S5HXRfOX/+qv6mUrauV6tVwtkkw3r6/QwvnF4/6+u3z2pGqZFrwb7Ic//V2JGR0r2M/+4ufjR5+cn5w9eu9R2O/Rwfpi4Zj67fg//1/9L+9vX/1n//d/9Dvjj8U13STnf+un28Pr132+/PEPOdFtGC5+xHq9evk3rz740elvvn716OnGlYub60myrjeh79MQ0/lZvW6Dd6YSDajvMhjnrCpw2PVNU61PT9Vkd38IFYEZejp7emYRRN0HH/1wcRKmId+93S82xW4/1lUZI108O+WaLacPPjkPw
d1u+243nq+XIxwuH5+UNedJnPfWxWZVpZSa083JpX/15f32bjo5W7ZNy0ELp09Om37bJ7CLZ2sL9Orltmz09FHT7xLVBUahwoVF+2/+9FO7lg8+WJ9dnsF50w368v/9i8PNm3/w3/53/uzP/tl73eNPfwMA4AtOUZp2cegOhQsGOHadqkSZGtcWRTnsO7VctwuzCoiazfLFy5cOXSjKUSApDFPSKASMCnGKTByHIVShqiqssq9LypMRgjkOwaNc3V2nHJumbetVF2/qsswa+8NhngmHAgvv8hRvb+68C6HgNE1jGtp2UVWLN69uM+aVa1fLRX/oiI/NJOMwZLCoNo1TXZbtqs1xNLCiLacUxyHGNLTNugz1bddXbdt1PRcOAgFqSilLIsAsacriGIvCYz8C23K9+PSzz58++fjFq6+bZSCS3X439qQGlmXqr7quJxQugwIaIRVVkrRat29f7UvvDQWQ2DMRiUbBDApRVQ1FZa5szVnNYBwnHwoDjTEnggS5qSrnXYwjgU1pDHWdcgJNSAQG5DCl6Iqiy9htt459YMIshro8PymaXTfE9x5VBdO2Hwrv4hRnUQNUCPA4viTMambK8zuRHIJz5AAsTbFY1XHMu+3IaFOfxjT0u/5k3QoxFeHT5y8bpEePTlzhY8xFwdOhn7pD2/o09oxyc33fdWm5rIlxtWrv7ncguSqb/W6HAFxoTpNniGnoui5nLQtOcSKAtizvb3dlUVpWy9L3U7MoxfjzL16+ubkRTOaGyrv1uv189/LAOVIe+hGRLErZlvVmkefz6r778P1n//xf/dUH5x98+eKzy81ZGzjd788u19vb9s9/9qv/2h//1uriHN5tiY5jbD2GF2w+8MzHmIfxtgEAqMoxAmbHJw/gbCmi2cV6PI2YgamagiIdaUQPZ57jGQpnoUdV5oPObAnRh3iEvcvBffv09MBIegAGgEjOImqoBuM4Df0Yo6iyKUuawwgI3iFCziopg6EkhRJXp4v9rh+7abVepTGvV6vApWRbbRbDME1pqpfLtm4Pu6moqrpdGdPd7hY1oUjbVJvVZnf/FgOcnp6kMaLRo0cXy81qe3fb9V1TFWUdiByC5pSWbbtoF9M0lcHVtSdn/aFDdFUIKcPL290gWga3rot1VXn2eZTzZtEuiphyMFyF4J13Dgmk8lg3C7jHlBN51xae1Q1TdKCTaIE09Qktnm4WPviccxVYNJkJE00aF6v27uZut9tWi5ODHgIEg3uFWJRVSlNJBYmvq8LDqr+7hTKsNuuyqKZputt3kcP3fvTh8Gc/u0vRl8Xb2wOahYIv69X+MKY8bfe5KatlXXVdT6aikXzBjrfbvqnLoqmpCKEQSxOmySRjVgBAVXZs+uCKOO7QwdRmiGgWU9EoUTmnGMm5EPxcPTAx+6LwYXYYeRcCkvNuzozMpWezwQMRHlIkxwikfisLOV9e+OOf/PT89PzQdWXdUslZzTHpFLvtVnNiAEcAMzJ/nCYZJeeyKFDzYbsb+8EVntmBGjeViIaiMFDNycgxY07qOFSB2SOhFr4ERUh50TaMOMXRxvx4cxqKojvckqZVXe8Po6ZcFgWKjSmtm2bbHxhxPqdlVSSe05uMIGqo4BjJsC5c2scCMSWrAn7n0el+e1825e6Qh4QpqSbtttu2qmKa2to5ht3+rpvian25bFcCzA6TJEPp02CEnrA/dMSuIDdO4zj1y7rRoUOZ1svq9fMXm5MPAKwbDmMambQqfdboC5fHNN/4q5OT5Wb1yy/uqlB//er6cl2flp4QrRumcZoCBl/+5vl1Oow/fO+9ksIX+8Nnb26WzI8cIWCM0Qlm5hB8GlJKyXkPaIaQRDMYI/iyEWS1o83QBe9dcXf9an16wi5Mgy0aTlNfNjVWbhgyQ4rTkAWCC0m4pNqVi5wnUjMxgTylqSoCevOLVRQw79jQhcqMJdvcWmQmiAyogGigYAD4Dq58XD8fPELHxe/IkoYHF9BDoOv/TzA6eii/pRcdudQPMvmxcOAbmyU+LK3HHKY9KBA4v9AxADx/wTtzxTvnEwATAcymh/n7KpmaKREhMJiCTogGR/swgSkcbSSomgnAuUJydIjsqHFUO+iTRVBmJ5KRGQHQgA0qX9ZVXZUtBY/Ok+OcckowieVJoDJP5D37wPOwPMWIYJaFTE0ykzECmrL3QKymKIhGCoYAkpKIgBg7j84hMuLsOFHTSU0RUbMhoGNHoGaJEXFOV88umOMohBSBVNEhQLbj3IHRBSMGNEIzSArKx0xSengCylwzf0SGqAISqOADYhkkAx1RNzNRw8xMQMUsm2RLMauYquUUU845pZgtqSpozlklZhHDYtJE6NQiQGJCR5iTElAWPWrjzKKmFCKwIgNB4ZiZCigTSAaaNCEQ5LFwHBN5UnBoTIioKkwMpkyO2M/wZhRhZNBkRyXSjj0C6Oa/DwKqqEo2FdWsmhTRjBSnLCYAWQwMNUeTqCpqBMSIhogcKuEAoVUXzBWIrIY2C52EyAzAADxrqQQgZiISU1Zko4jEwOI9gR1JQQiQ1VTFOSBE7xjn2xXQjraeo8H53fn4wVWUxd4FQ2dNShXM6KFUwJDnmxHJPIWZE2xAiDj7qIlozo4SAqLNNxUhIhoiECEgM7EhP0zIFDRJnCSP6/XCe1it27JuU7wf+1GUh2TdMHWHsSoMg2WZ9tv7LDF4RyBZtQjlOIxjyhicoCKDYnI1pYNZngDFB4DkUGxZtGMc8nQQHwQ9TyYMSc05ISTQYbNaDsOIQL70VY865cDVrhsc+rIpFbUqeXi1KxnrIP3NSxtzaIIhmMX3Li/fvHm73+/YM5A2hUdDT0XO3WZRgkC/6xd1200xp+hNvK829fLzr9+cnC22h3tkKH0w727v74ZdqnxgwJhzN/Q+lCerxeH1zjmqaqfDeDe8XKxDTFrVze1957kouZAsgCAqzhOoOAYicOwFlZyaahIJgeciPlNSEJAUuMrTmMaYyaZxLOum8nmIli15hyn1kveIkchNYyLTahmWi3rXd/Gwh3bZxwSeseQpp5jUNE/DUFZO47heV9uhswQpZkRMOZWlG5J23aFp6r7PSDyJxKyMpJqYkQjmqjua11Z2BgoYAFDU5k4DSSYqwIQApmpMjITAyIDkAAQAeJ6egc7tjgB2/AAt6RjY01wESIREM4sLCfkhgHbYHXixOD3dXO225+tTn6bufiDX5DIkV/SHvmy1j9kVJ1NP/XaMpeMiXC4uXF1kZ1Xg4bAPRdhvr/r762XVvtlu64/fH4aRC7jb377/3pPXd9Tv+n7hy8C//OLX04STq605gxRUfb/PDv2i2bCa7QB5OTg/8P58fXL18vps9X5Zl4ebvSMGyUw4aWYuTBVUmZkdg2azATQgLxHF0iBJM47E6FxYnlyenD3VnNOwv7l9QRCxLDwEciSAzrvSL4Bh2N8S09B3JeokpYyoAOioXqyH/h4pLNvHcsD+9fXX919pSfF+Wl88+dlf/fwnf/QjIg25WrbuX/zVn/7xH/zwurv66d/5bUj5q6+vPrh8r4Dg
XeVLD0nvb26fPn329vlnVzdfPH7y8bI9D1iUnslNmEtLXK5lirhYnE87efPirvLBe3z56vXm0cl61a4W7atPP4cxUm5++6MfXT5+cn399suvXhtmGGGM6enTR//0579OB/j6+k9//Hsfe50uP37sGne4o6v9YnrZ//SHF88/+2Lx2+c3b3N6ce0p05IfnS+Wy1IRru6nsimAOdRctaEIdHtzQNTVqpjGFMoCkPv7MYTi4uxcs4mIZFk0LQU3dmNRlAYgJOWqJoEXr/cnm/Lrm7vfOf+Qs7z5avfm+vajj09Oq8UUBQEcuoKK+rz0DG1ZXL3aE8B+1+dWTs/qtnW/+eX9dp9cc8GefSBk+Ku/+PM/WP7+1d3d+jIEKoJ3t939pqqbEKCQaYrb2/2qbVcnvhumpLvLdTFASpPs9ttFmaWbTpLef/H6Cznrh2pK5XwXzEU74zSGopj6Qwglo27Wizc3UaN1eYfGiOHuMKF6h7K9u1+3LYjthz4gBheKUKAhjUMoy0MaSx+KUACk2B3IeN8dNhenh8PQDYMRTjE5F5iCB9/v93lMwlU0FEVTqxv2xFmsKop+GFLMrliQC1VdScx3276oAhtDtkN/QIR5N9P3gwoQmKkVweecccp9N9Z1XRX1/f6GMWwW1bTtpn4sy5BF933H7GRCM4xjLKvmbLUSpOAwDgNXBfI4dvHq1e3T84vPPv1FVS1222HsxvVmMY3T2PeLZZNz9N5tVue/+s3fLBebqICWmqbohwOAEVaMMI7btnIMULKjshjjZEDJAMRw3tsSO3RNw6JCiEXhu/2kICmPdVlQ4Q7dCJAcWpTUD9FRoahDv2vLAiUNhy2Sk2ks27BZtzdXrzzLwlPsxtxTW/txFBe8GiQRUynKQkVEhIiCK2Oa1MyIJAszA1Jg5xkc2mbRVGXRLqt20Tx/8/pk1SLRmzf3k+T1Cb7/+FFhdnd975uKMmrtZezKQl6/uY2HUSSPUyyLQtXI9PbmZttNjv0wbnPMnlhyD6iH/cGX1TjqKAZokPLTJ5fXu/3dbvze5TPnU4xpv+9cmYeoSYlQbm9uh3H37Mkl9VHc/Y/+9u9zWR22+9PNuaXIwS2Xbc7JVHPW5my9WBX3/f4nP/rhX//8V9/7zo9+88Wni3W1Wq+vX1x//fL69/7gw3dK0Tc+nqMshKamIAD2DjtNdqzcmje6cHT0zAcRNDw2x9I3Y+pvUQweZuHzSceOXA8zUzR4mLvOp5n5+GTvvsRUEPkoEx19ILOOpGYmImaa5hGmGXtXFEVRVm3b1M1h2qUYs4ocFSxHVVWdnZ4C4N19D2JlUaxPVu1ycXF6IZNUm0YNvn7xYr87tIt1WVb9NK43q/X52de/+SwPh7YIZR0YxxxvSwYuSxQnKbbLtj3ZhOC6w11JVDcFkeShb0NhzBlJZHRobRNCVex2+7ouAGm3l7c3N/1hOl3WJPb+481+u19drNplu92Nq2V96KZhyOfnZ7c39yfr0yl2MnFRwFjiXW9xkrLw56ebL149/+Txs89evRW1bGBAbVNJTnW9ccSmWhZeUxjACFzhipu7V03dMokv6rptD7t75wsFMEuFo2E4+OAX6+XY98Fxkr6s61Uou35ktT/+o5/85rPPn1/3twca+l4nJQDvUFWGLkpKUnjH0JQBQCX1XZfKthiniNwbzpNX8r4AmjteQQTIyGy+DcBmGqsagIVgqpoFsmJWA9CcBVVEEpILyXvnfErsvPNj8N6H4L1XH4jnHLEzRsQjfpXnUmcwRMZ3p+qHC/Ti5Ol3Pv5euyiROKPRaH1MLriUpxhHQ0kpzrqz88X9do+oOeXg/eGwlZSc56auRTUUHtENMYpmA3TOmVlKCdXKxWLZ1tPUVWG5qNvt4T6U1Waz3m13ZlY7d75uuzjG4EBLIjwcekArgusPg2Ooq6KPIxOImSdSBQOrAmdRRGKGrKqMyRRUn5ytX94fmsL3cSJIDFOz2Axvt0O/H4ZV5bApitT36CBDSQBXb67JNauTi6KuFUhi3zhPKXXdoaiKjHJ/uKN6UfrV7d3b/TSchTZrptKbEuZch0pFcxwAZLmpp8MQtEhTZPIuFGIayuJ7Hz755Wdvi5P2ze0b5BDqgCTTPm33MTA+uyj3u/iZHHp6+dOffNT9+eejqhxG5qIqGQ0IaYxQop9iBCA1CYyWIhBpFg0WyqoIraQUVREYXaFRmAIB1e1pikRFWXoDUSBvWCBIcDbGaez3JbvY34msvF/kKSFpsQhTyofdnqwpWlc2DYK6sAqhIvAgRnPXuJutIAJGD/Uyx/VpNhjAN0vZbKV8BymaO89mmeedovSwtD4oRPguzDZ/g4eA7vwiD5LUu0/D/5K8/m5Zn+8tNJ3F9uPR+EEkerdQF97Nd4VmRSBAEFMiN6sdCEpmZhMiIHpEJGRTUFAFRPI2I1YdM0GSVJahUmNJKEqGBJTjqFgUofSOkMiFolgs/WIJ5FSBstmULI+I7MgVvqoLMBRgE8tIiERsjgwJZ2qyuBDQeXUBDSEnVEXJKY6SLEYBACfJic0FggggwMgBczQTYgQTQAUQBjNJRKwPOcIHLxWamoOY1TgwkUdiYFZiQAOLYIkAkYLOE5SZtmeCYEcFDQxUUA1AwBRA50IGMDUBQD0qeaZqHsQgiamBoQFmkRjHlPIYUxaLalPOYCqSRUXExFKa0WxGM7laSYNiUgMlhRngLwCgZMlY0ZHnZKbKiInVYRb1kMVqT4KpCMFIJIE6JCBPCExHdwMYQCacmOeHPM84K0BU0zm8qCowd8fnBKYiSTWZqQCapKRJZ9K4ZDVSnSdFaOCymIEDUI6CZjqOGmYyIesx7EjovCEAk5opoElGFTAAcALATJKQPXrnCMAICEFF5qCYIhgYeY/HFj981485d9fDA0zoG6kIEEDB0I7DLpg/RgRS06M7CJHJISiAMbGaIBgRz3RiQ6N5RoHH8QUf83HIxHCUjvHYzSkTqpiJSfTe5RTLsirrUm3cb69NFUD7/m67OxDTYX9Yruv7q20cR0YhAiaO6WA4hwgh5gnQLKcpS1mGUDaWldSKOqQ+d1NfBcHACcCS5jw4wLIom2Ix9IeiclVZ9YcxR92PKXhXNw17lMHWq8356enPf/WzVd1sh1y7xqObrqa2qouLC3A45Yjo3l7fHIZ91rQ6WRdVsd9tKy4Wi/rwZhvILzen97c3KY8g4J1frtqbbbef+qoquy5VdWs6dYeD9IoCHLFqqkAgXb9eLeKU7u9vLSYynobEAmnMTVFnHfM4LZsaFS0lBOunrijQeciiUSN7lyCSehTwzjMxoRPpFdmH0tCmOEieCFljaleLsY9pGNmRY0tJyVEoF9moqIKiFXVxc7g/9adTTN122F3fnz96XBZV7CbZx/5+8s5PU7zbdk+a9ThlAHaMGMUFGoaJDMycGvT7yTuerwYW8wiARt6pZlOdlWlVY/amlqaMWDjnY4qz5BklOscIhgbs+NhsNlesgiKgGcpcXoBIJkjovFMVQCBmRAKarXcKAKrq2M3lhu+eRqq4vz+MO2V
HpceC6tHljG7axSHJyfpifzi0zRoTkuO6CebBB4pDbwOeBG+ih5tDaNqTs8fTISfFboBPX15/fHm5f/kqVIiBWkEFZM5Djvd3txk9Onn1Qh+/98Ht11+L5tOzM1Bij0+fLF+9uCmC2/VT7keTdH13VXzy4+RzknHScdSJQ4gpE2JMU+EKU9E8dIersu7a9dKH2jKoTCKZkOI4KaDMzYnBrTcXU5y6vitc7TlAzlPMwCFQM8qBGYoqiGam5EI47HeSUlGtiAOZbS4fpWkzmUyafvLT3//Tf/YXveoPfvIxUOqGmCDXJ5d/7w//WMf7N89ftOX55ZOn9ScfYNKz8/WrFy+cU09clH5IByNct4/K8nz/6v7l9qp+5D/46Duf/eWrN1/fjgcmWj/96IOuOMQJLNurr67O3lvVrf9Xf/Inh7vd7/3Wj1abVX1SP//s6v/7Z//ie+9/kkf73u+8fzjcFChXt7e3t1/nu7uyiKefnGOMsh/fXL/64KOLDzaru6v7FPTiB2t/gt5DX7Crllef3X91ff3b7XfM6cWZR8JhPzjyRaCYY9NWOUoWdD6YqCRhpa8/e3Fx+Wgc5OSyduwPu0OBMHTj0PVDn1frDVYqIOcXFZD+zk/eg2n0pb7/g+XZsBwO3fX1YTok4mZP+fbN/uxxffP2jYl+8MOnXT+Uy5o8X+37/cAnp8u6HJ9/9jLu++bjJ69f3H340feLogpFx0Kf/eo6kTs/d3dxfPn18N3fW7DX84vN1Rf97eG+PS3AyfK8OGs2b9/sT0/K3N9dPF399/7Dv/+P/1//+peapVhd33w13wUpi2bdb/fAvF7U7DhrPkwdEJRVs9m8d3X9umqKIU6HQwLCsc8A2LRlVRdBQLON45A17sfdpqW6rAK6Qz+SI+/L++t7tMkxGvlFE5yjYeyb5TJntSxl7YvQqgoTlr40NRC9vds1i8ZYQnCGsN/tCJ3l5F1IUV0FqjrkWCBL1rqpRXNKkynWoUEwVRnTROjJKPbj7e2dK50RkFbsAwUp6jBt+/NHp2x+HPu2Lajkvh+ruo5TBKMpT+kg5KgIIQ7TfbbVYpNAS1+ZwZQjMj66PMmSYtIpjVfbfP7kchpy3Zzsbr6A0nlXjnkMuBABKkIXR1YpfBjG0UyXi3VUBkf7/Z2aGM7wB485TlOMMZdV2XUdEo7mgZRckGxT10HOliSBqMnCu0B4c3unSUS1Do5R9/ttXTb3d7syuKbkNMlm1WgVX17dMwJIAjVyDlGRZhdMmhlwnp2BlMGzO5ZxAMB+tz1btaUngPzxR++DTE1VrRbLUdOr1zeff/bFwlftoq7ZKeDhdr+7e1uVkMYpgdxt90XpQ+lDcPv9rusHo1AWlKfRB/ZMc3dvaIpjS5hAVVaL9erF9c2+G9bLsiz57m57e989fXL++ZfP28U6AwDnpnTrk83udn+5OS/PT+qSu/vdcH336PRJL4OnEgmIpC7Lu6tD9/b26eZ8v72Vpt6sN1m6y+X68Pa+WS52Zfn8yxcfvX/x7jihc/XJO0MQwsOR45tDzjz1NlUCmuu6j0qTKiKC0Tz/OHbzzLthfBCJZqY1PEzBj194nLSbHeEGD6mKo7/7we1EOuevyauRgR0nlqCSU0qSsiQxVZmSpJwE1DEV3nt2zDRNg6kyESKcrBdNszDJJhqAnrx/uV4tBdgVPse8uz/4yk272B+6bFaH6v7t1cWzx+TceL/r7+4rLCoMQ451XZLBodsys5msz8+q1ZqDy8MYwDfrtaNiGLdJpa6q/tATsisCk5vGXvLEAFVVvXh9N8QpD/GjR+cG1tYuOHBsRXBVCItFE5yzPFShKotyuWqK0lVV49DFYVgX7LEoiFPSFMdVWyJGAxM1yVqtaiVCQl/4mPKyPbnthvmgtdtvQ1Genr0fY0KAHONysYjjwUSruvJFCaybdnl/c1O1i6popmFUywgp+LI+OUn7w3B9d758xmU6TfH58zdTP3TjRC6oCHjMYEPODZKlGBwDmaqMMVYOh13yM0fDVFDVEiGCEQMAGhIeD7GOjk1lNgM81BslMxEzVc0qYJLFwDTnhOC8Y2YffGLyRXDOh6Jg59kXznvnHCCxC+R4Hv/if0HE1Hc7oh/86JO2qU2yc46ReulEIWXtuojs0EAtjXHc7W99WceUQC2UzTiM0xQdE3sPTGAwTFEtmgmosSs4eJNcFt4TN4V3Th360heKUtdusz4bu6gqlS82j6rKyaHfO8CiqlJK7MlkFlKBmckwIGYAQxDRmh2YOKKMNM2iLtO8axQzM5jUyMgXhfjCBZ9MyHE2QFeMomeL9X5316waj9APaVLenLzXrDZEGrse4li2xcsXX7MrPDjnXCbYLDa3V7eHOCXnuzEVmh6fne/3ew+e1Mc89t1QlHWWXNUeUVMcMOBcnqVZFovV9549/up2+8n7j1++2W3vxuDVZX18skga97vt73783mdfXgOkn//i8x+8d/b2an9AHIbJswvsLdAwjXWOWc37ckqCzFVVas5MlJNUi5ZDkVNm5wCdZCiKMrC/u7ldrJ60i0U3HeoAdVO57LvU5zHVpRPEaYpCu7ZsBEoOjQt1jgqGjpmdV03jcM0huHoZjZmYjFiJGIhmoUgB4KGh8UGsmU0E3+IXHYmh8CDymM4rIX4j3bz79SBV4DsdyI4B3+OSjA95L32wQ3zLfWlHYybAu4KzY7Z3loVmzer44YNl4kEq8qICCsD00DDJR88pZFQAPur3qAjoZidpMkEMZM4MRIWYAMUFLhjrBLXDrCY5qhqAohiaeecJyHlflAV4D+QlppkebKIuuLIsyZFgBhNQ9UUJRASKOtFMh54PO8RIZEjHSoTZlCMoZiKmqoKiqsSASI6ciaKhEaFzoApiAgKgc9+ZyJyoshmrBKAmI4KJCTjHRqgGqmYZDGeuPBEyBgQEZGBVU5rjUuTgqN8ZIAFmFIVjr1kCIJuNsSo2E2ZRNSdUU0mzgDTzQ7LGrCmJDlmiSMpiklUNgJNkNVWELIoCgC6ZCOkUzaGbLVJJVefLDUWMlYTVmzlSywrkKCFNyaqSVTEQT5I9qSfwpIVjYDSHjh0xKQCBgJoRAzAQAc1bhpmARUeZAnDuIFVRM0EkA0MxNQBRkwxZVUVURRERRSyrZQUVVU0Ak+PIZQZmCB7IAQQjR65U0KycGDI5na/urDxbm2GWrPTYaSqJ0GlWVZMZWOU8AfDxJkQwU1MEdDy/bTRA+y9hrWEuZoPZvQg8s2EQAIDBISLMPijCB7A9ELl3OTZERFQ0IJzrqGai2dzRVsz37TwSMVMCYGKTZKChCGriyqpdnqRxnOKdJimaJR5yTlklB8Ihp6pdxj7JlImoLMv9zXVZlhrzJMkzakLLxITdODAasTMDyYbExhjK1pDqRcEO46S1D9qP5EJRFgiQcmzqmq26m25CCDlH52zZLiYau/3uq5vDetnOZjEwN3aJCPtxIEegAJrHPqvZcrmKolMU096DU7X73ZAT95qo3K3P1pLEMnkXdsPh7PwUVcbRDrcdsysKM0iePSFXRUDVlC
OqpDSxh5Tl0fnly9dXZOiYRGASadqmPwxADEaaJi58XTUzH0pyzopNWQ9xNHCaE3gEIjT04P1842YxJV8VXR/7/tCuGzNjIE+hZJgSEOGYxsZj8CwpieTQLLshVwbB1XfXh8MhFusSGIrKn5wv7+7GmGy/15cvd23pU5rqxiXNlqEoQuD6/nCIyDnPJlNCQucJncY0zEj+B3M+ynGHDWDZdBShsvRKBEgBvUieMVtEyOxVTQEZEcgMwDkPQGLi2B1jzUg+gIF59gaGoEmVkQhncCwCoom8m/K+/+F7d6/vdttYNDxuOxDXtptQrm7vdlXIw34KKNN2++rV1Xc//l4cE2a7v7pfLav97SvS8Ns//KN//mf/RABuhvTJb/3088++qON+UfhmVYwNILq25K+f//Vy+Uw0NIt6HS8yqfSj0y4drpzjAvzrN1thOVktJcUcu9Pw9PTMvfz8q5OTj5dVOfXbNMWqrlQWoMRkMU0+lOwDABJrlqE7XOXUOS79+gN03hXMkoEYvcVpkDQKIpq50JJvGfIwjALKnp2VSFiGBprTQ9wWVTUe7lLcFbgoCxqnQafJoeUp7fdTWD9un16E08Xd/vri6Uldnz663PzNr35xsjq/33X7IX751VcfPz39+//1/ybL+PXbN75cNqFNeSTny5pNdLmoDqNsLjZvv+r7od88Xrcnixd3Lw7XewW5/O7jn/zOD/4///Cv0vnoS1F2jsJ5eYaUX355/Yc//VsygquKP//zv/7+jz8qV6e/9fSj06L67Jcv3zy/HeN+fbm43+//x/+z//Bv/vGfbK8P/4f//T/8n/7H//7ybFGNMHZd4qHZLN7cdY9WwaH+4i/+4iff++nNLRSb4ruP1vf9sD4rhm50xFEh9tGV9fV1t140jp1jRLY3L66XTdP36el7z8qG9nf7/Ru9u71aXzSf//LLJ4+fsPdkedlWP/vrz58+PtttJ816+nh5/eUViAeuBoni7PLJ4vZF2t2mqgwK6e76br3evH51PdyPY6cnT5YCg0eoKqfJNme1eXDrdbtcTrurfZ/fbD998sk5GJ2UJ5fP6qma7t8M3Pq7q7HsMmrx8qvb77//9OvPrteXdH/oHjUnJmqit2+mMa0+/fz13/z8Ny9CPNzurL+Z74K6bZwvb9++qcs6DoPNdQWIq3Ylue8OVjqu2BtmK8089oexqeq328PZajENPSgvT1YZiu1+ZxEDu8KXkaxsGkU9u9x097cSbb+/W7XtLh2KopQYx34iorKuEEhE5tzE7dV1GcJmveq6McfMjpu2RnKarVo4NDIycOTUhqH3YQ0qu30PCI4rX3BKY0zCRdicbA77rlk1+13nqby4OHt7/XJMFgxQXOyyU1+qywhD7Ivk8ySAGGPqDn1T1mVVShYfHBGQWWCUwjWMWPo49r4oZMxTzLf391VZN3W7HbqqaaSPY3frWE2077uiqE7O129evXbMjv20z5lUSclsOmyHScA5sFwWHl3VT5OyEoey8NM4AptzLCqWNcrIxGiWc/ZEiqCaq0CQxpv9Ybvbel61dUUm3WE8P9nIME3jtFy1U9aUJjRMqlz4NEzOuRRz1owAYBSCV1NvYGAppboqTM0RTTkrUbOoMfiIfkjAaIf9zfnmdMzT5nTRpJ4BqkV7f9udPblYNEVbLX/9q0/r9WbY71++ehsY2ZBFcz++2u13hx6Am8qZ1zxJRvFtJaKHw7BcL2/uDsMwPXn23n67u39zddgfFk19sVm/fvXi7q43Lg6RXVHHKTXr1enZ+eefv9ru8rJpoKre/9F396/fFFqu2seKnpg8F7fjFaP40CCJxXG1qLZ31u3vHp01fb+92Jx+/erFxeXKHrVdN71+e/3ucPHO4zMrPrMuZN+KO8yx+pnKqSDvohHfkFlR8V224WgNAjMFm//giDl6aO2ZjUWz8x4NTE0NkeYd2xwPAj1CNx4epSZ5ZkGKJFNNWaaUU9Jd193dHcxMVGOetvv9lGKUOE7TNCUAI8dM6DyfnW3KUIjByfmZc57NJUnTGOOQJj+5sqoW7We/+XXp66aqb2+uTzbrYrFEw93VW81TWfk8daxTyas0jaXzq+XSGIrFErwfuk4OexcITMa0MxvbuiFDdhi8L+pmt9uRUelCT/HV7f1+GBZte7bapGka4/b8/NHV1W51stlsNllSVfjNepUGkphOFgvvrfaFI08N06LGt9smAMh+MJeGfLZcnp+0n726JgMHxmBVWZT1ApSLIqScwQwVgwuqCs6YfUvuenudMp6sFuvTk93+AM5zEUaNFYaLJ9/Zbu99HYqqGYdDmroAihpPzldpUd3ebJvxvvK8/uBiP4zPX71NGRS0G3ofXOWIQFnVERVVEUKBmpx3hWeTpDLOtUH0kFicd+oEoKRHxXDeaRvMRjUAZDNwaIYzR1WyCqiKqcE0ZUB0iT07n5JzLsbJ++D8yOydd+yDC5mzE2LnPBLO0EhEwG/Zik7PF45YDcRIxDRnJiTCoesc4hRjytHMUr7deG8aU8qE0G2Huim89ymlwz6bqqkWZckuEBL7sp+mwnFb1ya5KpxOU1O1i7q53V+dbs58CC9fv7hcX3BOi5Nyf3vTFi0oO8+GB8/cVKWl5BGI2CAz45CiL8uUkwP1iIwwJXXOjVNyzpvpBHyTNagigljuo+3H6cPLiy9ur59t1gVlHyDDRF6GvFuGzXLd/M3zV83ps/NHzyQOpqOmw2LZXt/f3hz2l5eXEMc45YL96TK8/fqm8ps8KuQQvC+q9qs3z9fL8zh2kiOBZUnVonZK4NiHyrKBHG/hqm4+ee/y9u6+aqvD1zcnwX/0aHWo0p/8+jNXVt0unrjXhXOe3Oevbvsu//A7H/zy0y8HkRUGJprihEAF13nKzht7j4DEaGLZMmRB4lC20zgSaBHKSL5crDDL61/9TXj56rf/rY9s61THYejris8vLw8313ncFa4RlDj2w92bxWkwY1eVQGxpAJiYTWKXty+apgjkwTtiRQVmgNkPYcDs7WHhw6ON6OHUiQh2ZPnDw7X2INUcl9ijhcgeYr/fFKcZ6LcVnQcR6biMvkuczYLEt8Nr7xjuD0oUzCwY+NYlT2AKx8nIu2JkKAuv4iQnywBoWdQAEBksMToEAlNVIGIzNES1jGDHiYcCqBKg6nHq4AAKhKDiVIYMyuAZybELDoni7IxJiXJWjTrF6dDBNDI75soYFCRLJBPvGkS2WQjSKHGEnGg2dpiaxPkZZXlSmbNl85iBZkJ31IykgTijGjrVB4QsAhKaIqiCpOOTkD0SABIeNTJUVYdkSirIjKgJJqHM5IjJW0accdAzOAkATQgUdW5BR5t/1POTVDOYmInNXQI685RsNuagkMikks1AM2o208yoYFnVFGjKkrOZmCSZQ7qIYMYmyQQTWFJjBAZzKMymIBlADMXM5uY0R2DRG3jPpMAgycQLiEr27Ei8CwxWMtWeSIFYEYHQRJURUNEY320PDAjmLBWgAQO52SClIADJbL7K1IxVRbPkabAcVVQ0i5EqGEKegd6SJSdLEU2ER
hn37JwRsi/AeXYBfIWJwdXoGwKfBNRm2w6Rc3PRpKrlnJGA1JEq4PzTPYZpmFAyHilFZmLqyOcjpxsB8J3I8w2raO6zF81HfRTn2rfjIwRMEBFnjR7nn8ssICkgMs1EvFlNPKIeEYGPH9EDiOEoLptGYlJRMJg1VJHUdQOAAYQpWcrHbc9uv2d205SHYUpmWTWlZBm9L9++uTm7PN3vdwU1ZVVNQ+fQmxCAMbmUM5dVqMrd/T04NM6aAc00W5pGx46pdAWGwg9TH0J9drHsYrcoV0PX72+HPMayqV3TTP1+3O8X6xaQbq62XIa6CrubrXo9OWsVjbAWy6DGhKFsFk378uWrtqX1ZrXd7bqhr8t23Zy8ePXVo9OTq7v7FFllRJb1SZWkl6xguFkub2537FRMy9JFjf0wEYIPRdRYlKxg7H0RyijKak+fvvfV25dt2Q6Z0CEAmIonBixMXbAiY3LEGkJKggHNIjOqCZjLSRQggQGDqHZ9N9cGjt0Uyjr4+ubwpijDGFMVCj3kaYjaVnd3wwhQVDiMcbvtVs2CA4cmZJDVpkbV3aFT1bKoxmkqy7rvcYwZDB2xGaVkoahilCmpY0IGTRMSJ8lERMiEkHNicoRIjGAgkhVQFOdmBXbkyUeZnPfHWuL5gmRmRgNTUARjRmIg9GpGs50byI78MQYwIEZyWZIeMUXv2hcg+KKfhrfd/e/97u/ff/kGrVwV4Wa7Xa2XxUn4zb/6OTrXnvlyyW8Obzarx+2qvPrFy8fLC0lTiv6zrz9/9OjZYnn+2avP37zen62fbc7iX3z2F+cXNRasYj3m1dNHDqu6PrEpkS1rnK7u3nLh12EZVs1wP/DCLSrX3x7KYvW9P/i9v/rsl5vEi/Uyu/i2u7rPQ93WfRrVskqK2VJKyMxcqUEap6nvAXMft+PrnyfQqn3iqUThcUwIzoXWVbXKTP9VIyFEFwwgG0KgYhz6yjebzXvdqwPG6ImpKCxHDBPmNHQ9iFbV4vb17eN1ebjb3755tT47DW15e3+XWZ8+uyT27+PJ3f3Nk6enUeOnv/7F5fny/P2Lu5f9dTctFwvLgbkwjlyUFseEAMxXV883P/id2368vPyAYKISzftff/Umlfnz518/ev8c2HVdVwRbnbZFiYH5dtiVlf3eH37oPD//4gaD/psvvzo/O12eN3VmDrio23/5j/7xv/on/+gf/IP/0X//kw+Lp5ts+vM//+V7z84/u3/+t//473z5m+vLi404u/z4k1e39v/4Z//yf/If/eFooy8dOjZWI7doWaJqnM7P1ggwHtKbl9cn52eL9jQ4tNbv7u/evE2iHibu0tTm5tkH79++Ppyd187pMMWPP3lcVGxaueDSaOfn75UtvLk+hMaBc8+/vNm+2Z+vLu+6m2efXNQOP/30OhTVX/7ZZ4BrfB1/7/c3OaXd/Sgq1eVisawPd/3XL24vv/9ExhjOqE/jYTyI8X/+L3/zt/8rj6uWNo/qfnu3XrfXb8cf/953AoO7WZSVWzL98t98dbfTJx+f7Dr8P/6n/+lyRe15S29fjf3dR98/+82nAADjFLUfiYmCoa/v9/en65P7/X6zWngXyBUAdn3bq0YKXPnKtYyIZdMOOSVUQnCslmW9bA1gN0Rz3pV1zFFFmqa01Zqw3IRqmg59TOjDItRVSWOa+m5oCi9iYz6k+1S4kFIyT67AoghqygRRBUzMvIGKRI+hKApPLo1TznOGXaRw0yilKxyZQyzZTczc0JKaw66/vnkVvKvLAgRvd4eTYhXzYTpouWg+/uiDbnvQmELBgNY0pUp25FJW4TzEWJAXwJzzft+jZ4doOZnpNCQOxaNHz1gnusOSEYMbDvu2aSp297dvq6bup1FNgguSM7kg5qL0Z6vFzet758pDvyPCnEYjQc8GTOjZlclDllS3yz7uVcRUkSjG5NhJTnEagJgkqEA3jMxlGUrVZJKdUijqbhgd+ylOVVOGCDLepW6qPGFEREA3R3FnBURjjJ6dmhGzRyJPpqZzlYD3oNh13XJRry+WjzcfXz1/40J48fI1F+7k/Gy52fiqy2D3+2l796ZZLmq3mERW52cV4f7upl1UOclhmkbDTVNUAQ0zeSjKQC7EOC4Wi6ubG0WuquBIGfXx+WJb4McffPjZp59n0aKuXNPeb7eqdnp28vb6brs9FEXYbdP5abtanrz6xecNB19YdDjmCAb9sG9CmzX144gEXZdQkdn1w1A5ZuBx3G+Wi9h1NmZnSvztw8WDkwMUHsxOCGQ2M++OWCJ85zRSFZMjOupYbSY2h9DgXaRh3mkfzypznewsPOk8JEeC49FlTh6ZHY9Ixx7e45fPcFIAVZUUBSxLTin2Y357fTtESZJilBhz14/D0B/6PuV0e3c/jNM0RjVzBo55c7IexyzKJ6eb5eI0TinHvNsNb9+8/eTj95tlfejGr58/z5P6Aoah+/DDD31RCUAe+7u7m6quKsdGUq8akYgFX5w/FZ3hEQjDiNNUBpdjtmiA4FzpuYhxMFRyGKdh2TTDYby5vyPCk6IOK79aLeI0ouGzDz+ATI7is8fPpmkoqmK9aOOUVuvF/e2NcawXJYtDhFCxiFSLqusOm3V96lZfv3nbtvUgUUlP12vZbeu6qKpKkZz3HIKquRBKX/bdPYID4JimsqLz8/O3V3eHcfBcFgWOSWrDmGHf90ae63aYpspzETzkAFmSyO72rlkszx+drhblbnc7jsOqqU+WT6/e3u93XaXEjsvATChTzin7wlOOVcEIINmYcI54GIBDNkICQCTV44FzJpzrXHY1K5iGiIxGpgrI5M0MxKkaSDaxY5lKjllR4jQxkS+8c+y9J3LsvQtFUZTOBx/K7DKxm9tymJBnJwgAADB5zWpAoBaniT0SoOScRR17yJKnAYIRQX+4idNA5BRRMYsV05QMFICYXF23PpQxRgDIIoBOTcZpKue3hK5pWgS52JyWvri6vT6rlyfLlvKQ0lgXZRqsbUhJ+hEYuHI8TLFkRsQccyBqQlDGbGRitaeCeULKiAX5SYFdEJEMdtWNROiQlDiqa5arcHPfErSVK3LyntM4LprVqm5evHl9evp0vX6PQafhwJyrqt533dXVjaoj7zWN3W7nq6rLUZEbpklk07aU5e5wZ4KzV7KPgyu899jWdbcfXAhct9N2yNMYKmcKzrln711eXd/eX1+fXZx+9Zuv/+3f/V66dP+3n38WWKsAL/pYQNrEaXG2+PLmZkoWSnZFGckP49gsiuG+w6JIosGUwIHzYjgrBkxkeYw2chlynDSbqzmKVutTDs2Lly/f2+592ZShmu4sRuVFUS6XY0qaskFkLuMUx37rQ5XEoS/LshqnXsXIdNzuWH/ZDDu/fJSbM+O1kgPieQar71Ans5kHjyKO2QPF/2ENPBqM5mvuXeHSN8Cid7GwbxmI3ilEx8Dkwwo8f3y0Yb7zMcy8/4eL+iHY9g5T9A2caAYdzW9Lv/kDZkKc4Twgokw46/bEfAzCHbu5CBHUUMEI1AQUGIHnFyHy8/nXIRekDmdzoU4puqpg
hOCcqiJiFp3GGFxk07jdTt0hjYdFu3C8wTkDTY4pmIJqNs0QO4idxdGZzu6OlBFRBSYARMkKTm0mXRM4kpxxNgFpxpl5zwUCWhZTRTRUBRE0U8mEBEBgJoA0G2fADBRMRNGBkyxzHzmwACfLgMSALpsw85zDhllZMTFEm+crSKhqkszANAOAIeMsqBmCzLFqBUURM5tbyyBnBCOauVFmpiZZcrZhiipZsiKSgM3rn852S7IEQGYOIZBhToggRFlsMjUkBEMkRtYsgshIkJIDzmpZbUqJAJ3TwpEGdMSeqXAOHqJnZoDskQiIFFBVjhe0zERvb5hnD5opqAhkUFGNUUQ0x5yipMHymI1EsoFXAzHLc3JN5nmGopmqAM0/PM42ogrNAOxMQIldVvDOWNEZEvkCcyZmm5Ix27yvwGSQHfMck0cwQBXHKGiKbEbIgHTsVKXZazT/o39LKkJEVVMwRKLZK4UPWPijm8odK8wAkWaPHyIhAtMR3kiI9s5NiIjOOYRjwp6QwNRU5iefY+eQ0vwCBmkSKPI09L6sxKzbx24XxRKQ7YddUzcp9jFGzRZ8wViOccssMeaADi2ZxnrRTtkzQD8M5LGpQ7NYKmK2FKoKGFdL74Hfvn1dLsrSt303Imjr6tCUCRKC1FUR99043m02q7HLB80AYqJzmE6ydMNeWUcVSdHcHCo3QMgow6ErgFzRZLBs0DYlowFbKAgRJcWbw01RFYR8tjy53W5Xy3LiXZapLhEQybm7fueCm3K+j9MaXVkycZiGaGQimRyhmCURJ5JttOGuvw8F9dNO2SFySjHlvGw8kYqJ4egoEaHMyxRy1uwcS1YGAxRgN4gQM5iqpLIu77Y9O8/BSZZQhLIo0jRITEwcGCbR7U10tT89LZNMWVMWCVVgx/1h712taCkrEd/vxsBh7IeioJy9CA0pRbMs4CTHPBFSTCIITM4xCyUzzSIMxOwRAFENYPZGGrMYsyCApTTNwzBPzoBNjdnN+2oFPdr+GIhITeYQpKgQQfBBUdWMieenSVZ5xydi4tnjDQAnGz9eLoXh1fPX5/UlhSdPP370+T/7J2m4e+wvluuzTdNMuv3ujy//83/0J/oMVqtPosKvvv6qaMaf/NZv//JnV2fN5VdXr7sckao8DkWRL9crr+LXzd1df3t3T2WxWZ0dbuB8+ez5y1998tGHeanQyuGm95Bv7u+Wl+e3b19ZX+zdOPiBQA672GN/+rgtmuYQD6K6qGqNWVPOoON0cJ5NzJcVA4kmQmXGLLnfPYc8uPY8+DMkVBLHNh9w2BNipdYjJkJWsG53x8amOow7D3xy/sHdzZdszsgUc44JDNKUDA3TnmpgV049/Oh3fvoXv/n0D3/nky9/9ZZyQcp397ehLC8fr9++fHu4utm92L34sxd/9N94vL+5hmpBxcn509P+cEgDY/Rthfevn5tA46ru3txoYOPbV3flqn10Um/7mycfLx+fnn/xyxfLi5IBX3/1Jo/SH8ZX035/c3/69Gy5CSmOdekvL87W5RmodH38+tXVJ995UraN2vi/+N/8x//Z//Nnq6ffK9p6//b2R7/90dn5eXrD60U43ZQvXrzp31z/5Pd/cq/47/zRH043w07G5ZPF1dvtOGqR8el3Tojh7VW/WhMBdl1aL8/6u+wL/OUvf/P0kyfojYkfPzn1COXV5WHblXUxDnl/PwIldlXZOsyyu5+ub/YV8ZvPbi+ebqScGi2Xpwuoy/VHDbuygeLl12/KrF/96je/8/s//J2//bv/8D/59ao9WbTBkUxT/PyrQ+XdYd+fXpxVhbOon3716/dXj7vrt0+ffTgU+Ecff+d+H3/9i6/ee39TFyi9vfnF7XufNKPJl599gelJN22L2hdo55dFFU7/g83fT3JSLrf/2//d/7q7eamhmu8CSaNM6grX97vgQ+ULNCyr0PWd5yAKIEKIYdHoOE59lJRcIEI6jANmdEzXt/dtGVBh1+1W602XRpJwdrK4vX6zvY3JDH1s6loyN01bt2F7fdM0y7kdZjIcZFyuK/XFzdX+5GLTHbZtWw39wVM5ShIzBJymjEiLqp1ijEMUUSTwBbXlSX8YBHI/jW3RjjEz8n7XK9p+26GimDGG6SAqmYkAIYNV7QJVun6SZczZ2IWcshmEukKkGCfyjtgtFmUaJjNUNOfdbJZ3LpQN7XbQXd+/cV/UvoQMw2E0wNAuxjxIHpPQwrcg2oSSUO8Ou3ZxGpVOi3rcXUFOTbMaLBIHx6EfJ8o5q4IfhQoDVkndwTi0YuK8T3mY4uRZTSfn0ADGcbvvbjw2Z6dPD90tWFq25djtPcf1SXt/t1e1bugeX55ubse7Idcu77N4QkfonFeAFCMytot2GkbH7L0L3k85VcEDYQTLU8yGqHB1dZ1V3CHe3tx9+OzZ2clH+3EisM+fv6y4wAz1ennyqLm/uhuG8eTxKbKNu75c1N04dv1UNFW98Mtls725piE6cB7cLg6/+urT8+rMwE7ON4Wj+8Pw5Mn5p1+9NMA/+6u/sSRlVfVjDHjIydI4GGG2sS1q3zQf//53v/P9H//yL//y+z941t3covPdcH/pV2YERGLZgAgwjtGxzzkGRoSicf5qv1+dbcYpEtjZyfrFixeSx3dHlbl7ZY6PmZloYiLVbKY4IxdnHQlRTfCYNaMHmqXqu2PSg+4EKsd5NpI9+MOP7bTzpx7FIZ0Rj3MF7Twdn2mpx0OW8SwlSM4555TTFOOY4mF/GGLq+kGNAcwxgse9pilNUxq39/tpmsSUmLwjBgIjNMeuAOOS68PuYCiL1XLK/eP3L6ImkvF6/6Z1bRWq65v7997/sFmepWkoPd7fHTxhGQpnap7YkVEoikK9SzGHsvSgkjPTXBukZV0mE0cch2l/GJq6YR+8cyYy5YPzuF6e52lfFUwMyWSzWW4Wi7dvu+Bx1RaHKHXlD3mqq2o7DBcXG89IhILgq9bFPHXdsg3BF/0ggLJZUFvCCPRoc1J5c+t2uV4oqGNzzjGzZ4qOmEHRJKW2qTMAUGD2TRmSJlf5tqji7VZyOl2228PWSmgX66qq4+HgnQurdpzGw/7eob+PE3usymZ9/uiw3e7229WiWLatpLjb7vo+xiFKiuW6MgRJyTtFI8tZKRAYEzvnGVkliSU1JdMZaDpHZuZCYXtw/cPxGiJDMgMwOXpJTJXRDEVAzUQA1ERFJOsgE0IIzjnnJKQcc4qOfSijD55ccN6Tc4Lo2YE77o48cxIkopyjigIYMHTjfkxdWdRMhRk7cuM4ACoD1GU1xjhzSWIU55nRFVVFZTkOmZkdYUqDdx5SYo9t05TBseec0vpiHdgftltL05OnT0PhZIolNJlkDNJWzfawZXKl99myZ3IMhESEmq32RMGxmSMgNl8wMUbRAtjHDCgJtc8GAJV3i8qnmHe7q1FWm5NFbbhumtPNo6HLeaJq0URjc5vLxx9BhrHfk+SqcNth//z1KxWtqiqng+Osks7Pz2+G+3axnLbjelkuF8ubt2+og5ork0lhjGlY1Iu5sY1cqeC5Oc3bFwj
ivZcoBog+fP9771/t7/uTevHD957fXr+92SbLMZOg/elX/UVdXFbF3/vOdz67/qtb4O+fvV86K4qqjxk1eE7TkIAZDcZ915ydBS7GGB0EUJSY0SZ2HKfERFSEmBQW7eMPP/jyV3999+rL93/wk77rNifPdtevte9DKP3pxbA9TP2hacopDtOhL922OamNGVxR+1ZztpyJ+n57c//2NZBvNo+b0w+as/dcuYaiBVcYGCDhvM1GADkmy46wmm+xbx6yabOPQo+f/yARPWhC9pBH+5ZmNCtFD1m0B/zuu9+Osd5vfe7Dnx6J1/PLHL2jYDoD5+b/fFdHAABEqDkTAvB8jDV9mA8QOQUlMEQ2RHkAJolmATRkVSVE51hw/mQwEwNhgpxiEsjzeZxobrQyMzM0ZBCbukO33U6xZ1THbjZeEAIDsmVMUWKC3FseIUfIKZkoKLID9gQAmmyOUCOoypzpUMkCoDmTCiMoCKMSMEhmzYQKkjBPppmQCBSAkViB5pYSBgXTGV3OCGgZBNEyCSAZMiATgD2AbmyOwh2h1zNKCZQgm86sIpX5+TlLKTjnAMlkZrTRUVc0ACVTBTXQxJDIsiPyZCCSsg6ZUibNhIhJRRQFWc3YWAUTmgKBaEFYEINqFFC1yTAhIaAn8kCMhiJOZoZbCo68wxmd45RDTCBWUFE6JwoAJaiSoUNCYIOZwkyoRukAInmKmrMZIDIg4YyEk6wqkpPmpDmnPKmCmYJmMCIzQzRVNfRzzGvmMxEhODElh0BshEBEjg0BMSJ60KhJCAJTkY3BFQQEGUEFEZE9sLeQjqlMcqqGBImZvcesFhwCKTIRI6Ey+8IrAXgmpPxAd//GVTRX3auKHRUi/LY/kGZwterDjTyPzWDmXgPORmlkmn83ZkKc6xVYdAYoGpGb2z2AwRC8d2AGKjmO93dd8N6VhZgmESCihAG9h8IRj/vDNKY8jYww9f2Yp6Yogyvuuq2ZE7Tg2DATYCA/jTE7qcrSwLjwB5vYmQCqpKJpJhFI2YVAjhw4HRUmgoIl22p5bpXEqStXi/Xlxa9/9vPcHZqy8M5lzaq5rhrzDsywbkLwu+1+Gsa64XVbF87v+9QdUu1bE+v6HTvypARuUbW3+227WKac4hCDdw45HnbOec0YcyrYE1ldtnfDWBASesl94TBidliOw2SzK8xQDTyTU9FuWC+a+6k7jDpBXCwrRUJ2UGRTmCyqpSoU+8OOsMpA7DyCEpBDZ14ToalYSgJCxCmBc84AYoplUTRWqeXgAzkomDFlIF4uGrXp7Hz9689fdtth9YT6bmyXTemDJgXDBGAG+RBLR76msizGMU9TRqZQFpk0afbBwdz1Y8DEjlmZVLIxqwgaGkDWjIjEjoiJAqE/7npnK/4c/QQS1OP1SWgz2J1IQec1iYg8B1EhQmIGQEf4MEKYL1Eym2+6ue0HAOD5L54XgRtNfpgerXGw8cu//rP31lWx3NzvBwTICoc7yJYun3zcuMW0HWsLi+Vi3w/b60PtNyenZ3cvXpcFIMdQFvdfvyybsL+bFnV9Wte77S1DkYZ+v7X3nvz43//3fvpXP/tnN7c3Ty8uQVFG64XdLkIE0WHqhyLS2I/rVVuGpYeiLMPV7hby/SU+CcnidKhq5zzlNCAJi09TInKSQKMAmrihu3uZD/dFcT+bRolVDeMUJSVkVlSxWFXLdrEsQ6U5s7PDdlcE512oi3J/GCUTEQN556Qsi2RpmrqqCOsGgeLt9a1jf/Xl19bn+7uR8PS9D57+8q8/nVr99c//8vHlo7/77/7bt2/GEfTJdx8Pu/Hu9Wdq6eLik343dbvDsw8XUi815LOz85fPt03pp+5w8fgxl+Htp88VE7j2q6tYuOLsbHV/lavFul4slov67YvXtGm/98Pvvvjy+ePHT99evXr+q68ph1Bx+7j57idP+n3/L/7pP6/H7sXn2/c++KQ93fz1n//N5frEER7SVDi4+ept2r/5nb/z219e11Ofm8q3pQhpqMsi8GpRPjoPb7/e3b3tmk2zOd+wye5uXzVhuQl3v96ePT1/L39oGs8fnV7f3bz84utXL69OT07WJyfmoTmtVutSI2bKf/3r5x8/fjyOuW74vQ+XTz9YJoPCw+c//+LT3zxfrpoPLp989fnz5qwpvF9tmr/9d393f7P79c+fI40Fp+df3dS1Kzx9+PSkCri9jbdvtuN9XDX+pF32+8Ezf/XV84vTZxInjvLDT96jJnp2L97cPb9/7t+Ggfj7P/047Q8xy+Z8uXLY7W7HKX7vR+/9J//nf/ry+otV5Rc/+Pizn30+3wVJRE3rogJwwbl9GqbUq2pbLw9d7wufcopqpa9EJ4uxrUsxrduF80Xf7b1jJmeABmoyVO6UlLp9f6vJEYvKZrncdfuxm2I3KmuZQ123qtJu1mmMoiJR05CHw0Qsz19/WXBZFSuHAQ2HbsKSyqYKhDlGcAYZIKsPjgOnmIfu0A/70JTOF4auXSwQzUDSlM4vTm6vdnVZbzbrVy9fIth2u1+u2jiNOeXSO8h5e72PU27aMhTN7V035GGYhlVTBO+maUIooqZuvzs7vcDexiG2dTlEGaeoZmfnp2VwoKYueR/6/cFAiuBRhMuKnZ/GHRHut9u2WvsQDrtDXbXBhYFwe7hzRaGaoqp3yI4UOKXk2AVXIoBqljhms6wRQIl9jok0O0SR6DQVRstyOY7XKe82dQOQQ3CiIknqqpQsbw5XOiw2yzq8yCerKqn046RJY4qAiKYePSNhGeiIt1XLSZmYsCJaV2VJ8N7jzXpz+vrtHU9wvtqY5jdv98HzxeakrsqmXb5+dU2IeRIQkz4FLNt6I4lWp+393c412VUhicSY66ZZtu31zf4wSrWov/ujv4W9vr2+Gc2NE8Ts98mF9ek0jsFDUQZEkt1gYIhW14ucrKhXH3383qub3c3Vq/vdzUW71py9LxCZDMfu0JYVUBCI0zhwUUxTrNv69ds7cguQ1MfkPffj2DZtzhLHcV23+903UtE8yVKzo7UHQPQ4a557cGxWfN4ddN4RWeGYNAMABNIZSW16jFaboek8gjtWhQCIyvxkUjNEOhbRPjA4jtPyB9qGmIJmVVHLw9QdDt2h74eY+n7oxxQK74NTpX7f9X0/TuMUY4wxpggESOSYEdETVWWxWi4Wq/Vqs/BG4xQnmVysfVHfv3mbpyHsfVO3y6J6efX28vz84vR06IfNqorbrrvfl6Fet1Ucd0WzQMkwTd4VOeXaBzbYH/YmyUw1xsDODFJO7CHGQSyG8sRAHUI/dKX3i81pUu3GsS0WArY+WVZlM0xJLD15/4kxETpTVJH1+iTKXV1VgBmVJMVmHZIBz8jXksB6BThb1WenZ1+8fHmxbDlJs6yXm1o1hyIwo5kYIYJMk1bFYtQupRGRgQIY+VBKhhRzUTVnp6d5OOz2g3dVf4ho26IuNydtHBOxK5pVu17GaTzsdqrx0IEvabl5VDbL7f1NykpAp6fN2SPOOaY4puHQ7/YZzDsuvAsheN+Mww2CikZDIMTKFznnGXVOOB
Mgi7ojDM/dCFlLIqkuQUfaQhpsLGru+stUM/hChJxGfJoppCzrkffO9DH/Ww3/puF4ZWJaUQUlIwJFyYwjETSbYEhdFJxTXGilVzSimlBEACOcGoXOTEj2gfNAjKOWdrOMQB0ShkQgFipJF5SEiATOwMWQvGKlskFmQEQmBUhRwVEWl0iBTATskqEhCpMUgGyAIyjG0GOauCxJRTyjkSAOGREi6SICe0x+DYaPhDUGRGJlRNKSuSHC08GRBTSkqMoKr5++l4/A/hkfE1tkHhqAYhACgR0zG6hh8+GB/vfR2rPB5/CAj0eIeJHPlh8tgRIiIiCgo5a86aRURARVPO43NEVJgNZEFAERAEWxQ+pPaQc0DIiMAEhotiMmsUdeijikQUyVoam1LQIIadhui910FVIKWMgGObjLPlqC8gsrVWNGlKtnTGFj5GIINFmYjAt5h9URY5KSBZS0OO7GoikwUYpTClySI+jKBLawgYZUhceCAQW1SLeVgfxgJOg2Z+dn57976uyugcUMHMYXdIPgyFDJqL+ex+/e2nzz979fJ9SAMS1K4OWYA4GzpZLG+//boqqm6fgIshJQUktpK9dRySiCoSaIpsXemmsZPdFmwpzgIZSFk1CLKRSNdvDk+z+fNfXP3Hv3/9cB81O0UaUo5RRHIzq/sMkgBhIMWoyanFqETm0CUOqWjqdu+drYlzN/jdbjdZzIF16DsfokisGicSnSutCiKQ5ZhSzskYBsTCFUkwq/qcmVnVF8aQIVVR1PQYzSRkY53BzKTGMBEDsAiwMQAZWZlIIDMzkVVUtoUxZkQ/IKmiEh8bJQ2PxHeDhEmSsRYADBtRBoQsmfEYDz2+CbHuNjtrXRJRVAN2vbmxOtzd3tblWb/3ErSaTXfDuiz4ZD4JbfQxX14tb3c3Wc23724+vvrsi6vlzfXr9w9vm2bS3bx+COHq4rLfs1lWXXgbI9XT+cPtq1wmAZzO6jdfPWTJlSsbLL77+qZszNuv3pRFlaGfnM1i79+/vjEQZ7PTF8++3N/sTucX/nDwXfQ+CslkUnaHYKlUtm2IkHm37U6XYR/Dx4tpv3nAlJ3V5nQ5KWZtME0xd3UxmZ4lP5RFpSho4na7RcdDhsuTp81stnn/rpkaskm8GmfIKEMuKldaExNrRgCMWRxiTgIkAiRsGWdZA1kkLnPKIXaldZDczZtbKuKzp/OTs+eLp5++uLr47ptfP/v04xDw6bOf//Z3b/70n/35r//u15OlXd29/cWf/w9PL3+4ub99fX1jbfr046dffvF5kB1qMbNUznl6slS0998N1ze3LJdEOJ3afej/8k9/8v7d6uGdMNu37x8wSQ7panr15aefv3q9+fTzF4nhT//yjwDtx09OH+4OZUnNojwceg4ZkvpW6qn5+s31X//nm//jX/7T//o36//w6t/92R9/9vHPnubf526fq4KHPt2s3n76xfOHu7vnHz2ZzIu7dw/XL68/+/IF2Wp5Mmm3/rDrchri4BWHkyfn6uMB9/OLhalOfv03r4ff3+y2Dx9/dtV1a1NOVePmvk3G8NnETR1sw7tX13bIZxfz99s1CEHuJQpZQ5aYTEg+xcQEZVNBxv36LvT12fmFR0yxz6K2tNOi6tsBcyCm+cmy77zYFP3eWkMoIgmIy6lDwBiGvt1NquUv/vSf/s2//6u//+vV8S2hslGptkUaDprVsjWWfOeXp2fbbmvAimnu1rvVITy5WE7qWXtob+9vp+Vk37WTeXP//nY+m8kkQU5Pn50NQwCE1Cc3K7vdbl6Z+5sHwOLsrLp9ezef21V3szkYNUxUaqVkh0N7z3jCFMXYcl7uVzdvXr4vJ9Xq/qEw9enp7LtXf2viUAlLF9chCybvu9v3PJ8tb15/ZcJ8cf602+2Xs4vCIBUTa/Ru/XD//rbeTyZ13fW+NJMnz758ff910ZhuaJtYfvub1z/+yRf3Pph62Wl88ny6Xd/X1XT19vWB8MWXZ2/v3mWFToYh7k+QdnFTL3mzfzgM6588+4UMh+/efttMq8aHGn3fpfe32x9//uXX7779weefvnn1+/Vu9+zsYwXabNsSjMa9q8xscbbf7zMogUWi03N39+5m2+8vT2tWEcyHfp9jKEpu+9ZlWG/bZx9fvPvu3bSuHh7eXT692LdDktKEaHwy1oCyijM08V2cnNR+6LxX2e5EyVojA8+n87aPivrq1dun5+fv3rxZLmYvnszfvF+vN9vp6bMKYoIchoCWJpNaiqbr+rqpgUxROBCdziar9Uaom6fJZ8+fzRblq+9eSlLU4uLkk6QD5DApHGlylvb7rp5bSAkQkvfz8zNUr4FvV/eTuko5u8o5Z7MIlxVGFd8Ts3HV8uQshFYyYEZANK5MMeToa1sg+K5fqWHJWZIAGB86P+wLO5lPJr7v+7Y/tF1KZuIq73sFLwrIYJ0lzRjyjCgGH5P4vZ8uKgCI0WdN5Oiw3j2OIEfXxeN3Rr+PKsi48zqOGiJZRCUhaAYKIbWH9tAefPD73cY5x8hMnFKOOfngY4wIIDmPZy1jnQkJcQSFqoz7dzlmg8SaRGyMNfb/T9Wf/diWZOmd2BrMbA9n9vnOMUcOVZVZVSxWk+yS0Go1WnqQBAF60T8nQI96EyAIAgSxSTXFLnaRrCGzKjMyIzLufH12P9OezGytpYftNzLpwAUcx4/7ucd92zazz77v9xEA5JS6fh9zjFGGJGrgfEnc9W3TtN1u2/R9nyTFoZcsI86mj4OIiqmIgVnqht39WiPWk8oXZT2pN/utaVotDnMaCo+T2XTbD7u+v7u5ffLsGSqawuly3u5v728vwbiaTFJsc+6QQhIBpKaLijidVnFottu7uiyqcqpqMXeSE4AWgXNO2XpiMJO+26tlhyW5ApjyMDhgSTFLJypozL5yZUAqkLxKAhEKZXIl+LJvtmwIxDkOiBFMvSvd6LZK0QERcx8zMOUhKXVVOcupA8zsnPOFLwlI+nZX+FmSvB8GF6YGxAZZBmAuQtFs7qfLw8nhUQedxNYRF95dfrgAMnbY3t++//53J08+ub48L2dV3mvTtHWsfV3OV9Pp5GC/uWr2e6KBXOuqSVFNkg9cTOKwB2ti7KP0oJnQSNT6jVYL7+aM+9jfA5ooPJiAFIhJQEbC4u+rwx+gRA/XKI5ARgDQH8xED194AKrDCF95+JZRJ7KPEYgfeO0jSHR0MI1BoYclUeyZfTNES9FEfZbHiM9zVrUmRybZdV2J4hHNRGSsBoIskR3v+q4AMoRk2SkwEpI1fc9FAR6+PX/z559/vlhNNm2ah5VZaNbtZF7GuN9LB77wrgBfEVGOvSYhVdIcPBj7pl33qZ2fpIBF1ybPfvXo2d//+tftcN8N6XhRNHFouzajZ7UStEfJqsSgoEPXIaCaOOM0DKF0yiZEOeXdtilX/vT546Cwu738cHk+WxzU5ez1mw9d7J8+fX570Zhzm5v94fHBJ19/jmW538fV4dm0KO5vdteXF2enj/dDnB+cVPN508TgXTWZEGhzczddHhcOry7OHZeh8N672O3jblO
XNPRdHgYunSY1pyoRVMA5iBlwi1xIJvIL7+ezevOzL0/abz5sGsxmWfPDsahozGJgqGgIMWlRBe9cJhZ21ezANpkMpNvlsi4mk7TNqhy7zkR8MZEUU9dyWRWlj/uu2+x36/vj50+pqLdNc7+PBQVyXttNu99x4GJWIhWWOiNvWmX1EqPs3qdNt3/93eT0k+rsiZ8fWAjiwkj2NdAHHgeMGKMH249+TIyNCJTR6IMIAApjyumjv+ijTPSHnzzoofgg238cI78Pq31MmNlHKPz4jI8Slf0hRemHW/3v5aiP+4KydoiZfblcESBL0jyAZkgDjifhgZ1zIAPFSL7AckLzBUwWWJRIHEaLhBoAVylX5UQK0ZwJiVQKxtW0WlR+NS/L0kiTA8Qcc84pikd1riBGcoy+JOeRw5ifAjVUSTkzAnvPIqVHI9YcQ+GLqnShIE2WBQyIHBCRR0aviuAduAAFgy+Bg+WMuYWhw2GPqTNTQqcG4BwYgpjKYJJNldATEzEqmTkGV4DzyI7V2ERMNUdCJFeBMQAAkxoBouUEgKCKSS1HMlXNNh5eihmapEFNQU1SHhHGpmAIqGSSTAFMTGX0UHpGUnVohXdIzhGPMoWpmohBnwibtmMgERXDLiZ0vutiBhNJEoes2ve5H3IX0zC0FjuVaJpVlH1A8sVkOa2nLnc67HJOoSrKsqi5HNtWx3gdaAYZANgsj3RnN6J5CAWQMGQAAmFP6L0hI3s1RWZAVHbmS/LeCBnJRFGVMD34dXBM/4HCmPUDBDbwSGRGpgRmhA8KEYEhoIigCEnCEYmo2QMkyQygJsCFY2YswDETMTsEQyY0deyygYmJqclD071IHkfID0PhB1bRmF+mj0CvMTr2UHf28NDDL4EN9KMFEPWjOQxxjDKOUjEZjG/1QcUVyWOJm6qajjqRqSgYaM5jtYiO7jU1BDRDYkYD8C5M6yFlEHLss2o9mbRdDmU5QyDq+s3+gVcs7KwgKvKQSNHM4tA7733tEC0npcSOx9wsARGTMaEhGaKouKLw89rVJWIGNWAkdGoExH1OSJ7BpZTLapY199oqoBE65wTT0GUgxYKQERmKeSjmC4kqcWBmSdCkPDs5obJwzsGgRVXpdu8szA/K1EMc9PDo0XY3xGxEDsCmRXW7W3PBoXC7piUqObOIofcqo7mUEZ0pxihATDiOFgAC9kVK7uZ+mM9sNg++YFcVu23P4rodvOrXn34W/viro2/s7vyuT47QfO4TI5GDUPhu3/naV+javqvKYh8HVUw99RoztEMG4zAMqhSHGGdk4+2+LP1u3+13iZDMdpPlovaBvXdFgAYR2bvSco8IUbJgDoUHMIVMSEXwBppFiKmoSkwZVJgB2Zlzhjg25T0I5wjENGL2iZCJR27WuCJHQtGMxMwekZgZQM3EbEwXA5NDAIBEVJohMzOR2oPFruna6XQes2KBn37x5HxzW9W8Whycv7uuiKbTubR2d7/W0pB0iGnXDTeb+yoXXWw+++RHF6/bwg2XN9+VBysfl76EeRWk4CEITYt93zkuvvryk93mwy5GWs6yDpmVJsX783d//MXPVuXB6eOqbbc31x+Ozg7rWfGLl799+vnTnLq+6y5297DhSVHe3p0z8yDt7eXtfHkQ3MyArq929WQxKBPx6snzQdK0qi+//Z0TZYLV/Nj71YyfTFezWZhZaochMpELmFOMsSe2UDtX1IeL4/X1B9EYKr/ptuAoa8x9nlYH5Ip2GDxjSoOaMRO7oJrHCmc2app1WU3NwDRlhX5oyAAUP/v6k/vuUgKtThbv3767vrj86Y9O/uPf/WJez2f1rOnizW73/KvHRv1i/un7V+8ub9bPT84++ey5w8H78D/+9S/+q3/5xfa++e1vvv1nP/2jto1f/fGjfrP50eGnflr8zX/8p//Nf/uXt9++6jozrmKxO1jy9fbDn/z8i369y3Hyj//wT9Hgi6+fb3ebalr+T7/4n//Zz39SzX0zyIebNQAvfLHfJ4++mpYH89WE68vbps3ypz//8YfzD421EtEXRZubk6eLWVe17TZ4d35xTSTzVYFO3rx9Xfjq9Ox4Mp36qtxuNkVZRs05prIOy3K+a3u/LP7of/VH719dhFDqZHj87PjuwyYJHD+dJ4Xri7swlNXEff3TR9/8zW92NzvwKBKrutI4mlx1iAOwEfPIwJ0dz93OieH13X3pJiHwbrctpxUHTw48hyzZRIuyiBKbrotC1WwqQ2I/TgaOXTU5Lrv7nWb44ic/fnf+ehwFm+1GMnZ14QgDQhvbXdPWRX037EWlBjg7PtBmm2m43+9oSMx41+yIajZmtWfPP3t3+TYRDwLa9rumOzicpibdvL4pp1W5OHxz9WE73FXu0e32iiKK0+399e6aZvMziM1vf/dt7vN82cOA7c3f/elnf/T9N7+KqgfFGbvF+/X94vS4F80p7Xa3DgJGmpytbq/Wov1Xnzy/uErOAVEq6yI26enjx99893IY4idfPe1kc/Xhu5//7L9+9/YeY//q/G+/u/3VX37yZwDQDd1nXz6PkrDwOdDm4kNB3fp+ewG3jx4tr6/ewZD392+N4PB0NvSNITX9MFnVVNIg8P72cn91PRA+O3v+9//u31Vh3hvc5fx6vW2Svn718vbqwpVlWE4v7/YLX1Eo3p+/D/Nlu73zZeGNtQ4IUnlYrJavLy8v764EdHIw3683+909wMShQ1ae1Ze7Bsppk/bVpJ4t5rteI2csfNOnQpgRF/OKSO6H9VyprOji/U1ZhLKeWBHA+au7jS9mWdrycPHL9++ePDq9v1zP5xNX1wfL6mK3C75iLvrN5vD0IEJab9eHB4d9l+IQQ/Aqst9vqtKTd62m9n7XpYocL5eLq6v7NvXOgyYQgGyIShiCMtd1FbtmdXYUJemQLaUwzm2M+/0moA8eq0l1d3//5Pnx3V2z2WyqaekC9duU2ui8K8oaPLHj5vJtRTapgzeOIOjJ+zJ2XV3O9ju9224Jc0G2rJZt2yKmEJyJKdEg0jX7ErEiul6v67ow9rsuzg/nfde32wY9TOp6Uvsfdhn2QxXIw74CxFRFERFMRQQQEDQOg5n2fZsVh6y7Zt91/W6/QwJR8c4FX5hhkpQliUhOiZGYmBCJhoemTscGCoxMQECeEAByGoi4KKqUTSCb5RRjlmHfDkMGyaA2SO5j16ScxWxIQ9sPfZ8yQBIdUk4pp7FKGFANgkoXOw8+S3KBGbVtu+Vq2fa9c84VAYnWt/fALicKbrLb3D16cirSXN18CBRm04Ns0ratc6Yi++1OAIiorMokZipmmNV2Q0RNaUCHUBSucEEyIlBR1KnXnAZ2ha/qlKTvOjJRIjUVJGAmF4gLYAfAI+oCCZznhEjsi3qeu83YNiwqBIxCAJTtQQ5zSOicWgIzk6g5qmFwo9yg3hfOl6LJCBcHx/fr67ookXg6qYc0HXokVKZ4f/5d+enXi9Xp5urN0LREqCr9vu3joD7s9nt5/6aaLTa314tlXXrX9i2lPpRTH6aT5Rnu1yJJUj/s4kCO/JS4BDZ0Uh
CBaeo7MCUmAo3tXQLW1KUYRyarI5fG5Y6K4QNkiAjG2jIAA8PRhW8jTeT3ZXwfnf0Pl6saGD7sIx5Oc+lhew5jiOfjun3cY5OZ0g/ptHF7oNangcEyEaDWvjjOw89ni7Lb9LHbgq3NAtPMhzbmVtU7VENTGYZccpFFEEFFe0nVdCIpJUkcnJiu+7xv2llR3d1duGGY6LQ0rQzSft909+RdSEL1YWZvKae49kgOwfkiFNU2a9tc3kt7fHjG+65LOC/82cnx1fuLFNdpcAdViXtWDAC09KbtcN9FGJSTOMmhmFzkuB26iaaKCwS0KMOu8+Xs5OCQs8Vh16b9bDKZVdOXr1+z50dnB0Pq9n0kStOqfPHiyeF83rZ9vVwdHq26+w/n718++eTFdt2YC8V0JsCArigDgm7Wd0ZQlW57f51Uq8XE0Lr9uttvGKTZ90PXAOkQe8hJIYNkkWjN4JzLsbPhQzldhVl9dHJ4ff7LPMSffjqflfTteb/vEhuQA4+UMxKTcwH7WBQFMmeREBwahGqSh1p0iPuhmmYwDOWEyXLsJavEFuJgcS8oliD3XWx6g7h992F+tKoOD8RPkcPm7naxmh8ez/p9ww4dAmNwIahqt9t6zN7VgRofs13ut29/ZTT1szkuz4qjU6qnVtUWnDASohghKZp+jL/QHxiMEPAhI/kg6pghwceeso/4ko8euv/iwx7GBjwIP/YxgvYxc/nxJzywU364zNUA8EE0+n3W7feptEwOkbmehOmE2EuWNPQmEdOAZlQEVxWOyEuXus4QLRRalxYIHBTem5llNVNMSJ45ePYeDQNxyTip/MG8XExCHRhtYEQQE5WcMxo4Rn4oPFNmh8hIBC4YAgwdICioMXpXIVrsekgqqEXpqzIgjSoMoXPsayhKDA6A2BiZjZFDpWWtwBgH6BVUKAewbAiq2cSA0fLooEUDBGIjMhIjIyYjJ+QAEVUhR1NRyWZGzKgZRkuLZED34Io0QTQlA0emikiGrKLAiAZEwXQUKlDEEFE0qZmpiorkZIAqWURAsyMrHBYMHQE4AuIxXqtiItESqpiopq6vympfDY48IonmbCIp5hzFQMRSliGLSrKcH0QuIjRTUdCUTfLQkWgVgiP2rnAOUSOPQeKUyQGPE7kxGjAH5zyRM1BAB+yQkFnJuxHzbAamCMTkPPhCqciApCKSQcVUzIwcEZNIBiAjMgNyTAqWzRwaZmCHzpAYDRnIDEB1dMOYieZokgEQVBTMctIsKhnDhLw3rYCRvXeO8QfyoqqJZdRxk20GWURM8Ye86B9KRTTOJYQjo5F4DOmMpkhEwofUGT4gzH9QdmlsTzAbX4doNDKNEi6aqpqpZDMY53JRkawqqoCa84M5ERQQTdFAVWXUiBEx58yIxM6ScagtWOwjjZXnKqEoCNkJxP1gon0/OBdULPWJRJCMvVcfogND884r5qRi2rMLakaWyMgAMBuy8cRJ5cwhpkg5gVmSvqgWWRCVc1K07IgVkuJuMeVu1yEzF0QU2EBjci44qkST9kPvCBYl9VCXRbPeKoOfTruYJt5t1ufHp8d90613958uf7S5Xg+3A4Wi6/ucMBODSRsHJqvCZFkftuubwteaevaQTZiIgTRHF5iR0iCE7BkJldjnHL33rmSg3MVUi1YBfelmvNjcbgM7A3zz9vbkSf3V13N6befvO3OeuW67mHNWGcgkxcwuAKS77rKqFmglUGByvQ3btq1KX019kn63vl8tKhugb5qTRy8GiX3bAlq779ZXt1VR+FCGKjjvhm4YywyJiJnNTEXZU5+yd+yJDQkVzdAMq6IWSY6lqAskAkJiHj2T4/1bEZg8miEAk0f2Dzyscc3N3ghxJIM+LMFHnJYbKerOlWRKRB8J2WM4HwDg+mZN6Ferybv3H0zqo9lyt969u9xnobt9AxwQslb54OnB9c153G+GbEVVllxtUnF7rsPN7mX/cpuvbNt0m06d1ocrkJ6G6MBiFu3T/fZy192X1QENtrm8Labzqjr8o58/6fZD7G/r2SrXfXkA+7Tdtvzpn342K+z69VU9CaGYXF6el1TIkA6PnscizuuTwIeQJ8taqT5W48KF3759qyBY+D/60z+NLYDC6vigqA+sdzL0uU9Ds/EFx9ShWUhoYG07tDF2XfPTJ19PAFpQCrzZ3oXgsyh2oknPVmdMJWrvCndzd+W5UJOs0REjElMwQMuSus4xZzJ2NCsLHdzVZgfOdThcXN/pOu+36B05V//RT7+cTudv31/8xR/96F//f/7ueLU8XFWrVb3Z3v3lX/7F69++8T4JDLvYf/nJiyqsruLNn/z0X6CJNK207cWbV6vVCpv8yB/t3/U37/rjnz6qZ8Mjh8u5/5v/x7+tcj8rV9PFKmVwCID48t2rv/pX//zHX329XFUX13dnJ7NKhLgsmK72+3/4x9/+5MfPWFDbsJX7Sciffn3y9u0vWV7AoF1qTx4tv331/Yvnz6vZYnfT7m93Q7fTVVVVbrJYbm66Dy/fsQvPv/y0qBdN2/tozWaXh+g8gaPtpinqdPbl0WIb1xfrD9tNgZxipnI/ZFgdziYYNvcNecod35235SKxz947YnNM/TB4r+RKdKHvO1ONOZJjx2Wz69t+KE+XkyV0zZYzDE3fGR6sjvbNNsammlci2jd9XU0Y1Xsb2q7PQEWo6iOeU8pdXa4+X07HUTBCENUcIqPDfbdz7AT7dr2ehmW7a/pmMykcVa7Pmlinq+VR5e6vbh+fnEm/eXu3aRW4LKrF8uLykhGTBAtUzdQzdKk7OvtUc+zbxMXk4GiR9guX3z0+efLdt79bzetmaFeTBVLTlHJ3f/Vv/vVvn51+hhb3w1UyePTlwXnzbV/0Dkm91mf1xa/ex3VKmde3b6owVNNK4u7y9WZxcDKYfnt16Uqn2dZNe3vdBj09v9xeNbuuuY5qP/rJn27ed7u17Hr98sVR12w2m6YoJ08WpzXM9zurp353v8Fs/dXe59Vmu5NJOWzb+90/LVez73/z8vDkJNDBbhOvmlhU81cfNk8/+VkIx1fr92dPP+2H9vjssL18a5mWx4dcBjQjx8L69PNPb+42k8IRz5E9mKaYyKhwk5Pjow9XV0C+szYlODp9vL2+n83q0lEB/vJ2HYpqtaj6od218f5u14l0bT48Wj56cnr+6nwCVc55P8j1bnBk09V0v+3fvvvw9NOnxay8b/vUdhxwdXxUdNsIJIJvX97cp+RXbhjyfnPz8z/7sZ+1fkq73dDt+zyZlbOZK6sc1RV+sFxRMSh5Aj8ruwg7iS+ePL3Y7gmtQNAikOe+GZCLeTVpuyGzAodyOt3tGzGoJ6VgiwQExFhIJ+zDEHMVFm2vXUq+KJoYl9MVDU3hnGQ1MM0wJIHE2XoxAdTtZi9Z+7zx3pdU3O82jWiOfeXdpC4QLMaESeoQwFPOsc0RuACGKAaCfdvSdNoDQsoAUHqXYtd28Yd9BwKqKsDInFZVGVGLIklUNetorVfTZt/cb7dNHw1BDYacs4gkyaLZmxoSYYxJNOeUTRXJVBXRiJMJI
WBWD6DjaSIBqR+bPZFJhF0WEeAs0jZNjH03DEo+i/Z9rylpzgbWpyEOWUWISVPKkiVneODY6FjBFvu4vr2fTqZHx8eO0aHOykoEyUFRFgBweXU5NI0r6oPFrGk3k2lR1+Hth3eA7LjIKW2720CoWXb7NsbkHBelR7DtZjvE5AlSykhQe8chFAQ89v8+OMwpZwEjRBZVQGEiEE05ARJzya4Ex8isopoiWgJGF1wXN6E6RmMDQHAydKDKELKk3ho1zVnATIEGyTknQiNyYlwU0/1+CwY55TjEouQUk/esOtRU12EqQwdCXTcMg4CQiRBAaPkSvj37/EcuLG7vrusAhtoN+5Ri7Nq6LECKHGOoJvtNW8+nQ96jWLO+86HjEDiUkAEgaxbVSNKrRvJFgdPY7gkZxkNTSYiASLFvIPVgY+O3ieSxn9VMjFAVswEq4Fh0/6DtGKIRon2sqEGCH+JpMNqFkMgUkT5ytuDBcIEP5bCiD/YKIvcAwgYau4p+CB1AEgYDViFRUCaowI4AvyxrU3mZGpHsEFSFmIYkow0k5UTA3rksA34sl0qqzAQiKQ4ZDB3/5v27f/Hjr2vH++1lLlFZdhiYAIKpDtvbd9zc+7LinL0DiRJNssWAxeLkpHl5+/bypkm4qifoEPa7pxPXBLre9AaSFI5qn4YIAOTA5cj94AUnoWg6vNjtBsmFUYngzCqkgFgQ1FUxdSRxfX175Z3OF8dv3rwConpakaa7m1tADghnZ0dnp6scI1h1uDzq+023vS8Kn5IY4PLorAiT1A9VUVZFWF+ft+vN/GAViN9fXlfLw8IVuR9Euri9JwZG6Pa76aRMKbMrQBR0kKEDD54LGbLlHRTUoE6efvbJ0P3tf/jr3GZu09HUxVyu+7TJecjJeQfs0CwEODhbhOkkdrGeRF+EJFIUAUH2QyP7O+YiZvHTOucOTdPQO8sMuLu/K7gEAM2o7XB5u1+/v/UlV9N6ejQ9OT1p1zeNsogh0mx1UC9W5CY5dug89H3ebizfibp6OivdBJKH6yyJevbmK5hO3MmZPzrB+ZGrZlZPwbN5Ahj5I/iALcGPzp+Pd+GHR35Ipv0QRftBPPq9E+hjfOxBSBov6RED/4OEhKNy+vCsMe3ye0YSPoyih7Tmw6tSWTGBrytflswshr4ICEJqiArecVGiqssTLoYslhHJBXMBmBkJEMxRzImLklPyha+KgtiVCItpOZ3yYl4WTI7AIuiQUjekYZCkqAoMgB4RmZhMiD24AOwRFZnAFA2QHXBgX5TopWu9s1AURGySJUcEQi6wrLCaayiJClMFU3WM1RRcIE2QBzSxse1sZF2TMzIzl1VUo2rmEfpBZmjgPDpvzhMQ5oyWIA0y8oSCRxdGX5jmDDkhysc20Y90cSQEQudAAf34JgydsKlIAhqMMMWeGEApmxgishvnX0ZgtOCp8BgYUFPWPCKPU9KcNQEw8zDEKEocuqRhSMGNFoExxTSGm8YW9mwGAGIgxONkmdVMVVBId71PHQOJK42cIhhZ7akoxgI6NEAkJgYEYWRiBgIkIwzMAYjBB2QA54DdqOkQGDISkSGhJssp5WyaRzIdAKIoMBHRSLLOqghKlMwMMtJIs0ZiJBVRS0ZjjROYCiESIyKZjIp9JjQG0TxI3yB7F2oKnhERRiMWmWZmxBFOR0iII++ajETl4VjhD6Wih9MzRER0TGZK5AwNx7cORoiqRoymSsT00MhA9gfDGfCjXwlRx0yggcrDZ1kli6hBzvJDVcPDn00fZOCxPU1BxiI2JB45SC54Au9US8057tkKNkp9E7yn2YSQYhsxA6Kh2Ri9NgPngnEgj2ZCSK4IookgAxEZOzQzLUKJ7MyZmzJMkEgk95pHqI2DkXzwwJ0PiJQgOiJHHMJE2YBdn2IIVeWcOsu5J8TcxqquaVKnpCJa1ZUvHSIwUGBeLuf7zbqu625oNm/PXZKCsNu1se98YCo5NQMalL6Yl2zNGlKkmrapoYCWE5GXrKPpLCdDcWhgIIDALoArfFn6wMwExGDleL8spqHKZXPfB/TdfUy5O3vMz07mhYb7bWwSoiPssyrkYeeJ2TAjhaqs2J9fbCKF47MFWs+EBFpWXruuH+KgysETSNNsp/MZgg19Bwh3t9fTaTU5OvYhMBEghSLklGOKjiHGlBMgBEdMhoSeiQGjSJIhutoVpWfvODjnPD6wsggRmBjwQdlhct45QEQczXSKCAZKzMzeEBh5HMyOnYIgAiEhkJky8ThRjNyicYoBgIOjBZVsBU3mMwJ68/0rYlDv2WVGvNneffrJSbzbX52/7vu9QzddLCVC3O6O589XfvnJT0/+8z/+zfLJfLfvk8GubYuD6UHJ7frcc8AwmR7MyCmjS91+WA+ThD99+tO73fby5uXV5WVs2uefvZifLqrDqu/yzd399fn3bGlWzdl3yaxQ89T+6IsXzV7f3Z5jvZjU3pkNezleHsNQfv3o63/x9b9kS/eb7cHqQFauX6/RIK13MRJqjO1ufrzYt+vMOgkFsFVlDTe3weHp8ux0ftAN+5h6CmSYxCAEl1Oq5idczYfcEmhqezBSBTIDG5VuzJZFMkCs3BSxyCLKpGI5pe9f/m3qvvzjv3z2PjXHZ0/urnb1bPr//R/+zfJgspqvvA8v//6f/uXPviqCu3h3NXs2ffQI3nz/8u7+Fq7z8fF8slxIN3SbvW035ze3z5+sTp6W6NPznz46f3//6HD1+eEyZfnij150fX7/6sN+//7xX/38v/9v/9vZ5OTf/803//WPfnT56uL2zYe5f3JcnqR7uXx3fbKcSof1pH79zdts9Or8n7767J/9+Y+/HKxDRmV3eHCW880337x9+uzFdLZIkprNID0vq+Ptxa7uIrP79NPn7169u3q7nkzRF0GzHJ8c3Vxef/MPf3d0clROJ5OypMFnUDVh52cTHvouS1sRQ1VLhrPjw+9/9U1uOldO+u1QhODJ+TBZLSdD46LusuaYnEYVdkzECDa2AiEgoKdqiL0GWcwmOUrbrkMZ6jDNKcxnk/V2c3V9X1QlsN9vm1k9jUNc39z4upqGhWR15pp1k9TN5mXqBBS6dhhHQQhIVWmW95u1d9jn3nMxmy6nXObYmMeua6dh6jNrkv1+74Lv296B63a7nON+vZ8dHrFRe3n9eLJwxHEYDCGT7TZrVyDsdpYiTs2BULJAzontz98+O5q/3r3/4sd//PoXv1jkg8nxwexg+vjR6evfnAPDo6Pjm/ff8bAS5QjIKd71m1x1fd5S47iafHL8k/3tBRVKiIWbxkHWdx8A/HRaT6spip0dnV2fn//u1ZvBx6Pnx69fv00dwH16tHh89qRmizE17GR1PHcb0F6/+Oqz37z+7qq5fvbo6f72fjKvKTh/eHz99lenLlI5WfGLyun9xcWb7YdPPvvq+nxXVvLi2bNhc3s097q/vnx7Hh6fqFF5sLq8bRxvDsNKfdVHGXLbtJv9pj85otniiH1mLnzgzd0u9epC6FOkwQJ5j8zBhzL0bds025PTo2/evP2TH//ZL/7hl113
P62rLGnbOvZTF2Z+vommqvn00bEQv3/74cmT433Ou+22nIWqqieLCi2cv/uQ0/lqNr29uVtMl/PT+fvXF0fVoksdBD2/vFqUYbveLA5myrOEcHpydnNxKak9Pnx6PdxhWefdfh3Xx4cnTuFFdfrqm19r182ms7bto+rB2UFlhSZFNQ98c3m3WEzXN2tRIUQXPCeWnJJG56iqa1DULAYqcSjYFbWPYG3TJM2zetJ0TeFCjtFSmq1O99u3Q9MKMAFHSbP5dHe/vrt5h6Ho+v5oMh36TgOXZVBTJBykw8wOYIIYGJ33VBIzQa+Q2n4DDsySbO6jC6zysDAamXqjD/zjMkUIUCSlFIch2gP2xUS1T7EdhvvdHpgk69APo9yqapDVIIFpFjGVlJOpmfOI6B2DscLY2zKMyaPgHToAgyQCaAA5aXauFIGUpe+7tm+HmAyH0c9PAFHG2l3w7Bkdm6mqiHARwCyhPHBKs4Ckdrs/PDgcK0ckadv2rizmqwMSuLm97jfdcnWQU8YsbdPMjlbtdotZprM5CTTNdrmabW7uU0oCKmoM1A95vb03QCBa1JVznggZoGRXMmYdYuwAyQCFWIC8Z0bMcTAwQUO0EEpAJgQizmaWo4kQGBEZMiiHMCOcIfiuubahU03jZlEkgWUzJUR0AYSSmg+TPOyH3OXYHbqvTKAfGsfqgxeFkrxjTDn13boqJ0OWpt+aQNv2mlVSKLwP89n+9ubO/Xbx+Cdgm6Hf9zm1QzZRybLfb1xRsZkjFnSpy6qQLRWFj7FzIDmhgYIRMqsM3bAn9gzAyOwdp+CCsxRVPx6SqioIoDlixIdTVjIDIKVxf4Um43GYAMOIg6AH35sBjnhafTjpxY9wLYOx++xhhf+QG8AH/jXaxx/zsBUfN8hErPZDzOehnlhQkdEXLCY1BxQIxsHBosMZtFvR62GYMu1VjRABhyiM4Ey8Qh1CFhkkx74rnCcDj0hIWWDddLf398vZJHe7pF0mffXuvJ4szlb1ycFqv9/lfstV2za3J0dHCKGTRODbfj+ZzKvy4PsPL1/fvX20mn7+9GxS5jplOay073RI+61gTyqEIICgyULWpscdwK5LQ9aJwgzstAge8L7pNeXpQb0sXdpvtvvomQ5mh9c3a0myODsOZXj97e8Ugyk++/rFwaI2MWSezJZ9n2tvraoIWdL54qgsKwBUhYODantztV+vXSgmi/nVxVVMdjxdbO/vJyVvb981u918vhDJDkliHmJXe5e1l2ZvJgTQxt5U+nZtgFT7Bt384PFXX/zk9bffbm27Kh2uZqEZrIkRo4Fq19ZsL46rg4VPmk3ZYuusMOKsFgrmotq1TVVwigksBRIk5KLE1Kb94CwQAHF+/vTR+lr6Ifad6i7fXV7v31yvp29DUZQHq9nhKiwWbjJT9l3fY8yY+3h3XaAvfQkc8l409XWYQzt4RR4FzS3Z1UvzLK6MVeU++7n79MewOFDP42WOxAi/hxN91IMe7Cg/GIDGzJj9oBQ9kLx+UJYMAT8y2T+KRh/ZKR+7/x42n2gfH8LRZKeGfxCBs99DjmarJRJwUY6eHlP1oQAEEyECY0bnCDBHyoYY0AzBwcdN7UdrFPushuQdu0kIobBZ5VYzX5boHTKoxn5od3noYhvzEAHQE+qoW4iQY2Ic6atjuSEgj+MTCDmUxkG9QhrAPPnK0FRNDX1RgS+gqLCakK+BA6ReJUMo1HlAIIggjcZONRGxmgKTmSiYKmURUQMDYmQGIkYmdB7YMShKhPxQpQVI5JjYAzvLEXLSZJIU0JARSJEZgNHRSHUZEciIJIYIaDmZCeVIjD4g9phTTElIvCYBAVMBJCAGdkgxsAXMAZ3kmJVQQdVkPNtRNaAUE3EaUsJuqEvvPTtmARjhNqMSqIDoCAUfarZUHSEXwYWKmEQSmCGYIyACley9nzhHmotQMwMRjNwcx4E8j+leQwNUsYwm+NF2Y8pjlxqBARqiaY4Sk6SUxxwWP+x00dCUBCkriYqYWMrsyDEDCrNHoCyKloGYPQNqYEJCBiZESUoOgc2MJSFIMskoPQ4N+BIl0ceAGz5MJGyoRBjcg2yCNO7oHgShH1Taj64iYgR88LgaMIcHMN7DUBw5TKM2iKaQx+GIo2NjNHHYmCYczWpmBqamJqoiWcYKO1E1swdlT0ZVSeVB2h01YLUHtBEZiCqapZggMJpS4YNNck7EisAYZNStFFjjFlQ1i2jiwiuQZk19RJcdeLOeDAmcCyGLMBsRkQACk3fC6BaBZz6TYEokwuxTBibvuY6pMzAXKgBUSBicc1VuWzICTAiKTBIcVyUF6brttJiYQtq3VSi9921zP6nLpt+uprOimrZDU3jY9/c/+uqf/7t//T9W3RTVXCnzoh5uB0LJBgZoCgB5Uvmbu3uJ5Wwylbxmco4Q2EkywIID5JTByLsCEZDUu4Icsy+RwXssQlGHWrQxk2qWoJoq+GHdINdxyNdXw2Tmzk7nodhfXu64DsHVd5t2NpmmTlIUrz4NtO+So7pJqdlvlgehrnwaEioGV7ZNavaxmrg4dOn+/GTxdHpwkK4upe9dGc7PPzwpi3I6dVWFfQeqPjhRZQJhVTMRdexFkkFizMH5DAqgjsEHZk9IxC48nJ6pjkAHZCZEYgcARmhmhIrkGBwAMDtAQwKzTBTMbGRvwUdJCYySKgMjykdpdGS6AwAoZEN49d3LxeEizMpi70nUTSYvPj347p/OZ5Oj9eVV23bMeTFdXr5/s5gst/drh9Mfff7Vm+++/c27cypsMkEgzpinvJxSqftdQOu6zaQiDJPdeofMd7frFyenf/KzP//X/+//F3F1d3M+OZ4sHh+8ef3qoD+cVFWoPCrEpi+KAEVRzapC5X5z632xWW/vGpwtPikgb3d3wYtKGHIugt3tvh2kzF2K2aWhx3pu3X1ti3G9KNotD+Ht979shghcLz/7RAYZOpEkdTEtl/W+36QhldUEvNN2T9459FzSwdOn4DqVvp5V5x8unAuM7J0j701VQRDZMJlJL3typJrbzVAwxAH+8s//1f1Fd/XdPQr+0/fft01zcFx//vWXk1m5vdmePn96e371/dt303K6vt3jqysLKpZXp/PlbPqLv//2ifmb82u/prOzZ5ttM3s02+36331Yr+aTPlabdbrf3zx9cRrYW5aT04PS0+Xl3WZ/cXD8qKrzt7/9NnXy9Z8+vr1dT44XRnC8Wm3v24t3aw/l+rYri4Oj+R+D8m7YIupsWr15c10sjp9/fXZxcTGt6ncv3x4fnA6b+/dvL59/+ijFVkXI8v3d9fJgnlLuh+18VmjM5+fvjk5PvS9e/fZVVW9mi8l0Ni1Ctdu1qFbUIVsetj0E5Zpzm3538R1N3LNnTy/fXXU3d6zDDm2Pu9PHBxcX/+jAtk1rxQySeRTvXYxokpNkEStKH4et86V5EtCyqgFDjNFQ9vv10cnjaq7D0F7ffTg9O4udddtdUVdDKyltizKU06mqW0zm//Dqdz+afXV4cry/3jr34Kdomk1VV3HIOQ4iWM+KuIsx7SR
hFaYpS9/2rD7G3hGTZuvaKjjWMAwNluwKZ2nPzg0xVqdPd3fXfewfP3/xd9+fl+wC0GJafPvmu+fPvhJXff/dL6rjZ3XwPAwCvm+C7Iqz+fM4SN74VT2JN2rosjUXNx+WhwfteuvLw6qe9LtL7Uw7X4DToevjpsb52/cvD1ZzKiflrGYcalIfiq2224ub5WIOFTfDOXE7Y+amm/i8efebg+pJmzq93cZu3XctVuH2KvMOJn65z91ydeims+3V913fL04ez6exvX/lcrq9efXF4+dxkHYzHC8PTeOqmPfcSk674e7u9pLqKdlAlS+XHMU3a/XBPTpYtbtuLbiaH7Trq3JZnxw9++2v/vHz6aTvmhCqqpgh4eHxaXObesirxWR7v9/cNlEiFAcODkq5+/KTF4Rh6LuD1WGaFqGE3cu3bPTJZ8/ub26ranl28vzy9T9VLANaWZft3WZScTVbpqTSbIqDwVdlE6WN6ozue5NgWXeR9OpiawRhWlaz4ub67tPPPo2i+9S6HN6/+/arFz+5cZcI6fFy2fSJmB1TwXx3+6HCR0OnnqeT2uccKSNE6dqm5CrGAb0/OVlmlaGL5az2xEM/gEJRVN3QsedyUnT7qGplVcUcxbocxZd+Wrg2qVOy2Mccq9I7wr7ZG0Col9Y3oUIgV4WqhfvZpGAuY4ptsy+YLy7fn50cF94ZQz/EgiqvajKU7Eozx75tm2VVlFW5u9+HSZUNgTANiR4WRCCmKA/26xHECKJZcj/0KeUsJipji8gwDH1M+6Zv2w4IwDAOw+izhpEAIGQmMSUDGTcuOUVEJHM6LqDNwJAI6aEEiBXYQDXnrEnHBnmzru3bYUgpghqSFuyzUpdSHCIRgSmTI0TJKQR2vjIgMkXPTNjGLFlMhLlyYWIKILLvh5S6eloGch+u3g9tV08n9aK+urgsiGZ1Na2nd/fXZZhA1M3uHtBS6+IQVQZwLoRqvd3t2yGJFXXpkFKWLsbgcFoFsaFJ4hmDCwZIztVlZZqM3MglIHYjHhNcgYCGli1r1pGuY2hAlFP2xoLJoANoyNooXc6dCDhfGwUVIoeSxhWwQ8hgShxKV9+uP7S7OwUZ+tZNK3Y0DINagVz7st7vN7NQVrN629/ttpsilHFo7/YbV3DUoa7qt69+42ZnRNDvt6o6qevNehdC0bfabBp2E2IfhxhCGbiIEQDQh4DoJPWAaArMyEwEKHlQkVETdI4zF5jMcjLMJplICBGMDMARAZiaeCIDFLDx7PTj1sZMMT+suY0fCpUfjBJm+gMA+CNi4od82ZhYe0jfAKGZEI0+bgDAsd/jYZOF9HsTBwEBGYmagYl3DkVM3BgkmpbTucKt6Dn537VNIl6LEhMHjkOWjAjYp1gFN2RBwiH2RfA5ZUDynge1X7x+9+cvngxRX13ePjk9Cssn765uFV1rnVFBviSumqF5+ebVyWSBkMs5lFzcnL+dlOXzJ0/+6fW7717f3N+1U0rT0j9ZLZ/NJ5e3O+itT5KSjI3coBaH1CfXpARJ50SSJDgXNSOyIfG0WsfYrDfXl3ed8MHxs7eX1z1k8B4kX7+9NFNUOTk9mc0mqc+r40fJIPXtwdFye3X3/v3Lx0+fMBVU1mU9z72w991us765L0LNRej2+9vri+Ozp9vbe6dJLX54/dvZ9BhzstybJOMyp1gyW2q7buN8lUWGtimqqgrV9uZidTrpu9bq8vTHn2GBMdnlet3e7yshLwJMkOVoVf/FV4882s2+WXfd6tkTJMqpL2ar2FDslZCyDKnvPDrZ7hIlX9YFT3LmuqxdMVnfXHX7q5yMGcOkypgZrZ6VgajwrBqV0M1rXq6UarVc8YBpu7u8t/2QDTulViBQVeEqyW1ZTADIe++CV3HoCQWcV85d+t1vQZQ//TGuDqz0SsAIoPaAov6YsPwhZma/34r+IGPiAywFHsbGR6XIHtJrPzwNPgbQ4AGa+/A9Y5XlQyP4D/Y8RKKPT3rQnMK0JHbgHDESGMsYDKMsSITJMCGAmjoy9EQgAsYkgCAw1qkREoKomQoWhMUszCpfO5sVRJhQogxD3zTNvo1DtAwOxiQy5KjJZ+ecL5hcBb4ALgEZsgA4U1VjNSQA8h4MKCWQgUHBDI3IFVDUVtRUr9DXwAHBQJJpBvWg2RGSJpVoMjCImZiZAAAyWFLRhwZGdkRmpOgcERAjobJlExQxAELvwAcLZTZSMUxqfdT8ESGeDRiocECM7AzGCnlBYiAAYCCCoGBGEtkFi50BGxcC0SA6yAhZMwBhzkCGnqkouCwcpTF+BSlGUyVCGLF8YACaVS0LuRAFFCxnQWYAIkMFJWQmMjQgQGa1nMwA2HHtuCRLkhXU2DlyTgEQyRlgTnXFJhEUmQswZSCigOyBCdkZkamhCo8HSSM2kAGJiBkBTERMTMCyADAAIGEGAGQaIeIiZpYtxmwpR3bBGYiJGQYmAgZ0SI6cMx/QOyyDY3JkqOCLgKaSkoIZOlBHOQLEDGo5WowQooXM7EYEHgIgsEMDxpH4JeOsEiMg28Mx0x9IRaNChMg/KLD0MEQVkR/iaUCqAsA6Ol6RRmAeEpvZqBaZiRmYquRsBjlLzKJmoipZAUCyjGqeqoxhNwBmJH0oSjNTHXtos4hmIwA2kiGjKoXKqETICKqSmUPSASmE2hFT3G2GbYsKKhnYvPP80L/OAIVJMiBVRPII2ROwR/YOHTkXaBLQA8QhNr1TyKhM3lT7tAdAQlRIhJi0JTc1oxwJxbzziDQv67Yd7ob+4NExUZ9TBoN+6JH2s4OpiStqlPu0v7k+eP7ltm/JeyJ+//63X/306+32ThR9cOV0dnl/V7Lruh2bd+jBwWbXtM1wUB/kvjcgM0EAU2M0xCymkpXNMTnAPBI3izBB5zg4DAiOkbBwBTghIlf48oCI+W7TOyq7ddclMJDFpApneHm33zuaQ9FsIxN0Yk0vRV00+33ykGMfY71eo2JIKUnGPub5ohj6/vhs6Qvat+3dzfVkvppNp/dNm7PEFK9vbo6r2hUVE6cc2bvxf83MQ0wAigIhsJogEjpkdAgmJp4LX5SeCdmrJCIkYkQlZjMZOQ8KSkwP48uRiiJ9LKZkKH0lMtZoEhGh0chrGONm7Ag/yv2q4D5KRdf3t4/KAhVy1rv77uj0yfbivTXpzW/OiWmQ9Xq7CUUdo3Z3u6PV8xBmkxXlHn/13X8MzJkV0O3Xut0PMUtVV3kXm3j7J3/607fv3m2bmy623rmJK1ark7p8+v/8N/+OwP2zP/uz3f79v/ubvzlbPqpfHHJd585KLlxNjw++ur/ZXmx7HOLJ4dHxsy/j7f3dpr3qE8b+sGAymc5nNZf366unn5wsnp98/8vLdhtn9VHUdjmZOa62bV9OixRFcn95tbvb77IzhjCkWPBcs0mfHp2cqcuYsgUfTTTl2WKBigFLq5Sc9rt18LTfDgBMTDyGt9krJLXMwAiOfSkGogoiJBmYYrvZWn777vInqx/Vx8dv/vHV188evXv9q/rgyePnP+r75npz/vSLT3717VucTr48O7jd3K
+39z/50ef/4T//+ng5/eLHz4Es7Ojw8eL7N7+7u72Pdvz08ZOFc48Xq8psNnPG3nv49tcvPejqcKKFPHry9P7vr9++uj46WU6qyXrd48S1F7mI8d35RQaczsvpweR20zz+9NHNZRsR1kO7G2IVXJvSlz96tN5sLi8iq1OBR4/O4jBMl4Wf8C5tiQCSHh7N97sdF3ryeHF+3r97//7w4GB5sNzd74ppPn1yuttuP3y4nSy6ejaflFVWae7WoSwGAQMsZiUVOMnL9cX+zcWbOoRmf/3i6xPscnu//+35b9iRKSxWy6aHaJlyRrAy1Og4t7tQE6AqUXZSlNV23XWaKw8G4AMvl/P7+3NfYHBw8uigadYMvgqlDKmqSvL25uV3p88/6SIeHp396LMXm+sbl+dFVVf1QwMamg59e3dzN51O9zGi8/u4bnfNydGjIUdP3k1KK7DZ7VaLk1lZrTd3kGhzfweYZ261x3i32y3ni+XJ6TbKdLkspLvaXD9+8ay/b28+vPPz4Ys/+flufZ+jw3LWF8WXP/vjt3/7y+Xx8l98/en68rIMMKmns9PP0/p2325vNpcUOqVi/unXkNc5C8TUDuvV4aOcg5A6tIlfbNfXs6N5m9vcsTXbw2nIvV61F49+9sXVprnY3NbTg9WXz6O83Fx/f/127atPbM93/bRYYtt+wApy3Ne8kgH362E29U8fP/rwy7/Gheyal5989vPv3/9tIXtfV48eP72Da5quZjPUPu72W3RV3mzc0LhJJdnK2UowE+PKzWIjSna8mHfbu+PF5E3OSdLf/ervv3j+6d2+xwqPnn36j7/75fH8dM7h4uKaQvVhN0wOjt79+pyQjDWlDJ7vtrdPlk9CnOVNX0l4f7n7/Mdf/9Pf/ScY4OzRUZeurzeXs7JC4Yv9dWYOURL7y77/+ZfP3r98D6V79sVnb3776vrDXTTatrHkYjmr5yendVEI8dNist9sZydL8Typp4fHT7qcytJjk548PVtftW8/vBHBThB6Y+TCedHYx/747PEQO3FiCFDMsBiYoVctpiVJfnF69u7qckgIYMliFjgIS1TIeVdVpUcygChDqHj56PD9+6vFYh6mxf52a1m884Wz9f1FVU/7lLo2ooMuRuemCrK537LXJy/O7i52j794/uu//Q9xv+Gi5lBPfGXMXewdlao0qSYq4gtX+UnsU9OkIjkC38W8zy2Ydvt10w3zgwkb7m6acRTknG2kQYMhYk7JQGMcurbNKmbqnUdCybrftff7zf1m03TdWOziyIlqyhGRHKNIIiJAAiAC+r1XSY3AxvCOc0QATACG6Er2BUnOsXMgljVKPwzdkDRlUbHgAyJWIUSUmHJVVCJCqKGs1AAGhKzeubquHECf1RDKShxTTtmgmE8nRXD73c4Qp9NZGYqb6/Om2cznR0Wo7++vg6N6WqPivl3nGIHKzW6NpN6F27ur3WYfiuDJ362bXbsLofKevPd5iG1KgQpmXzivNiBg6aeOKaXBEY37ENUsYMzsHRN7QzZAVRPJZkJjgw86QBRTZDTALCJ5K2kAGIbYx6ExY86IrnC+VDOBjKooyQxGCW+ISZElR48QUfuuCT4wOQMchljWU9Ld3fm748enh8vV7u6+i+2u2Q3DoDvoFY5WbjKpvvvt33s/T02XAdF5RTBRJMo59X1Tz2Zgfd9ZUVehrNXEciQU9oVIQgRRBQNg532RY7KURfo4DJJVRIjJVLJmNMExEf9wJG2ArA+GCkJEx6hoZmoPddYIgKCmZCaqhOM+d5R6EMlAiMhsPOO2UWt6yPE8QLcAEdWUkEfnEfHYpDP2Ho1dyQCjuwMZMKEKmDA5RIcITs0RTZVrouOYzyo5cvhN372UYR1jVblY+E7BFDzx0MVZWTh0CZJmGburSzBGa0ReX948nde7Zv3h5v7g8Ojxo0dXH94icTWp+9jerW+X5LZ3DbZYcKY+F/Ws0Nhutt22ezQrBkd32/ZOcpLu16/vAtqy9LMAllM2BXRJgABTxqla5V0jKZlSwX1WjbAxycHtTV/uetJ0VIeD1VHs23Wzm5wsZquD9e1te79GdmfPnzx5cra9Xz9+8aJpBmJeHTpr78+//81suSAfkrhpPRPRHBvV/m57XZRljikUPOxaU+qaHvo8nbt//Ie/IYDaOzHd7tZFYOdD4UvUlFI/9L0PBSoNnfjCTBWYt5uLyfx4u21d6cqD1Wc//qJ+/2pS8e0usiauK2JW5Dd38ep2HRDmLEM75AK5oBQjsc/7XekBLMX9sDp4dLveViE7RKvm7ELGpo/t4ug4+GLXGcT+9LTmSu8/XGQBns6rahJ7tQSWxSH1ncSu665fdefXu9tOe2QoyQBy9oZnKz1dnWQT531GMzUQYSEjdZ6Jxctaf/frvtnTFz/Cp59qwYBAZAg6UtXNAIlGj9FHPLt9FH1+z0+Bh2jZKCx9JKP8XmL6GCn7KJfCqKs+QJAeyLjwg5fPAD/WFcAfvAy5YA/7YxYTBdCRrsw0bq0z0UiMMUWRCEyIrIoEJOP7GQ/zkbJIEYpp4ecVF6wkfR763HXNet8NwxCTigbm8R0jExACOWXH5URcgWEKXKIKKmpKZoojLt5ABAA9s7fRoKtG7NEHKKdYzs1PgYlk0DSkbgfIhI6ZAE37JneDCWhOoBDToFSAkSWTOKgIoiApMrJjcIwIhBkNTRTBkSMgh94ZO0EWNUuJUjZ9kP0RWBHZeREjZDU2IHv4UkICRcNQKAVFQmMgMBhRQqRoYhFAARJhVjAzMmDvQh2g9hwStJJyJmSADIjIiqMJGMGcwxCIHaGJKagRAQCh6HjJPLDfPjbEk0NFw5iy6QA5qgwe1AUzSSKKUGoamIUBGUGSRFNiJCRJyTMRM7IHVwISaQZLppI/9qc6giyZ2BkSIhmCmJmIGgKAAmQb0y9ZJRFQEk0KMLK6FVkF2WXTMUpDPpB3rqy4dGUZnHNMCAagCXKCAS1nQtJMBOBGdjhk0sSSYDTtgLFznkfrGxgY8uivA1UAJgIbk2H/hVQ0TlQjoH78LcODGvowXFSF8KFsbYy/j7qtGcDDOZACmKmZiorlNPaTSPrIJ4KH7jtDQBqNMziG/TMSq4mOB02jEywJiFA2SwKa0TKaJmuQZl59TmgJyLNzlaKZ5ODZeSqLkPucu54rVgXpEqF3hGbeV4UCm6kJFEVhqhTILwuuA4WQSAkyaxoLPsdFnIJmE/aleziPyYUnBJOuZwFkNgYxg37wmaQb3D7VjnIaimqae0nDkIaCgk+9BCsIHCJxMXPBSOqbNx+efXUCsxJAuvUguKsLJvIBl/d3bSSZTZdt11rEYsoCUbIAuZyTooWiyqQ+VHG/YZyICjsickSe2XFBfl74qgikmKyuKyuIKRbWQ+UcVz2gKEDyzS6LdCcn02pRnwSzq3vvi4p5AxkcZa9d7hNyMQlYm2hKSVNnMan3U5IuSUoxxqblgrTRdt+o4sFyXk9nbd94D81mPamnla8adlGAHCLCkAcmp2CgajkSFwaAaEPuGQnBkmiJNREyM7BDBASlj
xlJdoEdIZFn54DNFEEdIaAHUMcOUIGAmYlA1UYqNjtGcAhsiAxmltl7FTFTZv6hIPnTT5+iYV/g7eUlGGco1B9L2xVi9bx68tnk4mW6XUvvp198+aObV6/b27WG5Eu32d2++Pwr9tXSh+GuyWqzRei395Oi+uyL03dXr7pMFuZ3m81ssmr2+4ODp9+9f5XD/n/7v/tX/9O//ffROGY9f3unpT39vKwX5fb2ForS+9XJajpA+ubtP5Whni0Opssjwv2Lx0fXVzdDc1M7rpH6dpt9OL+5ubi+LXQxKevV6cnFxXfGMtFws8+fnn5STvXlr17lHNF7HwqLgDlPDsrNpi8mRZaIJN4VOeehb7x3YjlAcbvbLFYrJ9YNkqX/7s1vPnn2YyJP5kAZhMfzbhjTzzkSurjvmbGuqvXmFjxeb2//5H/952++f/nibPHVz59Rv62WB9CHf/8//N2k8PUE0xY++/pZ18Y29qvlbL5YdGv98vGXCHRzeT9deBFf+uWL5+XTg8c353cX7TU5t7kYBOPzZ6eEVJKspgUOQxqG//A3vzAo/+TP/uL/8n/9v//v/w//zT/+x793xfRoGVDJOjs+O3x1fk6U2Otmvd232Aydr4tJKMKCvcfNZj+0fenLEkvwaXO7nU6nzqMLLojb7joA1Sh319tqUgx9DHU4ffyovC1uLm/CyXI2r4fcYxkOzlbzg9XN1e3l+4uzk8PFckZcaNLFUbXbNeu392Z89vTJ6qhY317sm50Bvj//jlx1f3VbT4iAm506LCcVs8VZWVtK9/vbMkxOz07uduelK4tydrW97Zum8F6yOB/u1uuJLgI7dGje77e3rnCOwtBIPa2qovxwdVNOuCinl+8+nD17/v77l8fPjh3J0PRYUFE8ELuAFR0cHM9U8TistvthEAgJdutLzwHdNA5dFh+HVLR7C+wKv9/1i9Wq79u7q4smZSCn5IoqPj5YQIq7dWJiRUi7ATg4B5Oiurj5tqxmhRaFKz+8Pc8U6uVis913PZShyq5YVry/2ommp8fH6ob357eXr3634NIDfPr4RzTp+q6/ef/BK2C5+OTRweXbb6vZ4dl09p9+8f3jw0+OJoc3jRyF6dK7O4/DnrHlu6vv7obXq5nXxGc/+2/yL39d0qwhe3v77U9+9l9D/XmOPhyEy83fD/1m/er1+urvfnz8l8flkwj3zebN0eMX6042+0zh5Ldvz08eH/3q+/90uDr0s+LRZ4/v9vero0Uxq/fN9u7D5b/8V3+1ubra79pBbYBdWeq7D2/3Mff9ULmw2/Wr+ezd+euvf/zHRzF069vTuo64lUp++au//j//H/9PRvD9y9/Nj2YGxM5dr+8fH7vF8nAxOXh3uaOq+OXvXtr0lOezk2l8d79Wz8KurotmvTk4XdJ6+NUvX5939/OCjg8XeYgXH95ImedVfX23X50cxyFOjo4/+fGLV7/6hedqsxkWRwdaFp89+nz94Xo2PwXbllwvS7g7v0WemNF0Nb+9X08noQCazlbJcL/eH0ynKW4PF/UwpM16U9fLru23zX46mRDy+d09u0KzOV8o8vzwkLoeHNanZ+Cs6wcYKCn0JL6LXJT7ppw+C78AAQAASURBVBuGYYjD4WK5295NJ1OzeTEp2uuGgIKv+kqQcr/drs7Obj6cv/zVb1Ghi2cnn3x+c3HbNHFa6dX2zk+mk9nq5fvfPD19vGnvyjJ4X3YWgw/COj87Pr943+rQd7GqS7M2V1KfvdjcdavVIcAvAKDZ78fdwnjopWYiOeeUcs45jdCYmEVibrtmu940zT6rpDgQQhrviTYm8YGIVHTchRi5bJmJmP3Iq0dGIxjzAoiuqiaunBE7UDXTLNFE1LIZIpJ3jJ4ImAiS5pR7hAxgRVXUZVDFIWXnCjcp6qIERTKpSQVMwFIWNVvM65JRc+xjP5nUwftd01zdXDx5/BSg6GNjQ5pUZeHrtt1jigyQYzupS4HsULdZirLe9/nu6rKPuSqDKOY0aE7Bu7Lwk0lYLqYAwgZ1mKTYx2TMbuzrQAR2HtBxCMheDEANFFQSgHlEU5McBQ3YGwg7j0CSIWbTnMxSSgbmQVFEMQ8IzCE4V8SYPIMPZcpkoKFwviv6pmGErOJ96Jqdc2U5maSY3LycrQ73u/V2fc/VtF4effjwHtkpDmLYDsO2uVeoyIX5wapN2dhJykReUmQk9phSJzkwQx7aIgQoAvmg6ECzSkQAIgZEVVTL415ENKGJpE6S5KE1FQQEEUQCIFFhYkSCh35i+8EohAAPfAhANUQgsRHwATYu6wEN0EAQANVGLCMhGRiOcKLR7YY4CkOICKBEo3v9oWZnNG2DmYo+lH/AuKLPSoaEGIKoEZJTJykV6FjNBa4Lrbr26Gi1bEN9v3kbB/B8Z5ByRjNGB4VLosZqjEykooRj9oNE9MPtZuWraTW92613+2YznZzMqv39ZhLcrKo+7OOm3WkL+/3+ZDqZxOjbzf2mud3Ftdj9kAaBKOhKNygIMmapGp3s9XHNB86B0i4nFKuQAiQCSQ5Stl60VUvObVUQYGC8E0vb/CdFqIdu192fHj3iJP37V/2uC8QHJ49OTpZ9v6+nFSA32+7Fly9Q2l//p/+o5I7nJ3HIYbYsqjL37X57MZ9WwXNKojBM6um7lx+GoVtAz2549eb7q7t3n5x9zjkOXT+0m0V5hjGVVSlpSF1rOVuWlJMaxNixSrfbab6YEHBP+/tOzeW+nS1mVcnV9TonebdP65QvO+kHrQSOA9eHpXeQ0zD3U9UoqXfsTPph38iQ9gGMcTEpL6/vqunRdDm7urnwJS0PjurpChPm/dWHt78pQM4Olrt16q/3d9vtZOqr4Devv9tdvp8/+sRVB/u9XV3uJJINKGkIRVG4wmEF6tnAmzLE0a5AXCAKGlrqTJHYcNOzqqG6cqYnJ+ofZCE0AQMkBjX8WP33cDl+5FKPkicqfsRX2w+XK36kYz/IRfbRUveQMrM/eAThh66/0VuEH3Woj//Gj6IIWSSZycgTIxKwmDMAkOcEikwAAMwIPOpGhiRiI713fAmNAn0qAWaTes4WIJoMw9A3m92w67qmyzkbIjsGAnRY1qXzjsvaT+ZcljpbWVGbqwiZpYccDQAYIWdTscSGjl3JzDmZGQOysudiCtUC3ATQYU46NDlFTQkZUA1VzLLFXlOUFDVFBRTgnLMoQzLMgIaASMyGwr6AULIlBjXJyAzkyLFRAcwACFlwSNoPlpOIGjAQKpABm+EoEZmIWQSR8Z5PLozWRSRFy5ajDR3EqH2fY9KcUDVLBIsGCYiQwXlG4NrTzFkBompZWRGJwbIggidnJkzsg5tPpux8FwdNYqiqAhKJvJkBOUQiYoOkKgoZLAOIJhpiJM3skL1DtoBaI/rcVgVXbIwRDJkdkqFlNDJQTYaQGBKxKtXAbqQdIiiwAyYlNCZFBhkPirIrajGFlE2SSATCZIBqDJwMk3IUNGQAUkFvBWNwzC5pYGQkdj54H7wrnUPvgJwRgZYgybMbmctOnKB6j4WYYELrKXkYeiBH3oEK
ECEqwFimKYoPpZijvuWYlP5LVhF8jDHjePyFD+hrGpNlSGOq2R4inuMUIoD0A1tMDQBA1DSr5CxZTS2lnNT0wVKEaCBZGcCQck4fkTFgpiPEW3KCJBAVo2mfKBmoyTAQKzuULMyDxAFFSciyAhl68hyQSdEbBR86LSCnaKblJFii1GViDwFMgMg58t45X1CxcFY5qgowxH6Q/WDdgKLMXsVElNiB8wpj+gnNDJTSINYJAjOqL31OqP0AyVh8c7fzU0OC1A9FxTlr7Adfe8mSu6FABzGzULdulqt52myuLz4sP3+87WCIawCbF7Nd31YhpFIyW04xDtFXYbCuix0QIZEjL2Zk4snFIaNjM2eQvSuYiIFJwSR5Py2roirY5+xIBbXwIfjZzX0rOc+X5d3tnkuHQn0v72+aafSPTk6Pkuy2OyzQlliCwwawNZC6z7koQ8p9wZwh5hzJ96EKcWiHPskgnn1KUY1Fe0euLAL1HaINbbu5vjs8PSMiRJQ86t6FgRIBiJhi33WOPTrUnIIPRfDMDkQ1JXJOJHnnwCjL4NAZqCPPRED0cZZQx6ymTOwpjB7Vj4E1DM6NoDJCpI9fUwUc2V1myDTSQ8chEPddGpKoffWjT//2P/897KqiPD599Fi3N92we/nrG0iWh77ws+3lRXP7ATyCz1yWj+Yrv8/lkGPX3N1fD2mYHp0ePjm5+XCz3ufDJ1/cffv9clqTayUO95ubn3zxtY/DLq7+7j/93axYKJTzP5797vuXKYXNetCape0Ky9tuH/Jkcbj6yemL3dBdn7+bM03DzBPQYslV4UgcVylup4fz48Xid99cTI6ObzdvNq//09Pli8PpwbDZrSaz7flNs7se9nvyfLt+M5svluWpU4udaDTvmQvcN01mTTFJasswr+p66FLy9vlP/vT2/NW6uTw6nh8sTuOQQ10ULiA5A0UmZGZ2CDp0EYlFTFLummazWV+tb6Pm/bBduqXdx3ffnNdl/vLHL95/f1FOJrFvj1ari8s7LMu6Ki/vd8+fPDVzCDKbFe1uOFhM5suyouLizfVsXnp2R2cny4MgoPs9nBwfrW82m80+ph7J+WlwE//P/+znw+Xm/XTyv/yv/qK5P18tJjnD61dvPv/qs83NLg0SJGwveonx0eODgni5LEh1c3U3mU+QaDmpiWi/bxErQpxOps0mTmYOIDuiWV2Z5qZvTRGAHHG/2U0Wy6PVAWXc73b7Np08WkjWbt9Vs9nJ6en5+/Pry4um2R2eHNSz8ubyajpd1lxfX91dvTtHxHpW0srvts3mvtG0zX2Xgk8p5Wxd3yFrXXC739VVXYWi2+3XaJJyI3uZ4Ww2vbu/x8xJNDBOptXQ9tkG0JwlTcp66GJdzYXbzWZzdLjywUtM7a6/vrzISZ9/8uz81feT1WEqHZIKPATQdvu7k9ODOCgz3Ny+qg9PV8vl5vZycXCWsrTNWnJb8JLJ3l+8XZ4ezCerMhozIaSmvc9GRTk5Wi3ZXOq6UNB8OTN1TWpnizkVMMDdfttMJyc/+9N/9a//h/9bvN17Z2F5+ury/vryw/H8MGMhbfurb/6hVJxVRex1dnA4L3rd37nlckj5H375b8FTCJyzRNHV4fR3r37db9YHRv39sKzrLsnl/Q07y113//oquHBwsBxESdjL/PTZ15vf/sO789vHRbl/fzF9evrP//ifvTn/TREr593Vu+bZ05P3r17Vs6P6Z39228qbm+2Jrp5++nPsdh7oL37+o+9+933XbW+vdo9PHznyw27z+uWr48dngHx7ebvrhqKqL277X3/3zV/+yV+td7dJwYXKO7o5f/X5108vXn/ou/VQ4vPPPr378Dbdy9NHp++v351fvz95fHpQL/763//bn371F68jp13yBiby9OzMTIZO3158GGxTz6eb97vTk8/rOrz8/rcvnj1q+2F6uAB2V5u9+WJ7cW0lkJb7TsoQJ77e7lq/qBeHq3fn9/OjyWy2WFWz/fv1HBZtTOWsOjicXDfbuN0uy8nZ4vHRyePzi5dk9uTZi9fvN6Xzu+12Oa+enB1fvL3wlDVBPas27WZo+6KiYciz6cxEn588udrcFKVLErftzZOTx7E3r2WXy5mfdkMcU+vCto+7VbEqJn67a9LepWZ4dHJ8nm+NBnV29smj87cX7JySgXfIbmASYouWhUWgqGeThf/w8t16f7E8qpHcwckSLQqgIivh8cFxr3l5fNi3TSeazcpZAd69ai7a2O0pPvv86d351adfPNGeb++bqpoNH+eCj5AXFM1mmFW7tiOGlLOqsXOD2DDEOAzt0O37LkkWFQPMoszjWRqDqQAwESOrZiAcm+tNVQ2NzJgcFWhAwN57Zs/kHKJZjkNvKgCYTCWJATgXHLCBoCESpgzE5INjxxwK5ygPCVTrssKycMDeEQqEuvJFue3bthuKsijKQk1UsCwLx7zbbZuunc3nRVXdXNwCSll6IOi7tmuaxbQui3poW5U09F0ydQhRZL/dgUpZ+klVxJwdgndcBF/W5XQ6jX0ENkDbdzuT5MlXVekKnyWzD0YFcVByBqgqpAaaNfemWUwNQNWQBNSAQMYFJajzNTJJ9iqCyoAkOefUVyyInl0ofDA1I/NFKWLO89JXfa9AkkRyFwOHvu99uzd1Q9+vDo7ur6+bbrOopr6qUkrezCOrSBpiAwCGTPquf1PW0wygCuh8QVXs9jll7wpQlZQk5RQ7zx7ZMwciFjDTnHJ0Y8Eqljkncp41aFLng+YWTESiiqEZOEIzIlQTRKOPGF/8mB/DsejsYzwGABgJPqJixQCBVBVs9G+AYh4DOwjmmcZyJxuX2Q+77gcozJjYIWIFVdWx8Abph5034IjOHmM5xAYGhAAWglfLIPzAIUFIErmeTNn9+v52bTorWKqwbtpOZWM0ECvhMOSZd6X3WcQhiBo4AqI397d/9ZPPSHXfD00fL1PWKEPWs8cny/n0su2b5BqRm/12WTlF8J434C5i2iYSwwSkPcSUHaIz6I335kR4D7pgcmJOrSJ0+FDYzN5FB71IQmo5JNC9mJDzSnWHKt3ptC66rr3fZcgO65PnT/xkmnNmhKqq9ve7s08/oxzffvfrzd3dyfMvhmbwYV5VpeW+3d5UwUFM3aZXo4ODeXe7uX3//vDxI0jd5uL88vX3EzepikWz20vIZQgKlLOVhJvNpggETJql69ZMpaRsWYpQXNy8zTs/O3iKQ8+GzpIR9k0K5mZFJbftbRM7dEjkAaXrnJpnNIM0DH46CSH0bSsyDP3QbocMnEWLkxPm2HZNyhlzuHv1pnQ4O32m6qr6YMCldOurbpfE1c6VEz777JNQhc3m4urufHW6cvXB8ddfNPfd7vY+xVjNw27fgZWHE1+E0XGZkYiCB0AzAR1RDyRqecjsHDV7ef2qhzLwn9PJSXaEkOhBMrIHNwI+eOE+uorgDxjXY8eSfdSAfh87+4OO74f82h88+pHx9YMwBA8+z4fCpx9k0o8vREBmURSyJCAyBgUQEQACxFAGF4IZRgMxATBQJUCH5kcLhoKlZH2
ybigQa+9KFo059t2wa5p9H4cohuQLdFSUPgSupmU1m7piYr7kMAVyKVTqKyxqRtAB2XcWA1oGNpWsaSBgGvOkFIzR0GMZoKjVV8AeU5LYS9+nOBAhAUrO2itqtD6aqAzZDJNIyqLA2QzFHKLKWHqPPpTkgrEnHENJCOyAHPrC0AEgpAFShEE0JlHICkDjH0nBjMcUjDoZ70SmgEyuNAyIbCmCDqAqsQHJmpLEQaOIZkmDagJAxAAg7FhU2fGk9sus86RrgWimBsY8dmepGREimjMtURlUTHvJ9hFqDoaeHJow2IiDGovbPDwgj5DAMzGCZ/RoYLGgYl64WYF1acGxc+DLAggJGEDG/jxTVc2QOwMw9KCA5JAJXaHEQIbkABAdmIojbwhmys5DJk2QNINYFCXDpJCMoxgQCZARcgZH6Fg9mxmaAMkPOC8DUCAEZHCM5AzAM9FgGdWVhQqSQVZUEBq9TjmiY1MVAUA1BGRAMxEVQyR6aB1EQOD/QioapyR4kIfGVKeOKu04XY3lZwaQs4w4pLGpcxR6fvg9SZacJEcZadZJRETNQEVHDrapKY6+P1Ide0DAEC1lIqKs0kUcBKJCL5DAUiYBSUlGX4nv2BKisDrNBCAUUJxRwc4XVhQwm6LOhzhI36sIKpWGxg4RGAMrUBQH6BfOrYIGVgVqM0cBGVN0IyoM2AX2VSYi0MIIzMUMkhWUEIkLR5wVTXOGBJ5IwHICj5VB6tuhKgu11Gx28/Jsspgm7WPTbW8vZi8+ub/pZ5OV9+fbqw/Tw8nq7CD3u7RpAkOMfVYNFZkZKkDM9eGCA3dNNB6bx0FzRgemQC6YavCcVB5CjiBjq4jGDEM3O1h6xx656TrnQhGm05KHuAEH5bzsd0PhwzhQ7u6k3d0tF/Xh0URt06e2MDicVfMVX1O/vtftLqtjqmtjQY+qkRFANeakzs0Wc764Qkh50Ka1ojooq0psSARDu4vd1Dl0DrNC7pJzPMQUHGdVBBYBMQvsTBUc6rjGRwZkQHYjnhuJ0BGRAqqKM0BkBUVE7wtAc96ZGBADPviJiJ0B0rjoETYjVR0zmMSk9qC8GSgBjkE2ALi7XU/rxeX528m0fPbkeTmZfXh/Pj9ZXA5tu9+tVica9JPTlXfVq2++K1zRJZisCifZI+zurrfbrZ+UoZ7Py9C2zfa2kzbvb26ODp9WHgB0Mi2u35//8z//yc3771++uj0+edztoa7IrGeXFvNarDDB4JzOplUJZLJbbydQN822nFXVweruzev6sJT+uuvk9ODo8t1lzFA4kN3N9zfvN2t79Ij+F3/13719+U/Y21//w7//F1//5f46Fi622/18Xm3b/WQyWc6WE1ebcuGLzLDvh9a2UZOYmmBVTobUZ0iOQxH0t9/8+wBuPgkX76+z2WK2cmx1XQzJiMiMDCU438cYXOl80fbD8vDo3cvfXF2+4RmvwN98++4qnn/+yR+/OHh+fffqu19/s9lef/HJH23T0A3r4+ODzfUVVBOOsj7/0DVxMp2go5jBl0zbIcZ8tFoA+dsPd+IzVkUIVVkWXRzQ6PDsqKxcUnn93fdnjw8mJVbz8t03r4XzZz89vr3/tqpn7ca6Zi+Ybjdr0azilsv5/b51XM3qIsZuPq8wUN9FRFcVXqPd39wdP154c9MZoWUDbLctew4l4ayKUVLX13WtXLSbXZhUy9ODsOW769t+15b1rJ4UI67u9MnRfusv31yh6upgtTw4aja9mV+uVuvb67YZ/GGdpGGHl1d3i6ogjBdvP9SzpfOFWFKAIelscajJ2MFkPuGiCPV8d3P//s320WenlS/FkmW73W6n8+U2rY/qWUnlh6v3i9Uq+LLZ7VKGOOSr26vZyfHlm7dlUUyWy7fvLnqJh/O5pu76cjdZHh8/PxlHQShcOwzSRxOYlctdr6zbysdmdx3VeQSV5vTw+PLyll2/ve7TbjspS1LXrq9W0yLMqu39uvZD16yvOj+dVxIjYjg8XV1dXDT77Zu77z999HzpDy7ev19Uq5Mnx/vu8pffvFo9PakeP7p8/f5s/mXli9Li7YcPtSulzx/e39B0vrm49kyr1Xy/WwNNHBeRGzascF9PFx9uIuZJKCcvvvz6l7/4xa6cT4oAnXVdPj06lf2+79ahWh0Ws/XNdtNcVPvfmD94/OJJb/q7b75Znj6awuTk0eofv///RQszq+Lt+tmXf/XdzXdfPP0ySeMof/fyf/7ys5/dXf1qUeLxau64jFIwhfcfdsflMSBfXt8tF0eOhk9/8mjX7U4OFje7t0bFwcFxurtprrY/efrFMHSfffbJy198t7bYDM1PPvv6Yndnbtbn16tHh22zPzo9+fbvf3F2cPLFl8/v2+u4SUdnJ9+/f7uazg3YOae2++rTn16dX+f23Wz2JIivw2Q3XPe0u7zbhpnu451fhf/uv/8Xf/2b73zTs9E+Aa8mFli4+KOf/bOEPCvq4Ke7q/3Z0z++2l2mbvvi6cntN7+qi+rw6cH3N9+dnj1ZLZaFs8D+8y8edfuGMqniZtvNpisQ7vY7P5827f2kns8WsyFdiWTIe8WiKvy0rJrow2oWRQvHPmrbdpEhUGmMl+fvz54+erp83A6tQPzkq+cX372ZFC5JO1tNl6vq+vzOlb4IkzxENvfs8bPXH87r+WRa15ub28nps+bm5nhe7m7f+spnYSiC8xRcfbisvv326vRg2bXDYMmzixKnB7MY5fBw1neUyZfTYr2PRT3ZK/DkIBsDcTk7eP71j9rt+3EUSDY1JSJibwAYh7Iu+qaNQ0w5E0X2ZEZDSk3fZ0k5RQPLImiKQMjE/3+q/qvJsi270sSmWGttdaRrD3HjxlWZNzORqEIB7KJVN2ltRrL50P1GPvE30owPVMamdVk32awqoCAKSGTm1SE9XB695VpzTj5sjwuUhXmYm0fE8XD3s/bZc8wxvkEISGNZCaACIQI6RnscSwSURIVIcYSJAiChpr5LCUAkdppULI24awMEE9CkquP2ro+DI8x8lpVT/ehG8cErAjnOs6zIS0hD29Z1t40x5ZnPi9I5JnLb7RaoHNqua7rMZ7P50fX1lXZSlWUCjSKpOTgiQBBJauocZt57gs3Ddr09oCMwVIB2aNnh0Xw69ApEJqmrWxINwYupmnjnXVYY+V4szyt2AV1AQEuShloljX770Y8++gOIvAi4wIxZTDGlQTQRmBiqsIhqBOcR2TnOVa3vurxwPuRd7Mk0MJMZEhE7cJZlTrU71LtJMQPArmlDlu0f3p+cH3/6y1//8Xf/sV7vl+eXy8Vyt35AI4vDkFoYEhrnueuaOyPvq5wQkho6F/KiOWxNFYmJnQx9f9g67znkhoSOHWUSjc0eK5mACcFkQMSxawVzAwAcuqHr0EzSwGSEjzBQxJ8zN4/33mCGH7eqo4wJpgA6GiPYDMHoEdSLj64LGNNnIACq8vhUBP4Z86uP4GC0x2qi8X2jRyr2R9ALISMjAMhAqmA/x3zQIyuRqMWYXCgRg++6gv3U0Ye6frOqXUFQFQ3Auz6+6qRWIceiOq
SYZ36QmFQ9BiF9MPmfvv3jV6dLc7baHYbCc3APD+s32+352fxkfjzElETrponIdYypjXGISUxbJYXckAFKx3OEOfHEsTOYO+eSuphyn4EDUGPQ3Lsk0cTEgByrWS+miaKAzxwwF0bWyhDrgTeiMD0578o5ZtmAeDI7AY136/2Lr345W06vf/zd9v5msTj2yCI4Wx57nw/7TWr27GhXt0PXLxdLl+rvf/pHxhTkUD/sP7x/PbTt+ZOldKv9frs8WQaXd12bTaqu7SQmLAoyyn3RbofMF2mIQ+qzMg9ufthuGPK+e0By5POiLG1SDU2cePp0VqYYb1VcFdJhyCvOS98Pnc+qoR+qGXVmHiwJMJIqdLWhK8ym/XAf4r7I2M+PHu5Wq3cbbXBx/qQ6Plr88hd//x/+XSHKOfkiT7m8kw4szI6ft7c3h9ub3E/9yadf/K/+1R//w++bn94eYo8MyaLPfV4WSujYA3kiGknUqomMiBGRNUZQIVQCgfc/asZ5/l/AZALu0SMHpmNPn6kCApj+M5fPaDSyjxVmH4Nn/7wr7ZFR/RHQbooftaSfhaePicyPs++oK6nBeMHFnx8LsjyDXmPXaRrMeUNHyAysaIDk2XtkBQbiXiMAmfVomDExERkmsKgAUbxYjhjMSFPq+njoUxvTIAaOcx/ykJchn1T5bIF54fNSKZgLEYjNlD34nDgjMHDeYgbkNKkpEjAYYuxMVZWNvAKAy1wxU1+YK3BU5eOQYjQRQ5dEEJIlg6HHmCAmBDIFTaZp7OjyY/RJVJmQmZgRHRuPQ5MDZvS5ca7oUA1SpLER1HQURdRIFQBYJQIkZODR2QoOwJADUAaQmwKKSKql75KkNLQSRy0jJQFRUQGDgGAGCcEQhF2u1mXOFrmdl9aJ9CJxvBoiObYoSoBmJkm2h10ImaiN8oeaIQBBAgBAG6srANQzD2lgNEZSZO+cQ0LQPHDmaBJwkbmZs2lOhSfvXcgYCDA4gNEE5pgQ0NAxKJBEQyHyamrKIANSoYAIZKN04rySAjMDAie0SNFQwUTFaFCOwkIhMZrzCVGZnPPKzlxQcojOkJxxPyhiCvw4HiuZ6EiacgSKlJCSETFllobAHpxTBBlHZcMxbCyi+pi5NAQGVSIyBOIxmPmfS0Wj44gfyxQE8TE1rQAjxFvNHs8VfNRu1ZBovHExG/FEktRENYpITClJTAIAJmJiCCj60QT7uHlTG6UjQBTSRqCP0EXtE/RqUS2axQSGJjIWwg11S5yIzUCRSIERCRVliOSSyx0wY5ZnAaGKCCpJCGQkVzFn0A+c1DFhhuZYDYJjgxj7mLqIY2k6ERMpc0ICSKCi4CQmiMQYIgA4AU9mNrStNIbiTI2IkljdxLJ07ICIM5+1fYx9PGRUTgsaura+m/MlTYv9fj+7OBfAZnWNFPPFZLfdDSJFkcfBFGBSFl3XTyYTU+m6qMkUdEgKiiHkMUVAI5Qiq1SSo/F7M26ciDHTniJKbLRYeB/csso1JgE7uVj00O4PXcbUAfRDBMdZmdshHZphiLIv87OjXzt6tdrUJgqIkykjVYrhEGnXoEiHMHRdy0SSJCh1Rj4LRVXEJClKTH3TtjmxpRHQCUmjCyQpmSkidH2PAM4FI0tJRRWMRIgQUhxMFQwKMzOLmnLPhiYmxDh6hZwjZh5fBpiZmQGUENl7U3EhOOJoEZkRcKzoJfZEhGZq6tmrjbWCTISIrEk+hiyh6aScZC8/+019uLdeusSg8Ps/fPf0+Wez+blGt2lWBfD2djWIes+QT2bHZ/vrHw/denG2qNUno7LIc5K2b8n45OKYJO63D/OqNJWcq+mLTx5erc252ZMj8hgmleTuj6+/e3l2sjw/Acbk4r5eq9QlVwGpCsXQiq/KTtPUT8py0g/7mCJz+f1P35dFuWofIPUXJ59Ws2PD7rtX/9PN+q9fnD/pYyqPYN1v1OWDA8z9vmsIcVZOHbt9u7s8+SQK9En2fZO7PhQFASbTvMwhmSQxEJZ02NyV02XuQj6ZHB0vJDGY6+Ng5MkTAfXRBlU164dulmeifddur95/6/J0d/fQPjTL5WWYX9r8pOjS6tVmlsN0sXj79i0ZD1HZpbMnJ/vtRhEsqDPq0n51teEQlsfL7aBDTFk55L7wE/K+YEe7/UH6Q07B+bwMLjIi4ItPn2dluPuwvvj6yYny1Q/ffvv7Px6fLShDdLjZbMpJYTbklT/UuzoF78Lt7cOTJ0fsOcucJLXgh6iH2PuCY9LVZuswq6oydurIZVlGJATgPQ19TDGt+nU1KX0Ifdu6vCiPpoZD3zV9I8vZ8bSaNm0zDCmbVIvzWX3YC8ppuCyrsm56F6iaVYqp7Zq66c4vTo9PBO2w3wwu5KKmIs6XTVN3SUQaJu99IKbduslyKhfzFg4/vHk1KULOuJgd3f74NifWvrmt2+PFkcvzQ12DYOAqL3nbtW0HstuLuc1hyI+Ous3+5mE/HOqjyzMF7Ou73frxFOSFz8py23YMnNAmsyl1vfmw220vP/3q7v0HAlyv1/v+IXn/sGlOK7/ebs+W022zMQeBE5hcv38t5s/Pnx7auFrtnr94CZk8bG4AjKfF09/89v5vvt9t7o4vlvsUX99sl8uTh6v1xLvfvPjqi9/+2U+vf3r16tsyd2fnp5vV9uHm6unz56qhHQ6yOTT9/uJkOVj87Otn6x9v9/fvy8XF8dOTLIT6sH9/9Q1S4ygLk2qeu29++nGS59ubPcz5t//ln/+nf/v/8mAuhNOzor0drrvtZL48ffaLwHmC4e3hzvIcHV9cLrTv1jevT2au3ry+HVa/ePJlNb/cxZzB6t325fJ8OKSh6ynYs89fOKO7Zn/5/FM3DIHjh6s/fv/6+6fnnx/qoRUt8t4GRQjrut9an/cpnB6Zc21KP354M51O/+Z3f8fTYeZm9x92L//k4t/8b//391fv1rfrclEm8x9uHkJZ3a7bKuT/xW9/ffvv717/8J7Q2piuVxvJqI7ttqtzym8PD+dcUijPXnz+9vpAvnz62fHTi/P/9A8/npxOzbjfdn5hu80Nu+e1Dp//6Z9s1wfgcjjcru52Z5NLC8Vt18NkJuY0meOyS0AMgSrncFPvKaqpUfDlbEaeaTKTOJimoixJ6ej0edP0oNzU7d2+W5wfg8Pt/XoC7pOvf/Puw0/zarLfbs+ePUlGu7438e2eF6c0mU3z4HaHtm6jaj+ZLfpWMPjjs6PNer9vGkZZOtps9p7h6LRoVpwAaHJ09Cmvdod3fTw7P213veXp699+/nd/+eqzL78+aAelm51OhroHg2o5VadRgOb+9NMTATy/uLx7ffvlL16s7/ffvbsHR/s2/TyQsGNVU1EANAUzG4a43x/quiaioszEcN+0u92u7/txRs9cUFPPztCYnJo6IiYeiRKiSkgEwRDMTEXBRFUsAgklQxvbvMi19YHAwKyTwbng0JkBEw/SSuxUBuKQ+5CFktgzOzHr+p4Ak5qalnlV+EnT1Nq3dbdHcHnIOMscu2EY+r4VEU3Dbl9H1bPTy/X9/eFQL6czI
jistxzcvJoQqiPq29aQ2GWeoe3bbdvXQ1LiFLXIbTYtRGBf6+7Qnh9VZe4BIjEiKiCBQlUswI+IZ5/ERpc4mkpKDEnBROJI1nyUJ5iQgkMgzpQKXzDEQ6w3h3qNnAe/YF+o9UmHcS4Yy5a7LhpmSEUcdiE4VSxDpb20w64s2Gc5dXXfx8n8uG525kmG1HR1np9+8cVvfve7v/b1ZracrVd3STEUkz62HLLt7r6qKjVOQ4MOnM8cs0QhdD6UBphSImZ0HLu2b2pwOedTBSA0HzJElJSSJkQkECAkI3Quqio59pkDFjGNAyGDxZHMYjBSNPCjj+Kxs+lxxnicfBkAx6r7EV75GE8jRMOPm96f52UAQDEzlbETFkwQyMa2EABEUPs4UyM+qkb0syz1+HeAPYAQ8CMtxoQRaOyl9RxNwWASSmzbopie+fx5Vt7udoe2L2fh68Xkp0P6oWluhzgw9kggSi6As9GdG5hD0u6hmQZ3VFUpRe3ifCwTfL96+/YheG9keVLZy4w4BwtCc++KOecGBbmCMEf0YBmzJdUkCGQevM/FRAxVjQCQHlWGEfhkKkLoAo/ojmSAQxoGEabpIoOi2EQVB+zd+enx/vam6YYnX345ncxu37368PZtOQllmaNJMSsxhEOzbR+uCwbtur5tq8kEpXl//a6p91Ux0669ef267Q55VhRZ2O4fmNBADodDNp1l3lYPD9OyHIbeTPvUAZBIkj61Xeu9m8/nt10DYZBWhrbXuhVJDFiWLpTz4FhFh83+sG9K54qJK8uinMzyshgdSix9L+1kNtkfNsxupB3vU5xPy2b9oPvm8sVX5y8+2bx9s7tb2XYfl/PFcv70+VIPuzA9uTxZNt167SnMj6Efvvz6z5sPP1jdDXQTJkcv/+zXi1lo7u6pi0uuZhZIkqOcmRARVAEI7BESbSCghugAQAcDEacH++mbSEBf/8s0KxXBO8axXNsew2T26IV7pBbBR5vc+Px+FID+qQoNP9qH4KOd4ed/81FvQhwvwmPm7fFPR8fJx7PzsziFZmTmiJwnLEgA1MyiKhg6EkqMZgRiJmQGY4k4IoiRGiCxo4EALDBVbKQxDp3ECGZI5Is8OO/LST6b+Dz3kyVmlVEQdgAgmgBQARx65qCGRmN2k5RIzZgZVC0llWRgijm5wthBNolcoQsAgCna0Me2NRs9PSAiJh0BwDAEQAMUwEFMjYHI+WDEkFRUFRQdAROFDH1OPqAiAiCzuRzQgwGYgIKmR3QxUDIZNwdmYqqYREbbDSGaReeJQUgHGV0sKerQSuxjHFQiIsY4IGEyNUMxI2ZTRXTIxORAQYSdw0mOZxH6GA+9DFEVOTCCkRlGUUR83LSkAZEfwdLkmBBBEYCZmLyBERAijdErAFAgz4RqROYd5J6XZVgWtMhpXnLGxo7G548ijMUCzIym6AIg2ag3IgCijk94Q5IBFMQGYg+gxE4NkZwaIWbkkjkRGQRIiZQYQiDOnHMYMkdk7JHJsUMfkHzy3sChEgqAEEcImFg7ZEZ2iGyGBg58ThgNUU3ZOQMEJjBUTKADWWZGojIesqTqHasBex9THIvvbTTt/XOpaFw1AI4WIUJ4JJ+rJUfOzAjxkRT/mPdUAAAZ+8xAzSRJjEnVYoyxjyIaYzIAVbWkj+FSwMedmIIjJkRCtgQWATul3qRJNKi0EZJCMjNAhcd2T1EVJTKQEX4tH9OWQA6JDPukrSIZBFZGF4zYSBVlIIQoEbllUB8yhcToBcipsaTYthaTJiFGA0AS8l4zj96ciAOIyWIfAxcJQB2FPANoNNacyFORDAAo9SJEqR4K75BN254ZA3rphny5bPt+spjWNw/t9bvi9JOu8U2n5axa3bw2tfzpJ365lO2qKrK6Hg6HgxGRcwyj0GZ55tHM0KIZEJB6BmZygOaCUwVGZBqvrZCS+hAAaLftsoBoMpnNDbK+N2Q7Wi66wy0G32Wy7zoGIJEyd4Fcinp/l+rm+mhRvvjVxZtXb9frQ+4zKCQt2cdQ74dtlyyCYycpAdjQdLGLjOiZnHcCHZgNdZ3Pp1mR9Skxk8SOA3MeUteNvmtiSqLEGJgtRTMbUiTEIgRkdsHD47WImdHQGEelk53zhiPuH4ILoy5q+IjYgvFGiMyTH69YwD+vGhQRGUlVwJAfxTVAAB/CIwwb4LCtT+fd0fykqJ7efljfPhxevHyukQaxPLhqNo8V9rZvk1Qn8/nZybtX96sPt2xuGCClsO13Bl60C5jNZr+JvSsy18d9Oa+Q1Kz44w//+OnFF+dH06v1TxeX1f5mKLMiFOHl80+8cLfbRYvT4ymlXlK23/euPD45el73+w/rB+/11fffPb+8KJlVd2/evVML+XQaxU4un65v11nRzY+Phy579eZ90w2zcjFbTA7xPi9n5bzq77N+10scdpvrzz//1UkxY6Ku713uPbuAmmIHLhOA/bAVYOdR2b34/Kub6xsXaDaZb9aHZjMMA55eHqmoPbbYyeO1UQyN6/rAuVXH4cP9+8lksb/esvmiKBtnTdolsMJnQ71u6kNZLdrDToeKubn3bddqKPLDoUuDiaI6KQprZFW6MKReDp1VVd8mb5n1gSFOj0oSv1/VsG3NuSwEHAYAnZ9Nr97fhSxfHh+tH25TUnYw9INTnVRlvdtP51VRhd2hmc9nQvGwrhV1NpuELO+7PSLEPlLmAvs09L3UgEKGoJR5ShbbtkfkYpp3dewPm9iLd3lRFbv6gGxFmelY+DjUwo59ANGhjeW0zEt/e33v/cPR8alyOvQdBzh7Oq/3HRHf394aDOBiQq6bPg/eTES6OAwG0DUHFaimxXRWTI9CWw/rXTM7nUA+26139/e7Zqmz4+mHu7eXZxff/+F7BizPJrvNBiK3++3i2bJcFDfXa7pfYWQd7LDbV9Oy6/qul+t374+Xp3WzbbUfT0FMbbdqmD2orNfvXh6HRh+ablvvDk39bLFcdLv96uHOVZqHUHcJyX3y8snDm6vpvPiwr+cnJ/3dPQY4muU+bA91dXJ+dnVz7bOp+e7Vq2+effZV8KhZfXn5/GF7+7DbnH76abeRGZZ//ptfzDG/+uZKD22mZX581qa8yrOn83wuBWTm5rO2jqAz65b79YO0rW/dcnpC4CX3wdP9u6Zv4wDsjTbvP3SZP788m86r29tV3MV//1f/cVYcTQqYhXn34e3Engz7Q21UnjwlKh5ufzj9/MnTo/D+pzdiweKQyvn6bnXYflhjW9z/oZxmICH6PJ8s0SrgXTYJllW+cDevvuuj6/2eQOr9KmZ09vJPhyFVVcmqdfxwev4kHoVNs3t6fvrT298vp7P5yeL23bUrsqHr5i9Odcq5yHRWx65+aGKosmCxM3NZ6FP35S8/X725TfXu7//+bzNfEsFiXkxPyndX+9OXz1brD8uzxeGwf/nyF++/fcPNcPnZot9dLeeeHd9e706OzmPT9F105Vl2cXQyWW6vDlnZt8N1r8zBn5ydJe5Pv3pyuKn7OESJ0ZeLaXG/6Y3z5+fPN6v7uu0++cUXIu36/V0xmZEv
fnr95mg6SSjrw8H7jNA9rGt0LjjIQzEjykFEu9Oz/PWbq9vv1y+ePNvs6u1h92T5VDRNZwvPRZfibnWLyZzLQjXFbphkR9f3mzwLGHyK5Nn7EPIicxQ49PHQI/JsOT3sWjc5cifL6rm1b14/vVzGajU/mn7/xx8+/fxLN69eHP/i+5/eR11Optn6dj2d/8Vk2tztb7CA9fbaM1+cLx3YwzpNp7/crf7H7e16Vp2Mp0B0vM+1cfMnmurDfr/f1G29b2tC3reHIaYYU5Q0GkbYMQA4ZAAgIiQIxJ7cOFsTISER8ljGCYRoGIc+iZiAmAolBR0kATkEA1QRDYyODA1EkmhihLIsPS+BMldU7JykNPRd3zTjBlbSELLssNvsd2tTKEJWZVUIAREx+N3h0LR9KBeKvN00ucuqSaVqu/2mqibJdH1/Z5qOJ0tNKWRe1fJ8Us4rGeJuvbq5u1cBx568p8x8FsD4YbXd18NyOnl2+pSokVhPysI0JVFEaFMrQ8zzipGk79gTmSGymjGPi0xiH5AYHzulKak6xwnJcMSBVnkZ+m4VY5+oBiAi6fsDGLPLFMD5XNMgMkyXn8tQ7nfXwbnV7qoqT/Kcog659yHk5EN+/KSzUBYukMow1NSePf1q/uZVHJpyUk2OFrtNq8myIheTEPK2PxRl3nS7eeAExmGKxIjooVAQAyPi0YovqYvNFgFdXhmAIjN5YzVgAkUU02ggyKzMKmPJhnrHBqxprLh7JOsi0ji92GNT8ViKR+Od9uNedyR+IpnJ2PGKj3TexyZwGnUgHmnYOkpGamofe4cBQIkRDGns93uM+4xVfPbzQD3KT49zFOljoREiOrJkY8UPQGAUAQ8wmxRRU8k8ycIiL28Pzb47yOrwr4+Xv5plt53cbuuIAI4MIfisGwZHMAt+Oclz1SoQg/W9pCEGZz4jSwmIwDBJ8kVAsdJ5AqucQwMmckSoQKBJ1MBE1Ai5YEBIUZxjMTID750qDGCOSHWs90ZNg6iBmSoBaDLrkoYAxo7Qd2aN1ueXz0+fHMf9/uH25pMvf3Hx5ASGh5vX3xJB8B5Rp8eL2XJZ9+thv/XWeXAPqwdyIbDFw3Zz/SEQDO3D7T5u1g/Bu2q2qJuu2R+OF8tmHxV4UfhU71K9gyIzEUKoDw8izaSaStdJsrbrGM0ADpvr05MnHx42MKTtepORR8DucJjP/K8nx/w6rnu4eugocUrifCBmTb1TjRpVGo24mBVp25JET1JmQavs+qfOheRn28UnLznLoO1XV1e3u1tMMp9W274vsrxvDxLx5OmLbH4KH17f/v6HYbe7LBaTyUT7Okr55LNfNWeRHjbTXnxNJVUcPBAAsoy0Z2ZkByAmRgzEY4WRIDDYQLXIu9d8dMHlF5KTIRowEo9mIvunKNg/C5bZCMB+9Lg9Cj0f1aBHs9Cjh85sVIDwEZMLH//quFx/TK3R+DaGZcYk5uNn1RTJrPSBASlzPWgSMMeDGgVWjMSAhEkBxIjYATGigRJ7MIUoNvROUk7mIJKlJIYcOAcsCw+APveTqSsrzArM5oIOEFWUVMbTS+TFHBoRs4IYOAMPrlAl1Rp8ADYySQmBWIDYlUo5mHPKpGpDn4ZhjE6rGpkqMyAHJGYEHZK6GIc+AvmcfMaOCcUwpQFhdLwiIQXkYEQABMbgcvMVgEEUUhEdFWenZEaExBANENREQAVQZLx4KAKqgEdFHGwUDZIQJpGoAAlovLKqPGoNzMHA2Hk0M02PVzwVJCW2E1BkqpMMqtJpUjNEdqPuTSZCI2fZjT9LBvLoHKOx90RI7MEEAWikywGoRlQlREdYZm5WZsvMn1XhZEKzzPKMHSmhmggaOjIkQEYg4FAoBHSZjVxoQmAeRWkFABUEA1GNaSxnEhUFIOcFHERRy9BnLngFIvYuBM48OI/OgfNGgZgQyIBgrBAlUuIGTCMm08K8F8u8QeqZ3fjpERQJ0NHjDgbRjMmcqZKKDB2FjDkYkpiOpDxCMDTPTlRVFB8z0P9MKlId030EYKKCj78jGBiNbiMaf/DjKgORP/qJ1GTMnokmTSKSTBUkqUQZ6cVg+NjJiaRqAEb8qOKpKCWQWrAV2fc4iA1ig6KBiSCCEY3RbFADTTSaao3RiByjA0A0ERMFdGCjTVnJoXZR0oAApoPzBIhGah6SDuRIIyU1RxDbLtWtpgRAiM7MgBi9J0KToSAypW6I3nnzKCDsGDFh30mbCCc6EJmCAIo4QBPAQcg72WEiqxazpqu7nVTLebO9TwbbzeHohKaLo/Vq6x3n5VHbRLtdewy+KGLfsUMElZhUVWJKYi44l3kCXMwXq81DW3feeXQMSGriiAhVoAMMyEEBVRJI4uRiz8PgQo592xfVxCGoymQ2PzmV9XZXFo5xut83KSo5FEzZZFImTX38cL3vUrq8PJ2U1e3Nw6FtjXw+JREiOLq6vneeQMBIm0OjSeezAgBQ1DtUQYgKhoZY5HnbtcOQCucAMA7JMTvnkMDUQhbQgBhEgRDJjAEDUxZcWeRZkYU8QwJGCuxVRDQRIyISsSI6IiJARCAKPpgpIrPj0b3KxKbqnBsjAKppdKQSoKoGH5B4XJ3RP0HzgNk1693rTX35yYsvLl88OdE841p2nz9b3H+42X3YdEOXYhMCtYeucQ/BDjAMZ0+e7Pb9ftczOQdJEnnKrO+GRj+sduaG2clzjAYxffr88yfnX/3//of/95OLGfYyNEObdOiHPLjD9sCipeP+vsmQFsUyDvupn3bNHQV9+em0WdXbXZaFyWF1wxybvsmn6frm+8XklBOy6e7urt82VTY/Pz5tdq2Tiqn/6c03v/rqt+Q8iaCK8/iLT38BVuXlPAn00fJpIB+EYVfvZlWWpKu7g9F0MZvHPr368bsksWm7dXlwnE+yyWx5jM471q6PKiYpKprLgw+5D7Tdr5hDW9vx0ZME3enzk3rVb1Y7V7l4OEym5cXLZ3UtKgNac3Y5394fJGq83xs4ZB0aqA9dPqliShIPs/lxO7RmyDmlrjHT3XqDcdZ0A5xpt1sN/TDEEBM+//zz1XYlGwtl2Kyai+dP8iqbLyb1YTO0qp12PaSF5EUmsRMxTGm7+ZCVOTMOXdxtALQBHbJJmE2yoY8uMCkPZhKTGQ4paWCXY1llh31jDrMqmBRR2qZezxmnWaYWRazIyzR0bdcYhXK+ADBHdFjv0CDnbH1/x2DVfEnB9W3XN8aYFQV19aGqpq+//0PTpelsqijeBdGGnZHDGG2z2+72N1dXulzOZtNlGuTwEEFtPsmbm/27n14/+/pZnrm+388X+YcP31+ET2Ndny3PV30rQ4IUK+eKvNwPg5ldnJ+svn+/38Xzy9O7q9fDbkixefmni/EU9P0AgrOT08N+Xx6dH8R6gfnsAi22dZeKokuioIdVw6CL2UlJZbtu2rbb1fvJ/Mj66DPfxsNJNf/x+29ny88pYlEVok518l/9xX97d3u1/v4DJ2aDodncvPr2z//sX2wh+8WvP7s8mfYPezkMk6ykT+d/9Ye/+/WXz58uzy7K4++++cN
sPm33m68//YocHe5309kJBh9yPDo+2zX328Ph+uHN4dCfLj757LMvHr753qM55DTg7cN95ikD1yK7jG7eXz85/2x6kt/9/v2Tz385tJjC5PTkmW0Prk5RcD55Xs7DH//w77v1/RefPl8hoeCz5SyH3PlfYSiWJ9nVm78DiOCG91c/PTt5WgR3fLyQhEn06uZdKr2fXz5dzH74x397dHRZTGYDFGi5bw/9TT03toddu90t80z2q/16mx8tD3taPXQZ+N3D4eZu/eUvP3Nl+Se//u03//GvMdrq9TX12uya7WaolsdtE4cu7q9uzbJhu+4fbnsZLi+Prt+8d8H/+Z/+Iu7vJ667326vh+LrT/+EeJJPXDUp/2//9r93c91vVk+Wp6/fv/308qzMORNdrXtCt9kNP755/2w5X8wmE87LbFIUKE4ebm/IuPS5E3n/9qcCChU7vjxtG2Q6SExJogXabA9VVYrqtJpWRZGGGHeH3X5dLqppsSxy3663WZicn57G2FuEo8myb9q62508mdf7pqn3y+NZEwGBTk6XZNZJ8sFt193seJEGvHtYkbPpvKqm59evNwVNBYa+6wD50+Nn/pBSE3g5J8hd5k7PT77/4d2zp8/7dnDl9Fd//iUq73d1Zvb08mzOMHR8WPelX8xm50n5v/yv/9eH7cb0o6vIIImY6DDEIca+a9RSM/R9GgR0SNK3HREPURCBHYWQJUmEY/KMBJSQiIjZG+o4jjNT8MEzK7AqIJJjVokpiqVeJBGwd16J0IhMQIGQHRKzQ+8NzOXBhZIpB/YKmKSPMfVdrbETSX3fKxiAIKJjl5UT7wKiAqOoxaYDlTwLXexEpSzyABilu7p64x2Z2OFwiF2aTPK+j8tl5X1gx2WRi8p+/bDbPKSuCy7z6EIR2rbr6nbd990gRRlevvzEB2sPdZX5pLDd73MXmHDot2VRmqTYt0hoxIj46ALGUaZgIEZygEDjV4qgkuKjI8aIHAWWlKUksR8QmckzZ6oAhnGIwMQEKQ1qPJlcbO+vxZJKbNsNs4u9OOainOwPawrZyeXzdvXB+XwyW1xtN5Nu/uzlV29++n3bdS6E6TJsr9djc4aIdl3nssqHLIoRAaTBoWNmBUySFBCQQggQO02D9E1UIjPKPLADZESHaKAikmB065giAjN5x2MHcARjZiCnavC49DJEQKRHnhCgmhAoP/obPqZvAAwSICDy6OsHM0AgGLf+4zYXET8uxcYBeMQWGSgaaAJAVBuJo4iM4322AT4m8oHAmEHBwHR05KgqAjCywWOqDQ2Qx6idIKJDjGqO2OeuDFmSKvUdDMMM9EWRW56bKoI65oyDGXqHoFpkQXpBVEVLTiGXpIkcuRwUidAZiIE5JRExAOeJgJMoEwEpAPeGxCTezEwkIVCeAxIik5lBSsDsCYkIlM0wqaDPZLzXRmHA3hS9H1uduqGF4J8+PZ9XeXp4uL9ZnV8+OT85wubw6ptv6vXm5PR0Ml0sTy7AhWa/MRMfW5S42ax00EnBqds/3L7B2BSVv729i0kg9iKJNT1cXR8dLWmIh7o/Or+Art6tN9pKg1uXB1I1MRAdUg+sLvMG3LXdpJgdtjdIMCmq/eGWFDmwR065b7ukCl9cnK7q6G0nZvtts5vtyvkTAuz6lPmynF2YRa/inXXtZnE8iX3X7VNM0Bvc//6nsw6++vyZn8L8dHn/6v3mfufdUIT8/Y/vPn9xMQth9/6V3r5Pd+tuNcTG79/XFPXoiVcIzntxWZiGIIesAIYw7g/RiJnRRiqLjF4dGGVLMhxX/gMSIHat3l7hk880zwUSO7bRooL/BKv+KBR9tMAh/bNU2j8xiR4TZvixd8ngo+QEHwdRABu1IXw8U4iAH41H9HhGfhZM+6YDM/bee3IMiNajAnkGGhDI5wmMFBHIAyDgaPYYhSw1sJioT140Q80CAjijzCB3jOpzDsGQ0QVjRvKGrIAMo8cQFciIwHtyPELnDckoQJioGbgILgMQ7XuUlEyRGMiJmLFo21oayJRSNAMBEuBeFBCYfeDA7Ih6GWQAEnaQMWWlzwoikr5RcpRlogqE7MNj1xcCmJHLFQJgQIho8fGUOQbH5IgKD+hBjJFMLA6d16RR46AGQM4hOSBMsUcjAwIPgMpZTgoYU+obINUkMUUiHmOEjKxoI/VGFJQdojGlijw4fC5Fb0MHg/SgyCzgiVJMhgb6WBPAROw8h+Cc88TkmBwrjkMkmoKlTnQwIDVVSI5dlfGiCGeTclnwNMcsmHPALI5ZRQBk9M+g9+BIyYBIUIA8EANAAkL0ScVMBo2MjsipaByi4vjqJqJJwQMwsFdylFV5yCnPNXh05LKc2SUjM8dMCDTKS/LoolMV6800aoLkmLJkWWCPRohADIzI41IfhdmIQARERZIoIGeiQiPZmhgZkUgl2eOGwByTKj66gv451nqsk0MaDZr6aF59tPYpGuhHsdVUDZKqApCaxigqKjFJSiqWRDWJJjABFTXT8eZg/A8rmGNPjCqRxDgpRbB2wCZhF6UbLD0acNEIETWpQiJHyAjoDRETQ0RkJWQFA0VIYKoYQFHNVJVMFDSCJWI2o5SQPTsm02SGoETmSRKMpLI4WBJCMFBg0qKAIrPYB/TYYdcpQY6MkYS9sTTaCQ8ccK6JJEEg3w2NAxOLRL6tYzFfggmIQfIYsb+7r6Z+ejbfxkZiGzf3fnlWnJ+2D+vFyfnVq7dxtc6LiSuzxDjUzWS5rFtBSSCAypqIkGLskz7MF5PjxezuYTsuo0aUtSI6coZK3iMxoIJa30XE0NZWTrxGS22M0QQhAVZHc/CqsE+azCOGAp3NCheHmM/yprbDWlc/3cd++vTFyexoIt/e3L+5DoPkjo+eTOpu0rSHoY9ZmZN3Td2eLHPneYgWsryrUxQdulhRLgJFNpGUMqaWwREiOrWEaOy9DyH1Q+5cF9NIT2cXiklVTibVtMgyz94/3t08Uh6CI3xsamdHhGjqvAcmJkREQiImNQthdEUiMasJAAQfTJVG6RPJOa+mYuadB7PxrgwAPvvFV/223V5v2fz1+9fPnn/ePDzodr+SbHtfS2x+8evP7zabm80dukDmThazGK1ivtlvo4PLJy+3Vz+WpTtelncfHpwv8tPTH66+nVTlcDhs9uuLJ5/97bd/Mzuu6rYv+lJ5fVO3J3R0cnzZt22mBedhNinu7m9zTwUXYRK267XsNaPQrePZ8Vfn08VPV99ZQS+fnxS57d8niXJz/SqQny4WhDY065iMgILnftCnL1427Q5BZSsppc8+f97um8snTyfZpGnl6HgqEjVGAZkUMyQqQhFj7z2n1J/ML5qDnx6d+ZAP0Z9cnFHfDTFtVg+oYkTocyZ0zjMYOQyuWK27+XQiw3ayKHd1JOOiCpfPnpWLU49Jur7C7MdXV8WUZSCJUWOdVC1hOV1wcE1dd4eOvQNUdJh5rHd7nwVQ2u0f8qwss7zZNJu6Y+8cAwU4Or3stnF1c5tS2/fSNtv15nD7cP/V5098FqhFjVZOs/bQHertrFx27eCYLc
Rqvp+cVyOp0mwctlu7e3Mzue7V7dXdTN5vb2rIMguFzOP/74ZY1+0dnmjV1cRoMEiKnXrStX4Hg+n1sVqODw/NVitorXbl5ZnF6Uo3LzYHuxaM6eLagIXZe6aKtlW4wIq5HPAAEAAElEQVS3mkaK0eT+o6N6PJ2ORs3FCp2fTqbjyfjw9HQZY5hOfFW0Xdrc3nry6nR7dxPZSIyLAEiF8/dfHu5ubB4fnjBZORpdnC2bpp0U7vDVaShH43HNzBFWiy4iuKII65Vk4zJEg+V8mYwC0aiu+q6rqoJ8iEl9CALE7NSSIJxdni+XS5UYCg9qhBb7FPvOJAXvU+rJOV8EQIl9l+/RVDWpiggyF6FQJed9F1PfdV3qRaUI3hFv1qFkRAci0iYQE08QnGdisRxzyIv01qU+9r2oMZEBEpIheI8g4gz7pJKSC0VMVpZVantECCE0KwGiPqoviqIsU99WVQVEXUoMxEVhMaZOkBED9cmmoQrV1Jiw70UTJAUiQFMxh+iqKnZsAMw8nm5YihIVAIjRwCRX9RIiqiMURjVl5JSki+QLp2Zd22jqB5eKAZMzJMu85aIEJlMNwbut3e3d/b5rkkTru8vZXGJnoClGM/TsbCjkoiQSRavSee8t9hqTd57J9amLbSzGxWqxIqLUdRKFmJG4ma+a6qwoPKF5X0iMBNCtloUvI1MxHpmJcyStakoIKL2goYgmzPBRsBQLJEFFi4EKHAqWTFQGZQSIGDJaNgRnwiqY0biYk1VryAp9EbzKSYE8r5JsLrLhzzwIDNZBs+zEyY3mrwNla0cPIGRm0IA4BQNClIES/cXDDFUSoOdMVRgSYmYImsNFQJQX3wckkyTKd+moCDasUAOA2vCGTDN3Zpgpaxr2gRGJc7hPUW1wghnmnh0AwIys0YyhXE+eYZ0kUsodaio0KBhZWBLMc78M37bhZWmAfKOp4pBHA0QFU0AGJISkYIE9oOtjBOeZiqnZZVyeLbuXFy0CNVKQc3VwnPoRcJx10CMhF5u7QH7xuAnUoPHGeNwmLJjbFS4EqoKY2dUV8Hhxcr6zc9WP9q2e1HsHnotHP/uz/viF5/GSp97XglDu7G1vHyByv+y6VeOSVeOpCAZfekDtE1lyoUQCkASqjIwEJgIAkMeArccNDKxWylMytEyPGeBUmrLlwNYxwF8eLbn/PGc3B51DJVOFMwHaXiOLVMFgmNwNgUfMU8s8+xtyY0gIOW6iJoiQgJ2AAIc2VO7GG83mtrrAEgHMHFDWWZJC7uQmzsN8rT1lxTBLVtkogYPgqYM0NLCNcvpMDem1ZSYLF5YzTIAAyPnZ1m9f1ZTNZUnCNEGOMeYKLByem5lj36TYN21rpmWIoSwplxTSIHIiIRmpCojkRkFVUckpMRVTBMvoVWImJjBTlUEsVjORGKOmlFQzaVxVTJXA1JQQyHx+IIKZKJOCgWacsQGgqSQiBypiairMzHn+owZikCsYUtIU0UxNgNwgFiKoCiOpAZIzMOAySUJUsf41hB9MGSlX44FGAKNc3gYmkggZkEwjEpgakFMTMxOLgBnZll9VCPNCRTRhQWPyqmKQAIQdyho0BsCiaWhfFHWESBkBjmCkAgoKYqrqQ4GEaJRSD+uD31RUJQfQLF/DkAEgf7+SIjmXTzQpJWOvmmOupGBALsO3McfBmHMPpqkCkQGpQDLjwerlDBDWMHhC5nXmSwcBXQMTmSbqU2y9LyQJAPV9H2NCdClFJENwamIKFFNZGLPXJNm/aWCYBWPIfPRMBIN8VBAiO046mM5UlZkIEZksH4x5wWGAcwMQsQuYlPS/FUADgKiGGW4khpb7EVUNFIhwYJurqajznpAwx+IEQQCErZfUmkbTZGAACqi564yGeBkz5aUeRDKHpoi5uJxMlZlTLzlZrArIBJLxTjkQm5XkrBawJaFAlhSZTAGZVZIaZlE5pWhmIkkRRFVTZk2ZKUkyYERHMaaMsSIEUMtGrnwOJAOU5ImSyrCmoUIMLld9aoIOwZsm61c9ERJwCCF2vaqZGbEXEyKWmJCYXNG3kV1IFh06QGUuTdt4vhIvoQpchAKA2Lply6FQSbGP9XTqx3UYFeS0b1uF3P0hqVloMhNBchx86jtC9MErua6JGJx3qKRMWgSKpoAp9alZxlBXzrsqlKuuj103OxTuVhu7E+d8s2jWbjSkwEXli4Jj7ElN+ziuXIrcLFsyLRybCmIMDhGx77pQeE1CwZV1kRC6tlezKoRRWVZFNZlOvHeIYKYpZcadppjW4j0SERL5EBxzCAURsnNEpAqEaADeMTODKWVKkeYWAGNkZhZNuUTEAJxztjYOqqoL3rKsChrFNN89m3ofwAxVxdRzWKf0hxupx0+P968fvPP1Wx/+/Ke3b9340z/6CUcop3XfnY9p9Xd+45vdRXN4/3405K6oRqUEc1x4DuOyqqaT46fnZREo8P3P7r95787udjW7nG1tlLdu3Tt/8vzo2Yvv3/vefF48Of35QmafPzu+bOfXr+z2q9U7d67DJ8+Z9DB1nSbp087W9nl/OK4rmZ83510QFzX9xZ/8CWN/dPkyPWvvvfuenp98+MMnIKGN84ePHn79nTub1biu1TvukiRIJs1oEixOCPH05NXWbj3Z2Hz+4PPx3hQBurYH6cYOqebNsVTTrdOXTyA1PLHZi6Mb/nYaMfcQpbt9/cbs+CzGSDA/2CmfPv3w6pWb0IYY+yTimKVtmxUAhbbTJDrZv7Yy31xcHlzZpoKLzXLZLv/8j39///qbN9+69slHH452JqnrL8/OpYshkK+LrmmK0qc2Ok+G5oInh0l6FfGF72Lng1NlVpQenjy+3/eLplsdHR5fubZbTuvo1I1d1RVHJ8/ffvfGeFo8ezxfzeOzR7R5cH2xWpZV5UPoVM9Oj7vYGoGCgMfpzuSnP/6jurQ7t950IXCpCtDHCCbYopi54ESS9HE03dzf322by9VqJmdxNB4huNF0nFSaZlXVxXw+895tbu9dnr+KqUeLqtH7AhwkE/SUQLvlPNSjmDofCnDQrFZUFkXhV8tZGUI13Vm1Sz/mi/nFxt704uz81bNnmzvbN65dIeaXZ2eEbjKetk0zX8XAbrOuUXTZxyapK309Cs70+OhkZ3erSxqjAmFKErvMMOXYK1WhLKvgOaU+mtab1fxsFTCMJ/ssTT4K5s3psmvjq5cH+3uTjSvnl+mzR7+4jGejTbd9Zev+Lz7qI127djA7So8//3zezQPa1d395aVfLi/6tNjfOrBeDOTqjWtHR8cSm/0r1RJXpUB7etqe6WrWLCvd3nlvuo2e9eLJi/FO2rwzMlvGxaX0nQbc399evJw31k929nTe/PgHv/v2G18lJ/ePDjttb9y7TUnOTs6O58trt27ceOfK5ub088//Uqk7T+ITX9ndWRwfyauTCePG5nRxfLqxu7U0/eFf//7d/TfZiw/u7vtf+eSnPzpfPZle42I8/vTDj977yrvNiR0tL29fvfvHf/UHb105uH7tRqA9dr5LzXw139meNsvl4fErr35zd7Rza6vBs+5
iWfhic+vgfDmjBk9nJ2+8/d7e7vVPf/rZ7tXdn/7sz9i7eqs+uHWl71Y7ezfcuPvspw9dwVtbI1uebEzqRbua7lTvfv3ugx9/tL+5p05fPT1++60bbV8cHl++dfuNs9OjJq7Ojy4A8drt/dX5qgh9bJfvvHf3Z5+9sjCuN+qzZy9Tt/j61755+/q3i53yxptXj44urx68/c9+93c/evIZ2wfY6pOnj+9t+uVs9fZb469/4/YnTx9E8k+eP/vmu/eK3YNnrz5/9703/+JHf1XUd966987HD5+33fl3f+vug58+nZ+eBd6Pgt1qefDWzX7RfO3dW48/fXJ2/OrarbfKYtTOL5vj2Y27+4zVi4fPZHvThfD82dO0XO7sTjdGG82s3ZiMUiOHL17evnv7/PDy+aNXzoeuHe/sTs9enp5Javt4cXG+c22fi5Id713dVpOTo1eTzUnXxqqmva2rr84f7V3bPjk9uTy9nG6MVKQaV9tbW83ZeZLoHAbnzGE9qo9fvkKAqqA2xbZbjLcnfTObbI4uXxyNJmH3yt7nHz3AsugTr2aLm2/ePn51TOQ3t2sY5heWYuxFulVXBrp544AB2uWSwFIU5xigNEW1RpNedM2qb+fzZXBcFi6ltGoaRhMEcsyMzlXBc76t9IGJM9NQHKN3ZYyRnVNF04QZoaIyKkIZfMFYMGjfgxogqImIOkAVCZ7zwn+KLRKLpFyizIhm4nzmAObFeKVcQqQmKYFg5bgBg5TQuVFZClqRwY2xr0IomAomAlSR2LYWU2CsQyiIJoVn09XiAoliL845DKigRGRmIbikIiKpTyGIRXGOmMjAYtt3bQ8p+cJpSqaWYiQCiaKSysIRSuxbxwaGqzaBWUop37KKiCTxngEwSbbloyhwcI7YlSUoTkabwfPpyeGiv2AE0JQ7tVCSR3SmwRFIil2rmhBZYoeU55upmV2MNyZmSS0haupaF8pmNpuMRyRYVSF2AGixWbTMwSEkJjJzlEBjimRJ0RjBJEWNBC7n3lPsiIKiqTINXh8DAjDIS9pgmWwC7Ag9gxCIAGTEiRHmrlp8fbtvg0Emc1Ky7IFD0fgazTOYIgDWATVDhWxkMDUcOultcE8AmEkWTsyAUBEAjHGdvFFWTirAPOBCMK/3DjrCEMxZ+zsYEdzgaSKyITNGAIC5hj7vQLZg5PQLoeZbbDMk5sAGRgSWBIbK+QGemtfkQSlngJByogpMFfJNYPYsIa4L2cDAiHltbRmsNGufDKEqZF0PDIhTEkKHjjPJidlHSaAU1EIo+xhRgUwxjByOfOoXs4tNEk8yqZnMdfO+tb49n+G0aMuF4Hj74N3Fi4/6blE4LowWyxZ7s2iEXFYg89llSkwwa1tfjTdujS9evaonV69evXs0m50+nVlBNOEmdpsHVT9frro0ZoeLVT/rkMuqHqVVw0XJnti8poh5nod5AqXDYDCjvEo/qBmIRFlhy7MdRBhME4iYJ9zDXNEQyVQyB90MIROpcfC7rQ1ctP5sIddXAa6lpEHFBMBcWcWwbuAbxi+SirFjUyNApHyLTlxXaboVNzatQOkXPvgM1U6mqAYEIAIDwQUAyUyGIY2DhWcw2mXC+jqxaXkljBBUcD30B63RDAHVNM8xQAdX3OsmQTXximakooCsYBoFnRNNBsrOG6iKiOROqr5tlm3TNI7LshpNRz4UiJxzf2syBhiCSEpJwVQkZa8cEpJzROSdGySswfJleW4bU68qYklV0TL6CUwkGw2Jc94wGz3AOTBNRs5ALHv1FIFBJEICJGLHAAAhnyYgScy+VNUkqadczcYgJmiDWU9Ah11CE0BDVnQGPp+yLIu7mc+NBmialAg0GbHLR2WuXMsP6Cy1sY+mAEYEhKIpeeJhnqcCoFE7ZY0pAhKwqWmWd5kIQVUjIqQkTM4A1SJIAiLLBiUEdKySGDGmyEggYpb1ekkaAczWXYqEqGaZPZ7leGSXcvUjZN54NAARNSYFQBXKHkZVYIwqCoqKBCh9ImIzRVDTaOy62KiBRSTn0CB4nx1j2ZImYAicc81InhyIGCCIJMmS2sDVRUl9Til67xE0hIGwrAJEbACKoACDls95xKLh4ADKJ08jAEBFRTOmHFyzPB0Xk0GIz/IQkq1LnwapaKmrQj1mDdpEIJmqyuAUzKAjEETEjHUmRJc9IYqWBBW067TrtG9BEoqhGhgQUA4tEuZOesuev6ytgSgCGBohgurgj8ynrGHgAQ/atgEaiOQTERFCVGbMhbCSelABSSZJNKmKmokmAxFJaoLeOXbaKwaf1U5SNUloSmAk8NrmS+zI0KICGSOBmmePoI4IxUiVi0CIFkV7QVFICGYqxgyWIuVTsypYZABL0QwCOVNxCKlrHKNKS95S05Jy1C5XwrnAsU0WY1nXMWo/W6Ehu1BVY88BTEAMnJOYyACZJQojUShUUr9cFXU9mo67ZZNNg0jofFEUvu96U2EX0FmKnZmOJ3W9GY6OTs6lwQKL4MqyijF6TqggSZ2zsq5ib8TWL3tP6BCrImQYGhLKehwhk4EhUVbWixDQqOukrMN0c1yP6/Go8syEAEimoqqmGQtviOSYmZ1zLoTCOe+DR0QiQiTOMjQiAjDREAJGYsy9m1mYJQAPsO6NzcsJqpiDzAL5efJSXC4VISDIJyXE4FnVCIg8Kw51gAe7O0zp04cPRtu7L04vkuOdzcnWza2f/ex5s5y5yeRo0b73rW+eHh5enL6iULxx48bp+Vw6wc2w/7W7cXJubRt1tXflyubORA22Nnc2R5PDjx4awmg0+vmHP+k72TvYuvLGQdNLqf3J06dV5T/59GfNeVdVo6+8c+fw+PD4+TPbXqKni8sjEZkdLzd2ptu3pqWUk1n5zV95ZzZbbU7Lh4+be3ffnV12Dz5+wKjnZxfjjbLpmouT08I7KoITSc2qrCpyPG7ayhXtbMFKXkwZ+z72y2a+6rY3d4qq7rsGoN26Mtm+Om1FXLvq55eTipuzM+36ft5O9q60izg/WeqkmKwWKFzU9XRjNL9cIWE1qRR8lWC5lMlGnazf3Kmc01DSqu/Pjo4mo7H2y88+/dlkY+w81ePtl22XTNG0Kj0TSNcTQhm8ZfMnsCqVVSEquS6hLLmo8PP7P3r8+cNijJer5FBGwV1cLDYn2+p4OT87SceXl0sRLgrXt/N41IOmzdEd5TCZTsaTDSY6PXoWm1UzuxxPxne/dktg8YN/+V83v/E7b959oyxr50JVhiiiop7AB+zapArNarGxNZ1u7p8dw+JiDqLIGjxPpmNCNe1MpFnMt7d3p5tby/kFiIaCRRUAiNBs6KRU6VwokcyMKHAxKgvksgzSLqVri6JoF4vRqESD+Zn5ACnFazcOJtOSH9rYITlYtWFjHGLfqakPbiJxNvOiyRM5z7ffujFfrji4ZD0zpyR97EVkVPmuaeO8C4U3wtRJjJGcmkmM89Xho2o0+CnOjy7GG9Xx84uNSfnw/sXW7l7UGLyHtj1/9coSND2+OlkuLldvvn2Xzn3fzppZK+rYbYyqsSRYrmZg2MTZaD
MgxKdPX5DnK1f3Dxfzmzu73J9eHD76+S/+qByNXz4//N73vvbws0dXr107PZ9vQPrJH//R7sHOfH7sVXwPXiAiP3j0ZDTdOpvPv/2dr5Ufffbq6eNUblDh9GI5rtqXx0fPzp6grq5tV+999yv/4r/5fYc+YFWN9w9ubaRVe3F2vFouDvZ37i/PNiucz6KfFk8ffdb2sQwa23h8ejKp/fLkGLS4eXfX1/Pvvv/Vs6dPT86ens/nIdSjjfF3br6/mi3jpFdNz+8/d67bPbh+80r9bPbwk09/TuXoG7/+rfnFMvhzpZM/+dFfFMQ/+PHye3/7Nz76+aOwtfk7f/tvP334/On9h7feuhN2rxIjO4CUTg+PF8uVH5VHD061MalTSjaqRseHl6tVM5lM5ucXItY03XQ8OTs5T23qOzGUclz89c8+Xbbp+u7O1bvv3Hdkqbv21u3f/71/+eada87i6fNH5y+f3jzYn44rSrYQ+eC3f8NreHX6bBLw5Oh0izeu3bpSN93R5z87OWuvXNkumW3V9hcnnz94tjEa1TxZvJDp9ApK+dY7bz57cbaxOV6dXZjByUkTrbh7787sZAmu3Nje7nsBgePT+Wiys7k19XV1+vKsWbS4OVGzvrPLsyUCXbl94+Si2zjYu1G4y5Pzy8PDUzEEd/NgV2erO7evm4Pz0/mFxdG4blM/2dq8dfvKs8+faT87evXZYjVbPT4L9Wj32n5ZlgoEUWYX5818WdXFlYPNdrlqmk76BlSR8OBg7/Jitri81ODJFU3bb0xqMJWmu3r9Stc27eVFjPr0wWcemMvRajZcC5qVIC0JVaJyqMlTa1El6XxZlAUAxL5LMfrS913q2jamqAAJqGlQJOsgmBtyfFEiIBOFMuTrl8jgvVdNhBBcyNOW3jR1UfrkgEajuqqCpggoSiBJTDUvzycgxzTwkk1MKapISmamkjtuEdTIcfb0Y8YPq+bb+HwPjwYhFEZoAKlLZFSWIbeGErNmQ7+KCopIcA4IiroAj73FtIwI3IuWdU3eA6D0OsAuAdATOcdMjILoREREY58IUSzHA1gkL4eLqVqSCOoYGWCVYl7sUZGu6wEsdhEBJcYMNHC+iCLsfRJD4txYA0ShCpPJGEtfaB3bNptqCNHAHDk1IyYKHBcae6tqJyYxJUCSlFSh75sudUkSEnZtcoCt2vLysqjHyMg+mKW+bdrFLHjHzruiSHlpEjBlJrkkQjC0PgkRWb5djoDoRRISw8AagjWzBXPaKd+8MDtmZyoZvApmhjoYhfCLnE2e2a7tHq/lnrXoM9Ao1vYZeK01IcBQDZynRvBLjo/8c55ZgwkiRh50ppNp2J610KsaEvIwDR/eRk7v6PCiGYbyepdAhscOSN3MRIJ8Vw9DEdIacUyZhyOWMzCSzSFgqmr2xRuX178FJmAGIGKQkcmIiDBQNbK9aDBOvQ5QDOrFsIMKkMuG7DXd2UxxeLyqDJKrmpgYMyBT6hI7nATa3Jiuej+7nLWrzlC5oPG42t/a6Km5OLpgcmE0rQqW0YYPHFcrBVOCLiYAOl+umgQ7ddHOexBzQWHWrRTGO/thewuF62pXD4rdK7vgHS1ilLxCb2ezWV2E0cHEK6KmwkHShoSGqZ8BZqMZAOaOLRu66jJhB2wohVvbP3DN5TFYu34G69YwtHJYT7IcCF+EBwlpcGkNqS3NH3Fa47RwyOHgQGVHHMjKmQk9iFJ5PThJRkqZKpCQD5Go3N7hapy9nLQeGIgEOdmWPWI27CVCntevx+P6S147o3KGaHDDQb4bAxsIJ5hZWLAWQA2zUwHBCCMP1wLDnGxiMILXvr3cmIckYkgkaqqggH2Upm3Pz05F0mg03pLdjY3NUDER5ewegliMYLkmEJIMKVEiJmJHTOyQHBEZEGVtzswUc7pOJIe51t9Gfk4EhAwyZwNCZAQ2o2zwyKQRU7A8AFzu1AI0JMh4L8p8pCywgOZJOSIygsu+vGynGcKvZgBKgA5LsJz0k7zunonmhJrTgEyDrG3ChASKgM6MNd96mpiApgQIyKhmKKyCiE4BEVkVlRgFCYaKODRm8IBKwKCM6nLxmQkioA5iApAhoVPMykV2ypGlLHmyiuR0ZZZ+gICA1UyHsxTno0MBkEkRs2aISAZmmMFqaIYGnLu5gZlEOGO5VRH9MKCUjEgUTdnQQHMOB1NSzoAi4LXha8h/IjlURsdJO8BYOOczmlyV2KGBaEp9NLGUzCx5R/kEaLlxUoGRNEt1A1Yu73NWklWISFRBeQj0IqHLF4ahEg0RsjPUDDwL9PDl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbl9uX25fbfsQ0mvb/79+44llWKo6pq2jRb9FETo1RIRTUqx3u+HAFVgliGkKKwZ1dUMUbpL2P3IqbzJCsiSH1CorpwKNasxMiJSi67Uray4sCkShFpe7JdhS3jAnwxv1j1ESFKOdm+ev3WpN4KRdgr4uWzT5f92WWzBISuaTnYZOTaRew7c57apk0S2atKnDfpvIlVPQplmFS1Q2qahr0C6mLZ9KJlXXZtigIpmSZLCogkXdSUBBER/dCVQZlohpg7JdTAyBuYCEJRlMiBIaN3Yhm8IJlTlK4urBx5FV4uBMwuL1ZtF4uC6pK7RqMCgIUiozap6a0uJ55KVRqVJQh3HSsVo9G4KKsQKkSHQAAsKl03Pz1+kVKzWHUxyaj0k1GBjMQcPBfEXW9NB77YxqKejDaDryd1HSiYKmmcHT7+4R/83uJ0OXL1dlm/dfvKzmhUehbEVdNppiYyeFRRczWPJxsbV/bOO/fx42c92fb+GF3pCuedrRbzUIyq6VbgkYGvqo1kRRiNiAI6bla9s5T9veQcoANAkN5iK6k3AnYFOU8AGdxoJqaGiJJEBNAXQKUrRsSeiJgw9VEkmYkaOOeISCSBialkGywiIXlAQhfUgJkBSMwAUmovuvNn7eyV9w7Roy+APCETewBmX6IfuXIbXRl8OSn0H/y9DwDgf/Uf/ffOz09HO6O9UfHxg+c3v3L7K9+5c/L0dFztP3xx9tV7984++XQR05Vv35q6sw9/8BFNuZnJ48dn061xP2/cGCd18fLVCXvqW+CyuH6wu1zGy3Y+mdT33t777EefL1dy82tvCnhaxJDgpz/5q/fu3ekv+3mf9t7YcnXVzuPlKl6/c+vJ5ye6OO+pHW+BLdpXSy2nkyefPz+4MtnYLWaLVbtafvs7b5wcxllSpNnJy+ff/+43H/7kvIvld37zb3Wr+Pv/9R/XwTfHbR/9xVnXxM6Ualdsbo7aeaNAGGpX19ub49np2WQy5hC8900X67rQfjW7mF/Mmqouqqr2PqRVZD95/1feo655dP/+k+Ojb37/ax9++OPlfBVTHG1NVFLf9oDucr5ij4HBT0e37hwsjs+jpcJEQIBRhauqRpYIGigszrqTRQ+icdUVjJOdDe85QCCunNM4Pz+7vCSM/8P/wa89ffjq8OyoTWLkd
3fGLw/PEWGVdOdKXTo+PF2dzxe7O9tbW9uA9PLFi9Fosmj7aVlsb0xOXl5SvbGF4exy9d67d376k09vXTvopTg8uVzM5B/8L//D7/7Od4Or/z//z9+///TJr/9bH9x58/b339+LyyWQIfTtspvuTGNDq5VWVWgWzWSrGI1C38QQPBCKKIES8WLWtI34kiT5asSXl5Y0XdkP7WqlAKEMEoMjyd1ELjhNmhs7soXY7IveDVz7q3H9swIowLKREPn/8n/9k7/4y3/DcNG2q9zSUE2CghhGRyG1y1HN7Ony4sKRXFzMytJ551Z9GhV8cXqhZuO6/sq9G7e2R4B6enreGf7v/9E/BoC/+Dd/2C75stGrt9/Y3B2vlr2CqgiiSoopmWM5efHw8f2PkvQxprbpmPH5y5dX9ve8L8kTii5n85OL86IY9WpVwK6Rtl/Nz+fsi42tCYu8PDniQGXwTK7rejBFlT6Jxn7VdNWoMNHZxSLGyITJjMkZYlkUzpNIUgEiLLxXBU/Utc35xbyJ7WrZEnMSbbqUwz4c2LMTNcr8XzJTafs+qQGBZwqO6yoEJEL0yF0bzy7nXVQGDM6VZeU8L1bNYtUiW4xJRZEIiGJMIqIGtEYeGCJnKziQD94MYkqIpqKeuSo8GrdResXtg1u33/jqxs62L5xzRADI4B0BIhO53PvI7JgAcQjtDrZxMJWTo+O2b0MISAiCyDkCj4CMQ+cTOyIwSmouQN+nXsEHSP3q6Yuni36RRNkzmcYoCdCXfrVS8eP/2f/o39tzTpolqIGK9QmkWVxcFAEZYLVYooO26SnQ5sZYemuaPoIRW7dczlcrx1iEUFSjvouAWAV3cX7JDrtle9ml0aS+PLtMfSLUpmmWy64cha5NZrZaNcxkAKbA5DiAJTGEoqwndXl+sVi2XV27btmumo6DA8Rm2RMjE6ya3nne3Njc2d4wBRdCIPaeUx8x982CsaO+7XNbbhRR1ZT6mMREVVST9kmC46T2r/7yzwHg/a+/CT0uRDc2S184ydhFBDQBU0YwoBBczgqAQtsb+dB0SVRFJJpUdRlqxwV7xwFVUg/IKeGqTXHZdfNVbDo2y2w+dgSYS7CyE91y6bghUfAAucubGMAkEYLowOVLKYbAJfuUFJDIU4pJzMjEEBT7UaCv3X77f/4P/9d3r7xroq4qRYlJARQQVSWnTtQgpZR5rogUVYsypD6mJBJjqIOJIlJmpMamS0bPX16+OGx/+Ec//uM//VlLvHfr2mRvSuUkw4u7vjeVVb8y6IqKuYbt3XJnLN5OHc+c610ABSdMxgJA0zDZqjf2dvZ2xqP27Pj+01efPXz86Nnx/HIZ5yuJkvqIYKAKaATmEHLLkhgwoWMEVEm5ZxVFNCaNUQ2AGU0SoagKMgFwSiIiTKTJkPMBkwm9pmpimuu0ypIm07oKpal2UWLSpJB6aZvO8kcGoEOiBXLPlxoMzGRVW4enVM0AmDh3ZAMAATjPPjgf2BOZWRJLAlGHeFkIFIhSEiRWpJyukj4lUfK0sVGEKvDYsx/HRMk8sFMTAuhTEk2WIkiCFBnAgYJq7CUpAjMTGrMfFciMDISoBAZAxF0TJUoFlrr40V98DAD/6f/3v1IsADPdwtbA4yEPh+ur1BpLA69ZQK//Pf/z6xDZ60mHvUbKwJpuM0Rbvmii+pubffHfIWT0337e9QNep+/We7AmONnwy3/z+V+TOdZ05HWgbyBfwpoUY/kbtfw/Xwf/Xqfavvj78Nv5aXQoI1rTM3Mw779jFpYTIcNP6+Sc5ZxQfmGkTO3VGGPXLo6P/+KP//DkYlFWFbayuzO5cfPG3a+8Nd0c3b652zYRELxjR44ct11yHleXFw8+fQievvH1dy5PzkUFg6vq0kwkWkw9IzjngFjQVZO6IDCVlJIIqGLXRQFLim3UKjChkomhnV+sesOTsxmZAcNoPGo633edSoI1PFhSJxJVogGyK0M9KerNUNShqJk9M+eWuoE/Bbb+Hl4PE8P1RzT8sf6KBkQJKKKS9pRaaWaSopGHUFMYA/k1dCk3/WUYxjqBOSSqAAAMc2MVoKHD7n/zH/4vAOCTf/1/RgNARRDMTU2QU3uGayQTDr3wQwxu/bRfBOKGrOVwn6ewjsT90nC018eO5cK0ddyL6IvQJ6yjd8NefPH55NdeH5TDcYRf7NsacvZLBxwgDHkqMFCDXIk1JJNes71eH9e4fmMDd83Wf3v9GSKuWwuHd7N+b0P8D9es8QFZlMe5ESqCGimP5Z/96Ef/h3/6e7MOtAGQOFH6+9/7lf/oP/j7fXsJGoHRDMFI1y89jIv128Ghrc9MM4qKXr9vHIDfw8cC608JAV+fF17jzPLRBr90WhtOLJnwkunbONSxwzqaOXznw1BAFUV6PbqGHR0gWrkNEMDW/4E1cit/lkSZtT3EOl+fEF6fr15juRDI1Iwypsjs9S7n71HNMqRL7fXX8EvjLZ+zEE3t9TtEQlMwNbBI4Sv/9v8WXrOKipGY9JuTIDFWJYjj5SIW3tXkq7Li4JyDJrZIRUoICYmpnzXsSDWZdADJUodMZCbRll1kRvKksa2CJ7AUFTxRSmZOopbBjbzEbubKCTvvXFkU9WhjY1Rt7I63NBoslpfnx69ePruYH3cSy7JMojBys9n5tCgQfeotNb1aQpXUd5rUg2kfY+wToBg5H5L0fd/WVelir8kKIrZkgXowFgAEdQAFt31kYhNRASZ03imAiqgoe9dHRSAzYFNUVUvgmEhLTwyCkNizI7F2FYX6DpL4CNT3ichMoyn6AKjStyl16AARsQ6V5yRdx1SYmEZhLpzzFjVBQpC6rNgXqU++LHxA6OZ9b57FFXXuuo5NU03Hk3HoVr2KVNWojaliZ4qxTRpskZrJ1qQsfGzD7o7HhRaqm6X3KI6TcxRbcRR8HQSgXTZlGXrodjfqnd1JmNSjrSsNVLPVqQU5m51u0abzUBRBRPrYFH6TjRAxJSkodGJ9jCZSBI6rDok8shixc2DaqzkmQ2JXuFCopJz1VWUjExUOLriAXAMV4EoiJ5oyY0Fz4lI0QUJEiT2AEoIBMLKpmikzgyITOCIgZ5opcM6VVZFKiT2iOIfAnpCRggkRB2RygSiwAs3TkHG/87U9/nBVFr729u/+ne/84uHJR3/wl7hY7H7ju995/9tb2298fefgH/+//uOEH7+5M7p49Plku7Yuzp8cb47uff7wsJ6WI+5v39nYvTb+0V++mDfNaorjDf+93/76H/6LP3/54cm4cFHs3/p3f2P55OgP/os/XcXlvbdufe/v/k//xX/+X8wvD7vPXqZ0+lu/+iv/6ud/DdLvX7v3xgff+uFf//lvfO/ej/7gn89PZxvl+O7+3iIu7//i83tvbGxye/bRw7ND0Z3wK7968KSl48+exPPLnat3+raJUGxulpvWXnjRcuPZ05kvmTyhWVmExdnCeSYHMcXD44tb+9vosVulXm06GVV1QVYHLsq6o8KnHmKrZFoVenLxnCJu7e0YF48enfbNbFSFRasXx6cu8DBXNvDo+7YvJlCO
3Olpb12/sTc9v7zYvbJ/dji/uru9nD3+7e//96uN9/7pf/aPx/sdgpGDxfnF5eKiM9va3nz/G/cePXjx9IkVxfTv/uav3tre/uQvPz2YjHxdfvL58eV5HLsQCoamdQQnL05HW5u9SNN1Xtq3Pnj7nV9/669+909vXw3aq+gqbI6u375btB2E8/Om33zj1uTe3Y9+/ODv/fv/8Nmz4w+++93rm4HQfvu33v9g+fYnr07+T//J/+3nH1y/sn/1rbfuvv/e7VZkEVftIu3sTMFEFIraxa53OX6NJCmhd5qi9H0IbjQtDw8XE56enpySa3b2d5PpZFp58o8PL/ev1PmWlIHhdT2JWr4AoaEN1zwYEKH5PshA1cDAYzRLf/Knf9isTq/eKHswjRJ8KEJxfPgKC6hD2tupm7ZNMS1nq7oi73Rnt3ry6KSux8tOknEouGnFkQnFLqViq16drPJRIGbgYlkyWQIjACVQMEEDAMp7x4bBOReg7zsfuK5Gd29XnsPjR0+3tse721vnZ0e37h48fXRcl+Xu7vSTDx8h63hcCLjN3cn87DL2q3E1wSRt36tBKN3sYuaYXGDP5Midn12iGDEAaqAQey2KwoUAhAQkqc+IC+eZEFV1sZiTZ2Yk7/rUMzMQREkIru2lLIOpmKl3rmmTIQEqOo5maGjIMUlw3oj72HMo1HowckUBiDEKOxIaQA84LCqAmuWTETKKmmRMDJOpEZEiQMbUAjBTURaW7xIcoTEAoAOwCGYmDI48B5HkmBwREzChSAIuTI0YDSFXR2lSJGYi75Cdeh9iJ0TKzJlhELhou96LGaMhhRCIKYGwakBcpHbZrxQESNl7S8aeKDhXFePAv/7tDw5qD23Xxab0pRmvlm3lkGMk0WQxINSjzXZ2QijadrGNzogd1+NyCcj5Ztw4zZfjSX12dpnIyOK4HjUXnTVx1qdm2apKCFQUgZlSikVBbRfrcdmtGkUs6zp1CQ3bLo2nI5WERH3sVVIUXrYdORIAS8bsfOVMzSdTxmWyTSAVw6hYOxecqDA5RGhXKwMHoIDE3lHhlk3XdUamyGAKQMjBCWNqh2R+NfbWae0rMZWUOLCSCCEkWcPxjAM4Z4iRnXHpe0sO1PoEYMEHZtZkVKIydiYAkR0mA2HrWTsTYoRkCOAYiHO7rJJzuYw26w/MBhKBEQEdFSYZeqxZ3UhJVJIjUo0hOEMER6oIAOy8qZF36OjDB7/4b/7JP/qH/8H/brS9B2BMhASSBBAYEQlVTFWzQNm3LQD4Inj2EQ1VvA8BMIEyUW5y8Z5T1Jtb/qtv3Hhn7wqr/8GHPzI5vbxcOo1bk40EUFQhSpyUE9Nq0SyMVM5luYo7IxiNMSCgwy5Z1zYGVCu6oi9Lcl10FguhCZunvqg1gScoUtszq4hoMspMYjNQJceIlGknmdub8a4xCgIGppRRpQgAmHm+qiJgQAiEQDZoG5Tbx8AAoxgCial0UvZ9ZtkYYFI1oKiafummeqCkZJ7uugZojXeFTHIByE3kqIBrnTlPbYGDNxMyIgAT8d73MYmqAXQpESI76NreOa9RnXchABpCMonJGwNZvVFKqInLtmksSd8siXy/SjwgXIZJXZg6FraIjJpASCN6BGa06BDRcxd7LKysfd91zq1nzuQINdcXr6fpX0wyXz/563Kpv/Hv6znYF7Cg9cQRv3jAMD3LH+cvz2JfL5B8MZ3+5V/H18+/ntitX+b1+or9t37zl9QXAxia4eGXZoLDLw89V8P3SmBmmE/+ttYmiMwA0HQNQ7LhBYaLt9l63wxe92zpML3LQwW+eJ/2+j2txcdhNj7McCl/jExgKKLesRGOpvUEdj2Bo84Xvgzh7r037331rd2r1z1IalPfNJtb443ppO9EDan23iEsl3dv7B+fHu3vTAuEo6NTU5J577yGOuxvbFovsUsxgTkG0dT1RLJRF4YkCWmjZPbNMrLzXODWziitGpXUXteIbjFfxOVyNBm9OkuffH5qKQHoGiw0aBm5KwlRMdfbDSzy17PgPGHGL4BcOHxlucIYB5WIhu9zoMgMQhpi1puAmUAhg9gMIGOBMllMbdDhAfMrZ403c82AAIjAMMMlh9mxw2iIiAzG6wFtSGCShX4dfn3YX8o8JkIaKETD3D8/2AjJcrcjkqq8HgSEA1KdmFT6oZYuawd56o6QuU641g4oL1ISImAyIeDXZ0E1y21WGTMGGc00dIgjDgVTBAAMAHmfwQg57zMjai4fHJDZqJa7CzJUB1SUiXTgiA9j3MzAEA0IYeDdZMHDDIdSudxXbln7MlUizAR+VaWEfo5v7u5VU7ecawEKwP0iLRcn3eIcUMyQicFQZd3Vlz9XJtDhwMSMdSfKJV1mQJDfC5kqAWbJHix/zimftFTVEYtmsJciUkbF5X+CtSLERApgasSU1QBCVjUmNsj3aKAqiKQixJRlzfXAQDNAQzHlQQYkQhTVfJOvIp5Z10ogIwtKxvKmJERoapgpcoOMh2YCQGBgIgwuD558kiKiFCOxA8SUIjpHCHkEmgExiCgiEJKIMiHkN4KgIDqco5AAw5oANxwMIlIXLtTmCgcKq+fzitPWxpjElVW9SpD61KfEHvpORnVtIGCWJC3bedctfKGhdiDatkLFxJAWzWrMwEFDodIbOQPCvgPplNhLl16eXIZ6Cg1ynxi3kUajartidrrom/OnT57O52dJlsJSb41E4fIiHl72Kqtb+9V0NCJ2MC6KQtvlXMl7A1ysYh87iSZk5JWsVQkhtOQANPYifV94KgITsQsV+nIxv1wuVzEpBkaHznlJqWl751yfYhTxiJJSVESUqnQxJmFHIJCayciNJtVi2SYzJWhEtbeukYi46CAlKDwl0aixKF3b9oY4mpRKMTUJQFO0vjdCW/WJMHhHRagTaFkVhhQFyJE5XqYONLrCd9GSineeK69NUxSmbE3ftX2XEjAl550ry150VLho0dXVvO1XrayW6eBgv7xcbRST7fG0nE4MzVCLqqiKQJ5XXdzbuOoBLuezpHx52TfHT299660rV8vLnz8+uL7hK1utjvb2bp6fxeOLxd03v/bs0eV4tMtVRQQRrI/tbNVvVaXEGJwzRCJvygRslFwIgKAUXDlRYHZGoKLJIcYkjOS9FwHgip0XBTUxEVEFZkfIWe3MV1LymBfxEIfTFntCTILILPkgAEZGDhXqSHzw3oESUuAwIvLIIfiiFzIXDJ0IIoOujRx//IOf7W9M/p2/8+/98N/83stX/bK1b377O+2zZ59++uHi008XEiCewyQe7F759OH55fT6ag9/7at3eOPRw+fHrqrmiYvgF72LL87vvrlxfHzJIzs9PPrjf/2KIs4X8fqbd7mCH/7//vnq6cnBwdWobvT2B0/w2mewO6q0Syd7k92T1eL6za3N7ZH68GzW+Z1r//xf/kBmMt7dS+YM4WxOjdtZhemE/GLZ33r7m4+a+ZMH7fJs+2I+u3ZtOutOlo8/PPjar3zt73wPnj+4/7MfU/DbW9PZarlqO/LFolkeXNkRjYdnq1XCzelmAsM
YTQSx6JNKK86wqqfVZrCybC5n2q7evnH9D/7Vn41G3lfF1Xs3X14cbU8rpu3DwxNHbBK3N8Zniz4RhZErDDe3K+D0/Onzjf29hMTjAi6bF08XiPzeBzfivH/w6Z/duzmtgjtfzFar+d/9n/zbf/2DP5xOy8Kb9/TJX/2gW0yen138H//v/8nv/mf/8cNXDxWo6/XF5WnYKpsYv3LvjWa1PDs5vVj16phH3kVy3l0suw8/eshqXGC54598/OLaG18ZT+5sXflq++T+5maYn/SCxfzV6K2b3z97NdrYuPEnP/jwW28d3Hjj5oPnL//B//iDr1+89e//vV9ddhePnp38o//3n73zF0ep2/zuN/eubtZNY4+Pju9dHU/HVhbBTAnQ1HzwBCbIZV2cXiw7sa3tkYCMdncKbi5mMq3rQHR23r240CvXiBgg9xPRMPbyQH+96LK+dcVhGQgAAZgQVBkFRk6dhtpfzudd7Kb1SCFdLC58UUSLWBU7N6/8/CcfluV47/q1Z48/r4pwNmtGo3HTLa5cuXHUdwkpTOpFErdYGabt3T0/64Y9UWIiIEDvEpjC0JCLhJJEFEESMDP71WyhislADSf19PT8YvfWrZ2dzV/8+MeI4ueN4xCTPX7yChEmGzUaLXqbdf3lqjm4dg20j12sp6O2T8LgykJjLymVVaVqW7vT0xdHm9vT568Ox2Wo6goBiHjRrirnq1Etqr4ol4umYpAYy6o4vZg57/PdlVpyRNp3CYFcQMepjQgqCYYSDwB2LolEk5hixc55BwrMZL31CmC6X3oEBsDVvMvXZknChMG5mETFjAzNGF1MyQyQLLDrJYpKWThbT3QYoQxeRNoY2bFEAVD0mFIsRt6y2ghKSOwYHEZNoBxCYQbEDshpbnhVS0mBMImKASTNDULsWJKQd2VR9V1bFp6IokQg7WPUaCASCne+PH1+9FxMQl20fR+jWN+HUQ1FGVv95r177968RbFru8aRM4SogEVg70fTjfOTZ9OtaZ/6WbvwtfcVX7bt9sZ0tWgjUBexi+bKam9z59WrV3t7+6enxwo2ny0MYLZcOvNl7ds2MmM5Gc1nS8cMBstlq2CAZAqSwFUhSiSmJBDK0kDr8fjw+PxsvqjKetX0o+l0tWw0qUNygSbTana+ynYxsUgOQLQejfouAfaOXK5zdYUDM3LM7GISNSLynhN5MrG+bUxMibQXpmEGN94Z9/OLrln5skwCzlNC6pLEGCvnHROCxTZioD52vnCuZASpS16ZcGA0LFzRiZiAq4okSqEEwj6leR+TWhdlBOQca1IBc0SqSVQADMhV4wpSS4zEEKPGlFIUUNMEaiKqRVmCSexbtNR1MhmPCCxqYsRQcDIjMgVjUO1TcOXHj+9/8uRnX5v+asV1MhEi8wUSpiiQFAm44Nin1McYrSjZCLjwySQUlfZtnzrvyTkQTaGsk6WmW5RTiHZ28+7G3//7v/Xy//GLVy9fTqY7Fx20TlwYIYfCoSlI9FuT7a5PsvSzS4WNbcEZL59VbcvTgifCELdQitgXxVVyVYfY6ALKVdLziCs/GUeRUFC30LgSFfCePboUeyYixKQASH3sQyh88JBiL+JLD2axjaDqnVewGIUYvKfYpZiUCbxDNJIkEsGBM7CU1p1HYiJgYksvvkxF6WLKX5T1MfVJGAeFOpkBgSc0USLIDhJEsKSIyITJBAAIUVJSQTUjAOdYkwhR1yVCkD5miYJMTcRURZAdk8MYk/dcBqbKO8d90yKQAc2XPY8qcAoMUPmEgL6EmCC2IqaGEpVTP90cexAX/Hh/i+tpe9kcffaQjAsXojMqgzUrR5z6rDJTG1c+FEUxzAuIeG3/wV+6EK1tFYPm89rqMKgmr/0Uax0HXi/b21okwV9WjH5ZIFp7E+yLF8iTpy9eDQFe+xrWOswgAL1+rsEGsX4+WCsKOPy5fgjAoIO99mTYoDTlt7WmMOcasHX/tGb1w4aWedW1IJAr5CATnEERQJEoT4sB/v9k/XmwrVd63oe9w1rrG/ZwxjviYga60c1GT2Szm5M4iaQoUoMt2owlJxVJdtlxUkkpKVWqElfF8T+ppKw4JcelShTJpURKHNuSKFIkNZDmqGY32fOERgMN4AJ3vmfcwzestd73zR/rO+eCykWjCzjYZ+9vf+Naz/o9z1Oa30q1tWGZjZftKQ3uWErhphRwMC4brwqiWibzOWUdJXVrR0P34NHhzMWtcd589KOvPvfc4QvPH1Q196dRxtTWc6YwDiJZnfOWVZkObl5t5+G9+/fu3z6bLRe7N5xK7k7PkagfMjkBjXXtm6pZj9nYZVAEWvVZDFIGhAxZUjcgYrT09ndHj1hXbrGzBAVMQ7/Znpwf3TuKY3KOGWQ6YlYSsMteBCgLHwhApbHmCW7yBMKCC8QDCUo/NQLQRat3MX1ccBPT7p9m5JO8RmaIyEVwUVA1VEARnebS0xlg0+CGygrAxZQeoejA05mjCGgGwExFpjGTUuosqkQEBCpTjv4k9hGVIHYEECltWQaIQGCiULrpp+3W8s2kUISippO0cql6EpcGaVUD4svFKi21guUMvMCbQVSmu5CpAvKF8qjl7LUJ/CkXglnZngmoKiKdGZuZyTQiNZyKF8t+K5qNmooaEV5epFZy+tWAUETALu+HOF2t5duqwVRZSEAsMhVLmUkeiPtmN7RbWoMmHzg6GHM/iHoEIEeAoopUWkSt6DI4BaYbIaqBgpEZAqgIkFMwNSMrmi8aFTDQVFXVirYCpcusVECWfStavmO5YwNYgXwKxmRmooZIhoBMQKhZyDDFTIRF7jAwESHgsn6ASCpqYMiIiJK16ESqWDhUQISy96abk5WbmoiBoU2qNAGZ5olwRCjCKxmimsLlIpChACDzJHkTw7TegSJS5FkVZcdFvUMiVRMVK0IpTZeVmlyia9MjoV6Etg1g2QNqjgFFYOy31PhFOe2remZpSwQ5j5ZyVhDBnIf1+p7ZmhCpMhRhJIkDogWW2axqa1c5l0fYbNMIENVGgYqwi6jdGOIGobt6+Oz+3g0HuzOap+3x48f3Tx+8db5eheVCwG+HCIMGhNVmPF1nR3zvUT7l8yo4z9ZUTFifnfXsUQ0ZsqUxGro6dP1g7M2Fvs+IuQkhg5ZCU8tiyGOKWRWQ6nZmCgQaczQhzxVj8EjgQFNOY3JVDQ6BvGfKKTJT2xJJnzvBLP2QUrIkHPxMcVQzRPbOMdrY9UgAxnE070McidCj8tBFZPB+lsds4ACoaneA2ERSHBkUQiUUx7gRVs9jl1bVrN6vw8Ojhy3MHbrGzUUHzyDer1cRh262e5iy1G0rZkMfA4Rmtoj9WbM737wx7syaF25dTX0YzUlMrW9jGmNOi3mz086PzlcR4HB3bxhXCKY5n773jZ0bVz/24ae/9No3bzy3v5y79cmjtHWVZNk+bMOs8nXt2hw3Nq6640eBWDNVy91hTM1s1zvnqELAse+JQADZV+xaVGZGk5HIAwJ5AgpAnkgASUQl9mAKYKjAzpNzzE5NRRGRyAGiEbKaEaEBETkD85
5Lun+pOBQF9i3IHLDW3KNiqCrgmrlyVUuOvTrjin0wIESCi76PPIxh7r/x1d/de+ql5Y2Xx3vH5/1ZVQ3711bd7Ufd4Hqi7//eV196of7trx7dPs+PH21+/sf/VPORZn16vLzx9LffOD46TX03zNz40U8vPrBcjKPMfHv73rs/9NOfeedb27e/3t967vDGwY0vvvaovTI+vP/t2oe0yX/lf/7v/uP/8j+ronv65uGXv/q1F1886I4frB7aCLmeu2HQ0CwPDnaP7z7qx0Gidtv+/Bi1HU82cXDbH/jRT/zur/6jqvE//Oc/863P/d7LH/vod+4cX3vm8Pf/63+U7p9ee+b540fbD33w2d//4tcE6OxsUxk2QJt+aJulRTo77wjt2lPLescxu5iiRT1fDWnRxs12efUg5o5h++Dx8MzzT8uYgPl06GaHh/e+89bNZw9vr08bb42YbXsyzFV+6YXd977+KJ7m6x94qvMuMZ2teu+W7exg7nefefmlhw++vrr3zsnav3X0yz/0l//k17/52vBg/Tu/9xsv32wjrF754ItvfePBrMWD68/9/PJn/t7/5z8JOsxS/Ykf+6H//nc/q4v58x/Yf+f2w3fWZ+uYPvAjn7LuvMbuze/ezbnf2z/oT9NS/WZ7erI9fvZK+7RdH8dhszpp99cf/+k/+d47D8bXHgynp8+/8uJf/Su/8IVvPfgv/x+//t/8X/4ypRGy/ds/++p777xz56T/9MdeWdSzm3t7rzz93FvfPfmVf/Xe933kBvPQNtXu9fm7b717PbaVRwSSlMkxgqUYPWHd0OtffvPm9Q+2raWY5zVd22s3g73+1nuvvnjdh/DgwXeG5z48myOiy1lxeoRkZgYDEWHPZdA4jVKm8YKhkRqoJvKaYt6uT3bmDTIyzCs/r10+So8XO5VZ/ehkfee9k7qaLZr5+eo+eUO0uQv9OOy0DuMWYsKq7tf90XF4/tbzd9/56kZR+4mnoODSNkEGjdmhIyI1IUDQqXrSee+854qd54pof3f53nff1Z2diMO1w2v33ns3j6JKNz7wTBVOb9++6wiIAVU35+fL3auNb07HaK6pm1m/PUWnpknMYo4oWrft8cnZYtYE54j19PwYCHNK80VzstnsVTUjA5KhgZEkndVViuPp+WaIoyrErJpGBGDG2lfl6Rdj6plr500SqNZViClZzpayqCKDqmYTcYnB1XXdjYOpVeyGPnof5rNZPhFHjklHVjLzzoGBJ8lohSdyzmURAMyiYErImkwlB+8YGVTFICWp22rTjQCUUswpQ9UAlAIRh0CApireHKFjYFBAxCzZpqHnVIhiKBFSmVr0w2hqIMIFvUkGkIyFGSVnH1pXhb4ffFhgcPfvvp5FCUBSUtU6eEBPwYmnp65e/fQnP2brQVA1ynyx3Pa9GaQ4jqKWUrfpmBE0iiKCExSUnBMb6NiPmCFtNbS02a5CYBm3oCnlzXJncf/Ro5223ZyuFruNm2HXd9tVD+rWfUdgqorEztWenXfZh7oJ7mR1jIxVCKvNCsHyENk8I8csSSRn8Y4Ws3oYYhw60/H6td1HJ5sU0+lqu6wbI3EO0IiZch6daxCKdzJ5RE3Z+co7Nkegpgje+WTZO9d1PbhpRPRDf+Yjb7zx3e9+4yFodsJZU1ZzgISOEpCUEbVqxBgxb8yFyF44uApS5Z1GwaGnjIqsOwihkZxiHJ3jtqKcEZFkkxNRcS2ZKRJUoVKAjJYwMxM5UssAGQlIMyhaRkYCAxkigLJoimMUilXwzDHl4JiQmZxzxDV144Zq56pwtB5/6Vd/KfX2oVc+Us/3qZ35UEnKAREcqCGDGGUjdiH0fUQjAAx12K5XpCmNHWMYU2+WNa5BIW5P0OoMSdmuPr38S//OL/zdv/V3Tx6f4bJdS9/MQ1UzEjD7IXeVr+fVbjcAKm/PpR/w4Fq72r7ZMCxJcjqRWQ5zv+pfd2EU8EN+3MfHoR2abuxHQhjFhLwFD0Rcpi3EKFmccwxgBo69ZVUCUHMEkgbnTXxkUCOVbGAGijlmyaIZmMnERDIDMlEBNrXITgYaC0pDm7XSODZRACDHiEqSTJMCkRW238zQBJUQvPMiaiqIqKAiZmoqEzjC6IymOWtOQgiaZIRcTqR5W6tmMHEEYDCOotnaxhMQiCD62e5ysbPcro8tqZg4DUBODFMyEMkgQpxT9nWTtzEngAyEHM2savzh1b6ZGWKsPCx3tRtPtqOpOlp6Ubbs2SvXFFrDEWfz+awB+MrFAwgRaAKEJrKy/PBCyvljJhq40FsmdejCo3Ix6Qa84GT/mPthcj1cvuoCQ4GLtfpJR5gW6C+FngtPzPsYpemF79ueS/7giZRQQIn3vetk2HjfBpRvSHhh0yi+kQt8avo1m1bmi7iDE+tg038mM2XQPAlQdiFrXfzNCDoRHIqIoIAXbb7v26dmYMyk44Cg49nJ/bvvdSfHTeM//LEPfPgjP39+tDLGWy/cGiNKogypWTShWoAiM5hBUwfnOCitNtvzpMcPzh/df1zR3R/5mZ9+851vyRjr0JyeHY9Rz4/OwbRtw2xWDSkqIaPbbDYIgMzDkCWOIFIHn1JKII5rNLy7Oslyj11KWarZ8nSduN4BAAMFnHw2kzZUYIoJvFIozcQGRbeZ7qgmlyZEAEaisj+oEHtl/01MHxaPTPmLiNAUgYgYaKr7U1MVAUYzyApmIqowoTGGhcyBJw19hI6mwBS1yywAACJERFUho6wikn3wBUVk5lIXXmb3qkrEKoqApkLMSEhABehQlQLflWp2KoJCFkI1cORcqblkx8UwYWA5RyJWMSylzmoIqJZFhHjqPkPinMVQEUsDlwEVBLy0dqmZSlZiKi9gdqpipuSc5Dwxj6ZlooU0WbKIprNdcmZyAJN3z1SBiEo79YXNqng0FUpsCyJaqQwj5uJALJVixoRT6yOqmeSEiJozMhVeqfZB86CSQXIWUKPHq3WOQ123ZmjFY6VZFZkdM6tZ8QAhllu6ECNxOfMA0ZCIjExl2jQAU3PM0zlDCIYqIqU08aKOD4rSBDjRgyZTBAogEZcjiEQqYibomIhNlRiJqDwF2DE7BkST8kQQUEMiFUsgRIxIBoKMRKW90HJOBoVZidmMHSOQqZIjIkopm2YAuLw3gU7VZoQu52RmAlROV1FBZGKnOSNCSomIRTL7IrECe2emIOWoOQQgBHZogiln9FzsOZeX4oVU1LrQ+KquLdo6GQQXNzAP+8izJJV3jWEIlRPLbEOWBCoqo6ZzgnOBAanVlBs/YxaxjDCGipua6nqJRo58gG3OfbAMYo5VCExBTJx5yDOnoUFJZ4/zeHq2Ot1qGtTSKGOkQdrhnPMwbtbSnw4ejUa2mdsOSkDkPXnnbNkYsRubGY71GgjZuVkoslj0YYZkILn1VfBklkezcey3MSUjTcKO61ATGQqmCIvFjmRM69FAgcA3zkAJKI9CbJpzyuqDI6pyAk0KYiAC0Y3jaJAFNZBoHp0RORXFiqie72671HdUhdC2jcQ1M7EHzYopVXULGaPI7qwBkKKTZ
BE0gXF7vn2oljMzgtU+5CELJCCYzZuxG9brCFYxt+MY1Y9ILqDLMaMBpHHcrhofZ1Vz3j/edris63a+kCFmtRizIKw24+Z4jeSE6fR0Zf12xFzV7t2vf+dws7r1oesf/NBLm7jZZNnbWaRh0wSWzbCzuAEUYrfhPOY0zCgRx7aej7FvFrvkPDqH6M3EVZWIELILc8OAzKXIN6XRgJErAy+GYIqWNEfQiAjAzgUPwEgIRAjknLMioBIUGZWcAyQzKrdjAjOJhfNFNTNgV1fNQkYldFRV3LbOtUBBgdh5YK9izHSRAAMA8JM/88lWz7/2zW+8+LHDD776yvd81Hfr9X/39/+LT/7Q4YP7d5bO/+j3/+A3PvvVr/zzew/vj3FnNz+Of/9v/refePnGD//Ex/7l77y2U2Ee9aXvuXF2fPze6fb6XHHrlN2/99f/x1/5wje3tvNv/Af/szff+qNvv/OFTcDP/NlPf/6fPNib4aO3vnwCxx9sZrmRe/fuH1y/8eDROXZ6+PzN4XH+o6/8i8Ob86ev31idDqTnr7xy/Xh7PHsk43h27UqYLRYffemp3/lnv3y+dZ5mv/ult/es/c7X39w72Ndv/NbONr3+7nZ3LkD88PRob7k4Pt+Yx1CHnd1FJGiv7p4+WO8f7u8sXR+TOPY5s2PnrN0lpFSTgaznXh8/2kbSEGrnaum2Z7fvzEL78ivPnXWPFrsHkLd7O9wu1cNY3xh+4mef/e9u3xlc45cNiQz9+VPPXekfr88H+IFP/Znnn7vx1pfP3vzu5w+evZ4e3qPzd3za5vHuz/3Ja99+481vv37UPxhQ59H6n/3Z5//B3/l/7u/ue/IHh3vf/fbtsQeQ7dvfXvk69KvNap0+9xufhbHbXfrga+/Do+PTg72Dh7dvb3K6+uzy0YNNPh0CLT/46vfeP3+0e636x3//X33i5vf+tb/27//WF771e7/7Bx966bn/6//yz5/eeXzjcFk1IYNd52v/4F/+i0++8jIhMOZrB6Fyiwer5U4LahYwHQS7+dFbqgJgJuKCL08WXwdJ49jLC08/b8beIzLlLr5zZ0TCw71dqogo/4WffvXoZKU6W591+4dzBVUD4GniQZ6nYbZdrm49MTAjGDGmjL/5L/4QEplaVTuKul6drnEUS4hVJ+O8qbanZzl3lLvjB4+MZXnjgBC6083Osj1dbUGtqhgJu277la98A1W7zQaRy1WgAMjM3iSLiIooE5no9Dx2TkFSElAMIeQY+75ndAe7h3p6rzt+fGN/Oa5WXO+Dm7/2jS+gwUsfeOb+8V0zdOjjNq/PH0KmcTtqzuxcTjENgwHU3iXJKtq2MyRebYYkhGCOYDabZbP5zjJrqoIzVTSugovDKGar1fm278cUkdA5zGLMJKpRkndeU/bMqAoIkpUZgdEQnWMpQ1Qk57yJmVFKGQQ90MwzFWDbYLVeI4LotEoWHIvamBJ7NIWUhV3IKZfphxmy80wsImUYmkUXTSjgek5CSAroyFk2UJOYnAs5jR49MYGqirDzWRKgISH7ICrsg5qYZEQAk8CgiI6ZkVXNOx7HAZ1TNFOQnAHQEac0ihkC5ixv332j33bVLBhbzNEREAFW1XoTr13d/eGPf3wGtk4RjIBoGAfVnA3qZbM9OZcxUzWPyRzyolk8fPhg52AhMZ6fnDK64FtiCW1J21EiHofoXb2oDX24eu1a7rvFYiGST1fnaJaHcRxjaBdZZLmci5opOQZmSqlb98oO45gsgq/qlJUCNeQdI5Af4uAczRfNfFZ3fQ+jhaoG56rgMQsC+uA1iyYNdUXARJxz8uzGNDBVZdFCVXNOqpCzDcOIAEnyGJMjvvSF3H387lpOwz64TNvjqOZyTADk0TlmRCIET9RvOxN1ni1Zl2wWgqs91aGahdRpyJa9SzEXq5c39Kac9fh4o11k9mLqGD07dKho6IhM2UxFxEDFvONQIasIuJiUCNkQDLOJZwcZmTi4kMfsa2xCAHSOXM4iliIoIKloirlpm5yH9177gj2+533wy11XOReaUFe+rtn7UFUukG/aLK6ezzh415pYcgH6bsTG9ZKINOeISVh1sVOJxqqtRLPB8KHnrv7bP/Mj//A3fvuo33jXYIyZQA19QB+arJowcRM0qTNNg5wdjYvDq6v1qo8p1Luz+ePDK856ExwjO22bYU2z3XY3kl+Dz26l4ogUnXbJFFQVHNZVxUSWLGdVU2YKgYxtthuQKbvBktscDzHFfg02gkMeR5AMjMSIZoTsLqJUoFQ7I6CJIRW4AzQDEHSbhGAgyoBYjK9TAzfgEz8TpKyiqtmyiIohUYEbiIptxFRlcrwRl+QSSWpE3vkxaQhc186LDEPKgxBijAKq88bnlAWsV9FQC8ZQu7apsoUMNNuZdwbkQkqqCqCGcagskydJiOAB3TiMOcZus4E+VXFkE9UkSTWeGSF5TpLEuW3eiK+Cc5IuhZ0LWeUipITep+e8Dwi6sKfBxEGgXTjPyqLHBfRRzHmAcPliu2BHiryClxOhJ3rQNI1/v5JzwaDAha3MLp+Vlx/6PjccXHArF+KWQYEDYFrxv/g6NrXL06XP48JeNv1ILxxPkwuKLnChsqk0uQ8vkA1AYCI1BQIqkUPl9ROiUUQ4eCKHPbE2FoxBTc0kaYzd8YP7t797/vjh/u7uB154brl37dr166T08oduUGADa4ISMTkHCk0V0pgliyqYoxjzph9ELPa9pnj+6OTk3hd/9Gd/fv/K9c3Jo1nNwOPpybbvM5AM3SanbbuoIcu2G7frjYhVTVXVTUIDxz5waPTK1Wvvvne2XW2q2ldGe4c7x4/OHXFb+XUWZAKzcpwLJIQX4+xC7hQNiC5OMC0d9aVOHIrdDBFpkpMuwS+4VAAn96eqTujRhc9nWja+kBsBQE2no16QMSQmosstQkCki/9NXM3lkZhGRDgBbFkTIrJzE+6tdrnSPL2DQZmrAwDxxJZckHlAXGbgWErsi6uLmKAwHFkvkKkSxJSBkJmnc9NATMBMNSMREgMiOxRRRGImxOL2EmZ3YfsiMCs+L7NplyKqiCBYQR0nJ5MaMZsoANCkN4EVBdOMyF1+R6CLk1WVkEwVTAGYGIuehURmWpL9RHW6SsGI2MwAkJk05ydHE429M1ViQlNfwgZpBGZFVIdnY4pjZ84ZNcToHItoGdUYKBg47wgZyrwQSSSLZSIiYjMo4bbMrKYX92yZziAwkwxIxb+Pk0woiIREU74VYNGhEImIVATNVAWK2+7ya6o4doolT4zQkYlMpwMSFNENjJgM1RQNDAhQplw7ACPHpjpFQAESUQkfAABT1SJDT8IcMoJIZiJCUM1FaidCUwVCZGQgQFJJasLs2HmAyQzKhDnn6euAErABQnELipqqd85UkbjkAZZDf+HGJK+DKjdJbDOOom3bLNj21arg9sn7EcCxOKDRMqiCjoqbfnXSdyOwORo9Za0aIKydJ+3rCtrAKoxujgDzWrwkHc4cs+GI7M7H1CxuNO0zO3vPOSNHul2fPL7/5gjJatV2FrWNgFiFLqbt
NjLU86BokJWobtFDP4j52XLn2n6zO9cu5yHntQszICHNhIYqOaYcEQjBCBsyMSAOWCuMZAA5s6mJRNDAFAhnrZsHo7Y2DWf9kEScA8ZsydDViBTQm8hmlbaYFvPGRHWIoMbKWSyqCiqBZUnosKoIxZJojMg8Q/O5H0aLzimo67cnaNXMzT3X5ANx6IeBfKp8FUXq4Bx5jSrSbdYrNKgcIYV2MY9jT6h51BKwpihd3M7qOVrScaNMDCSZQRM1vlsNQ5f39q7tLa4CqA8+5oRC3gfH2GkGDnuzZSJLY1/PZpYGB+qYh5NVPNm7cnVx2OzcfnzuVMY+VuHGwd4LqxSC4xyH3J+o9nXl+6EzXKCruF4CB3RNimUA4NDXzlXIrYFjZLSsqOiYyAO4wgmaRsm9aWZCYgcc2FUKxESFyEUkNXNoSMVcawiqpojeMYvkSag2FVNAco7H2APU3kdyzletogP26GtCR0hEzlgZ2YyUJ6no1R/+0Ol7r+3cDffe+Nqvvnv0wve86IE+8IGd4wffGbrNai2/+c9+f3WmJ3d1/dhfWe69eDhf1HWurp4fnzx8447Yzn4Nn37l2j/9ndN734X3Nt2f/umf+cJ773zuj77ywWtXbn/jrS987u9874/96UVzft49/q/+7v/7r/zij59+587VuXvvne/cfW377CtPHT94/cozT6/WvRqNJn/6z/+le4++urNbe7/zb/7ZX/h7f/t/f3T7uOKzG4tmnXFzNGAeHrmvvXDr4I3b5zmvX756eH7n+K23zzwOR4iv/okf+OhPPffrf/ufhbo+XXWBHZgt9tp+3IxdVTnPefNDH3vu3tu3WwkRyAQF1HkKbZBttqwNYTo56c4wxDo0Ghh39up+NfheuvPTVG33X6g3Ge+8dba3CM99eP6dd85qgy/+zjd2D3fOH+vq7Gwgge3Ge9QBV6fDlWf2815sXnp69t5zL3zoqa+98Qdf+Sd/dPjSc9vT0+986cxfubUeoIddHGWd4f/2d/7rK7Xb2WvffePusFrB8vkhQddnv8XlPpyPQ6iqyuEY06xth35MFrt+4wPOrs3U6HzbpSEe7i63x93922/0dTsO8Xtf+n5cxbnbfvaPfo9mT/0PfuwHxnFbVXujxGGMoQq1a37+Uz8lW1hc8QCp324Pd8OnP3Dl8d3jq9fnMkrl/eSxJ9aJ8AXLqgxq9PDucXDhtde/yfj8lRs7OQ13H5998sPPHp9uxk7NIDlFo/Wmf/O9R682FOoAwGUZawJTkzCzWfG025PBOE5jnjrMf+WXP3vz2kHUk9T3CpglMlpbzXOUMSUvVYwrsS6PEiqP5jdnqxs3DkD59HRz9crexlAAZ1WdUzw/j7NFs+qsqi8GRlqWTRKCIiE7KvIsMZqRKQDobN6cEnYxWsoEHKrq+PgITa5eXT68//D86Ojgqflpt3nqxs07790/Oz8x0/VmQ4bNjJzUQ47Os6FyCJhh1lQSYwB6nHPsMjLGbGIUqrpyzoZN27SD5pyl8U5E0KBIAACQk6w2241KaJvNZtM6J2ZMqGqWxREnzd5XgAxEgpkAchZVRSZPmNSSQZfyzHkFRLPgQ9dvESyKUAmB8uiDyzETYmIesszYN03d9aNjB6RZtVjiK8+IgMjsvJgxcc5SBedc0LL+TOSDG4YsWdkFMUR2RA7ACB1RAaTBTJkdO4cIRN5AAZ2ZIjAgmGoaIiJGESIvap6drxoBQw5l8VsBwZFpTqkLi513Hj16vD5pWp9hWh9TkyGrBb979fAzn/zonq9i35GRrxodRkAQNUZUFKmYuLLojk+PFrOGVNA74MAeV+uTpg6Lqs55cMEvmvZ0deo5aNac0qxqjs8euZ2FRgOAdT+0zQ4hIndAW1c5luK6UkKXUjbLamaGomnWNklyTGJqkKX1PjTzo9Njh1S3fmexePz4RKIt93breR1zd7g/f3i0kiQ+OF8FgZxydg4dVdm0cosOx2wsKXl2KUdGp6CKIESsJcSIDSFdzJK//fX3VFCjITOioYLDgEaOWeLA3pkjCey5tm7shxEdQcUxxpp5m8/a+axetk4xkqPAaoIqMAz9WVofdbJBy04CKZmr3DRsJ59MUTOBeeKMpGKpg36M9WyGLrDFcewBTJAETdKImoNzWbSmkOPAvgGUmKGuQ5c3Dh2RmKHEMcz54Mp+wLBZbYhBz08QdByiAamqSCLHRFw3rQ9Vu5iz9/OdZaiqGKOBUfBGoJAlSyA0SYjW9T1i1c73svra6OMvXFt/4sOf+/bD+90mipHfw6pOY6zqylW+z6fKwVdex9RWmITGrq6b+vwcVFc7/di7cTnzmM+EdZ2GlcA2I5HzTnLj0XhYbRgJgyPzqCYWhZwDR5oRsZ45Rp3VNFv62U6mOWHbDFvZP3RDH4/ejeePLKegGUCg8shoWVTUHCGWQTmhmohkQiIHJsqMiEjMaiZqZRFXVAhJxUoqHyOaYTYLzFk0JgWFyVOgJqLMZKaElKSsAE8iiYqVJWh0jIyh9grS5+jYOU8hS1KVCOgYnKtCAPLKFc1qc4YVJkfA3qXU6VqxDjRTTJ6QGYTQECBB8C34mdVVJsoyWk6BzIziGCVrSmLorGVmp6FGHxrTIWUbOyH/RJO5tHK9z3A2LV7YZbQ04pOfwpNn1uVbPFFnpuAjm6bfcPGIw8uXX77BRRjtkwzoiUgq9zh4og9MQtD7Ph8ul8Pf9w4FQYILcenJi+FCCZu2+4+90aQlFCaiuMnKqYAlwoiwJOZcQE8FaEErDiIDQpTLL/BEwZpylXEiiYo+AMXShmA6eZdSGlYP7773xuurR/cPD/ZffeXDV5+6+fRzt9Q3VeXruoaUJIqZECJX6JhTH2UcJWUfQs66Ojsbxr7EY29OV21Lz7908/N/8NVH999rD3dOhl66PA6d5rGpWJnSIFEE+jhsegMMTa1iKdvm6BwRQ1PPHA7dcHp8lIa47dZdytWsdTqf7R6sTlfkl5LHAlipanGXwRStw4YZih+JePqHorBMVNFlRvVF1YfRpMlenGAXc3zVMvSadEdEItB8cTYSYYklKp5AUBW1SaRiLEKUIZJOAggxudKVIypQfvHJueVMMhEjgSlS8UwVeIYIVEvOEQKYCpgiOSQ0nXQf1ZLQdBENY0CTabGEfOGFKp2RHROp6AWaZ3gBoqliceeVK42QrBicsOhBOmUkFZ12SscvWE2555ComJmCmtk0pbo4A00MEciguC0lZ0AqMUNl7wAqIYoY4pMIaBUFNCIEIlAxgOIlFFMVBUBCKjLHFJZEaFKiPqcjJKrMpKqgWuQTUQmewNCAGCmbGeI6pph6x7sZwAAkZef8BU/IYMDAWVKhq0STmRaUqZxXKgWEVFAlZgCAEgRZeFKTyc6H00lFSIUkQkJCVhMugrCKFqsa2AR6gxFySTZm4rJ3pdCHhGZgWcmxmV2EHiiIkIGKGEIWBYAnup4YmBEzgDHThZCVym0OL0RxKq9XQ5uwVgQCREdT7mYWndRU1cs7OCJM0+OsyAAwOd0QELnwbgpg6GgKyQIwUXjC1V1
IRejVIaRhe75OsU9NmGWYDQOR8ig0q6rWQxrPiFQhk5Pt+iTbOqUhq3feDIyC60SqegmuQgD2CNaA1mwzBBHtQXAx3xcVz27YUtPuQftqO3+6dU3Ixzqc993R8fnDkdy45p2DK9t+HGLHWOWkDQgZZjHnQ7W7P5+HlLb1cmazHVIEi8wuuEW2GVo63zyszDehrtkPw2jkSXMWqTCDjJKzKSAAk7QzjsPYx7EKDRLkHBFt06/HeL4dtc8K7JbzKnUnIXAIiGIScx7VGAFoHKNBNqqYzPt5FzdJDciYqaoCkibTaCqjirl5Q9bnfrOyeQWMy+UMsrow64dMlNmbNxu23ShH/sotcrUIqmVgv7O7z0Cbs40PO+bqJJnIb9fHIXhmTAlUfT07rGcNATHktt1hc4qVqzyQkDVS1fPaJ2IkillFQVM0sJTivG0xC+WONLc+zBbzNFSQOsr96r31fXtw1T+9u9zbc32NCa/cEH76NNXREPPAsPWuc2C+qdvZVfC7FryRR/ICDr0r354cIVVgjGQEWW1UzeXpQagmySyqJjQlZjAgVwEHNQSCS+G+XOkA0/ILFKMrTiqpQyvYqpmCCIKaaCCmaqZpBGJCZ8TljglUDD5FhydJl6MN+LVf+d3Ht++t73UvPBvu3fu6jnf7lWlFoufeH+7fXHz1qw9Xg5nQlb2wtMGG+3/5L/0v/lf/p7/5sz/5GWquEe9RuLdX01/8C3/q//gPfuOv/If/7n/za7/xn/4f/rdf+/X/5Oa8ikebd09eW+zOnn5u/3DX//RP/Py33nvt+K1HwxbXvdv6+ouv37n/cHj+1ZtUhdt3Hv6jX/uHP/r9f+HW4c2d3XXN4+e/8vn9F76Puv7jn3jxC3/4L1dHj2e199qO/ey1t97ZDOAoP3iX5mH+1LPymZ/9k1/8jW+s7qx5Px678HTlnr96/d23HsmI6slrtT7paBa299fvvna3Iu763s9m3QCOfbOsZvOld5UO2+uHzd6VdvV4PD3r96iOst2/9ZKE44P57t37b9BsVc1Pn/r4bPbiM+Ojh+/eudc/3OyF5cmdY24aAGCGZ5+7+uaX7vvx6nxWxWr4/V/+G3xjL5JdOXzuS7/zWth7/mS+/5P/xl/c+b1/8sZXP3f1uY9/7NM/8o1f/fUri6Y6aHVpGtJxZ8Tuw9974/e/fny87a88M5Mcqx26Vc2/+dq7zzz7tBN3ftZ3fZrv1TduLcYhOx+8auVq7fN2q1evXQWFw4On3/7We+/cubM+zXdWq7/5f/7f/d4XV5//8p1XXthzlTITAW7X28Vi9r0fa4dxXJ2Nu8s5iKZRdw/nhE7ZGFyM2YdgdrFaDKX2BkEhZRKrPFfdAI8ejFx1yOGpa3vgbHevNsAsogaucsT84VeeOTtduS5XVVXPCQwsKaFlgZonY3Sh8ifYejrZ6Uuvbb72enruCnWrI567EIIiXtk77PrTrlsLVteuXL3/7kNAMRMDlAFIYFzl9XZ85sZOYMLKB8T1ulvszGInQ4Zu1T+7c7VcBUwYJROBWkYuq0dipkBIjGKAzECMxM4HNdw5PLx3+p2rN58ZVniyOU9k7eFegnzlSvXoSA2TxiFuxiZ45307qzebztfMjNsuMrs8Ro+5ruxb3/pC3Txz88atbuyjQDWfq4qqItA4qpo7G9ez5Y5JBgDIBKpV5QQsxW3Xd32Jm01x1jTTQFyViRtXlc4jI6zK4hWSEmQVyVlNDSgmqZETjK1zCoKMgsZ1ONuOS3KeSXOqCVISEa2CN7NZVYPANkUkKhnEVXAVuTElIFAUNAPQKjg16GN0pmBGiJqFFbgE25qZSNYhhMpARMw5VgATMTDn2dTG2Hv2kseyYC0ZEcgzqwkxASk5Vk2qqmUBMYtjzgaWIztipqP10cPVgzB3ZlGzyNCzI79oenPdYJ98/rlXX3rm5N0jQ87gus3gAgNAPwxMjN5B3TB7NEDTyHCasm/nAzpwfnkwOztZjdJf3XPb7fnZsMpIVnuEQAwpSdXOQ1PrKNH3ThwqEjriCr2JEqqNowjYzqIBk5SiD25MmVIJ5KUUu/lspkRD13v2DrCd1/OdGSaADLsHB4vFwtV4dO9oUS9D8E0bnGcVKWtxqjnl1Dbz8/6hd06BUqYhj8GRpMzM5N1GsqaUNZMPqE+mB+kcmCrq45izqaEgGngmT+qXutgxaZvzAXLEmJMZg2lV0WzmA0hb1d4D+cguuNploG69Tdvz9Gg9rml9PCLW6BxU1NShqngcxzFnp54RAiKbWIqCLg1Cg5lqn3oKzFUgrwQqkhnRM8bBqBDtQwTKZrGuZko26oCehAAoVR5gWH/6J37qRz/5p3fHRVyv07geN1uJfew6Ve03nWhUEcl5ONsM+Wxt93TMpgllJGYkMjJ03hhykjKFQKaYs4wZyCVBMPNVjfXs4zeW1b3tvWG17gHooEIXt51vqspXAsyuQXC5750LthJMe0/VHzs5//b27u/cGb707IsR9/RsNY7gRoLRzFVt3sSUSVPMiXRMLVEI3KeU0XWjKgAaIgVBzDTuLvnwpr/5FJ5u70Lrw6zuuQfcHh5UONQnj8g5RpEyZayCH2IyAzUlNEeUDbx3hJRS9jU7QiAUsTGqZkVAzSqm7Cg4MsOsYqoc2Ht2nodRsaAkpW/RzEpejZmAupKzYkpYZoNFMBDvSEXUkB0ZEDAEYBCBwVTBxCSKUfZompO4WXvroN5fKltwPt29kzcnGkXTwEpJJeYIqMGHHEcHACYi5lQwjjPgHHtAROeByKHFLDbiSAJt4yMEFYyDpX6dLnSS4poqfM0legNPJuyXWswFA/Q+h9il+HORF3RJ+dj7/v/iz6XgNOk5dmlSu/jcJx/4fhnnyR+89KldbtbFL/6x15Z/e2LwKlE6RdpCuBCx3udXu3j/J+ASTqki9q/rWCVHGadvgFhma0VK0CI2oWEJSiDEqemK1GyyvqBaifhFzf14evzgrde/evTO7XkzX7bzj3z0o1cPbwpVQgtARF8BYFJj54Ovum2XujQMnSfK0RC0W52hYxFggNnuwpCaWZPj1rczdfb6W2//8PM/PvSSu7V3YiJ9HLj2yGRMguxnc8s6DsNmjC5UPfm9tqkqCnMOOHvvnYddlE3coG9TptPztPCqOfdxdMiXCM8EFF2oYxOxYkAAWKQKKMIGKpiqFEYHn/BlFwtlU0JQ2bdFainvboxsUEQhM1OcEqOsSJEqWmAvgqnEkouzrITXcKka4hLpU04YNS350xcH1pDJwCRHZpdVSsSSSAYgYpKcYFIrC9FWnKcCiMXbVYAPgAn4BQLEkuyMomaGxEQXOTglmAnMCEn1Ik4dwDEXeYrZF96MiFQFEZlJRJCm/kqDIk4Bs9MSbFN8awaqJe7achG/SuorKACqTmxm4adKYnchlcBAaQoru4xyVzMmKuexijFzzpHIIU1MXrl4mFkkm4FmsQlZEkKeFLMiIRcQTwGNm7AgVwlsXIGzgltniZpMM6AHK0IfFH5ZQcEwSUYmBCzLh0hUigRUlYmJyxWpSNMTfqqsRE
ImVBURAhOVwkMxu2nyiSSSTQUZoch5AGUUh4DT9wKTnEpVASIYKqghsYiaKBICIjsnkqE43RAMgHxAKGhSOVJkJiXILKfEzpWxIREXybjEsDgOKY2TCI/FKsgqUu5RKWdiKqlbIlruRsxcbI/TeVykKAQkZJqOi+WsBsQlKI2NVFJi59ijqVze3iapaIyDkqnokIV9tY1r9HU2IEdS4aAD5+wdKiSwlGWrMCYZ+hwFAiADm3cgRGqw1YFcXflrZE3jKmY3xjVRm9EjcT+Aq56a+cMruzdSs4PU5+1Zon67uZtpow2cbuKYZ7YWldh3J97tzXwzdutNN2bgarnf7l3bdA/YxDszHZEEdWOeXO1kVKRlZbswmrINlrDx83Yv9tumWrbNgmFDFFfrs0CSV48jRM+YGEeibc6iVJGnZOvzQcFFUGZa9d2ew0XlAI0MTTH4dht1G6MmCdUiJiMbCRU0aIzOOzVzro25S5LqZs45g1lbpzFz8nPEmaA/6wTReeO2ClRxVhhWZ2mMs91d75iDjXkVNXIdvLtSL3YBEvmAFSNspT/1zUI0RokjQTIOzVLrPcZm2dZMNebsnPd13XePQUaPY+rTGLqdnUU2iSYYfNSOrCavqD2yx1G1T2tI6FrJEuZLrkQQt6tu8+0HdaMSz6/XN+a7N440n2Y1Oe27u8Fh2x4mq8wdJIXgZkCeqJLiy8ypgNqmStyiZaAEhAgMZoSqOqgk02imRMzeMdUK3tBdNENdMsKIpFSEdTWbChHBNJNF0FTuggjoANVENcUUVXrQjGaaBnKBrJKUwDGx06l5Qcg5oMl6s+fo6HR84dbu+fa7P/hDH0De/YPPvbt/7YWDw5df++IX/aJyc/jEh1/ZrXjz9v2nXti5eWX5xa/90g996uYQT5XdgPTqi8//w1/57R/4gZ+52a6/9C9/Wd5d/9p/8Z/fsvN/9Btv7Fzf3z48+daX/2h7cvDwre98e5a2Fs9MaXfZXWmkBpP6p372R7/5lW9Wc66f2XklVb/6T//Gpnu7d/7Tn/j+P/zOW4fPPjfcg9t3Zx//xH/wr37/tz/w0cNhM1Rhido5jpWz1Ln7QwZXff4P33Z48+4bj2988pUf+dmf//av/9L56Ykp3bh+ELepXc5M+rt3jzkwKImIsUfnXDBG0oRHd1dctU8/PRtrXH7gGR6Ox+G42tufe3fS3znKd7dHwc8jXc13Tu5m52S2G26i76/l9e67m/Spz3z89nePqhFjjOePH8/n+yJ69OghVdbr2b279//j//Q//s2//Q+onbXh5uO89+234o2XfmB8683bx3dOj8Zw3XQhN15o3nj0gBb10d17z99a6pzO1qf78/n1g+rBsN6Mx3rWf+wjhy50cb58fL8joe2ZrE775c4+Ze1Trx52Zosha0TJeajS1usmdvHf+5/+T772hc998OkXPvO8u7J7vWTiMiCQ4aJKORExIezszFNM7cwZWkpmpgyIBr4KBKY4rQtN3DEoEHoPz7+y994793/657//tS++t2yak/MBA52vesm6t2zryiEaOASTNMjbbx/dunL9zubRrWd3FosZRFjFuLvbZEnOkU3FK9PwtViyifm1Nx7t7N8UvS8sgDiMAxOvMW2Z9g9fWpp13e3ZPjtr+u0AgbaWq8U+cSuGVrnbj+/s7V8lwarStnEMTMHXC+yGoVwFzGyQnCPNWWJmIjOWnLnUW4kCqiRRlVD5bT/E9baBGafFuH7kK0opH9zYPTk6P7t3m1Wv37x+uF/3b9/ZuXJdxK/OV8TknRONAKAicUzYBGNcZRe3eT9unXdDP0r2Ga0iunXzmbsPTmbNvretofPNIo09ILN3WZKSsWfHuElplBQAAgB7ByKQC4WoSKiS1bgYVdnxkHIELYN/EbXghpyauk5ZNI9IaqqsOm+aEHyKg2Qt4wBBGGL2vgq+OdusB0kyjVah8hU5YrCyqIOEzrEh55xRMjKJKZXlNg9JUkqpaoKgAaiCMlI2NbEpBZEpmjgXHDUiAyM571OORU9H70nBOUYOaphlBAKxUUwdVcCByXIC0CAIt++85StnKCJSOe9qH0Wi6DoN3/fhj33vix8IYrPaZ/Pnm3Woq5ijqjrvwMAU0pASyiBA1Wwct23lHLmYzLI1dRVCs1qdPXvj6a470Rh5tvfu3TvPHt4UBURGCJq5nc36dWSeNe1ytX5cBWqbHXJ+vT6vm2oc09iPY0yCgNHALGUTjR5pMV80tVvnngJHzVlyFB8THt8/qbndOThMud+s4958L3WxDT540HEkC865TGRgjimnNAt1ymmqvmVCsGzZDMXIVy1RSklMNacU/MRTyLkpQ+5SyRc2A+9d1ThxKew1zeEsWph7yyZb0mHwwVeMlk0WIWQZLakQROlVsqiXNIKJGsYsoW1itMGEmTwTGM3qGsZBUyQ19p5AM5CgEzJEReIh5UCaWMIspD5mMQQahoxKUYEdgfNV25rzKdjIVi2WDiVLns2tts0HX/joT/3gL1zfewEji6ScJfaDSZ/7UYauPz9PY9efr3K3RZPUjwwmIUmKlkbHOG42LrCOJjkBKHlUFYDkEdsqKJIaRBGBEWJuXP89V3GvC6+dPz4RGMIOhion9T6QoaQO2SOLxkRqnfRqqxrN8/Vl+MD64d3gt6ISNayH8SyZRkmj355ux/XWohCyiSRbQwXZkAQZxnloua6oqU7i9sxGdx6v3nQvf/T51fD4eG2uXRwPWeqFUJW1q1qbzSnUlFQyQlwTCiKwpQRgRM4AkHHeVI7AMaOqifUIWwUDMgB6EmljBmhAYFDWeUNwgIqAEhMZmikZKWBWBcSLQTd6xDL6JwRBTTkhQU5G7JBAETbjSGW2K0YIKeZImFPaubWUK1fqg4N6NneEpLm9UXWr3aNH21FDNhhzNFXg5FmAMUpyntWTdttakQQJUataaaAs3oeZJbUINE8KmLU7PrWcQ90QTCMinGhWAESdXGBl3eIiQRcnWeSPKUQFsHnS8vXECGYTWIOX/wyXNhSEy6xWgH8N+Sk9UE9+coFcTAHVl0nVUyIxXFrXJkLILrZhev9pwyfBaPrRv65GXfxSYSsmKWqqVSp3eNRSpYaqF4G48L63wWJMKxSSMZGaoUEhTAEIQKd9VrAHLCVdmFXPHj9466tfOn9wp26bW1duPfPCCy995Hvq2TIEt3/1gNVAE6iO67XnABjH1KPKOKpTylmijvN2XodAzmfFqm7EIjA17Sw6uHb9Ru3817/8tU//0E+080WqUsVmK5BtTmOSoR9N63Zeh5oQD5Z7YbtuFzuvHB62mLfd+vTs6PT+47PH59Q454h3d4/vn5jh08++dO/8LTEgz7m4nya5sHBpeHEu0QTXEQJSCWYs8gpOyNg0ty95z0XWm7A0uIDXpnOBShp1EfzMhICsWHqmqJXJZkTICMTkLhInaBLziImLToSmUsSmIiu8X1wsR5TZE1LpdIJiUwMphqCCyhEAlGQf5ychBilLyjk7dhNShgSgE9OBWAJ9EBCKtiE6ubRo+lwiuoxbR0SVAh1hc
ZyBGqKp5LIrRc2gQEOlyF2nq8uAiZBI8lRVQtM5Z6oCiI5ZLLMRE4OCmRasGNBMFZHLGU7MIrmUiDE7M7OciKnY6ZgdTJorTEpEwXNKeashFvIICQBEBKEATUaMJeoaTTnT0jdEiA4Ns5HfdDoOIxGDASApqOSMQEZmBszBNBY+iwkBaIIdteTNTQFFTAUHywRI5NCygpUQOyICA0ImYgMCVbXE5IpLrsQh0TR6mDx9ojmXSDJVJqdWlvdgOi5EaKbATFTCukAFmZm5BEcVOK0ghCJamnkNVET40gVvOplWL+o+RePFTRcBaOpxwzItNiIiKJFM5JxXEclS0CcEACzTmoKDXWhnCEQkIo64fICiACK7gGCSBUu5OABcSkXbPmvWnJNlzGkcdWQySAFFMmGoqpSHEBB4EBuNEnmoyDVVqLEiB7MApAPB6LTEJVGtuUKryTJsmZNK3VY3+igVK8NO0+zOaxrxyLSHuI0uUetCNW+GZaODbYc0HCEIa/boDCqqAonVvqa67uOm9Y1D6rbdnP2yRdR4slmH6CFJsuMQdpxvcu5G7S2P2/jIch7PMDQzoCzSk4FzlTNTMTVh0SHGbZ+z2LBJkMSTRxUEQ+aYJLfOKjeOKQR2NadiTxX1BITmGcZB65pUrar8GDtAFO2TWFYk5JohDv3JOgaek1I8HxJlqqkb+70dYVeN3X2uxC9mCcXN9hILcSbtKxCLMaWxFp576tN5jIkCVGTU1mMyAWxCK3HWVItAFYMLAqxKAARmm/O5iCMcAnKGPA7dGgCUDReLg+ONIfnTzf0KctBFTskZVNRmYDXuozSLilDy+vzwVgDa+Fm9OjuP3ZtddElTu1dXoARuRr4XTdYJ+ywjqzMuXYbKxAiazIwUrCMEQAWj0vYkU9YXGLLz3oyQKsOgJW8e1BAMVCFzuW3qYKaGQSEgV0VNLzcEhIJcm1gEMJOEZIiSc4Q8oimBUuQMSJVXEyBnAEx0seYxPRM+9/vfSo/orYdn/9H/+uc++8u/1lf1/gevHZ88fvzV4xx5DsNPfvqF9eP1008dvtdT78av3t6cP3oErv75P/e98+jfHYbnXqSju82ms5/8vu97/Y++9MpTu4/vPzobuzjgZz5y9fC6vXkbzk5Pbrz89De/cXT3wSZU7qkPH778Pd/zh/e/vB7TW+9+8979+zKkl19YXG3b9enDFz/10um2e+07t/MZPPejh998902X/U71wg996kdPxtc+94Uv/Zmf+8Wf/8U/91v/9B9/8INX37t924hOjldjf//lp5fNbHZwMD95893u5MgMxgEAZynDw+Mz0FEZY59ZaH9/rpjZOx8sJZl5mFEVUV5+6ep6WK/uHdcAB7u8U6378wfH4+r5739FTk83x0ejSTWXCvXh8Xf2Xzo4eOFD7z1a7e1cuXt8/Ojho1l15XTTD+JZa4Bqu47kDbNdY/7dv/V/P37zZBzs+//sD7z17vDWf/9rXz36jkHn9+/p6QZnw4/9/I/+7f/q//X0Sy+7OV+5tbe+M/7Gb9xupMmp++qX717/4Eyz9hs5ebRhrrN4xy7HdTBPRCyEkhsOGazimioes5ydnFazo7u9m82v/b2//yt/7a/+9OHBsqxVSbShH966f/LizWtVTc6haC7zZEI2UfJOJW7juFjMp2qOggxfrFfmnIEYFYgxjnE+a/OQSIcvfemrj9b8Yz/2yZzHpq7EjJEkm2S98+iEDWue3TxYPnh0yi4Y4rsP+yv7s8++9foPvfhyFvVuKt5VVWIEUUQz1GdfvN7szGA1tHUwNtBcORPtd5YHc+JH994a43HVNsls6LZMLtn26nzn9PThwcFOyqmt5sMYdcwew2pzupjPBIwwbtd9uQpE1XmnlpGd92Hst0TGnhEMBcgBOeTgRERNkZEdPn31+lvvvB5Yn3nq8P7rj11Muhln8zm2Ptfj2ep4Ma/fuP22qr91dWd9th1zJIbgneS8s7PsUzTzH3z5Y+NGzGB9vprXLRMkDoFxM2wZFeR8z2PsV76dE7MZarKclZ2bL3aabtSknpwZjH0PEugiBjLmrDkG5xxiGT7GnEUVCOoQxiF6ZgBEdlmAFYKvHLtlcsE58KGq6+481k01DIOBERcpOq+365QyXNytHOGsrvvYEQERg5pnZqIxJs3iHFfeJVPvXRqySvaEaLk8xS9s5IgGnggRDQLYvHX1pjuetzVMA9HMDJJHGaPEiAjd0JGTLEogdaiZHTJnkTF33hiEDej2/XcqJiAz0GxaBrjsXSd269qV733+2YW4YZ37mDdDV9UhhHq9WokaIlehGsfes+UswTvjupcIiJKlckFMgPJip6lbf+/xsYmtj8+7o03Oo3ADjhc7u0PCrWbVqKgxSoZzYxmGHoF9qIgwpWgihiAG0cwBBq7W3Vk7r0isCWHTbYaxr6sq5gxkLdNwdh7Yhzaw05ht7GJdV/244crvtEsGc+iQsB97RXAcxtgFV6NxigOoAgMSO/JgluLoqVYzLjA6ukuqaHveM4p3WAcytbr1gMZelzf2aeFHh9Jnkmz9iDEHZB1iUhoHjZAdY7PwJJApqzMAI0tICI4xACN4MEJShgqMh9F7nIHEnERZQCk4BGA03ziETIQU1TQbeVElwoqDJFXD4l9s50GZXWiVTBxh7cQ5Npsx7WZ7/uoLv/jn/qNrex9RYrPsQ606cuUcN9F1Vje+XagM+zey5GSGOWUG6Lfb7fm5Dqu83ZivmCxGQTWVEdDM+lJlCswI6B2XVhtAVAUku97qZkyyPV6LKF1VBhizBwrs0pidnyuNqqOKjjIsWodu+fhxy1a9+cZX652rI2Pe3VVUz0gg67GjnB1RFTxBP9uD3eu82sTje52XRjHu7VzdP9yb52az6c5OTt98fVvNZsvdJfnx7EzcZu/Odwfc0t6uHVzl+S6ZyWolm27Q5DRSisKeHKMSZjXP3HhgMu9dHiWbBE85Wyo3/6wqUGYpKmCmnhyhgSghoaqpFTs8GJgYEyGQlm4gsOCcqjhmMAUFZGQqzny0mBVBE6pATkZATTAikJzb2U69WO5ev8bXbzR17bJSMhWuw65WQpRMIZoKO2Twcyey4toBGdRzcyHMmgCCsVejxK5SXXhwgNbltDVLYv3GMlhEBR7GCcr7Y+pOgYTwUv65uE4uRZEnBNGlLPNEbLn8tws95/04UHGI2ftAnyeqzSXFM02HLlvVbHrZ9AZ2+QkXWhNc4kITw1Qe15OU8/8PNl0CTAiXKIxdsE7TVhWF6EIkg1KLBXr5QygCwMWWX1jLSE0uJDQowVUwNawTTNKFmimgAXJ3fv7alz7/8Paby/ns6Vu3Dm889f3f96m9w70Hd48c4sFuM/Nx3IwpJu+bw/3DvusT4OHy5vnZUdsQKW/HbrGzpzGrWhWaGCOkntDqWZsNeF5duXWwu7fz+PHDmtCx0ySOQCQRmwMWBE157Ic8RrAMVV053p4dHT98eLi3PD4+nu/Ws2Vjj0/Q3PZk4+XhQVvjuLpz55uAjBCyZCO+hLDMLna1XbgBLrKEzFRVsWTH
qCGIYGlNK8stpeCLLhTIi4U5wNKwTjixOJfkmJnQ5UfaxdgJS89aSfMpJ+hFNpFzSGxTsrUZiJkSoiFMZiUABFKwEvszxXIDSimmQlTQQhGIZkIuow4DKx60XOJ+ShHbBREFAEgoqoVPmWIzyl9EgCQiU5wXlXYtRQITKXP7sieRSHKalEYiNQMu/BohQNk8QEUk51zJusmSp3T2yZmGxYmBaFmlJBOJyEXfGeSiaU5mQkSEwtGUYQRSWclEJM4pAiAxYQG+piAuQkBV4Uk0NCIunpLCgiIYOVbNpaQMi0kR2AHrRJIJeUwim36NmNF8uYKJGQsgI2KaRIWJxdQRqomaMjI7p2XVbgoYQhUpKFZB9hFL+i0Sc6H/VDKAARogKgAw6YWaDIhSMo+IDErBGQCic05yIiYml3NGRNEMJmCoqoBKxFkEkdSmi0BMy67HElJefM0loFAy+6pYdJFJVIp/06A4D8lUiCaFUaCo0JSzIKiBIjOhz6qlcBCZFAwADYSQVTKiA0SVhIApZSIkFDXBIpa5kiLPkvPFLZou9f5JKur7gYwIKCkZoQMFVdOYJQ9Z2UnlnMKZ5i2iprSWvLGcF4HIEzM5kizKyJ5M87Z1jPJ2aA9HUUUx9gKzKO3o67pezmd7lFnkfNaiaH+2ebzu486S+sFms73l8EiGs5w1cDXztZH2ut7mLQS/s2hcyC5wGqwfe/aBJHCyMaeT2FcJKfatrxjJ2MxZNjpbdaZ56RmJY9xGUJVcO6+QZ0FjHrNIGkbmhoViL9HMg1PGmMYepHEEntfbHkE98/n6zIcqJ0P28wriIClpzMrE49iBjsExGQA5NZKsAF4jdWJDHzw6obrbbMBMLDXYVq7GMWWWPp3s7KPx2M59ghMbI1FT+QLWjc7lPA59149JVptusdifL2rnnYrzRs7PgfZqnnkMSsE558lrHiiPDkdweXV0f1hvW9p3AGaQBTyb9MmpbaTzrmURA+c8IEjOkQ0CIjRcL3f7vD5fb+987ejFF3Z3lrPVEMl6AudSF/bcCJBFurgSHNPYYzUjNSYHVjFWhmOWfpI8FZV6R05VisU7q2BR7wGRKmRPGJBYTA0ALSsIA05irg6qSdNIZgobxBqlceTECExMo5hMdYOWAazUYmYRQFYFTQMDIAWgCoMxWMm4VUMGMNDLi+G5lz/8e7ffONjb/frnb1fh1nF68D/6hR/+m//Zbx7fjxXl67J/fnT/6OGKfWqu79y+e4xVs3/4dILqs199sz/brNjvH1xN9vov/+Fvfc/LT7/0/GIb+fe+fPvq9aaazd5588HxnbuSb711+2jv+jwJ7y6Xizb09x595dE/fWo2f/3xw52955/abwXc/nJ++527q75v3lythv7w2jJb/89/85/cbG70afa5r/zmtXk66t/91KevfeONf/GR566zHWtvOwt7+ZMf/NbvjasVB28np1/HL9+x83qxhydHPUOdx8EM61CZjgyOHTbtwfzm03F4vHu4d3hl8d6b39WxnzfeLa8kp8Hz4aLlMbbqYnf/+tMH/XGk4fGwfZyG00e3z6/dbOqKXZbh3iPd5IORTr8rPPNX9hanZ2c7ezu448ejs93FwTB60f7wqf3Ht9dvvfsgxuSq6nMPv/XqT/zZza+98+bb3c3roOM26bjpN7/627/1gedfvv+o22xOF/Prm8cCOptXBBgXyyrVIG2z2SqhXZmHzXp1eGW2e+3g7tvHIbQpbZJ01dLt79S7+9XjoxWHxcH15dnZdvkU3Xr1U5/5+IcN7jjAs36cN8EFwtE9dXAlDbmuwnrTt7MWxNSAHYuoZm3rCnmkkrgIUFoz8WJ0Qsiqtl51wQcByLn63Bdee+mpG5/88CuPz/o7d1bXrzaSk2cSdTlJTrLTzPZ3d/7b3/m9oXd1a0dH653d5ujorRdufPQTNz+wXY++8pJSqL2IMZBkJSJJ0aLUTJuz8z0xBj3Lq/3llRDwmec/8e7tN+/f/c580SatEYJqnxibqiWRoJYYwUhy1qyGUZXFh2v7yxgHzGm5mG9XE1U0UcdZxaIkYeRpkmBqiMwIoIS+bltVPl13Y+zTkELtXYUSvGsqEri6cwCJj06P9w4Wh/vXzk672G2Nq3Zx2HcJUhSlOszyeJZE2IDNHFEH2+wqqiuanGLoHec+I7s+bWqErOQojHETiJEw+IAggUKFPkHaaevtGI2w9mGbOmTq+k3DgRAL8u4cWzJD88SaxMQ8o5oWQ3hO2QRq9iKyrKrd5XzM2sc4a6rVVrKYIGQVFsPgoowK6hzbRZtpFjGDlNQ5NtXgXO2rLJZFy/CSiNTAByeqxORCECt5ghlBRdU7hwbOMOWMuB3Hvqmd5jWiA0JJZmjIBEhMHomzgg8LiB1pEMQUE3gJ3lscMtVRwnqzHlCUoEaN40ASs2oW47qqyf/4Zz79/O4VGPw45KOjDVcOQDl2kvokBkiqOQ59H7t23ubVKscxDkOow9ANWJuCScLZfGYmbme/gcV2i+N6ffNwlxyqRWAhBYtmWWehgRzP4qaZV9RHIAeIMacQKsn9fLm4sdfcuf+4O4mnZ5t6vti5dWixG8434Cp1Qy+ZwC0Ws4zQjePu4f5s2WIAzOAapx5wTnVb9Tp4M197UaCi6qGR44sZJCiAqqqiSFYzAu+5WfedJAEC73kcc7kKmBxp8fZnJSWgEDyyCfOQKXiaL4PISjslohyh34zbgWKn60TOUTi1+V612Kt8wG2MsV9ngT5lBU0mqeRhKqBoU1HMucTHiIUEqOjEYghY1c7VAUxmFAho9GErwpHOz0+dOmByFYOoITChWHKh8suZb1sHVrPf97g7zH76x/7Ssy/9YISWzaB1psZBgAzBQu25qcd+y8CWBQnJVUbOBOaI82Grsdvce4TbNeYuSR6TqIqm0UEiNCnmjrJ6z5RTZqIsmrLMAMAUutVbq643zfOd0WY2IlfIZIZb5Kw5xphzhBGG/cN9bp4be0958e5Xv5VZqqfVLXw25Rh8BVnQ2LBOOMPmOtW1zZeL/Z39zSqtzs9HPl+NmZvdxV4rm+7ee+dDd/6BF/nqNbe9s4WznWVV+5puXK+FhnanHhLt1zrfDJXEHnAjAIjekwAgYvDkUBkxMNZt3fcDocaYYxYEZKYy1k85ETsAjCkTUAgupoyABihiRVXhaWHfeFIOFA08GKKxw5yFmckhqDj0amrAcUiWNfiQc2IPiFCHmup657ln+PBQQ2g8NQaBcEDrBlmfj3lQTSIWGUjNAIhdVTEnS+rZN8zZVLRqG2FEFKfMwI4UGqrr2kYP52mMgkAipdyNL3Wci2n+Rbb0hb3rff6z99FB0w+mtGp90p9zqQgZXHrDYFIPEN4HA12QPJe5NJdvje+jfi65n0maeT+hdDGZn5SdC/sTXrjL4OKbXApaZZZuF1t7wY1NZNP7GCFDK932gBf+qpK9VH5RAcuLqBSTF8HQ7IJ7mjaAyhys1E6hqmZAIGIY4puvf+O73/xqy+Fjr3xktlgw1ThbrHrZ3n28v5wH5rTZdH1GV83bduh1GIa
UMjFtVhuVTMRqVnkHaoKUMDuQ+aKZzZqYbYjZgXbbURPt7uy99tYb7915sHd47e5bj842Q+pzHwfwDgld8AhoIv12bTHVwTu07nz41v3HglxvhrE7N3IG2qfusDnYXVbv3X4IOK9mNTsrocHlAJV9iIZmhkSaAUDVTFRYStu7TDZ+BSLTiyCeohAB4cXRxonCmc4uLgcIcVLrDMxKrzkCEeZLom0KMqISYATAVqghInaO2BugmmGZYBswoaoSu/dRbFZSeFSzWgZkJi41X4QooheYmptOI3uC2BAAEasBgkqWonWBKTIxsj2JOZ/CYxAKdwYAppLxIi7DtKSeCSICkoooKFKRCACA0ERzeb0aGKBxwYrMtAAvJoiApTGyBOqYEDEQlSgnRC7eOWIq3skpH2oKP5quMSanWm76JVUTVHRCfgxFMjsHF9k3zCWWGaY0HJvaAEzLr2DJHUPAqThe0Jk6S2aowIyIoIPmDAOSgqhqBrDiFCvKFxE757VwRkYIpWa7rOUWR4srKLGpMDkE0yyllE5VQUBKp8mUyilMzqzkrFMRcS+OEZYDYaaOnRmYiqIYoklWBQIyMVMjYgAwsXIWEREBik4JUjQFaZlmnSyKE3tFjB6AJEVmb1ak55INhVRcgUiSMzMVxhHRCFFREblYOwEAkafjNemqBAQAqIoGCoTOe1NFQERDBC7J3CKFNVPNJebJOy8ik9PzUipKKdfsVSFFDX4u0InmmFPWMAuzBtwgSayTfITsvDOIAqCzJeccAR2wF3VRTVIy7FkBGTq5R875yq3WW+RbANfQamjmPJv7QWwYU46jrTp3th2sSlf7oeu7IYqMouv1sKyrxjnQ6LwLIahiztnMVCUNMaZthXtz2s9G7BczCzlHYN7kKJiXi8YDjWltUNe1prTtupGY3LxpXD1uh5jFln5nZ1dyNKDjs9RtdUiYksycA7iIRt4mBEzGli1wrutgyjFlSZZiJvaELqcYWi+SXPASU840RoqmjIRZII2S0SxI1iGWvmH1DRPZfLnEoUcIzrHlPOZt1cw0pto7Rum226ry2+25Z6cQV/0ZWBXqmQAOybJZWy8DuVEDV4scR7UVVu2YQ4KoFhmMcqe26bcn/SZxBW0ILjQi2QU/9L1AHvPau0Uy5xS9qxBBUjLpOVRA9vDs7OhsGM0wHPDdWXW/Y27Rqcpp7aA7PdfQWKhz36vLyFIhq26SIpgiBrEsYGqsyI6rEu1iAmCsJUHfkgETouPAgEacDcsjwASwLPAqABIYgYGapNTH1CG44JeGDbnKQBGUiAVKhBsRoKKKohYrQVqTc2KJ8mBuJikhOENldprVsyNyl+Oit99699XPXFseLtR1z37iuVt06zf/v78p5zBfLq89dbBe3fNLrm7t6NwPTsTj6fFmdviUpHD37oNXP/SUO16//Y3b165ek32LunNvtYkGT33i+556+cp3vvztr61Xf/EX/+qnPvJzf+Nv/G/uPHyQmDbbuD7eXLl2QNm/fXL/3/yzP/f53/r84Y1D3J3ldf5Tf/nP/8Yv/8s33n307NOHrjLvzr7/h56TM3nta3/0mR/+9O//+r9qnc0r/43XH129dvOTP/Yn/vkv/faVp29tH0DW2e7u4RtvrepZ29oovn5zWO1fOZzR7uo0z5zf2bvx6J1vaRZoPIAeP3x3tqi32x5kWzl39blnH955zFH6VZzVtZpKtAfHw8Gtw1OXbHdxckw1P79/9QMnp7dTTrE7v3Zr9uD2w3wmfrG/d+3Wyx98pYHNP/ulX6lqbJbWr+LD0/cAY7WAxyfnQ7b1aAf7y+3p9u1vfSGlzXe//MUrz7r5TX79D9e52vvoZ376W29+c75Mvtt2w9mHX/3k59+5uwzzZgfOjjeh5Twq7df10weVs+MHJ+DCw5U5hvra8vjxyc2rNyXj+eoI1qJ42vW9GuxfbepZdf3a1f35zuE826b6yu3HT9066DZDWwcfaMcVeSQ1swpM2DkzVSj5WUYEsxDKDOFiEHsxsCRARTIkRwrSziry9smPfzg4//p3322Wi1/9wy/+hT/5I1cXeHra1W1Y7szuPXrAHHATd57ZTc34woeubs63EeD5l1/oh+FkE6/tz09Xmya4umF0pUTdzOAic87ms5027UQ4DzYTZYv2zndfG8ezegeolZ1qd7vaOs87bgFgV/aW58crFQafu01HCLNFHbHeboeA86GPjKiUzabsTULMktkxMqlJWR3PIhPqL2IgOSVGP6a+ret23p7EjbI6548er8zIBZ7tHWruZtIww8OHZ9tus7dfLXZ2T88f93kMTb3uhioTu3boRgITgLHX+WwWo2QpQc+aUgxQE9Q9dMu9W2Tb7vFD6Deq2ddV8G7sezKrqmZnb8e2W0JsTAHYWRnAUQY0LPF/4shLWTqbsOGC4LJIHmJ2FCAEQhMxVG6bWb8VQ+zWKdSERVomqHwllpidyLSEGDV7JockJqbqiBCgboJHzJJEFRidD1mLaShWtWcDGbOaOgbJkYiJnHeVgiQRBXAuMKqYBF6oimEpDREBYMcpp5hG55x3NMQtE7Dz69QFzzlny8lMXGWR08PuIdFYBY5jJ5pD7ZOxiO20O3/iU59+IVwZzvPB3vJ885ADqemYxiwsoGKJjMa+H8ZeZDx6cHJyfLw5Pa3rqr56beYrz2bkk1kcMxuNQ45MV596+dFbr591Q6pCHXC1XSGGbLK7bFardb8+vXL9yvlms1jsn25Pswo5suCu7F1ZbXsOvLvT4ig7TZMJmNIm95Vz58crZgqhzcZR0v7O3FfBCNhzt+kJAU1jynuHBzGOIQSXQAWzZgUNjtWUOEhOCATsiC3L6ADVMrFPkofxLJsaYnAECOynhwEDlNp5UwOgNFqOqVmGuBokeE48muV1Sh135ylmMOGUoxGgBwFI6NRo2yUUJe/Qas2KigQKIEiARJotKnSlh8HXUwEwoYE6ZlAZe7FQITvROF/Om/nCK42n57t7aIpDtogAKRGS5DQDHuPQ8sJ5njlcAB4w/fAP/uSHPvaziVpIwmiqKaVkIgySc2ZAlegQEJ0x+Sq4qhaZ1vQ5uBxbG7zVZxWPUbrYDwban68JxXmfDbIYq8ZhEEJwKpLZEZCg2JUW/TUvD9bvrO6vdZv4GYdtTClUzKBgBOa9I0dbF/LpWrtmf282j8fi+jD23bkMsxfazjYzX1lLpDraODjZuVrvPHel1Zpwl7AK2849vJN1Y7B13qH43YXrjvDRPbc+o1nt4ma+6TE0vq7BKgV051tKFgIBsiEWusMrSgYsq8UE4NgrQFYsdtISXEIIiqAiTCwl8kNNzRxTzqaWcIIbptZnRHSEjGgmjpyoEDrD4kYCszKHNFNEsxwTgLLDBh04IQZyxpVD4CpU5L3MZn1g5BQdeRMxW0k+iWmVdNNlFKh8AyCDjESBPKaYsKq5cc2yDkQe1PmsGNAJq5PYOZ9R/PhwTKNoHBkCktV14KZOSS51nAuVBi40mYsmeAOEJ4xDEZVwwnAmy84FE/TH3mv6D09ApCma+GJaPvnCLu
qaqoFdNQOSQoUjgImbGmkhUN0cvBWVOfBgy4HXLJIg2go/ZAPbq6NM24uLpYl4vBcgYlEQmVAwNGABRxYXZyY3523LgK1SirxjHGTUlbzQpCVXVA6ofVZRsWVtw4rgzczHEoenb08s986rdc1crYgxmYOCbvnZmgTjkXWtUNstMySNyAqpJDshT7aS3SX11BpuawJZIxRka1WJD4arsNYXn2kRdHhBQHR857rym3Tc3B5RS7bls7z45TSYTg2LNzOZeSRzJz3oW6MSIVkTF162tROTm8gVWIpLHvc0rp6vrpn39jkcYXXr0TnjseFzMreAx1HTw6n+7rdy83MVlP6eBwDs2sCAlgAUBmPjgZxxRcU1WH15GDiyrO4SKJxpyHaxjX12V7TuXah6f+eLOYxdPTpiher62/jDlFYgQ1M0FHYmCEHEKKuUyxKGrEjMzRgJNelV4EdIrfQQS0EHwIlEGUd1fxOAogObTNVS+3F9fDyE7DAcW4RufFqBtKl6EvyGSiJVtu0IcpbZQUPGMToJ0pqAMkwbIdQAUq6lSrWdWcneimkdU2JZmsHUgISKKWx+SINEkC80dNCk77MYydXFwM65G4nd15DrxHj4iBJcMoWRgaMjYJpKmglCLGKIqaRYxpv2swiUJ7SWXPC+37HWxCPfZoDn4o/OBeHPpLehHsE6InOQifqTT7l3iGD+0euaNFflrB2XFMUzTw7qWmV3t2HLpzPdkz3xsi7hSf/SHtYZb9i+wfajur2TPJa49I7XK6dx9uijZRBDPzvjk6XNxZzI+Oj8Lx2dGtM+8qrwZ57HNZX10s6tnBfKmltC0entSbzWCE50+3ueShS5aiIzuYz+vlQRG+Pt/klI7OjuoKJY8llsXpsmmb683mL7753e9/74eUogXXztpUUl2TxtRv13/6h999/8G1JFQiZrp8cvEH/+KPj5ZL0UyB6tCKSBVc5YL3rmkaIFfVzdXled9du1AvDg5B0Xk+Pj7yjL6qlCsFxlyY2VRVShoVzETGojokicUWTbXeXkeFRyvqB1DaZx+bEbKB7AbKM1VOAQAJJsM34BSKRI7QO66ZHSAQ4mQtnE7Ks/qlXVDVFBg8yYk2jTLdY2+TFMg20T27sB1FBCJWZNq9NILta+535SRT5tH0arZ73/0BTGdcxXbHxjxtd4oKoeEu5GYHmBGRqQLqREAxsQmoybPrZR+wzmaTbKe6e5PpamE1VVFVZecBYEosMpsW/2hqKgoggDjFG01bZ2WaQyArKCCVnBy5UgR2Xw3tSsFwZ7vbfVuqYEhEYskUJ85lmvaZwsTRgxkx6eRxA1YthFPOAhK6yZ5Jz1g/MAQ2taninYl2s0VEsGnTAHcGPTMARUQinjrUpho1cmRFHfva1YwIhIaUgLPC0dEcQRh54gYBJtXP7TKvYBfzbGbkPEyF9SLMBAaGpKI7PYsJAcxUpUy+LWJnaiIFiSc5zKyYZmJCINVdkJmUTMQTerOLLEVWUJOMzM4HKaICRA5ACaerRnUaa4TeORGBHUFGDI5IcsmIHhGAJvckMrEBSBEiR4imGZkQyMBscjkAMJECainOO0OZwrdwijxkN8VmSMmIIDAhgUjEKlnFFMHUmP10vifCrohIyW7a7dAC5Aintj1AQt3fwvcGtPhUDYftSNqBrJFj1237futD6+uAuhn6HsFtRomBo1pAyJnJVQRa136zGYg052xgTU2t822FpjU5VCjzuh7FLFgqJatVAdCSQmGn7FVJKodHxzUjeoBNSqFirgMotVSpZYsZ3ezo4Fbl5x6jaPbeWVl579q54xLKkItoVVWGmraxSGLkymoqzfHyzuc++QvOlzzGys/Pr87v3j5q6nYEYgYkXD+5mHlPVq63F6ftrAK+eXjT4hYkxbzRiJZNxVfhICyXm0ebRTs7mi2uu2vmrqTR+vHekT+Yn91aHqOrZofh+vrp2z9+I1H/NA6PLlYzNAIc8lYt+jmN3VZ80x6LbkeRnsaMZhZh3Io/8JZy6gklJxmoMSQf+w4pmKmoGKgLXNeeS1ZAKOYdcymLZtmPa4ViQM5XB7PmbDaTMZqBryqjCa7E9WqzqCoArLyvm6rvOgPMkOcLVzSPaXQIVgoBoHMi6sghqHPEjO2sGochjqlP5jxVNs6Cv3nzIOeybBf95YaPaNh07GfadNScLOaHY9yoec19LONQLinMVIOhhbYxiwXXUApACRVL6SrCCg49zIlniqq6BS2AsZTBICuWbCNzQKgQ6sBzMzIEAgXL7BgIzIzJ737ksWZglV41i2SiAsxELtRLtLnxYKnLkkrsqkZUxO1+XaC/2MwsvP3Ok8O7J+dvXW/L+Wuv3O2i6eot8duqhjZvtunorT//wz//+p/cef6FQyYgSOQ36w2qxM0l+ZYcDtvt5uoyVMtFqA7m+I1//f958c7NVz/+iYc/vv57/97f/OPf/53tW9et4ydX6c7NW+f9gztnp69/7Rvjo0sSe/0r337hkP/W/+Hfvvzgf1yEk/nR8e/9k//it/7q5++FzRf+/c/+2R/84OKdzZe+8Jn7P/zmWRi++JmX8/nF97/5QHP3b/36z/3w6+++/idPjk796orPV48rh0ez+tbp8fpyaA7aq9zN3KzLxjfueDefm9vc/8F48QhXmytT6tM25oxESKEIKBAYE5laLhICTbd0VWMmJAjschFAnO5UyKSGUwIcM5lJESXk3bwNTEqaXNwikzOZJ/DSkzM1NGLvEEi0KKECKJIgkqljNEerfhuLokFgjJIQStP4pm7v3rzz1oPHR3cOnz69ImShLVYUZXT+UAVKzABDCy6sbRgyGSTJ2DgK2K23dVWVARJfg8bN0+pHSfNXy6985NPO+/vX20+8dpdYbp4eX15un1x1xwe1CxRHFZL79x8fLmbLo4YFLvv49Tffv/3SCy837s7BUZD64om+fT9/6gUGhtnM/+Inbj14b3j7/uO/8leeBwAz5CzvPLp8/dsP/op8+vJtqbB68wcfSMY/+t0fz+30z37/+ua9w8ODxZi3924uVpuuH+NiwV/4q5/7R//w627uQlMPYycqXqrSpYrcyeGNx9ePK27UAKm42iHBECMYGkhK62HozUnwlUS5eXBaNAfsm0X75Om4uVg7woO2Gfq4m9UrIFKRAoClFG+7IEMkJODdGoErXzdABLkkid32en50kIf8+Gq7bNsSZZM0duZMGDx59+jR+b2DAwIHwMyqqFkjcyVmamKavcejowVXgcjZOL7z7pttu6hqJ0sOXSm5R3PsfZc2i3BAkOuqKUVKttp5NHKOHFeiiQpnsVLk5sHieFnXnkvMRZVdeLy6utr0Dim0fLhs+268GtN6TBsoL7SHpqpqofIWEFCDY55K30xUhJlBzTOiWNt4FZEsgxgRi4EBNr5ebzfoK2Ziximr0sRSgdo7MxXDcSxNUymRc9wEF/stz2bO0ZQpqZKMBCExI7N6ZQcERR2Rql2N20fDqjghQpaCKiIjETA5B64qeuTbVz76MUm5u+6bpuG2Oj0+ZAebqy6r+doZUS4KxRg
dgJ8vbuU4PHzy8ObJ6diXcZOwSKgDQgnOGq7Pr88NAkDx3imyIjOHkkdQrV0AwlLANHg/K5aAi3dYslgElvnm2p68e//kzjE6r2FWzGuaIifJhyBDZl+xCymOKlhXYT0OI5ZmccCGq8cPyUuYU8rKsT+qWigxxs1seXvWBKSZ9uM4yDb2xtIswjBaBqNpzq46n837vngOnjBlAUJmx+RStlnbjMMqaprXNRuaCSB0QwzkZnVdrFBFts8tEVUAY+emOiQGJPYOg1FOaShF0CiNualdFaBiygBtU40UC1moXUCrKk/FjZvkCGwUKin1RYtlLURAZK7Ctm5nB7Pm5MwsVOgbpXx5Pw4xFT+UMZAnxqim9VFPyxkHVvXObtbtEZ188Rf/5mx+kvOAFitPimzknXclJ2QEUOc8h6Bqk1KKMG0kJmRG1bi5lJLa+VloQ8pbBAvejV3abLdAzeHzz6dQxe2WFcFMUpnPW8kmUYZu0CLVshFVIgjeS9Y8jmLgXJCUna8MTHKUnFM/KFIzX4BzSfLQdaJKyVY//FHYxrt3byxvHJWq8otZSxz9UMOs0iPBPGziT1bXyYWnVz3OZoxa8lB5rVzl6iMjhqED4MqXPj1pXBsF16sMtJwFroc3ZfPmvOm8v3jxhdPQEmIKHK67ki0iqieAqVE7i/feSpmakYKjUnY+AZ5k0aQIWoXaaitYTE2LVYzgLNRe0cy5XEwB2IhVcCxXD7fL48qpisi8DXhdZsgxio3SEJspIrGrSMERigI5nM0qApNxFOLiOA/Ji2nOjp0xu9lS2zocHltz1Z1fhFRQIA4jGORp9ehZQLwj11RSOSxxuLwuV6u0zcFb2/U8r3LBnLFlBmPFmmZHClhoSgYWZ0jonUc1KWLVszrMZ+LPs3SevRS0M2ShwbTrvTN4Paus36EfP80X4V6WmVSXSQaaXvkZnbT3Ik16APzU0z8EnX7aevbha4M9e/7uIPCnuKEPj/0ZGkV7ZGjSCJ495qckrp2UZBPc8swStdv8NyTExvvP/8IvzVBCPasPDtVgu1lBSiKjr2bL+WlAGsYxjVHN9dt+HBJ5p2KLg3lTBbX2YNnGflxdXG6G6MQtDudjv0m9zJqwPJiNUb7yR1/97huvu5ockVIZ1pscMzB+cHn9xo/vP726GrLS5NpRVdER8L/63T8EQ+8coKsCay5142sXKu/3Ack0KTc4OUzU2sbXwTeVb9u2ni/qWX3jeHFyMA/Bz+aNny18u4hjd3G9RXbB8/mw7kd73NkmMZLRpBoaApLqHgKzZ51lCBP3Q8zm1IDY8fS7TUzAjIS0i47aReHhs7vxPl0KgJEBAHedY7Bnf3ZYGZpOgJEREzs0RZiMWUbkwGySnAxsSrraFZeB4TPRcHeu/9LI+ynD2UQJTSqJiunkOFNV2y+8VZWJdTdZLog0QR8IND0SkABURZlxl/tjViYCCwAQ2TkDkJIBDGlKCzIknDJuwLDsUnRQdSecgYHsSTciN9V4GUzhmzRV/SGhThQJEe4Na6UUA3CIZobkJq+eAQCR7VAfUBXcXXOTbuWnOM+p4g1hkl4MdvzUzrY5tWYAwq42EFBsStthFYFdSpEpGDMqoIqCggMCEzRFNFUVMPZOs3Sps2kY2JRPRGCGTLZrxRNiN+XKlZJhJxkroyOiokLsRLLt/IPGvG8X4Z2t1VVhSsOSksnhZCxABgQsORM6dN4AAMSQCE1UYApvIp4+/V7aJgBSU2JChSnIfboMAMGkIHswKSUhmapNbp/pS5QiSITEmtVAxQRxwtx2W7NEVERFdyFrU9kuMTNxTsn5Wk1KKewYeXetScnEJCaGtgtasTLlZpgKMT8b5aqGCOgcAk1a1S6oaD8j2klFF09+Ujsa+w37XNI6576IFgEETZnbuqTNkEfbdtkxVPMKPS3qSsFHzQg4b2e+qpzzOefWiQ9aUTLhyXs1X7QybNtZ3XeyXLRczRCYPKUIoTpgxxQ3TZ0g98v5LenDmNZduW7cWVPPC0q2LZInz6hYSnl69WR1uTmcYd1An0fg4FxoyDEAykg6qCHi0lIDevjFX/7N49nSZJjVVUzu3t2XyUopEREBRFK+cTDP6/PNk4dH7QmJnL/7dPbi8Ri14vbJ+WOvWIW6pM3FZWwPbzWzBgTffe+R1vbRl44fP7hOIllNR6tVZDTq6iXjJ567dXrj1ntPVsM9bZqm246Pntynw7Y4uTi/eHr+4HFTvf3Og81lpAo8UxFZjRJd8ayxRPbqK1ylLTMxOe8ZRFM/llwCAQEVS0TIBfKqq2v1hF71/0fVn/3ckqXnndg7rLVi2nt/05nz5JxVWXOxyKJIaqAFara7JdgtQW64IaDtto2GARtoA77xv+AL2xdtwDeSDKMBNdBqdEuyJooUKbJYM1lTZuVUOZ3xm/cUEWt439cXsfc5WZnIBM53vj1FrB2x1rOe5/esY1bAYbNtPd14dZ42ayMKTbAojfebzdZzc+u4A75g59XUu8oASswhNGDcejeO/XQLUbIiAkqhCq5yBFQK1nVXVbWsRu/kVz7/Ilg6X6VZ3ThHx7cWyz5W3WK73FbgqqY2rA0ETBwgqlaeE0y3AwHMQEYWPM2kDGYt+/ueFgAzD0kxIoySNyWtRHvTkchZFZAdYOf8icO5o0r3UjwAgYIUQyRmZwaItr/xk5oBGdooZaicSwmAW2iO2a1AVymOznn2tdgudCAhf+Fr9249qo/b+Z/+4HyFkM+vH350eff27OSFKjSbe233H/6Hf1yRu3Hj9c22BGe5H4/aw/Wg237jcw4N+sUMLCDwdjsYXASwRXVvec0/fetd3+D3f/jPrF9y0Xc/HVJjv+jz0csvf/M3vvTjP3276qCM+KU3X//w4Qff+sn7a3UPPvz5N25X//nf/lv/6ve+BU336J33xqWO6/jBWz9+9eTk//1f/8Nf/+037nf8sOJUmu/9yeWTJ+5seyMzV01Vs61Xq5SH4Rru3Hnxk58/Mqoen54Tz+DBY7u62KZIm1SH2mcEFTQgccxT/yKaAXpKqRgAOopQFAyYyTEiTR0a7DzxBIVB5wNIQVU0DLBjrymyd1ykqAjAruwpp0JAHkMgVoioqkmYlBAESAHRKBAIqELwzXw1LNnITJ2xhzCO0hy4Vz5/++H7Zx7ysLl46f589srR+G5enffHt4/X46UaD3EMxN7IV0xQ4liAmNUFQMriEHIegmkqqyBxfnRjffno4uLqC1/50uKkOWi7f/S73/9w9fAvvvEqO/rCN+798Z+884XPv/7iLfyj7//wzc99abkdosDNO4fjaskEX/rcF84exaOIZdVfbuRv/NZXjzuZdQXQLi/X3ezwzgvzs2F1eDIDyE+fLo9OZrfXNb/yuXffjh+fXf/v/7dfPru+JKz+zn9ycLGN7/z4/Fvf+kl3/z968013cLshKuyhrt23vvUjiP12RVSVw8Xx+XhJoUIsY7y6GlFUQUvOpXLYNUElOqU+p64N2zyyV66wbpyknAqlHA9vzNu6IpZZ7a76IY1J4s5VNO0EEhKSqQAR89
R4upsWIE7hdTFENuDAfpSLOSmAuqAuKAd0B/OUUn+2XTSImO+/fHw4w4OTZn3dSxEzqsNCCKDkIfZJJFQUnLvabN586VXkYbXZHrW34rD1s6o+DDXWJjGP+ejoeLPN3gVEQJBsMaWhWBYTj46JVKEoMNpB5QOTiOQxA1fnq/W6H0lxVtfzJjhGZM6VX6tm1Yv19qRt6yqwQ+FpxqBmgHVFlgwpxhjaZr3dgiFm1ZSc94AQfJCi3nuQ4r0zZgYMnodh9OwAREgzGIgoKEDpfJvNikTT4hzp/jYMRoQkqgQQ/K6rFczKMHJTDThc5E2CAoooIydx6Poirq4c+gpdy3z31o3g8Wq1betOlLiut5br4tbbdRXaOORhzAicx1QmRgJlsDyrF3Fj2+toCY67o7atMW87pqvtmYEJKLsQS2LyDGCaPXud+IimxJy1KBmBGggaeUAr7LmRrMuzLdP25r0VY4T54dAHTZjiFg0rF8QKEBbBlFPbhTe+9Mp7H33y1i/effnWnfbgqArhoJo5rpvkrrZ91sS+3WyvWfGwO+i3/cZyEegCzbDkFL0Pdb0YxZWso0YldKoxJfIujlsiVhnAtFijxE1oRQsRDH3PyBWzma7HbVV3RDoOu5rwqb88eOcYyNQZW1EDKOZ8c1DGaCYeQjIbojYZneV2djAL9ZgSEjuHoWu5sFVD1/HTsxWsLYEicSpSVQE8FQTvmYqk62vEOlMosVhU08qHEMIctRTLRUYboaxJVGtf6qrLffnqb/31V77wGzFlzTkQghYAMxVTNsOSC1cB2IOoDNs4DkTBVw2SmWQDgNhvzi7r7lZVVbHfhIqaulXDzbCMo7v3+dfM+WG9yZteBNuucQxaYOxjiamMOdR12WSxwiFI0ZyjZmjaGszEsOQc11sgyn3vq9l8fpDL2Kd+HHr0wbn20Xs/l/Mnb7x8Mr91RCeHs7svuraNOeVQFndODo4WmYMkJ3/2g4tar3y+GpbOXFlf1B7RsdZB2wUjxLgsxFV91McCQaxqrq5GC1H5gaUnN45rf9Rej1a5YEZkmJXZBS1rEjNFM2EE3i1uCxOgQhZVQRPVqfbSEbMrpkDQNA5Vc7IsSgbkA6KAo6rh7TpB0i64VPL2KqsZFpGUscM8wOp6m0ZygDXRMKbA7JjHlAGAmW1aapUERKJiaIgoWIZhma1azG6GowUeHJUCzlhy2p5fgGRAkywiBoZDGp0DAlZJaXmlpHpxJautYxqH9fXZk7pFg66mJiAMqptsoWk9og2ZM5ggqKIBk6sIUU32m2dT0ma6SNHeRLTrmNpbdnZ8a3su0cA+5YX7RfbkDfosKhie6zrPRRp45iLa/Wd7mvTzx03OhL3pY68X7HNquLN/PNeonr3Is5zZVOi0e+xupb17NO2qixCeU7vtMx6oXTIOEJ51nrLzoTtuOBNRTuniaukM2PTWjZuimiQPKTEqkjhs1utN3bRHR0eh8VokJZExX1/FNAxHxzePTnC9vBBMrHrzziGSvfX2u9/+wU+wH8jRcrUtOR7PAlAe0uZHP/3F2flqGBGYpoOmWlQACGvvDZiY2TsRSSpKILmMRW3bI067Prv6JUQkQxFxK0KAqgq7cA1yxYSmTRdms7aqu+7goDvoXnjlla9/5TXVoW67H/74w21GBXWIRZRw8o0YAU1Qnamu69lSGfaHcT96DBEcs3PEzJOnCCbFaHIoIUxjzQz3jrTdyCOGPV0ITJUQUfUzJXbI6MGZqJIjIgJiUJmGRtnhA/ZjfDesdnyZnRwIz7BIALbzzk7Gpb1QSiIT2WYan7QbRbswmxHi1E7FRFNj+v6NkYJOBhaRorAHHtmO0DWBeCYGM+zsSGqKoMquUjMAZXZqajZN/ICdA0BD2xV52QRdNgDUIvuV0E7V1bLjZO/je4RIiJNlCYDQtBC7HW3MAGHnENoF5HRnP5r6DadWQdwF656lDVEAJq8TIggUQjIAx2EaDkQOARTUEdP0FYQpoAeIwKRkCM6LJC3GBhfLU/YORwMwJp7QD1J23i7Hbnd0d1chInJWiqkKGhrI5I8D0FyQEJhUMpFHcJKLcw4mcWZy4BCDgZYyfU7HbqJHM7EAm2QjQsPpJwAoUogIiUENAaQUJBIzMPOuElSVQo4QQc0mjjkYOGoMy85xtZPjAQGI2HsWyYhEjKa7grxpPgYIzE7NCL0jzDkBQlEBRClJd+5NZGI0LTkrAE+5NTMQRZxq+zIAEnsVmdAKaoIAOx/WJOraNOd/fh3cs4rGM2ArZViv+5jSZttXPvRjqjfSzBZplfpVGTexH6H2QEquaVRQSVQzIjq/cBzEpG6DZFOL5GtX1aK1c634RdUNub9SYFc1vu4q1yYpaj1UlGUdTEXjoj6S4kUlUxPCzCRkWxdBpJq9L6ht54aNRumb1o8AEsd52w4xB4c5pyyiFgGVpZIyC/zy117/9XuLI0w9YTVkuXOjo+BjH1lxs+zbtkp9wjzk1enFo593oW27A2iPkwNl9mH+yutfXp1+tBxWmGgzplth3gRHLPdeff2tB+/96VufqNjJjdniaDGseyRu5gfs8fz03JJsnjwmo9rSHMqNRfNa+xJ7vzg62NxaHtz489/78fd/fPLxnx199P1339uOGciRdxmdY1BikUiGVRViip40pYgmVHkGjLGoKjisWs91sMzbKNt+W8QG0IyeR01exz5OanfKAsRlt8etpd/WKEnVgdV1VaQ0R4sxp80oyM47SuNIJsbISME1Cli5KseoAkCsWQ893zsKLx1WOuBmExGrKBuAAhC6rmvcbJmiDkPKV8osksbSs/dgVXCHvrpT8cJ7Vihk5lwtJRatQrihSmqQYTCLIMXKOKZVzGdGQ3AtxnkIR468gSjksYyEDojEREQQPYBHwKyK6BxPHYoZlJBa9YLaYAomY/ABw2GyakRGQzAGmLBtZfoWXG+H9957mrbpjNZP+k1zfJy3PaH7dJU/HvOCFdPV7PhQi/Sn2/pwfvXklJzWKHdu3lpfP23bo5yHHNNQYhUa57huZsVsceto7OXJ1aWqQK/zag6lUMsnL86XuYDhe08efHB1Eb3+ypdf7IfNxerhovlLr7z6zZ9f/MHpqkju3fzw8MbR9mLLndGt+q/8p3+7Lf13/l//9NNPNn/hf/qV+s3P/9v/9vc++OTq6WZ576svvH5z/h/++c9ObiywJlQpun3/04u2roeLKzeWQ09NobROgVwxpEgay2RIVLYMJirITIzFzLwTm+LZrMWIvNmE7KcCYtP1yMwTmSoDTk0RTm2qBkMBIRiLYAiM3sgjMdroiEBU4jjFtV2NoiqkoowGDZGZDlmzk41BCm0p4/GiNXKz6F++OV/Jo7i8WMzd3eOby21/87BdnBy+zU/Z0WbbFzDwGIKPwxay91W7HgYDAGRDiCZsJQCE2q82G3COh9QPT8LBSX08f7DZfusnH54+7Bfz277q3nr65Pf/+Acn9259/NF1akNd3//i668/OH360hdf/L3f/eGnZ9cndZEQXnrjBPpy65WjeXPjo19cHS/S8YIQI
EYJdf2z9z9qwknbVb94fNagHhwt3v7oYjumz79y37fbz3/zvrr+YO7nVe1I/9W331rV43/2X/yF733721dy/+vu1RdudobsoPrdP/7O4f3DKBcFuF/1R7PjIkkgsXdjGl1wjCCIbfDDuCG0zWbtgt+pd+QQbegzAI5lWBx1RfLVZfQU1EyNhm1K6ZlUBGbGTAAF6Nm+3TRFn2B90+ypIKFzruTSNh0BefRdaE4OuscPPp7NuhzHUvKwBVdzVDAvm7Q1Bi0gimRoqm3daVWG64t2vmCo7t88bHlz/vSTo8UJsQMN7WJWJQ2i4zYXtDFlIjA1sExoigaE6B2UUkrJKXviAqCozbxbr66QXFd3y2FYbcaaORz4w7ZZb/pNKcXg4HBee391PYwigpikpPWASmjaVFWb06YUNMs5z7vKgQYiIlKzqgpqKGBiRo7GHBHRjByCSZnClMwIAsREnj2xiFVVW4oxwcnBgYF67xOqmqY4OB+8bwwcEMUiWpJoYc++pt7607xMHkTFgStD9ByUmOrWN7NaqFZ46e7d2Wy2Wm2R0TfVsBlvztva4ZBS285BcLndgAtIrtBoCGSU4phL5qpO61SSq8B5YMkxqK7HuB7HrpqjEbmQRAiZ2eeSCU2sEKNiAeQQ/KgbT2igakIqgeeAboyqrnl8ttLw9OTwMBkm1zn046jOiJ1PWcBgamZeXm3ilr54983VcsygbtbRfP7oennzpA7NCQx8tn50++QAJIbg6rk7X2/Pt1IS3YDmZE6Ok1oBSIK6lexBG55J1rqqCVPGqX0EAvmxZJ1oUAJiEJpOiwHmnEtTVUlEs+ybcUC0EPghpbriuvJgRgREiEyFWZirgHXlNjr60CFx1QRgB6h1xWQWAqtiA5ZLSsveJ1sNpRgZIjuHCIwwm7c++BgLyHoWSqhqoOAOThRg7HsTK3EDlTETi2iMgsSIC6Qv3v7ib//635KUdNw2IZhCycLOsYFZYs+IFTUNqGgZTAYi9E0NgGhFhtEzDsM21M3B7ZtF0oTSkFKePnkyRH3hjS9Q666vz4dtX/uqrbu6ppxzv10CInmrffCVy2lIw8hmSOi8Z0fJJMVBSil9ZO985f3h3IyW4zbHkbyTME+Cw8Or4cmnr9xsb94/SEjN3Tt2cAiMHtXaSsYIyCc3Tr6m3EL88Sef/vjqgYlBNWMPJP3QrypsCzYIBSgotMvlkAZhuaLVJZx/kMLVwREdfXHWNlV0hcI8GxpwSWkzwmqVJYsqqBEAquioBZiLmAPeNXMDqZklYcKpEGhCX0sWExNFAydqm3V0FaEg5AJm7KhAdg5ThPFpZgFn/slVNiTmmgOW1G+HcVaHmCICgFoSa+cdOBaiLFobeMKSUxHNw2BSLMFUpIgG1o+y7MsmSsygCo4lT0VIk88CcxFdbXNKUBHlpKXkrAJU8jZvrpqaA6LEUsYh1O1KSyAoRYJa8JWlUVEVVKaZ4r4Tdu/wQdtRWXcuj/1e9HNu9N7lg89/uGMKTeIL7Jb6Ow7KL9k27LmbaG9iws/kdfbvxPbizmde/jNv85d8TPsyq2fsasS94rM3qHxGoXoWM/sla5EZ0C+l5mBSr3BX0bVr0jIzo6jOGYwpG1lXt7WvFXFMpRhsczo5nB11B2OfFjdP2qpKQ4pFzErTtAddnWOuAm7663W/bFzdHcyR+eOPH37v2995fH6aFE0sjhkdcNX89L3H73/4yXK9AUAGxACMVkoxBeccO5gqyafqchEF0ZjFOxIzIFQDJNKi03qVaIomfYbYvR0UzDkmJjRFIuoRToGZu0VXNbOfvvfJOx98cuel+1y3KRoboanITnPTnX5ogGwq+Dy4tzOimNqEa3HICEiwa50H2It8MPkYaNLjbC9FIhEBwp4cBAYAqhPEGmESSiaR0pCQvCGbAhEaTAVoMg20nZADE7x6j8cC24Osng235//YVJo2MWgMVFXVgMgAEHlyQk1YIkOkScQBJXKllJ0BCgGmi4lZKSMRgoECT/4OJAJEJhbJaoAmZobEpgVgItcoIJBjNREVBNP9AxFoAoJOb15KIWJDUC00JVWZkXY64tTfBTp9aCF2YDRhmJD24hhOsheoCSKrCiIBook8E43NlMntvk04VWvRhH/XKZy1QyuZmoJOkT+ZqNWishN5kcAUkMRUTACnxjZih7M21I43Y5JsY9Kc9cnlKdEOuKSiz7NsRLvDu5OLZbLLSSnT5wAEVfM+lJJU1DunYAZAjqejwN4hEZhlKZPTzpAUCjsnUhCAGMUAAXUqtGGcoEiOHQKIZCaC3UZNcbTTvohpYluJ5Gn3dRKaS4lI7ELQ3XfFRJSQduFmQJGEQGYZkMwmqY13I23nJjOTAmYCtPtqmalpcAFQCJ0WEZ2Kp21XOQdIhCKCyMwu5SGE2my6wqGagBkQPPtiTFwndgERS9ltG+ykoqvz0+BcH9PF5RAjSDYHvQDcvFXdvvcySxw3vu6AWzw8WORohwe3zdi3wSp2rvb1rCilYU104asBkEM1t7Lo6Ia6Q+FqsMG5Y9+lolSHzglxGdn1I5lxkzC1NKvwtknLfMmO2QNmiHlUCowiaXndn2+4a7uT2eKoT6vtoJWrejTq2nHANvjt9jJJQux4DGrhf/Kbv/3NVz9f8Imxzg4OBSI3aHmsamdFm85hCLXkB5/+6EYLTetu3rzx9OGKdBW62tBW261vmwhhNSyNKt90MS4XjRs26c5Ld24uqtNH51gaLpTWG4q9ZiTnciweRWsuUtQkONkOG6szZt2cbzfpKqcYo7/VHPyVb/zGb33lm4/7q2Lp3ffePV9ef/T0yfV6k7PGgUrRdZ9UctOyQ8lSiBxLQFasQhItW91GdW0bHHu2cdVnLW7WqcTX7t92kiE4cm4ck4giQVUHysrKXefMghUx07ZuSo6OKjI1wop4ZA9EABCIJGpSCaEJoSYyteKC02tJV5fbaz5ezA9P7khOiJZzRiTdbrSSxeFtDYc5JfI5pSHnS9QstnXNS0xHXXXHoAiCc5VaAaQaPJgxm1gsYiY5jac5P455mcuAzI5rB4dgHaibEHUiBQARB5ARFGGSioiMayRnWCMwUWAiYNO8VSMgKik7RGYNvgY+RIXgQ84DOrefF4Hj5r2fnc3a40zcHrzy8ueOn77/Vgh4cPTCew+W1dFRXH7UHHT9ZjuOg65guRqq2h2eVP3y7CBAIBvH5FIQLdEjessxy6aHutv2NvaDD+nw4AgomlLTwElo0Db5cv3W9y+GEvJY/ehHy7/85754/HXgyzQ+zLrNX3jzV37xk38lwZ4uT2cEr71xN89mP/3F9z7+0Xtf/e3Xt49P3/7OO//2naUfORXyyX/hlQOLSQYYttC90J680nzw0wff/J2/8e7PPjh54ZXLs49gPQ7boXOcU1JCUeCWFKGoKgCBMTMRDVIcQnBBNacijtF754xLTkyOCCsOwBRzZnYxZ+/AiIBZgVSUgdE3TBx12KKKSqXmQafF4bQ3pME3TbXerJ2vlZto3qxuTdnSOG4qF7JaXzYDM1WNNgeb9VLVzfzR
l796++0PP/BKVxdXdTfvV+Xio6sXvvz6p+98NG6vkpGMA6+UzSwHiTWUKAxIlFVTycwCqmbIjpFcTIWQtsvljML3/t2//ov/i39wdPf2g+Xlo/ffIuvxYr2M+ahu2cb16vL88cWP33rvfH1amb791qefe/P29YMzOshffuVWYCsp3r8/p5IA/LiNs3nnJc/q6ouvHfWSf/Ho8cmiK5sYN+PRcTfI6rU3Kq4YMzhgsXJ0VP29v/Vb/7d/9Ec/f/thV+onH58+eXz9H//2105e6B6ewacfX909oJg35gobDf0qBFwNV1VoyXiaTjW+DeTHqEKoVMzR2fKcFStXbVa5mnW3Xrrbby5C0xMqYBsTUSAXNJZNd7CYvgXMVDKoGCiA7O5oU9R/NxUkRAAfPCH3Q2RUT7y6vK6wKtuh9wTs2Fw/luZ4HjSnKFfXI3nvxRHTgCWOA2i2AMTkq7BYHKAPbde6KpwcLj55b3M8u5msF9EhxsDIas28Ww+Dqc6atnJNVzdPnz6skJRdKqXxbjsMtffbXGJM7ax5enk+cw6QL7b91WbTdHXNwJYd2HZMORcEDwxGOq+qtuJQ+0Gg3443FguPengwz5D6PrrgpY9IZAgW+HrIDbubs/m672MuLGXaXmYfTJUdKyh7poA5JudAUxElVHLEdd3WjnLO2zgyyBDzFBQPzhGSqYoKCARfSQEH5JB6Gs77ZSLIESBmZGLfDAXRExK0Zh7kzo1DBFEAcOCcE9HD4wMQVQHVUlSKqJpJTkJFrSjYMPZpuxnHIcdhXKa27iqEpqaURnb1auiDmxVVREpZiL2agSVkUcnOAbHPVsyKAAffopYsGjyikWYhZ+BNiH04+vTpqRS9/dIswUDO5gfNuBpTEskApD6QC46K5ZQu1le/8vWvv/fJL56u+uOjbn7jKKq4Chc3uvr2q+N67UHGNFyvHt65NzuQxepiE2BsZigMq+3gDKvK51I7osPZ4fnVEzMvnCtfZRMCJIIhJkbORXwIqZSo4gmK5OD9doilFHjW1g0wDgNZyQjkalTzja9CQwACBmBUVXkopkrAQ9IWmrqtsQmQxrjZokDa6tDnoS/D1eb6dN2vkpoXFS3iPVsRdBC8I0feEZJi2hTJ/uge1AtXN4vDIusYt6eupkbScHlG3le+nkG66W/+vb/3fwptt+2vPBFIiZsVuYDkTQXQnHPovAqA5jxscizt4hiY0kTycvWwvVqeXx3dfsVMyTlSLGnsN9vLy+tXP/cNrMLZ2RkRzpqmm7UgmnKfcyTgqq4FFABKGRRyNa9KMVc5MEtpTJK8r0XFNR2wE8WkBQHYsW9ngCCl9E8ert7+6eeOmkVHWrnF7ft4dCLeI6ijmgCyWUrqaj66Nfva/PO379y8+2Hz3fffehq3Np+NfaT2ToKTqjoah012h2JzWX9cx2F4+qc8Xizg+v6tujvw4IfqsJtVB8P1Gp2AYb9a5c2oURFYkLKVyjkpeeLoGegIVGKGbBNL1wCIKGZFNM/kMTDHUSWLGaAZOYA0qsFYBUYHisoOvXc5U0oewY1j9GTZsK5w6LdkxoSxlGmjxDOHEEbEwtjUdWDOMaJIziJZypjZE4WwFZtVnrSk9TKvtzIkUDQMagauch60SO5LSeIC+SyMUra5DNGymUiovZYULzcEAXyGrOPVUhj9YVPIChTPUCQiFgDKpZipcx5hDyv6jBDzmQTYM9sF7JbZvxzZep4G23GHbQoffYZp8mwx/kv6094bBBNEBnEvDwHspSrcvbjtRSB75guBSaMw+KzE9OzRvyQz7fUt2LmiPqMV7ZxDz2SDX3qaZ4cEp+Ac7gy4ZKa5lJjjom5uHh9eLc9Tr211QL5qfK25nJ5tgnOzedP3pWIvIiE0nqocY4lx3K5Dxy+/dHu9iucXFz/58c8ePP6kjCUDmIdm5pXjB+9+/NGD86EfGYNDZ2CqBSd0DQC56co1VVtZkWzqzZQZvWMAxZ1CN3lSeJrzEhIawoTbQZhiRNPSucQ0la2z0nRu+tWgozUWxq0yHsmgDgFRyo5kjAaIUIBwQgxNh1pB9ydSAcDQdntOk+QiIiUTB2UDmHpHn7HLP8OHQqCdw2EXUiNCMyZgsamuSwB1akMjJjAmZjUCBTUBUEMj00k12p38SWDZPe1zuvIULnsmYE2vb6g5Z8c0BayQJoLNlHRS27vmwBSYQUFVsyVmDwA7yPT08SfWEhICiCoxI7CJIKpMvVS72CQikhGr6h77AIRQJO/ox/hMxQUAUNkVugMiIoMJIRM4KXlKZgGAYzf96uSWMjXDXQWbmfFUeG86NbYgTYNiRzUWKYj0/PgomepELSQkMBJVREJiRjNVkQwARLwrcSci3AF3mPkZKggMFdRAnWMx02Im5oirqiJQR5aRSgIkaGqOqUdypkDIkxqFhqoKhqKFyJtaKdk5P6lFBoB7odH2qKbpciQqiGZWnHNqqiJmykxTf1nOmRARydTIsYgaKCITkakgTjhEVJHd/9lNZjnHDswmxVNyds4BiGMyAASSnBGR2RUBIhQphOCcMxXcmcEAEUGUGJD9ROomAzAjIiISBVORSbveE7jZOTAT0YnHBGRmgmjEu1ihqaoqe+cYpkkmAU8w8onHLpKI/E49AyBkyaKqRGAgzy6kO6lovZU0lE3UzQbQ2Cuoant4cOvkiyeHn8tD7l77le7g9ta0reomNE5cVuOKzSMCFEljiu18C7lGVeIetBWtCHwgFsgHB7P1Km/6JK5RAXKVc8RpQ7Ixu25r01SyWFYiV5kWAgVUz+zIsYP1ui95FGvSEEMFxbDvoZds0XxvJjBAcQRQgq9asvYbX/oLX7x7o99+GmromqPlphydzJ+ePjyaLRxgHMf2kK5OP1o9enj+8Y8lVIHoGtdsMqyW2lZN54d+G1xrVm9GWPfL+UGqHEuo5tXJ6fsfdW1zYzGv3Xyd+pMbL47X51zZZry+Wq2a2REgBB/m88P15ROAImrMUB0sBFnAnpyvTK2bhWDxbnAVhze++s22a8YyDjmeXy8vL7en262FtnLVwXxmBS6uz/uyffvtd7b5cnaw2PT1S/fudYuTDOM7P/pRHEZDp1nAJF1t3/zzLzdzt1pdOp1GBgyiAV02A8sNyvWYGl+xWUWVQFY1JK8qjOyJkCs0ZVRyxQGhCjBw4DyaxvzCzRt//de/9pOf/MC5Wai3XV05ppPjxeVqXF2tXEdNV0AHJ1ZKcphmdTWmkTg4nlVuRuaAEDGbJiA0SIiKGK30jGMeV2q9QS9FAh9VeNu5I+fnRjWiFh0xbRE8AqiB5B4hl1wQSdScD+A74kB+rhCIZwwI6BErpIZ8lRJPDYjO18wERYQJEZG57GLPcH118eab967ON3Ub3vvwk4v+4YJBBjn/6MO7s+ODqsmHNzen2zFKG1oCOpgfJUWlZrnpQX3K2SiMw2Bq6D0hSI6b5dKnSAkXhN/40tf+ww++67yTVb4TDs/efu+1lw7XhF//ta/84M8+3V70qbdbdnJKF+9/+IvXv/pbb328ee/T9x893tRH9WJWf/z
Wwzasv3L//o9++Pjm4uWf/vBBHbf/m7/7t//F7/433cmNw6NDGOmD7/78ZH7y8r27y+WqPy1OcIGLj9559+Gjp7Pm7g0/s9UFO4eIORdXO5nqIEshoCkXPd2oGIHMSh4BMJBDLVNrRXAVGqgBg4upMFMBdYzEjGgKBBbYyty54OttWm9TX1deGTVmleK5ruoGSYcYh5yGbCU4rhy5htzMYsibc0+iAkXFHDWLkAnXq+I3g6mM6eqdT9enZeGdf+3FuxcPV7PK37h19P7pVdsuNk8uCMemcRmQC4SmQsDYXykUaivwYSwR2ZERCLN0MZ37gs47gtzVTQNlM65/dr7W5dWPPvjB46sHv/Zrn3vxC7ffe//Jl770yoP3Pl59+CQZaxVWQ7labU+X6e7mpOsOP3hv8+r8xG6J986IQgNq4FwY+jKmzMzMefX04iDMbxwc//EffLKV+o27t7770x++9jtfL6bsgYAJDM1Q4L/8n/2VH3z/g81ifOMr984fPH735+/cerr4r//x/3g8E9GNa4zYadYhLzW7qgoKJiJZQLN1lV8uL9WYa4da+lXPRExgol1Ve24llmF5fdIcHB0cPB36IQn1mCE3TQeWnk32iQgJ2DuBfTPsvoMEEVVsAigqKDozJGVydeOE0DCPMm+7Mm4Cgw8KyTzK7du3Li+voMg4jM7VwbF3rtc+pdg2ddGaHK2znTTVk0dXIRwYa9PW4/XQr3pmlrHElFnBB1d3DWbIuXdMYxyJMMURRGZdu96sDroGwAL7ru5KTD449Nq1NaNgLm1TffTx48H47s0DyVJM55XHysU0OBemCo0+51D5dtZUg4d1RATvCIpk05yzJyKz9XY7FAMEMkDAtqrUMOZSrNSOUCfWIhJhYGYFj9BUAaCsVtvFYpFiqqoGVKTkIuh5h+RHnLaER1Mlx6s0nNq6J2HHMJTATlXFrAQXQnXYtn6VZ47n9dxXbUo6xnRy3M1mnSFJ1jHn9dDXgWOK0yZ5SrHkAg4QxKHEy2tIaYYVS5ovOrRkWtbbcUwphOCqLsdECEUkME2YhjpUo2zEhAEqboekBmwyMBiqN0TgIJBD7XIS2QpqfVG2oI+6oxOBxNT4GWMhGEwAcyoqiQAJ3MXptQq+PL/T4XUdNZCaJGYjHW8eHEQrSOHsIteVP+yCLJcv3W37QYpEBenamRZq6zYPWjSt0nVxWtfdetg4ZlBNqgQTh1O5aojII2EpzjQEV4ohmnfOwGS/h+YDQVFFBNWUMnsvHtUAjSBG6UeIZRyieZUqDKPQVhmZM7NvQs3b1Wa13MS1LJ8uV1dDioZE3rESEBIbkK82xlnRqVkaEYn9jGrPXsa0dGDBStP5VAZHyc99GWIlAlv4m3/3H/jqaNguPUEuUUXZORcCBycC7AM5lpLJA1hGovnhgtlEU1UpZ7s6O//oo5+8/MZXZ/duZjU0RMO0zRbCG7/2mwp+MOG6qpwjM7EyXV0oMIBLoEnUkJBZgQBZyPqkVoQAFKpcWCHkYsMmsfMBqQyJbbNZbxRWzsZWyp2XFrPA4Kr69m0+ull8NS2uAI0AHXEhCm2VmcK8u9F1f/O1G5ziH7338Gq1bQK1xwcXm1TGp1DkelBOV/jkJ3H5yOlZ8CJlUGyzOk0Yz67VzgEgl1xM+u2YxgIKZCBmbJBzZIUAlGIqAIpEYBM8F3AqGcaJ91GswK4iWoInMUxZAR0TZikgxo5C45KmsSTNJAmKJWYTUQKyok3gkgsjT7CY7ZiaEAqg65yrAknBUrBkSUlkWtAqKJYsALI6O/OhKlfXeRwACV1VTNI4onFwnkCYjAykSL7eMimBie4qnHMSXQ9BySONwwVnlZTJzG9DqdmY2LOpSTLvPCAUEBWdsi/PdZZfFkz2Hpv9Mn6/mN+7iT7DfXmu2pjtLUO73NHztjTbSQG4F32ePREAIe2YwNM6GCZjz/M3hHudyODZi+E+UTbRc3RvOXqmMn0mfPQcZfD8SZ9LTZ9BWX9W25p+Zee4RURmK8JETag92nq1bbBt5m5MsaRoWUrOdVO3s2o2rzEKMcQUHcPV5Tk5ZoTucG4gTx9f/OjHP/3kwadX16sxCQio6TrG0yePzk7PpQiQDyGgmYiaKtOOC6NqYKCmNrUqAThHsKtMMlVBBAUoRZCQAUsuiFP4bEIw06T4TWSVnfrBO7OKJpkyO4gGRvODk89/+esxZ2QqKjjpJrtH6879Zfso2h5VaaaEqDDBfgWsCBq7YlZMi5qIEU31Dki2M6dMd60993yn3e2H0KRWGgEa4M5ZMp0PEdj95iQM6K7a0CalxET3VPNJEd6/5x11cwpkmD3PnwFMy/UJ1zX1iO/Sb88VRkYQnN48EDLyPiCJMBkDEXRKwE0fjYj2NSHT9pspGAATTcb9KVk3vRYiTi3mZgAEkzSJhLprVSPAnXqlagYFESZMBBICKCJNbC5VNRNibwZAupvpIamK7U7WTtcDMyCa7FFItMdGPw+dGSohq5qqEhIS29SGthMBCztnuz3GyYZjKjINlWcXA6QJdm771KEhQEwJAHaEcmUSXHTwhZdfkDQqNAS4O1MEBkhEBsDKSCRayDEgKBoyA4KpTJJfLnlSBYEc6E4iVtVUEpgRud01BlFBPDs1KJKJCdCISYrgRGUFMlEzRGKRZETo3D4FacQsUgCEkIBY1ESndJvCrst+slwhEjpAMCi7njUGgJ36TKSmgLbji+8jlqJqKky7U8CORRQN1XZRTSIWLaKqOg2bfQLAdldRnaBhYMCExAAmpQAycQDACbalJrlE570UM1X4zAbBTirq+8hciUpVEQNDX9rFwUtvfvXF175mUh3eWoRq4bsbJ11DyA6RzNChqomVkpNZdONQEgktNM7Mgq+PChSHR0yEtlSNQzrPSbruqKqaOPZFVgAbLBfepOJqA8tRzqOoQGYOdV1bXG0seaqcQuVYyCfx29X14iBozCzksVIR67PG0TyanzfOb9bwlVe++Vtf/YusSyB649WvvP3uz0dJt08OZrVv69mn730AYH2P2+1o1n3uzV97/0ffrZqm1fFLb37l33739w9qZzz3jX/49OPF0c079d3ho19YGcbt1dOhvHD3Rixl3swaXa9PP+zL0LMj1UGTkjvwt71gsTwOfQS1XJggxYGBuvagF10Pa3bOA6S+WEpAKp5Q5Xp9aSWH2t2j6tUXb0jwN+7cLzHWXTusNr75Ijt+/I3fdF1cLVez5s6br7z2yadPZyeLn53cev/xJ+8+/fTh+ePFonvphVcahE0avQuatfZUd56QYp9zSmBjN/dsVIW6MTODgsn70IUuieScXPBIlYwbsRJCVfkQ45Y8G6EjE4aa8unVxcnNlyiLQ3eyuLGN6yEPzbxtD2+dna9XTz6d33jh8OR2D83VMAA5omIglZ+HqjFEDl5KYmYkgJQkXaFuyngNlAxKHgfwIdSvEHfB3Qh8ZASi2WBjsgFUkXEKy0re5jRImToLseRAoThfORMiRh2IauaZAgE3Rt4ASonVDKhyYN6ygVHTVmPJztn+duDOHp/Fbd
lGY6wvrpIFDtE8mpb80ks33v7e25vkVtepafziuL5xQMMo7Yw+fbJ2QlywroI6N8aC3MwX3erpNrK/dXCUNuPp8sl3fvITEMJiBx0Oq4d37rqvf+3uH/3R2+PZI7xedgB+cfBP/uU/+8rnTsaYA22PbtfH8+apgW3S+VUCnC+vdDs+fe/jp1/80pfuvHr34dOH/89//i+/+iuvn54u2Xrf5q/9xue/+/vvv3DzxTisxozpiixVnOrffO13Vj94EM8vvUgSM+equhVTxyQlOUNCQnKgZVICHLmpccGzF+RSSIm9CyFUWgqqFBMAZGQx48lUrmhiwbmAHqRcr64KWhEHjsgx1U5yUdRUYrZCXS2uu0hAdeO7is8v2s0KE0LK2FVc1SowktRdzUSNo3SVZ4dtr0OW/OQJBPauQN5ktf6Fu+29I0QrN9pFrzqqOBfEKPi2X29BIWerXMhecoq1q5gZ1XLswWzIQ2latEJpGRAV3f/nn/4/bGufe+UV0vHjBx9eXG3b+ujb3/neX/+N3/7VN7/4+3/4g9sn7u33fv6Fr3/5tddnDz5ZH9WN6cnNw9uztpQof/zB6jdfWrSd4wZSlsO2Pbs6Oz89vX33zkefrP/pv3n31Zv3vvLmrO+vfvvPfb5YTgM2jROwktU5R0wHR/A7v/PGj956ajkN2/WNo8OSD/pT81UBzZvtcDg/qipXJAWagWnVNpfrszImKYCVooMqNLWvV6snAuCd9+RSn5vWd50jSCfNooz25OzSe7fosCK/3g6UsR82+2+BIRKgqQiJ7CfK9mzPFwEAiR3XDSPR9nIDCq7uLk/PF13bx/javTuX5/lyW7wGZs4o15u+bhslVzlbXg9tXTGpqatC129iGWVxuNgKvXD/5R//0b93wXKxrIqQ0aiq2zLCw4dXoVILHglMxRwSo2c20Qpo1FQkmZXKtR7HAJCz1nVd1cHVHtNVF1zd1u8/Onvax66ei2HURMwAnGNsq9D59nx71XruamaG4N1hN7tep6RGoE1dr7YbyZkQK/QBKmNrar8dtpO1O4vt2Aa8q+M1NFOd+aoOYTsMzmMIwZvr6jnRUnOa9uRC8HvvbyFmy6IiBiYezvp1T5YzkGbNUbE2dIpKRIfOL0ppK3fz4IgojNmwclXlQhcU1SS7KlyPqYBlKVkyIwEQEznGVFJMJa62OsaAFLwdH7ZJch97U8kleVebSsyZoHgXpOSpmxWU1YzQIYSiuagRebTETMUCgCYpoAlIQNmRB/IsvpTt4yeX9103O3QlDbkf6/nNiru+3xBy6pMAVFVVNc123NY1n8zbWNLqYgg1H9+YeXZ5GGeL5tHjx4Qk4B6cLrPAupflZWo7H7gNFQ1lKDCw+nHoZyGIwGZYevYGxbMfSgFEZgRTUWFyWQpzIJDUjzlLylkAnXu+RPaeFIsjBBskqyQei7J5U7VY0nqAJCpKAfqnm5JSKdTaHDKiD7H1A6otMEOyZeFG60od8dAnEoOsXFXh9iEcd0ooMTe+CqETgzicOm67+hAFtO+RQRUtmaMarC+D/3t//7/6ym/+leuLRzIOABCHvu0a37aAlFMkR85zShEUCKFfDSknEchx3K5HMzSF5eVG6XZ1/OqollKpqkq0VIc32Tz4AABcZFZXUqKBgkIao/dcUV3MjFxFpgYqUkYpMemY2CkBgFocxlxySpthtRz6cbVejdurvB2YzLF9/mtvnNw87rh2mZxjns/4+JaFFqYqmZJMFQxVMnvH3muVjdz86F4tAeMvaOTZzXAZ5GmhaLzeZqZw3NYPH/1JNz4KOPgKb907jnb4aHl5HNzhjcV6c6FDzxzidiMgUvJ0t8qSs4Kp1ewALBcjIDQokmtmcsYI7AhMGQrtIKaaRYmmDRHw0646FCnqmIkJEFMpghacG4cogkrAviKGkmTMGdW8q2IW0Type1ELVhWEqq0rG6KqIXMGSJIVgKBYVFIdr65dsEEAhijZAMg7pxmm96w5a040qQRFFaBIISIzdQEAtCDO68oB6HbN2eI6SbbgfB5Hf7xwbWBT4pDMTHBAyFUT6lmNAvAWPDcT/bKEMy2pbQ+FxM/UmO0EGHjm76G9hqQ7uu+z9cveJGSfiaPtIy17OQbtGV7ol57/M0LUs7X6FH57Vg29+8udoWlas+937ndRtedps+da0E5i2r0y0meErymlMSXSnjlfDBBERNHYByZOcaOSq6q7GjZoGup6O/btYraYH9UOyxghS8qx6vyQh9Bo1dWqtlqv3v3gg3d+/rPVej3kUnXVjZtHDx88fe+Dj85OrzAXIkdEZqhFnkldRDRVoeOOGD2VpsPOMoIoU2vSHh0FBmgoe8FLBAyA9yfFwEwMaHdGAQl0Ny8kx0YWGv/Fb/7a0f3XN1LQIaggIu4a7mG36gfEZwrc7kVxJ0Pg1J456S27MJhoVitFsykxqAGgIhLvM0VqO84R6sS0Rp1cRaqCEz/LlKbuY9i/DuwsTmY61ZHvRQycmtP2Jxpskmd2f5iGGO58cM/FQTAAcn4XhlPdO5zUzGhqKDA1nbxCpCqIRsSqE6xpQvMwgD3LeKnIFF+DnaMEwSZ7zvRpd3G26ShO8JhpVsZMojJxeaaEHiEZoBrsM040mfxMZPd+RGhSDEsBwl2OCcjEgJSYTQsiTLzt6RMzeZEEqMj+eShQTScA0USxUQRGtF2CT9UckYJMBB+ajFmgpgIuMLGqsJtMUgqIjIhEIoWQzUiK7Hco0VcOQVSEnZdNAoHK9Cuf/9p20zezejodzF41qykSE5KigUnwbrfrLzLpikiMU1JvEssmTJAoMSOSAqBNRfGkJmagItOZRUOYMoYiCh7JT3VpCGimzgVEAiQiP9nZVBQnmBtOli9Cm0Q0VgEEMJFdvstMtThnSJZSZvZEDAqmSkxmZipIpGWC1tGkhakqTl94dqamkgTQFAlZpYAJOUbQ6VLrOIDZZG0jxClkOkmMjp1IUSmGbGaIjISm03AVsWyGRIxIxMzs0xjZ/bKriD21jfOVARAVjAFPbr14485rrp6bhEJVVTVYtc43Hh2CTBlJKFpiJkB0LjgnEVW46MzRkGKsfQV5KSZDuu7jdrVdVXCL8YCpLhqzMXFjfrEtm15WUfsRXWhukFQ1MlENjmqqwNhsRdwXjXXNx7M5u802F/QOAMyEVRHBi4OMSO5u98Jf/dW/5mUby1h59+6TTyTw3ZObWOW6avrxuj70TfCr9TiK3X7h8P0//m6S8sqrL5w9Wv/Z+z+/LMuF0+tt0iwxZ4xDVfmjw8U4XpirIkA06yWPj8+Ykqtaj67fDnncKPP85J5Ls8M6nC4/bnx18egxetfMOjDaZN1aLAZjAQ9UN4fgAJldzSVmqljGwVw4W29BwY0DN2nMnwQmuEQZY9UGM1ldLdvcUAR25cmDj3TsH777ce3Dn/vqr//WN/8q4caFsn26if0qkzJ54rDpN2owbTI0rdUh3Mfwab8lxByHdlbfa0+WywEoGBaV7JiKKfqqIk9EYkURVcuwyU7Gu/P2d772pY8f/vTmvRcCeZDxe
rusQ4jDiAGqjo+OD6MOcX3hurZqblf+cNNfx1JCZ9ksUEFIAN75mRlIyaaBvElOBQnBs6u979A3zDOj2rtF4Hks4hlLLobZLBsGcLVDx+SQasmliDr25AJSQGPJRSGqJeO64GhQiarE4gBdYANSRcLKezJjsREkiu78FIvD4+Ca9z7+9kv3F4vQRaOj1t25e/xnP/jJC68v3v35D86vl0WPPXW+CpWn68uPbixOxrOLRdXEVUQF72oTUpB7zdF77//k7sldavGT9LhynqpwveqP2sXBvDPfnw/j/ODwxx8/kfrwoyf5/v3X+odPPrzevPrlL331t974d//9f/j5n75///ZrH73/qQvQNJyQ3AIWtw+3az04Onn33Y//3n/6K289wkcPrr/8W6/90f/9n7146/Zrrxw8eRI//ujR8ddf38KApRoK9M7fOXxNP2Vc55qJ2PWxAFhWQVMVQDPn/MQEnGyiaubIgU23fzUxBtfVbbGJZeyLiIKpmsRCwRNDHpIza5u6qistMICsQSSEEtpkisCiQq0LVTMqUVtb111nkNb5Im7Tn0jGNFJxTdsU4DFmP2/CHLnldL0+uvUierrePC5o5pS9RJVPzyMKbbIbv18OTha+oXQxhFvzs3RZM6nTsr30GZFCV3dJY5iulamYijKlMihk13Smbip77vtSV3Ry0ISFX8fVet1v4oqDaw67J9cbun/rKti3H/z89hoq39w4aP/8V7703/2LP/1bv/2Vy0358JNPX33xRt34jy9Wf/XN2wZZFEpKRejOvbudp8dPrn/y7kOig9svVmO6FJKsmAp97xePf+P1F+sWodoHuEkF7AtvHl2ul98++1ho8f/9J/+9R5AURQYWXW+X7N3x/HjY9EmiDJmodG2oDronTx8tjhZJ+lyi5siOHAemupvZbObFYt1UIczW/UDoX7l//wfnbzmvs64Z+zIVoUwzZzPdzRgnJy+C6W42bQZIrGpSiMDVvurlWjMkMxM+Omr1qqzWV6Fqb9yqt7HklHPR5Wo4uHGQKs1jdj7EvF3M67IFLaWuK1V3uVrdeemlXLaFyrxdjKlnwKoJljPKuF2uhr7HKsxmgSpSASTKJRLZkMYxJ3TMLviqu15t67pazOeOXV1XaRhKTg1C5fjjx08/eHw1a1offMn5aDHfjrmIKlAqmFc9GpsqGuVkzFaH5qDrztZbQUyaVXITKlV44eDGxfIyIEsaK56ouDDGBMB+olwHF8gJIjtExJxyIGYKAmCBLzbXRoagHFinGRNSycUzSimmuW6qbY5Px6uBdNrCSzF6dpohZ5217oXFifSrQC744Ksum88M7Plw0XpyUjSmDAgZVBD6PHpm70K/GRCKSuw3/eriMl9derWq4nbWjSBDHNBQTNkzgReRafZfZEuIBrkIELGRgZBIQgxFEMB5zzFvDGo2x8yAiFQVU8kphAoIszSq/Ojh2WG/fOHe3fnB7LrPMcOQIqqKKjlWAhUDwfPLbVdVB+3hevmUAl9cD9t+yJDMYR5Rx5I2I3uMRJAkRVMBx4CcFwcH4zCYYeO7jNAcHtoIqhZT9MEUFDG44GPszSSWyM6p8WrdG3KWbGreMzFOG7oAoERYeb+rQfGEhMiYNS7Xmqxf98yUY3EFnUEpYxwTn66DD/VB6w8IuHFtKHGdK09tRVoArKk5OHLgxFfSBXfQVGpOhCVaIXJ+WK1hTHUnHgClpGTjdtswcdHD8MLf+fv/5Te+/hd+/vaHsyYTUFU3B7cOAUEIilpSJeUxIkGTVdKqyNik5EmCDws69EIuOL5xw2pvmVXGSMZxldbX6zFnZIacJCcVTatNTJtxvdVxRLOqC8iURJSppFjGgVJK6wEMDuYcWogQQ6iGzQjEyJb6wdfusOZwVC8O7zft4uj2LYcFY+JYyDm3OLZuJr4BnJAoMiFjCxLXFTcuj9nMHLl4mf/gv/uDd76/mh++uVylB7KCWw02zbg9t7OPrjZ/EPJVKNkxt4vq/pv3Pj6/vnk0qxwUz66dc6jzMNR1PfZbx57IjVIyWV+0IjKA4Ny0nNOszuHEmCZCx6RmU7KGYLdYhd1UHxQnLIqqGAKQ55IKKKnhtigkYmNUyGZiYEa2g3BUDnXoeyZmIPTOz2eR3ZiExVRBshVR6Zw54LH4iJq0v1hqyVXlCAGpHsfeUSixkIJkATREYvJjHKSoAqhCRYhEUow8ueAdMSvk7ZCG6DlMqybgtvIzCg1YzONYoxPmkcJYhdnJnRuzGuDfw7P7wS46tndJ7GWjZ6XisOcZwd4JArv1Je01FsQ9vBX2oZC9qgA7kQf2AtLuZehZAO2zUTDbLfJ3BOxdKObZqz9XdnaI450pdtrPf2YNwue60f7l0Cb6kO2pTDtXyVTYtq9t26khzz62GQIwq2oCRCmkwIEVMoAsDhbsqq6uiAA1p17VMPdDLskggRl7t92uP/zww+/+8E+vN9cEMF8cgujjp5fvffvHVxdXaOadd+xFpmYum0JO07suImaGTJNrYBeaQ5ySKHvdZq99TYMWwBRo58YxQCymoGAmRBPcxVQUCVAUACfWtEo5uHH0a7/5W113m4A0CxLu/C/70itDMhAEmFQqUIWdFKgECER76s3+VKuqFJFSJJE4E1QFRiLiZ8Ez2pV8TegbANwpTTuRzoBpwv0YTDEt1Ukh2skZhipi9Fnh0J4lkuBZa98z69lOYNhLiM/jbzuZYDK/qU51FIZIO3PTrixMzQSJJ4FLTZgYca8BgarK3sAyiTsTSJunzrJJdJvsV0QTNG2yfcFkGdt/H5TQ4SRagprphJGGfUxsYibtDT1TVolUzVAZJ+eRIE7l9VOJTSH2kzZH7FQEcXI24nQxVCloDLQTRiftDYwMjJwH05IjMgKhlgIwtROioiEhowNVMUHYlQZOwgfTpHkpeWdqhNMfjYSAyZN3zDEqIPcpB0cfPXn4WntsJlN5nU0zZYRJD9VpojSJoURAOOmnBiAlE/NeAUZCAJ4MUIpAhDxJfoDTtZ7NtJTiOOy+Vjph7inlGEIlUpgIQEqJTLsQ6C6WuDNLEiIUEQAgxyqFCJldzpnZmRY1JecMqeQ06U1AU5TVzDIQT1L27r0BGKKUAgDsJgasIKBNUKXdisGIPeLuarAPw0279wiTkrir/KMJmkXIU2cfEE4tfqXIs2HPzk0tcjnHSYz+JaloNq9zzPN5ZUKmEBp3ePuOD/O67vpeiyE4hwiaSkEjN60eIBWZ9k8lFY0bzQMiIFMpxYNk2UouU+ijCHs3a6ubR/MXsq7ber6Og4hETWvZOgCkDqwrkCvnPLaiPmtXVTdFlflA+WjBPQpUuMlpa5S1Cqq23fa+YtOAWoFVd26/+b/82/9H1z9aXp/fuXc/WWQf2m7RLtqzp09nszCOG24IyTvL/XV+utoMy1gipGGzHS6q+eL2ycuLdnber5vZrKKDcUjehXZx42p5nvvLe/dmA57eePGb1+dRcu+w3LnVCSpXVVGft+vN1Sc5z5Nufenm1VEGrVxlRqWMwZllOVnMS5IU+5IzIXDMYHR5
ecWEgQkdi6DrWleBQWGexZTqdjZut6Xkrp33663jSrQae6ibG9fb4hrfp7Q+/wDzdn4w1wRgVIVaDEQ1hBqdlz7GnJwLJfat8QFhVRH7WZbdyBvyYGjMRiSoOWoOQME3KCjeCZukIUhuJV9cPzw+uUHAWXLryDOo5q5rs7lVv/KVdw7TaoMPP5zdlJu3Pl/5O6dLFyp0/pCdd1QVKUQgglIUJICfA1YYjsA5sEwm7AgUva+BWdkYALQQaio9AIT6BrpDzzPzWwoiYiLCziGg6UToU4Ves+WSrSRRp0oljjgRK8AFrhUYbGI7kA9NoB3WOi7HEvKv/rkv3TsMf/jP38XF7Mm2cA03777a6rb2bXvz/vVV83G//vyLL5bttcjspLrx4PI0hKrPBViW/bLzTdB8ebr57W984ydvv/Wbv/E7f/zWD2azxVbOpMxE2o+eyMtfvnPr8O5wdZFRLoclUo7V+Jt/52v8s6fbJ2ff+87DUJEH/eLnXnv7d781C/TGq7d+/M4TC7JaXubYxLMNRP7eP/t593L78XvDdvWn/+v/yz/40R++9Wc//tH/9X/1v7s8P3/7wSe/8ud/9Vu//6ODxazTsP7Jh/hpbEZViAkEERVBRUyEJiI4gAPQXKZmRgBQLZMzFFS980g2xhUSIlFKRRXRETpm53LJChDmDZlF5D7GopJc1sOD3B1fjryYdRVjjLGE1lVhiNuua0PX2RBPwgE+fr8dezy7mHcHhUhFAMk5HjIW16g4F/0cug8u3moXbdlOqvvI5KJC5d02ZbySs4uV4cO68uWUv/75V+kAfvDovds3juLYW9Gkg1j2MGso6JiEGAp75aweRsfAYHnUhI0LXXzx9ersycX16MFXWYAB8rDqWv+vf/cff/HzX9LjdOPVe6fvPvn5z9+7U81/46uv17h+/W714PGqqu9D0f/s114reeSaUUrVeBHs1/DjT5+8eGf+q195yQQ8bvuYbt5cEOjFeYZYlSFDU+1u2wZgSIRdS8b8n/zd/+gf/cOf5KvNrFs23ezho/7G8d3L7VYVNklGHbyxZNEk4nkpfXNw0HS+Rlgu17Mb7ThszSSV7Iiq4HORvh+4nqXoZIwffPpx1VYsEAgGK88WBswsOxcGmQjo1Kvw2emyIQCR1RWL+MVhx5vkfbNa9uth8JVD9szU5D6PpY8ZFUMd0KSq8PDuwerJmqxVEsech2QemZiAXnvt5vknn3pHXdWUnHMSKZLGRNk/fnw2qlXBc9uyD8U218slE8Qxm2meQOseVkMshrePjrs2ACAhJSkVQ+jqdz/59OnFqnZ1cD7GcQXsQ5MKGotjNjOQgmA+uCTShoYZHftF215stkPG0Zmvq34Yq+DX48pYiVzM6ionZkV0mvRRqFRECEYtxQqCY6Cq4VJsE4e4TW1dacp17cVoyEM7q2KKjauZGG1qYcEe5TJv+h3hwMo0++SuiEAjJ7OG4tqBa6tZtzgKTVOyJdWDukUOUrIU81whYDLIknJJxK0ATF2E4GB7eRavr1tkJJvNO9/U6/U2kFMtY0nBeykJkZEckplGh5UBMDpEjKln54CEsQIIKY99EWYHakWLoTmqiVh0zQQgAGoG6F2DyqdPRk9j4eWD8w0anxwcKpKBSpECigp1qERtTLkOcnRw2LbtAMPDsU8KanI4P9IeMA9g1TD2ZOSVTd0mZd9w3bSCenm9POjaMY5NqObdYn11qeSHLBzCmEVF0XExIQRAkBxLiaJWULtZO/Rjzpn2sKLFzRMKTvOIzrEPcZuHbXZSgCFrqhcul8JsPkDblJyj9imNfQbKV97PF+hrxZRzAbAwCw5DSgXUceWaqu4lNfPKBY5JFbmkVOLaM1Z1YzLGPETDmqqqmXUHR/OmOQ633nzhN+/fe/Pp6nx+0s5CRkPAkLIIEHsXkzo/z0lIzQxzL8OAWAJrGJdxFS9zkT5uUTUOvcVecgFQ7UfCXJJuN0NTE+RkkqykvO2dMxNhMGJalqSEuWgxUS2k6kjFZHH75uz4hGqsA7kKqoVj5zwjYQOIizt3u1v3fHMoGVgKblcA5puGXcXzI6sqQyZfqegOGyIZzMjVYqCUZ7M5FH7nu+98+3/8k9t3f0Vmh+f56aH4q4sLyaM+fH8uV17OvNsuQoUoXRdWFx8dH99IGmpfb8YRnJgA6uDYJ/AlqyFaRhSqjSRrIWRvamoiTGxIzpEaiCiiAkIxm9x0iOTIIZWUBIySChnRJBMblGTIzMYxSUnmzGkxIigqoxRG570nwhzTFL5h0wzFGAEtMOWxOPApJZPsDvHglp/fDjqGi5+cxZFY/fpy7B3UdcCAIHmULMlsAtwyiaga6NQbqmaKUZUJlaAJ1eJg0XpnKSH7ZMWIkLg5mHE3b44OUxbISIREWETQ9LCZYbEmtM8klef2Gdhbfp7pNp8JbBl+xli0txVNvJOdQeOZFWlaM+6X7rgLl03Wll3M6LmB6PmL/dJPpgXVzkOyb48CgJ0zYu86mn7B4Jm1yOC5CvEZ5Wei1Uyi0rMPAVOISuH5MvN5JOmZcoQGiKxkABgQ63rW1uRMbp3cXG36y9PBueIRrfRHt46ZoLcNU9XUlWp+dPbgD/7wT976+QfI9tJrL7VgP/jBjz95ejn0yQQr51HVRLPotH4U0clIgkwiU30STkVYjgmmXi5TIpx+Pn0UNdjnmACRgPdv/lloixCMEXFXJeZpLzUQopna/Te+8Bf/5t/cbAZCR4CgNgk6SERTqRnxJJ3gzgVmgGiqiDiteNFkCrDh7uzv7GkIYDrl0gSJDE1NiKaIEexEqKlyE4Am5M3ujACCTYRNU0UTkzJpPaqCIiAFTBD3YiIy2FTKYs8Gle39ZWC278eT5wLo/p/pgwCgSiFkANqPUJzSVjCl0sCIaD+0lIhMxFB37HAgQj891WRO4cnvJLqPsU2nB59rr2oTDByRmdhAEZGNwGw60qYwqdA7zcuEaPq6kojuPhCamZgKIKooExFMzhcUKcBM5KbvJ4LtireIDRwYahHnCGHCUhCCqaqCoNl0jlQSAkyonVIyIDPSRD2doOaEIDt6OE5062d5KAR2zNOdCBwRIRiCmRVAQSREh+RsTGWM9C/+w3f+q//530dRIJu8kwwGSJMVjKfPpDZ54acibwJS25niEamUDAiTP2gidk+eHVFTLTThyVTBzLEHmMQmY2YFkpKZ3DTMmCuRguaIPIABMiIQwXS4APeXPEREYJzcOkbIhKw42bjMpEwblqYKpjr1mpUyTa6RmNAA9wiqCdQt0zwcDSbYNTJjjGl//USYINiGMh0BAN5Jc4BTTQeRqhI7JBI1QiiSmAnIdJeeEzCbiOCmYgrE8Axbtw+gjWPlvYHkHAl9087ZVbnXcVPYVbODg8p7E9ECVcUTyApAaw4pJTUrJRcV57yJD9URts326gxJB8kGTYV15QkgFSxXmwfe09CfGmyG/jIVcXTkkEFWrZ+nsiFwBG3rmuzbQr5oIcbgEoSoaYxps9r26ltFBsmV98ERsl+EWT80f+XP/+XKzpbbs3lXhbZbXce
X794crUSR+cH8/OknoLmt2z4lEjtuYHX2uG7ZHcxPz6/vv/Ti09Wld1ziMPfNdrs9POh8CBXxrK11cefi8qFKjPGq6Kqd3cy9dB2pXG9Wa8c1MRMwuSgpv/G5Fx4/2JaSS970VyJqOee8BmLXo3lsSqFhHBUwgxD7vqSu6dKY6jocnizW62unvmnr1dW6FIshO89j7tebKFmCd3ePjiD2p1enoa6YpBQLBP1Khz75OnRtGAcZUiFCZPYOwNP8pFPUfhM7NNLtGL2nUHfddnWpKrV3ANaXnHIEAs9GIAppiIMx9qNYhuOq/cobr6Z+1XYLicUsgXFVVbPZPKa+qThYrVLAcDbrchy2F4/aumqaFxZNK9I7SKQZsd5dHM2rQuXnjAEIlaIZkHl0mQgEh2y950Y1g4yWBysDWkGsER1Rq+acmzknOQuQ0+kmKaJiIhHNV4HVSoECGQyKgYzDCmmWUuQ8AoYsJRCiEmqZ7j0AMFyntulWm6jnfeWb06f9puGjo+M3v/b1d7/zey+fvH4i8vTpo6fXj16e/fpm1N/8jb8zpv7R41MUMVAEZ2jGhp6S9E/Pz+7fvvn0vZ/fyG68umZNM+9apWG1lPH4g0cPq5iR5cWXbm6vVxeXV/SOzeP2V1+/+4sPPvz1v/b1t7//7ve/+2ONRj6MfTX0MVLqC73+8q1A4WffeRpCE6g5qbsvv/ja9XtXD37w4Ksvv/z7//RfuT69csCf/uKnzmTeuxfal88ffUDZOFgcBRETCBQlACYGQ0doUDyQgoAIMwNgLtFXnggtK1tEBIVELuQ8QGDnagFMxZTAqgZ8vQFSYN92giJOByKoZiN01LDCsB423J1Qd+vy6kk7C6OlftBaTU5/enS1OpSqO3oVEIY4AOmQRh+qrg6DFs6hgWa8jBTdYXXkbrqLqyfOs4iUDCBWORrNihXJGnOWFSf91B25w/lMe6dCZgXQFCgPgmIqhciHKsRxOzFh8yAAmqeNJXN/9pNrX1UJUPNoAmiFVGaLllV+8q1vhW7x6KdLzGFFejy/vZi3jGpZfvtXvphjCd6FWksEFdGk5DBl+r1vf+TYvfaCqxoGleVqdXDj4PIiPXqyutW92Aqn7E5Pr+/cPlbalTOYmYiAhk8e2r/+F98/mSG5eH29RE39di0aDw+P+rFvm27mqouLy7s3T86vz7u6tWjL07O6a2Ek3wVxKbgq9uq9V9MkuZvP2dWCYx61BHZcqaR+1eexdIfts7nRdFMxAxPZT8dplwhAUFFVQ+IYC2hxoa5bzlrqzm36RMgJyErp19kKOobsUEuJvR7cqCSW7dCDKfC0r2Exlmx6595NyJsnDx9W9WybUlaNm3Xc9E1oDo/aIQ1V0xIQFtEsaipFSsmr9bZIAvKSy3Uc6ro6Olgs5o1IKaWUlBFMVD96/PTh6ZKR561rPRFAVrxabrmqiubjrvFqWdJsPjvfXAMxGCI6tdw09WHXxLI1JEMUGUCtbisFI8dEuElFtOg0KwKI48hEZdq+IQKAImIFY8oGlLNChUQsOQPAJG87dAhqBiJCgIntNC4jFSamYiSCZUTEYhmJquBRIIkdHp64ptGmu07ZgGez+ZxdQ1AAxJQAyigmWrlACp547PuqcuM4DquL9dnjCpjA182cXJVKZoAUo0hxVE8Fu2ZAMKmE5LmWImKKiM4FRSDHUjTnjQ++KDiqASmXxOTMStGspMyNak0cCUqRzAaS4YMPPrnenGd0lavK9fbw1klbd0UEBUlFcjIOFGiVtodNbWPfx9ULx0eF7PT0NEgseTyYzbZx9ASo5olJlQFU7Ozy9GARuroi0HnbUCkpLg2hqtucEiiAFgBH6IicJwJEZHQVrzcrIA++QsqlFNrPjKpuUTx34WAwzQLFIhLVre9RAnmduu+8IzZgI+EiqlAoCYqUK9kOiYPPYAriGsdVxahSinqL1vs5NK6Po7WL44qdXKGY1QRkNGz62UHrCVxJbhxnh7fefPlLx1BffPin31m+szg+Obl7L2gCInAL0VJykVgsJlYd+41B0ZItJgDIsVgqRQRUco7keNrdds6ZQag4bQcFiZsBDYUJRKrGKShgYQMtgmrAYCoUGOLgAAC1xAwB73z+1bvf/Fp3o/IEqV+nNLRHHr2jrCo0P7lbn9yG0KSi5FRLCcQqBFzz7ASqzhiJGUwJ0EBLEQY0ZgVIqThsu9z8/j/8N//un3zv9uJlL/rk3e/ffrG7fPpezpe5DPfIeH15VMXXv3QriY9FcHa0CsqLY/UzQ9KUWwYZx7Jax+V1CzWCWdpEMB1GT6zsoggiIZnbb8JOikPwPOkNRGxEKlaKqBZTITUmRORiBoaqgkZqNlGBTA2KGQEiqCkgBedA1EpWQ9BC0xBkIwfgiFVLP1ABLcg5ubocvsJ3v3EYTgyET3371h9tywrBXCka1TAVtURYOXAi4H0oWlQNVE2NpqYc1anwWABrpg6hBlyPueTiXBXqppo11WxOTefqQE4NlFVzHonMhuSHbYVhe3X1XBPZOVMmmi9NK/ydzjOtt58vq6cthX0U91kMaf+3zxAxu+jYZ3xJtmfD4DMJ59ly/rnT6JmraVohPncz7fNju7rxyZDxzLf02awaTEanvU3pM/6m3XNOm/I7cstnGEX797T3mzyzSAEYqIr4UHvQsd8ahMrz1aMHBaYbRd10wXvKNlxdrTyz+uq9jx/9u3/z//vpW38GWWaHRwfHN3733/9we3UFIsQusNNpO2/KTSFMwAYCNJhSKspEBhNbF7yjwGSm3vFEKgGAnf9iYgntDjoa7qCDe4cHaDFDmDq5YHo4ggFO/g1W+9qXv3bvc59fPT3FUOdSfGjYoZk+cypNKgwAEDnAaa2PoAr7OnYE2HGuJ5feXj00MFFRySCF2O8CcgCyi17Z3h21A+WITPQUnfg7CACIimSqaDpF0KY6p8l5pKYI/FzlBCAkBTUzQppsQLg3jT0LQ5r98pADMDOFqROWEUknccgEkHd5I5gesyNDG8jO0EMEgKpTmoyQePJpiyRQNSKkHe56sh3tRCaVZ8oVIhnINERNFQAny8x08ImJgIooIU18p+nnqmYAaoqKqgp7X+M0GsykaCJiAyVgJNpxnWkyK6E98/8RPdOGVRUAp0AdIiqASiFiIwAkJieSERGnhlrTyQWjpkWz4wqfwYlMiZyaFhFHDMhIJFZ2s0tDIHKuMgYKJJJd7Vbn+VvvfPx/9pXGSICEOzyPmlpRJiamqcfPDKaxISpqBRDMJqtZmT590YymRH6y1ewQ4DyVtqCqsZt8VbtaOlEtWohYTcCmj1kIWRHMimohJJ14YZMeTYQIVgAJJEcmP5nVjFBKRjMGFdmTj4gAgNnnHI0QeGfQyyUD6BTSA5oyhgBqCKRgjn2RolImvx2AiQh6T7uHKzJNB1MNJu4VoSOc8qrFtAASEiIYw47kDTANaiDnEKCU4pgn0XIqdINnUlHXeUQOtR9jyjmprGezUs1C0yzI+abuHLuiVjlnVhhdKZkJREXyZqqoJyT1XSBOwxZyZGWBwXFF6J
xazgNb6bcfqbvEWKc0GAGYb3w95kjEhnUpqeTB+y5UXeOa2exok7Oqb3w1rpdpHMA2AkWpKoIyrgkMoBZFJbL26K/9pf/49uGt0/NP5t381v1bv/j00cnxIg7XHMLTp4+7usljOVgcNkjDeMVh+PVff+EP/+X350fN09O19zOJ9XYjbVedP3rkZwusEdHXLa6W68Xipdsvvh4hrVZLDzrCuycvzNYFf/HeeVPD+uKaodkmO7l9BGL9Jr6XHgxDZFcb5DGOdd2O/cjsCIqgzWah9jNUKebm7UHJad5BXTkViuN2dd2nlCgJpRI4MAhSCFW3ybHPq5s3b8XVxsoaLOWUksC8q9DhrJ1xmPdjevToQcc2n8+Ob8xiHMdVyholCwdnYIZ1hemQ3EYCiY7LlagqMRFq1iKSJbExgvMhmEpdufXYo1owmTtKOPLcPV0u7x6dNFWDGg1dylAKSlm37e0sJcXBt1VommGMl5/++ObLctjeTlatYwZOZNtcrPELxppCo1KyLgEFggEyZJU4okdiEgpAwXRMebDSl7gUid4R5t656FxlZIjiCLIkJKeSfOWccS42tQCCjABTTnQEiwimOYkMOW/AdY7QTAyEmQx30ZtiuO3z4ubixdfdJ+8/vXdYj95fPDz73qNVG/3Vk/Pri3e/9sUvv5ZOTtJZGX5x+97nP3mwNZdms8PVNisSoJUizgD41nZdScVHc/fo7FRJ7t5eLK+3Zhd/52/86j//dz/zRQ4O6j7GTz58eDTvhqFcP1lXDFd6tV1af0nDJg9qzlfNzeNLSS++cff9Tx+6WbWylczz/S/WZ1dXjzcXTvAnP3z3//Cf/xfdXy7v/Pxnulos6vDRRw8O79RDplsjXX78DvRFAVJWj0rkyGgXtEYCVLQC0/6HEYcgJRs7rCsBUCAMVYmZHRvVwgRNU1QyVr4+SKGx2ZGyi6LMDgCilTEPflFnCCbkwdcKcPWES7+Y3d8Gd/PmHF3qt1vMMmd37B0V55LDJmy2K98EH7gvKTRhzUgtFpHFfPbJo0/vvPLSwZ2DJtx88scPanColEUENGMAHyRHdVC3tUWKWbarsZ6fbM+uKmQKlDxOuXxjLoLe0ZC3xRXHAQpIFnTOupaaZt1vZxxydmmzPThoFIojVMmr5To1dVXVzunpo6eHd+8v2tnxyQlCZpza1fiD06tX75wgipQSmsZaV8acY/zivcX9mwfNTK+H9GfvXHbOK6Z+Qyn5Wy/j7Zfa1XarVPcpBhcmb2+/7BFtbOZ/9u4vQlrqOKAfBHS+WCyXS6xdSiMaoMmYe/B5mZcZ0gvHL370iwdV6ESs8X4cooAqpsWsMTIXuIIQgMqwbdtKmGZNdbm+nndtzCikz2KY05zAzIhZdNroMjOZkAbTpI+IDZm9T30U0c04AuWucynlTR8XVYglZs1MvjtoymqAgsM2zxeLTz85rZ0f42CiLjhyELgpoxzfOnagy6v00q2Xk66zDcg0xjKvwmp5mSWzGYjN6pnHAs5GSMM4lCTIfrWOHLDt2q5rZ01dOeyjkGgV/HoYL66Wn55esgumMJYoObVNyBlJYVaHqqkcSYoFgMdiTAzGomhAORsq3D45uOj7IlpMS1JBuC6b46P59WpN3k+amaLlXDxDTqkKft52cRy8c965lHIuhYmJnAA2dS2lFAGjumm7aFAHD0o5jz74zHZZNtlDSlI7szEiOI8zI6dgkofjg6OZq0oOGsJgKI6jyaxq5weLUEYulkSZyczKmAyUkUJoS4516wFAtVycnbO4rm0rV3WLG2qSx00aewJNWmp/AJACzbKOoGrEWRQgTQsNMislceVziZ6aqvIEPCllamBaIYdkPXOFmhUAYEiyRL8gRQMKTQNZYa0mA1W8icuq94SkKqGqk4iUkauCGYoaERy03cX5xbEDIu6vzmzwiHzebzIVdeIVGGHW1NtMQyxgEIshWFatQYe+5+RkEDE0g3GMZhYdOA6zg/lm24cQUKlqO2+wOL5dEcchO9QY++lb0LR1AqwDFy1CxYkjQspKnbAvSUo3966rAKFoClByTI2ZbQdnBjmyEIjpMCJZyQWrYpYAzIWqWZSbL3TrT1bDckNZT24c5tmN820f+7E1XHSHXIWmZRzlRnvy+v03joxks10E2j462z46++QHP2STJtRxKF0Xplabtq4lRgVENGLWkkPjyUhUa+cULbTsENJ2dN6FCkFgvbp27PthIC11U5Ooc0SkKRczRfK+JnYUunoQ1VJ8CDAmcFBqePkbX33tL/25TaAifek34rSZzT07NC/R6vqgObpjIagV7xLHkXJP4+gouGaBoREkYqdmVoSItCQmJHSiWVNutcMN/sF/83t/+N/+yY3D+2tO6+XbXV7mB+OXZv3D8mQV7HB+FBO+dPPgzo3Fsljo5u+dQV9Xs6Pb25ITGM5rF7xT5XhjAaRjzHEYLx/PxoEen26ut+M2O0cioALACAZaig9Mjh2iwWQTRAATUDDLqRAiG6kBGdTObdPophy8gCmKmhRBMHKMCFkLUUBVMDFVA546a4oVYDKzihqJyVeVOIiammNtD8udl6u6Q2bnGG/dqj5uN5eXBQ3VIFtBVe/ZEUmKJhnQOyIRIeQuuFIKEATvzNBM552fVRS0jJvBFEWdqys3n9G848N5VldSqgDIQ4WWS9ZSaiLtt6BqfPRMHdkze3aqiD2vC8MpELMPkE2OHdsnH+yXtJX9et8+g4rBvWTz2YzZTmZ6tlzfvcpOtNkDhvbvYHpi3Cs2tler4Bk2ZCfs7F9hr/s8T589+73prdrzUN30g31Ebffv9JjdD595bIkdSynmsGguwwZwRuQXTdfNa9EU++1qO7Rt3R1055urf/k//Ivf+/3vps0WPeVYPn36QMuHDojJIbEVkT18h3YeKdSi+80aAAAVQecmBr/37GkishMiMjERIk5qCJga7laDExHFyO141dN6lR0boplOR9gRk3dFtQ5hEeqXbx6//vo98cW1XsGSsSGBkZUCnqcJwWRd2Z0CBARGIMWyIxib7YwqWqYMHO7yX4gGJoIGIDbRghVtJwES7WJtSACKMJGkQc0mUWuH0t7RswVM9/IYykRDMkNAFQFU2wVJAdEmnWE6gVOECXbes+f48737bT9yiQhUVads2z7Ptss/IgBMSCBAZGdWJkMQ7o1dE+3JxFQSIDAzERqQik4KztQYoxPae/pC7dvfEJHAAZrI5PUwJCDi/z9Xf/prS5am92HvsNaKaU9nvFPmzaysubqr2NUT2S02RZG0IIqEIMmwKREWCHgSbH0wYMCApy/+DwTY8ABYtgDBhG1ItGXZlLqbbDZ7Lnb1VFVdQ8558+Ydznz2EBFreN/XH2Kfe7N1cXEzz7Bjx45YsXesZz3P7wEAE0UExcn9PzGZYLJfmSg7jwgimdlNw52MRJOawNTVBaQKWoSdmwjKhKxGpqKgpoZoRKRFDZAmxxkRE5vJpAaxc6BTIlIAJvT1HWxputTUDNBNoVczK8JMBqgqouLYg8lkAJKkSMRM5hANHGHw3jfV+vK26
/gSFChoESBQVFOYOtVUJq1nH9ADNeecGQESmIABkYN9b5sSsxQBw2mkE9LU/4Vod2eeVIuCIU6XnRGRSvFMiCyKnl1KAyHIpOEh73HUBjKlNT3ZXqFGBGR2OLm+AA1QVJkdgk2L8SaTUmmGwry/lJhDsawWfajQVA206PQSpuNPEwfdgNmZKRFNDbygZqZmyMSmyuwA1WyqmbM71xggTPFJmwKPU7bUDB17MzE0ginDx9ObpOoeuf1aKlIzMuk3WgpIEqWYSwIWdgToNUsuAOgVVQx0TGY8agbQlLWkkRAZKhNl7ppZN26uoJJdHJ1VhMHyzlQUMpDu0oUWc6HKcYLFs5EvaOw9QnHoc9xiVQBUY985cpDzeKvxNqdNHG4nPxYDGGlFIQ3Q1LNh4L/zS//ez37tK589/+FszgdHi12W1fHq5P6qjP12uyF2YlLPFkmqunbbclaJ/P5v/4EJonHbHh6sDi7PzwUUWCOpWkZzF9vtvPUu1HFMoT06uP+lF0/fu9iWqtke4uC62cnDr93cngnebhRmJ6dnm17izpXUD97Ituuz+XxOFUuMVV0ThBRHV7emNI5r0VhQLY5YjLVCrFpXd1WNzFvEqgm57HzFjLDZbtKQg6sO5ifbvnhqb6/XZdhVFCiEFCWLprEAOfazNx99pV9ffPjJk7Asi2V30M4k5WEcLXIBG8ZsSn/rl37Fz5b/5Dd/6+UwZu83SXKfWFKKY/A+peKZJZeYB0Adh1gkP5p13/ram08+/vDe/XvDdoxNrr3zLpjRMBYgRnYxRlMpWco2U8BQ1+Bg/fLj5lCw6gaqvS5YqXatKfhARUZwKQ4vkJxzM+drMCHz5CaARwXmCCt2nRKgRHaeq5ZCjSySb9FjkgLkkMg5AOdUMwKRihmYoSkZsFkex63EXiUNfcKwoHoHBuC8iSEacXhFabncbOfNsgboTo/HeCO5yeAwdFfD1eMvHS67dtZ94eT45L1/8ePqYINePvj43dA0X/7ql3/4/suYjJ3WhLLdqVLVhV2vt5t823sNrZmcXY2pwFDk93783umDN7b9NThhyfeOj65fXM9nize/8NZ7H51dPN3GkX/vj7/7i3/5nU8+uVYtf/rhT771U1/sr4blfJk1p5xWD7tf+ptv/fr//U+efZK62eL8Qv6/v/brj988OLvtJTdAWMbjLr/1l05On//xbzOA91iKsPclFwICU0ICKykOVdtlsUzmQqMlYDVTg11Ks4MFE+2GzIv7Y+m9Ry5lc3Pbtlg7De2jXeahCtI0qeRqNlfJOY/ONz7XApRKaZhc2pZ+7V1Mw/rq8ifVyRcMXByZq4cHq1N47/dsd7uqDwwtglATRsspJdcE8X5gPv36g4tPzhBodX/VPjrYYTHv5weHsBkJsXbOUMAsx2gmjJj7QXIzGFvA8XLddJVKSSrO14xcYkL2IdRmlhUTTKtgAEAArt+m1kPFo+yylDp4jrsBPYkQmI6qIwnJcLO9PVl1W+qRtpdl7MQCKSNu1+PDe4dmhR018zblUnlar3fb5H/qZ+4Nm9txG7djeXIT//q33nl0kn7w3ss331pYOxIZk9xbzURh6FPVOGao5h7N/sWffvwf/of/x599eNRUuuvH04MDJb++vZl3Kz/vLi7OyE01ybDZXT9YnH7w4w9TsnB8cP/e6YunH/jKsVnlK2KftJhhHarbzU3gmaQSgkenwQMCZBEEfEUqmu4jnXcl7RkA00rSHoZAhKiIlGLKScC4X29J7PTByXf+5PtUqkCQ4+5g1VrG601PDp3z6Nxsteq6FuBK1UoqA4zzZoHChugqqH2+fHHd1nXWbUqjmIHydb95++133vvJ99ExOe9dAMAYcy45m3z68qVR651TKw9OjmeLGRtbzv0mppiJISd7/uLyxcVtUWxqH+OYso0KGVDUApdZ9uh9WzfIlkUtj4vQFEMDidEq75JZG+pVU91eb4yp7moxKojrIbIPY8kiGR0xUzF2wZUsrWPQ7BGhiBlqLlUVUhYECOxEAIq1VdsXRAAHJiWhsfceKrqIVzssJVkVAqhOy0FY0FMJzrp5F9DdZm3a2cjcdW3BElrfOCQdEXHIZSjFOQad7kQLspqWmGNTVyXls08/1V1etF1dtRMkJuVccmJHqND6Tvar1gpQkEwUalebqRG1oe7jznlmBhAEFe8qU2E00wJmjlwuO3HZceWgziUjAmIwIyZKY1QyIhdC2F5fixGgt5t1UjxYLKd6KSYmNRUh5HE3Mrg3T986/+yzF/Hmp771xY/efZrAXacdhvLGyenm7DxpStgI45iiQ8oRJSVnWFkwcSVj6jOgNU3jjEQEFJVgsylczXzdgvZp6MNstqOYQ02LSi/Xdld7oyU7gd1NGsbsKkeiKeVYFNTQcV076jzXNXpiqxBK8YMOQ9VQLklLDstZXBt5p6oihAWRueuqmGNLgW+Y+w63ktPt0eOT9Wx59Pjk+tmZy1zGVAxn2JQ4HB7ef2v5jivD1fpsuWjnrUPh87OzcbfLlj1jMB2vNj64nEseS9VWRYsR55TBQMCaeSNgqgKKVhQMFR1wGGIU8gqcBIKvoKrrpkn9ELOIq5aHnatrc4Dks1kAk6GvrKyf3fi2eetbX733U18tdYA0VsYq7Kjz5FAQMDSz1ncLrOqSo8MEucdhY/2gSd2ipcVcCTF4JDKRKVNjRdH5lEUThzxzl+HX/k//z5/80Q8fvP1gE281rb/yc4fO+fX1xVtfffyT9927l7cXfV5+4f7po7Yf1hhCVBo9rQmK5uZg3vfbUAVgRHLYdEABNajI7I0vpNtL1z11T5/i05eQjXU/rWGywChaUMBVlZqVXJx3qmZFTcGTK2pi4B2bCoDOW8bamXd5l9JGGNz+hlzBEBz6Cd2VpSCCqDIxEKEhEzrHAbGI5jEWwOxlfgz33gndKSYopmwRWP2irfpKpJCYFikGROgYFNG8ZwRVMTet/4p5Zq4ZgFJRNPLsGZyaEzM1aNq6WsysrsCFqGoSWTWVQqKkCUU1iQskUmzcXa43r/WdyTxiug/9fE76ufv5nUnoTojBO7/Oa0MOvlJ17HOP3m8O75ScO97QXX5jn6AB/PwvwesvYf/DV7LU5BlCnEQE0zux527HXmkA+HqHYNqige2h2nvNaM+pvRMGPqc17anMr21Nk/rADJIPFosqIDE4z30fb293OSUCpK59erP97nd+71f/6a+uL26IPRSLfZGYffDOOyQoRSaT3QTkUQNmnFZuEFHBEICJJkJwUSUCh9iw845STgBkCkxYsS9aphOnOLWfv7JP4Z20dlcEBTDpLKriJrqewayq33xw/IXTUydjvLmol8uOYXHv8HIDz3a3bbv0zGYChIhkBlPPKkwmJgA0IwCdDF/EdxYtJGIwA96rG9PvqiqbogogGxgBT/jo/RTXDND2x2QaDZ9LAaoWoDszienepbifAEz5w+maQyBmQNNCRgb7hrW9FGNTIzjvxT98Nexe3RGpwd7urZaJPN2NrjtJadoBQJVJaJjsYMB73RNwojUVRio5IzEzy75g
M0+HaPK2AJJKQXTTdP3OQET7XQVFZERfSjIDILhL+REAELs9PJ7Rpn14JaiJCAKxuysFm9YCvVkBMwNhdiLFgJDITIhZTUQUkO+8YDYxmdXEpqAhMxFMWD8AneRkndBvE3l7fwVNomSZKGl3MiWxc1J0eg5gMoAiU7DXZlXdgiVQEihjqT11VZNSCn7/Qk0tlTLRlyff3HRdTsRvk2xozA6RSk7MDoxUDYmInZQiqsxTRwcSsRlMWqpjN7kVRYpOktJ0iaAgWS5xMt1MirhNq7UqTIzMsNcbZQKKF1EEZMZSonPBbGqVsEn71QlmxDwNrUlgMxXAPZbIwIAZdJ+AncaZAqhkK4rskJ3KVKunosW5ygSRyGjK96lqESkh+InNAgYqYnee0yk5B2B3lXeTtZBKLpOQZjiV+tmrq2AvFXlPaNzvxiwQvAPRYVhv1mfd4rSuQAp3s1CKig5JlIkJPKIhgiNWYgIlywyQh62xit304wVaRExQqJR1yjdGUgVOpoCBiDSoJFXBtjkhsr6/DWyFwTW5WF+4qxhTvzbLu+H8dnMe886YVS1mBQaF4KxCV4se/0//g//NYtx857d/643HD4Flcz3s0mZ2NN8Ou/5mXdd1O3OBGCm+/9F791aHo2aNtnl2Xrd0sb4dBp214cX1RTjyxIaV+I6y4ZgKgS7DrPa+bEdyzcHqoeXtMK43l+/V85M44ri5/cZXv7QpLqpcZJkdtiLj1cv1bN50h1XT+O1mdGRFLJehaWqJJaqqCaJU9Yy5y3EXx15G0ZYqtnHoCWmzHaq6DtUi5qGrkciQqOTSzcP1+TnPDtpucX19EzwhmETLRYyhoPhQNYv2a1/96adXz3/nd/7Jg/nBg+N79XLRVr4mAxk2m+H85ff5mt48LNUmXikNfRQ008LovasQnEOnqDFGs5xSUpXL7ebZ5ebx2+8Mu/WDe/PdcL1Y3m/qDs2ylOBDCCHHAZi5a5Gq63495tLUASTjeNt6XnYHiZhEDTJDsDKUfFnARPqKDtjNCGsjZG8GmcAhVqZK3PrK5+y4rUUjNx1Sp6pMJobkEJAIawAyS2YmJgYimhmD83Xd1eOud74pI5aiDDhV/wIKZSByE9LrFbhr0XQ/87Wf/7V/8lsPjttf+Ff+5T/4g/faZmGGN3qBdb0Z872HD59c7TaAl6WctHOm0mD6+uOTD39ys+x4HZOaqYnm4uI45GF+sIplyKWMai7kXCBJPSDnfEMYh1E2cTgMq8UxV07/5Cc/KjzrliuJm7iN58+3m00PLAfsZSvxJvZ5XB4tCKonn2xvrn+8BsHKL4/oq7/01d/457/63/9f/28/eXF188N1Q1ANcPP83e3m3ZAVakpmgQSBKwe5AGJwdRVapzGNVGXgIXBoDgE6a9qcx3Y2i7tLuXwauJLANj/Mgct6tzyZM22DnEPayEZxdhDY18yUxjLuGIV5jlw7YAexUgPY7foX3PrMicertqxcd89k1l+aXvxwdv2sEkt0nSX65sDQJEssqZrPBmNq2Bu0XfXyk/MvfuPrtPQ36wRmJ28/fvHuxyZUpLARACNbFgH2jQsGevTw4CZcRwFDTFFNhFlIzbKhg6yFvIMMZKBZCigAe9NKd3/p/uHJovv0ov/oMiartZATQFfY5UHNh6qp/TbZ7uz25I3q5oPv/+ofvPF3f+EXZiEwUb6J0AoiobFoAdNPn505fPjkbLNY9WUYa3JvPTiavTe+9/EnJ/VJ23QGZsbvf3R2/3hWSi4DnF3sVqt2taoARLPAzjdjayDnZ+daNg0v6sYHUoppSMODo1lw49m29KM9eutLzz59sjw4tHXxfnazuSlFCODw6DgOowqaORAoEj07JiayaHK13S59s931IiWKHszC5+6nFYCREHVaZ9jfGRsAmKKZijjvwLTqWil5fbV77+OzKiyQXZG8ux2O54ebtOUq5JxSzJVTNiP2dQ0o1sxCQ7UaFtGYxtXDw9mye/KjDxtfHx2snp0NPhCYLVenjx6/+Z3v/nOD0DZV3VRFhJDGmK62w7qQC6yWjk9mq1XtA1uBOErwPpZ0c7tZ78arm3VWK0Cay7QabA4TmqsI2d0MQ8ljE+qu9et+BELHbsiJjNQCIGfrg/qj5er68gIsIPJopah16D2QI2mb0A8RGcUwC4bgQ/CgAojB+yEmcyxI0cSJ1pXvd2NXud04hNmBiHJwSKSKwtyXTSQl8E6zSckqCJSKWCme6XixBMYIKL6xtqVZ3RcLwDMOXfAVgRYlwMB+VFWVkguzZyKznllT2nzyybPclxrdNNFyjsZ+m8YCaEaCaAjqnBMxFSByxAiQRZMKEFdiQCCIDKaMXDSKmKqG4AEgZWNCESGopAiqOfQIXEBMQUXJCBDM0/zo8GZ3G3MG5twPis67atY2JWd2zjsHAirqmDbrbQjLx+98+faD9y9eRtWZUU6jBC3rywuNgupuJJYiIFBSwgzVvPMWUq9pK8vFgpC3/VZQXE1d1YDjMF99enXZzaoeSmgrk1yx3aSdb31zPI+7qwD1dBVsb9aUIA9aIrrAWQbRwpVDJN9wO2+wIm5qA7NClYYlIy0peBdW7c3l5uWLTbyJaK4UENRs6hpPhavlcVvNaAjrs7NSwCOdPz13R7k5PVrdv3f57rMOAMZk/fCVh9/6xuNvdVCVMsxc54Ims2SlbZrUZ4qIKtnKvbffDLMFMaGRScklpRTnoakWiyylmdWSy7jZgkFdBWPCunENhxTHzTBcbw5nM1+39axtq2pzeTXE0Vf16mBuVe1mNVAoSTyz3O4gDd3s0eEX3j76+qMRRXN0ObOhCzNGB94BMGGo5wcWqEByaEFz3F56MVVgP+NqJkjEAcmbGiJZSSDGriparFgTW3gRf/0/+n9cvPvJL/7Cz35w8SmZO1098EcArTs6nr04vz6cnX5ref97n744y7uNuS/dO728umm6+aYURuUEUnLtneWsAnXbIgUxB8F7akUyQ2isKerLeoR+gJI9V0m0lBJqRhRQQ8XaV9w4QyOwAfuSpBQjMkL2IYhkYF0dzbvVrFrOnz9/cZluZZyue0xFkBhJSQoR+RDIoAiI2bSWbMgIqCoIZKWQd1RRqWEAbATYcxWq4aaMN2aZqwBCPCSpK05iJpYkIRgS8NR8PqE42Iip6lwIXhRU0AlL0V0uZua9A+fQV+hdLIJb0ZQdKKqWKCyx5CRGpURjKKC53E3Hp/n5q2DOa2PO52rC72w/e5TGnvuz1yT2UbDXYg7caSx3ysWr57mz8txpM58Lee0NTXdunldGps9FyxDujC2vtoeIn8sS7ed3r01Nd04huLMtvcq+vQIe38GzX2tTe6Fpb226U9Agq7LDdt5gHshRP+y0z2DgUMHLx0+efPfPvv8v/uj715+dIzChkyhmwkTOezNDNRBkgIlqvFfL1GBv/UYRAUM1LEXUlIgRsPZU+ykMJsF7BBJTQMhazGxPwyH0yFmmSBQUlelATcRlEUVCMSMAZqap7ghx1QWfd2l31R2s6lk9W4a6hvH2k/uHX3TdwYv1FqR1DlU
zonPMagp3Bqi7w4WENAGnEclIwdhU9yiq6diZmIloQguAHgwIQNFA4Q6iTNN5p4nlNwFlpqk1KtjU7gST3GMGaKg4oZdx0gtpygQRTSoVk5vCWXthUBHuPBR7Z4bdIexfy5wAgIT7u6GcEyPf/QqpZQAiZtWp+MNMxPiOZg0morAv5VMmxjszyxRat6mc627Mqk2Y431tGSIiM0wvcPoSUESJhIgBCAkNBExLiY69TsRoN1lChBANSXWKaDkwIMBihSZ/mYkjBAORAmCIOrlzCB2AgQmaEgcVAWRC0Cm9POGfyKOZigooEU7yJcHkPTJiNr07xWIGhmRMZAZaCjEhopnktANAJg+ohM4AVAWQAHBedQ5RGZwjj+jJPCXRbNKi4JSnA0VCklLUlME78kpFVYnYjHIpxAYM7DyoMbqixUwRhZAUjNiVFG2SlgHM0PFEfsjEBDBdd2QK05IGISsYGSNZkWI4SaPA7KYCPESWUmwS7ESIeBKVmDzBq/drkyLsmKbeMcACKGIIQuSQHKgykQqATj4yYmI1c+TVioGxc6pmBloMDESFmYlI1ZgmpoM6plIKGjh2YKAqoOZ9XUSI/XR1EpqJIgET51IQxVSQ+a7rFswEiJDAQP+CVMSgZlBxNQ4ZnBVJt+vP2tlisz7Dpa+bOqURJOeyLQYFsQpL5qAKaglMYrwkG0oaYrwBG3LZmpZcsmoMfEAYvFsoqUjvwLmqLZrL0LM1FXV1ODWQsDiAcqNjymUY9daKE1alYbfbbOImgqqrg68x9SEpkIhQ7dtNCv/W3/i3m/T842c/eedrpy9vxy88elT6XbNqjH1dLwYaN7uxarjf3nCFX3nzzevnL9oGZovlcHG4Wvqbi6tZ27kqQy256OX51nkHY2xcc35z5paH27QN7Wy5OBxSKcaZYJDhkxfvPTJcLR53rv6tX/2/ffUrP/3g3pdWh0f+AF+eXc9XXHIxhqhczevKuxRzqElAxHSPtReJw1ZtWzvPrtTtDCiv4zj2vXPBGK6vLzbj1geHRavKqQ6aCkYKgXbD+jZv1GBcJ7XM2FZVl7VXLWmXQJGdO+hWf/ev/7tPn/zo8ub8o3f/9OHhyaLt8m5YLpcp3mo/ao62vdXoeWs5c93MmUNM0bFDwqwipmMfiUHULnbDnzx5FmZfDRmHq42vq91mh4KLecfERZPEojl7X3kOFKpGZqJFRikZbsfLQ9Sj4+Pno6qrADTpzjMjqYh21T3CTjQUQ1QihJwGJXbOm5EaIHlHLYHHaoneAzSOEFCKTTTrgFipIlkw6EWzoQIxIiJQcEErAZuRzkbonUNmYmTHFRo4X01eyLueDjiZVS8+/t7f+3v/yub2w6alr3z5/nd+93vf/MbPQIZ4g0Pfv3j5gmrKY7Z589M//7OffvT0j//0e/dWh2XY1K7b0Gy9uTxo24mq184rDBqHYRMjhqaZdSGB3mx18MN6U2KiamYZL56v52Toy+ZiXQJhbQ/fPF0ES7fXbFxi/tZX3vr446tlc7hYZjIhcxvb/ZVf+Vu/+o+/d/PZddro+Yd//s0j/x//L/8n7vLh7LyasWuAbPSAhh57LcQMVrBEKUz1vdOvfdO1+d3v/V6pl9c65/r06I3Tm4snjJlrXUt0rgNUdKVq2lR1H4/xeD5rFxCfv1xUqb+9MbZ2+VgXDwU0rl9A21XtDKCgqyQWGfPcl3F3YXodqjZUBw2Qdyoay+4KwB6p1C9+sCjIGAwRuMrIZmk77No6qGgxOT5Ynb1/fvzGPXp8WC1rbqiL3hilbpxBLuadyzGioalUdZNyGbIQerRqNTt9dvmiL9EjkGdRI2JBYQZmIAdWAI3IMYNLWcnRYbf46Pmz9c793M9948N/+gGwZ0ZQmrq/G+dsG3OKgtDWle12c1f/1nf+P1cXH/2P/q1/8Gd/fv708vl/869/Mw2FdIpS27JbeSQftoGXy9NVGpJn+sajw9b5PuZZ69+6Nz+/vnl8/7BpGQyV9eh4XlekmqFkUHp08PCNQ+rqIsK1P0xpGDe7JjSzqlnvbn0c8xCJrGvD4Wx20VQQtFnSxfqTWdV4rpDcMGoaEkCpF3U7C7vdNmBdYaUmCEpZl83qxc1TMMtjZOo+d1tEeZrjqokIOTcxPVFpDykAQZS681NsVFQur2/quhaU4HjRHW76666px5QqXxlACBzjEKK7//Dk+vxiXi+HkiXmzvuYZDGrAMowjl11nCwjWA1YUnrr4cNnl2fJODQVBg7BOceth88+uri5XudUGm8PD+YHi9msbra70QQVaNuPV5vt1eV1yqpmzhOZOsbQtCllNROzkhUkO6IidHVzU1eBid20BMTMzqlqysW7OmtuZ42vvCoBIwg5wDGnsZSurVFZnAgSqqhOXEtynsaYY8kGYmYOnSML3hEjEtRtlWIOwVWNH0RVjRF2aTNwb+ycoYkoomkBdOxAlY66Q0Bex7FezJu6UVFTZPILX82ZWKSYSgFiHnLKVlQBQaWIFbWyRcJnHzy5eXFbi+uaOjgIrQfAlIuIGrtAjaQeiVUzWgZgJM6aCJ0nnzSZYREwc45cn28qbBwH76phGA1QNGcwNY9EDoOpgZJ3Xc63qpG8RwTkidqgpM1qcXhxdY2ohNYPuzWzFa1DENBsCUkNwISqutpu1ij29Te/ePXycjNEYJj7RlBEkAXRyIUwjOvFfO4Ebi/6dtltt+MytM1B24MkJpw1Qyxm1C2Wo2SY82x+UnKOuyJAvqm2V1eLZTf0CZFDN/etuxNMKUohz1B0l3p0Co3nygGIDy4QskCFPhXQEfwwzp07Opw/enRSHR9+9tnL7fn7N33RAVXMNYCOsDAmkq3e2paiCSZAdQzj7e6kbel8SJoW6NsQ6uDmGH72p37x7ftvyWbjcA7HIpj6YZ2LzI/j0WmPWxmHvHhjdXT/fkq4PD1I693Zk+dV4Pv3T6GuFIBKzLvd7vomVK6dz5cnqwjGdZeG67pEV67zdZ4vT5pl1yzn480NYXVwsDx4dMqe+kLVvJMii0XNUbY7olDd/0tvw6rRYJzVgyuW0Zh9AEVCZq5804A3s4ga2YqMvUevw86E3bzDpiPv98YG0JzKRDIVkdKXECt40v+z/+Q/l11++PXH13K2OAmGC7cIVuOQU9Dw4Hgxbs/T9dVXVqv+fHhxG986ma0Ojm5342Fdr/vS76zMccIjBWQqLVOrlgmINSP7AcqQ2SwErs2Lc+RD7dEVZkMyzaQQOBCAKTAZQq6MQAa0gmDsWE1r710NpwezatbxzO/mbuNsVGOc1vmNwDwxikw9kgbqmEFynm7oRQRBYlZBZAZQp8yuymXUArSLw2W+fZZvn8QUXUnFeawqBgBiy2XSO6CqAqgW0HrVNG1tok3rA0LaJUIMPkjEFMfJCFqFylfVxO1gFYmZipY4iiZPTlVMxDP3YyymOUUAejVJfiXQTIvVr6NXr4SVCcd/V1i296zsS+vttf6C+9ATfs6Z9Grjex8Svl7I/5yD6c5Pgp/78Suq9D7r9jpHdjfXB9h3VL2yPNErDer1N+1Ont
5Zkbw8zMRGSmgGjGAprMGNv28AHHBhAI0Dm358AwEDsCLKhAtLr7oORy9ta3HCMSDes16cJVNdUhq5ScYx0cujLlJLkUcx6ag0XoeLO9zER1aErWsZvIxF2cHxwliO3VQIgkJjAn1mdVY478wBwJ0jn3ciML7Xvekebxtj9Ee61oz5i+UXngJlcIpiIEiMD7ojkpJWfJGdBTyQpE+75zBSSdx8BeWNkfxHnJwUwB58vQDasIQUWJHSGUkkEJDIAMgAjRRIhIVNEMaF8tzsgAoGqGSs4hFCQEUyQyQ1MVUJj5w7MJiOaWdwcAKoZAs6Rz07k2qxjIyAaKwIyUS5rn+cw8K1tzae38AYiYiFUQyauKSpnlMUQytXktUGUe+2hqCMAzHgAM9p16AIiz6DYrXoYza2lOocm874k9oprO9+M9mno+F5Bp726aKVQz/jJnQADief5L5FWS7bcrM1FIoTTeL2NztdseHRxe5sTsNzu5PN8cn6zEMiHqzIRGmv/CjUGqlITIpvOXNTUgcIisqKoy45pUlRCI51YWVFMVJUAkQkPTrCrsHIABKIKf28f24UlVZhJRIhIQA0X0krNzHmdelTlTNTFQlJKYyfZgrvn/MiGKFKR5t5JKYWJAN+cERTOTR3I2E7J0tuOpmSCAGRHSHH+EmZ9PDlStFDVxgWfEDaIDQBUxsBme5djh/hpIgAQqczkbAuC8aIdoCI6cqiKTI1AEMPX8+7HWpiBT0kkdBgBxAN4hImYrxgwk5ApRiSFiGbabPlZOsUy5J1t4rFCZxIZBiIloYhRRyQYmyo5EDU2JjRxlEQcOBEXUkaJzdXVQRwpYmthyCRWsXr/7uQcHx8PuzBAc42boAMlIq4CHR6uuHyTb5XQ55l2s6/PthCCHy2MZUjflfhhj5UA60StXeFUtven11e6Fhw8urh7r0PuDRgFef+X2b/3az//IFz5D2JIngIKqPrZf/vJXHDa/+I/+2/o4VzVisS71VeQhYyf41nvPoqG7vvz0G/fffDM92uya5WGzWn7w/qOUx1de+zRm+fbvvjlouXXnREnzkBl5d33+k597o193/+Q7v1u1hA5KLuzLMPXDVDCzNyZEMRhzsSK+AiBdti3FRbE6Vu0wQcoGZVN0S5g9o2MgSKVANhE1UUVByNBPG2iKsVWAiFiQwMgAielgGa53ayqWRyEXKg7kg/P0lc9/fiW6fnTWHjbLdvnCK6989be+9pU/8JXbt9rd1TDtkudWrer77MgfLe88fXoGZAEpjVNOUlXLomqK5B2bfPG11771m7/4YBXi4a0f/+l/c/3JW2+997sPD5d9v0UOhweHqYwCHkIgrdMIjgmMY1OnvQM3lhzLmmDoj24tSpKk5z31wHecOzJzAorBeWwqv0qVk3xpOA7jU7SRgQ0LUQWASScFERU3S/uOfLOEbATi1IGts2x0fNL4hvGEeDGfBcvKHber9XZXV2HaScUuAC6ODz79qeP3Pni0G8bFMtRLNwxyvRYzHroxRh8WdTeKr4MO2Yy//ubjJPnewxefnl1+6cuf2k3j1Te3ow73Xj3eXI04YWHWUUtKvvU4lB6GMo1t8KZ69smlwfLk3u3adUPefudbXztWtN/86PE7XdBWCcGRqE6CpobIdah22xSadhtwu2zz6l4GQiQHGYcr6y9xGqv6hFYPHimQxpp1cfsuN95w4drj7aO3lh6mzWXKyYXIaJTOaPOJSAZPE/pw8AICWPeOTlfd7nvZBajYrCp1ezWK1tjoGK+eng62NBcB0zhgxWbZkACZyYshKJjonGBSsIyAJ4fu9ZfW43BaLfv15SossSuRnGtCqEOyTMm4arFeZrNYt6zFewbmLJCBDNl5n2DMQw4x5jQ8eHjrydWuABSk2YAPZGAyTSWAL0DT2C2XjUrOYx8cmErrIxupShOCmaY8qwlWkM27fujHLMWwXi6vhyQOUjGPnDK1Jy/85X/6Nzewo7hcj4WCIbthLJ9cXdR1M47idj1NI+Vp7GInylU8Q7xcf/jjt17+1W//+g+89Fki9XWMC7HdQFK+9dvfe+udd1988OoHj55pKQ4rJicwDGkQyC7UF+vNNKSaMF8nU98cLMmxSoJJiwCxv9z2AmWz3fkqHi7bNIyq5eTo6Nn11eHJ4Xa7U9NhN6mPXTdVVRyUJJHrFOrmlTfe4AqePH7cxv0toRRBVeeZmEQN2RnM5REA8zOwoZQ9VpEYkhiiI++nNBJ7A7vaXjMRM/VdWrS3q0BDt7PUkykYhkjtsj27Pif0yDymaV5qPzxYvX/2eEwZmE3EzFxV9WUYykDoeR/yNzETFbDsKIKnAiX4RU7imbs0uuABGFCYmYnHImIagy+SmNhEcxZwLuWMCATM7AEAMnr0oFMVAlhm0ypUANo2cTdN42V3cvdOsvRoc7a6c9ptd4LEMZR+GzAAGKJUEe4eHyIIsnmvkJK3xd3TWx9fv7MdnsZAYJyTePY5peBbJDeBVu0Rsyvs2dUH7a3Tgxq0K/1a+h2C9mliIkV1+1VYMqVSiiEUSbkUU3d5MTxc3r17vNhOYx4NCp1vd44hOJIsuKdqEAEKqEc3ls6Ty7kzVAFWEANhYjMLXMFNG8icOCHkLBPM2F8kQ1AszE4VY73abq9mV3UpyfnaUyADQKt800+9p+g4dNPWOQ9IBppUiL3YfiJwA3L0Y954V3kXHHE2VVUtqqpVFQ8PD0vJ3Tg0VXTeTzmZghi4ANWiiszsvJjMjVHGmAtcnG9DW//4p77wq7/1NR+5sBmQ5GyqhJZzXxtTSm0d1kmRpmJGngkdGqBzdNMDyIBWDGQyUU7ChDmxmgPvvIM8CHPIkIxZoSBraDksoiMIsSldPt9tt2m896kv3Xp4X2TrODHosEk45aY+1Aryrh913dxdJMkX588OSl0zOFddvHv28XvvSJL61q1uvfHR44zYNDOTwIYqm+359fX1anGHFmE3jsgYmFEl56Eft6vTg6N7J7u+W60W0zBoGkX02ccfbc43D199+PWv/tY0pcD+D/yRnwuLxZSHcadYNBPodiTP4IirVlUM2KbEMEFJMiUVo7CA0EzGuaCi90bcX7716//s6UdvTkOu2jb3fD0NglgxrfuO11JZWLQtOvL1gnxEIDPwwGAJMIc6/Nrb//Kr735vIpRiqHqTMbK5/wWRQObrDD2vJiKg1Lq/9E//9ms/8MXTSRs3xMrVy0OoVhYXkNaatTWEsUyhvv3Sj26pvt6+fUDreukBJudrMtbNk9tt/R/8mX/vP/5rfzU6ZTWHxgxqMpYUMND8pM5WEM3ART65f7K6f7c6PNKcLj78YPrgKRZEMSVDQnWYc1YTAE1JgKmYiqAUMRNGN9NDiMl7xAyOCUHMsApspkgU28bYfO3ZefQxNAsVT1CBuCzSSZFYiSN2rmHkYZD1hsd+nJIqJM1KTIBDKsxhSqWYMjtgRzmTGoxKsVaRsagNCIZ5HNPT83C60kXkxcpCDE2NSKJw684p/Pw/BwCRpEbO8b4u3kC1lFxEYCow5dIP/TAmEzRRmKegpqi
moEzA89lOpCDAhIogZOAMXDYB6LebJwQ5VFXVrGK7IOAZTnyjDd3MfPdGkxsEtd14iL7vkHj+5/vs7Zv0yX6KA99/DbAv4Nq7Vua1kBv94fdvC26Sajc0IrvZ3I3R6UZOmH23NnMUQYGnZEe37uuwudhcjakjhki+ChFKBi2ICojRRzBlIkIjJmL0LoDjWAXvg5TikdUACD3xnCKZI75EnIsQEzvP7Gf4MSLbfJRM2Xlgn0UcVbG543ztAyOA9+yYFRAIiBkI1dQRIoIK3737BmU+/+gbBlLFKKb9lMSz886R76ctUVm1R6LZZmuqFPbUtsuplK67dlS1y5WWPmWwq4v26CTz0a5AZodQmNBuwDezv6VoERUxZVBG2JOX58SZ6T6gtm/H29umzJT2aCFTsLnP7ubYWNFiKioiOZeUS85AmSQBEhjDPipVDPmmKOsG+wswp9tprw3S82Nrsy/VTMzYeQBUBUNDIhOFWQKZjW9EJmWGB93oXWgquB8vc5M6GxLMeiiRSt5rR/sI3FwlhnOhlqqq2Vx8B4Bz49Us38z3dkJSkecXSABAwtldJZoBUOcjRTSLNUhIRKo6280QickVSchsKqZmtB/Yopk5IqJJAQIRmWtAikyGwH72uYBZQTMVZfaz5CoyI3W0aEJDRFIAUfHO4Q2ACdHmdpQ5czefbEgOEKWYCGTBql4W3XIES4m8XG2359eXh8f1npKJNidtxRTNZkGKyc0OYjAxMyZgdlIKkjcDNXFu7g5DUbE9fggRmRzvB9isbcF8NBGRVYWIiyqAeXZzntRM5lIVx872MBlAwFISIwHjjOJm5xBJVZCRyIFlAC3729yMdkIEwjk2NytTM146JwAzY0A0LWBENEusBqaIJCX5WAFZkcKMyEjABiCiM7kciZAcqjJyloRoe8YWmEjZa0RARrRXY00BrUieuWYCso/l7t12N1JRCA5BQ7F+MCNHAcmXYuKqIJOoTeyNQBCQuIrLNnoRKLWKWpWzpSERUnAxGU6TAhYFABc8x5QGcs5MYd4bgmQul1zAyICQi8Iu28nhgWIV4sEPfeknv/jiF6f81FeOONar+snFo7vHtzVLSYQUAdeOfeJ0fNpoCFZwfXFeUy4pe++jrpaL5uxqg3j/sy+89N23f5Vc7S2tN7tSysnpUT/0aQCsRuT+13/l53/q3/gfSxqVwFS0mA/hMz/4uXUZv/OtX8wXnDbrtJvapg6+dGOCZdut80dPt93w1k9+5Svdr3/t2cVVODzVEN979Hi97b70o1+yiN96+53HHz2p7sHxwQrJpSQ+uD/5x3/2akrffPK92TgsRbxSNu6LuBDa40PfDWPJw3Zqg4urSgRVfNPeccVVVT1mqqvNbvPBOD1zRIjFORAwBB6nAkakiOKy0EicNUPxAdGpGkFJhYnTZJ6rgokR0Wo0cWn8E//6zx7Vq052h6vqs599YxjyP/mtX5edNC5ePF7Xtbt7a+V9JYQ+wG59fXb12HlljuyZCjZNFNMQKkTYdNfZ8e2HD3/q3sO03lE1nDx48fiFU3/62ju//hsxL/LFbrMd4mq1rP0w5aNFDLHZXl+bJdeK4+K5VoXgvaSsY8r63dODe/7o3kdX3016MQx9pxVzxVA37YHzkQpoNNGk5NN4OaVr0bWjHonBvAiqTsmc6QyVK6lkc6VMIwMRVeQk57WobfPFfBb0u3HsdEw2aFodH2Ri58N6zF/9zlltuDxeoOUFWjclIsQ5xZ1tSq67yMjABKTu4gLPn463W+iv4zplcWIgPrp7d0/H7knRcbPZIeuf+Lkv/8Zvfq1dVcHH0Rid76zw2l9ut0rt0Z3Vs7Pta/deX/rp6dd/J6xrcZhSIgxUVVnEpASifijEzQ542za7ahWcr9j1w+DJqHTj1ZkzmTqpmzt1VYFTz3B9tTtsDsLiJGGoX/08q2HzIXYXi2p5fbG2aXIoWWTsegyL5s6rY/6GpqtcClPU5LU+rG/dHykKQvCQPn7nVol1QiuiCMETIRgUMFdVVS6qZRIxEomBJRvVLFWID1YT5eAJyGLVom/Z45QToIAm9j73OcQGuGIUSYJapr47OriVESKzI3Le1VU9DiXWfD3Zol4cLdrLjWQztaJmDOrmGlOFUaU6rpJadoApscWiAmSIDAIMKPsENxVUkUwO8/5BwpJ27Z3GdniwXGyfXl1vB4cwIrlY+5Xv86RTGlIhUMeUKZ8+WHzw5Dt/8HM//Pa7H6IVyfny46fOMY755YdLL/xa9Up6Sv/1X/2v/+Sf+LHj6vbR4ug/+o//wx/7ia9cXrxX1Zx6tSIzaBAc1E2zG7bJJmVNApJUgbpSwJkC+Kpy5HfnXQEiV1e1yyBDd+GJfI2JptVxu+7Wsa6TOBTx7aL2RM2y6/vK1evLhMG7SJfry123nq72QF/vKE8yh/PBBFXmApo5xg+AqsZEBoKUS9pOo/HiEGNkLHma1/FkM+ycwyRytnl8787J3Yd3H3/wfqyaMYnz9dHx0cVb35Nilso4TAA0ZbvbHl9fP/Po0HEakqqSUZoKIoNaIDeUrEBAIAZsDIAAzrErpTCSGgTyJiAo6BwZpZIQQcQUuEhh9IjOs84TUATJU0Fg5xyUAlljbEFRARm9oBry9W44WBwsmmV3uZUZaYLMxCzJsp4cnoz9NGzPStDj5dIjC/FkqVmejBfTrVuvYfCbq44cpFFRJfqAWRz5pmlVAEPF9aIA96b3jk7unxxXuivTFqF0UIoIijJ7b2BCqtkUiFi0qEiWosxpOy4EXrh1bDyIahXrdZcBuWjyQATEngEs5xEIipgjF31DhkXFyJPjUgqiJ4oiRVUUMxKT8wxuKqMyMxkYETJgbTQvMBIZYSlgE7ADrJ07JgAHSghZVcz5cKiqgkYhzuMGSEQyc/CEuSQiRI5z2AYpKPishYlEzWB0LqCSISyqBR7J+PgTUZmslJwXLYdQ9SJYZjcSYEnLplExZodKfafbfpcz/vDnPvfR048fr7fFLIuF4LPpdTf4ZcMe+zwJSBFRzKKSS1EteczP19CIYF5otSK5pLEUxQqkENCi8cHxOGUFNV+KHwsII1tOnrn2vl14oOUbn/7Cj/zUH0z9U7Ht4UHUvi/bYqOthXfriwLU3rtb1FTt+uJqe3E+PHl8ffb02SePTu49iFX11td/g9iY/OLwYMrZN3Wa8rTbTVO3u75+7Ys/eHj/JFtCUI+hjPnxhx+VnAJV3vnzx8+Yqfe7frebpmF5cOvohVde+sJqffHk5c99yRE7cEg6DlexPQCVdtEMm7XCpIIcgvNACAZi4zj1GzRSNQ4xtgcZspbCro6a+k/e+c1/+jfGZ+ckSVwYhq4/M88tEpcqt7xqqvagPaTAXDUWIhqDgJEZ4jj06PyjzUd/85//g0ymRVAUkABRTVWVHd3EjmxPHjEDQCkioBzCZen+s7/+l/7DP/Hn3NDHZaWYSYjH1D16UnqLq3BQ+SeT4rKKL336yXevxqsLiN3hMZiWqm0EsfT9D7xy73/3P/2Tf+Gv/w0nPpaZd0Ls0NgQIOeiAA
BYDBEwA/NqqbHCuq5eeLDImi53DsgI1bNvq7Kdhou1oXpPAFBE5iVA0zn4YIgCxAQQKxcdIaN69ge1Q2EfcNEYM3tGQBEUBVaUbixKY5nIES8a7wD6XemmdHU5bdaglgWmLEmzCwE0I1fKrIwIzhANjTB5UykiYKMqEtS1M6OiJSXpzw0Gr12yqo4HDQtCymnYN6D1w5aBkZ2b+6iRVHJOUxqnYbsbu03udjKNaiwzpZhoXqWZfQVMSIAgiggKCmpQWFSRRVUBwTma+u1ue9EtL5fLU1/5OZBzYz75PoBozyS6CYDtmSbf13TwJvx18655/n+jMXzfpHTjNPq9CtPeSwLPf8/v/WPP3UN4s+m9SnUjYe23a4BIc6mRAIrqkw/fR+mnfj0Oo2NfVQtSQZJJkuQSQyBHTESAiEZIzrvgnGOniM55IkMHjrmAUYiBnZQRTdg5M0WE4GNJaqYMVFSYidnN1GRDLgqibFw1yzsclkjeEbNjx57ICJC8L2AG6rxDAlQALejx9sufSTmvn7zFLCNgcFKphsAlJRNLSaugIbjI3sS0CBOSw9b5aeiSZPCOoi9cIle2u1iF7P3Rlc4ngs319qY6Y6GRvAAbMpAzQDBEQ0dOZ6aRmc2XjH1CbL4I3Gh69txNZnM2exZjAcxMVEXmjnARlgKsJjBbJwAA9jlX3C9ZzPoa6H7AqSIayA3DFPfg6nk4iWbCoJZRZEZhi8gscJmgATtGEQGYiTyFHROhiBA6BUDD2TREqKaFeU6iGaETESBD4HklGJHQCuLeTWWmqnl2Qc39bmJK+/2iCG7+4iJ7W9vcRoJEOL9AUi0i+9OCiAFJVcTyfHLM0jwhGioAEQQw3KOeRIlo3ysHYFrISAFFi2OHSDYXvZshgWN3o/Yp0exPnHNeOLuOaEYjKSCyibDz87FUEzBzjg1dXVdVYCZu43LX98FpKmmypCLsWUGgGDtvZFLGwNEE1AqoqYljRkQpclMzRoQIxHOWbh46CDR/kpuMF5ihigQf1HAuJTCT+SsTspoRsaqYCc3+g31t63ysERRVy7yXTAsQ0dz1AQoGJYugzOR+Jixis/963g4qqBVVZcezoZKYAcBUEYDJzaNvrsDbi5Xsbnx2uo9qzi5YnSlWoFIQ0fYZPZw/6j4ZAEbEYKCqJnsnmHMOTUWEyOeScB4DBP99VhHOXElnvnIM1tbko5gqOFaTJLJYtiTtojqtFqeJXO4ec95FnsCCK9Ieri6unzgHppiKUfA8w9ItR/Y63zrEyAQKGIhz3pky4jSMzgKbB6NXX3kJ+uVrpy9cXX4cWqp9KGLb9a6iJk+ipg7o2aOz6Iycnp+fHR4deK7ylE+OjgipcAm1H/NuvV43WHlo3nvv201ExlQfhbKT3dUood6u08WT3cFxPcH6r/7l/8+TS/kf/On/ERahwAg490e88PClr31toflKkhGy5QylLAOTWtvUfZ8+XG/+5a/91pc//drvfPN71921r9vV4Z0P3noHIX36s2/4l156enG9266rumorD6bDNMDV0z/zMz/+4BuHv/zOm3maAHm3Hp3zOElW60OqKyaxyIGdYdLgKzGwsZPsYsMV05S7PA2eSBFiW1cV55RB1FS889ZnVXCOt11yDpwxEy6YJ5jb9LAfkmlyzOBYMvXX5c//r/7coe3e+d67B7dP2DUfvv/JO99787U3Pv+Zn/mMpm6ddgsfwCQPvbFnC1LQsVvUrXPcTVd3b52er6+9d+PQpzS1TZ0Ui8jqsCrOT2jPPv7k/qc/+/Jrt47c4fWH3/7mv/oq+hRz9svaQ0gDMJIp1dVhIV4dtU8evbtsDoGlyMSOJK316XYV5PX2+PH5o5Q+vN5SOPy0x2UuQwwJ0RGKaGHnY32gZcilz6UwB+e8j41kSFM/jTsA0TypOEWnyllGslLS1vnCPGEd57Ng1YTD1a2nF++W5D01l73maNqVYgYLdh77DiCVSdQJHiwDKp7vdrUuC/uSRm5CP0xlxMPbJ0+fngfDqoJk+NLD0999992PPnkqqj66dlmtN9e0645j25k/PDj6ZHt22tzalfWf+tl/+z/5i//f8XxDSz7owufz67/0t//6wXSQTYqKIRYpgqAJmhCsWMXhOtNVVdvJXaJKC2AugbltFwQ91g5G0JRtd3lUH4+bC1+GlqFpD7rUjdg7wJR67zOYTmUE71zLZUAOFcfA8Wh9+R0Y3ieD5tYLw9bq5rAs20KeYPJ5gMuzu6leZWzICxbHBFhQlIHm/nIVAREicoiOWURSqPjlO3D/ZBo6LERx4Wq/mYa6Pagj516cUHQNtQ4Qp3Gsg3MATdWmuBiGFCquGPthrFcHaT0Ci4J5F3PR06OD66snDiJ5X1SIDI10EhcwQd7lXRVCqAIiqRmAUzOR2SAspjqHXoRNGZnYe++Bm8C7q41Ppamr3fmTUNfrq7E6rK2qtjn5UVMuTqGuGxnT2O2qxSK58vBTr7/50aNpzOu0bo+qNJbNk/ODZfz1X/n1+y/e4u7q49/efe6LP/XsQs6vr/7a3/tbr77x+WyJyDxiAhG1ccoKpa1oLNv19gqAY3Cro9XlswuHedxe1o07vXf09Nm5c2E3DFw1LtaOcXN9lktWcgC59OQotE07jKOk/qStlcaDhpUzoOUsQNwE//GHT66v181i0a72z7wiioCqZjALQ2owr1/NS7FoBmpIxOPYZdmQa4bd9fW2P1y2VoohrhYtOQRQ5/yzy/XV5TM+WJqZahmm7TK0jy7OdnnyzmUV9oHQuU5+9LM/8hvf/JWrkqXkJniH3vt41Z9PIiaYpSAhaDaFgA4AmbyIomkMzsz6ND6/K5NzBpLKxORi9KrF+wrmIh8FU6nYIdJYLGnKYj5UsfFakkqJVZtlVFO0uk/C06jeEUDf7eqjlWopKatKs6pSEtXCxNFXIDHrQFx4GlH5sDlYHMQ3n363zx1a0Sk5QDQHxs5Hcz6EmmJTElJwq0W4fVQ3sM3dGkHGacylpCmx8zmrGXmOKqMWBY9pGkWklFKGbDt8/f4LzGM37hz5KY9ZEhIQeFQ1zaaGZJ49AFS+ViliY+0iGSgoADERmBUdENGTz4oOXdaSbfTspKR9+evc36vmY0B1uShoIppbDT1qFptoxmka5TIWBJNMTDN3NefBhaBiYM5UCVygesi9AagkJABgQMpask7EUnTm6VsAxtDcPjrupy5LauoAmkGYkG3KJUvdLLVYmZQpjiW39cJscIBT16FWX3j4uoxvn+2ur3d9qiKg1UcHmz4jAnmfbELni4gPrqiZCyWlMpX5LBhzDkydJJJEaow49WsXncbqWnZV5YWLYfZL49rXqzBhGndXMOmdW/ey0cHxyQ9+8Yem9eMubSFdb9/T4en57uluGmBIEA7rN37yp93tOxgJ+um3/s4vv/07bz58sHrjM69+8af/4PLkOA89ADCxKZWSaBi1FOt3nFNw+NoP/MDq9E7SYql458bNNo2ZzK1Wh6Gt8pQYMMRKIR49OGIiRDf0w4RSH68Wd49MiR3kIlWsjZkRi8mceClWk
NCmrgw742BpkqIuLnz0CmXMoyqwY1Z79Ju/9PWv/t3SDXGxUA27q+RoUVc+hANy1dHteyeLlY5MjgxBKwcOy5jQBWI0VBRMafoH/+off7TZTLPQa3ND9X6ZVebmX1De+x2UEBHBMZsamzKFb37w7neffPCHX/uMFMl9ykm1EiKOhxVFzw0vNQXTjfP3Xv/C5UdvnV99Ep0eHR0O2745XAFA/+zjL99f/pmf+tm/8o9/NaKhKoEwAGgxRSLWIkXEc8AMu4tduNiElbarlkB94BQdADarSgnHLAjG5MRkLkVCwFJkD5e9iVRpNgVzAahyYVnnZZMjVcF7H4rzooAATrX0GWQCVAiVMnGIaiIpwTCVq8up62y3Y7KpT4KsBt65eU6LzBoCh6hFpmnwBOMwGoP3UQGACSsPy4bUXNG66MV6IASEEUSmqUelylcp76cHfX9Vik2KHhGJHDKalCzduJ22l+P6EkoiUxPkfcwMtahjP68vFC0IxkQqKrM5Z26dBkJkBU4ZEUs3jFMa52nc7/Pp/D7bzk0U6blwdPP6xkKE8NwUZDdTot+n+cAN7xie/5dZcjJ83nT23Mr03DwEz/Ntz1Nv++3d2IoMEVRnM1ExMzNGuPzo7Xe//lWcOlQN7OtYT2VKeTKbAvu6cYCGaITABFWsAI2IQ4iOnAFGxz64OWs3B4SG3DNqdAFQkUnNTC2VbCIQUEDrqhIFkxKjQ8AiIWFdL2+FxYEIOnYEhIbzDJaYCZ1DRBQyB6KI5kMlWgTk/iufypqGq/djYGJIacLOCKCNrUKbR4UiVLGokWLSERl9jGhgE+apFMntIg5TKnp2cDwxk9LBtmBSVZF9tRyQqSIgAt/YwOZ9TWZz6aTsLwEgszRoAAjzFHo+JAoAZqKqhLTX8fZPK6aqIiJSoCSTjCUDMiDNE3ycO7/2PB69sWLNdWx6M472n2nmZtPvjarNvJibsWgiSA4AZ2OXzkgm3De+zbanm2AdIjrVYlb2iUedY2zzKEOcEeCqiLO0onvdB/dZObMbMvT+iQyBcS6mh9nBp4pAMy1on55EmO0z+7AmzAQBnVUmNESALIXmE2iPkZL9JwFEJFGdLTwKRvOXgr3NyRD3sompqhAQEpZSiJmIDU2lMOMcddsn0RTVChEjEpjOTV6zuoeEgAyIqtDElgyIqWGGImC43XXsnEgGns1TBQyYnKICERMT88yFKiXPhCciJkApeS4iNIQZLE3EOSXvPcBszOFZVTMA1QKIyKwCuP+JAhjMPrJSkBG+L40hEcG+k88AjJhRDYFm4xI7T25Ps56Pk0jxvipa8EbM1nlAz6YfVSmCRIBoYsReVdUEEQl5n4QzBdAZhESIpiaqYEaOiWYCNwkmNQSCosJEQCgiCIDEuCeC2X6EEt0IXvOHKbOFSREAkf97AbRm6ctEu2lHTI1Hoh1aDj6mNC58KBQCe4TFIj5YLR9kxIlJhvUu9+Sbo+PTVWwv1o/LVBRZwUydY1TICIjIhiCinoMYMoMBAYjzDkQXbVRjZ+H6yfp3Hv/u//7P/0eLZZOXaEUPloePn50fHNXjKLtxgDyuFoeFuVrUF9e75cGxaum3QxnzIkZzNG6Kd0hG5CQNCYVUdouVG6dx/eh9Ubxab8+ur6/Pnj18cM9VeLXWOy/c/i//4v/jy5/7kZcePjBTdgEJym535/jBq6+/8c7vnLnVajeMKU2qyVPd7yazKdSL4I/fv9jhOx/8+E9+7r/75X+RBjJw1eHti6v8ja+9/cbrt1++f2vdTU8fn905vc2ARFUq6kD/yFd+ctDqX333t8aS0XS3yTqAuZzGKz2o2mWcD6YZhXCQFAQGdhVZt+v7Jlrvx6zjwcHSewDNbeXnVTY0QVfMFJk8K3hGoq4fATMDcmA0QEYBb1xvp90y8B/9Qz+dzp9++/yd1194+fTk3nffem+X+pdf+dTJrbu7zTjt1lyEgZaHy2GcuvVIIG1Vc/BFDIqq2HV/jY5NZoujInsGcKlMu37Yjj5UgcqwOVsc3eWT6pUHf6ibLr/75rd2T59Jt7lz72FYrTTp6vhImUcZu01YxTvDcCWWGSX66BAN4PLsXSA5Xi62V+cPXT0O38p81fNqsMbBAmUt0nuPedqpZBNxfsnxlCkCxtgeUEgY1ia9jINlmYYdWI6eQa0Ofre7cJQ07ZP5JsPjR2/+xBfuvf14d73W2lBSJicPj5dPrzcnx6+cXbzPCuAQFHa7wYCz9m083aZN5fnkkPpOzp5cUbEU4TjGH/vJ17vLq1/87d998d7hkC0PAioMdhDq977zwfHR6vHFGUN+cHf15PH5C7dXaa3sqs0aXp3CZ28tfvGv/NXFeDop9dNIwddVGHLPkj05FMy5DHU1rI679hDEwZRqJC0DYpngurZUphSdd86PZx+7/hkOW9YM3HSX4+Frb1xa1nFcf/zuvZNjQzf2PdSn1d3T3aP3+k+eLk4qPDxyWiHezf0uJTXIateYxmE7uKrR3cUyw9Lq2qCk0TsGFCJ15CtqU97NkriqRiJTLn2hptLjYzs+LoVqV+36yeLCR7gmhUWcyhCr5ZTEtHAVDSDECDI5KNM0DXm3PDxN0+gBduNI3sAZSM6KPsZpGBaretXW18kmQzQ2KYYSA8eK6rb5wa98/hd/5Z/fPnzxavtsYQsQpKoeSyIKjgNjMpsQoK6rbiolZRZDkCJW+wVOnJHHPHKdmxXm0ueumJo5PD49XV9c1nVUUhbbPru88tIea85TbP3Z2ePD+w9jQ3kcBpiena13z66az07NQfP4vX/x+qd/7q/+rX909cGjL/xrn3n7vW9UoSrF2hh2kpHMinlXbYYLYQjMRJSHHF2MPm6vNlJ043a2GwoXj+iBx343eGbldtkOCUoSy2KV3w7SrJYPDurLT86Cj4vgP3ly4cJiuy2u5hdfPem6q8Vxk3bpk4/Pb6Si4pwnJgRih3NUHqCAGSEhoCDMqw0+UMrTcXurv7zyY+LAOY1QXAgx0kDkGfOyDoHDuEvN4qjv+8PV6W69FtKWm12/teiVXZrKq6+8dLwKwVG9aOs6pH6CLKGqnq5LMSNCk+yp8c7NfQ8AUiwBEFNEw1ySmiAhAAMzgImKo0iAYylNqNS0qKqJi84k51QILKsiQh0bASYi8iw4gqNJptoFLkjqm4Nbm815RdSnnC7PF8sVsZ/S1kmjCIU1NrUjchVuZDxZvTTsJsLm3qc/++4nbw7SFTNXLLqIQA6c1lovWk+uJPDgAV1bL27dPwhlK9Pk0TLibjc4z+QYBMSKaHHMc/vx/MgiiCq2vhg/ffuNKljSrSdnguvtOoGYC45BizJXTFZ0ApsNOC7rRCGOZrNTCEyKqiNEUlWVfWOuESuoMDkzEBMFJSICVGMC6mWMTVusxKpad+tVbNR5yclHL6qoBvunQwAzNERSQCWHpWSHESGMNCUTI3DMKaMj8EzTNBUT5xvVrEZoaAJFi3du2SxFpd9uWSTWVbdZQ1UQF65pFAjUxm6IFYuO00SKwkigtN7sHONLd+7eOVpedbvNmPtuS91Q
dqMAVItqvN6mcQLEMsJQVG0XGLHc9ACiIkPSUtdcclIxdWXCcukwHrvJZ3ZU1exXhJIJRKesSUPxTx49iSG8eO9LNKRnz97Kaai4cCqSncYFRf/wpTcOX35xcee2onpCY/elL3350y9/9o0vvkixjCUjkV8uvIuEmMdMzpmh5cRomqcxdX51AHVTUgnt0iGT7aoaD05POQaDnFOplisroIi+9mkYpnFAj84B+AaZ11frhmoZs0x9vWR2PAw7Tf2SV6xZxJRA02SoQKFeHYiKeJAMPiAVLJv1t//lr373a/+MMSnY48eXvlC0g5RguTg8vP9KWC3AYpFkrgCbCjFUROiril3NDqfrZ3nqP7748Ne+83Uh0lTMVOeGJ9gDVueHaJmHkCkzmynBHBFAQgMyqNzf/6V/+NnDOw0v/SLGajUB4eo4tvU0dVMZDUYYuspcDs3B7c+//c3HmHpUbpZxuNbQLMhRusx/5Ed+6HLsfvHXfmPJETWrmhRhcrPngBkJlZBS11+++9HB/WPpIoxjFV1e1MEHcFhAF02YrrsAkBBURYqAmYmB0RyXYAYgQ0J25JkWTRuOVteHoT5ceGVKWrErY7ZholHiyKCURNCUKgIVlCybTVnvsOtBBTJzdBwDAGgu5AiiQ0euXgrXWQVMaaaYmBEGMyDVbGLidkmr4HPqh91EIl7BQ0zFOHj0VAc/bxYALp6+B4BTkhmpM4szOclU+qE7LzlpEVTwQIjuBoMS0LyozPNpBVFCUJB91ZERgZo6BAIGZHaV87XzNc5Gvn1SDGeh6PfpRDcSEXz/9Q2ceG4Jwn1o6PmUfo9Dfl5mb3sq0Y2gZN8Xgn7P1m7eid+XmH6/6mTPbbawL3iapzkqompM/PjDt776z/9xToOqiqgLPkuahp6ZKt84pikP3nMMAbUEAu89IDrnAEiIPDM6nhnDjtmBKybIrpSUJGfJznuPvM3bGOpQV+y9ijhmEWVXielgJBbr9tTXLTquY3DsnWNQc84DelEEgPmXzPYd1bkE0Ctk1/oXP/eFt3+766YrxwBFbTeGUGc2xFyyEvtR1BEqQogxS0ppdIyVV6cygp926JiT8WazicuFBs9czzFEMIIy38v2UgxYARAABtrXosHNxHtvJ8Q9oXg2eO0bygwBiumMw7upt58n7TKXm0sp2YghF6RChEZoakj8fTOY3pTlGdDsyjCxm6r65yPNYOZ0CRCbKZEhsqrSHIibbRtEZIzApvMjEhrqfpzMxp/5t7MCzplG2ift9mBBYWJVg9lIYoKARF5VDBWek2uMVexm7W4/zBFQZmq4IiIjoGqZV/XmuiswMJm/CgHNcHEzAEYPQFoyMxOhipiKzY3p7NGUacYG7OU821cAzvAiVFMtMzSHiBBnAUX2oT5mFhVHbiYlKSoSzxh5x5VIVstIrJJAgUNgQlERMPLUb7anJyeKbzsGZlJFJbcZS9sututLdpxNAAyVHIeSByQ/h2D3e4QcAc5YNbwBl+m+HmE+s4yIAczACGcU20zxFkSGWXxEZwZFiiOcb0CSldhLEURDdLNvC+ZcISKzmyFZAKaQEREdz9tk9mgIWgCZeE5as0mZg294o3VrToBIxKKCyGagKmYwV9YjMAKUPHpfE2LJEwET+/2D1lythyB7zB8yOVUTFTGbXULzNcv2/l2YbZtEJAqqxsxMIFqI3CzTaCk3rrvnriIw5+HwkAEBRNh5hFoxkCe0YpYMwrJpjg5uO0/T9lKmgY2b9ijZEpqjs93jLhUfHJgGIAroCBXJ0Sx+A4ChiWdUm9OnBppAwXEYh+SCy1v5X//7/9tFhc8uni2XFVN1drk1DEguhKg6LQ+Px6nU7WKX+8vu+nRxoArsK1AgT4p2dOv08upcVOvWKej57nrVwsXZVel2wXw8XXz74/OxaCfZnL+86NdD8Qv3pS/d/1t/+//1f/g//t/ALKUpBGqPV7s+37p18l0xyYbMdWyzOjQepp2iMUQKAVzz5ocfm/ZffPjChx+cX20umoNVDcfX6+3X3nxyenL4wsN7fb9NunOhbRfVsNkVzbzd/LEv/+Cdxcl/8fP/rQMk80IigCUbbCVJauuA7CfJOlwjQlPXZNLtLkwxldH71NYL77EJJFmg8iqMSpCIEI/vL9bX2zHLAJIMofEmEhgh29SPlJ1O6Bt2WX7iB7/4Aw8f9sNVXR/fPn0FafJBf/RLP3h0eJqG7MiUqxDrGJvzi3MBDIHrZRChWMVpmqa+Q8SUhV1DrCn1bXWUcioGpliKed94h9O0O/v27/hPbT/3mU//2i+/c+ulN1747Cu/8Df+Bjt33e3u3zm4fby82F2evPhanqrddhOdS5DJgMDnJHGxSEm6bc8EOaUlnbahfHL5/rA7f5rq5oXPuNBF7cvYpS6NU29kedo2rR0cvBT4mI0BsjkBbghyjmnsE8gllquSnpXS9dPGVE37PO1104ev3knrR3dP8GLj0S8//mjHkO6eqtq1M3vn3Q/6aWgcvPTi3WcfX02pVNGappIsq8Plptt1o6RJnF/sNpO0rVuu+O6rt5ef3PneN/gW/u77u5LUkm4uu4cPjzHgyEF9dbEZf/hnvrgZv6sH4cEPv9z+yknFt6/fe+fs/Ilu4g4zElrgAlZUAX3lIU2Y1eXjo8fRj81RPFjqtANOPvrUj44dh+VYLL54C6imkvGTDz2pa710u2kLJGW6WHd5Ojw+uH//M+n6MfhqefeVrblnA8Tmdntbgde79SO/uF9s6ZtDg8ncmNNW027hPRfjQY9d63cDmXriJBnQHBojjHnSIorFGGvnAFANxAe6e5Ie3nGHy92Ti9Pbt6rmcHn44uXwbLkMrIXAlkfLbNYu23HMzgczySU70CmNu93u+J4HR3nqmrYlzSkPq4ODbd+VVAhdtvH0dHX14QVwXVW1ychAjlyRlLrpw/fP7pw8SLv+YNkQcBpHmJDIUDMUAFNCBwkNwRdBpLquUt/7KmTVISWbBJk3RdmUgGSYABAdb9O1Sh77DZsF5xYLXymmy7UaDAgH7Wpz1juknG3nwMVmMHr3/fOHp69cPvro4//m73/7/e8eLqpn5287R0RRtLCCWDHLdcBhd6lSgnfLajl147CbikyTswyWB6kUm8OVZPUQN0M6Pmq311dA4B2AuvWYXr37+odnH5RgaZTVi8efvPvBAYUyausb39A4qafEMuRh++DBrYuC8cZbl3JB4BB8LgKIIsVMAAFotrMiEaU0qHZp2BwdnTqMq4PbGa96S8osYLuxv+rO2/oQADmEJ5fd/ZP7RGZlwCIMVCQVA2LGGCYXdnl48PDu+eXHh21bu/bw8Nb1s2cVw5PzZykN7DlPhYwYmcEp5Fym6L3IxL7m4ApaUhGzyodhnMzUgFIZKm4JKSKJFkOcckYEYypFiih7LTZ4FxwSI0maXHCAstlcHh4e1jFsLoeh6wPcOVyF3fm16cCh2YxdE2t13NTVxXY95Ryd0zKScw3FvF374l/+7Je/+fity+vHTcSUihYlqsyImISBfK2KVVuJgfP+3unSySDjIAg
plankEKtRE5CL5Kdx9BxMsyFMaUKwNE3ZbNx1t1f3jldxSJeg6tiNOQuTZ86iMwrCVIHQc51L712lKsxI7EsenYueK9MEljxTlgJIBmgICiSSDRG1mJkLTU49GbFjFUKsQAtjsGLI5ojBsqIiabYCzGqqkr2LaALk+inx/gE/AyuqiGVCyzLgjUlEDUUKMwkUACUAhywqyBCdQ7RltfQuXG+7MU/GhuwTik7ZfAGfg2HwschYN/UM1GIHVqz1VcrFDFS1Ajo4OVg8vEPk63q56Xogn1/IwFKSXa+3VFUqyanE6H77q78KAIrYl0yegUWskErCrV/U1uT6wHsfADFET14pYOr61GXqeLcdF8vDn/jRP/6Fz/7keHmlmOrD5epgZVnPPnqGtVucnpx+6lPY1AUV+5S0RB9e/9yLYGqkuZhnAnQGpgJiYMTGrqTimETLmIX8AqlWIXYewXLJrnZFAD1PZWBHronGAEwAOpUMWRi5blqB1Pc9eTg8OMwpu9AEYucqQ8uF2ua2oKYyeT9PThyFqgB1JasWQnYuYC5Pvvv2+9/8+ofvfa/barVYaN+XznyoeHF0eOveraM7oA5VAwIJknOuCsRVRiAuQGI25j5rygb6j7/+a0/LWGYa6mxXBNB5smZmZkRzHcJMPFVCnFuCiNjMtJTgwwfXZ289efeLr36BI05lVB8TTJiTlRHIeXOgzkRt3U8jHJx+9sOzb3I7vVBDQ34Yu+WyzWWC66f/zo/9wHSx+xdvfnfF4AiYQMGKajYTAJmTJglxO+weX9RtJEYO7IJzwReENGUQ00l1KgwgCALK7BgZFUCNHDinkxbXhrrigECkKU11s8TA3sgj5t2km8FGHbcDKDkfyHsKUa1YSdZ1ab1O66ECi97Xq1hAURQMnPOuchZYo49xCYpl0qmU6AIYCFhhMssug4JR1rRLgoVKsUlBVTSR9XzYhGULVUXsFzf3gm77SNI0TVnKpADsowiaOVGQUkyawHe8ZS3FTB0ysiNuVSGjmiVVMzVBMVMFQ2QjUAOUguzIIFBswtGyudu0t9lVc/vU3tTzPGq2n6DcdN3v8UT2XEnCPdva9lIPPpeUbpwM8HviZzduFEBEu4kj7YWJvRNjb/f4PvRoL1LAvqnqOUxpxtXOthQ0VVVjpI/f/u4v/ZO/2+/WxHR1tVUVdpLS5L1rq8qkGIiPMXqqYwWSSBURmTg4b0gZNba1AyCAXLIaSEkFtQ4RQLSUKngjZaMQeFEvgUghx+BySqrArkJyKctqdS82S1d5MPV+ZqjzjG+ZfVRzYG2fsFMgDHNdVxN8KYrGr778xXd+9zdp3BwvMQRmB5mU0JJmVsQsCBJjhcSWgZQIyKCsVhVmd/50E70uD1vyOk29ksMqOs8lYykCjAQgykQBAG+A03PpoeIc5TNFBFU15HkQzJP/vaRnCjjHcnCebDPz3utCbEiGaIizbVykoAja3o08y1F2Q8UyM53dbjNFew6mARncLBvsJ62znwgY3ezCMDNgtNldAjZbjAEMgVSKzZCavXyJBqQAgKCSgXCGkBjAbPYxnTHIM+x6HzjTWbMBo1k3171gdKNfIQIrKIjMLV+Ic8mZFjEiQEJVRZgPNzGxaAIE0QKme+GJ9kPa1OwmxTl7vhAR0USzqe4NQXM97jyfN1UxIEJmlTI3the1/ZZn3Y4ZiioYECLg/E8iee6Am08qQJxzYLP3ynu2LLGudpfbk5MVSlqGQO3SEJPpesx9SvOjzXwFUBNURXaArhSxIswEKjRTeNSkZHZ7ejw5V0py7AwslZGIiX2RjERgICK4FxsBmUTSrNEwoYECMepzhj7NcxoiMCKRomZzWbABmBoxqSTn/CxFIbJKIWQzvYmCmUiezWREqEUMCMD0xnxqyMiecKYpzcUHqlbmckYFBUKHIecCJkQM+zGj+1CagaoBmwEqGCOqKe5J5zLX2xFyKQkAzFBmzDwC7I12Mg/jWW38fVJR5V1KfRVMMVsxVzU5O5lMkL1X51yMkVAmWhdqwKUyTgXFORXZXV59I+3WTRvEDAHrdpWL6tg7dM6TlAKA7PHmZJsB2zTjrCbRdtmOW/2f/bl//+TeSS6yWq0qV2mm0KbduOs6KNlSSU0TJUk/XLeHq7vHp5ALYyTyD167fX7+NIu/uLzOJs1i6XBc1UZmFZGmSgrtCP+rv/XPBOEzr91rFgebyykVuhy7KhNNcXf55n/3N//Kn/hTf5YjiajkFALdWh1ur67L1KH363FHqG1TVeBVkKUQACou6tVHT9YhVrdOG73aDpvdzk2u9ptd6p5cJEwv3D1IKZ09PfeIVRXa+vDq6XZxiD/5lS+cDdu//4u/UFWRQMQUFaxonoo4NrBCCh6DjpIHYW5Xi6m4SRYnd+5Npfe6U91lG1FAdTautk11yEYxSoEOigCYafGIIQZhCRgQGw5umMq/83N/6jN3X/re9771xqdeDjF+9/3vrQ7r+y/ce/WVh9vrAYNF7w7q47ZdPjo7e/ut733+cz8UonPBl650652LFbNvm/p6PXT9NpVc+wPJmqQUBRcI1IEYoFrJMqSPvvlt6dKP/8RP/eo//PlvfONfvfKFT3309Ue1lO995x3+AW4P62FzFcCBB8aJfFNXbb+9AIu7TQ/Bo2MEksKaPYC+cOvFJ5snU17vPuw68+H2bccRWBBLKYWdkzLK8ARqx+EuUsVYnMacejVHoYorxXFKfZ1LWxSKjk599PV+dSOnPJT33929/W7KMWiaDg+qz37qlY/f/O5ysRzSRJiKyfnldhIb80ihCS5eX+8UJVZ1mkL0VW762/cXWWC96f/ez//qH/uDb7zy+tG/+N2PkJabJGF5xIFKio+edCevhCqcXJw/++idTw7q1fvn6//y53/h7oNXKrf6+Jd/A85HF/1uu3PBKZKxy2msGUsx0XBBMR8f42HLo5Y0EAhF3pYc66WZipVcrL51OmYuGo8+82m4frJ79qg6PXV3Wlct1hdX7Uld1XHKfYJiiWKsvUev4MfVcP2xjUnZ4ySQQNtVuHXEdjn221Qkjjsa9ARW1QQeIJqZFEfKTCpaJEHJokoe0XBKJfpQyPDWEbzy4i7CkXeLxSFg1S7irvtkdXKwFtCt0eQ8u3U3hoMDoiJTIS9TKuSdSdscfLG3Q4fbNFwhShUcc2UmVmYESpE8rZZ+dcjnO1HyqCRmgEJMDO7jT54d3Vr5RsY0eQcgBAgUHZBJKQ4BEEl56ieFEqObpCeP5q2oFMQiwiFkRy5jYNSigChimpLkkkWL44J463R59aQ35apxpRg6gox5hDzpweny6dNnR/cOrgEffe0by+N2/M6uieH1V+5M07O2XW2uZRoHAyg2FZ0I2dQuu/VqdaiCuUzBeSFsj9sdhWlXCoKr4uX6whXbdv3J3btUxTz2U9KSspV8vj0HKqg4TPbR049e/fT97bMeGaEDFaGKVsvDp0+vp4RPL8btIF2+6X5Sk6KmxsSSC9gMkdwns83M9pHykLQwsKgWtvpgMY67qsKL8dqjXy3u5mEoJi76k5PjYv
nW4cGqoafPHklJ5HnoRnIkRVLRg9Xx/XtHu/Oz4LzjWKZ0uFoF1Iv15TBNFBwaOGYphRFVBZAEHDMbeDGXclKAomU3JQAHpt4tKgsO0awwkYgogCMUtX7sUCX6ynn2dQRhAlXDMRUiMDXIJgUut12Ii2rS3bNH7XHjHTM6RVaTYZzaeNgNHWPOaSRQKWVMHKxdXwxf+vIf+t71J5frR6qShgxSSgEkU8zGzfLw1jiNTVgk81UVTk5OKp/T1AWH66stqHpCFQURACyW0YyADazkYlYSAET/5JOzl5s7r929V4YrT1SF4/PrZxOkIhIIHSAaOudVyUxAFYwZHVAGg0huQixlIgBCYCITMVVGRqQk2VCZAgICMu3n7lkhOm60pJKLM7OcBUylMKHaIMjsq1x2CFyk9xgQPLMvJdcUHbpiawRkCgQ45jH4ZcoDEGcppuqomvKI7BT9jDkRhGIZgZh4fmJwFO698NJ7j9+dRmXOFQGp6WBmuqprQtVcOItBIqRpSg59DLGUDKgh1hdXm/cenyF5KFLX3vtIzovZ8XG9ag5euHUSY5WHoVnUvt77KdChYwZPYmUqiRh5FWDFVKukrmUGU2KwTEmLCnptrejt4/t/9E/+2S9/4UdkBFfVuKgMrLvuJoGjl14/uXMb29oxDt3QkA8c6rZOw1SSkAPJ2bFn70Xm4ucRnEPnRVJVh4DWbTehqny9cKEqaGACOZNBTrMjh9uqNQIpoinNpcuxWaBRt97kaRIQBMUsPjbk0cWgQ4KhpwC1r7FwGtel9OaZOVC1BO9NR7Cpiv9/rv7sybIsve7EvmEP55w7+BThMWRmZWbNQIEACiAEEARJoAE0QRISCRlbPZIimw8yWVPWMtODzGSmP6BfZWrpQSZrymhqqiWCzQZAEk2CBYAEQRBAVQE1V2ZWZVZmxhzhwx3OsPc36OFcjyp0pKVleITndfd79hm+tdf6rcSg+4tHb33lK+995a2Lx094GY2760d72e1Pjk9O77/S3r7PIZVhSKOn0IIoBc5N44wQK6mYeIydVSzDCIpvPXnvd772pWJ26JH2m+fDmwY0nNvZfdaoD1kPQpovQCYOxGp1V/WLX//iq6f3z6RxXDoHwxEnjJ6Yl2ZWNpOX6YizBGjvvRFifHzxTRa/S5AX2m+v02JRRWiE/+h/9Qvf+W+3Tz74sNNkooZezYixgjIQkBmgu09D5cBqwB1qZGsImYPjuNlLcQMwc1Gd4cbzDEINhgYX63R2fJSWLRKw4bgzXja5y8DEg8RafByH3aCVCAADxjzD1Scf+6nf+jSRGy+7FHM2QBuJiJlCDjlk4Qgpq6FXsTLoVOKMg4nJAxcAtBSYMvA0VMdianUYmYK5GypEi23L57d4uSQgu6G07K6eQC1VxUEcUCWbJ4JMkFEbt4DQOE7GU9XBgALnQA1SCABi46TqwGZiqkigYDTHdtBVETCBHeV0f7V6vW3PI0ecYTKHKjJ/6egAuKEU+cvg2Y1OdOMjukFUv5zu/eWU/1Il+pOBsptd9ZsR6eZ/vomeHf74exlH8/c2s3PczeZ2N0cwV3NzhKcffvu3fv0f95srj7TZbMtUkMmqMGMMlNiRIXCkENCNEUJKBE4UmDjFVFViDJEREeZl1OWWeWLwyJA5eeBqvhtHAclNIzKpewxcVWKTVeaDgeuj1xfrW4AeQ4jMzIGZ4SbVhcSETAjkFQk4kjuakqkjIQCl4Gja3Lp1/5M/8u43/02ePBCZi4p1XbfoWj7AotgVDIkxuxUBVbdJCwZ/9c17j77zYb+bcoaYEKwwQMihNLTtSctkouZ2I8rNmGpwm0m+h9n1kLwyOxypA1td7VDUdThSTOxACIf+epz9xQ6m6mZmAlrIkzu686EZfW6UR5zb0G8sYjeL5SAdwssFgTfLy0yY01wZBoBzEumw+A4xNuE5WgWEwIeEGxwkGHcnogOyh4OaOqDrwZgD7gw0Y/4AgJDNjJEBEcyA6EYjQwc3EwQEV+IwF8MTgjOqACH5jJiZAc0OACquRATuBIgU3O3w+sQQ2FQQbswmBuZmXv2GvX3QbV1hxlTbbFGh+cH5wNR2JSRHJwqI7OAi0yz+ESERqrtpneN/sweLiVUlMjugOYEa8EGbkALHy/VRt8gO7aJNaCZytR3Jed5SCJQQ0EDA1a0CAREyRTO7ofAIUWCemXcGJnO/h2lFwhkAZKZujjS7rZCIRIT4ECgjJERwoBljPeci3QHMOMY5oVVFCImJiAlcAYGQ0dGB5lzjrPW4KRAS8YxPIg7MEczA1A41n7NIGMHBRAHAvSKAqRL5DYPcmIMhidTZgThfzUSEidzcwYjYCcAQkWbnVMBANJObxOa/QFCrgjQX9iEgIwOSViFmZhaRw+2W2G+8dQepKHDM6+XliycQFCMPsidbSFWHql67HIKD6rTfv8jSj/trld5N3QqB67BREURC9UXbASWEiqlxFCc308CN1OqIEEnRKRIzMbCotynXof7oJ37s9du3d/1m2abbt8/7q36zu+iO03K5LJM62rpbOMA46Sv3X3/09GFMWczqpCT+8NGwv9440bjpQ4wR4ri7stpvN7v29GjS8ofvvf2V999Foo5zubpetc3rr77yxbfeWh+tdEAp5f5rt77y1X/zp3/8Z4/O7rZdrFXTIt26d3t1cnL1fIwBW2ZzqFNlz8dxcbF5FlMwx5Tz5Pzg0Yv1ctlCvn9+8uUP3lqe3l4edc9ebJ+8uCarb9y/i8ftZrPbXG+PjzRkvrx6Tgx/+Sc/Ozx7/G+/+tVgBkjm2gSKEayMTYo5UJcjubmXro1NJJgipUQIOUDpK6FyF8dp60JcUw4xQutAm6maQ4wxYSM0gVXTPoa46talz7bDz3zsM+d5efHo4Sv37tpYmwAn987P791lis8fvZD9dHyydgEM+PjBB++8880/9UM/ulh1fd9vL4Yc29DkKqJSd32ZtCAwGGJqctLrq6cclsxJTOo0VZmGOmJLoPilP/r84Ed/+ef+4u/81rSZHp/9wOLpowebfvrwxabZX907Oa/bfVrG1cmSDMvYd91CFczKYrna9YZIjinFQA4Yj08iYnvx4MnD01tvRm8KLtMyxkQqCli0KkIEV9e9AxNTkxpGdBtsHFR7KL2ONfIp4im6BAwzhBIAnr/Ynh2dPvpgkEKi4511C3X66u9/GUrNJ+3FbsOMpCBTIbdV0965ff7ut5+1HG7fvR0Xq+cPxovHD9YnfH/dvPv+sxa68nj7T/7B55bdWAe9cw53Xz356nvT+bq7s1p+6+n1ZsT9tL9zd333I6d/9Afvnd06gTLt3n5w9WiTrrFWVgCMaGjFC9fYBSJRrO6Lrj8+tkVCFwKrhYjTJHF5dDdRGK8eQ9nFEPui6h7RxnGEoafFmRwdx2aFACc+EF0P7z4Zr8bcBMhtf7nXVfCqXWyntBRWXp57OAphjwsqMJhTPHl1nRbTF/7FmS3WXrEUcgHTmSjBB/QjgkPiGBpU0cBsCLVNy4++sj/rGte2y0WsW3bbfmzDylObU7Shd+DLZxf5V
po2zzmGQFnL1KWY0EPAB9PYLlM1DevO+tHdKPJQ+mnsFRc5pdh2MpWjnJ5tRh0dwFWmpms4p6AsFuqAIBgpuFvMQdWAAjAbKAWqpc5FD+DohiE1WnU/idQagIIbCJLm0u8gpMAMKXsIVUckQudVu9zbuLu6AsGipbE1OKgRM6eQmG0apE055phPml2pzf1bzx8/P6G07Tfi2FBS27lPVQABuq5jRSNrcrtsl2y46lYxUZXoFTL68Wm7atthtzHRZh1OVkuxcapDt2iAHFRXJx1gaUNixJhXPIxNs9jojpynUhLyNPVhvV6t23EsixQ/ePHBpz/z5r/8lwAAHJiQkNlVKTDZd0kNro6EzBxD4pi9O7t6+gghlEly141TP5WhegnY1AnWy9NRd4Iy7PpFtyaUy90FkLdtduC2w1ILO8kgn7h/VsdeDIgycFytmtL3rqZuxszMIF5VFrmdw58EnpFVBQBUKpE4qGhFd0JeNCuCqOauVaVyjsgO6rOjO3NATk1q1QoCUEBVNcMQOaa4Wq13cA2AQy3U2q6MS2+sElLTLQmS6lgC8W5/uT5ZlFEArGmzDpRBygTf/0M/+a2LDz548nYLtVdxVFNDzpmzuzVxIUUjtUypmJ3eak5Xddpvhn4/qU/j2ISAFNDm4I1FThOImqpJnYpZ1Qr7Xs+b0zfvnJpfcTTUOJTdWHsgDik6KCEyBa0jIik4ETMknTmVhEPZGxjHLC7sEQBNxYnEK5gQN2YYuMGZtOIaQkbrytzPSoYBI6Uiym3r0gdHhmgUMre1DClECp0BiQoBSR2YAqAZjOAtOLhLCq25IgGCIQV0DpCUHJAQBFzm6EfAxn12owMYEMA6N2/euf/4+Yth6hFDqUNKQa0p1QiFEaZ+ALDl8sQomWqZBEzVRCa4dXK23/XXVYhsu+tzLqujbuyHRw9evHP1LVdPKanK6qhp2u4wHyAZs5GFSE7ANoMaClUG4qIWIlHTltqPwz7UQFtZlsV/8h/+rdd+8EeIeL+/APRA3G8G3fnR8dnp+RnHhALBvGu6iLFOVScDQCcCBOIESFLUxQwPBAErc3OfD7VUpdwsMGQnsqmge+Rw+eIi5m51ulITE0VDEM1NRmYFIGdxz8s1MYJVkhw4jrseUDwUmUaoY9t2HMOkA5MjM8fGkcZazT0SLDLhdP3F3/wnF8+fTHsd99OiC1cPHw7Pdqu8blbre69/qrt1ct0PCTEj50QpRmpbzonbMNle3TjmnI4TrfcX12Wogw7/4vO/fSmjzqGPm6SQ2dz1RDB3AuPNQHhjKFGRgBhiJAcDcFNqw1tPPrjePLnT3ucQ+n5KCw6wmPYTYFEv5Jxzl44drExGXffK02L76w/2C1suPAYy1bbNpU588eTv/MKf+6///j8dtn1GFDCewy6O7p45KroBaJVxEI4hqOXsPPf1TVX6CcBSDipuxQOxF02JaAHLe8vu1qJbRc6ZY6qDsAaASinjaBHYjIbJp9EqcMiJMykYd9HUVcdASADcNBCJchOIeBjqdQVsQm7TIjWxGcUV5nIWs1pRNIUo5hQj5BACezUycOfQJCQ30dkf4Yohxu7syNZtSGRSc24W8SCYmgmgV5sTgXOyxsCdiUCdHJlTkeqIhmCmrgWhBGIEQkACQncwnEEnDAAO5IhECMwcm2aZ87pplokDAeDLWR0OXsSXPqGbP/leOeh7fuGf+I1/9/cHIPD/5PO+F2SENx/dlLx9VyDyG2DNn4AUOb58obnjXd1NjdAevffO5/7pr9a6E7fdi/3V1YYDNzmIyKJtMxDUmruYUzYVBU2hi+RgCjT3aHjgeep2AOOAy5BRNSISmIqVUmfaNxs1OTWJwYAcEwc1BTGECBhWq7O0WFAERmLAQBEAzT2GwBgclJgJkBGJGMDQabZ2uANhECmKHpgB9PzOqQ7f9/A7X2xCs2AGM5NKMZtooBhTripVR3dY5ASuhHkqI4JU2N+7c+fy6eU47tq1oMlwPaWTe5yX6lFUwOfAzAxbATNliIR0QCQCzk6KQ7hmfqsP14dZ/nCiG+wQ8uEKMdu7AFXBHAxA3VQkqLhpmMdiJJjRQEh+8KORw4FaPStGdDDRHI46IcqhNMoBUFRuFhfoIYJ0CPLA4YOZI+TmSjjLfoe1Nhe2zZ6PmYBERCqGPDs4DAnRyd1m5WJeZWZyo5bfRO0QZ8oWUZhtc+6urmDIhAbIFNSNOMwXVZivsIiHbBjO7j5kwpeoIDWYAVKOQMSmNn+CqB5QTfM1QYWQicLc1KYqc/bf3QEpMM919QfnVmBTsxkEZYfaQSaejepqYi7gTMgIRoHdxN201rZJF30561Y+lrPTrmXqR3347IWUcU4I0ly2yGhucCPDHVKIRICGzkRkrohGeGBuIuLBAYTMAR0UiXC+ds3IJaKbKwABoqgcBKPZoQYHLHaVMmOJ5jCeubnJXC3noC9Tsszs7ohAhI6GwBgIzQHB1dyFkMANEYhIVQADIJgCMiGh1EJM7o7M6O5mBmozvQfAXIkDIAIhEAEYGDqiqIIDMwG4afG5UwxpLvYzNwNREwCOIRJCKYVmsvg8/cwAJiRVmwOhB41o/s/x+tbm+tmts9vDtJ3G0samn2p2otAgNG2M6gKxw9ROXoayzYnbxUJTV2qpxUITZH7c8yq1kouBErIRAhs65ZQRVdkZKIfYDyMEopCGvpzA2d/6z/837337neOzo34rl892gbVdRFdkZnJJLrePbj989KL29dmTSyfY7a6ZuWAJSsOG1CxEbhZHCHW7ea5lXLTx3p27ddrkZRvb9s3XXlvcbnePr9947WjYj1/81lvbWm7HVcumCYmtvyr/3f/jv/5b//v/c7dIYDTtByvw83/5P/inv/bLtX/BNPSbAXJOplPcdW0wlUXuxnG/it0yLHe7PWachqfn52c2VnM4zrRo8vOL3fF6uHXrZHpRLp5t6njZLvjo5NY0TWC7v/5Lv8hR/92XvhQX3eV2UBEGiily27Q5MjITV+8GDZNMhLVrcb/bkAMXKa6GqQoEiCF2rgGx25ULzu3Q71vODaogSkhugNyqUCb+6T//54/TKvBYTd78yCcfPnh/scikmCwEdmfibnV2dOuyv3x2+fzxB+/95E/8LLS4u76InLs2X47XZNkdmrbd9ltCXLZHexx3+8sphYCNVxq9DHVMyCK12KCZzGxxvvqN3/0fALd/5qd//H/8x//DttdP/fBn3nr38Yff+eD8OK2dOCQMSwOsJlBqE47UVGrViTtu+u1T4H559PrYj2W79ymdLT8S7dTXd+D4o/u8SE1TakCv47ilJRtAJY85IIK5iVcnTF02rHUnpqoVIEQJ1K5eUXCOB6Pp8y1cVP/me9d3FidUcFuqUvzsJ+9dfvj44W537/b9oR+kTk2MKqO7XV9uu0UoQz2/ffLW1755a3X3hz/7EZFH986p2Udsjz54f1NqgG7l0aYiH78ND+3h3/n3f/7f/NuvtSeWOIJ5vxmuLvYcAer29PbdL33w/GgfoQQZKuXQ5jTIyI4JHd2JuM/h8vSsnJ2A
lwYgcuwWqwBUrh7G3QUWa2OG1coXzd6xW7RcNq3odx6+c//jP4FH68ur/bEFvr7eX34YsIsjQC8T7BVW3auf1Om6jEDH9306LqmxEFbn5x4arWaw2G7s+lu//8bgSzCoU4je5mBiUh3UQJTciTg1WdVhKliEUtoz8xvnu+NFEQ2OoNSeni1Ob1+++163WkK7nLSECNAGrzVxd7W/WCxXIVCddqvlYhj3KfnRukPoATEgV1dGEhBCWyzWux0ERBc3x4+8dv/t5+/MW1JgZFWLDLntpp1kpHEs7WnqurTXfk7IBwoiRTxyk+YbjY0VGU0ZEFW1VshMtQqZuZcEDUNUBarOjOi1CZ0rbq92fJLX6zwc86Lrrt/fLEOujXzsh++8/UcPpqm2Kb92/9Vnmw8ePdgGz9/+/HsnfnRvvXr+3rund2/LeGXR6jBwZXOKnEQskJ40DUupo68WK2QxEZswigXot+OeGuo6NitthDY0J4uTqhMQdy3HBvabPoRYA2qdjpeLyxeXAULAUEFPl+saODHlwJPUaYuv37n9/rfeg5sno0BBq8y7ZFIVzDEc8PgODg4cEiAgLThEhNg1bS1a96O6LPOSEYahL+ZNw/0omXgcyo6D9EIESLSbJgM0cRV08U+//nqVzVhFkI5WixBIkdrFQkVR51EM4mwoAHEiNaviYMDkkSEw7jY7ciQgMOVUQgDgWqxSQBGpKohUdGjTMsdWqqIDGpkUJVMToBCbVMsIbv1wcffWHcphVHn1ox95+uABs8eYDGTqh7PVUZ3Aw0jETBgBnFLs+GrT/8Sf+6W3Hr333vNvoCoW0TJZoEi5GoSAARsEDJ667lTMb99uF40Ou4uy3wNQP5ahjCEuQMTBRQshGQkhMoZaR3UHClcXw9JXn/zYK4E2MyNumvpdGTEFAFN3cGOnWocQGR0AQqBUpIhMTdsigZsHRAMUtcwZUYtOTNl9AtAQ2lqR3KtWcmKMahVACSExVEIEd/ZSaoMLJJJpRA8MoFA4ZFEgCG7AJA6OsQHUikCcAYKqmgMzokoAjgmmaUKISAZqiGRezDVy424G6gBMbKYYmIgmmY4X60lqfdGX/cCIFkiQPORxX4k9txlMVPbCpGBRvItpsKowBcyf/r43f/erXzP31KSqcnG5GacaI1OOXZsAYLd3R972w3wScMupaQyKguTWWUuymtPCBd1dgzKI1wnJlpS8+iIe/5Vf+Juf/JGfGlCkaophe3H14jtPmbqT9Z3TW+d5HRQ1cgQVpghOplZKQXJ0ACYVJUYKbF4REJkoIDoTkIj1/b5dHsVu4RHrMHLAoH51fUkcY87iWkUIoIlpZmeaSEixjEOdKhJHzgjcj5MTTNue0SijlpLjEtRL2ZpVyrnpTtQUIug0LnMbwZ9882tf/O1fgaKwPrq63m4vdrLdL9RuL87u3n2zvXV7ct09v2q4jeoclJuU1wsI7KgDTHGRG0T3ZtpDv3vUX7yYpL/Qq6+8+44DgKjpDWjmBhE8j+eqBsSzbmjmiDNSYsbOGgKhGxJ5oOfT7uGH77+x+FROHa0a6dVRRSaL5FjyMgia5ymC6mYL5PfeOJPN4vmTt1NTluuSWhircGqo6J3c/Gd/6a/8vf/fPwIw9OBSjMDUUwhiEjmoWFUTqaFRzhTRXWSoykiIGDJb0cBkIbhpXIR2QYt76/VHbudFm8Js+MuZ8frRNSiNO224yCQFsBSBEOJx5tioKwYwRqzGYuyQ8gnkNlBqGPnqSqtKCjkmYnbDqoIKTKqi5EoUlN3JKVJcLCYTi2k0ceLF0dIzp0VjKsM4YRHZ9Nw0fH5sy4YCMliDRnKAu4vIAR5EATG5E2ILmt3ZoRq6A3qMolUNndwJLaAFllLVqoE5+swpm9kWCKhWQ2gcgBFDk5pFm5o0g6HwsI0/Jz2+KwkdlseNbORwA7Q5GFLmv7Qb/MYhMXZQGW8W1g1y6CZqdnAsfS8b+6Us9T8lYuNBxMKbteoI5HNXtwo6k9M7X//av/6NX9NpX133+33pS0AqolFo0aSuSS0TUWy6JjAxUBEHiFL6RLFbdO6qYDEmAjfO7tJEMJGpCLtRAgQKMZY6ptQ0eTHpyAEYUMXNzA0KYAFaHN3KiyNgQrAYM1EIMShC5DDPq4DzrH4AlhCyqeGB1+IOGgKrKxO7OSGev/GmUHn8wVdeb48XDUiZZNargpHK/IaI66TKmBgkxMAKOu0Fx5Nbi91eS79pl+3aUS8eaFwtjs53EYqFADRPvA6OcwZKzdABgYAcDN3xYO6yg6GGcE5XEzER+yGNOOsU8xRvc+23fxd5ZDfZmoM9aJaZZraLHXRDNDOa/Y1u8BLQMks1cyyLDo1X4h4wAog7MKK6ISDhTTgO0EwOJiKcGdDzTouZKd680Jw0MtPZcORqeOB8wwxUBgBVufmpDlrFyxIrRHKc9aYZqn7QKYgCALipUwADZCQEJAZAVVM9AIfwBiFvc0cYE3EEmPtKgrkS8CyKmNkhdwdAyOBASKamJjAX3uMhP4gY3MQc0dGsckyIDICEPDO8kcIs2WitSOx+qJB3MwODueIdkRhDQJ/K6mwVQ4e1Pz89iTGGZvHtB08JDx32VZ3QCZgP5yaryQwgZ7MDtcocDvH2gBTcjJjQEN1NC4ekasxhzsPN75Kb4oEJ5RTC4d3HWY8RQkAisRvTKPNMpp8j/6YOMBOClCi6i6kSEh3kSzVyNAckMzl4Z5FnsNZsn51PSuYAhIwIIaopIWsVmEvrDhokMHG1ioDMM/X+cCzA/cCSgsOKJ2KY6eRIhDPcekZ8gqsK2NxAR8xuPreqmzqwcoiqcojrfpdVRLeb9phgHO1JhJ2JZ3ZqIvORaK3TGFNcLLtqIxI2x0f7YQy5weAG+xh6RiT30cxJE0J1iMjTVKsxYJgKnnSxjnswjZEiCSQiD46Zvf3f/Rf/h4eP310dp7bD0K2KSWxj4hgYx2Hbj2NkfnrxQklunR9vt9vFqhnrjp26RXPSHT148CAv826YNlXOTxrYT4niquFxt1mwpdP2p37qU8Nu+8UvfyswlUGebcvkab1Y3D27e7V/4urPH25EMPjD/+r/9Lf/2t/+L3/g059enBxVqffeeCOd3Lm6eFKrxyYLxVFFrZ6u1j5tQohRq5I+r1ddG5/tdsf5pN+UNqZhe2keC2FM7Ze+/vaPffYHF+vlkwePMTbjtkyq9+68KrXYZvuXfuYv+LD/0nvvHLdhVK5Mg1kXg3rNEZY5bwX3YGXaEZmJcSYvPolejlZFF4iJ2ghNji3iOPQPMeIyBlBRLJMYhw6MwRaNL19Zn7x2fPb08cOmzYvF6tGTR+D4kY98FAWun18sOl4cNTr6Zn/1/PkH65OjH/7TP7ovve08x5hjKuPmD7/0r37mp35xtx32+x0BNU3up+tSK7qaBUNOTXC3Wh1JtvV6RDd1JELyNz5653e/8Ln97vFf/9v/5f/n7//3O8y9TglecBnG3XDnlbMnF5tTPu7ahgirGzdh0bUqk8hAyEA+2BYCgThHjBFvnRzvYqgp5LxwTDmmxL5
sOkVynl0ECkQm4qYIgEAYmrw8Eu1pP4pDxOCGAAnxEL15/8XQq0hcpcVykmHTm6YwhtVnfxDLV78xQLxS0QDXfbXdtF61F8+3adXFtnu+2Xz6E5959nB4fFnunx/tByv72iL8R//JX//Nz/2PH+yfbTQ83/B+N9rq/Jd/5w9/7ud+AL9x9dufe395erS9mv748x+uT7rFlfzx53+/2TM6OmZhDzFUMAttiz5th8Q4hGZ7frQ/WXJHVADEEkfd7WS8onpddn3CVLYlvvqJ1K0WUOKzD+Tyyc7TRz/2Z4ZKte/bHKNwzKkiE+Ct26dXl9fULkpKw8UFcQULuLxNC+doMZuBT8ADBQ/Q2fvL7eY2p6CQ20ZkKDpbZT0Suc46Q0yAxi7FEHAoAG+ehR/8vuf74e7REvsaPS9Pbm32m8VqWcB8UgbHqUTGoa+BMlEwjvthrOAACxmr6ZgpBZ/cwFS01Ek9ZZZBOC5CNgBhDu7OXu8etY+f98iBEh+Mo9PIGKb92J4cbXxjyJNJF0NIJF7BVY2FzVxpNihXqWoQ2cVCiOCQYiMGVbxpEyKRGqm5DlAnB4ier7b7n/mZn3t4+faXvvSdj3zyOK7X9XILUJ5+5zEu6d73ffr9L78Tnj0KDGG0Zt1V2d4+PlksGr19BwxRlYkQgREJMnjeT1dHawLDqZScF9e78dVX7m/g6mqzWS1TcB3G/Uc/8vrF8ydjQRYBBiaaBEQyupWhpNSVae8cyHS3N1U/PVpu+11qmsrQtHmq0oNdX/Uay2W/H/aHIVlFhCS3naqamgPoXD9EL/mf6AaoptU4LCgmFplMQ4xgJqAMEAPXWpFIBVJOxcOgfeza7W5SqZv9nmPq2sVU8PU7a5Orvg4xhobYyFQPOfYylRDYAAhdVEcfAYxTCiGrVlBnNpMy1FK0BgQTjHFRxKY6uM9TCo1jBdUU06JZRkygjoClFDelwI5QqotORIClElGTzl5cb9qjNohtLp8zYTWLiQLmyI04TKTYpLEOMVBFh6nXyT7zqR9/98XlV7/9h8sI/TiKlSoTYg4xhtSJiYEvmwVTV0e/fb5YH7NJP+y3IFbFVN1DqIhtbva7PREHpiqTq2ktYFXEp7FmWrxx63aEaZwkEjHHq92T0HSIyMgoakrMjegAwBxQalUAQIspAZKaG0jkhB49JAdQ1ZCiWiFgokzcaBnQKzFalRQbc/S509e9elVzRI8hgwkBcUjMrVcDYOCmobjbP+WQHUAAgLL5NJetumGgVBVSaE33gDjVgZhdtFolQpGRmZHQjRAD4OBkTlBtZGwYKMcUCO8e32X3yxcvai3jNJg6QljkFQBWq6UOk4yL5TExgrmRxZjFpd8PJvrpj3/8wYffqYpmUIoOo2x3tWGO4KWWk1un/TDSzYBKhMwUPW1lmoJ1DSaIDGgWJytOJj7BpIHaYIFr/Yt//hd/9Cd/2jHK1Y5Ar58+3z67domL05Nbr95p1u047ilk5IycpqGUMiJbys3cqWw2OwJIzIAZiczNROZ2uN3+sl0eh0VXyLwe4hJXmwsDOL51pMyChhzItOpETAZaq4gKAAMBhVRFy1SkTpt+kwIfH59uty+adr1an+w3TxBTip2I1KJVJbinlLJPf/Qvf/UrX/ydk/PbzerWtz54xnWM7qUfj145u3P2sf227MsUU8Zqy27VhGSq7cm6UgUSCgEgOx1N/Xbz/MHuxQDqUms6yt96+s6zca9AaDMtGcDd3OlmS372p5obIzGjqSMT+Nx7A2AuLszsgFVcKT1+9nx6rVCt3nQCgFlhFQo4ERYYImclZtDFik0LKEyLpoZXn+7fG6ieUt+ujoZhbFuWfvjE2a1f/HM/+U8+91uUsM2sBoHRHfTAN6XsDqooTiqBs9Ic/qO2y9oPyFCrBGJzXxzFs1fWcNLV3KFEtjj1trueSl+tL5w4LxKq1lGFY4gZWJrUTk7FgSM7GLmRe6CQ26Ut1gESbjbT9aCDMOfESRVLdXVgRBCBWsFstk0YEjKDeRPzABjW63ax6I5a7wI2Wd2pCLnk/SCT7UNQR+2nVc51u2emlyINIqVIVRGACSNREyibGhCqmooYEmEXODkohWCYKwYMjh5R2Q0BIIUGfN7+nxn5FCm1ed2kZQiRyQgUkP1mhJ5jhgeR5jCv240zAw/dY3OoB2acy8HOPDvQbuSk71F85gnq5SvATYbNXybNvifL9l2FCGEGV884H5iDkT5TjufrpzshwBf+7e986fP/ehyutdrV9fbkZB1CqFeb47ZZN9nB2hxjpLZtQ2AzR+K2iVUsJgoEGBOCqU0YA7qZKoExkdZqPt679erz3ZOAJGbdolXDoZSA7mKKihRTanbDgCEvu/PYrmNOiJBzRggGboghREAHQnMLxIiAwEBQvSYiYiRGRBIQNyEkQEIiZtZackz3X/04DvXDZ+/cO2tWiQ1qalomNq9qRoxGPmh180hOIG3KaDj1PR81+fgolmBDr7XPjDpONWhM5wWCw2Qznx4NAdxArSKzA8oM9Jkb3wEOx/ZgC0JCmgnKeKghmy8k7qam4qYmYiKqqqY4Q2RmNvBBjsQDRAbgkHWcKTEwR3cAYBYe7bsrAXBONjERuhsouACgHcz0Bzy1ujOzanWHwOxiDjD3hxLxS/iRA9xgjNDdZ87xvF6B2NTMnIjMBJDnFPYMLzqQ/93mZUlEapXmZjeYGe7kBya8E5OawqHuA2f5D4nAHBFnJLOBAaCZMgdTxTl+5q6g7kZM8+mEDnBAL4HPnikAAFTVEMPs/5vPKPWKyDwH3IhUxB2eHS1xAAEAAElEQVSIZ42VFAzQFZxu3Hs0y2gOxMFU53ycmnvwSWq7XF5f9BQ8dWEL6fF+vO6HVZpXBRORugKRmznUOXUISAazPghEgQjNCHB+u2Z3GBE4MZkpOKq5uzHRwU9zYF3PwCCY1S73QwfZHFgjYkByn18TtFYHDJH1wEOfpSchROIgdTJyRARix9ldZXSjhNt8uaZ5iXoIAcAOybibPOCNKcwAX640PfjpwEVG5ADuajabBIlIVR3I1AIHB1BTd2D2mYZARK6qrsTsZgBIBCoFiW+6+BwQ5ud/ZPwTUtFq/XqboE579PM9PbnePCOcCKN6BFeHNmDHhQODKBfpOB1VkanfqChFAC3mYoiKEAgyAgBqAQQ0hKM1nzQ2JrDQXjy/jkfd0frE9zZs4//27/4fm0jVxqO2PTs9reK7oYylL2Pf5cYQu66LTOKaMHjSx5sH95q7DMGqG/Gj/WNPsV2dbfYPpIxl0KkfjvN614937r16ugrvfudb0/5q3G5fPcrtq0cS7EkpPpmJPn7xALGSU2oaSuhJotLf+7/9V5/62Jt/4Wd/9smj6y/88R8+fnzZNBhjIgh7k2qGjlfDdkVhU4e2Y+Wm31xliPdOT3eiEPHZuDldrYfrfVHtx7Fr1w8ePvnU973xsR98/Z1vPDg/WoD5++9++Nobr49DEYWf//m/ZL/1L957/vh6rAE5ZgzBT47w/nmj4x77Kkg91EA02qQTmNLFEHbSNIGqGb
ZnlFoIC5mum9iM2htACLFUsNCGvK7jtEzLH3/9x948OR72F6enyxzadrHIy7TdCrNfX1wNw5RC426K5Xo7Bc6v3X/z3Q8e5Rhv3bql406t/70v/uGP/sDPal9jJgfSyTMvLvcbRc9NixxHcDFRkTLJtlwUVg8cIpXiZqpqx+e3vvCNL19u/97//D/8O5/7F7/xypuv3P9Ti9vL/PXf/YNHH35rpAyIdgQq4/HJUZsY0LwOTRsZ2surTYGngRqODbma15CK16swxWWOGm9PqkaGxIyMHNwYEQGD0uRm6GDuggmOqDqlfnYSEFECapt42EO7vty/+pFXWP3x0wc/9+f/9B9/cLnvWa6ev/mZ4zwtPrzcjE++9oM/8VNfe2f3YrBX7p2bPd5VHfb9MFz9tf/4z/xh/6UnY/OFt/slyifv3z9ZUaT3fvLfu/Orv/3tj6/vfPXr1wVglPjV67H5vRev3r9/fjzYorl4vldNis27f/z2qd+2UnoZldADjqrm2hBr1cCtNO1mudgcHysIjc4AVlVTDVkuH76/6JZx0WrRdPt4ef/7Nk+/FYdNffGcMnpgXRTtJ06xCfH6gw9WbRvXp3VbrrYbXKTF3Vcud6Nvt2nRYuxS1zFjaCPaZKYFclShy2+Er/7BfT5p0KuNIuyuoogqPDtrjZxAALBOaoYExKlwjGdnpdhJSkddGiYNMTfLs7Gokrlj1wVUqC+udNqY1HG8ipmBCUISoVLIDPZXw/rWEXCazIhzpM7rUKcRShWy2C1DGHNumJ1Q37h76/GLb5mRuiGoiUSARRfKMOIUzl49oYxyPYq6TCV1HVE0RzIqtXQpxcwqQEA2mtcaEheRJnRSR3Ci2A51RLAg7iJnZ0f95eQABPG3PvfPcpNfPz6fHl1aGU+XaRhs+3RzZfXo1kduLY+24+VRBBSQkT7+yid+4PzVb3ztSyklhzhd76oqOxOyqlfvq+yvdli9hpDarltkfrHZXV1vHdU89BOo09Onj3UqwOH1V+6+9a0PFEOg8JE797/89jfXq+W+jADp9PgjOl3o9Q7crrfXYzEgrlVzTlhlGiWvlxexvPfhi9dvnR2eY9Q4kJoQEQIRCRLgy3YaN3cgBCDMba4jIWLMAUQgEFvc7XeZGoZgYhgocyxumWPXNtvNs5DRQpCeGeJUiSycr47AFAzbrrGhIpKbxsAUg7i4OxhwbNRHJK6ibmDmATFGYkYHG6aJODs6AcW0BJ/UDSlshwHIyThRm2IHboRURUyNkSDhNPSGIFqrSIgJlMp+RIDN5TWSgYCLdIsWA4oYhsXxarHdPoFAqGXs95EbRKpVP3L6qdDc+ne//ytd41Nfipg6IzeAbC6IokypWSsnRDpeL46Pcimbqd8N45iIp340J4JAxUUrmqsAEs+EymGc4rK5vLqOBT929+7pKu/6bQ6tSn2+u6LMBkIQzDwxuQdHZk7mXsUJGcFTolqHHINVdkNwVhWHBASIhoFAXYpFTLVqCgGgEAV3cxBVAiBzN3cnR2QEImiYEomaq1N1tDL2FuLgJaSAwERhqiNDcNPI7ETu5m5OZK5E6IAMTJQmmBKBQ5wRD2qgM70G3BzIhTC4m3ghCl4dgY8X52Xy6/0LCnHXjxyHpl0iwVgkxLTZb1JqU9tUHWdjlZunGESmha8+efbGF9/68mJ5FNtoVYvJosu3T88fPnmwvbjcboeQD22YrOa7ySPSsm3ufazJnofrLLbvRzKkwKiRoWl9kUT/7E/95R//+V+MTVc2vfXT9uLi6ftPCGl1tDx/9SS0OEqfmzalrmrZ9zsCDJFi1yKy1UlVCQDB9KV1HxVc3JC4e3HxrO1WzdGRMLgVJkKVMu2RqQkLFTevojUGHPpdDBnsUM6TUydqnJrAwWRwGDH4+Z1bpfRXu+dtbEBtc/3IwQhzzkv1awHJmRbJNg/f+ZVf/+Xhen9++3XL+XK3OUp86+jW0+fvdN0iKu6unjs1CQPnxWrVNKFDlbzKEshyCosGoLUedg9e7J5+wDFnzgKVtGo/fP7LX3EiqHYIe9BLc8BNFOhQRuOC6shEh1yHmoN5jBxDFPFAjACRYdLdYh0QosUAPnjpvXElgygIJOMeZTb+1wBjJKxFc/LQffSbX/93b97Jd7OlJairVLN68eM/8umtDr/yO793RKQKqhojOHCIc0e5zWLV9eV1WuW4iC1SrSIi7aqVUgCdADnGfNbJYtEslyG1voP9Zd082Y+7Gpo5EsZkKD2O7p6YEhKxYHTyYOxjJTRGbppFSA10K+TGd9pvatlV9EAcSjUFGKphYiYhqAG5iSyioWnEwAxkqCxcyOl4GU5PPKKCkWBxAyKiBFhBZBgmnCzHACwA3vflIJjCoTEqcDQLzK1bcAgOBbyIFXBC6+ZzWebUZMjuil6JHGhEV0ImMGB0iI5GxJS60Bzl5f3QnFDqgAMgOdDL/NgBTPTyn9mZcWP6wZcS0svM+/f+miUmgJecIzigm2dt6JBVeYkzIsSXiOwbg9J3vwF3n8vD6UZVQAdwNAR1QWIr42/9y1//1h9/HqCI1WksDUdUnKbStO3pURcJuy7ntuHAgAwITWCZqqmkADllM48x1Tql0KW8kmmbmcGNGZvM12P/9PoCKFexSWSRU4rEwRIEMzV3MhiKGjW5vZ0WJ04BHBmJMDAHmTUPMyJ3pMQZwREYGf1gqeGX8hgT+U3szs3VZK6vSrF95ePfP0h9ePX+m7ePIkOpJbKnFAN7RCSORXwYx9h2OaVJL5quXYfFdrfB1coAFen4pJG6efXuyeX4Ik+J8EzJxMHAzaXqFJDQcM5nMR7e6e/ipWYrjeM8vaPT4RAiGJi5wWwgQjBXdzVXMRW1WXoIRHjj9EGk2UICPmfg5HBwv2sl85v+tZtVNmN4DgqkzR4ZADSd74mI6ORy6JianT4qN01nB01ztnseQEXEAIDAbuKqhIQQFEzqxDETKOJc9EZuYGbMxHjANh2iggAIRBjhAFsmRAC7oSgBugkBmhM5mnviYG4uQhwJwIDmIB0xo8uc33JDYkSi2UI+85hnLW0uVicM5obIiGQuHJKKEJGazjWt5DNd293UTMGdkGa0oGqdPwfnqnslN5s3C2gWSwAA0EEdMSTU/f7O8fHVxZMcYRn9Kdiw7Z+/uDp55VYRQQ6ABK4qOtM0CcndfeYWhDhX1s0UfFUFQGD22b00k4XMwelgPgJARNXKHJDY9MAfm+Wh+ZJBxAfINM53a0UwAkYmOLR3zSIiAHigmRSpRDzfvRARidULH4Q2QgA1nTHu7gAuUocY4qEaD8nUOUQznxORqoJEzNFMCdld4bumIWCa2zCUeLYegTuKzRtLyEzgxogG5A7M0VwI0TC4CgQC91kmMlUKAVzNjAOp65+Qitr2dD8NXZNNsQwbxkxgBpaTanUAFKn7/jKmrJ5DOF0s7uLuiYIGdspUKiRzd2ZX18KM5ogE0Z2wLqPcOYqXAyhrgLTIbb+3/QX/0l/7j48bNOk7DuC0ueodnQCX3WprDujkZNXbNl1cXlatpnR+dtuQmm7pu
yJVjxbdWOTZw/e1jCeLdpmaNtG6id1RdJ6QpGstN+2715tdXybwy+s+Uzw76S6fPD1rTrf9JsVmo32/1xPpYohHt443evXPfu0fLnH9Mz/1c+uzeyHpP/rv/t/77WVOGHP2QGrq0D67fn7n/KzvZd0dyzh1yVCKG5pC0zRYi5AH8sB0+fTq2eLB+avnw37agy5XLaO+ePz+7ftvqHEZ++/76CcFcv/gXbZpSXB+fnx2loepDzFV3QtMi2WadtbveXvltSSxyFTWrS84BkhqAd0mm1TdLTRtkmEfITQU677+wN1P/+DrH988fCiLhJzOzm+vV6sXT56yp5NFevbww5Ojk0W7UJfl0fL5o4dNDKvT8w8/eH/dHK265vr5izLsMNKf/qEfc0MNst/sQmpTF/tpQ8whpWkatYiZuomKjLUKBgVAjKIGSG6GhFH13vnZwwff+Ef/8P/6N//G3/393/7t26uQQplgP237fHZPpt3VJXRd3FztDYjBMjulpCpt0yiyOud2Vey6jgNROF60tV5uH3+xuf0p43NvspjFQODAHGekW+CEbO7qgMnZ1AI0KUQjKGWk5F2bFstDMv90fRQwDv1VbvpPvbokht/7nXeMTj/32+9un12Ghj7z5vdfP9ksA330hz9+efmibWM+ihukZPrVf/e7b7/9HnZ3Okh1mNanJ0en/Ve+/Lu4Xr1yfnRVh2WD41XtiyxW7bPHVz/32R98hz/41qMPb9+Ju+f7i69t1kMXAZCokilqwUIY59tRdCyhfZJX/emxrxroey0aU8xNSxQc5fyTPzA8e+E6WvBuvZTduyu8tOnSmpRWi2E/TM+fMmKdFJfH7cl9z63JsLy1GHcvjOFKPB2fYrdMqzWFiGTEVn1iAIy5i4186Z9333n3zXSE1QWNIiGAiqNDCBQp6DgxEsbgIVDRSaQNCWNMp8e0zvvL65PbR1rUkYv79TTE1Uqr1UlTTdOwMa/dyXLnY46LvWwQPATuOJBLJM6rW2O/XZ8dTapgtl6vnz25Pr51/PTdixzTslvLOBF4SKnIkBpetWnTK88UQHKVutttE6D1Q30kligqGGhqI6iAG7vL5G1gkApEUgViQIfITMgOIlqsToy5bPfUcu5SGaaY2r54FQukcRly09Sxim1MJ5GyqQXZweJxs9o/erx59jQcLxf3T3Z12zZrruU7T75Nizhoenrx/NbZchqkbZLYNAzXIeeQybkgEFPQOl1s91Ft6vvjW6vd5dUw+emdZhj70+VRkfHFi+daPS8CEz29eL/rqM2cc1tEh/4iw1h1vHfr9NHDBxibHNmLCtdh0y/b9GS3/aC/liqjHLx1KkpLAqRaBAFd3UwPuOSD32fmEgIQIYKITEVCiA6o6okbJCAPTU5j2Tn60A8xJwg2DWPbRmVpuraOpipWXLCIt90iEohK5ZAA3MzHUqqYiyIyOiKgqSHifCfuS78ILSJW0bFOSBRCCARio9VqZmKjSk0htyEHSmAuKsCopqpCBHONsWg1qU3MuetkFA6haXgcSiCoKOAQCHLTMcSr3USm46gT2DIF4ISkIRuNqzc+8Zlf+b1fa9jrNAarTCiKMSQAyyFVN+Yup2WtfnaST24lSNP2yaUM/ViqYJ0RkZEJTOdCFHepalIFXTjxbjeA+GunZ7fWWG1wMad4tXlRtVKTGJEh6jQaEDMVGZAAKJhoio25IhgxFx0BiRADBgNgCoRmTNUGd8/cASZwTJz6co2RDLX6HrE57KU7MoTMTZlE1Vl0Gnt3LDgGShANwFWV3JlMRRhTplxsYorqCsDAaIZVq0mNMagZgAZmgPmp+gZkSoiA6MjIYNWdEmfRGjmYqoowxsVivdm9AJWUuIz7IXTdojE1GSsTT2XAkDruvIq6xRQ45mG03Yv9x+9+6gc/xpf947GMYdkuzhfiCA2dnN+6vrxqj2IIN36KaQD1ETSsT6e0Wh6tE2ToN9wyDBuXAkCJGtiVz37yp376z/4HKazHq2ufQIbtsNkxhKZdnr9xb3HWTWqRo6vsd9dAGLjJXSbCKpOBTmMF8RBZDcAICFUqshNCoLC9uorUNasjQVMRUiVyk2lzdblsTxfr46FsSz9ILdg0bbs287H23aKNGGUSdwfdO0/TfhtySimOZYtMq/VRCmF3/TzmpjhUxdpvaSqLRdJx89Yf/ubv/eZvhLi89+YnZITd5dA0UXC8ePSMBHJ7dHr2ke20W969l5r1brNHbKwOiBDaztskMVSE7bMX02WP12MKi9CwjqJ1MJqK9O89fjSfxUSodrAQ0vfAZJHQ/aaH2NzciIkBiXDZZNXKCBQI0OcWIYhayq4Nx+Vqs1h3IcTtuIk5RkyiDmoBomo/TlcQY2UAsAiGAB9944eunr1rj/b373O3yJQaQPX95s/9qR/48Grz9a98E5miOx8SLKwAYuoq5Mwx7zc1VwgcUITNKAax6omQmQJVg6xkWy8v9vtnpb8awJBiigviQAA6bqdxL0WsYt+dtLBIjJwyw1SCeqn73K0DIVEMTSuTDRfX26cvpGjbJA4oqsXUmdHQHMQFKEZgYqYALnXqpwipThMfdevTk9gm6ffg6AGIgEW9SrkcTCEi1TIUmMZVMKK2jTeqy6FKmZCb0AI2xo2JE2qxbfUCnt0QPOCBTURoZo7u7D7jVub+IeYQgUStEGcKTc5HKa8jN0yBTNF1PvxzFdBLyecg7sBLNcNuvD7uLwUedwS0m6zZ95iJ/EZm+G4M7eBPgRs+FhwkpT9BIzp43fylJvUyBTR/VUdQN8CgQ//Pf/WXP3zrq85aS91vBgYMlDZXO2C6deuky9yk0LQZCWepqKqSEc6YZ4rgxIjkRMAO4KYxpDlHUstodexi4yKxWUDQjjsErHMtuoMYtk2rirupdutX4uIYQkghhUCR00zESaklZnHhEN0dPTCjmropIbIzR2ZEg6oqbhAoELM7+yx4AOWcylQoxY9++jPvfFnef/rwY3ePYyARSYmZSKoFR8KwXDTb4XHgtQONY13GtlvErWpoFlJtu90vmiDTuOCaYNtfT2NeS0h0iBIazt4Ns/kddrwZc+fqKKLZzTIXcBnawX8Gc27NEWhmR5moqtRaRMQOPwQBEuIMQMMDE2t2p8GMxzaabW+A5jdL6aVUNH8RQAAQnfdRDuoPEQLMwbXDZ4sqACKygyEQHrZJUOf2WAzmOusaakIHVDuZg1sFQCCev/is031PPNLVzNyYAs5xSwAEw9m1BLOgaXPkzb7byBaRDmflLH7NLjw7GJgI3NzFXUF1Lrc1UMbooAftjA7KqbkyMSC42ayumCnMqxr8sHoBHdhM3ZQZAecsKZg70Nx6h2rqrnSg8/ih6g4MzMyUGNygauVM5cX29mL1jqeM4d7R8uHee/NnL64++dodgEqHinpgZnR1RNHZYAVIBIccnyIhIrkqzMD8A5lIeC7tAEFgRHTX+Yiaq6uaWmDSQ1kYziSmmzMCTJV5bkZDQDQVANL5yhRoZoEDkdSCMxWc5mZYmdOUB2SSFqQ5kozuzkyq6DM0C+ZYnNtc
fGZKREgI6mbKFM0cyFRrYJ6DcrUU4gj2XbOWuTs6EOAs+Jsd0F6zpQ3UzRUqUpjpX0AMSFWqA6K5mhxOHaQ/IRUhUW7bGHjyKZRuUZfEqd9clvEKDTg0DjRo72iRmdgBtqJXghv0nlATA7C6u7iJTwZkTpSMbTxuMLKmhrHaZtNHXo09X7zgX/rFv/GZ7/+B3eX7x6tufXR2dXnVLDqvmmLyqpkiMO33GxPbJzP3xFmLaFERjRS4Wgige9RipJ4o5hzbyBNNMUqbVynRixeXjx9dVx4fX1zvarl8tL19emrjtJ+uPvbx8xcvLhn91Ttn7z55emt9Hpi2+9GolJEy6vd99vt/9q/9Yj8Jsz/5zjc///l/txv7qUruuuv93hRPutNtX5EoNXHyycBO16utelG42OwXKahq6YfJ9ndv337y4ZOT9eln/9QPfuGP/2g36Wvnt64vnl4+iyGvluvFR7/vU9ejhhYpbq6fvHPveD1Om4xdP8i4LzGQSd1cq/bdtI9TdSKAMLlCWrQKYhZQR7PiWsF83E/Msd/DOtPa889/5kdevPjw3mv37929u58mGerT7dMcQkeNQj09Wh2v15dXV4j63ncuQqE7d86vtvuz02NTut5d7re75WLt4FVtLBOMFSGM/QQ4qdg4FbDaNo2bX13tVt1yV6uAji4YyBHdJIY2EJprQahF1+dnT54++JVf/vt/7a/+9ccPvvOFf/1vj+99HM73bdO+860HBGM4Pq2suU1pleumr4NpkOboGCqWzVU0sHnbVGDYT4jOouOzr4U7jYS7DAExmJpKpUDmFYA4ELoTYM5RHbxZXNuEHpqui11KcZ5WAACuxxJVz++eEcb/5z/8e7/0l/7Kz//Y/T/85u6C7+2tI03b7e5FfzFOgj4GZhnHTOkoYn9Rv/XBs8DL7ZPN2ekSgr39zuN8u/uRn/qpf/prn5sQeDF84gdeffj2Zv/oehTZ7fX3fvv33crtk1145eTV+x/741/+Stau7PYcMLRJwVE9gAVAZoRl18fVdnGnUo1jaYCMETEwu9YRmaVN1Y6a7rbUUD3R5rp//rjDYDJIFTdpm6VTqIGm03NYLjnzokm7F5dbSNQ17fEtx0RhVVRyhMDIIQ5SJgg4SPn6P33lnUevhKNoKKQhMISAYhkCgqO7A6fUqYi6W5U6SZPbkOLVaNY1zaodx9od3V+c3erf/ZYN162ucreaSCKQ1fHpi4s7569E2cPufasKbuZARG0OIj3IaOok6tMOTCgGGYpZiZFhhh2yGgGFME29u3EMr37k1pe++UGT1lJngypQQKuw344Nx/6qX61TiOSqACpliqEhNVDx6HG1FJHiI2AgziOaMkS09rgrBSLFOuwhdgZgHEOKykNmCUzVi4EN4w5RY5sdvMoopih+ctal3ILpk4fPdIONlNdevfv2t75y+sq9Fx9eNEwNxgn63XjJKUAkACcwRTg/e/Xi6YupSCQK7DRNn/70Z7/8x1+nqk6s1Yf9iKle76YcGqk2aTk+O7najQaKjKQWtchunMb69gfve7V1WpwerzYXO3RbrVp3e3RxEZr2/PzY+OCtMxUzizNa1m8cRXZAQBIGMwVAMAMjAwwcAoNDzCn1dXKmQaYFdYBYwBxgBMiUte6YU9/3yn5rdfJcnuW0BA8K9t77H7SrRb8bz27fIzVzrUOBSTEwhYAcxmHwUpAb9QIqoekaiqJqQFMtgSIDpBAjg2gRhd0wcorH6yNyTJxMvFYBBAQl9qJWVV0U1GJIXW7EJALE3PRlrK6hCT4MZoIRxn2vSm1edgTSjzIOQFZpEWIaak8j/tmf/Au//Qf/3LmiQC11spKJGTxEQkfVwmHZLY8p5i7C7dvLUq73u92071HdxDSgARKjuAVCYHDxyNHB61SQXY2G6+lOt37l1nqadoQx5q6fqhoSs/n8jOYE8yMdMUcGVHdHJGS1ogbq1ORUSgmYzC2EWEUwkBswBz9QQg2Aiu+R3MCRIyOYMAIGRFFxrZp4ZhUkagcvHDOAO5KBBMqO2Ws0A4McQx6mPTmgu0qlWQfDJKqI0YGQgioQhCpGFGk22DuACWF0J8Kobjb7mWYEpkpqcnFYpvX98MZ733k7cOAUhuFytX4thTDuhxjCdf+UQ+Mxgrszjy5hnifI337y9puvv/Ltz//RYnkUGZ+/eI6cdvtpkmnVNk7fRZnuLrcRBZByhS6tYnUSxgo2KU/OCIm78bp+5tUf+oW/8je61e1pGqxY2e6un13t92VxenTrlftp3W6GSkwMzoiYqeuOSz/JNIrW+VxzxcjB1JkIiAEBUAAIIGx3+3Gs9954TYI41oCRAcd+9+zRg6PjuyG0m93VVHbrxTGlpbrqVLVMTSIvxcGYAlGQaaqDujUIjehooG1alHEqZYxNE1KrKm59CiF62n/73V//7/8bmbbH52/Go/V+1NjErlk2ibYXj1UGUO4Cb66f4tHJuGwKe/PKLaTYxABGzdHRUAcqm/2DR+WqJDqD0BpIlWmadnXYL7r07RePn232Ns8R5jOx49DN7A5E84g+77rP474fJn1yt6ISZmI3gKqHEMT92TBdbPpXz3IKbR1s7HeQiRTZgnqPMIkYJWiXx4igOKxT6hbhejBuVoT8zttfafkonJnziMil7ttj+MWf+NFnHz59drnJROpmIuAAgQDBQVXdJvTt6GKrBmwaq7tFBreYKOcoUgMQjT5ej7tLtd4VIC5DPmponQUsKNmk0su0HY2gALc5RxH2iYqYeE6LkBrIkduEdZTrfv/8ae13HEgZBKSaiBqi6TRmapvUoLHxnBgBDtS1yZX3RfLJMq0igTKAO7KBoozDXgaRqSBzYoigy5TBsez7rjmCw9xbc0zMjBQAAAgoBGCiOrKBW+/Uq70AzGQNQHCPZgsKSwdEZvSgOjEhhxxSi2iMShwRIwOSC+qEZQDptEweAlKcJ3MzI+S5pWoGzd4kI/AmfDZPdgA3RiN6CaKB2cqBPotMBynyECa6qbJ6aTc6/Dt7i17mkmZlaBahXmKDCWcfppkaUSpD/xv/7Fff+8ZXiHS/G1VqIFp07TAKNc16tTw5XuWETRNyylKVANWBmMGUAwUKgaITkhMg5pzB9eY7MCZ3AmBY5BbMOcVhUHQVMVcJsXUgNRvMBHJzdCctThyZHAITIsWQqgNSZGZgTUCJk1gFEDMEh8BkToH8rUdv3VndPlkfG3oIs06hhKxi83hZpXJgV+va5Uc+9QPvv61Prp69en7CKGUcAwYR9CgKhbhdtWfqtlyd7ftxM/VsGENepmbIJh7LBFKLeJ/b3d1utYHpBTeEyTwQsAKg6fwmA9JLK9nsM7S5cORAEb7JpqEDGDgwgLu6m4mKiNQiJg52I6ncZOqIEYPPr48HbtEcVzT/btzsoM7cyIeEpIfOcphTsqY+35gQmXBm9sxmH5/b0OeqMTMzczAgIkc77IM4manNS9TnCx8DuLsEzqplBgiBgboCQqAw+8JwbqF3P0T0AVQlhOSOYKhqxDxnwRAdYI68zTZwMlM4cKNJxRjRzA8
7vqo8b5DMSKS56cKd8FCLxhxnwriZzO4VVZnHqsObNRtDHLVW5DDXfB0wBQeHkRF/1wZIxK4OoIyBkWEuZwckREY0dFNInGOh28dnVnnq4SMnt7/06EWI8cvvvPdTP/T9cyfmASDtZubEs28Hvsuucpur5RkZCOZaObzphlOZj/KhxG3GOM6uM1XhmRqI4eAkVJ1TqMzB5iXqwERz5whSmC8z6IDEohXADZUwEs4fyswvnxOCJpU4IoabyxciI5jNObwZ5e4E7oYzPwgADJyRMAKSqSE4uM3MDXBQUUR2MyJGCIigUgEwMCGFeV2bKmBQVUKYfwomZiY3ByRVRQBAY+L53SMi95uL3vdKRUSBSadpQvUcOszHfb8NZFO5BKtuRcpUSYCjoTgNWkYv+zYxhrzvJ0MDFDNxIGIMTEU5oJ52nHyMOb3o9wrh6Phkf027S/jUR/9nf+qTn9pdP10sOsd4vR9S2+bUKYmBjjqOYw25KUBdm6tDs1g4mAzuiGXsObZV6+27d0jjg3e/sT7qht0+TDqUfddwbrHfX+0u625fBo0Xff/h5YbZITAQ3T279+HTRy+u96NoZn//2QdH3b1p4KFoiI1HxhyeP3j4nSdPyjjKJBDjR974vre+/vbQa6lD6UdX2JdhsV6x0GR6vbtedmkayrI7XrrSwt578OHi9vGiCcvQjENhN2i6R0+fHN86vf/aK9/58PGDF5e3jhehIdV+Kkpc7t07Kb4fq9PZESW/fbJ8/+FwNVnsluMg4+jgYbOpKAhWCR0cDKCoFBhCpEkQOSVc9tcXGKJbi3EppfmhT37/N9966/SsW946ntQJQ9PEcV9Wpyerk9UwXHuNLy6viCHHrEqLxXIvuFyftKl58PjBcpFX61VMzX6/lyplVGZMORN5CLDfb0IkZG7Sqi/b07PVdrsZfPSGGDISEEaCaMoFJkQKRMZIoMdnx+89+vZv/Jt/9bO/8L/u3m1t883H3/5Xx3l65Y2jYWMYSabhw0f7W3J7IX52fLyT5yB1HKTWcb8ZOWVObRPP+mmPgQj68eLB8uwVqbedI5LD/MWIZwsrMoK7ynxjCc6paU9RJecmdGv3aDcp+LGmq2fCW6EMvPzUN98fzhd5GPYjcGV+54PHt25Rw5FjM1aqw/j0erPWheytqF4U/djde5+8R299++u/8At/5u3n/q1HT37tX/6b5dFiuNheX08/8ZOf+s47v8tt+/jpjo9Wbz14vh36H/v3Xzn57F/4v/wX/99PhtvRq6JDDJUCcnfUdtcXY2Lqddq0dbx1JCkEV6wlMNai4EGqqSsG3O2GUo00GgRRASzW5kRHFqTIAIYpraeR8mt3+nu3+2ErWmupfrSm0+WoViBEjsBGIEBQVd0MQjDJ8sHXXn109ToeYQVldYAiZiAJMM4xfcSqroaOJFYNIBPX6sVNbp/x66/aqrl1dhtDmkybVRsQcSqaaupyCrGA89H9mtrS98g8yrZZNpC6KsSxAasYAgRdpJXonmNWyE2bqriLd4s1cDSbQs4B1vth1+U8DHLchmXmcZrrhXy+26koBi4iIjb2VdmPzjtkDRaaECYwJBUvY+mBYdXkcQQd+0p6cv/29eZ6VuJR1V29KiGXQYbdkMGq2NTX5tYpMsBQxRXRVK2aJGZ0u766ABV2Bm4Ugwt+4733HPjBwwdlwDY1u/1lkR1wIW8it01IMk5ZyHfXVq5NwcEFfbVaPH32SGppCYZn1wj4wcXz2/cWi8Xy4YPdyem6bVuKebFYprh49vTRapnEpqvnj5rcANGgaRWaqa9Shqnv7986u9xuT26lkfnP/+hn/vAPvnR4eDbTUjXwAe9AKCopZZgf3NHxELFGohBjgxi4lmEcTIep7lJqTZOZTjao7Np20UQCr2nZ+rAJ3BLZNI5tbByVY5hk3G3HDy93Pkm7OCP1SeskZXEaMYBWY7LACClSCOCdMxERqHCMpZQYY0DSWt2wmJq7IcYm59gyss8NxoQUYRj2pADoWqu6xxBMNcdGXQBpKnXuG+2HMTO5KGfuutWLJ88oejXr1ovLixccg2ktw77JkBR/+LN/7mvf/vqoe6TSJagjQGiRg9YJwJE9ha5bHIl4tw6nJ4uQ9OpqM2134DYVQWIDosAYotUirkWLqhy6K6rkmK820zod316uZKwmlnLa7PaG5gEYqQmpTiMRG4Z5r1StOhIgICp4nfP77IgaGmLVuUQGmMiczI0BCEN1cHUDYQKiDM5owjGDq+uExHPR8KQDegSiagOGgOQEUbQm7kSqgyKxmitIcOToriBeOKe5sMrMHJU5qlXDmkMnRZlCCElE1JyIiZPZHPKpgEaE6sXQHBwJ1BWdyOAon54d3dr110UlNLEf9oumy4HNtQtrGYdeDcC79bEoFQFmDgyT4YPL59//mc++++23nRzIX2w2HCfw2WPguT08GKlpCuigw+ZidZ2uy46GsQWTInWyJjVe7N76tb/6F//T4+N7/X6HiuN2u316ubvark7P27OjvF6UUmNq21UnRUIOTFT6XZn6sd/HpuUU61gi5yISOdjccAIG5gRY6lBkuvPRVycvXishmZmM42a7PT6+36zWkwxapVmsnLnWYq4cmEMXEpp5HQbkyhynsm/b2xFSNYlNm3lJGEuVyHEc91r6kJxMj5r4nc//3m/+6j/Iq6O7n/hTkzA4VRVKq1p0vNr6BG1Y8VEryLGJaXVMy1MPFtsuRmKC3dN+/7iWfohQUXPbdKahTBORytQjQsg5d/H5+GIKCgYOfsBU0/zAiohoPotIYId69BvzIiKAzc3HiBQ4gIGoAGGIcY8GpwtpQXMAExCIHamL6gRp5MYQHSNgYC2FKBIlIwiOdfS4WH/8Uz/6/gd/lLujk9OQcsZBfRhvxfZv/cX/xf/9H/+Klqt5bBWyamqOGJgJnFDHaRgHoQQMGoM7mUJErIBtd1okTD1YAVGjiAxEbZDIJqaEqYqMFVTYBI2nyzE0eZxcsVBqc0hdezwSxG5dPPB+V6+vvOwoOnOo7sNUSq2M3hIHYkIPToaowM7BwHJg92mQERYNnywHF6iSmUUBaxn7vo5THdwx5DbHzC0TBATGtFzpdAgdqKqgEFEIxETMwZkwZuAmUOc8TrIFKuDVtRcTN2Y8Ij1GTzc1XsEO3UyaYmaeDQ9S6rYfMSUyTbV2IklrdLO5Ng3gcA0jPLRbzbemmdh6YzS6UXrmbM9N7/2fcBLBwRb03TrzlzqRvwyqzR8dNCaEeZ/f8Eacmk1Hc0G5A7gZIEOZfvPXf+W9r38Jg01TrWMBJkQcauFEt46Ojo6PiJGZQoyOGAOZuU7VEZkot4tAwR2nWtrYOEKV2kQqdVKrIbIDALEzEwcEG4fRVREM3eZ53sGa5ZLyKsZj5FVICQlTjDFlNatmSkDBnRRBmJLZREiH1jFFIGZgBPvonU+YlCoFKMzug4NbiyFyUFAEQqQQcq11tVi+9rHv+/Ar2w8fv3j17iqCO9bcZTU1qwSkBtVK3UnOy0CZSx2nfdlrSi152u/7NsRIS9Pa5YpW9j
pM2lZkjkQHz8fBZzGrDzc0oQN56tBR5qau8LLbbr5QuCH47JFRVRUV1Zsfh+YydJwVKDikyw7moQMMeE6dzYsIbxwkL5fZ7CsxmMkujC9tajPe6LupRkTAmQZNs8IIB+DVzJ+22annYHN3m5moymx+UZM5QAeAsxBgbmpzKsIRwsFrSYA+P40dNs4RXF0RE4C5iwMwB1VHcgAwMEBEYPAZDsSAgDyzlkVNyBMxgYOB40E1m7Hfs3xqdpPIixxFdOZYA4A5gJi5znQzJEb02Ulj87MXESI6ITKbqalwYDxgmdxAYa4PQ0cEtYqQnEBVqwMiJkTyvFocna/zMvllE7/w1rulVAcFVGZ2dHVHJHVDJKKgOvkhjjcbrMwPT4GHVrvZIsREps4czOrMK3NAVWMmYp779ojYVM0tMLvM0cxZnZwZ1uwO7sYcEMFEHGbo1k2IdY6qza1ss4VWFZGR2cFv9CkzcDSYmdghRiIuZZwflgIxEpMjAVZVBGTkWntiJGIkmiNp8+KMIYrqnLkzmEvM2E21FiLgMH9RnPdhXCuAIbK6zto8MM0JO9EaYvZDw6C9lFAPUtE0SuBA1b1KlzuoEI/OxrHS8LRMF7tpRwwIsdTEmRTHaeqBKFAyQkpL08HqFJDJ1RUYOSDHEIjVHCikGHx7PSWAMoTzo4/+53/z724uv9pEOj0+vbq8SG27H7ctHAPV1LTj9RQwNKkxq7WK79VTTG0uw27qp9XxarVItRt3/dMXL7ZN11pozu6cwLhPUNZLRNp/+N63z89fg9h++OzB490TiH598eL27bvT1Yb2HrmLsTTr1ebimo1LWYzDLufQ5VVvWxxrjPmDdz4MHjiBgt977WM//hP/3r/6vd8cXEuQOk3AlhOZ2linhpG5y02qA0GIx6fr08trq7wp9WzZkLfTzlOEJ0+uSx3T6VHOvBuHsMPGN/fPj66fX8CGl2m1TItByuuf+OROHu2GfjNNewsCeFlVPFwPtRZhRUDBQF2z7GWPxuRVdLtYHBdRr0ihcwxuUcHeuH0/TXV9sjg+Wt05WY+jhZRSoK5dEPjV5aW7tNysFqxYmza77fr9rusWi8X50+cPQD1CrKEOspumMccuJTM/cPRdtaqcHd810+2+315fn56vtjYO7ELgFNycQp6vq4EcGRJQKLXKCDA1i/DFP/jt4498+qf/lz/95N2PN0f33vnXf/+NT78C9eLJkxcKITXxg3c/PO1aYOMUysUWOTiCIbqZl7LbPxdwG52jn53dkt3FsqWBiEGQIrq7TgiRiVzKHO01Jq9WZVKQRYw55bQ4dm+a9nBPOGkDTPW1Nz/61uOHo/Dn392d00WKcVd6Zv/km3fSIlORJ4+vL8YpJlienCybjlJ99PyCUttfX62W8Bd/5uN3z/CPPnyaWJZptVzxsye7Lp3+3m9/wRWXi8UnP3v3O9/8zusfPTIUONLP/YPfuSfHYVMAtcl5cmDBIBON9Siv09nx+/2Hm9S2jCQ1MzNGRjY0BAbTnDogQ5yOT5rN46eJjkwk5nz0sR/2HiJIefE09f203YyF6nRSyj431HEmdYXk5oE8xSaiq1toEhqo20g8Dd703zx//4/epGNGswAUWHT2LxYD0ipASBwVZCbjInlDERJyomuM8Qe//yM/92MPvvCFZWpit5jUMLJPZv1EC2ACNYtt04Q41emoWUl3ZGgpkhMVU1eJMdTJF20c5aptbutUSrWxOuaT5dnx9ePnkWsdh7Q63m97UAZM07Rfrpvjo9XDJzuKDROqFLXq6LltYoMhQCSYnIZ9DQ2EmMZSgDhQEgUXVLXQxbbjsZj3ihf7Dvzk7PaT/VNMbbtcmBh6QAUtfZ33JFIex9K1aS87wICVVFGNJsYEWK2GJuzHMeQU1V673zz88GlD4ErU5Dv3X+8317v95mixFoDdfoTkhE3O7fXlMzRr2q5pzt558J37r7x5ff3hapl1t9N+8piadjH1MO6GdrG6dXI0Cj57ftFx3u/99Oh2G+r1/smt8/XmcqgVt5Pej8uYpqZJJDRN+p2nj3eZL/dXv/Fbz1+9d/dmi8jcLEau1cB9fnRHcAS6sfnPMalDhe/hgcbNFRJnA0IHqw6gAbNVgyr9eH16+1XmaCamXqa9AzYpTPVqkq49Xk0j1SrL5k6Tt08fPlN0j5EcmzY4kwECKRKCMxAUE9SqUpvUpRhUNEYCgElUXWPOTWgyNdMwJSQRdQC1gg6MOJbJ6tS1i8CxqolMooIcmGOdhjJOFIEZR3CrMvX7yBhkKvsSssQu1MEJUmqiXU8/8qf/8tPt83efvo9eyzgpElBkCsiELqNJ5Mx5iaFrKdw7v01Unjz7cLfZZoJpLAaOigRwMChrdUoqSoTjMGAkZ7663ERvXzk/a9G1TCHGoUr1MXBgRADXMjEwUuPuBNUcgGi+yWeMAExuxEGATYEREELg1gnUxpke5FYJiTE4IrIboAuAAgIaTOAIRiG05sUQzJ0pBMpuAuZOhhimOjQYHKcYO9E+QJv5yLQCcIyoMhKQUjGZYmhQPQSWCuyBgNFNtXBwpBnAGgFmoqcbGISIjiaFLKQcikxgbiKiShlPz+/lXby8uuq6DhjNrdQaAoScVOtkJVJQKypGGPsydF03FAkRT/Kt4+P777z7VeDQdjkEmm3zIZKUw5CMZuPQU8KcG7v+kBFtmopZnQgtF8V12/7SX/3PPvrJz9Q6oLns9lcPX+yvtykvVsdHJ/dOx6msli0a4Fh1N3rDYxmlFvPAzZGI9GPfpFjHiUOeJ1czQQCkqGaX1y9ef/OTFsmtMARSMBk3203CZduuRtlt95ucFpFzGUezElPTdgsCH4edK9TB8nLthqlJwKn0k1SNYZUCVdm3LUtxTo2N+w4p+viv/9v/19tf+qP7r3+kpM44NW3XrpaTYsrddHE5Xl7ClbbLtjfNx6vm9t326H5c3uOwTdaP17sX/WgTX29qC8tmsYCg+35oEnPEYT+U/T5yUKh9kidPn7mBq80BGQ5IAEBBDkPdTXuLOyKqyw1E2JgpxRgYQa2KMFCYtSW1CXWr+1OulqRtVyITLwu6AHhkx+QOVlycUuwStQtQoKGLqyD9cL3ZjbE5Ofv4hw/ezXTenDdIexUanl28eve1//Sv/MJ/84//oRkyG6jMRcICEmMiRi2OGBQghqaKgGgMnGIkDTKoA00eMISw5nE/tG3CRXR2K6W6yXYcn11FBXcxwNSszX2cPHAIuW3bhSBBm0vOXJSs1rIbS0/MwlwRi9hYFcmRsWEml+qThSa2q1ncQAbGIGDh/FZ7fmomYj5VmaTaMA79SICA0dvG1hEX2as4ejHFImSHs8DEDI3IHPQQoSIycEgd+JqgIioSiAiSm6q5G+0ZOIfjORYHJgBVrICDzgEMcpVtLea0zdk2+5E7MKxmEjjNA7aBIQCFkEJMMTHzDDg7tF8dOo9upvWD8YQOvVUO38Oznqd5pO/qQzfk6wN55iZ7hjeGo0OP1Xd/MzOP3ee2aSGMJvpbn/v1t7/8eQDZ9YOIMDI4VVEO8f7du
4u2iZGIKHctgZsqE1apHMwdOTAhIwdzaVOrimPpU8jIGKEyUkAE4E0ZwCkg1lrMiqOmGBm4KEy1xm6VVrcrtMxt4IRAKSYkBHIiJuAUU4xRYWxSZgjmDkAUyG/cOO4KjAiYQjq8mxQIlMgB0ECRAhmp6RwRNnMkXB+d3vno97/zjS/w5XD/dO1ewBUQmm5pYhGVMexkSjESh9BQqGKlOMXU5UFp8pqJQ+Jx2LUt3oKwTzBi4IMTBJx8BujMrqJDPvWlauPgJgfO8iG9deMgI5pTdXOllMxakVtEckTAADP+GemQKfTvykFzJ5jfoLFegtMPZ4E64EG6mnHHBIQEM6bH3eZUm4Hjjf/opUVl1jZniDUdwlbARKoKMGdsnegQ6Tc3JpoRSMBghxCuIyEBOSBRICDREpgcUG0GLVugjKRmQsRzjZgbILCJhJAQ0UDnc+HQaIZsZo6Gh9AFaJ33VY0DweGs0oOsimoqhBEARBQJVRRn4VgVCQhmmjKbg8pEMeGM/yCaGy0R0dXcZFZhzG+UOncHNZUQ0o3pBsCBKCBaSJwCZg79brh11DYJ8tHROw8fmtTQoNpBJGR31YNiaKiHnYWDpwkR0U0JgYnVDNyI2QFMX0ZQZ9ELAJE4APoBn6UGHG5sX4SIDqYqhBQ4qikeXn7eOHUkIiAHZ5o7BgqCUWAmdgckUKk+Z1vB3dFdcA5GmBERIDqwzRIURUAjJDNBZAADpPllkZBCmAv1wMzNEAmZCU2kOsCs4jExEUudwDGENDsl5xQnuJmMDghOIjq3qs0/pkoB4BASOJioz9/UjZp+kIpSwKkfhu0G3MexxryM7XkarVRVqZFqE7FAASIO6GRMnOLKjYbpGoMGLMIAZqRezcG0I2qYUwqE/3+u/izYtiw7z8NGM+dczW5Pd9u8mVmZWX1WoaqAKqIh0ZESKdCkCNK0KMpBSyE7JIbtCPtFjrDFcEgRDvvBL5bMsPlgh8Mm1bARLJICCKIrAAVWA6BQXVZfWdnc/p5uN6uZzRjDD2vfBOT7kJGZ596799lrrnXm/Mf/fz/EqMy2rOfjVVjS0V/5hV8Yx7eahcesz66eOUIQXMzPLrfXq1nIMbLC8ero2ebCew+eu3FvaVRVT+741q2L64u9St9tQ40u46KplqdnTx88Io2uDWMZ33r3Oy/de+n8cv/uo9355lyDxn64eXLCmp0a9Zu7tz761vUDLdl5P6tCw+hoVlT3Y9fFnhRaPz9eHMkwKAk2bbM+euX9r3/lW195/PhBRVBMAlNtGT1hYTLOyQIgAew2G8T21vo4WRnGYTTN5p9dbG+ftVyjWbFxvHuyur7yl1d73e7mcz9f12MvdcMnq2Y3bDHPpJ9d7cahE4GIWCrWkiBnYGISM5ysOQBGm7RrfGlClaPPWiyLY586a2p/d33yC5/5xMWDN+c31gRmUS3mUIflfLa92peYLy6ub92+GfwsdltwtttuCWC5mjvvd8O5Ec5mZ0njptuGeiYm+7FDZDXLJe+GrvahqVfbsc950Kwf+fCPfP3tLxfPzjGoATkGNiACLFA8N4imiGycVY3AV3x0wn/4W/+Ipfv4p/885U+uYf/9r/3m2O/qJl9fnd998dXr6zKWoU9NG8gxAMbZrKpmzbCPKalSqWfrKNHKCDlpfheaH9ryZVEEBlLJJWYgRwEZIWdkp5KppAo0MbV1FcIsF1c386o93Aypf/jK7OSG5O+Mmou6mo/OTrrvXR0dr3/kk6ff+M4P23m73+y7uK3Iu4qP2kUFcrSsdGhY6HK3+emPv1Y7/MF3n92688r98y/LYDHxYrW6uNqVnPtd2afh5k28d5drlWHXfOkfPv7BG/4lPGkoejCLcVEt0hgJoa1nD5Xe6M/x9m3naiAPcZLIoShA1RiKClT1nMjn/n63vWj9fDm/Gbud89Jfj0PWxbxqj9dmgLFUizo1jtTYgHKxTMiKVhAIAUWKmgiBGGDlwNHi2XfCN77wITvi0cAd4P9FFUE9AQN5DjR1GANUwZeUEYkJ96n41Zpfep995GNXgo45hBqcc3VVtATEfuhCSmLZBLmiwEQGmiSqzNsqjZmpgNqYiqsYwA1DzNr7dq05z3ghOWdNo6SqmXNoJcWcDdHloUQExy7F/mQRnp1bMQBkEMSCDJjHYqLrRZWGTgzz3jsK07bIOXSOc54Oy7q/uq7nC7KyaD0WCcrxqq/QqY3Oz6OWGHvJhR3Udbvr920TtOSxK44CA3ij2lUijEhSYklpXa9d0yJRJfHRm9/qkp2+eBqTdVq5utk/ui/FohRgcIyAKCbKki35gOBwt79Yzprt5eXDB0/u3T5brBdScH68/PY7b52u1rO2cZUnIolDAzBr24snm/lx0++2ZsLeeUagdPvo1PGYxz6PY2jCvt9fDd1YPGElSN/5wePpLqirYKqliKkZ4LTvMvMIB0PKNMk1M1FTsGHoBBi0IFHdVH2OngIEEgEkV0ouOTez6tGzJ2qYizjWOtQG4IjCoj1eH91/cl6oOjubJ7m6uLja9R0wj0NXcd3Us8urXZ9LG9zY70WEK1/M5nXjkdE0p/gczSlmVjkf0FsypFgRe+diyWqKCExsYqbYNnNmb4Cp5KpqUDVKnMJrDMBKYJZK7yEMOcccwYWURK93WHvJuXa0u9r9xEd+BrD8wRt/QE1hMhNFcjYNJUUQqQpV287JXEtw42R1tArfe/PNbrcREVJAtcbXJStNeltW4oDEZppyCpWLJaoUNLeaLRYVSt+nPtUrZ6ihaUw0hCoXNcimkGU01KiRgbxvtRQyCb7JRU2SkYoVoEBTGN9iLslzAEA1BvRgoFY81wpARIIKKMy+WETyrqoZnZTkPWU1FTVQKRHBTNUwVeSnOfVEUjwUyZhIkYpbsahmKoLEqoWslBwdNGIiokgAqkWzITCHqR8EANUyEk0BJZ2A+YaoCqgHWzUpilXzpUOXpKiWVNB5UpFUUi7ZCwoyM4CgczirVkMeA0GK+uTx5Y3V3WF9/XRzn513HschMfh5tejH7j2bgXfMM3d0a+VmFCjE6+s07jGTjcCqP/9zf+7jH/5Ujn0RkJSun1xcP7uaNfPje7fWJ8ucovc+jsmSSCnMVRlTKcVXFYJ7drmLaZzN2yJahYqdLyWRQ1AFcGCw6XY37txV1pRGIiJWybHf74Oft+18P1yCl/li4ataU1GVerYE5nGMQ7dvGh/HoT06Bm7EsgtoUqiGpp25UG2uLlwwRRr3kT0uKlcN11/+F790+c7926+8vz0+5mh11XDTuLZ24LUb51jGvm9mi36MGmpoVs3RLSba3X9HrfdUwPmhl6adNctWurQdd4RaxiF213kYHDe+CoSkKu18IQSHTiNQZrZp0H6YzBPg89n7NAAGtCmapjYhJWw6FagBQlUxKlRk7HB5chzmzVhKHB5TFRMMvmmZOaXIxL4NPlTkanSB2yM2tiHHq91ut/fKYz+ujlbz9qWHTy6IwblRJc+XjeSLD91a/+LP/Ow/+u3fQYnI5EU4MBhmQDAFE4fkHKtpxYzAqmoChuSb
yoXK6pBS0SGFQFBy2hRGiGNURRsLA5IHIg6O2mVFjSsqy1nD7EjFs1PEUjINsWz3qRsNGUOtCPuuH8cRiiFChkIKVIEDQ3Zl+pTMCpj5EJY3yno9ioacrRvKEIlIu8FnA+OwCNw2ilpipKyurcFXw+7CledVTVokCaKyr9ScWUZzBGCijIqWA9mk6gCCsakWZqmQPQVjAgLT0ayIliIKeQBFcii2kxIN99ebiO5WuYa+5O04enBoBlDUCqI6X9X1cjY/app57T0STx6N5zrR80MMHngcE0HmeY3an3CEHMwRf+KPTF9BeO4sOdCQ/phPg3+MxD6ElKbmK/Ba7Ld/49f/6PO/i66UlEsSMxBAAJgvFndv3TxazQE1OJ5Oj2xGZmaQS3TOVaE2mM7XqEWC86OWQI7QEXvV5L2TkorEUFUAioAOfQVBSnEEppEQF/MjqBZiraqr6+DYB+9DXRkgMDpfiRwkMMbAxsyu5JEYzQSJ0AyRiFmn9zY5GXHqh0JA4qmFkZwciFNoYAScU8xFlkcn9157/cH3vkbQ3TmZlZIcE6kQAGZ1iOuw0hRz6cH5djGPSdMYSWnWLsY0DkMfFEhJ9ptZE9NOtXnBXKMwEYCRnUOg5wmvKWw4XQZ7rwVtOiQDTKripOqhGeoUSNIy4XRwymUfxIDJhTS1wCNODVJ2SNZPGbc/Vg/fWwnTs2paDghELJLhALTG58FYeW4dQiJCBCB4HmLT52tIp/SYgaGimiFQLoWIDj33z8ntk5Kgh4wlqSkBIdHUUCZWEA8ju+m6AL7HXQIDAzqklgyAyAOAqqoJEj9f4qZSJri36lRzplPOF/F5Btts6nzDQ57OGZCZeu9VChISYSnpEBIEA4SDdYedTG+JEAFEBZGJUEtWM0Z8/rEYMU8Fu+ydqpgcGr5ySeScIRFTHIdlaEvOR8fzV148/sH3Yx7l4vL89u0jQ0OYRCtERlUlRjGZwstTAutgQTQ8UJ0OFEQAQ5VCTEWAmRmxiE67Ly1KkyebGOnQTF9yntJlCM/HomaimRDVhNCZKiEho4oClOlbnrrGSikqBcgjMgIQs4ioKTPqAVKO0+pyzIiQc0RkRENCVZger6JiIM65knsiR+Ry6sl5IAIjFRWRKZ5I7EpOh8thhjTxCWDCME3UdTBF8gAEmpGmkhOxaSKMQEhiwo6mB+Z7d8FBKlKVnMcUexeckrq60qpR06MbtzaXQ583WaNg8hAwZVHxwbOaSvEosWyZBnL7kgQEA1QMpeG8arxnKyUZoadm6GbaL37hL/2tlz7w0na3QbDUD76tZrO6DFEgt61fzc4uLi7I+MnlZVXXw+66bhsf6Ozs1qMnjxHgyfUF5DJsCwRqFnNfBYxx8+TJrGKH1cy1jx8/Xc5PNwN964dvjZ0Eb9f7rt8OizVfdZsX1sdzD3/73/+bf/ef/LePrt4+8rC/uhpEq3o1bC9jyTHnOlRc48MHb14/fXt++07sB65Ce3ryI5/6sceXjzMOyBTHOGaxlEuRgDaOAwSgqmmaFgtETbuxO10sj+Z0kaXbaylFoQydjfu8WLcnqxmobvvuwduPbp6drtcn+8ttCPUrL9zYpq2f3RuvNeZYtyiarEs2oEvB1Mw5IbJQdyWBEjkE6R3HipxKHrKZsETXuNmPvfSxuH9W1YHIv3j7rmlxWGCUROPV5SWU7APGNKR9p7m0R5UPfLSon110Aq4Msp6vYornzx4GV9XNLEJGRodYjNC0Qmb126Hfd7s7N07r0+aNp29cW3YuCJABBN8QEIohIiMXM0QTiaGpOHAchxx78zZ37ku/+csxxddf/9TP//jfKP/l/Lu//8vVMvl58+ab91fr9cmN25cX57suvnjrbDavdpv9KDtVqJCDq4lcnui3kod+S/0Xjz66HGC+T4Vs1DSoEjinriJVKT6j5LhRibNqhkSI1LZzpKD50Pdx1Cz/8//s7/7H/9v/qN8lSbqYOcxxG3sv1R995aGj8PY3v+3q5nRJp2enT949j/11zPtPffqDr947++0vvkOzdn3n+CMfOv3Dr77ztT/44idff3+p6q/80fd3MfdFj5cnu6snnq9/8kPHl937v/CvHsmuKvt2WUsFbn95wW3lgtsNO+/DYLwzf75Y7ZazxgkZoKEndmQQRwXEymXSLI4H4XGju8vl7IigrgrNfUDockXEvvaVjBvgrBhmN842Ta1i49XA4Jm9YBEG8ohqTOyd35fOuI5DqdPbsze+9kp/zJmKFXOmqAzKZkTqgE3NESlAluTJoRJjJUD7sfjVIte1HJ3ceOkujOcF8tjtQ7tkDuaZqR63j074BSI3JohagCvnCMg4NK6qLWVTdZ6oqaREVSP0417q1lhzoP5i82Q1OwLfcNWWEhdLv7nODs0HlkxmMO7GG/P5D+hJilnYuVAzoGcehqQe6kUluptjGC0E12TrouYJZsKMoWqvhz0hFUOY1/1u5ByC9/31IADtrOr2XVRzwZEIiMZh5xRkzBkS+lBXIY2dV89kHoidt5yq4LRAzkAx3zm+rZfdoqqGvaaST2/e6S4eBcw6C6WMq/miN+32e0CQmYnBsBvnCKb2+kde++pXvnbn5FaAZMRGsL/sThdnTdM0NQHLs2cPS5fa2apwmd2coVNIbnO+q49WJUuo3PpkNe53nq1dLvbdrm78Ytkmw8urnWXM6fkmmTlUFejUKkqCjOjwYOU3RHpewwE+OHIeKQbnRcomdciuCaezZv3g8ZuuIhe4L+ns5unjJw+W69sPnj5dzOZFmbSYWjKrfPPsUuf1zbOj9s7N5fnjy+uuz4jBcQD27NtFE5rzHzx8yIFramMcwLAm9uQIQDWHqt3mqMUCYnDeO4dghMpICmrkkVIpGRDGErPgPLQhcIpRQaumLqYKmrQgNtw0WfaKGsehcXPpigExt1XVKOQ0xsr72aKRcfOjH3t9edR87o0vsteYSi59E2pFqplNSsmZEbidV80qWLhz4+xsvfjud94Y48DmxlIKJDdtAhwik5VMHMiUkUzURIAbSf3lZvfi2YurxpV+V/o8mx2nHNUjezYzEADnwThJRFJQCVSB2uGvUM2SBRCYgACdGqlimHqATBhBEZRIgQyUAU0tqmWzQBiAkIlyHhGgWEYUK6KCCpRL9uSdq1WIAqokJhEDMxDpwbRAZsiGAFQBMTpvoAzBM8RcmByjgCgSeleJkUH0zo9phwBoXNR4coFPI2U1MVaDXAqgVxEBBSItUlTrUBHHQGigqtE1q77bQ1EVjUipKHRETMx+SF2fO1cHhGaAtLX00msfSW91T682GkliFEtG5n112BGRIFo1m9fzU144NxWS7EiL2T7/zKd+8n/wC3+dKxrHknf2+Af3r59dzdt2ebJul3W0YoIWkwMyRe9aZu8CpzwYl34/DF00c6hEYlPA00wlq6FToe31pq6aqpmbJQQic7GPQ99B8XVT7ceNAHisQQnECGDeNtth4ODY1zw78XXAOgJjSikXqdh5Ql+HHFPZD45s1s66NPi5XzRV21187+uf05xf+ujHr0QSUz0
Li7OV+GrcROqvqOvGy0stg6Fnt1qcnUbvHl+cI1iNralz8yMT8PWylBRB0LOkwgqaTRXML+vZsco+jdlX7BxqSpgVnsvNdighn4bXSkQKNvFiVY2JpjpnIAJV52gqzyYmtglTivP57NZiFnfPzJ2goq+BFp5OGwHwXAWYARIFoBq4akxENGkp5HhxPKcSDToj2AGOfPLOm5uk2/e/0M5X9eX5hj3bcP3zP/6jD54+/dK3v+aNkUxKIQNkB4pS1JyiSUxjCN4AXVOjD9g07mQtnguh7kvZZl80DimlEjyDWukjckB2iS0sq9nNFTmXFAqQNJRRmir4ipkJVWEcdlfbPBZ2LKAyDi4lTuIgIIDPrErqvbkavQM2T6zjmJRHQFu0zL7EpP2I++gMgRjM1MxXja8rAdAoJsW0jFmiKqZM8h63hQBMpKhEZG82oBGbicZSNmSjlMjZUOuiGYEBwVlDuPDUEgEDsK9yHo0UoDcxpmBRHQsglBxH3HZ7p5rSeK1QQTaCQlTMEiLW9aKqj5bru+vVbVwcBVcB0QSaVdCJkDIZlZAm7AtNkBQ4OBkmYQHfO/P8SZ3ovSDSdMzGQ2m3PdeJptyQIdDUcm1SmMI4yuc/97tf+t3PepJuN0ouqMiO9/vh6Gj9vhdfWMxrx8hAddtKUUIgwKyYNId6jmDEzgxNGc0R1ilF0xiIyZUx94F8yqlIMUMfZiKxyBg8axq9AySXVNEvpF4rNYDeVZWv6hBqZmbHE7h3SlIz8CG1KqAmzGHC94KhYzdRd0yVA5UsauiIwcAR5TICKjOZZjACxOeHW3YBwTFrfevuixLL1aPvNq47WTVaUlYl4FQ0OEIDxABW+mEEsqw4b6p+t3HzZdU0Jebt1f54VaPT3dWzPO4H2C9f/kiZMXuniNNlI2R7rug9dw/BIZRqgEDPcVQ0OX3MQKe8loiUolP+QRXMEIymJYDP44lwMJSY2kR8NsSJ1Wwmk9X3ORUd4GBOMVNFk4N8ghOIX/CwyOggZB0ULSac7DPwXE6yCVV0WFVMCKQCYKRlEssPktKhl0uBkRBITGySzmFCfgNNwB0zIjflxdQyszOQ51qoAgIDmVkpwuwmns4kjU6qPRhMLWWTjQcMmByYmZYpe4VTempieBmC6bSwYILpmCERAoGZqDKxmSETApnogRxPzMQAND3jCR1MLWtgaEhGajplDg1lSkxaUZ7GtQBIGOpAVLhuKsYf+/Br//SLXwjFnl48efHeWoqoTnRsmt4bk8+5mImCIuFU4qaiZoZAWgyJTMokKzrvwaxIUTA7+BEnwOMhGWYiKogIpgpEMHGywOl0OcSQEInpOafJJhy6IfyJ9WAqiOgONisFE0Ojgz+IVGWCJU3ghMk/R8iEJJIPdS5ETFSKAlhJBZGsFGUjdgg8rShVcc6DoZmoKCIhmpqy84ZkhloKMCFMuG8yAyIWm/KDyYiYGQAQXZECZKBCDpG4lPwcVfQe1hokePJH6+1+s1wdhdlxVM+WCriqrut6FlPyVTVrK+lGz00eM4pTS86VWVtS6aiIq8DQhkGrCrHFRJlAqwaxYMXLEm9/9NN/5sWX71xvN1QycfC+Xs/O2sV8K09CTchuu780HE2pZHGhHJ0ud9stqm53V0Cjc27c7W+e3uybqCT73WgiswDDZlO3TfD47OGbVxdv12eLdj579+Lpullvdr1lnNdHw67M6sXpzYVun7z53V/9d/7yX/2P/0//h2aJuYCAbPN2GAdPfLY+7YduVLRZ9ezy8fFL91TJoLjKvfrhD/5Ud/0vP/vfuao+v9i2riECxUkqpjEPzaxioliMwkyT7ccCmpxzr3/gpXefPDLyuGqvnj3bX+6X7SKE9oMvfvzNt97cDro4dcsbR/t9B5B317vsZ5ZPaJSx7JPEsk82so+spQArMXoyMErig1lRGbJYdwXkGHzNM6oXn3ztYy3bvh9P5ifrk7NuiG3rwyIEV6UYi4yLma+bNlthRBecmKDgdjd2/e5ovfa+3vfXKcXgK0E+31+zYxErOcc01FVom+Z610WV47OT2ap9+8mjCBpmMwN04Bg9Oo/ggBXQyJRUFNVTrVkVc1V7Zre73vRjWZ8uH775tYfff2P3C7/4o3/p3/zAJz7+y//4/7Ufv2fG7771AMKsGL7z7uPLq83tk9tnq0XTpu0YRRSSgWoRk2IlKaiz/bvl3d/j1evJ1pZ2ZnsXqB+0gGdF9JVSVTQ7hw5DHDv2MymjdzU+T2P+e3/7P/hP/rP/+7cu4tF66URq9E+v97qyc5UzqYZc760MV72fheo65qTLqr5795XHj672ec9t2CT85S9860ruMNunX3/fk/PNajY/PVo8fPthtvr6ughuz265z332h+88PLnanZ4c3a7ClasvdvsevQvzVgQGB+16vfW+Iy1tMw8usFkqWJCztJ7GYd/MX5ofv7TpH2SLK4olbQzyvGIfo1286b3fXj2bnd2qMl4/fmt5Vs9Wp7w4Ml8oXjahUh3RnbiqKbBVj8ymOhaFYlzqGiS48wezd/7gA2XZKkOFWcQcGAAZBCAzQkRjzFkAiQFBJuoaYEXiHC798s767afPbu+uCWIkx772zGaSY3bOLVd3ShFlpdA4XxHTsNlzKkI+gpEjNQ3OAVnJ0YFpsXZ1K4HHKnBNadwhVYankbyBUmh9W7rzS9ewFioFuPIKcuNo/s6ziOiMsBQBxGQgBS93+dbROrjw1oNNGaBZO/TAtc9Fco7A7FxlkndpvPnCjQfbzhcRhKolBhQU53mIqWqC5AhRxjjWrtWcBMRXThoHxQBkjD07KrmwCSlSyl7pxvFZXfHi7q0Hbz4slV+dnvgaS4zdOLjZbLG4oTIC9K4GF0KSQuSbys3bWem6J/ffBSVu2y51LxyvNhebGCHM5sBe8zBsh8r50LRXV1sdU3V2ox97ScPZ6jgg124mKe+7aAUR4PL84uZq3vedEceSl+tG+65dhPemqCVnx448GWAuEWnyqysTT87bKWSmZkSuqpsYJefkOKRctPQJY6IOrRr3PRWTEJerRRq3q0WtIEgBQAOTMVLWYejP7pw4B5tdd77Zcwi7NKrS8eLo4uIClW+d3t3F4frZBbKzyjGz9LnvhuAYQTvdjxoX7cqj80gg5CpWhZKn7VWXch+qQIaj5HnV1o48kaBD0CjJOQeKc25LKkSUctfO5rEbVu1622dkodorArEWzB5Lf739zMf+1J17r/yLz//u9XiNQct+zJo9zxXAe9p1QxN8EvGhbtjfODlZrpZPri9213sOULIEYjPvvVPAgmoaK0QCk1ImtCEixmHc74aT9mxGzaJCP+ao0VdVKsIUnONStKTDyAgAFMyhMwHvAqAUUPauiPkQpAgqOKiQOKbRY0BAQMfscx5VikEiDEpIpA5qBCdlNNComT2oiAEiOwRQtcCtOqv8YugujcyRi0lBFYCJW9UMlpx3BsCOALBoQXYgxZAVFInJKOZuHo5EsogITNO8wNyCJgNgDgBiYAoy5eV9cFKSToAHGwC8KGXRoqKiFVVD7JFQs3
a2M1DIFnxrPvRlw1aCYZSI5rJBHPq6JUM631+Xan737isX5182zE3tRUFN0/OxQdcP9bydz9p+zPWyKTmSZ3K1jucvrW7/4l/+6w5nMZa8i1f3n2yfPVot1u2Nk3bZGiECiwoz+yZwqKSYZUmQYomx63fb0TjMZ60PJDlZyobIaFVV5YK7fkcMRzfXormIEHkw6Lb7pl2goxg7AK3q1lUBQdms23e7/fb0xbtUV75uJMLm8qqe+ymW0TaN8yGnEcyGcaiquhR5urmuPC5qrvP1t7/wazGm+SvvSwUaMiVUoNzOU9YC5Wjmd0823WbjZ3NTn4H2RNw0vmnnixVFl0YpYCKDmRGqFBOBJjgQdYtFs5gDcrzuiHm1bMVyaGG1XBAaGykcMBNTbTEeSDRASIgmJoeSYUM1QASHhiIEwN4RU8kmSqHypjKvKhn3BmNYL6ujxuZMC/LoUMA5j8SGEbhAEYjJM4uREhXjsF7rVecCaUco9Qsf/vjTd/7wG99/+OprN5iojFpw2N5/9xd+/Eef3n/ww/NzYlBGLYJg7awRBCJCp6EO7aIGR2G5pHpOoeEqgEiKI3llR+NmhKSSNGZhxy4EHwJ5HqnwqtaWgaAK7ECTxVAt1VURVS2XrpTtDs2LeWACwqKQVRGNEYioiNhUwCNSabEEhGgpZiFuWzTI4ziihaLOKKDrhg1KdKHCgMVxRGUyzFnzGKMManXwzSw8F4pQVU0KlsEIAhuBobFIFNtn7RRLNjAtqmogBCYa2Y1iAxg7KIomZCiiYMUymDnvwARMigKlsd9f5nhlgAUQgUEzOiBUYuKdI9es4otRXlV7ZdWe1KFREUBERoRDWRrBVJ8INHkZ8L0gyyE7dqgze55Wg8PXDw6HP+lA+hP2okkssoN6ZGLAqegffPGLX/q93/FBwIyJ6sWs5JzieOvm+rX3vVzVwVfOzJwLAhNFl8xQsSB71zQE4BDH2DN55NpKckjMAUwdIpBjcEkH7xiQFCwXQSQ1YMeoaqZVWEE4i/5IyYXgq8pXlecJxcNUSnFIAFoFX/mQUvHOAZmhOvKIKKoTlxkAVRTMNBdPDh2jKSgAoGMvpoisoEYGCFqkaEbwilqFUFJGtTt37xLA40ffNqabR0Hz6IP3VSgpW06M6JmdC1CkxH4knS1n1/ttg/PlrA3IUXpsQrvi0AQ3anf9lNsXGRGJ8b3arefs6ukiHExg9LzAbHKJTMf86RKiHH7X1G8qxdRgKk0XMRXVKdJFCJOV5iA8qU41amqmOIGFRP7YV2ZgKkjTRQEzmIjrSPycImRqZTIqGRyoRKIAoIQ8JR7BbMoTmRRkmsA3gAZkAAQiSARwMNISkkwOLztk7ezAwHnu2Jl8lQCiCniouyJE0WnBCBob2uGrz312UyHJQXYiNgTGSWB6zy0FEwB+WvSTb8tMzCaBw0opz7lgOslliMDIiEDAWTIRE5Oo4qQvmankQ+/clN4znUA1RYvaFFKlyRSDzJoLE4Mj06kNQysUSWkxb9pILVMH9PX7Tz/1yQ8DKDFNbWUKOl0kQyOk6Z8Hm+FEwJ68yYiTtRkJAWyihisATV4vTczOwMzI0Gz6I0RqU0yTD8oO0iSTmdnE+QabdsI8md8Iscik2ZmomSkzATEAELIBSsk0xTGm0SACMpmgmE4S5PTcIGQzM9AiakgINGVLVSZDGU0rHJmYWVSZSUpm8mhYUnLBqSkioSkyASIiA5JImuBEk2BkViZ0lJRyMEIxC4qZlZKn2+G/JxWFqh76okaumQvWooGtpLRBBKIauQphbhxjyXW7tgwAhQg9aSxXplfD0KuBGCQBZUtYxM+t4l3e1BiStVfn/sc+9bM//aM/1fXXlWdAO14vhpyKpaF/imxttQRNO+kc4/LoVnx8XnTMFoopG6SxOKpmiyaWcrG7vn/+9MOvfLT0Xb+9mt8+olVTV9ztdrfu3Ozz8O3vfL8+v/ToNttdGrMWvdzsX33x5st36hDi6oMv/jd//+/9nf/L3/yLf+lvfuG3/5/tanV1te/jELM089kwxmy23cd51fz2b372hTuv1ienBYoQtcv1j/7knyEt3//h9yyFq91mPm8zDBQwOAcxB+CUx8vNfrVacXDdOHhmjGVwGwCRlN59sJvNw3K9Gja7Lg37Z4l83Q3x0f0nt164FeYlGHiJlvFDt17YLl/58je/6nkWeCyMKh1ACoFCSw4NJAvAWMRhjSJlLE3rhMLY05/+kY9/5OXbCrJa3oUMQ0xmsK4WUYZU0sXFk6PFTV/BftyEuup2mzu3Xt50Mfbj7mJ/enJz5sPl/tm8bUrKBaHIaKqCIWfBXCyzcrjoL4zLBz78ytW2/+6Dtw0R2OekHFxVNaATuGt6NhV2ftpeOQwQiPJoAUt2zG7Yb8YU1XReLb/02d/r/5T/0Ic/+m/8T//XX/7l/2rz6HvXz97ttlcnZzeOV6sC9W/+3hduHc1ef9+9xfFyHC/HmDx3Yb6an6xNEJgkPilXD+bN2fH65Frb7XbEYWuWi0nlK2++GJmpskPPddUgo2IZ87AIh0nyL/2LX/3eH755dnocgts83m9SfjZc3T2pf/Znf/wLv/br+97Nl6t777v7+Om1dOOPvbK6t5599JMfOD+//1uf/dbZ6Y1xCzt1bz8aXjoKT+4/fO0Tn/j1X/1cbFfrpn73ahhy/6d/6uVvv/nWdXcr75cnrg0MfDKjNEDbFFldOc8BM9cbKVtF34SKWcfBBWL0IihjBNTYvdumPvZPKieV5s3l9frWS2nVXGlaa7+/fkLkoIRyvpX5DVvcGpxPXZH+QY0Vz10a97nsvKu8r9k7Y1NDdjNPYLG37bnrrm9cv3VTfFBLORoSgBIAgCoqEKtRkcJGnhEJijKjdyHspE+NUOOstixpXi9ZietGSFMeKkuo4AITMHO1G67q+bFrTkqWhhGwRp4hUIZcOdaxc94rmrESuziMGMTVVLJ1MYW6zWUsGrFptYChQ00lJa4IKu/6ehzGHOMLN9dvP7tvwEXMijgEDGhEY+aLy/jynXq19n1Odb0cuqJACoJmVhSUCHjcXcJRs17MhiGCY9e6/dB7pJlrmLwOO02jB8/mTB0TNURdJzirfXPcX/U+MBL6JmQFDy7tZBZOP3Hvp99890sjJpo1L7zysUePvgP+OsHYns6WR6ePH180iJD1zvHZICklrec1ox6vFw/2m+5yqMA/vDhfNPzkcuC2PjlaLU+Pvvv2s+WqPT+/yN4rRqc+jzAjCjO+us6B8fJy59zsfS/f/uajx0yBarjz4s17R/N/9YU/SAhFtJmHNGKf8nQXMJJ3jGiaBZEQUFIOtQNEPBTN2MHhLGCihNjU9X6/m80X+ydPEDJ73zYrDwaZFssVQLKw77eXooY1Df2+qRahmu/ThQMXUM7qRqtyOQxGwTDFWGKfzgub8rDbFUd3z45vnDRvvfVsH8dkoKXM6wa8AwfX3eW8mtfsHQeJGdQgBCRkJyKFgWpfjSn3Y/QUmLlIAnCGVBSInHPeVyaCVrIpDCm3DXtX59wDpYzQNLOhH
9vg56sjUnv/+z79vhc/9XtvfOlidx01lZKd8yhkamBxyAqEg2m1WK5m89Nqfnt58ujy6Zv331pUvh+6JH0bVs4cGwMxWBTTrIBoRB5AFIwq7vZ7SXBytm6dWtbr/Wa5mA15R8xmioJE3jEpmOgYXJXBgShoMsnChYlECYqZI0SUEokrUAQkBYKJA4SFEA2JqRIrYgLm1IyRGCmLOO+QJ445KKkRqKrkXsELZ2SVYlrytB8DIkSnpqTgocoazQQsScrgg+cwjqNzKCUjucYvS4lEIFIQKwIsOSLZNOhG4iyxomBm2ZKKIXkgSzJWXCMHhkrK4IgAjAxVtQkBUDbjSFOJmpKJljQyABWoqnrIEawMEsmh5YhmiP48y4t3zz78yU99/gv/alnVCiw5Ex6iN02opw05CRKR5GzBKNgcdv+jv/S/WB+9MI5l2I4Pv/+OdddV49c3VtTimPoZz5k8B8eVF8kgpKDZiqpUs0qhsBfOWIOVbvSeQl2JGhB1o26vdnWojk4W6Cj1I5groiZS8SIPWqT3lcsRqIALGFPe7wcwf3L7fe2yUUYRHfb7ZlZ5ZsmlrdrYx6JmKeeUj46OimUdxtW8bkHlwXe/9vu/Y1i3t+8OfXZz387nRXMC8HVjV1s3Dpj7/dPzcV942RYAWq3djdvzo3U3Xm/SNYwCBRpuJXWzRds2oYihwLypSyl5SFISmM0aCn6mBizgkI9ma34+eaeJgkZAAATT5lsNQew9n5ExExqZSlWF4F2RYiZFwQXHyPOZOw7w6ou3X/zgS7PFStsZzFCQwABMqqoFYCkiRZwzSb0DkrJNqsKsIjnK7GixSs2szo/ffoKYZ+2tBz+4mM13H/7Aqffh8rJLOi6c/3f/+t/4z//Bf30dr5ERKRkRMboKt92+bZarWyd+0dKspdoxuZRUJMs4lm6Hplaid34YIyAUINc2zoEzYKZq1tTLRlhDhQR7rtDMGRCSiWApLPtSdimPWZIZW4GSRMyQmirmQoDKmE1FAxSBYTQ5hMRd1ZAkHHpnFVbetMQSU9nm3BFQaGbueC7OuRyl78brSydFEaRIqettdzggsPOiaChqKjZGEeYRgAr1al2hUUALOzEBELUJEJIyXIMqoWdLBftso0AyMCRFTAiKYDSVQpmlPIoVmSyQ6EzVAMgBArBjhH0scRj2w5huHr1v1Z4EXxsiECgoIRFzYOeZiQgOjBgynAJA+B7L9tBSfYCLHMJozz1FBz3gIDIdFMuJ7kGAYFYASaX80R/+/je+/Pn1kve96zZdEyrH1Ek6u3H2wq2bi2VLPHXYNKagIL6prZRdv/fsgws6FagB1tXClEvJhDmwd5OIz6QqMffIvvIeQHLJFJwpkBQVQPQZzfkFu9aAyYVQ1yG44B0AA5iUUlU1kHdgqpKlTBVDgJhLAkYDcM4hsB6sMTiVfiCASg6uIprkDCScyiMoHyrSPSOJ6HQoZ3Oixftwdu9en4c3L97BUJ/Vs6EfQl0pgkfSUmQK3ACuFsfX/RVhVYdahihc2Pkhud3FdjlrwgwqGJolDARWjBwgKNJEKTKdfigcDA3wHlZq8t8g8aTsTV+bLq2amIlqscO1h+lwqnbo5TATADn8lSaqopJUix4QXRPgnN9rzZtekgAUUVXZuYP8pDS5mibpbRKokAjAVIWITaf+IVAFOPSOI5JDQNViB482ABoRmsp7sUcDJWZ8D2QDJlKQGKefu4wHO4kBTplxKQBoBqaKTMTuoCmhAfJ0T00k6edKh7MDyfvQc2d4iOYRsaoeTDFmSDgF3QjJpgTThE8CkjIVpT13QyEQOjrA5gwmtrcBEU3ZN1WZzFnIAASg4JhVFcC8c1IKwAE+TUCmEpwjs3kbui7ePjs5noV7x8s3z+Nvf+PRv/0XPdoIooisaM85ZOqIRIsVIecRQWXCYxgQEDIaAEzv4fnzgJxjkpIICcghORAzKUqIBp5Z1AidiUwL7D0cNdok0xcwQ+dwqmmYMFGACEDkzAQR1BCAS8408afFiBwhqCqhTaFYQgQknfrpCafAIPAEHgI7FIXygbA2/Q81A2B2ZgCKpkVA2QVCUsnOeTDTiXUwQd2Ri+ZiQsST5QoRmSknBcjI7hDXM8t5RCQgOIQE//+w1v2Y6nYOpgJAXBl7NfHakMEcJEPcd9iNiSwzjTD5RG3ox41xHhLsRiLVQvWYkJEqGkGhjFpyDWWJafXSyY986OUPDvG6DNvZbAngST1DMRDHHkFTynHYZ8nO17vNtci+9m5zdV07vzxuLp4OSHA1bHKKrHDv6G7cdTkNN+7cQi4gZX+51zQOofru/XciWb/tIVRaMnsjcscnR1avNt315vJZ6emTn3ile/NX/+rP/vnv/H7zbHeNBWahmjnnHQfvnl4OohSVv/CNr/zphz/44GqOQAIKqG07+7HPfGYWqmdP+zTythur0NazGuJgmmM3EMjNG8e7fr9czdRhHoeFrwj43q0b7z56Z9H4fb8dK/UIuRTsh1dfevnN++88fXoes9x7YckgL71469mz7mq8vn32odvHL15un23LU9Wh8qpkdYV1xd1+J0UQ0TnzBA5B1UgCperG7PT9N05i1908W965ffeHP3h3fjwLjRvyMHT74AOZj+O2G/o8xBvve8W43Vxunl2fr9az07MjMHl2flHX1O/jft+R9wrC3pPzIgIOHXGBMp83J6fH2657+OgphaCeFLB2AcmzC2iMxIqHukhiRhVVBTEVrcIs5kS+DogmMgzDxbMtnPBR23z5s//f+by9fXL6Ez/zF974/LJx+t3tG/vNg6L6gQ/91Kc/+emnj9/95vfefuH2ya1b7eVwdff2q+BrqebLs/f7mM7f6vrr7xm+sT4909ldLZT6GDUSaxw3ak1VLcSyc0yOxCSmUbGfL1ZicboLsuYPfOzVi7cf0SiBEjONVInoV37/d/Kuu318a0z02q0XH757Puay7fjk9Vtnd1Zvfu8rHvuPvHZ69a3rPsleVr/75YeE7s3f+sq911754eOLo9PZ064/e+HWl75+f7eZL/FuaEny2MeCGGBWD4kBfVFhysWEa1/n4lg8akVCpqJmWJ/culv2P0zjLizPqll48MPvrY7Obr/w2ugrq/z6ZojfeyNRmTf1qj7uO8EG3eKoPaou779d1ZRqldZjtVRd7hJc7c+LJmPwJQcglBFTz5L8OOLlKL3tkzAUBqcmmM1IJ/MQExJzBZxTQvDEVChcgtm9U3d34WrDESwjSeKkxgiCrqkMQLIwuRJTCM5SqKiSMmocM7VcLzCTdLFpWqeI2RBgyMmHOpAf+zxhPFQVmOvF8XCxXUI1EkeGXg1D7epZ0V0c+ja0CXsBhFKWlbvqIzqvJmJoah5c6iFXPuCM0/VsFsBRaFwsEUt2FddNvd+MjvG4Xtk2k8jxss5ZWbUJ3DTLMgzjOIbgHQQTrIKvfNMPW8bgy2jbrL52rqpDFW0ALYSl9S6H+Ydf/PiDB9+MYxdmTZjPz6+exmE0xPmitYwpah1cjXwdh7HQ1V4qXzWzkMb49OJqNH3xzq2v
f/s7J7dfwjj2T/bHJwvEkrrtwsHV9RabMJu3+8u+mQUvtn1wX6RzSEWpqiquwlXc9LE7auq6Xuz6Td2szu6sH1xugnBOpZ01743QcipNjapiAO28jjmTc3DYVogZTGUZgMC+QSDNuWgEsyy5amtTMgAUV0RObt4L5B4++B65oiquqsiTBuhL30kkK+wzt9V3nj0Zchqykfc5dk5FzTb7LRku2raPvXXqwB1Vi9RnCkxt5Rliipp0wYtlWDggUmR24BhUiyoBqEHOqU+9OohSqsWMmawAOle0iBUCBAEkj6CoREizeiGag/MpJiTy5D37ov2MgozxxVsvf+QDn/jCN7/2zbe/j2yIkweIiJGdi+MYgi8xK9HpanHU1sez+vLiwYOHDwCNPFsPDc+1gKIB2ph6Iqi4QlSHpKrACGIl5203nDSrwIY5QrZAtXO1mjP0PNWmkCAZSDETBEAQ57AIIBEKAbJ3VbERwJAcsRBSkeK4Ia6lJGZTKGCGE4SCqgCoWhAdki95RASzYqKkhOByjMQWfFVSCeBiulbI4BgpEKqgEIdRRkZkRpFUNDN5x5ykMAAAedcg5MpVJnAotwFFmnpKQtHsXZBiIhkw4XO8KJNDmA5+6LkGDaaCaKiKCDjxtnEUy1IygTr2RuC8H8fI6JlZVbpxSDkFXzfeZy2SiwE0HqHY04cPX7hx9rHXXv/hW98FBvb83tmgpKGQ25w/9RBzdz0jMh26y8sPlcVHb3/k/M23759H6RPmPKsXbsZd3+kITbMQkrLfkqcwa7FCKTmmxL4KwWsuu+t9jlo3LaC4in3lp5FlHMa+j1Ly+vYZz2nMA7B5H8qYBakbOtUyn9eiVs2qyruUuiwlarp185TaoCqaZdh1mIUAx7F4JnWiIBKLIVLl9sOOTBaeF6xX3/nOG//qV7E6heXJ1XVxRFwoZeO6rYMf+ijd2CZ7+IN3hlh4Nbf5rGpmbr42grS5qiC1lUscAc1zt1g2hCUENAUrKMPWuxAqFkQwX6DomCWpqoxdOuJVZTggAZhMj3QAQkIzPmQAyGCSLLEIgAGqEWLNziHJlBAADhyqpDdU/7Uf//iPfebP1M0tk5BNGl+zI8XJBWAw5S1MERhxTkCKg2mvmqBQSAGyL0OXu53Hkbo4Z16d3rnsn6VSqArNvDHUOHbzyv/in/2Z//pX/uU+x0BO2WpnBCRWu6bGqsVQq69izLUrmktF2WFiZ6q0A0gypJI9e+c9EHkGXwyLBeDA4hqwKjUNK9qYokouXAo2KYINiaT0m04F0FHJ0UqpnBcwQTIwx+wVvFnZ9a4J3ldZCnsnjgyh7Hc1midTK0lKSSkET1XFy0VuKktJdju5vKBdX0rJZQxNbWMS9/xGQKAp3SrZpHhXLO8RaTpam0opooqqB4YHgIEV052WiMQqWQHFVI2YEbEA0vMjrAqYmuYMltWATMkMCBiJWdDAKCqhpOFy6Lsxa59hvYSKKzMtGs2iJwy+aqv5op01TVuHMLULAeB7WaP3IkB/LDYcGpgmhWEC4UyGlecEZYODtYRQVACQQL/8hS98+xtfWR+1kn3qRz9fBufGsW+b+tX3vdw2jWp2zk+G2yzZhyq4xS6d+7qqfe0Qi6qgmLIRaUkOFZ2Z5WyHtnJEcj6UKTJjQqaOsBQFU2YP4L1vwC8LBR9qH0ITAhOCkSH5wIicxd4+f/jK0e2qYiQyg5gTEwNSLoUQVZEJVIWYpsSaghkIIgqYFAEzdmRoOikXoGjkkDlUUrKqQwB02IZZHLsAdu/ePZHyztPH4dZyhd5EHPvAXpGJqO+64FDQAvgyDnW1UIc5DvXCG88IKXZ9KaVpZnnYO3eRmxsKzwM6iO8pEZNRx2CqqTycr5nwgF0+HH0BwFTKZCgSLaplUpnV5D2jEYIdKFXTmVuzmk2LedIydMJUE9p7UhECABSdIEcHq9rkMMJDykxpKhubeOHPVSs8oJoPMgOSBzCzoiZICICTLmMq9hw2BAgTOehgKjGFQwQIDUysHHJvZhNi/8D2pgOr+/Dqk2nkQMtxImWSn55TBGwC2UwUIpywSkQggkAHrPfBNgRqOsHiVWWaECKgqhDB9C3YJICWjKgAIIJqAmBE9PzDORiMYLJ/IZmiSJk6vExl0j+IptQVqUJJgqiTYlKHejfIrDm60W3++p/9kf/bo99989FmGKStTEHAzEyJGQBEFdkhIhAf9CxEA2WmQ3OhTfVzbKpITGhSSrFDNnVKhBkYM0+yHRA+T0hPiiC/R0cTLROLelqcB2iUgR6kVlZTs8zIk+mNJqg2ArETzRMNCvk5P7wUMyV207cAphNzv4g49saHlW8iosoT0grJ1JAZVOAQxUZQM5QJMUVEU8EoMkkxtaImYABIhChoZirlkJBUs8kQB8+zlTqd3HHSN/+EVFQ3dR5NsoXZHHwNAuM41GFdJFc11VFFSUUAdlg6ld55zEX6cZNEr4bc51Aj1Fw7McJx5Zh1IAiNu0G5afX4p3/q51bLRvLu9GhlmRRkc3XtK1wcLQi9SLm8uKh87ZuFpAI6pNwdH92JY1HBbsglo8Oc8sAVrY5m795/OI68mC2vL5/NF26UwbtMvvrtP/yDXepF834Ymes0ipR0sm79QOvZ4t133jpb5RfvnD394flv/uo//yv//mdunN265MezbJut7ndxBgQqs6oOiOh80dU/+Af/1f/mf3VrcfMlQ8MQUoxawu2bL3/8tev3/w9/9J/8y3/cX133m53TcVbxbtgv2wCQGDKb5th7sTqEze565dZN7btUbh2fdt229B0T1wbv3P8OgKsW9dPN5e07K27ItX5udP+7765PH7/88XV1xe98+VpITGNTeQdGUjyakjISs1ZezWwxP0ud3Ltx86c/+ZPBsKkqGdPbP3zmwDlVy2K5ELkqtKmy4EvFi6P1C9958+0+x5NqfnJyNDuut5edZw4MHFy/H8QSUUUKqkCgU1fq5fbaUD74vnvfe/To2fbK0SKQL2bofN0uJo0b0ZELCGimcrjnwB1KFI3Z167u+k3lWw0FgfwN7vcDXz6TDP/qV37p0z/x05/8sU+79uTJD++dvP9D3/3DL/bXV88e/bBaH909eu1bX/76LuFRDuvZUV3hbn9VBZ8v3820uv3Kj1+8CWBX+2/9i1uv/tlmdbNrTx9dOvCithVFKWZaMATmlaS9SPJaYncxW60Ogull/+jZ01dWi8eP3n3hfbcvt/GsDpfXmwaqBer72vgzv/Dnvv2DH2K5PLlx67pL//I3vv2Nr7zT97sxB9bh3/y59339j7776N3vXWzSyckdJyUN+9UifO/+vo7V/llt8kG2INCCH6iuXDPvuqRDUnTOVSZjKhC8t0I6JMmCpg5JVAS0AAz2xK6fLXg57ss27Wn9Yl83Zqg5dZLt6aB78NUJqI1dnwS6fnChuXp6yVUNjoeSqVjROOaMLnhyOgzsyeWCYgRmMG9XLbqChLLpUi0LiHnY1b6iHDVFx74AQSmOseQYqnlEDuuTrcO4ovn7jm1el5x8FWfIF999QHmr7RJIQ1M
BuX2/Pz16Ybd/OF/MlGaWAGxXExNqslEs+bZFsjx2s1nT91dNda/iqt898U0TS645YCWA7BY43n9K5oC9+RaqhcSrqm4ojVm7Jpydx2ccyCTfubk8f/uRA+dccAyljL7y45j3EZ88Ti+/9PLvf+dbS4e8CLbPqITO+dqHcQAVFzBFiWPyjlLKxsoMlqMVQcMxKQFywO3+YumpQNYUyVnLjcXeAZsymJUhA7uLZ08+cOenz47q+5f57ou3ri/K3ZPlwwf3rWRRn4cdihv2A7LXpqrnIczDQiKIWBmXs+biYjufNW++86b1Q02lDmU/dLNmPm/dOMbLYdcPXbWal4LMdd/vjlfr4WLjHKqRIq5mdSQ1zWer06sn47IOd++shryFgNuh46ZOsRghP3eaxjHBEtlxKapZyFByIgwTHm8i/U0bIdOCqAexhMiS1KHJat3+vK2WzhnIRczqUELVSCOdCJkF9pLKIKlums0Y62CAUFTNtIwp9yOgAUNklAx5lxHMMdRNtVwiOPfk6gJVFYw8pq60TcOmpoDBs/MmolpC8CVFUbnuLgVt3Z4EyiB5iKX2jaqoFSLzyFKKAogURygiknMTluzCuNsgOtW08P7mrbuesvP+Y5/4xNtXT/7g+2+4miSVbDlLblxwCKUMqJLVSWhu3TpbES8C7bvdO4+e+KYeu7Hb7yAn9JWv3ZBHQkIEZ0zwvKxHNJcERJePrzDx2Y255f54sXzw6NFqccqIgFAIxtLX1BA7cFhAyXkpKlpAiTgABiMrpQ+ejSOQEy2TZ8OSoCVBAFBP1VhSYAbDrAMCqLGZEnkEZSYDMCxqDIreM08t5oZEZMQl5aqqtUjO0QHFMk6zcWeo5gwrADUTmppQUUUiACKj5ILAalbK6HwNZEUTARMZQFHMysbICDCZw8GAmVKKjOwRi4ysSSw6YFXEYmKZWCXnLnZdt10v75JjchVnY/RoKYEJ2FTXzUgCbAVyGcESaijmHlxfvPLqy7vd5q37787mDfFhY1R756DYsLv6zjmHKvpQxgziPviTn/xv/t//18sn+zgeBSWqA1QstaxO1k21vHF2e7GoV6v18Y0jKUOznoFKzS1hJbnst3vCMG95uW5TGhWIAHLOQz/mPnr2y+Ol2bi7GtpFWy1aUrraDJvN1ntazet+GEtJy8V6GHtN0dezm6cLVdExxrErY9YI82Zx/WRfz3xmi30/W86LAlZB80hJl00Lm8uv/3e/vn36tD5+MVaL0WS5ajQbVVWzWGDwooClW9Wcxm77ZONmC5w3sSK3aKNQI+yGsQms0s8qblqHAFYGIlRE9t6IqmYlxWLqsXYYSxq6imszwMohwSc/9snFr/6TrkBSfU4MBgMFRZk6EpDMJlSwIQIiG1jlGBFKzszEoXaGc8Q7x4vPfPwjn/n0T/r5WriqIDCSliRJfNuAao5RMRGYJynDFhIIcJGsWmrPphS7mPvuevMIKzg6m/lZNZe0vPOhb3x9+Oob1x99v58t2c/m+34cIX7wI/f+cv7T/+y3f0eQY6DdEANrMV00bcPeYjEbORUYB1aFACaCgGKgTYVCNbo8FjNhkazmAIFJ2fwM0Y/qcuFWDcg7IlQy04T9gJByFsCIQATOUmJRUCHv6+AQ1UwJjK04YhQZyzbMajyaa2jIsCqOTWXYACGa81yLJ1stSgja7W3Xw/WVjzHHSAUdEJXiXYjdc7g7HJCwRiiqUMw5NU1mGQzRiMSmgfrkIDBg0SkPllQMTEVJgZEYVCpHCIepjZgZsAriIcNDoGQ6cUJIDxiQgh4BJfZxU95N3Xg9u08WAA1sRIueuG1WbXO0Wt9aL85ms2VTN8F7ch6RYNLBEZ5zh+yQ7zgUMAFNwbTnx6TJivQc4oPAploQGEA+99u/84M3vn164zSmmEROTteW4/Vmsz5Zn5zdaJuaCR2GumpKkYO5gnyXtgLFcc3OSYnsnKeQCrGY0ujZFcOSlQBFRUpBYkP03pNIiglMJKuZearGnJGD80fCNXBo6tp7ZuZAjEzOezBVAzJgMU/EjDIhsYmdm2olfc7FQFUFp44ty1NmKvhq+niAEMyYGEAPLexGaGaqRgIoiDYVfjvvAtS5l7Zp773y6ne+PT667pY35iXvLRdfETBlFXTUlWgO5vNZ129VNqJepR+7aG65bNsedbvdOUDVcdxf+5sEizvIDKAGpNMDwQ4AaYDnDOfn1B+kKZo1aX5iaICmOrGK1KbKKxUtRSSxkCKRTf1RCghTfZgBqGaziYn0Xm2WHgr1nss0YMYHFwxMVWsAqAamSjyBjQ51bIAIEy58yj8yopGBqiQgQkItCsSHijGYOrumx8WkPZGITDKZAYLCQQkyQ1Q6YOunl5kUhKkMDBTsoCshARgjqk3lZZPxh03LxPFGnNpsjYEUENBKyYiMYCoywa2ZeLqv8XlLIAFNHhNHDlCRWBWn5Klz7r1sJwHjIe9JiApAJnIgfhMZ2qTXPLcdoRYlZjNyzgFQycI8fSgVAZPn/ppKdi7KX/zRD//G7337S985f3x5+cq9hYkgMKObNCwCABGbbFpizF4O3jECm8QyIGQEnmx0ADqJkgYkRZxj00JTi4IBgJVSDJGJJjI6IuaSJ3rQtEgYSUo2LQrAzuvzvZyZEHlTVlFiNikGxDCl+JWmPbRNFgCgqaFveuwgSh6c9w45p4RIimKmgAKGE2sJyBPiVGcmUsCEnSPCKUlJMGl4ExebpiT/pH97dKIAKoqihgSMCEQOCVQEDIkJkclRTmnaCIkUZvffk4r6YWAI5H1MGYo5QDSLQ5eLpmHMiZBmwdcpdmMaUt6QA0fIamU7eGFWMIT9UJz3yDCOabGeUalKCpAWn/j4Tx0dNWN3FbwbUxy6fLI+HrqyWjQlyTDsgSlU3gVvSK7iIebFepnEctEoo0Uk9ItFlXbZDC6vu5unNyuqYslYYPf0wod0+8XF7/3BGwOWzHT1ZIdA8wUfNbMn1z/42U/93Ms3Xvzq99+5e+vn3z3/o+2o7WL9dH9x/93f/dv/0f/+//HPPvulX/57FVeLVeVClccxFSUfUKxd3S6tfekrX/rMj2NYvWTJkFA9vfDBl31N3/r+N773/S+v/TonKyWlvmTCptFhGMY49OelcqEi2417YN7uYz/CNsZO0nrRakqhDlzx5dX25PSIg3gpb99/68bp6uhofffWLZDw7NmluhIvr1zcguimH5vVAhm3285Vs3rGrGnTXQeCGmsY3dy1H7/94u1ZvTu/3PXj989/ePuFj3/wtQ+CAyLbdNfEzgBUNRWtmd958r3vfveP/o2/9Df21zsz6DZRh1It66iy214xBx8aRnY+9MNusvHGkpn1xfe//M7jB1fdppmtslaCxM5VzczMTTY5dpUiGKCIIHvnvOU0jXNMTVRNtZ3NSho51GrGKlzpbjcsjxeb7aPPffaXqQ4feOWjt19+Hdrj/ro8/uFXhstrLCVRNT89Ue/3A99bHm/Or5dnJ4Hy5t3vY5iXxZGbnaDk62ffCf7r1WkZ3Ulz9PIwXPuWA2UrZb68I3
icjBGh8oMDMS3e19NdMLI4ol/8K3/rn/7ur33vzW9hYNFC9XxXXC81dt1L1/cf7u9fxtQWymJ+tv7Db799duc0hebX//DN12XWzhc3VF95/f2/9htfvXvjlQdvP73YlLo5ghC++d3u7O4LY6ahZDZgxTFHJO/rWQQvqujBcwUiJtiE2hAQyXwoxZAdgIsliZ60N17IXFvTFFFXhVQGkLw8nmu/Rzejlmq07vLSh9krx3cvuqfY4PxkPsYuAYhAEanRgZllMcfkgggiB+8Ck7Nh8NS081lJD4xVU6qWbQFDIA+83w5t27oKchqpcv5kXubz88bLnaN64ZxjdqGLIKCZUTB7poQGgFm0ZleFmREX2aeC6qpxt63YReHF/MjngkjsZ6KdSSrgq2YhRZP0RhDHLMYxkZtVl/vuJNSunqVEhhVVmIwrP1PcIDGYuKoKs8UQN+jpaBVa5pgycXXo0xFF1WL26HITGmlDTYYYwDUeY1Gy7bAtTmKfZk3liU1pG3NhcFUoUvKYJQk3gZtmt9t6x6G908WChFWoYulzNlIdZUAEBKx8TY5n7Y0XTm6++/QtIMiZVeTxo/tj7hLgqzc/sHn6Zh14vgj3n+2a+azxi2LGjpXMte3Vdry8Hk5vNkyz1aLpd8m3bnZ0rFV4cj1oUSRaztd+djzuL4rYrTs3kdWupECDVeUqeeXFe1/5/rePjhfo52nws7YGkScPnj1+chVC4FmVVac+nekuCHVVsjh2yBhTBIJiMsWlp62MqZnolLdHJHTsgA0wCSAiGldu5oCTDNfnF7Wri4mmbMi73W6xqHI2VQSRIlkYlZEMmLmPA5qxJwNThtBU3a4bRKzk4IIRAfiT1SKpnT99XMS48cBWt81UqavEBmhqmoonwRJJ4WS98FW92exEBRmcYwFRNWZWA0dtlN7AjCh4tpjaplEzLcLsPbEDvhGa2lfFli/cfenh+cWvfO43m+VslBTLEKraRIlJtDiPItCDnJzcWFdtLZqLPjq/yKqpH7QIOyekUW3yZZnmmj0ImCkQjnEkBQCQUiTr6fyExuRRry+vEciHEKUIGDkXgjdQRK82gJlnjyzTsB7URAeFTI5LiWgCkkDVprZYRO85oxpCKj1AKSCAaGimBZQYvZoWiQ60SHZMiL6QKZiZEHEuw1SqGphKSWSOEQjUk6NJiNEkJbP37DyoFEli4sxpEUMAlKLFITB5Bp786s+BFCIqMokDxMROy+iYRZKqTOAGRyE4LGIAJXAzjjH4ahjMe/FEDoBNNPdmQYqR45h7KRk9g2Mj3qe+4loKiBTPPmdkB4zcxfGth09efe3D3VAunz2k58ai4LwKDrtRkshgGUYAcgaf+8o3VzWfHp3dvqX7x+cGrp6dpDqwg1HiO2//QHIMVXty87avFvW8Xs3mrlkUrPbjUHLXVuFouXz08NFi1TCbkzikGNrGBWdWxtyPOxglu9rnTkuS7f4q1M452HW7etbMZo1abOaNZuddyCUp4PX55XqxrtqmXjf9vmuPWl/5LLFuamU3DkmG3HBY1L6//+a3PveruRvroxtXGZEzNSwm6Gh+vAyLWdRStvt0de5K9+ThI6xrnddbz8WRN2grPwvVYtYWiVzPXE0ABaRUVYWV46YlIS3sqYVx4NnKVTPZdlW1rIFSzIJKalXhe/PT82fPMgIzTKT8CcXB7KZxt6MJqkIEKAqIsGibJlDXDzVXDpwf0wvr2S/+az/3gY991LdVTsCsGXLRzMGhc5ML4WBGkHEcOigpd9FhNaE2Rhv7fSmjkWvbG3epltoB9kmud10/Hq/uPbl+uOryvbqUUohqQLDd5pMffm3YXf7uV7+iAuoYmEh4e7mNfZRcci6YjREAkVrvm1DQtPK0aNSzUyixmJgUnc0a01wvG15X1iIFC3U1KAKhmhFKEZUYXcrOsI/RVYwNG7oaw7gbJRdyDsWcI3IMpMycx4EcVfMZNi2d3BQfxl3ndJ9T5jDReJmqwKtlj0ij1imOTx/ROErOuR8cBdWSS8b6+dlyKjACIEQxEIFCJlo8E1hlCtMpxECnKDLopPKQAhiomPI0cwdDIyA0sckabIBFQQxUZALLKDKYkmFRRGNmBgUElEyGAEQqvZR3u91D54KqkANGI8TrTe19s9jeWS9fODq6O2/Xy9m8rmrnPPnJBYATLHaKeUzleQe94XCofh5rem45UgObavgUK4e/81u/9ea3vnl848hX1eX5ddNUQ85FdL6c3bl9q64bMzUB51xRM2BCFhFmILR5vVJDBVPkqVEglwEFkGzSRB0xEKkJADFTEQOjUWKZIDZqqKhEIcywOZUwB+YQvHPApEQT8ISYGJAZoK7oo8tXQEvRYmouVGb4vPjJ2CGxR0QRZSQmzpJLLqX0SBh8ADQimoIzNBkfiBAO0F72XlRMExGLGhDM5/Myjoj08u179++/+eCqu7N0jiyWqEkMqalaZMoax7QnJpEMhK4JUiyPGYDY8/HJSR4H72hBFvf3zXPBI8SKaEoPHdq8JvvMZICB55dqkvYmn6qYwlSgeQievRe6UpWikrWwIqnx5BibSFRopoA6RQ3fSycaPrfbHH6p6nMn2sEaIyrE9BwZhEBkKvDc0UOHplg0RDM4VKSpohkATVVvBiiqgMbEqkpTmG4SxdAAEYjQ8L0yOCZUQxVxbnKWPHdZqSGRAZgKMgOCiuJz89UkKakIIdr028wID7k9taIGCGiGxDyxuw/SgqmhAJgYghqyR0IQBTRDUJXpv+DgjNHpeUvkpo4xABPNAAeW8iEyBygmqEY8SV7K7kB3VitIrpQimkGZCELlDdQHRucAvWsoDpc//ZkPvvEkppJVCqJTEaCDz4qZEBkP6oyVUgCnto0CJs4xIYrIdF1FlBkRQESInXPOQNhNQc487WxVDQGRSXOeLGOTU3XSknSyJvFUMaZFZQJfIXuYepxUpitAzuWiRAimKjL1dRxsjwiGoCKAICIGwMymJibEbAg2ocTtUA9H7MzUplQdTBWBVLQQUhGd+OVg2QzUVLQwOzBQUyYyVdHsvDcwQp56VVULAU13NCKpipo4z6qCxA6dHFKZz6WiodvVzSp4tCiiQ99fxdQVtTj2ULL3VT9eW7nOsk9aogKngDx34IIORYvlbfZmUKFhLObM2+BmVu8H/bM/8fMffuX17eUTp7C8dbrPz5bHM5XeNyQ8U8j9uF+sl4TJGV1vr9q6QaVFU59vdmrQ1qv9sPWt7dI1gNeIuxRPj04YNKs452dhTi596UvffPDwcWQPBYOrmtXSNbOL7un/+e/+nbd+74vffeeN//B/93/84m/+0oPf/erD7b7tEqK89dXPv/az/+G/9Rf+pl68+9Uv/mYd/PUwpDHN66rrBwN1jd/14R/903+Zhf/0zx436zNFU4Wr665eHn/i0z/zX/zzf5bBwLsUbea5qJZE9Wp9se3awPO24bgPAc9u3Pzqt79fV4sFAFb2+PL6ZDbvxxiKBA7Dtr+OG/Q+iMS+cqdwdblfn9xKwL7k+2+fU9dryjPnUxcLyqgFR2iYA
LBulqGdL7jeXO9+8S//tTMOQ9et1+uze3fu9h8sg3gPxbIUq0OTU07jEIJTcM3p/Etf+/yf+7N/bX91lYQ8luCq9fHJttt0krEgKJiaQNGSEInZp2FvTJ/4iT/12S/+7uZyu16dqQbXNL4OYN77GTrKAoQcfA2gWTI7bwBkRKFFgCJZtCgUtVKxA1cxe5paGYr4ZVM01y3G3eZzv/Ev0j5//Ec+c8TVR3+qfvVTP/b93/vV3cXF+dMrysMwju8+24h0d19Yv/v4/p2bi2o2i6UUzdshzhr/ymuvXjx5Z12I8W61nF9we70bDcGFBsOCYc6EJV1niUWMApUyTnfBqzdO/mf/y//JP/wH/5/vPHiUSSXmoaTZfJVBpHJPMP/DX/vHr967+foHVn1XBnYf+PiPfOrHfuKr3/3Gs+tnxzfu/cbnv7+o7PTo6N/6+L/e9a/82q/8t69/4qPn4/b+1lwJy/aIhqOGpcdrs0bjEILv91uBEQAIiuUR2E9sNFNw7ERUh+KYCJBKcSrOsqWdaZYtesmOCFIRKfIYIEdWQKy2k8l03O8eXLTEepXHR5BBizp1NRqgFhVl5wRUGM3QcU3qaqAGC2q2nGoqFVUSI5jlgg5InFVHdVZNINXJjGt/KWk77uo7L3DrXaga79l551pMBWWYrZd59yTmuRao2xpBnZETIRlFZs1qnXp1jibvBmjKu3Ne3KiXNxXYGDSXENLl1bPlfL3rBnSraU/gvQ9VYCelv2CPhqTmwFwpqDZ5NGPT+v0ugXNQ8LWXX/rm9++DaUxmRSmgDwBoGdWAUfjq6XYW1tD4cUxMNMboPLulF1YGhqTg3S4PnlcMxMbkxUqWhME5BnJmTBgRRM352lAjkfctI4o4Rui2Vx//yM8JlPl8fn2187Iedt8SitbUq3axuT5HwjH39cx/8IPv31zv9pt95sSSg6/2l7syDCCyue7dYh0avnnvxYc/+FZbVTUexfxIAaqWU5c9jJnMSLXEVAYjLAZ3b8xnmH749B3yBBrG2K2OayR49qCv57Nr3ES0GQIRKlIz84edl+Q49qvlyX7sQxWQCUwOrSrTTywDYp4C9oQERVJOxJVzyoxZEmc3pp1IadsFKbGktvJPurFdrtgn0cwhOOFA1LpGZJAsJZcA5gOplj6PUjh1iY1QtanrlMvYxSLmawnOmUNgl9Tq+RLQEXtHzOxMsoLMZqGP3cXlpasCFhiuNnVTF9Oj9ekw7tUQxVAVwQQHIo55ALYxF1B15AwgpjzViyyqJveJa3fj9ocXt1/+h7/091wIJeYoGZ0jpMo5KaUg9FGcn50sT87qxbwYmL39+HEpBR0N/Q4tgPPEDolEs+RSoQcAUCiqzgXvMZdMRNePdwHqdXDeZEbVg2ePbty4oZjA+QNuckrmKxN670xtBDAmDyKEFEt01UykgGVHFQKbGvtKDYslxGJEBwM5B6SiJRM6diFnx+xVEzvvkKIkVDRUACsCREhEE8CAEQEqEDMRH1zWnQve1Dy6wTJRQEM5LJOCgEDGjgg5W6m8NzMmMmU1VVUzH3hmzFE7zx4BUxkYmQhTiY6IOWRIjjwSqQE5XzQrphCsS6lqGo8wxL4fOuf8kAZHPKt53/eKMpahco2MpSj4qioyRQrMMahpLklNLVVxt8eUPv7J17/+5Xi12xwOB56qWT12iqoqFrN4QkH67vljS2UOT296PmP82Kc+8r6PvDC4dnXjVj9at+1zSUNM37v/w1DNZ6sjjOZ8i8FVs9nqaN3FcnV+vTg5jtmsG32E9fp0eXQa4z60riGXc1lUVDnMKiY6a5oJveGroMWGODj2qBEALi6uQ9ME74/my+AJCMEigIQ6+IogM3EZc6TgbSiUhq99/rc373y7IofLeV+bZVifrQ3dOGyaZVWt5sCsu6F0uzmkp2/dv7rYo6+UnF+v21Cv69miqY5mFakoL8EvFCHlVDN4F8aSvGuAHBSI+wIGhlYsOuesRis6dUcY8yzMf+JTn/nGb/xKIeiKgAJP8QRgLTr5kkGFjLxzRcVUlsv5vAo5jzWG1phS+dD77v38n/n5D73+CtZe2LeLlWVfsrjARFBUZejAAMF7jxK3zkQLB52DhBTT0HWC7PyiXi2zzxosSZ9S1BFCwSbbyzduPLx4/NY7z1ZuVS3Jzzxjm6QUzj/xidcvLje//8N3CDkRqAnEPu93qIjkEI0qBwhpKK6ufOUiMRiaqq9cXVVQFJFKLr4GXnle1hkKWGBEqmCE0hVFUVLEUaH3+/OhZMLgw7JNZsYm+8FErOsRESrPtUui1DbN8Vq48ssl1d6aueVc13U22ePYrlfsGAUwZsyFN/uy3Q7dhnJWkahC3oEqO+R2lpjPTtcA3waAJJkMzdTYgEjEgIxRyRCVDk4iVVMwNRB63raoBx8GECqSTaKfKkxZD1cMiqoUnU5LqtOJ2qajFk6neAF+Pn4HB8qlJAECKURoIlhAEZF5THkf47C5vnx6+WTWnq1Xx8t2HkIIIRADExE55+rga++8cxURM0/hkT9WA2zCiiAgoKIpTNIa/t7nP/fdb3xjvV6VokO8XK/ai4vLvu9W89np6Tp4IoPDKxAhYZ+HIY/B184RIjOwqRFXDr1IHmNkg3k1j+UiyhaLIVWlZDNidkhGZSxpnLplCdk5p8pJYl2fWLVQcMy+9lXlkRl9CIiEyIfTMoCASYlEQOycZz3wuU3VHCM7VisINAVzRAAN61CnMhBObmEznCquEIgcuaKZiWUCK5OhofMVo0MkFWNidCBit269sBvy+cWbq7YOFQAMhOg45NwbYMyRzPtQ77pNW7dNU0WTyrCkMQM61BQ7dSU0bdzmFKV64SOCDN4D6nvNYgfDDzy/YlNexg5aiBnQZJB5TiKfvusJ9wOaQQtIBuJJQZma8iZ284TZmejUB64LERL/Cay1TaAZlQkKfoDzEaDSdIo3EyHCya5FxO9pStP7ATBGp1N52QTJZoeAOr20AdgkYeIkQDA7VVHJAETMCDSVuJkZkZ+YbgAIesB+/XHeSrIiEPJknYJJLIGDbxMOaHcQyQQwAaEQAIGZyIoaTwA5Vstqk6CDpsDkRdQOLWCgooRuKkEGRBG1qUnNwEzUJmQyIuBUCzYZ+8xUtRyq66Zfqs4hI4oUM8m5M2PnvYEcYEwph+DHHJkRCBoXfuTVV0J4YzEPpKxMyDQ1ndkEe5rkxQNaHJEJDIoIAKoYHsYHQjB1i00OLJ00cTMhDGYyfZJIjCpg06iAptUy0dBVZLLSqwgSISEIiKh3bKAlJ+dYcmJmM1ApRETEjsnUEIgIZPoXZFURzY7DpGg6YphIfWo0ubqKIAExqwihiSEAac7OuSnDTUSqxuQVxFQVEnMwEzNlcgRkYFNUj5kchemJDaCADoAQeaJiE07Qiec8cAMthVDfg/4fpKIyXu76K3Ssxcxw6K5T3rrgc9mBjZqJNKYS6wrGLIx1zTNHcwIKNQ1xWxMTF8NsqrM2VGBOHVrzoVc/86EPfFDK3leuourB+ZOmhrYNUfpQL3b9bhx6Yrjsuppg
VdfO+ZRTH8e2Pmp8LdKrxlJ2jhYq3oCahtEtxu3eckLTfTe2vn169ezbbz3COrRtnfeidVNS2cv2xds3/ou///fH8+t/93/8d9740tc++5u//ujNhxXjaQUffP3oYni6f/zD6uan/tpf/p9/58tfcXBZM0IVfAiuJCKfhxE95FD989/57T/4+pf/nX/vP3jpxQ/UVRDMkMT58G//1b/1S7/8D4GoxbrmnPcpxpIu0+n8pKR0cXF577jKcb/f+Zdu37zc9p7cPvfFaD8UT7BYLSBjN/ab62F1Sr7Gfb/dber1+uWc0yLUiNWr99734P7Fo2fPJspGwdLHfr1a1p4k6snqbPOkv/Hy2V/4zJ9fB06xOzpdV65S06ppZg31fVfGVM9bX7OqocOq8iK4u7z+yIc+eX29rQM0BLO23m3HyyGlEvuUZ00DZKJZcjQ1FfIel0er45OTr3zjK92+v33jbiEP5DDU7Gsij+Rocv26gAaqwoBEfhhzaIOCqJn3NbGIcFElQ7UsEp2vwaCUXGIchhQ8hYbH/bPf+pV/xPXyAx94f+up37SvfuTHvvnl35vN+/lJ/fjhfihjN4w/+P59cqZjvHGnXhzNrGkg+P3VoNGI2/3F41BzoEXglyteWrT5auaIx7x3XJOrCGaiqRv6dHU53QUXDy//0//kPz3fjFKF43V1sdkwmZAS6XyBTsRg3cxW2l+S46HoVcwf/sTdXh/tv/H4Q3derKTdXl++/6Mf+eLnf6OW8HOffm2/u8arzdM3r5euKmMxx2MZgYGUOMXMCJoMFa2UPLIROW+KKgrAik7VDExwesiqEE0mYQJ1Bh6AAMjYIyKjWiFiwajMbEqGeRQlRsJRNasykkNnogTToA5FVfnAN8FCppgsO4fe+WJJhZx35Gv0ZHW1Lb14SoZC1eJk2R43VIXTphbHyNZvOj4+CqFJeylCzsZq3lw9e3d2+yXvHRApYCzx/0fWnwXblmXnedho5pyr291p7jm3yZtNZWZlNagCUCYKPWiAMikQEAWTpmSGpDDDtCXaDjOCD7YcCvuBdjgUlh1m+NGWXiSTtEjRptkIBEkAJIBqQFSDQjWoqqzK/t6btznd7lYz5xxj+GHuWyDD+XYjzzmx9lp7rrXmP/7/+1sbDRGQR81UeyDA0GzHuJy3/eNNqGqP1UAeiMyS5MzkDKBplgBVzahZGuI8TUwoasE363Hf+JqyATFx1cyWw5iPz4+fPn0/uDbFdGvetYxTzkQshKqCCG3tSt9U3VTjdohP97gIKlNTzwTZgNA5H2i42qJoYDpyVTBLcVIRT4jsAiJmYVNSaZuqXS6erTfNbCk2okLwTNkqg5DS2dnHfDi6vnqKAE7t5upZzMkIZ0dLkbzbXqEOPjhS+N73vj5N8WR5C8SdHB0/fPDg9Px4cmncadN4WLq4i28/+I7laYhx+tDG7bpbzBazlfQ307CVMXVN+/Tx1Yuvn0Okm2tZeN5tx9a5/XZbUw1ACtpPkRC/98GzR+s+eozDjglxVHqeQHNMeZr6fiSm0tJKTIUIUNKkZoro1IyZ2RNnspic86BJxTT1p4vTB5eX53deePL0WtPU1B407W4uZycvoLMYe6rwtJ2Nm4lUgEgxV1WY4oTmgwsGvO8nyJqzaI5IwBXLkMEMJFZsy1kbc9qOE3hggEAsIoTIABHsand9ub2OU+w82JTTmNSmhOD3zgHKJHVogDAX038u1ABDMJWUJIWqQsHgwmazP5sfkWGLrvHtf/X3/zZ5BEcQYyAq0EoVI+8AYTI9ma1OZwufoki6Gfv9NDGxB6p8R+CTiarUVZVi75XYkSERIxCgqeaMWYdpurlev3H7HlECTZebdVV5DpzQwISANItznHJGmEo6nrhJJqaqOop5dOVvY44DU5OTGGQAgALpNKdJAlVggIoIHoHUxFABy49pcQoEF0qo3UTKAJfBqZJzYZr2CB4MgycFERMCGtOm9sdl2AaAIslAG+ecq3LOzjdqKpKAgCnElIgJkVAlTikwbscb8EYmYupdSHkCcD40kkbQDAeRjLJKcJVHExm5mOQ9myQEvLp80rWLBIqD5Diy887homlFzbGLmvZj39a1J5ZEqiyakw5VMwcUJtjs17Oh/tjHXvnq1/+wrAJ2znlGhjEnVa2bipAIbb2ZxkkuUnqg2pm+9ZXxlYvLj7z86urZxc12YiCzZORlGtfb9dP332zAheDqee2Ir4AJ2Dfzx9xcPr3KCiG0rvWuWzYNdrOKRgxt086b5ayrmgYsu6qaLecpZ6qb0DYMyVSHFMdx8O18MlhfjSo672okUMzAjP1u/3TX1G1dV+TdbDlbP33vD3/rnz3+/vdeeO3FJHazjd5Xq6MlmcVx7R0f3b6TkfOQ8m4MScdnV/uH19OeaVHPjo7a41uLUJ/UTelI4lChczFP6LwjYjCdsneVTqYxg3oAyjEhKiQ0JZkEVIb9ntpg6KbeXj97dWEhgzqPYEiimJVAgmMwHHNyTOg4inqy1ap9/f6Llw8eeeUmeE5y++jol/7kL7z00R+a0kSjNl1FwoqsOiAHF7ykhEqgCinHfrA8MHrZp2EdxyRcteTrup4h1wktmkCytDefA0aoPe3lst9evrQ8/vDB7s3d449/5rTndTevDCma1M7/wk/8/LtP/t7ToU9iWcQ5NDBXOyhTWWfOc/AVegLHqpDHxOaaQKNTTYnMqsbjDLRiQNAekwBknrzkQGRIRpAorQfY8Lhnrmu/asLxXBDHp1duNokMOk6EbpqyQRXOTt3ximddRT5nmaCM18GmieM46xa4mJFBUIVxG692tNuHZJYyuZBRmdEz+JQ1SybWpvHzk4OryMQADQ1VLSkYqMek5sEI0EANUREFzIAMqTi2ygAckQo5l9AQpNh0LBuCKqCqoh0yM2gIVuT7Qnq1UmBuYASKCDmZZSt3S2YkAGJf5BFVJEhRt4jjercj/yw8nYWqaaqag2NQBnXEi3bV1su2ms/mR3XThaopLuiyo4WiEBzQJCaWlcATffF3Pv+Nr37p/O5pxf7RB09V85Ptpq7o7Gh+tDruuhpQ8pR9qHJORF5EQLThloljnhw71Slr8uokRzMJXBlathE5eyIAzJazZEIPaCkPmiODN8kemYhAwAzr5tS3q0i+dt6z84SEDMSlrwgPbVzkmNVA1AoTptC8iZhMi3GAERAdITrmIh1kUbHsnCNiZCbTUuRUngtiqRR5l41tFkWgsvnkEs9TICQkMkj3X7oLlt558gDPmpkzHbbtnBTAQ+3JO0AyN69nOeVeJmCsa6cxNeRzTJKT9z5O0tS8f/owQxvuvqpA7IjxQL23ojvggepTRI/nm/8yRjQo46zy38EsZqpZNKmMVtqvkEpX3sHUUXQnKBZpIHJFRvgjTerAJzIkBARkFklIWIwbdrA74Q+MaYUepKbIpR8NiIoCZ1QcbIhIVLrJoGiTyIYHSoyZgUmBCGlB1hQatFk2KbYjInpO/lFAKqW0Jf6FRHb4y+VYDlj3EqZT1QPMunxaAARidlkEoPRkgaqhJTgk/pC
IBEysNH0DImTJjgMBC4qCWU6FkwP/6pl4fjAErGYmB6eSqjIRHM4DAFgSLcoEsjfLCkZY/NEZwNg7QHMhjEMkIM0Od9O/83OfPjsOWbVYe/gH9CYwTZGZi3MHwECVmIovEUrlhRWAkUFpr8fChCqCCJfOeiI0KN2vWt53i4KokskxGBY4VPlbZgZasnUMSAC52MjYu2mcXHBmqKqAJpJVS1GMQ0JTlUKUwtKBh4CgoCZabPKmSnTQ+0piVkwRHR7uVqyWDbTAlbKkckiqCiRmUqTHrELE5BANzExEmLkEVpJkECECETFTZl8MZYxOcioeuwOp6V+ViqbNhWdMKgrgQus4qiWEZHAzjNeeXMmA7geVOCnW0ViNADAFDKGehhGJchxNzWyO6LYjfvRjP/WzP/anLG4Idbk6ylHy7rqZ35qGYYrD6fK26Q1UGOqFJJpRuLl+HDr0HhUCWOp3gycaxh2oTcNOBWySdrnK6Oezdn31pA7IWtUz/42vfsfadj+m7bCvxcZhPHrp/I/dv/OVf/77cla98clPzhcv7C9vHr/5tReOj3a7WBGsn37YP7r+xhd/9VO//NridPmf/vX/4n/+H/30ya2X8TLuxmkc07KrXrxz/s6777qufnY9ppT/+v/xf/9/+Gv/WbM4zqPmhFUVPvMTP3ux63/nX/xTVI4Ws6qKVAh1FTbZNmLb7Fbz4+ub62LyGsftC+dnPLb99XYxX8Rkjx89CdWsmh1RBUOe0nYfSIZ+Or/1kQrs4mZ/+979n/mp9l/87heerXeIkJIumrbxFY6m2dd68pFX3vjo/Ttz7y6fXBy1S4w+jjlNWwxs3tkU58tFZpEo7DiEAAbjbpji/uzO+X67z/0WudrnIaps+6vzxSvK2wyaTVSVHagSMjwbHr5y65Vc+wEg+GOBSoh9Vfu6EyMEZPY5GzmP4JgUgCvXPLx6umxPgwtZp0NdJCGSw5zIzIEZRFECsK7tBgXHbhx7JA4eUo5f+LW/5dMv/ehP/iyx75Y/dfTKq7/z//3bY3997+X542ejpHHaXGPExero8mIIVS245nbZ5aN4I1m4O1s4b/unX+hOtq66o6Hro3KDySLG0XSqQ1hvewPxzzPJm150ZKoqcjjtJ1BirLK4PESEGBx3oXrr0WaYhiDQVu3XvvobH77z+bOz6pVz/IkfuXN6fPS733Zdu7wcHnWyffv7H2w24nd8khlypKxx3FceXM41kieQBGTA5CQB4AwRURnEDIHLexCiEhEzgR5uh2rIUOLEaEpAJZKqYGhMzBNomVMwGKiRs0yQwdSxI4CsbEqOi9LPxBYKnEMJ0SkwuANTkAIzOsd9nLhpBid2fj6/c+Ib753Pkm7yFJpQ+5D71IHe6hpbT2zIk6Yky9N2upLNxaNmdUZUO64EqAhACOrq1tgjTZIFiCCKrm9wurHdaLBZru5P7FJyaNSGFlU8qToASQXiWfkuVIsh7Wpl73226Ckbk4nmJEB5fjwnT1kwTjnU9upL97793gM0x+ARwDTlJOSbd97/4Oy0DZ6SCJFvqZYhloZiIxcJgJg5oOTKGDSTqxRyFCFCzdExk4JmSZoVCTQ2puyb7bBVQVZgcDi62/OPSpzaxqeIt1+Yv//0vXbZiGi/280a4hqOjk9u1jfX+41HrEKbBdKo17hf3r3jalxvb4Dk7GiFXXczXE1J0YXSINo0jWTo+z5KRMOmbrLE1clq2S2fff/ZcbXcXV6r2Wje1Q0SeAdNg5qQePbgu3txRFksT2E2czU7f3g3qtpZgklyDqGylHNSZjMBRCvjtjJEBlNFyjGhaN3MJKoDSFlM+Gq7E26mhJV3QMk5t1tvMRNmhIrn88V+PyZKrLbfrhdHt5UH0LEOVYzqNJBp7Qw99+Mgrt4Pg1e2lB0wkYxxrJm3260akPKinZsCmjoyIuu3uwfXz5AQDMRoNZ99uHvG5iXnq6fPbh+fhlCVtxrnnEhyjhEo5whieRzJuQoZgQTofHHSNXUF6OrwuXe/vkn71uE0RpUEgKFpsmYxk0zg3K2jk5OmojiY2XqatiLADg00iUoOlZ+STCZ5Gh0YE4lKlhzYExPGbJqJq6vLi6NmNvMEGkXFQF3bqlOx7NAzUMoTc41Ihuacj1NSHcCy40okAHo0lJQNDQQVDMA5RjBjZENGI081oVPMSAAw5Tyxo+K+z5oVgBBjjmZKhDlN7GaaBY1EcpZeQQ89GQC5vBNkYCTPHQAZmHdqIIxGyqqUNDI5EZk0et9oigbEJpCFghORih1AZHJKrDAiEmHjCPXQIutLpoMJENmxoSEpsetMclM7AWTyLbWWYZo0sRBbIy6m2HY1sAc0VNIRvHeQQNEQ8jAmx75yjSmpqgNLCI8vL+6dnf3YZz/ze1/7OgAExmk3Enrk0pKLDKoGjsE5y5D3CaPicLU/PplMaHd102+G5XyxPKrHfr9YwpP3L+qsXVtXjQ8sKhCqmUxRr7epn2aTJDPMMPV77qoBYJd0c91D026HTG1nFubzWeia1ofK19VyJd4RuMXRqjtZuVk1O7mlORPxYrl89uS6blx32vX7rWeeNXe0T3E3pe2Tt97/bdk/aKl641Ofevr0sVvUs7OjcLTyrto9W4/b69uvv87NvBepQuWmePHeB8++856OUC3n7Utn9emqrroaiDIAVlx3CqppTxIRWshiGUG8GJphvxua+dJA0RGqVq4eLq/yPpkPzgch4rCc1n3e+k+uXvj6/tFONAM7Ih/YTAFZ1HwVgHEQE8OXzm6/cHZru9k2zlXO9dfTR1555c/+e79ydu9kUgxUs7qcaJguuGp9PY8p6tijmUOSOGgUEowDJskSUYi1W4b2hDEnjGkaYcqK2TFWVsUxoUIfIzB6gztHt2T0Y//k6sl4C0xDz66VyfYmbj77kz/zk3/3N/4JVq1FUsp+HnzlwTlFoK5qGscchLyJwTBqPwZyeZoAVTErWdPOaFYLe9kIjtmJpF12rTZLN8SURx02pFeqvXpqsPJhMdNQgUE9m0FKUWyKSQF4PsNbS7hzzkdzEEvbCaYYFGuXtN/tLy9l7Lt2Do40iY1br4NBMtTsnHGrdZXyEIhmzBijxGkzDKldvnD/5fIsSCIEgAAEilpMEJDYgJEtG6JhcQ4djGEAygc/AVoxD6CaHfIWpUoNDrAVRQXQA1cVgcqmpBClxQoj6RATKjkONSgNS0wEYqBI6E3MUAFEJAqktNsg1wBMzpfOcjKr2NWhrauu65bzxXHXLmfzZVfPQlP7qiLnvOfCgbVDBxVCsK/+3u+9+/237t4+v7pe77c76yWmWFXu7NbqaNFWtTOJCFiFwES+qWKKAOCAO3+UzQzyrGqSDLXvpoRDjvNqAajAUfLkuZGUlQZCIHWqIjkZqPNOBUyh4qCKSZK6FpuTCZiInHfecXAOkRwHAFJAR47IYUELgTlnRCRZSqE7oBE5K1vjYuRAckwApmAhsCraITAlB4wyFsEAGMmIQMETJ0lqwASiwkjmFDxojoDcLhbTuA0Z795/cbcdH9xcvnY+b2cY05Am4R
ACwq6/qRo/q0LWDIr9NHjnvaNhvHLcztvVsN+N+aLt2vOT1eOLtyaamvs/aujQcfGimSkegL4AzwUJUH2OoQI10QJHBXuu/hU1BUxNsgoJsiIIoAIyYomV2eFLh4jkkLgYRgyAyJ4LSVAowmBmKkWp+YGKREBEJLlUrBkWslD5+QPuqMzXCq+HoRhtiBBJLYNpoUUXattBR9EMTMxcustM1UyZSE0RQKSYrcoOnwumqEC0ERGRoFCaAPhgmzNEfJ4kMiQuqT7JRSKR50nDw0oFRCo9G2oC2QA9O9WDZ4uATFXQ9CBwlDwgFg46IRbkOBxKUBDBkJ6js4nMVEWIuegfVCSJokkJqoqhHoScLL4+ULgd+wihnS+vv/XdH753rw11P5bzi6XPFdDAjNgRHE6CmZgqgANTx15NTI25MjURJWIrjMuSXCOfcwYwZldCfcXdyuSKZKQFayYCWNxqBd5UvjakksBQSRENyQFATslXtUkmLBdaDxFKtawZAJz3KsLsJWfVbERqRoXpCMRcgEd0YPlJcSEBEKKhgolMhkTsVXL5y+yQOOSUQEGzOF+ZCpgRHOrZ1MyxMzNHHtGlHImdqRBT6SWwAjNnKbgpRJb0/+cqqtkQckr7+dFy029VY8rPRCxDJt+wq8Es5+TRMts+pmkaqqryVb2PWSQpeVEJoSEBEm+jf/3ex3/mU5914zWSikI/Sdt0pycvAJoBnK5m43Az7DfL5WlTnT67fpbcsN7286pbT1NXz9ZZN/tpMWsM0NeL+WL57PJpRcFXzdKHPGwdG+aRJX3hi1+gihazNj26AUIH9iOf/eH23sn7X/nax9648+Ti5lf+R//bmquvfPnb2dcQYHZUo0zzk2PC9O2v/9q9z3zq7IWfA7C/8h/9tf/s//YfH6/uYbJ61iLhuw8eRuOLp70Z3jo7dZT+7j/8m3/+L/xPqrpVmJIlBvuFH//pePnsm1/9omRx7Ou6zSld31y9/sqnn37z929u+mFn5+fzy0dPfV0zu3XfI7BzrKZ1Pauaus95UJGBpfJM7fVmSrKtqsuuaW+dNP1ue143P/WJn/qbv/GrwcCVMcsUj45XaGdv3P5MV62349XuvccvvPByszrmwDklDh4g7/u+Dc2m3/raO8GYVFB85btZA30ah2G/2TYhJPBDHlzAk5OzIe3EUlaZxgQ5Vw1LTNOwq+fu059+7b/8u/9gfnw+O5oTUnAVcg3ogwvEQBQcKrLzPhAKCO6nzXc//OIn7v2Eq84PE498qBhAItVshKGaZ6fZgGTKtGdwzFXfD13lfA0i17/5j/9raNqPvPoJwlUj+Nmf/tO//c/+tvNcLZyMwceUcr/th6pq3vnw5qU3Tqd+37Rdg8fjGMes7HXRVrJ7VLc20PmF1tMIOaNzQOTEzFUBhmnYHFxF9z/10nvfebj58NJ7NGbE6v4Lp2PfP7655DbUTX19fXF3eXu2OuvXA3gjri83k68Chfx733hr3S/e2oB/Orw0v/f25369pZMpj7gfZg4W8/aDD5/UznVVRROv6gZUsSJvaoDJCxpKFhf8IU9bHuWEWSynzKWJw6DUJoJxuSmrAjOBGhkwkBlUhI6dmCKhDz4D5iigxQCOCBCcM4CctDQVawREICtV1cbkRLOauOBEFFRGFfV857MfgzaoKHrYr3sC7Lyvat96GieYbi4TmE62ftyDn1XLW7pHL5C8627dy+s9Vc3u6oaCC1UXszbN+ZAFYJIMztd1yzZcTcPF7dUtvXr7Yn1z6/U/FtFHyUwwTD0Akm8ApNxJGdRVluMAPEN1+xQhgSKbUtXM+tH6cZi3s+3lZjbvpnFYrWr3gMaoRZBXA1OqfBfd+NJLL33n+9/ipjITCDj1UztrppQIckC/HYfaL/I4tWF+vbma3zrNYICGXgVTysAQ0GEGaJsmDuM4xpCALaNq532Vqzt3Xt9uL1Gsq0Jo2s0wbvv9+ens4dNHJ2cv1pyGbCyIhkMaFXTeMpqAY3EVosvRPFS3z+5hhuv3L2IcZJK6cbPV7PJy23SzdJiH0N3zO08ePh7HSYLnegbeI1pVV4I59qMDdzw/X+8v+s3Wc/jg+iZSBEUmZK4QXco5x3R4JFRh2idVU1Vk54B/MEUBlZLZVrAyZSJfm6V+P+yHaGi+YldZynBc397v17XzUcb9blCDetFmBgJEKy/nLCF59Pupv5mGZRdUppRHQkmSnQ8EBDaaZTGNQ2KDaHHQoe9H5xkRAjGa06wE5ggJbUq9abp9fksJnrx/sdlNIPlkvgqe+2yb7dqzRwQAiTFzCIAKhgqQ8gSGXLcpT+M4Qs53b59i76iPq/PTaydvPni7WjhUsSyMlAGHnCxnNVDVZd2cLRbS7wVsL+liu4HgyJE3wphBDTSrJY9sOYfgyQCJLSURQTMG48rtpjSN0/3jY1ZJ4zTl7Bx2XUigIsY5CyAzmaEhKmomM8cgziElJaTGDIghp9E5QvMpCTrvmSRPxUmeJTnnBScjEVNiIB/UhBkdVSlFMHMctFAzTRgCgAcEtQSGjoOYUplDmokoO3C+Spp9aKc4gInkiV3H6ABFQY0U2RkkIgVgosDok2T2wSCzQ8uWbG+YRLL3TkxzngCUELJEUGN2wCwSATKzM0NT9uxjFiIk59IYRcQBqamvOfiw6/vgvWbJSbPCfD6btZzTOPZ757hpqgwavE+ikHLMCT0FDPtduqD1nRfvllUQcwqV3+3GmA2Up6TJRAXUiBHRExIGN1+ubt/56GcjuNOzs7uvLHK/A+xV87DdowVnTMJVNau7ehxGIHKNA+eJOed1pUKWG44zVgcck1Y+TjmGAEqpHzL0dTc/O647lk3/9CEh516vjT6McRonQhaJi6MlMV5frKkKJ/dvC8Dq1vm8Wzx7771486SCbZ42L3zy5X7ZDTKqRqO5WyyPz0/zen013pzePZufrMYxJkCQPDy56G92OfOAGu4c+fOjer5snQ+ixA7dnN1cbPKucaQp74iBg9tt+wzk6lDX3nBCR059vOm3u6eaJvTVOPQUSLHDJN/5Z791+c4f/tTd+6/q+devnj4a9+8N6yHlyMAqIOAcVS4sTxfnJ6fnvtbthm7ijGrb5fsvfuQX/9yfXZ6szHLlcd7M4qBixuoInWGFVIvuSNOUoomBEmIwUIFsDnxdR/W7aQd5IkyOKoJAwChxGnemSK4C5NB0TVNtd3G5WoWmubz8vqGczxLqztdzJac8vfLa7X8Xfv5v/No/7bolLVejs3bWcB3EGc86zRKzkGfb9S5FuF4rdqqmIq6hdlFTwCRCyZGY9Xl9vUlx6FZNs/LoicxbD6DsvKuOOlhVvGr3UbOaGnpmH9g6Z87XZ6d064iWS8iptpz6nY0TjwP0N3Ho07ZvAvMw5N77qkEBzQYME0X0oannk06MQFkgC+Ro43bZLZrzFx6/+/7zrXDZICuZEqGBKioAEjJgwY0IGjCYKooBoDNyZTdDpohY9peKxUAEGZ93kVkh6+rhQQIHpYiAtGzDEK1gaQ1KwsYAuRhHEKDUnqOaaQYwg6yQU
xJNIiOSMwE1Lj3mHhHhynnng6sqX1f1bL6cdaumW4Sq5ary3ntfVaEmYOeIAG7Wz56+/+7R0Vwj7De9ZE0g82V9fr46WS0kJS3XwnlDFsOcc1Jr6uA9T1mQfe28aAZTAzW0RTtjA7UkaQ+Gk+acx8ozAtQ+bPsNeDJBdE41IaGYAqKvWvanEBrHVnnvmRx7A2Ny7JyVsiciBcMyGjg4zY2cQ0RGJiIRUTF2wM5jgYWaaREhVFWknG9ipkMvHAkoABb4Y7ZkYIjoyRsqe09mChkdB6pU1ACcq2TShvwrr7zy9lvpWw8vP3638oq+qibZO2LnHaMfcoopdr7t/GKcRk8eSQEiuXq5POEREHS3HxuSzbM3rT2qz17MVhEiomIJURXd42AiKv4ZKJpkeWyVj3VQcYozTQ9pJzEhU4LiXJHC6HmeNiImQnRErhCLEfAHUtHBsIMIpd4BSA+18QSm/0pbXElPmhHknAioSCRFvSIsRCrD51Dj4oShIgAhIxV/0CGlCQejUKmBIzMpsGot7VeHujQs/yQ6KDUiucg1JWgGRKr5ABPSQwPJ4fwgAhUaNx66u0zVlBwjsVpRnkAlAaKYGIDmxEyF/k2FnQzwvDaudBA8VyU0E5KoETMhiOZylVSVEOl5TSEyAWCRlDRnRGBiM1NTx4zoAM0RBocxTeywj4M6ubXyfTIFYkQrl5uIELNkxvKqAI4AgOxgPMOcY3GhqVo5BwbiyGXNxTYkOZb0oYJQ+VBEACg/qEVEAEItNiVCAyAmlQxlRIYIoKXhLufMzICQcwy+kpxV1cAcMTn6wTfyYH1DKYdppYIXWUUUlKAcrSIaEJqWPASZZjNgZtEMCMRe7eAtUM0lPQam5LyBISMIZtXi50JiRk4axQQsIToDMzyg20SViUE1y+HBqZYI/6gH8CAVAcSYpyS7fhdVaddfRxvq5sxUszk0Nom+PrOMC87js2umLGnX99eGJjqN+wGZDV1lHLN+/BOf/dN//H8Qdw/M42K+UEAmp2knoApVN5vlPJhZSgJZL/uHy9OmIggjZYnjmKe0N8Ll6jiOGyNyyLthqEJnY95tk5NhuL6cH9Wntxa/9YXP7WXa9hqkl2EShF/4xV/81ne+/Z13f3f/4JJuLz/24kvT9km48/JHPvuj/+Ebf+W3/sHfcMir4/OHH77Jvga6+fbv/tftn7y9aF75zE/+Wz/5L9/+9lv/8PYrpxePYpqm+ew0jHEY1r7zvoZhct9/8P5/8zf+87/0H/7VcZ/GffLVbHV0/PpHXv/GH3yRfWu7fi+SJNk4PXjv6z/xQy995913H1yOI9JqcYs4pe2u3+2FyTdhGC/Nru7fr7cDvPdkcomvbvpFp6vjuWjc7R8rzFq/iPuUE7760Tc+c/2xL3/56xV6VDtdLu+e3PvJT//ikwfv32xvXn/5pev99c365uTWUd263SYjIJOOU9wPMMTx7PQusuS89xUnySnHHMdZN1utFrWjR0+eTZBn2rUYrvr3fTh25CbMyCSqCYB97UL1L770uaPj8yheCbgJvp4RBCRXRE8i55w3BCAXxcC8a44+86lfbsGR96rZgICAD3ljQPKaIyAhiQ9VGrXpVnG39oFD1ZoMahY6jtv0j//uf/kLv/hLf+ynf1amcPbKa7/yF//Xj9759r/84tevto+oXpwvT66uLsUr9vnq4ZNl62UYq0XLzu/HSaZY17XLE+yeVEFuH336UhqPE0JvSEkmtGzjIBafr4H49HITKn7x3tG7732ontfT1XA9to6bNhyt2kVzbtlv9/0Yx1XbbC56Rnt6sa5n+N3hUc7rH33th37iJ3+Ynj7bv7WYL+78+r/84taAqvDs+iqqUJZ+m478jF07DVtidC6UbgEDdgEFShMBOO9EVCUzk/cVIsaUSng+qxGgAjCBZEUGNEUiVBBRA1PJhABE2VAAfFVZTsRUsY8xE4H3VcKJ2YNYFHHMaJYko6pChsLmQATGAfOt11/R4+Woyrse9tujk6W3CQAp0+bdx7s0MrhxmEIbqG58E1xVJ9T67r1nj99F6lx328cLSSRDcs4m2Qr0CAQyec+WM5oxCZnBkGy8ePbwSeTrsxfut/Ozm6stI5sgI9ShZeQYs2YaRTx1LeeG6z4PFmPwTfSUJ3DOC46u9qFr05N11dTjlIKzW6eL3cMdIKUUc85MPE2ZKvfmw8ef+OEf+fyXvxyIuvPOLzDJ5DpnxMO4q2qPLFVgVeYUnAGQoikxEeAkCTiQd107G+I4pb6dHU0WnffW1hfX1z9652cbbtK0bpvZOG6aZfvowROG+nJtXffy3duvPnr3i622m8t+Pwx1s7i4uZIZmsmwXc/cK2984qPf/fZXk8LFzeUnXnxpiFezeeeP+Hp9mXdryKLTBKQYbpH3jy+vJwQxZjXXctU1mi14iEhJsxpOlhTVEKGmb7z3tmBQB1xVouz8gXpaVsE09MH7nJLP5uuCxhG00rRKpf6Uy0AMDA0Y0fLgnY8KbcW+rZ9dXMdBxt00Wy4UaAIAAku9QRVCN2z33kGKE5I7WcyebG/qELJCXc81ewBa1m6KCTEvm5YdXDBf7nY+OHaQ02QGKSbvg0bNcUDs2Ies9HSzNssOaFU3T64vb58uq2r24PH7bV1v9z1RXUuFEwOJsQvVPGmUbHkaCB1kFVUDSDn5ZtaF+bj3H3/hIzFKnDe/8aXfXJ6vdrvBLHliUI05GrkoKVTtvF6creY2Td7x1dDvpx4JTBJymHKuAapQZZDGVyqULKtiyhFBqhDQDFD7LJb13Xc/fHl5ctJWU78BBVMOjqFwTIhLtEPRCFWzGJoSqSEIMnvTUSyBKQJxMMOYMQKiAAE6gGyasqmr5gnMIzA5UGFwCprMQBkUEJ1HkJTQRjMQSEx+zLvgvMdW8mCGjl22bJZA0TIQqFgyAKLKOyQgSD1YNaaBCZg9WSYCgYkNLUewkA9FtgSIY+7rUA/TnjmwoRiVFg8ARbLAXrOV0lhCyJKQCYCTioOA7NFMcgp1pd6SYeUrdqGqKsta5HRTRINh2LLnpuW0NzFJJkZ+tCwS0agJobzf1z5M/Xjx+LKsAmaeUtrvB4nZASSZkDFGqV0gYmKasjo/u3v28nJxcvf+naOZBxB21bTdd6G2IIsFO3Wr1TzMOnTm6xa8B8N+2zvPi5ryNKbrNRGcrOr9Zppk39Y2a6psttvvLPaL+fFqZgF30k+rwLtdj5IDuTnKmCfL4tia9bPtpr+FjDtMT76H3q2J9wwG4MmqZVV5nC6uKMb6fHXy8ovQHkno8iZdPXjc+nZxcpoZQ1dBP67ffSc+fSrj+Ozipjm9NTs/qVerxoU2VDViW63ipGm4zpp8XfsqoIjp2Gc2B1ZVkVmmMQ37ytfDoDhEEmQXEkASreuVDvKtL/6Lm+99c+YAE9/FcHtxP581T2C3xvxMhjGNKUM3mzkOyFlF14+e1X3/mgsLrpPXj73x8Rdevj/lPg09oK2HrYkikfdO+61NouoDUZ7EUjLErDBBzgnRvAtumHpTyRNWSN6xZBMdk0bL6Nzc
NSFJ732FFB0DYUcV13Q6PdHHl9+jZrx1O3ATC6cUwL32xmt/Ztz85h98a8hMhi45X9UWWDAwG2BKY58u1umql2uR2jB4C6QVWV0nRcwGmPaX+3Q9jNvBNGfhKTuagVGektWdq5tQrWpdzoScl2SaxzHFzT710VWVOz2hO7eBHSTFccr7re22ElNCSKgpK6qkPofFwvkqj2Ot4Jr55KuAoIlVdhwHiJPLZmJpzCTUndzeXd98+OCdwyZZ7eDpKHhhNRN4Di1SRSyJIDyYGdBK/w6WPcvB+GwopoaHgqmyCdbnniEGK1VQxVV02LjogXsCYAhIhR2MhqaYTRCYEKjEbIxKXA3sADYmM0sKWuITZRNW4htpGAtOibx/4th5X4W6cVXd1FUV5l23WB2dzI4X68uL733tzYq77f4Gsg77PSCenM1eevEUzUwzs+PgDC2gZwrr/ZbIatc612RJSOyCB0BTmUZx7IMLKBlgr2mouTbDUUfPtaeqj3uJ0VNAbqIM0xQJTYGQKYkxdy5UguCQHTkm8AeQOQMJkTMEBWHisvFmAFNTBWYAk4OT34rthrImOvwUEvPBQcWASEWbQyQojjHNYlk0IlLwAbSQHZ1qAjIwzVogVojIhcPctK1MgyG+8JHXvvHN7Ydbe2m1QpsMRueBqZqGiKSL2fFuv64omKrR1LVNjJpSVDQkj6JM2i2xy/TkwTe4adzyrmAhEVMxF8EhlQOHgFWpgxCx5/S7osEc8lVFJJIsmp2JSAR8roIgIDokRkImT0RE/nAasCCuD0DfYlkhRATOkpERgYpNg7CoDwfOcKmmMhEuxjrVAjmCg8up5PgPkbGD/eV5bMsOaczn4ldpWjt0lhMil0gUMROSaC5rTlQADmQr1WK8KqvMwMBUymVWE0QkdCZysLcgOHIqIjkjYin6QCRQU81ITkWICYHNikBhjopcYAXcfuDVkzMzBENAVVNTPHxUY2TVLEVDMzSAUlRfOEpWOjLoIIoxM1gm4lJkUf6vAjh2eZoYjdgZ0XqIr3cNoRow2CE0qlm0yDmmB/np+Tck50REqurYA6BKZg4qCqBGwOjgAPi2ItWBKTAzOzUg9JKlINdKiK+skZwPuOhDQ73KQQzPEdlTWYyA3rk0TUxcChbNQCQ6dkCsWUyRXTAVRHBEuSi2bIAHZxmWq09QKEtEjrn0zSkxq4JpVoFDNx4IAEjOzC5LqT0xBEWwUpcJQGqaJVkJmZoxsWhSzZCBmZEAgUWTITE701wsc8+VoudSkYpUTZjEiVLOOUtWsHJaq7AahrUj77kSGWLKx6v50N+MmgEGQBJJBEpZCQ2QX7732i/89M+N+w8l9UxVUh32Ux3g5Gw1btZ13ZjZuNsaQ910OSUGJEeX2y0Hbzm3oRJAABqnqZyEYdxT9kgkKRpOR7OmcyuE/PbDD7/1+Ons+AinjCl6z7/wUz/25je+/v6j9yfo75yfv/76q6+88pFmddYcn96bn87p1h9+7nPT9onut8O6F8juKL3/3V9vm/a1j/3y2f2f/Ut/6X/1j/7+6ee/9DdXrk4OBadn62sfwvnxSR7GtM9Yd+998M7vff43P/kjf8yHeop5v9+dnpzMZqvr/XUUFckEejRfXj29QCerivxR9+yifwr7O6cVTMkITKVdNAKuH7btrCLg83mTBwmzSkA2AwLKxfXj1XJ2+9Y0m8/TwGn3/s/+8MvvfOXbT/v40r3zT5y9fv/s9pMP35zy/oXj+SKE737/6c/8xM+C5b4fpv0g44QVro5ObXJp/czVIJrny6UVNZi4qmoi3m1v1toDqjOvivtp14SFKuRykwLNU0o5GWA/yv5hnM3aEEJGJq4NgyGHUAExADAHcs7AAJi9A80Ixuw8YcxZDcAUwQhRNBePIhIXcnvXLSdyu6xVN99vbwJ7MU3DkFMyU/bD5/7F37m6+vDHfuQnG19z5T75mc+e3n7ty7/9hfe//wdT3s+7eeUgyjRu1yzdYrlUokkSVU0GP+wH3Q1V6PP0/UU3XeunhasslpK0LVOkum0gHR4JX/7cN2uqlfTNNx9WlR8xa5808tDj8ax++tbTV+7fS2l6ctM3zNcXfVU12aYX7t+5XN/cfenek3eunr3/9S/1b83duV984u/8sy+tQxXzxJhVFAOnlCVbh7QbjQymJCkBgZYZRwGelTemfoyFZRfzoWMAEY2gYPVKIt0MjJ5P4UwJgRwhgogxEjOnnBWQUNjUBKJEBEPmmEZ2TKgESM6BKZgF57Ew6RDIU8ySiF7+73xqShld5t21DcNi2W4fPaqrsN3uZrNlAO1japetclger6akt+/f3a7HzdVgvnZde3R2fzv2SqxmEvuOVqkffFjGPKiOAspt6MfYOBg2o0roRwCqLI2Pv/vl9iOfrWYn1F+3wWPOQQE9Jul9ICTnW4DrzbhOjWNSoDg6GAh13G5JkvQ7h4iE+2Fom2ocUucJU0IXwKDyjkAk7jzh5aObD5x75e7ZltMYo4B4MxPL474ytiQqqsiha4b1Mx6yqngiiQIOLHMWRZVdvKbATTvLOTZVkCSp1665l5QuhwuiuLuKyDhs+qarBSZHnqD67gffuXt86rch9X0bpsGmbl4zmlluG5iuvv+9rz/TKdW1v/fSy+ubp5MM2+ubWbdwTmrm7nR5fX1z+/49wdkHV49WVe2CYwAxeet771xv1reOFpttf/LCix88e6t17unVBYG0bfjDt99BVyFSqAgdS5YUhwb1+cMVxv3QNbN2Ns9pEjUVIdQsmckBAPHzyRFQVgNyfd9nSeQrYD+Jbda7MaXahVm3VIXddo8Os4qrZ1i1SXI9q0kSIw8p9rYjFe9D1TT9dufRo+aYh6ziicmYxB3VdYw6pkSE2YhCAM1ZTVRd66KmGLOCiSWP7BX1Kh7ZDInSmI/rlYkGDpXzvKhRcZhS1MlwN8Zt62susB41NSVPdTcLrt1eTPc/cu/W2d03H77z+Td/f3G2moaeEdEoZctTJu8S0ASwmrVHbUNo/ThktJvNlhw7ZEZicpmiKRhS1MljBSpZBuc6xygimhNFwYbqgM+uNsez+fmiibmfYnTognN12yaEDIaSFdG5AAZZonddymbmCMlMc5oQzTsWMwJIWYkguCZJZGQyNCPHniBkBSRgcirG5Ak8UjZAY1TVrDm4AIoAjhlT3nqkAMRZ2dGUE2ESyYBcNnCOkRlzFhDP1iZNWSODgZjHCiwBCEAWzQZARMAUo3l2yJZyjyyOvCoG1xTrehI1U0Yr0YDynq+gqEBAjkLZLzofENBEfPAISsyAIbTt7btnQ5zUxJHuNrv9Htq2C1WVNVtkhKoOdUx5mibBaTlfoHgVMHOGmlQJJCXV9XVZBVlzLlNWBE05VC6bBWYfOCU1oTuL0zc+8kMfuf3y/fPjs7POke7XGxeAZq1sVPvchWa+XK5WS3E+5ZHJKDS7zd5XM6w0x0kzYMjBhXGicZya1S239G7Zxf0gz67ccr48P+VbK4JMsypu+14SqApZRSocHWOKky9cSxE0ZDUy887MEhMyYtrE5nT
RzjpazsJyJe2Ra4+O5ovp2YdpmE5futsdH/UEcT/IxSU+u4hPtzcXl4vj26tXXvRNy5MFJhRQpP24BzMklZRg0jxtfdDAjhG5nu1HqZzrIIz9CBtNW3XGiLrd3JgLppXs0ttf+ML1m9+8fTa3OKWUIBsD6DQx5tsOXiGvyMmBDNJP62ncg8GLQhU3x209TWM0+8Y///UNpM/+936hq33pWc85k++QarMp9z1opd6RkoFTBoGYkVIVGNh5rkhpsNC0IEiswJCm5Dm40JZaQtCcZKhaV6+cjE6up7i/mbchpLubyw/bqm+6WgFJWSqMwT72w6+8/d6jb71/4UKDBkmsni0NqXJIaQKZYr9PQ1K1FCOChVnlulrJ5X0PMU3DNO4jFe3YNI2jBaoXc6wdV+jq0CznykEMZIzUD7De6rPLfLM3A7WGq1ZFvUkIaFkhmQIrGVWgIEVQNmTQpOuLCsFnNUR0QN5Nsa80Yxy9Jo+oSTQpdXObL/rrhzbtDoIpgoCV2MshRlYwvCoGVCwYYGVjRs+tDCWGowSgRV8q5iTDg+wEaIeqbzSBQzAKtFRuFUmKDsijw0YSixmVD3AXLA4G01LcrqBqBwajqIFyFgNFRDAxxAPzRQUAHQogQkxCpAjJ+YiwnjdVFXb+tuZATzaXq9nx0dHdq6sbIrxZ39RNOD8/OT6ZAZBIDpXPhoyF1OfXNzfE3LUdACRNIqlqajOdxgEkO3aELsfJkzl2yAGMxTIiGeAwpgPOiZ1ZBhMCDFSpKpoD8hw69MzkgnPeOWJAdkQMhM45I3qe4HGEBgBMqGiIBIhZUrFruUIRJTRSFa1cFfNYHCqOffEJ55wKxqjYyICMkRENgZicoYKqajQTNERAR4GKPIIuxsE5h8jMGHe7VVu/8drrb3376xU0Z7Ma4xQl1g3v99um6fajhKqyHC1rAuCowTWaRkfcVLOrq0sjSwqM3OG0fvdry1drXZyJAYPiQbgBAzq40cprMMABia7wA6T1AWhUsF2lMEyEnAEoIpQmOCQuoTMmX0xAZVdfpCn8gQEHwMxElZ4LVGpWyD/EDvEQolLNCu55PrvoQkVGK7geLVqo6kGaEVFALCWABxASgoE9rx0kfC78qCZARYISR1JTNSkKbZm7qCoQShZ2DhG1AL0PeKLyi/IczFQ2X+UcZUMlpue2PlE0x05URRIh6WGh0mH5F0i4KvIBeFQOpiQhgKjIXkUXMxXRRMymWtjPpmpWcIaGiHrwiCkAMCOAlu8eGogps5Os5IkRUU2zOqScQbPpmKBhQBRRAGXHhVOFiGqZiYu5zAycIyBWM2LW0hbIpJjZkYomiQjEVALmBAjPIV9ajlbBiAmK7woOZx7BgEhNNVnhe5oBs0OwbEIISGwqpiogxF4ku+DABCAToZqaKnH5apVXHcumqggEOSXHDGiaE5Mv5w1VEA+6oZkaoKgAEZX8rioRSc7Mvlxh9l61VFAaMZJjLWQy4qxK5dVUU9ZECEQMiHaILCqU+hogJDoQ3PFfl4qyybAbEByDd4bz9mSIa0x7ByNLar2l7PbTDalxRXFMQDlNfZx6x07GzOiD4yHDqy9/5pd+/lfSfutAbh3fiimBYtt4R7Dd9fOm88E1wT1+Jtw2vqLG+c027Tf9brPjNgBhGtJstkK03bBdHC22/bprW0R3eXW9WizaBqTfYU6zk/DF33+zOz1mx3J1dev+8vzVl9/+1neePL0Stf/0//Qf/53/+3/z9Gp965X25M4ru300hZsePv2JP/+dL/0Xy/vNza7eDtk51NG9/fufP+lOm+Z2uNX9mT/3H9zsd8P1Nx4/ffjO060Cn7TNcRPEuT1OVaBpbH/tn/63d174yPmde8Wztzo+/aEf/vjvfumLwXkViEk3Y0zsLzb7WdDz00VOdn1109vy9vni8bMnJHZ9NRkCUH1zPXkgD3a0qJLRZiv73vZjXtYNIl58+O69ey9nafp9f3T3xT/3S7/8//wnv/Fv/vwvvzGfvfP4/Sb4e3fvnN8++Yf/+Dd/9rM/19WZmtCvN0fLZcqJKygNy7dvndysH61Wt8A5jRkR6qaOmh5++JCMCLHrjvr9qKKbceurAKZjjGoGgHGMU05+0SXAerYS55DZVY1rOkKPyMzeynSHCYEPN0kzRJYUyYrV1Tw7BQMRNAJ0CDlLBjDvmknEELjqugXEHdRVneNo2aqqHoa9ATQ12qRf/I2/9+F3v/6X/5f/SZ/Qgl+9dPcX/t0/8/bX7//Bb//Wsw/eqZZV2u9Pbh07oetHV2qrxbK52PfsgpnfbuR41XRdu33v92fNpjr/kZFnznulnF1Djcu6Lqvg7Ph4u90bu+PZTHbTejPeWjRQ+yb4m8vx5OzWww8uqKE7L9xqNe5u+nm3utyNTx5fDdP68Qc5T9tXT29/4vXjv/s3v/DWB/b25Rq9kyjTFAMzJiAm58jAtsPg1DBUgyQHwoSAZM91IGQsHlM1RcdoULyhyChmB6n3ANZTAEBHZR4Bhs95dIpJynQOU2Yw9k5VVXQ0BGKLExyGEWwiRSxXsUCkpoZoiGIcwcX9VX6yb2+tLMZhLbPlUU7W1HU3n3Psq/YkJ6nm1fLlN2LyoyXAy9rFPPQp5/nyaCQA54icyVi19c3Nw1DNzLGNws6Td5KYiWrSq6FfP5U85FC7zeOHs/OnEmYxDW3wKQ+SLhlqhkQas06hI4U0bfZ1s2izbvbbZbdIvuqn2M3rsZ+Wq+VFdUEAu/WAgbu2Do4TICOGqsp5ZBzvv/yxt975UDHcubt4/M63utPb+yHlSXwyyJF5Me1HcCwAKV91TRXqMIy4HwbX1ADKnjGjCSRJNaPGpJonc4HY4vTSR97ww42G2DZhv4m73W7hz8iha6p7t+4+ffvDBVmoFg8/eN9SrCo9XiyudrthPTSVUxXnQ3+9q7tGRrnOl3G/76rw8t3b7z+8Pl3dGzebXUoZxIbx8fayrSz1EbOww3EfaVLG2tXeJ7x4/KgD82KzeReHIU5p6rMnn6MwAqDmPDpHftbpc46jZlER7zBH0yxxnOquYWIkAiuONgTVEhP3IUgcK18rmCc0kTROrZtzXe9unrl6LugqxxUbYTtktTyEUE+5D67jnAuSq6Hc76/apsn7CQyjTZNkoAYMr/aPV4vj42bWU58kJoDlcrkbdkdNuFxvQ+CoEvsECsjU1O3cs2dnBNMQmbln/+z6UhH2w/Xt4yMK6isvhtMwBbcMvlIdFM2jz5q5DuMku2m4f//+nVdffQjDl599EGp3s9sFpjwO3gfL2QiUnaDNuvmirVCSSNSA19tewUzEe4eqlqNHQDZBSxYdekRrQgVmjp0DDYyZqkR4+ew6XeWPvfyit37IGdEFx7UnIlAteXU1AzMyFe/9JD2gkc3ISOFAuUQkZTZAy2iGohkgOa5Vk4EqZOY25ck5TjohVaaJAQiNUA3ESCoXVLVgahEpuLZ03JlBjDtAIw4Sk6A4Lm/SkCAZqMmY44UBeAcxTuwYxAH6bNGxz2lgcgIJAD0HBxCBDJ3nxkwlq3duyjtH7J
quo8ZtirI0PIOqBZWU4YurlrthigJIdLoYdpRsdsGllyzghIxORAAiBMzGa51EmCg2VDBFAVZJ1SW7dXm01DEcqRfbeyA0CrmrAd+rZqAZwIwIt108EBnRklSg1M05SZMITabds2sy5lcEOG7FiFhkBms6OL1VWACiAj1ehYZZ8y2KQiYdysDm9rdrMpjf0ojOTgOSEzSAvCGUfg+NInXv7KF94HiC+/fI/VBJiRU8rBIcSwWW9mdR1QotQ2TRTnOA2rzbaaRWHZDuPjjx9CWL7zzpePTF+9FesQAyGKQtCGqXHZZtu7PXcbO1vfe/3oetP1l8kdRh+JUxBBkrGtcqLN1XZ/2fZpPa43ZOqWw3K2ffxkhnugvqwju80Xi4Dy6MmzO/duA/LJ/ReqIDpMhuaUuA4AojYlJQQy1Kvr1enJXdq7k1C2qyfRNbYxRlqG5iuf+920XcVI3eZm/2B2c75dzGfHdxZJfErj/nzWw6YKVZ7yVT/xrM6GHn3RzPuURqL9g71x059fnmefmqauWIaYbt1++eGDr7CEIHB6csiL8aOzp2N30QSKsHo2XvfX7d31m6l7z3tt3B68/7hcBX03yK3S2UwSJYMW2EE/rGayJAEMbOpgToQpKwnHZj50NyFWapbNTI1jEK6GYdj0Q1XPQphJCOOUgPh4/6UxDYSMYOom0qKxCCDRaNoNneNAwlPy+bISxzRZTr0Hadt5rKPmpMMIgu2ivlqNp4cn/XRjRlerbYjN5eqaQ9ONE+G0t3cYmXly7NOrr7725o/+6Oc++PpmdQZ5Az6RO2KQEEcbpjSYoxCRk6BkSyTmKP3QJTUWHqYpSGR1TwmIJsesagip71RtmqZIEgIH4OB0a//w9TsvNU2TzQlJQYEQg0QIaBZFtts1IyiTWiZUQgyVTDkjkGRGR8bgBFGiyoTC0zAummU3DYwG5LuzAgcHLrXIADqmEUgmmwjATGMIU0oKqY71lEYig3I+EGCAXNz1pmCOxt163W23QaKjG5UM6q5/oIj3ZuZlg4xU1tklK18MEwg8DakKGZAQc6xbwojgmkEiMRIZGOacjKhmFAcDECYCz+BMFJKlJoR+vEZXJFDLASoHIw6OhdXqSGiObokQKgm9psDBIDUxDr4SBNdUvpOglgGYab1ZS4iETA6aM7IIE7OgARBwiKCGgDmnyDHhbgOyOxAFW5wezA/2rlYbDu00TpcXVx6ZUG+urtKQDk4Obt89rIJZhuXe0tU/uni6f+/uYja/un4iIdZt44rdzeb6cr042A8NP7o6WyybNExDN223/YufeKM9nI1TVzqiblbbpjoQrMZpauZhvLp48NWvv/O1bytWx2+8wvNaDUHdTW203He/++UvvfRdry/v7SdLJJhzBjV369Jw8spdF0NjEhqGyVGGYeqvn50//ubykNrFwXazbqt5Ujnca/tuvPrqA5kt9WRAxDT5yQt3nxngdh1imB/MIgZXHzY3lpAUQ47dR08/+o1f/ea//KUFYF1XUMxpCIyeh54Ac9v85sV7v7x+cEUAboyipohMimRGgf/Jww++db75Sz/75zb//HP7PDg1Zk6AChYoqCZVRUZAdgzgRgCApO7MrDsHBWUdGYGIASSwJzfIEAht6H79F/7Oq7//R+H+fTloYh2DhDSN1ayOdVugw00zCzMJiwUwGpkjWjLDpDCYORHUsW15sXqW333v0XZ9zfFw72QvpTRoPwxjU9VN3bBO9WKvWmaOOezvWYyW6fDlT1893oTp/U8c1qtn0+UGtklrAEbEyKnzSAGQysMdkZwsaTYkNzMDN6eijCJSoHpRSxNYmAJJdBSSmfC8xSqQ5mnDqVdS5hGyGoJAdgQNHHJKWbWKHJmSiLGow0JQh27sRog1LhcQY4VUHZ5U87l5Cmma+kmQ6735bkiGHQej+BHAgJTzlJGIAInLfr0MrljGJwcjEARUBSNwA1XPgEbgZO6G5lr6md2IQBDBIRsAkrmLIwG6upVxSo0ABBlIi8nDEQBYi0XRLCAxSs5mlAPBkMbZ4sXu+vDDs4t7h4dtNZw9eieIVyHGiJF5uVzsHe6FWLt71hE8mGtsKiRhDrizO43EOGtmq26cUgYBc53VrZulbN04BMZZbIdhGIdtqLgbr9lpf39/nLwft8BE1hDXQOPQd6GK4JzNSUKIO/YIERd+cjHplBCZmhEKYuFOgroSClEBNYm6IzBzADcER7RdxkYdHERqJCpWFERUcwAFxGLKclQiAiuSDyOY2URE7mBu2Sxw7Z5KtApIijzExVSGUFQoc1fNzGVm3FGS1VRdVROiM3MJXkGpZnd3NEI0VyAs5qkdEcmRAEPVAKEQ3b734pMxPbn86M4+LWZVnvqmniVTxL6ao/q4mTSiI9tyr1mvu2Hs0VSI0qQUpa6r6er87Btf2XvF+tGeJKj2a5gtpFuD1YhsDjlngMEBOacQQumrcoDd2wtoWqhbpdIPwZ0En18FWDJrTITIbrmYqADIARx24GoALyoO7Pg+pRUOCNnddJfdQ2IxzztxyhXAC54Gid0saSrsvzJ5uWffvWnqXrLYppqYmYjMd2pCtkTFGQREJKq5uILVMnHxrJGDE5NDyaaVBx2aZnco1hgkNHPzDGBFui3zWbmq3YyIzXP5ZWFHm92ZcmwHfSp3Y3RXs+zICFa22s+B0EZMuUgbGdDSowcfSG0Nh/PzK5lJGON2axjadR4c/HqzIUKEDKaq0nfT7YPDKQ3MRSLG8naraRmIHArpuSCfdqkxYjFVou94owCRsioyiwRVtR2Qp7ggDcsAWihXDuRFTiMzMy+zrZXPscDCi+hraponFnE3ZCkcXncHc2LMuXicIeckIpoTfocFBcBSXF3lzwVyJKQd+9+NmLAEQtW4+IyKrQ/cHcgdiSwnREEkcyVkdHDNZdOrOZeQnRZHWDEvoSA4gCKhZjMzJC5S9U4sBmBmeI4i+D2pyNUNLWdDRiaUEMY0sQgaCsXrx/bn/jf/K/aN+VDP2SYB8vX1KlQzSzKbz/vUtbP9/fb48uLKPI1manp6cLLthqZehlomHV25qRfXq+2smZ/eu/vk/EOK1bKVpw8uF+28quOirp6eXVb1/sW6e/D4IjDHiDl7XQnR4q23vutXf/W3bx0e3zk5vvrww2V93AK//dLy3ic+XTU4ruDo7tHFtU3rNJxvX/vxl7/3T/xrX39w53////grcekgAFrUC2O//pW/9ZdvNS/wXr36+GqJafrGe5df+vZP/MzPNJ/49Prxo+OXoT26o2f9TF69ufniD/zIj1B+9tnf+PVPvXW6Sdumz7/+q//otZN7r37qjR//4//L3/riL/Ogp7wgNK+8mcHFxYd5u10c6LNxu/3i5ePH/7zbXixa6266i4u0f3xcXT3bdPlmvXGHm0t7kPx0j5ezZd1kIId6pijqiII315fvf/TBwZtv3D194eLz796stl2Xu5HcsAKpZTarzKaOq3C0f3J+3h2fxr3T5fW298g3V8P8YD5NOm435RMZwebNkrBGqoUblghSM0d1dZYQf08tQpK
yG1B3dw8cENEsY4lJmyEJuBMSIpupO9pzGHupVyDCnBWL2Rtxx24jAU3mXmhgFCpyB0PEwDKhGRFqBiYpVTsIBJYNQLgusrTlqTQ0qmKsD9RoHHMaxtIIUMd6m1YA497RAtrTD77xwUvzVq8v//r/6c//0T/2J37+3/75b37zfP9pyjIt87DpV0tuLHTAeTlrd0Nyr8g4DeoMiSgSgEPaphryOHTqoTqdJaue5nT/9MVEV48fPDg+3rt61j275Ek1EOmE/apjMxyxux6l2qvC8c3wqInQLprKZ+QuInmYfFCfMqNLIEsoJcJKwIRCaJ5iaMwyMgMhmTGTIST2pi7JMiJCS1lzQjBAo9JLokpAKTsQi3C2zGBHe83HNyuQFgIZqFBAF7MEiPNFPQx9pGXW5FgWTSbEOSXcZZszWYhMyS1GGjpFFJsmGDvadzUAjpZ96K6rJpjmKHPFLWNrw9q3w7TscGbr6/OT7hVEJvRYVQgUuM7WSzWDMKtjnYarittFVb/y4gsHp68s563107SeROpQ19mtrhvN21nYa2bN5XqtmOrZwWazPn/ybG+vzaTx6Og6pTsv3rang07b2WzWD9cpp+xmrnnSCZ0qebpet7WsP75Z7DcHdw6HfE4GEIDruU8uiPWSA8lhe9hfbDlWQF5V1fV2rJu4d3ha5+ZyvNo7PH37re978uRjjuH23RcrmXXrgdpcoa7PL6WaLW4d3lyvMUisYlFGpeK7t08Y2836Joa5S0ibXtXqtpmGNUuQw7bv1ge3DsbVOD9YICiITd3UWtPnLqVMMh+7qzSlxNSt101o+iFrzuZh6OXq8tprGga/dbSPQqrjMF650zQoKz7cbF/7nrmc4fL+/Id+bPlrv/Q//NzP/9Qrt376L/yFv3V6tHfnEHK3+e7vfvuf/pfvlWWtg1vO6AqqYB44NrP5k6cPhaWuKnAzR1UtMHUWFomWcnZVs8B1ykMVWoLQ2TYG4YBoqAqz+f7YT8ghD9sAZBkZOAaZpqmuYxXqs7PzWWTFQCxXaxVaTGnLUfb2KbLMmnB5eVmzEFMyRc91zevrS0RrqzZxbudy/9an3334Qbe5CU0NSYlSHobXX37re37kD3zuyQcfnz9xm3LOWg526u6T52zZCLAWjsTJ1BkNTMchJaUYJzCQIDH4aIhCzN2YVuPQNm1kSesuxJjVxcjQDxcHn379U1XFxBgw1E0cuiEQUEH2ghNRVVWM3ptyFFNgYUKO0hCKeo8KVVVTpolJLTliE0I2zzZFaNR1d6MGdkQDV7VkFkM75cToQCJUMVWwW7ab2xRiDZYJKGBQmAxYNbslkYBRLs6elJpVQ0AgMyswhOcFMKhmpLsJhhDUrYy5DjBZ4mSztnFAJ5QgDgkQS0lz8KiYk47EHKEZMwB7AHRTCOaobsjA4DblzMyIJoHG1AVst10nVCkgEhoCOhBgcoxUKWSRYOCmYEDdNJq7IaiBCIUYun7I2RGFOY6ompNEYQo7m5RaP4ztbMYEgcixQXfKOOXcVDtuXQyQx2F7Rccn+8OQz8+34yax4NnTjWW8dff09M7B0fHe9rpztSzDan3DTh7DzeaKiAxJkw79dn3VMdb3Xr1/tbmkSNhE66btagiwPL7zQsqZzGtm09x1q1Avi0dCt5snX/3GR195//LZ+od++odO7t9hYWImwnE15NXwza+9u7rJP/JT30eRCqYNLIPDzeX64PDAwcy0mrUoYZgmIbTN+smXv3Lz3sNl0x6c1rP9mLZ5+2SoN2qrNWadLsfpZgJuTl+629JsGNehqqTY6DWDm7uJNeO6X33zna/8d/9vWl3sMQk5oSbLYhUym2mepgnkd/rr3+rPuudZmlJADV7W48RZkeBbq8f/ry985s+//Hb/4OO6EjcDjETg4IxBAXGXZicoLGvL5UcABWEycyInzA7gruaZJRiYAWDNNPo3futz3//KKzKbjzYZDvVijo6mhiwSQrvc5wVCYAcNwuDZJGfOFYhgJR6vzvqvfvBoWFugpl3cc5Y+bVIeQ4hLrNzMdSMCs9ks7nl7e+bz2TBlgTq2b5n1j373IlL4gfuH0/nN157cqJUEhIFlMjBEME2eETkhOAk65JwNsLROG3isArcCdeN1KNkX5IABpAlUBagCJoyzmmtXHYU1JMwTsBPk5JCFkQMTeXYjElWllCBnGE0yQrs3tnuMVrdNZhm6LRpMOZHIkNLZJu8WyEQOwFJycc8nY+dS/+yWyrHPCsnEydH5eZqltHSbuzk6ooGCICg4ABgCAIMLgJQub+IEYAAG6MAAqAZMDDvTEQCgEJplRFQSLpM6ods4gRNzUjJuxnTsV8fTRj9555Wr6w8ePnsMkG+dHMQQo8Dh8f7h6VGelJOFGKtFzUxqylIqgczdh7yZNwsi7seBUDmGrC7ultXQHaZKKEp9cXWBqDFEqYTFddCcfUijmlWhJfZp6s0TcUSUbpyQqhCrpmkAQFiodIyLgJuBMRFTAMuOkG1kqpHINReaDCIBoAO6efYEgFJyQ4AA5J6zZqQQCJkoa0YOvCukKrJRBjNEEiY1Q89IJAV158VYUnQrFw7mim4I6OZEjLSj8AISmBVUZVGykIDQDZwIq6pJOZETkpRADCCVQvEd2AUIwB0UkQ0ADbBkCTkK67xd3nv1zXe+kZ+NZzHWIp7UU9IqSlu3lsYxTyE0XbeuQztfzM6vLwions2DeO6GPqUQq5ynzcOv3//uxXsb3MrtlMxZLKtqAvcxG3MIfR8kiggjE+++qiQCiP48h2aegZil0ue6T1YDACZSzY5OxIgMZjuLMCgAwi40ZO7GxMCUsyGAqe7eAABENFUidnP3CZCFiypnsCuk2d0wi5mn6DD4nXCcqbnSrliwhLscHIpTBrE4vczMwb/jLWLwHW2HmRDQzZ7/l7uPZBc1L1U2Vn4g6u47oN+JSoHDru0L0fKuYJ4ZsxoiF5LSLv2GUBIqxeliakJisDOvOLjnXJxZDz561zHnDE/Or5Km/dA8ePTk6PDFq27CbooSLKFw6Lo+iBHKeti+UtUAU/EQFTaQaWbiEqxjLKUIWvD2iOi7okAy1fIxFLfOjsOPBgClSK6QwQENy2OtyGFahmCnXTrNmUUNzFTNiJiQTVVBEXewMHVzzYWORMgOOadRYlnwExODORETkpkioNruIF2014J/2dGhiEyLtOfgIDuV1p4HDzlrMjURZhbfRT6LMQpLOgeKguYAgMU1hlSa2Vxz9jxJqBCJhalwX4jNFDRR2Q0g/f9LRSWY6uWAaIbglRCgzRazyxv4s3/2zzFsVtvLWSvj2nWaMBCJIylHGtPYNjMC6vo1Se62N5AaAtn22816axWHQBlwhHS9uvaR6wovzj4M1bS3v7d9ej5tbNbU872mW63Wm8sk+OD8DIRNrR+SJr97dPDeg4c//KN/+Jd/8XP339h7+ZRePXz1zbd/hKvTo9t095OfGoZU77380//Tf+sf/e1fuLUM1O4taHF661MdHF3dfDg/vUuO0gY302797Hd+bTG03/+HfvLdz32R43J+T+
DZ+Nb3/lTz+g+PQPXp3U/vHxNCj3r/8EU437zcnjy62IK5W7Lh5vxhl39L6k99iqP8/j/0J+6evvxX/8ZfErc6MlEIxGnCVe/r6XK5bNGmRcpg44DWbzeOuO6vUtcnoMfX21jzst3brvp6b//V+3e7Sa+up/WkiakfbbMdNPfjdv3t996PVh0f3rc8pPTQFDCEtBqIN103VXEWZie/8Cu/8/Dh2Uv3j//Vn/wx1MShaudRlScbFAAjMca92YG0+8wxxtZcRCoDJiAUIWTCwMy+M/UULafcfRDACgW5OIEJQfMkJACmZkRU9KHySFNTt+cCOBaDID23SgIKmTGYZjUSAfdAMowDB3J0M61CzDkjIkvY9Z/tdltgJcAPbClxCIhU+dItT+GqW92M05CRiGnR1rlL3U2aH+w/efw02Tif2Ze++BsdzV+//xOfyLN/+Et/8623F/31OKbUPekMaT6rdhdDHbphGyLP67jdjpIHJqSWMQ3JpjyYBKwxrM/XT6ptdVvbcDRsZ5eX+bqzEG3WgArHGKYBxt7cYFqNV/16cRCZE+i03GvGq5QdiSEICCGAah45kANkyOU9V3f1XFcVsxuyuasqgqube+JAxAQo5iaIGUhEnocroTRTGnisWD0TE6ojQk1hHPq9eY1YegicRUpxY8oZAFgIAAgAmYoPwNSYn+viBMyYdDS0fuw47uUxMwIxEUpoWiBz8ClP4MNknjIyxTH3+23sN48qgRBDn5SICvUtp1FVq1g7mXtGx9pD//Ts7U+93h4f9qhVMwuA681K8+RMmkZBUgeTEGdVolDxstnOtuuLvBkkBjU7WBw+efBtDvTSq/enYRy6yRKq+aKpckqWhhfu7V/naVFXe7FZX26uVhtM2R2S4P7xwTR2urWJRp/h4eF8/eB8f6+Zt01KQ806pypthwC2qGfzdu/p+eNZ5CoYSBs9rPvVuI2pG26u1jJPQ8hE1E1TW8fNZrNf11Max2nCtEXV86dndQgOdnA4e/bs4diNe3sHm/WIEtWaftuRUTML/WYIoSIMAHi4f9Rv1qGBWS0Dp6gxTWruTRvnVXv25Hw1bluZvXT/xdXNKqdROKyurqpaCMknm8bVrdhesQ/vrX73/as/+MlP37z34X/w//yPDvhE1qDd1Q/80IsvvHx3tzbICk6A4ObEGGpOecQs9+6+NHSD2dg2tYOTcDk45KSawUGY3ZxVXc1UE7C7jcxIyAZZ0xQlBKachhgjEw2biSg07Sz7jSB345YEGIOEEEJ1c349bnsMbpCnnHSamJtZqMaumy0rARGB80cXd5f7UZBZmjsnY04fPHnfUGeLBjh2q62G/Nrrr3z6B37gK19/73zqAMch90PfmxuLYNGfVUMIbormTj6miYNIrIbUSwxZbVYHNejWXR1Ek/k0Iejx/sxVps1mVkcCIGYBmIt8+s1XOBqGCE5MYuoxVsBewA31rFbN7Ww5jZ3UzgDjMBDEQDKMW2CQwIYYY0hdkiiGjIBDGgmgtP8yRzJ2FFUzykhqlpgYAZkQSsEqpqTGEtUFUAhH8LKvEgdK3gE3SBGAkuaQ5fLyeldrglj6sL/TEVu87cU3AABlBcvExfSezVPWiAZQIZI7G2DKxilzCGY+TdkpMQdCbGSWpkzcgvdJh8itO5uJuRKC2ihEyDSlCYHVLVS1Ak3ZYxAHzzlV0giJA0x5G3nmiEhi4Jera6ByZsVJjbKZWs5ZQggF5WIWmCPj1E8MVgUJdQVO4ziOBEw4TVMgBqQ87hymH37rfclBPezdPZiSImDTVGdPLzPQ8asnx3dPGXC16YlsuXdwcfGs326rpopV9fTqTNp2f//w+vzpZrVOaPun+9u0HVLiuk6bqbtcg+ILn3qZWsk6EWHXDcN227bL/bt3URLZcPnBB+996VuPn2z3ju6+/a98uprVyESIqR9gwu1N/5u//oWf+Tf+1N6Lt7OPbGBDQrXU5eF68NmeBa8Xs+w2DSMD62Z8/IVv3Lx/hj2dvHYP2yB7jIs8izPYDMfHC9fpyXuP87Oz88crfPxofPMT8f6x1JxGXW032qeGKxgkDPjuL/7y5Tu/NssdEmRLAKiOLNERcp4Aiav6mQ//Ytye/NgPX/zubw0X27Kc3q1GDRwhO4hBTdVnv/G7f+bf+PHm7NpAhRDNkwNgBsssofRgaM7ojgwEpMrMbGbkpJYJynZKHEBcDNlUzQyJmKlG/e1f/Aev/djPzu8eyCxkVWF2SHUTm+UszIKKZcuAAGnyMQnzQpaQw7OH64vzp5vrQaTaWzaI0Ntg0yCIQsGTBUCIMDvcn7VMVYR5Vg6ekdyGIU2JT1/5Ibvuzh78VpMuvucQtzf80WjrMo4RumdwQ0fL5gRK5GZoYDkTMAdyBxDmKLEKVVM5UQwc0pTTVM1mdagc0cGy5oAK2jNrjo7kQOxZC7CbzM3JidAg22j9IEDjpgOoSOo6iDG6Qdbkbjp2TQh9n6idBaZZvYO7lwMeAgFomRx2STIv7AFwR1AARTAkf862dXB3JjQrnnQnd4NSQ2VQ5J8dWQeBqCAMS4sTErqrO7tBNhOiUo1XACnMwR0YCMhTVpYAjIgZsrew/+zxePfw+/J0E3F9dfnN9c15qKWe7VsKTHzrpTvt8gBJZm3FyEAoFFRT+SVMc0pjO1sKzaqq3m63lq1uWkDKqRMhN3A1BAwSu81WkKu2JoTtaguAgqiTsmOMbUppHDoOEqVKk+fkRKGqa4khEAG4lDkMi1UO2ZmQ1DIQMAcARSxwnDLG553ViAp8ppiwFEsDGhITUxZAUM3CLLRrn1DNSASIBWSNBU+9A8oYPQ+zPJ8qi1aARadgEgM1V3JyL6GY/DyuZWUuMHDzYoxiJgHgolBTGXPBis5lu/xaoecW6YjKVVyGzappHAGxuv/6y4/f00c357fntUDKmiXDlBOiu5ECz+cH66ubyGHRzrbD1KeRY80jiaMKCGC3HT/80hdO3v7u9y/pwcVm7pvh6sI0hUpMIlGoqpoocohBRJjJnTkAcjH2uKsQuBvHumoWbdjRGwG9uOjQGQDUkoF+J7Dj34mbFZUHwL3Aidx21CEor9e8OFashM4MXH1Xka6lHAzRAZi5HPjLFVH8O+WjKYwp34GU3VzBgYithJfKGw1KTGa7tvPni3o0Le0AO33ITY1K1B523WClXA+5SH07kjWAgWvOCCV7XiomsHh5zNDcsIBriQDBbEfISVZ8gQBAalasUtmSAxACGt5cr54+OZstm9XlWoiVeLXSzbW9/OLdb5+9t4C6Rhz6rmlm25tuthC3PjCnPEkowkfRonfKSMkmuyMCGqBh+cohImXNhe/1HFXu6s95QKWRiSVrAi/3LyocXvAS4SpEJFczd8VSDohQLjdEBCZVLcwxh513DJHcMhJmTeWOpw4sbGpU7oqFRbV7IHCRYAC1XKFF2isSYbm+CvwLEHeXGIP7bvQuwCNwBYddZaFOzAHR3RRQCNl8B+cCRLDsjrsJX7h8bwtrylRJwq4hrzSyme7+hd+TihDBgACzqSYYaYqRYxVXw
/CvfP9Pf+KTnzi7+CgyMEu/7bjCpGPV1k3T2pS2nUpY1EIZU6zjfjN7+uQJs4cAFL0fN9zDmNRz7rM2crho2m999M2jeTVurt774KPDau/l23f6fPP+4/eqefvk0eOqqnogB0hZ5/PZetuttn2qYw6plf6Ve8cEy5P7b5jtnb52LzmpETendz/5h3/4p8KnYv93/uO/+8qtH6S9N2sd481V1bTnX3tPA+y9+mpcNs++/vUf/CM/89tb/e4f++71r/wqfPB47fpDf/7fGXDNEoRry8FceB7f/t67B9SdLgSOX1999pfqk9skz/RrZ0/e/fpJU88Xbx3dou/6/T/88+3/7j/7a3/prVfuPXm2dlDNIxIYTkMyUzp72B8fzi4vV5iMqRnGtNpMg2rTNsb28Ozi7rLdbgdEfv3l177+9UvNdJnHbjB30Jy2enbrYLGdbk5PF9//1vd+9jeukm6Nq0G3l5eX1cF+XS3/yt/7b84ebU+OT6ydvf/gne/99FvJDHh5eX7Z9SvXfrl36KGV2QFLyyBOFSAbBwoVCZeouYGrG1MQZHNHImY2NzdFKhcbARalloUAkRx1l5I1dXciBFfadWEQArgqkgGUrKOpKSETUVZFpDGZ5zGTxiqOOe/iaA5AAQERnYlCiOCeNQuLYiaPzy2OoGkCkLrdR+YpwdhvVUdGaJo6a9a0RPPN+ATBum339OMn871vfPC1j3/6j/xP/tSf/osK73/w7a9cPfv4+qpfSPPso+9Eb0ZXwCoOAUzEMXRDBtC6bhzwxbt3zj76uN/0OKaLJ/nF2/OPLroPn4JBs03YkHs3VK6LveXj1fW4UcJ62HbzGcuUmFA3I82g2ESzKVVkjBSCmZa2QpIqJ6eSUHYlgp0JFomM3ZWNLAMXQl0Qn7ID5zwgwHNsrRgogo+a6mrugMyMYITcxvZ8ex0lAE7I7I6lWKYK1e7DIjBzYQZTd0Nm9QToUgWT4GNWKyciamvqNOc8IaGpGlNs66xriY1STNPYNGEcrpvUDVePa7vfnT8TC9wsQNdIwFKRhJRGyx6i5CnFWQBs84zne/sXl9ckNq8CxZrNgcgt11FS0tAGkTqzU6BFvZjWGBd75uPTx2dHx3t1zUeHBz/21qc/+PjbmKYAMMZGGrrZbEKkYXI0kQTLGBn48nydtkpV88Kt+5dPn9lo6QJyZq55Pq+nnA7nwU+WaUppO1GokvYh1GnIZna8d/fu8f2nV09NQtZpGK5zf3Vy61Z9u91u8ktvv/LhB9+6s7jdjUPITaVScdxv56vr4Wq9qqmp6nh0/9bZx2fCzbOr1PpeE+Hy4RUGB9RpStjrdtCjwztVa2MyY5/GdLO5riNd31zNlsu5SKgzVD6fh+5889FH71+v1ovFvk3c3XRTN9VVqJsqTxMTG/M29y/fu/fo2x9dX17cPv6eabABw5NvPXzj3hv22q2PPn74c//Wn37ns//9b/yzrz7foTkwGgAxBUGdkiIRYV23BLjebGOsqiBjGsHB1ZkFkCQKEEya0jQ0Va02EfGi3T+/vsCosQ6+nVABQcFTVUWw3EqVM21vtnU7v7m8qma8WLTotu3WFWtLufLMgSjQ1Mmzq1XkYKZjHmqL3diLYS0NpCRSAcjlxQWHkHN2cw0RTNjh7U+8/eqrr379vfcvNmN7uHz/w0237TVnc0uqVZA0DZYVwE2zxJhyrigE5nG7TaolB6+ja1LM7Eg5ZUI317SZMPW5Ty41I5pPt/eO3nzhpdlsRkwIFKtoBiysOaM6ExkgUVB1NxQJw9SHEJUglyMJBisLSfIggYzyqMIVcpuzVLxM0xrB1RJBRUjqE2NQyCKUTc3VIJsZcmAObsZCY94SVkyMzuaWdZAwZ65TNgc3VQQMLM+ePWEkLoeq0ghbDr+/N9GBmgWQXcYeicBzzoFDDBFIs2qaNAjt6KwsRPycX+kSOeecZV1gu5NPVWySj+oWOKBbFZp+uGam7bBpwkwzZhyrUBt4ACZIiAIkAGw2BYmEQoRqbik7wmbsWMQcXNVGLeM/OhHQOA1pnJAomRPj8nAvITuzoyNjViWwpq5yGtHKQLs7GOXBwEbl6elHqzRobIVOD9smrDfD7YPDto4OMLlFlZuLy83FmkRUx+ublWXlDKtnV+N6mq5Tu7/cP2i6zUbz2HjUTTdcdfP9g4N7B6Mmymnqx74f03Y8uX+nWca+T0Gwe3r56OH5Zkg//tM/uDzZS4AkotMEE4jmX/tnnzk+PXn1e97K7GjIaEykkz958PCV196WWruUsxk4UILK/dHvvvPsy9+Kisdv3feKsyOoSyBeAHBFVRgmal8+aVBuveoff/XLm3c/Hze3ce+ovX/X2/l0lmYxrB+88+v//T9IT8+PFwGFJxtznpBFRBC0BvacHLjT/BW4fv1f+1M/+a//7H/47/y51WbwrJYgayai3REVMakRUp+nX33n8z9z8lp69FAkMPmYFZDKbOxpBHCmAk42wN3qgpEyqJMji2dVg/8fV/8Zq3uWnXdiK+y9/+FNJ91861bdytXd1bmpZpPNJIkSxZFkcQTPKMCCAduyPhiC4RnAgAF7BjBsj/1hDBg2MDAMA4bF8UjwcKShhhRTi6HZ7G52TtUVb918Tz5v+Ie991rLH/Z7WpK/Vd2qc8857z+uZz3P70FQQmMCVXDMo1hW9EhhGe9/6fc//Zf+6vTlGz0YVVDVYbqYmCPBSEg1O+c8RWKq8gbPnq0fP3mSRkGrmnaKlUYYRKxidhx8YIuDZmhnjmeOgveOsukwZI+1QBz7E3DOhDYcZi992ip68J3f36nTzzw3mx2dvC/2bAOGLuUsGutQE7ukZlmziEPyLti2bgZd7dgToYIkJidJMQkjgZgMkZHJMXnQpNnGZD1QUIzc1uaDVl7rGgQQvIxZh4wqOAwIIScEZ+wox5HzIExxyPPGee1lPSCBwYbaSRy3V0Fpri3Yka2FAcFUUFWzOiYzVAFQBEWzwofFSxdMeZ+zMmQ7xKSAVAZRwuILKPtILEQiICYEYKYCM0LV0hhUfA+IZOBKBbqK1hxKkxpVdR8tnVZ7/BwOw8XZhxhiGsfJrAWlhieT+fzOzet71w4UyCHWIUhWVzmR7B2JGSERunY6M6CsqV+fBA5VXZullBMTx3EEs+BcNDpangbwdTXJaYiazMghMzMijv0AmiWnkh9KSYU4xTyZL0LwVNBOWN7KQK2s5hUQix+LoLxFUzHhlMo7QA9gaslRgAJGQTBQZodmZV51lTOFUiSzHW0BiBgJEY2QTJXAiu0CCU0JzCSrGagKMZIzVStBAtzOrsjkC0BXRQyKS0igOPsACiZG1bboA0LQMmMyFktq0Yfg0h32k6HTFImLNkVEQFa1tev1YH/foh4+sE3e7FSx9urQqUSDDOxjTgrom0pkrKqqi5qGtbFMm8W4WZtIHeo4Zovds3e+c/tjn98s5fH9xzpuQBKCovfMfvAeXQWhQiNmDuyYSIEBERlNMmo2ABfa2UKo2k7HRKw5I5efl00iqmxDS7ZFUpsJXu5Tth8P4pZ7vWWQ
K15GyRARkBhANZe/n9GBUenDMlAsguyl2rFVFREL80fVAK1gYop5DKEEjooXjEoSbsvGNnXEZlZkX4DCn4KfsKu35UOGZiVuqqUTXkSIcFt4X34v0bI+KqaksnIGNSQ0QytSFOGlbGWlEgcIDIRKmz2pyKjgUOHe+/d90/TdQEiM/sMHJ/VkXtH0/Hz55OG9F5+/5kKALPVkul73i93dfkwNUYqjr2sVIQ6mUpxQhIbEYhlUEIGJEKigdogJQcppDICFB0YMzE5UGEg1C+RS2YRGxYhkmgspfPtqBmxqYIWdJ8xMTDmnYrxi5wo1SQVUMruCSy88XyXyqtmKKFuq2USMjcoa3i6Phmm5G9r2ix0UvjVAUZcMLGtCQGJWUzSULMUMh8iEXkQACMwIGYEAy7FQQwJTAo8EKUVmBi0yNCOWv7mokLKNwJUdgYgBE9FPqgB/0oBmIAZgjhlwS6kfuvixVz/97//qX3129L535l0Y+oTo2qYCXOQ0jkN2zk8Wu4B+Pp+cXRxDzCexOxuPd9s9TVkhB4+T2i8vTitXTZq5I9/n1bSFlDanR8/mk9nu/vXVuHlyfAi+2qhl4iGloR8dueBC27QKVl+9RovZZ3/mL37xZ181J5KjYRvqWZdg7AGS+onj5bNH73314bPj/edvPz6ir/3hD1779EupPz/72p+/9c1vvfzFL3iE+PCHq+++fRY+9kQ3n8zxrd/49Zc/+cILH//FJS0JyGImz8V0gY7qPQfctTtVv0k37r76tJN4HGc7Nw/7dTvfeeWll/7wK1/+tb/1yx/56Cd/7uf+4re+/PuT+bX1mC5Wa006nc0phAeP7+/vzEPt67mHnB04YDzrUYkrR6VGsGndALzJfrUaPEwwjbPGkDXnKkOVzJYnFw96zBFmi7u70/3NhZ6tE1FoJiELvfPus2WvB7ev/OVf+gtnz57uX5meXZxBaPu8QqRQ1/Vkx01mihPgBtkDkKsaQAIiZmZyBCBa3IgEVNJnUqoZoSwCSplfuekzpRy78aJyjZlVoWFiUUNGEYEtVN8VN2epsJCcmbaJa0ISsaqqk+SUtKomoErsPaACMKNkYXLsXM7JAAvI0pCjiJgReQBV8q5mdE7YIQRjt6Aqp8367Ch1K9GkWdv5tHY2rhYp9f0QHz96JvTtF1/5yHfuvXPnlTen7eSf/86X37x91Ts7evTo4iL+RDBtJ3WKue/iOMZpFYLB6dnZ7vOTTWcnh4ezpj09H5xrDjv9+g9h3TfHF9R4DZVfzNr1+rzTfLbZJE2ZwJG2O3Ufx/XamuQWPNXRUKxp2jyOZhpTZCqXMRkaArPzRJBSNgQmBzISG5Z9q5mYIjGRN9EALJCD5w2qD54I1QzUGJE5iK7a2hPlQpQHACLNmqz4u8lQyNSQ6LI7lJC13M1L+4OWKmmzLAm9z2Y5F8odsnOagXKqDBIAkcOqyt1Sxeq6ulg+nu/taB4x+MneQZZI5CdXr60FjDhpQo8cQow9Ova+SZaVq7HfVD4g23w2r9q6F0sJKnCbi+WNuy/HQZq9nWGzrNoJNo3rBEzZVQYV15Pl+Vk4J5g3Y/brbsI46zerLIaOsuIwSGKZz6ewyY+edOfrbj5fENre9YON5Ufnz9CyMxgHMzSvPAG32sQnHxynpIAUB9BNVzfN0Cu6tm3mVZgcHh+Tg739+cmTJ6Z248Zen9fxLHrvuyE65e98988Dt7eufuTs2RN04/Gj98w0Zj24c6OTbui6oMCID5cXL167Kau1bxy3mFKOCNmF6cEuVNWY1n2MvuZVf96G1px/4eXbDx48mtrMR8PKxxE/fHIas1T1hLkl75JAyjxgvro/i+MwrofpdDIO9vDwdPcOH1yZ/vgH76MtNutxHAaYHcp0fZLh13/nDw/46u3nZwBfKYc49dm8mqgZsKsUaLPZ1HXDzk0mbT/0ADUCZs2AW2KgqahlT065dIg4BUsqoCgpk2dk7NNGJBP5rOaAfFVli3UVMijXdbZMBMzBNuzrdr4Xum5DxJs+KtK1G7s5DtNZqOt5N46MzJ590MXO4unRU6BAjgTFOV6fR/K8s9e++onnb906OH5wcfRoeeuNl3/49G3NvcbEjnUURjREVXVEMY+z+SyN0YBcFYZhVAVVqjCgsyw5xg7Ay5jrygfPXYQxZYcU6oqpBpHdZvLJV1+dtVM1ZWDHLmVxLnhXmRqCOuelQA2ZC4S18o0poFFgLBtgX1U5ZTANztWNH6LnwQ1ZmP0Y10gCyGYsZcXOjIgiGR1tpRyuUxq3Na8qAGXKxiijcw7QvKtM0dB7b5qSmTlPPhC4TMxliAe43O/b5Stief/HMiOoIzIRM3TsCZBMKiYVQQMUHcd+2jZY8CJqMuZ2WsU0Ot/kHEEdmBKDWa7Ix5zJAABT6hlVTTxXjiokzZacq1eb41A1BKimjp1IRnIxJ/Y+Sy7vT6nv1udLBiKmKNrHhOSqwGTkmBWUHJsoIjbBMxMQu8r3Q9IxEiQmTmn07J2j2PV2GTp4+ZW7FycbVwcO1fnhcT2n9cnq5HRDDD/60wsgVO/2nrt95+YLF6slNdN6Pjk7eebayptu+lXu+7EfHYWDvWm/uui7ON2ZWtZ+GLmub7/5qpJJksbz2dMVAi8OrjWzViWKyDqP9z54fHh6dnD72gtvvpwJJGZwZNmgi1/6rd988uTB3/3H/1MJSVJ2JRKe87NnD1752KuZpUsjB5+iaVTKvDk6fvjtH8GYqQ42meKsZu9Ect6sY8ptVQkhttV8d6Z9ggivPfcFiJuTB/ePv/d2+mC/mV3DtX7j+398dnhvCq5qcBx7SFpVDogEUL1LMXFKCOi8u2jl3pB+7Zd++o/+9W9dDJvQVsOqNwRyvF0qabECISISu68+uv/Lv/Dz/umRIliOyGxoGUoNKoNm3E6hUOjnqiagxtvCeyi+plLUolElM3o2RMKaXQUpX5y/9Zv/3xuf+dz8o2/Wt3aywYCKaG1oUL31OPYmHZ4/PVmvBhBACpOpE5CR+iRDCL5i7zMwAOgYphjmVZigVpwzr/rsmMw0Dp1KCdU7GPLYrbCdz17/qdnF0+XjP2v16JMH7EY4j9CRN1bPhNvKanJbjx4VsgwzaqFXkGYwlNFTuccCoynGCMbGpIiaRTaEFkKF6mvnsAk2aaIPg0ISVHFgYCnjEK0bUk4AXDeVa0hICJIKM/thvbH1RV6fK+T5jesxdlzX26F+W2yyfZkrcoIioCIhiZkCqqAqgkKp8Cr1UgZbbiwCZJNSpYRlVCum9eKBMAQFQiIzBiNAZ1a4yiVCc7n8B0SHyEXKdYBqiKKOPFqzGicXKVzfe5mXx6erH1KVkKydVMMmLyZ7V2/e3jtY7M0Ww2ZoZ5NJ0wz9YGZUMMZEk7pVhSySNQGiSKx8TYiiEQuBiRGIHYPzvFl3VXDM9WrTsZkPwTcAqmNKXT8ErnJKWbVqWjMeYjTG6WLug1czT46Ikch+ksEjBEPc5kpw6zUAQLNSel3YM0W2QQQ1AVTamoAMiQgdWOGilPFembhY+6kIMWZb/wJiEZC
QkIAYMJswIpmWIZocIUAhJStooYYnyWCKZVHMRIBbhnE5EmYIykzbPBpzsafgFqZLP3FllNHUNAMAMKlJsUuIiQIicNVOYegODvbR8PTDty3GSUCjqHkg75tqdxxXgkrs+jgG07YNiYImy3kgz5AppuzroDkNF6tn3/vW1ZuvPwEcwTkeHZhIlpRjP2AYjL0iEpIpMhEQGyiYQU5MBoj1dAEu1MOW2GVmyARbRruUi1XLwGzAVGDeP2FwsUGJ7BXHjW2jV0Ut2/6jApTERYlwqAEabD/Sks8wUCjxw0JfRlCVYgor9Vg/cfmpyvZBDUDMAJhL2AqBgFRFL80+CgYAZkpGhV6kpnQJwy7YI9X8E6tOWSiVT4CIy4iBQGVooAKnRtTtCbiFBJVtUQEdbT2IZiZqoAQCAMz09rs/VBeHTT9uhrqZ/PiDB+ImZ8uuqjO3F4jt+mzVHiy6sd8TlTEjEGA2lJxziUBlyUw/sTsjAJYQi0gidsXvaKBihmalHw3KSbjVZpCQRRURtVxuYGpSdNVSV1ekN9Dy5VIqL8xK1UdGJHJOVYolSK0YMF0JkSloBjETh5653MGKFUvL2GWGznFKuWhRJe5vVvxh5RFZpHrMWWDr9S7nF6NY6eMj9mYCZqJbE18JBopKodBboYKVK04ECBWNmXPOAMLIqmoIjoMpEICUfCI50Qy4FSH/HamICIlQxMTAsQdGqpjc4lf/e/9gvVkDZk91WzXnwykw95FiHid1IykeXL2eRm3JTo6fEaIkcI3fW1wnwOPTk9qm7Xzv6NmFzxPHk912f7m+OFudnp4/QaHuIu5Mm1kNy+Vy2XeKcrbeJLSui2YoKimlJo/7t675i/7Dr/35X3zzjc9+5vWLvOY8gkyhXlS73lxQIjLlzdnZu++cPFjeunLnO3/y/VsffX7HX/3Xf/i7j7/18O5nfnb/xVeWxxfw+CQdHZ7d+9abn/lc/0e/1awTnm285Emj4jwAbaFjAAYuji7gOFycTcVfi/Nb4eZ3T59MmgUPQ4zdez/4AzpJ5yef2Gn3fu6X//5v/Oa/fKmO/dBrNEKXNpmd253vAeDhyerWNeI633v8pAoTqjkNSfoxMB3M/Bc+/dxwpt3y6M7Nm5PJ6/0P3q3noW7Dj398uMlgok+OL+iGf/j0Q9/ArRefF191Hxz6ms4367UOf/aDD6/cePGjr79m4+bl527OJtNnx0/nV5/rVuvJdEph4aYz4+D8lFxdKFTMTgGsPHcJzMy5QMgGAoalPpCAkyZEMgHC8oFAFkEPCFS7OZEjM1DImk0NdXs7A9syCZBKmx6WR1t5WTcVE0k5ZdGmCqoSvHMeTSFHIWBQNFPRiFjSDgoMntkEuXA3gFI0KjdBIDC0zI5bomq2wNFNVEbQDYFlh/vX9pfnpxTc+Wl/dvTsfd2Qxydn7/2Nv/23//Z//+/86b/4v8xn/rWPvfbBO0+3F4OiL1MSuqg2Zt2tpy+/8Tqz1+bZvSePr964c5R7IM4QHj2LbWjcKJYg9dKJgjVHyxMlm+9dy2ZPH5xNnRdFCl5r32eq2wn2EVERTZLAtinYqQgTF4ArOQ/E4JhdBaQIHoE0KyKnLBmx8o3YAIqMZIZjluAbIofZQBUUSxVqICLici8QMXYVoXMYxNK2r6G4hxADs0dSAAI1A4JiXNLgMY1EaEaoROiQHMcRUxTIjGOGbqDWDyky1xXvkDakofJ15UOfB7Dsq9ap5fPz6tpVMiAFJFn3y0DBMDjCqJECKYIPHiFbHCa1NZ5Szin1IYRQOY+QrAgREdUYAqMPHsfM5tvgbe/aOJ6dO8020je+9V2u5cq0HoauappBo3kWzX2MMWUiquoWhau6khw2MGBldR3GVacxt77SqN35JguIArL3DFk0qdAAnLJJtbd/bb3azCfTB4crX0nULqr2NI817Vy9ujo+lM2we22Rnvbr9fDgyQ+dwWx/Wrk5GFyZzPs8hkCb02O2QKR3b1x12A944Stq2/bw9HxYb/YOrvkpKeTY5aypH+Bg//n12Xm3kaPlGOqmaiZE+fHh+ZBzyqGZNXXtYoIU085i1jbuIp03B7OnJ4cVcoy5ar22NvQPvviFu0cfbDZrmPorZ5sntaNNt5RVjgE3E9/jCFvFlADJOSYCIvB1Na3rzXoFlnzVIFFOOY7jZNoAgiRNSRDRVC3L9vEqAkDBuewGc0ZEMWWx7EOFIzChkG3W67atjcbgm8DuokusREhDr4udXZNNlGU98c2Ea6dPV6eTdrYaJI9pnXMIrahsTjce8mZzVjFHGcWAqN2Y1XvzV154bq+1SevXR+c/fP/wI69/4WI8PTp6pkPPJuScEKfYi4pzwQwyWLRKLRNxt9mIWlO3ZJpEUEc0vX7QMtfPnp2J0pBk2IzIPCahDOrGG7tX3nzplVkTQmAwLkmxgkhMaSA0RGBXmqGighg4RHKuBZUIKx+CjYMzdugURZE4VBwhWwKQGHPtGkeFYKiEXKIEDJRFEUPZfZkmcKySiHtgP+oQhAvSVBmMCcUATSDlHCtfEaOSSzmdnR4P6xUxShYoDQaqhUd5uTZFNUDewg5Lf44qlBYCJBCz1gUXGs2lFlZVRdSyoKi5nJGRyFkWJHa+VdSyGiUgUwUgM/C+Ftt4JpENU9PnYvtqTdXINEtpCC4r7hCqBInIsho6GoYBATSLqU2mk5SFsqhYM3VmErME72oXHLs8ZmqSyBijkBmR+cA5jqSQAL13UbZSkflw91PPVdNmdb7avz4bupWMSU7OZ5N23GwUVNa6kvjdd95fbvq967ev3nl+fuWWd3j86H4I0Ew4PTm/fffGbM/de//p0HNVN/1m1a2G3dt3/XyhkCrvVydnm4tud7G/c2WPAg/jIGM6f/r04YPHmvJP/8IXFvs70TQ0laqlYTx/+PDt77396S984dpzd7rU1z54IIlydnY6cZNQu6yj4+B9yDEDKZ7Fhx+85zhbg/MXr7U39yQLEcGQ2OVqPq15t1uf5e6cSSaLWbfebJC0bV3z/K1rz+mTk4df/9rjH7/FlOcThjgoEpBUVKsYWGY20RERxjg418acjtTNPvqJw2X/pd/7fVExZGR0yIyoaklTISWoqokS2sOTR+Oimkymul55cB5RMJdHYXl9jz9p/ymv1GiI6BBMRgQTM2RUN5WEJr3zAYkxZQAc80gIrnWxj9/70h9dO1l/7Jd/NTiWAdnT0I1yvskbWp70kjQZurrKtSjbqJt+HCty0xDqptKYSFLVhnq/crteXJKu00hZOatJJiB1QNNmt1uf98sBsnhvyur3J6/8pb/+7p/G47f+YObl08+3bpG/dr8/O89gjGqEWQSAGREcV2LADMxIaOQInHmHBEoaRSV7xICR1007VRqIfMoy5A2DeQ4QzbnafD0Ysfet5dgLMI4Sh80AG8lrM9UwrRIhsBCk8eIMmbVuhk2/PjrrD49d4C6T351dmU0uV8gFhYqqSuV1iwCMVBSIVCUbiiDq1n4gCobICoxopoaqgLYNqJRZaKv3FfqJGjCSmqEBKTki78kRFK
kCCMqA6pgVkMuQLIKE5FpMZNgazo5OVrvtRLvDoX/mvEwmbrWKfRwnzeTuq3f2r12dziY65v29A0NRzT6YAdWhUYOS4ytxW2aX0tDUTVW33diRsYcqUYQyIZKdrVaSxIdKUdmzS+SpTjgoqplUzgVPSRSRkfzQpyzQTidVVQEhqwSuAIG4TF9Y+NDldDbMJuWeSlvwAsI2cLR1fSACIUrRelQVgcyMkA2VCyu6RJgYyeyyjwm0FOPCFjijKlbkOmJCYAQDQsQsEQ22jiXAYlb6N+27gLgFjUHJqKGRAUI5TAigxkhmJlawwVzkYFPkEqlRYSp17Fv4EZYy5Ut+mZghOOf8zv5B343rk/eaoIqDirJi6nOZg107qQzHbuMZmro67zfe9+RCNW36PoGaD9wmvjg7YnIffenutx9feK1pHC2pgqSUpY8Co2gpuyRANMKieRGhpoSIY1KgyXx/Fy7NdcgIUPwgjKrMjKRgJFvijZWxuhw4UTPTAnMlRFG7zO6ZATC5rR4gamCEgIgiWzBzCRltbT4qCGSwbeqCS1sWQumuSAbERCoZYbs3IkdbZRDQSmYKoNRII237thi3AoeZUmkhBBUDFAU0IjbdmqFEdVuRBopWCrcAgbMIIRFCYS0T+fLb0KXUyIRbexQQACC4wqiXZHWonz09Oj86rZrJ6rxvp7Nvvf3wwbPTl188GHI3Rnhyftonef/+o/l+0+zVm+Hc0mC6J5pDqFfr9WJ/XiQMMyzlqGZmkgkREdiFIgSXY6Y5Oe8uH+qGRKUQzMQM1LEHg+2+zbQQygn50nJV5DOALT4cTJXZIcCWjlQM6aDlS7ZRQSpBMwIwZiZDkYjsRBS3hWIl7qgimYlKOAxAtUT2kBDMRAwMC04bdAtyUTOz4skgIJF4qU8RbHU6kpSsuAuREJFI1RRMEBmQil4seUs9NzAmNDCRVMhiBkaI2+COgBao2L8tFTEiIvuAJYPDSKT8N37lb9rmLGOqgsvRNtJXdR2adkjJ0sger+5eiasVoxdUdkwEB/uLk4tzwmwQZ9N2d3rwww9/eHP3ugzoA5+vTo5Pnu4cNJuksB5rV12ZV7E/uvfowYWJSTpdbTwSCnigUIUkowvh2dPTSt0bt2/efe55bKfr+4e3b1zv+wEngaqAQBQCeWxmUyN49bVXuK8//8npF3/p+vtf+mcvX70xXM2f+Gu/tBKlWTN//rWdF17dny/sx+996w/+xcHu7Ws//x8e9xfp+DA8dysbADggQkTvXXtzb3K1uv36x/oPH3vv1qfLqPDmx16bnizeefj9g/3q2Xv3/+A34uc/94vXb75++/rHTs6+Wzc1BgjEKY6pN+9ZxxhtYNwL3rez6BwfzMJ0kzRmE2DC777zjEadN7v13DFUL95549nJvaerE4HmfNMNUSdO1v2yaWwzLM/O5WB3cnzYpIg1EQJe3V//lV/8JbFhWnGOcv/JUWjbUWyyt+PrWTRCP/NVDchEwTEREaAr2DP2HsmZiSEDsWZT3DZgOXaiAGjOOUCUmIHQeU9MiOCrxgA1J3ZUWo+RSEXJeVPNBj/RwsutxqSUalvOiRBEJCYhU81KBrUjJMKqSWroytYpg4JKNLUUsyloViCQLAYExI6QKXDwAFpVpmKShELjp4PFofPHGjc6DM7VdT2L8WI6rVV1eX7+3o+/94mPh2//N78+DTd/+sWf4b3BzYHc9sUIDPt1zFnIgY0W6ur4/CJqvRyGedDZzrW7t+8+eHAIGmeL9mKpMcesQgC7+/MUczarqiaNw8VRbHcn08pRP1g/QlURw6yaDuuuJT/0HWMODA4QLIvYJc4peceEqHkkVsWhRJfVMjtGEzFFUKRsYgoCBEhmiMSoIoTI7DWVZwl64mSJySFgATw4tmwZiQpcDokRESTbJdVOgcwyApNjy1JAdkyczJxDAkQGEoya68l8rQPlEaEVBa6D9pbyhjUET3HoHQcV6IcIaZQhYqjHMesQ03hRVR6MzTTraGJMk82qq0VdysuLw93dnWHoSHDStuSp2Z9DUzF4BaiaOmacYA1ZVSKq1pMWFIchhAmpmmFPJH2GLlplOHajUM5DFsK+G2NSVzHXYVj3zjcDxGtXF4f3Ho8ii7aNALWfjrnnylkCBiWwuqqGrkfPdVXHUVTy6ekhe4fV4pUX7jx9+l6/OX/uxTdmuzdOz49OH9yDzej9ZDKdXb/pN5uuadv1xbKdT44PL8RoArg8fNJOWlezA+wvLm5O2q7fUDSNcX06zqdNvah9TeLGylfUBI8OUNDZul/XVc3qfTVdXiwvTlbZDIG9Z2f5/OTMiPYOdgFyNyxnre9ODvf3Zthr7OLOvHn39MEvfu6lH771MIbJcy8+v5KLN37qxfsfPppaHvvu/N1+/oLj/S3cHQECMwEQASjkGMW5tm7OTlc7u9RMW8S23/RdP9ZNyCkjgQuenQeALNEHj8YiKeeMyE07IfKWkgkC8GAyr0OOyftgak07GfuBPASPhOhDBSSOMnFNuKqqSlIKxPOmaVyAmsYxktLFsGy8j2kEgLEfyLn5dJqTSmjGzeZjr7xwZRG61dmEww/feTKd3wmT+uzR09R3JpJz3uIXiYr8nVXMbH161E5aSamu6ywoKavmFMdre5P9K9c+ePfBsl9JEi+gKTMQoDV1JUPan7afeeOVgFRKe5zzzgdEAEZA0GLsNokyKABCjcSem9gvCSzncZvUIARREwFFE0UBBmYoldCQtRMZJ81cIKto8JhSZK4QUTMEClkyGKgk9GggCgzkwSNGSnkkIIdhzF1pOCRAFUsSg6sU0BA+/5nPvbFZ/as/+NMYU9lawdZFYOUlqdAXHHHBCyNQKdNtG1+xJyqtiSMQl+ZnBc06eN+KmJo49mPcMHkzdWZoikAi2cy8q0vdIgKBITFny4SJQNEkafaODZC81ySI6LkRlZQzscs5EjMQAUEIXsFUc+N9GkYlQoQUx5wzGIiYeERiVRn7eHGyZFd755UMFJioClXfbRDZuS2lZbKzA+DXF2kc8iQ0ow3J3HRvz1LsN6luK0ubk0ePs+Y+p4vl8YMP34FptXPjxt27d1DH1PdXXnwhV/7+4yd9HOdXbyjGRP3i5uzlT71glUPyF89OTw7PvasXewtysOk6ZNYxv/WV7x8/OL998/adV1+IolQ7QNAhw3r48m//6TTs/uyv/KKyeGIGgyF3Z2snzcGta1FHy0IuyJg05mbE07fvP/nRe0S2+8LO5NZMQWSMHmg4PZ80wbdTBaZ2Mp+2Wcdh7GMcs5kYam98eP74T/9k9fBJW6uUrISpqDUhAFDKkmKsQkU556x1XQu40cLSV69/7tO/+wf/rErn8xv7ZxfjMsWhy6JFhgQAEBUiBDAgNIG16H6oiEdJg6IoiqmQsYEAKjICIBmaikoEKC4AY66MHEIGIPZTU7WSxTZlQgMGVQUdxwiouy2ufvCVL7/31o1PfuqFn/65PG1lFFwldoGzr+sg3kGtaxkYCbO1oXYK1KU8bOodP7taV3ttDioe2U1YQncxEPsQyLup5J6U+
tUS0ugIeVo1u0EX9di63F6/+zP/weroaHP47Wm9/sjVOo7126Qn62RKjqm2EulhQE6KSgho7IwIs0UwD9lM1EwEGUwdZujXISAbMDKSZoM4SMg+JU0OqmkLMVEceJ3SqHEZbRktM4hD5zAEnvhkibqIo/iqxjZ71TFl7dO4HoZsM6RxsbncGhhuJyjTLV/JAI3IJCuX4ImWtiYo7GQVyKaCAGhGIgalN8hsi0QBRYYSx0WG4o9AM2KiwFR7Yo9qmlQBVU0ZyjRFaAAinhhyJtOqqo6erly7uOLqqY5dd1bVOCbr15oGm7TTNz/+kZu3b40xyThO6uk4btiziu4t9rKpZBFTXzlAREeVr1C1rqYAMvYrJmrbRY6KBH3cGIBltYxNaFMeBWLlK3K0XK2YDUtw2OWL7ryuJhVVKQkYzeczXznR7Ml5XyEUUCObiRl6CmKioAACasxchsOSTSuSDZPDLZvGzJSYywhYmE0AZAgqxbmhiFSarT37lLOYXDbNYFELCErGBS8TQoBEIhkLELcccN6yrpFIwciACVWViGFrXUG1rWwhWn6dEl1TBCrcnEKj2mpIUDxJJVgDpkJMRXwENEJiqkoKz4UaERDz9Zs3H0l3vvzBbqOTqtKcPVvfCbkqiYhKSeDlUR06SUJBGJEYSSnGhMy1x/HwkNXfnNxaj222dTPJpLbpx24cNAsYZBErm/BsJeJa+usJMaWUk1zWOgAgZslEgMwGuO0LK/x5LATwkvLbEp1L65z8G8CQlSr5soYx4vKKDghgJQ0giMWxJQV5ZFbgR9uMkm2dX0jIIoKEBc4DqAgMQKLChIaYsygoky+49OL/ItzmQkFVNQORbh1OYAA5JzBUAyYyEWIk4iyCyNvWPCIsfWdqhVVS7ABZjYiISVWKDFHQOqIimsEMAYOnAufOOaGJ5wpyfvft9+vZfLNck3f3Hh2/c/+onu70KS37dVyLb/1m1ZmkB4+OJeFIYKNC1oCYEIZhjH2qajMiQBJJSEylvAxRJSEys5OcSxwWiWWbPgMoiHfcmtzgstTPYFtJR0SXLiG3DfyZqSkXVdFKhVxRlstfAiZCtMWq2KWxDgwYWTSbydZ0hEBMKlIMSgUXtYXKA2yRYQTEXIoAgMkK9YUKHknVDBkQkNjllBUNmS/TYwhAVm4RlwhRK9pTud4lM7No0fWosAUuxXojZlDNefS+ISMTLSiDLSPp/z+AZgqGlox8qAOOYq+9/pmPvPYpG46ryk2mbR5ENBJ6BtIk3nkQ7ZbLnb39o8PjWVMjB2YFD/1mBI3I5jCcrtc78ztX9l87Onw3BO3WJwfXqycP35cRdBheeP75ps4fHp70kpOzcRhTzuScxFS3jfOhyvXujdvny/XJ/Xu7i8Xkyq3NbLp35wDzsj97Z3r9JQOnZpINEWmx//P/4a+99+dvf+e33nr+c6+CaX//+Nm6eQxu+uJzR+enLlkax0XD83q49/7JR/7+/+Ssa+X2cyf/1T9vZnO4eps85zGzc+WOBu7g7us/d3x8CmAYEs3bbtrWr9w+uDY7/954fnyax9Mnb3/3a+vlr/6tnX/4j/7hf/6f/+M2VLVnsyiybuumYTrqO3HN+dpf29m7Oo2rvlerXXC9cjI7Xycx5LgZ9zw9Cc/ffmHn+btYhXf+7I/zptufVsvVyN5vNpL6syR1bjb7u/vTiV9TirlaD/Yrf+Vv5G5DTnb3Fk/ixvxivns9zKZSNeIrJu98jYiE7HxFW9cvMfrtzQOhcHNVFMkFZlExENqWobKCGoBjZmYkYsdjPxpEKCxGIsnmPEkuArQQITIjOwSCUlPCqCpkgZi3jZRkonEcukFGNkxdv1webbqL87PlZrWMcehW63EY1CSnBAaaBJDEBMSYCQBcCFWomF3b1s6z9+y9z3FEkEm7uHb1DteLarZ/cfgQMgybgVqH3roLe3b/4fcy/NW/9itptf7O177/C3/v77/42ty5r12+GCEYVbVfdx0hkrM3P/Xaj79379rOradPnybJf/79b7rKHGjfn0sGNOcCGemgmzHGdjKt0XQcjIInXuzp8kl35co0Oh4uVgev3majDMiI635k50cjdkEglsMAoqXXLMfM6JmAmC59owJg/TgwE4J4Js9kWUFFJHrPZuaDUzFiHmNCNO8YwG/x5IAsoNv6BldqEcq2iLfEe2FPAGToqWDw1DCrZiUxMlQFByQxm0XnnFqyuEGR1A1Ue/J1Rh3SKtDEeXexPp3Od/uhn+20wDFUVAVvWUDiuD7z9RUNwQzL93PKEMe0XuvmhGDUPGrGoR/qpsqCzoWcc5gvVsM4ayYCtXM1waBj17ST2SxYDld33njv27/XUrM7aTvu2zZ44InnnPKQrXG0MeGJ2w2hS8vXP/Xxt773Ts75wb17z9FNhkLYyaFhg2HIm7bZ2Wnri6NHznGMIzkYlqNTl9J48/lXMUaazRe3r731x9+AOMxmi3vvPNy70tWVr0OY39pfdvlilSyCGonZZG8v6ljtTACcb4Oe63rYLNqZQ/TEw2bdx4gUJjvN2A/YTClQt9xwQPRE5IG0Dn6zWu41EzM+e3jRzHCz6dWRa0IclVJWzc55YL+/WBw9Oo4xzSYhDRAmdRoG57xF14bpn3/92ZTchKtn54enF+cndZc3m2pMjXO7ewsYVk+Ots+CYvrLUUxAZYs69LXPxClnXW7aSV2F0A+DihCTmfRjcsyIkrJ41xBWOZ+WqkLK6gOmFFs3FYgIAo7z0BdbvCeOOQf2tXOO0AiUiBj6LgU3aeqZUsKc5pOGwCLojYPdZyfLOG72rlw97fPubO9k+RCEunVPvp60V37ms18Yjx9Zv7m6d/X9R0ePT/kzv/TxMT46PXkCqpKzqiiAZZEc0XnJ0XJaTA6wwZTHUc3M5TTkcZi04WBvX0V+8Nb91GX2ITSEAKLGLqjIOMSrk8UnXn6tcoSI7IJnL5rNSAEIG1QVzW3dDmNvjokRtj7zjGQpJTMDJVD0Pqj0YMbsTAA5OF8TMTL7BjULa51SASuSGTqmksrxDIRb17Njn9PIwaXYMwTCZCgkBT0+lm9vps7XZpnNI/tAkI2vzu/eZnf0bPnlb37LM5WxiB0VUkKBcTITMRTWqip4j57IOVYQR8SOvXeq4BxlkbbyniE4GpWTeM5IDIRccJoMpmCEwRCs4AHEcoHOIAoYgXgOtu39VUQFxmIqEMiiObCTKGrmPMY+pijb+gRER9TUPmdl54L3cYwIkFO+GDei5gi89xRzaD2zS/2ogJ7ZjAwdEsClq2j3yr5kUNT5Ytfl4eQU/GT22t2Xnn14bzZZuIqX3enq9NBz07T+5Oiwnmgf+7Mf/BAPjwB49+qV6d2XhpjBeL64sruzGzcXuZHZ7l4zCaPp5mw4uvc0L+Ot1180wvLm7iif3//w8McftOg+9qk3XFuJdwY0bnpI+OTt+8vj5Uc//6af1X3KjKAy6iibs/X1m7eURSG7qkLitO5qqHmIh+8/kN5oWk12djQZM8U+enM0MFK1iWeTnV3nLaNtNiMk8WFGQnZx9uir
X/7wS1+pxfngrIJcZhgVE1UGAAElRcccSpcyAYBFDM3TNHz9S//tYAPnRID9MCA5AMmSvHfouMgIplDmlww6vzJHRiJTLiWBQESMZFm2SYiiVWwLjBHZqwmY5WJz0Jy6I2av5sHEciRidj6DIfshj4QWSCYeZLi4/6UvPf7md+/+zM/uvP7Jmhk8+ikZa7depqPNrJmuV+eqVtUzAvSBqp0wudHiVHqXjEnHjCuRZWYLJqAq5HpJqd90TN57poZmB9cTrPK4nu4cbMTC1Tuf+OV/9Pbv/T/OHnw1DeuXd2dO3L2aTjeSzUSsDs6yZLHa+wSYVAmNTBGYVUv7kSfnLJAaZGcJkkUFASZgzIKCDoUQnERvFxHjZlgvUaqxgzigjV6iiQh4pNp5Ry4nGjJkFo2gFsckm87GKMnGtElJ9mbzyzeisu0ukHsqEF8wKy1loIqFkGqogGVBrUiFm4oEUGA4iApQwjlQBl6ln9CSiVBMBSA4IrYQgD2DYQUcVQzIiBwiApMVEG1Q8kL69MKu3fqpk4dvI643m34ce09NE8LFabe3t/fm5z652JmPMXlPFdfe+5S7Jswchn7sMlrlgzNwzAjb2qGspfg9197HpMO4STGqiQthjGkYRjJGJSYOIUiSnGPTsMdqtV5ucucdO9+aC32WfuwmzaJtKsFMSIE9YkmQKGFJi7ikCZF5229diop0i4LbCgnFdES0zdZwCSiBKaABEhHKllSCYFsfSjHBICpvif7FALJtuyoeFrZiIbIsiYhLAxeVzJRkQCwIU1UDK71whltGMgKgimw9Lli0EkK9/IMtuF2KAFj4zYRQeuVVBcmZQSH7lsoxRGTkmBOzY6yT9M2svf7iK08+HPvN49bHMZ6gS5Q9KgiN8/mioxS7xIZ1PQNMm34ZmEG1rhYgIqDBITgbj58uOEz3bp8MoXYpdcMk1Pkc1EZTi0Mse5rSjcWE2ay0hDlv4Dg027ob27q6sCzSACnn6JhsGxZjAkAr8KLCJyqJQQZQES0qA5gClD6yLQS6KDWIZJYBt+qdimJB7agyuQInLnKhFcYwom2pQ7ztpAcsfA/c1pnh9poEBCMTUdRth+Q2PLj9xohcwNxMbstrRjJRQSlaVMlhmWxtSlTShGaElE3QtvksRMKCOiJQtS3W1kBFx6Sm2XFAAgB21L71g29W9UTFLs42g9g33vowUXju6pXz9aqqQiCta0R1j4+OP9t89tH7T3Z3eb6YrjdDM1tE4xw3koRq0kLQQAfFwgiFnIWmJpa3/04ooqC4vWuRITGo/RuUMxcmLxoAA2kWYofb+r8ipRlYuRCEmPByUrYSZyMkclvUNIBqYiIEFLAssQgyBVBdhDPadg4hqCATAG3rBa0AiSCnWO52iAREACJyyTsXBTAkNMkFE4bos2Q0U5FtU8ClEEmIWgjCQJa1eJEM2DmnmgkJDKy0kRJLzkieEEwzIpKjnBIxIGJOW3jlv+UqYi44OslATHvT/Z/+Cz+7OX/S1oTAQx+ZfdfHCbnlajWkVE9m603fNs3JxfFkMfXkhiGncXNxcc5ULXb3To5PfY2pH69fff7Z6TMT6FZnDP366cn5k/uhmd842J9Pq/fuPTzZnAskT2hADXsDa2dtLzLmwVV8unwGXf8rP/ezi6o6fPjg5rVPDGej+Wbn9mvACCDIHkMlxFQ1z33uF955f3Xrszy/3X7/j3748MHy9hfflI7vP7g/X+yQM2qbTR4evPWjxQsftTfv2kl3np7euHl9AmM2NXZggoQqCo7Vql/7x//FBz/+3XuHPwwzlaafX5s+OLo3370RZrv333nvYL/psL/28sFXvv3tT/3SX33hpc+c3HtLhi5Dh4R9ymCVQN316f0PT85Olx9/8xYdHz87O5pV9WYtMaKxO1l2V3b9GtefuPPZe+8/PNgLTTP9zKc+cXj/xycXx5ZkOeQwmSaRh0+Xs+euw9HpwdXFtes3fvDuyWLS+ABq1XRaPzzdmJtcufniZLGIoIrkfUVEpeWPmbcVeVuBgLTc/JVVjRGRC5hGwQyIRbXscIo3kglNLafkgQy3LTi+8uUGImqgwKZxvRz6DhG71XKzWac49JsL05jGLo1pTHlzscwS+83Ssarp5mIlBsNmNYxjqEIaRSW307aQFOsmAGDTBFBzlYeUVcQ7N46jmY+DpJhWAEkkjT0Hl4boquA5qGgzm71y96MvPveaC0EBjy5OU4qAzlXu6OLoj7/+9Zu3P/nv/cP/6Ct//t5ydSGX44EPVFd17uVgb55SkpTf//CdusHRnt18aXF+uFxuMiFjFYb+QjQwOnKGzit49LIaNlCAdpmWh+vr16eH67NZO++sdsGHtZu2jGDHF2eeWRGrNmSk4Oo8Rk8EnlUSOjQ2LGwoQwR13otE9H6waE7VsppVGJJkXzlkYweghgSSEzkUSdyguGxZCD2SAiOCkrOsicEVTBoSGxmYFvabqZRqCjJTyIxgzogJCZ33SYlKg6ZamIaU2HIKRENK4p2oYmgqM0TMZj7sZK0ZB5XRyTp3S59yt75gXGfpzLs+Ze98ThIc9RJHs72duYy8M70RqqYfO6YQu6SayCiNmbJ6dIoKmLp0jE5VSBldXfVLizLdmV91iYZxNWnDxpFqEM5jjstxfT4ur9y4enzWYZaD2e7D9z9g0xTT9f3d8aIzlVBzUrWYqwCTqk7dRrQHpFFyv1xNq8qD0xFJasrmCLz09370nfluZZkIsDbwwV8s17tX9tV7Xzm2lLOgq1Lf53Gj2YD16o2by7OT1jXKoad6tV7teNdvxn4Y5ou2bQitUlDMeT5r0SESKSQlPD1de9ds4upgchCHNKQcJWegiW/j8vT2tRvvvv+93d09M7dZdpIlyXD7zkfuP3hoMQ1958At+3PANI5467kbb73zeH5wd6EWV/3+zvzi0VHlXDYlsAfvPShXAXk2QiNUQCCom+AbX9YvYTpR1X49VG1dEY5dj0AGLCpJcnk6Jk0IYMQcvJKxiKpO2mkcR9ExEK3WpyG0agYCmoXRTZr26Piwms6yZokDkg/sOktDTk3rNuuNRxpzNLbBRl/jfrM7mYZ1TVFWofKARBzOR3nh2ougNiDM2/mDx6cfPDx79dOfsXDy/ofvH6/Phpz6IRKbD1WS0QwcIjr2zSSNaZCMKHEUSQqU9ndn853Fo8fHq+WyrkvET+KYi68cwDzxtK4+++prTVNnVQZyoRQVu8KwVEllQhATU0UjAEySkCDLoJoNFZ0zJsFiXy8UArRsgmnMCTJ454Y8IlhWULOqarNKMgxVm4bBMUFWgUhEBmoiqEbADr1oFjFAVNDgmxgTUDGrjwapZOOzqom6wHnMy/PVR1979cvf/BYRqinx9g0ZAZCY0QhRZYs49M7XLphlBrVs6Kq6qnOKhuyQRAyobL+JKCxVnbFX5eBSKoh0MiOmoCZZIjtHzqkmpmAi3nlHlAZMOvraS85JovOEZjFHM/EekYEMTYmIiQu0A4ZhAKIrV/aeHZ+MaQNZ1SAnURFVQHLrrp81AdTiENsk2cw5R8w
pJ1GLqv/Wdh3ywOt1RxNPTrrl8uLo2Gk1LLvJzl4NcHp8Tja5fv1uFcLq4vjGrcb5EEfdbaXfdF3fHaf87OHxxVnXzmY7V66ue5YYb3/ktcXM5ZyG9frk8FAlzvYm1U6NDQ1jpEDnp8f3f/TW+fHJjRu3brzxqlVORQHVkRu6zbe/+f1qZ/7qz382kVnScvSXq4vFlT1qKFvMGRjRsgRqXcdvfeVrh0+ehUWzc3Mv5WznEXy32N/DKIFaDlXrm6RjTCkDivlAUx7S23/4e+9++bfrrHMKRiRgEpOYoVHwAVkFDEobjrEAe8CCzQHmnVvXzi4ePnr45GBnElwwNRUgpsm0zqPLIlSiK2wqilD6uV2/jGRsCRAACeTS8sBExmiaQUpRsxGRISUAINYcidkATRKZMDE5B4LgQFViGgBN40AGRJSToloV3B5oXj/74Hd+ffPl3/zYz/3y7KU3hx4AXOOq+bSRIVaVD5V3TcUV1XsVzRzNQqIycFBOva4zqmPnmTCNQ9bBMLlW67nfub6jXlWNoQlUKejcY5bUHDx37SO/fPzkvSxPSZaLGifij7OO0QyQ0YgxeC8KDlGQgEzFEBhIDTJ7CpUDUym5FUBAIMttaDyBcLIUciZSYkIYomaIsY6DbpYjETlmKE6eln1g7QdU0aJ3IwABOPNTL12VzjZMFSQ7f/DsckhWREAiLOaGwuhVw1w402gKDsioTDFQ8ji5wJvBwHLZKZqBaCFUAZohaJlMxczIgMgIhdHVzgfPLjhCUWGVWAAwAGzIyACUFYeUVqkNO6/ff/YI00nDAhIX09oBDV1/sHfl9Y9/4uD61TQOKFBPas2y7E4Xsx0F28iG0YJrZ5N5TL1KaXaDYRzRLEYhRDDrU2QiAvA+iMCm2xCTc05SzqaKGMi5AOT8ydG5qExmrWRFc2NMgrCzu1tRUMiohuQMWDUhg2NXFoxWWO0mhfgGW7YL2DZuBnRZdLVd62/HznJtbKuR1LKCMPlLXYmoQB5AEMGVbe4WN1K+stiTwABUlIjVwAwcOTXZklmA4JJDBHCpU1lJffK2HKn8MGWxJInYIVJpFwfLhVdVsmY/8WUIGCFDIcUAFgw2AYOJGYgKcwVgxthMWsl5Qnzj1keXT6bd6n7FiJabOgxp06e1twDepXxR++l5NyALB09mjGY2MKPlQhe2RU2npw+M2M1ubjIJUkXjZD7DjaNxBGJRkZwLP3w7AYNpygQYwHLfb11FdpnukVIDty2lKm6pcoB0S4chKzOWgWghAGJpqhIRAEUCQlbNZiZbAwsiODUVE0QsnxsgEKGBluwYEQGAWjmSwORM5RIzZGrqmETFzLhcqqCIqKqFVawFflTMabC1OpXdfpH/CmvIVFSUnSvthkScczYroQdQyIhU9CZELsJi4S6XVHih2YpocbugQbbSkaZmY06bOsxO1hdJra7Co0dPhpg+PL7YGNTNdEjjZrnxO+3+zkJ6cUB3rl9/5917XuqLJ4eBdnExGfsBXGibuhs2VTsrqDE0IMQieEnpBCi9ZuWOBUBchp2tD8hUtjYwKKwZA8TSNI+ICHIZ9itSixC5kqS0IhIhEPN2Y0GopiqZGC+FUrSSJURSyaWcsKTYsmipZzPNSI5dyBLLfwXajt7l6JsZM0tOBf1upQSgtAoSIXJOkdgZQhYpZkJVpRL5JNxqguX6KplEZoZQPG4AW7GqWEG00PHRI1E2zTkhE7FDKqakjFQysP+2q0hBRIJzPnC3Hv/O3/0fX78+k3HYP5j2682QR5YBSELlTWESfDMNvqrPT9faRUkKk9mg8da1O+dHpwp00UmXUkCNXX7y5F6YVdDabL7z53/yPY5D7aa3b92ZEJ6vTsY8JoIElnrRmDVH8MG5yuHQ9yMR9wPvNTv/4H/2jw7v31+4vZpmXX6Ei7lFB1whFPMKIOLYx83Dh+nh/WbsmuXwB7/9r5+b7h7++dd/8J37btN98e/99ZOLYRJ8RrzRLoYnj+LTt84en9y9/lmcTR7/8I9vfuGnxZIhlrucACqZXzS33/giT93wlUfnm+HGtFqAf/nWbr1a/O7Xnn7yM7/44IN37syrP/vyN/6srn7tH/zj//f/7X8N4+msmT96+Cyn4eHpKXMVKMQBz0He/vC48biYT19/8dZ3vn9/OJFNlwH01Rc/8fTo7W9+88+8uOtXb2WVZmdxg6/dsJl89wGciozWd9FYHj45vHPryvnQTzw6HttmqnGDmVZrxbC/e+16mMyy92rW+Co4XxBVTMREBqVKopzraIYAmEyJEUGRyBSYLGehIpGbmggSp76PMjJbHNM4jjoOKfUpDXkchq7POa3Xq5PHj4d+ncduHIdQhTQM3aZDVJDSD4Eqxp4lF5wcMIGaMTtVM8tVxTllMwh1QNBxjFVwJqhZosayilYRRBz6QURSHNCUCJ33psZ1jUx1qMGAiMnj2Pff+sZX33vvnTc/8tEbz72gPpyfHtaofpdPnq4PP3hvuOhfeOXzr3z07ve/9BvzK9s0Zgh+TGPfrWfTRRPCpsuS/bhZ1YhJRldhBc1q0J2DK72qizzx1SYur17fi31cD1kwB22yuLadnhyefurO337z1vq//rNfP3j1VtPt/srn/9Kf/et/2db1fruz7PrUxyuecBio8jokrXSMucS6hz6NA4wjhGAqhsHYBQVKIo2r2FWx69TM1z6pEFVVqFTGLELeA4F0m7qqVLLEjM4VL2zO4jmogaQcKDBRMZFKToCAZFz2KQVnTkRkAoVAVfqIiQHikEERzCuQY7MUmSo0cuQzYBZlNctaNyHmzGQwrNbdeb24bZNdlLPVxdJVe1jtVa7KQxTJvYBvJvUkxHjWkIO4hJDIeYi0Xo1XDnZ5iBxqcM5yNIPM0RNlSs28cQQ++FUv1f5d1vTsR192kEJFg6TNatA0Lvb3j/MmTHbPM0qiyofz077HYTZZUE1qBuBGMUTedOP+3my+d+Xi+Lziar1eA8GoqQo1uxZUprPdvZ0WTFZnq3qAcRhHgyvXF1U9WR+ulMYrN/acrwL689WZAbDzV2/eevLejwgCWtIuYZ/SpnfoDk9Ob+zeapp6f+fg5MkzNFqvBiVOfQ7C04PpGMeAgSqfk6z7tXfk64AxNYvqqN/s7+3rhek6bc5XtJFnF49T7/IC6spFsLOun125nqvgGt+vhrauZpOdR0/uVy3ffO5WL8ef+pnbP7qHx92wmFQxiYE3rlbng+MulVUhQKh8iolmE1AhNWfYLS+m08XN61cOnx4hu53dhcToGBOxilaVz951OddVFcFykpSTWk7A6FjNPHDtXYprS8mzt5jrSdWNCYDSOBI5RGb2IoKos7bOGl0FxAnMoYWy961CxWqWpW4aVRi6uJhOV+uj6Xx+fr4mg53J9Uphc3HRVLN11LfuPTu4dXdnrzk+edb3KyQQyaJqoDomMAiuMmDnMYOux56MXOWryinAwZX9i/PNO997b1JNGGomDyCQc9X4HA0zeKIp8+c/+om2cgbqsCJkUOTKZ1MihyoAQKBQ4CrFiltXKWkgD6
oEhIQpj+RQJZGgqjpHiGwSwRw7r8qkXseBEBvX5JwcQ8YkFsEYOAqiq13Xrat6frm/JctgRoqxciHlJJiAglkiqs3UNGXQKswQMKXsEMGIDKvad92AYGoIBowERCoGBCG4MmI4xypqBiEEJGCjwI4dex/sUkDw7CsPopmRUxoaP1dDs5QkOzP0wGyaBY2LV4HZIwCRlQ11TJ33PlsulFsTVFVPwdSJDsy+rEzRwLEruQsx6Po4nzfeu1HkaHmet2RIG/oBwKqKAXgymfbjUFV+AK1dNaqYmA9eTR17UwCEnHNdhXIVbFbnnmtGPnn85Pjevb3dm+QnCde3rt7cnJ5Nd6imG7efu/X06cO9/YOdK3swLB+8/b6A7E2rawyrftwMQ6yTw/7Ruz96+P4PuyR/9LU/+sgbL/zlv/wXd2aT7KezqzTdnQvq2Mvm7KINePHOg8c/vsdgn/zZz0yuLAwxx8jgMcvy0ZOz09Wbn/v03q0rImMgrMkfHp1oj5PbU/BqitxUYGojYMKn3/3h4btvh5awwYu8vuYPVAb2Luugkl3rjazPY9To25rEt6l/+o2vfOO3f4PX40IB0IklJFEg74PmBIZiAKaBQXIEIc8OTOIwhqbOiqqIk/lmGS3G48fx4Oqur9vF7s7Z4QkSucCUsdsMjOwrz2QmEQEmTXsw3zE0KjEBclmTIpoKmZmKA0MCMmBEMCUTM2e+Aix8FgGiQIyWEwgSqKEAZxXvHUhkJCbOgECut2RkYUJe2XWbb/13/2L+wvc+92v/I6qmF++/2wdpp7ODl18kBzH21ZXg5yFj7mMPgDiMsRs9eUNnZJIHMZEUOeBkt253D0aCVLucBgdx0rY5DmkzOFAME6Gwf/cjdz/58+/9+HefHr47UJPRkVMCGyP2OaOgJ3XsU06C6Nihs2xijtFxcmY1O1NvDtmYAMEBZMhCPDaovnFJPaMjTJDSqtuMGYw8tR7EhiGKCDsKVSAiJnAGUUYiMbSYxIj9ThsQuAnnp4ON8eTp6XaF7AjMTLbvBaUDl4xQEZQMyzkBaIoIBKiwHR1NU9GC1ESA1AzQZzN3WbUOhFaG2EL8JTNGcATOk/MECOCYjERjKVzTBFwpBiTOOTT+ztnTxz6ftaxs4EPwdejW8dqNa69/7LOTvZ1x6GvmMJu7QIlj3baVJyOEhItqN+Y8pg5MmJDZIeOoo/ONgd90Y85qgFmM0cmg637tgnfeO+SUhworIjSJMY3S94SWzJJSElUVYO/Q1X4Kmkwt+BqRgSADMXtGU0vFjpNViJ2hFOmPkEUFiaC0zF1CgrfCDBMxXsZNSsgLAICAyQC0eH+BiBS3DpUsoyOHQGp6aQopMUEzMyRGZAJDMFHFgpUCLE2dTLwlSyCoanGk/qSy3TneIs4Rmd2WpwuFTVMyywilwxu3nXcEW5UQkBiplGOKCQLitgsKsISqyDnHonEyW5g8vxxy1gu2s27cAFFw9enJo0k7n833xiFPdvcPj+9PQmhCI5rGOCKYr2pj18XRnM4gnz99b5p1Z/f5Rx0mGEwwhIrQeecNTURUMhmCZiQgQgzaTKaTwPV2a1B+MlBRwlLlBgCEoCpSnDZWgE8AaKUlHRTB+UqLMCHZCmIcyEyl2Dou2wJ/YtkzU0InkkotPSCZFpGOijJfFCQ1uewsAyTSYm7QrS+sxBJVpHg2wUo1mGx1RhWFkijHUplXRA7Jadu8zgUIW3IFggiElKVAbAutHgDoJ1CeIgRCOe7bj8tUkpg5romYjJCd2kiuctyenR5NFgeHjz7sx406fP/xs9s3X3VV3dZIuqNmfTIU9cQ5QR+7tOy/8NmXHh8+EMO2bjuNwfsoWRS882CYVRgyIjl2WQUAGPky7CqqsHVJABWy9eVnXpDPRRsFUVPNRIRcSCDK5IhckZFhmwZE1W3HHJKWZVs5nYvVzFQASzUUIHK5frVQovAyb83OxEQEEJFcOZUsq2FJpBXzmEkWRDKV4qQGK1M7qWYDdY713/SNGIG7xE8X17kzA4mpZANLj3bWbFYoW1J+HwAsl3+Ma++mYEZUCicuIbYGqpnZm/27rCIg886xcYzyuU988ea1RdJuOnFnZ2eeoFt108Xkyu7+2eqsbqZ1TadnR4udRXDEWAcXSPNiNmXvu2F1vtkcL09feP5GqCiNMQ2rqtahH370wSOLcf/KtYO9HXRV3637i6USD2LoK08wJgnQKKFYTimbkZnfqWd+1T167+FsNtk8vPfg/gd7L0yHlfjpFQADo23rH+q08V/9V/9Kfvzw4lkvt5vnX7y7OTydXfRXJs2Pf/P3b7bu4Kd+ejOt/UuvHeDs9OGzOku9Pj36jX924+pLB7c/ljbZIHNVy5gwsMboqhoYw2Tn7pu/MKyfPrz3g5tXrqWV7PTds83Fmx979YePn1zdvfGVL//JS3c+/v3vf+WFG9c+cefjP/7ub7FrZpVb5SGQhdpP6+ZiueklP35yvjNtb+zPDh93O83eUK3Yw2Tu7z34YHW+ljzu7+xxZVcWC53cfOeHxwZ248a+wYVGGEV6pafnnbjV825nHDa7811lr8ybztr57rU7r2CowDkDqIldET4BCam0WWORBEwVEBGj5HJ+VqjjZjl24/ri2TguY98PfT90axWN3WYUW5+f5zjUrY/DKCqxH8dNN5k3gNptOssa2iqNWW3r60PLRBJqYnKSNdS+FCuwYxEZx1w8tXHMGCNXbFsMNsUhqSq0lZFD58cIVd2QL+ZcUhDnXMpZJYJjR5BTRmMokraCoYUQzBQMnPdqFq37/T/8rb/wU1+4fe2Wy+N6qZbX0ymLxKPHb//mf/WfvfbxL95+6UWqh8tHgh5cXZzoMKt8Vq3m0xHz7u5Cx3G17tfL8ytV2J9c2ZxfMNb1YjJcnLY1DeszFP2Zz3/q0bOTdNqdnel+uEbz+i9+7mfPj7/5jft7T88vLh4dvr23s7s7BxNHfu/KPK4779yqS0Q0aUPdtsuLzaRpjWwjOF1UdRNq5pSEHQVUY2Biz4xEwU8cVwCY+2GzzHaADt04ZPbsmMezoV7M6jDt+qFbZl9zO21NyZM3I+e8KTIXDyS44JSS8ySWiL2obNlnaIZKAZnZ1EwSEBMjQYhZMpCorLsuorKNdYNxOHcKNkSEoAYpjjO0Lg6RrH7uhXMymLQsB9O9+cYKciL7uhIzYxY1B4yu2VwcZjB1HqliYOIqibD3QN40K2AILXI9am59MAPImQyEKprth/lsVnG3upiF2pbL+aQ1oGo6C/t+1fVNFjS8dvPmN9768f603vQXfRyavemkCkBWTyeNr84vljmL6gAEdesX+5P1s6h5EnN87dZrfX//7PB0db45uLIAdlnw/CLno8fsKu181bIpD+ux9TVCjiAXy6NuOLcDd7gAAQAASURBVJ3ArirdunPz/GJVu3pMeXd/f15XddVcnJ+CxiyZ2aUkgHnolt1Ks6AMSXtJmqbBwWDL5VnbNA+P7w1p1BFMFVEJcb6YnB1fKLnRIG4GRIg5aopHT+67pA1Rjun05IiYJ1M6fHY8v
wKPjx7lOA9w/L/6j/9P/8n/4j9psNWR6ir0cbnY3UIcEYwZRNUMYlIVrifN06fPnAu7+7tnZ8vz84s61K13vgoppyjKPlAIhoqOISs7MmEAUDDnfeoTkCS1qm1zLmsuGVLX+Dk1VVQrVvx6MunWF+wwRwVQdg5NuvVFYLdaX8ymc9DRFHJOoaqH1dohS8Ku65z3EmESAqZVaMmAvvaNt5rm6vUX7uS0enb/3hD75bJLmohRszmAnMwQTNL5almHuvXBuZBi6vsOHTx+sJaE1+YzUEhJNeccs3NsYg6JyKaEf+GNN9rgMigqsaOcBZFzSr4KqKqqznkEimlkdN5VYxwdVtnEgwcBUGZnYxZmB2ZUQjYICjnnSG6CmYydSWb2oCC5R4IomckhWGEtJhgqnrJ3zGEYY+UqoCymxB4BRSnniASiERkBGdAQQwAmRTGrK28aDUEBfFvtVdDUzogQQs4KSt4gONc2fj30ppSSIID3PqYUHM8mbUoxeI8A3Wbd1tO6qRkMROqqIgRUcGisKTACOjU1A3LByBA8MIFlMwrM3bDxHJgCkSCC6Mi+UjNyXiWZCqKo5YCVQK58PYx9gRsx4qjJBwre5RzTkDfdUNcVjuyZAWEymfRDb4CmqSYDzZv1wBO4OrlqomDgXBj6ThWCd7mMNwAAsDx6Nq8nq6PRcn/j6q766nxcvfra7eOHJ3GIO7uzZm+nW522DbXTfUf10eExUrO4MkFIedMvD5eKtrO/W4VqOmnQadcP77794Z9/9Y9+/7d/+7W7d1658+onf/qT0kNMVQLcrV1+9OjJV7+5evjk9ivPP/fmi0MS4OSDJ4fjef/w3Ufs6lc+9urQbabT2gl1p+v1RXfr1i3zpqAApjmxAnW6uv/o7a98PQSiCekEZ1dnbuZjH+tZm7OOqQeIwKjGlt2E/NF3f/S9f/lPN4+e7RiRZ4zFoWRk4n2NYAyGgJKkkGtUrK5DKvkRIgJCQGBv5NerEYcUHI9JM8NsMnfX/Hq56i6WCOC8swyShRQQCGFs0KouO0FCk5S1NF6ZEeh22gQpHFZ0BIgE6NglKwGHSGSGkBUDghYUsnMCyqbj2DtQA5AUCY18ABVFG1NShcYh5aG7973f/L/+R/O9mx+7+5FbH31jcnuarBt618wrqLgbR1FlV7GhjOitzmPS1BGCoU52J6FdUMUYWAgKNMRRTVniOllGUy8qmtfVdEcao50XqvZqUz9d9blb98oGgNGkIW+i4ziOVKqmBEQUNaMxeee8bz0xoGSHCJpQjBFJ2WMwy1UItaMRsyWQmMyMmbwjI58NJEWywlgBUC2bdwR0GDCrSBdmM5g3BuTQVy4k8OO6C9uZsKzJS+4PNIMibGVuA7vUEi5X6gbbIncEA7StaCCmUOYkMLosZVQpZY5aUoUKSIAOsbTwojksNhVTw5hlJHSIaDImiTlPmK8Om1zlEeLggqIYGsfert184ZOf/TiicyShDnVVjUMfU2L002qSNY1jrF0tlrP2JA4MnfeI0HebrWA0dMSYReoqMLKm3I8b9uKrigByHIL3pluFGhT6bkBwBZQGBEgYqhDYe1JRZRcAUEEcew8ExbKz7X+BMo4SomoupdeOvEJJZiMAEDlVKUAfh+QoiGXc/g9bg1LRV7Vww9EMVUTL2O/QIaCBOOZtyRaamXERAMjlnIvrhMhxsRuowrZuV3VrMXKFtYxYPEVFFygjs1jhKhd+jWkRtohIxArtaHteXGa3XKGrlBEZqZwSiIjkkVlVSuNBNmHvXEDkHcRXzj+8h0ldrWRrZApqlnPMCqCm+WC+v9mszvtN66oKQ7JIRIk9O405otHepD49vm9d37gDqYIA+NpV3oaeDI1rJsS6CoTEnpkBwVxV+zABy5euIiiMYBUhYtFxK7ptLTlGuEX1AwkymampIjo0VDMkktK5hablM8SClC40YgYTAARkMQBkABNT0zKjGwCaSiksAxMAUJPyJ6KqpqUI0kwIWSRtQTylRZ5QLReLk21btPRSKgAw0JI5IoJtCVfB05cLFlS3bh1EMsAy4pWvg/LzbJMqYFv4UZEJlRRE8k+axlJOs6aVvhcZ1+Pq2fGxIL7zwdM27F/fvdbJoJL3d/e7bmiqdr3ZXJyvEaieU5iH3oR9/fb9J3/h47eHfB5T8uxFlBm3EHYwM5UiS1LR2oAYCVltWxavVvzghuwKSAvAdEvvZpFcsnVbsjgYAIiU4kcjJNGMiAClcQwMyuuxMfP2GJWLG1mt5PgUsKQWUUVM0aDgRNTMFE1zJkImLhYkIi6lZ4QgkqEg7HV74pmqlNS26aV7zwyVvZOcRTISbu+lCsDlrg2AmHMm4uI+K8oSFtYRUHmwInmiWkufHZcfmAyk/OQErnhY/x2pKCarAitoM9/7lb/1d/JwYTBGAwDnnZs3ziyddieOqparYeimYU6ZRXU6rb1zfT+SxPc//PGmP7772sf4AxhTXHWdjckRpdTlODTV/MVPfyTFleace2WpXD2D7jyPA4UaolrORI3oIIxNO2P27Ovv//Ctf/J//3/FQWRc/+BLX+rPhi/+D/49qjE0QVNmQFBKMTtH/Xr5/I1b5/XjxXOzyY7PyzPqLngFb9x49a1v33/yJ+/f+bmfHxGe+8KnH/+T/9rlOj0hf9TtHtzAUGuYw2bj5xMFY8eITBxkk6gNlgyxeuVjf/P6nU9d3Z8d/eihXDx7Ye/a5vDsxsG1L//xN//T/83/9rf/2W/Ehx/+4I9/+zOf/RTXR9/5/vfmu4shY51gWle7M46jDX3aXczPT4dJVW2GfPPG9df35u/df2Qp9+vx7DTuXltAhe+/973mtbR39WO3b91594N7wcedhUGE02EE9JLo+OjkznPza9f2T8/ixboHbq699Il2Z4+mUzVDQkeuaJMGyM4hkSIW7qBpKveCEosUSWm9+dGP/uzJvff61TqO65xjcF5ExAAda1JizGpgmhJqVgNLUY0wpjFnIWZ0bouiR0CEmBIlYCJFQTBkAAIRRbq8WRZ8P4CrPGQFIEBl4KqerNfniAxCBOA4iCozC1gbKgQwAcbgqpBT9lyBxYorQgaGdRxC8IRGwIrgKycq4FgJZ/tX3nv7XVj3L79w95Drs7NHOa3nOxMntnnywQMYb935uzfu3tp665IOXWIgNh6XI04DVdaPSRTjxnarHck9yXhxcWZ+qhqdz29+8uP3Hz44PH706PAY4cbq9EfN/OB//r/8333jS//0D77zf/zgh1/7a7/4c7/11ccXxt3pWdMEbngccs5JRJxjduhrVHPEtnttDqJVoNdmV9uqljERuWTAgXHMQPwz+1fYVMa+qhsMbuzGq0h7ezfaCcSx278yWZ2vA/lbV26Nthn7lQ/VGCGCBIkVeRoH9jLZOehXEZFNhZBNksTOozKTgRAqEBE7zNkxecdkxuQzZEc+ohGBc8zeL5fnB551veR0XFebrnvkYTKeP+JqX/euwrqPerY5e9Zcux53rghXaknaHWh3JPaOHKEBovEWzkyZhLDrBl8FsbGaTCPG1K1BVGNm
QyInGQM3qjxvbgBiHjpM1gaXbGWMVTvJcUPqDNEsA9eodrY574O/cf3qOMjTJyfdSnav7qOXaochkmsNc469COS9xWS57Asrbr7bzGaVVbQ5Ws73Dm6//vLxo/diXO7v7U4mLVc0blbO+3YxGXtSqXeuvDCOnVlq2gbZGI1VdExXd26fPHliqscXa8sNZlx2m73bO1Ul54+O525ybOPzz9969OShjpokeo9npyeumreNhyy56/YOrj3dbKJCG2q2VFN7sT713nNT9X1fsx82mzD1+3vzo+N1g7Y7a3TsbiyuPHj/sa/Cqh8lQU5pjZY9n57HNz65g/H8f/hrv/D/+S/+M+oykscQfKN99s+/dOtP4OsAsN1tAPgqyKprppV3/tbN60+fPXrh7ou7u4vVZrNeb4gmddNoTCIAETBrhtFA60kTu965qh87VNIETKwSyRjBgG1MgxvdLMz7IdaNNwJSgygoFEIlWXJS58B5RwAefUywXEHjfd1Uow5RIqjOFrur0yViAMsOg6ub2QSZ1qFdfPW7b41gr735klXxg/ffH2J/dHh8cbEODRFzVVegKKxD33nvG18RcBbJshERCjidzDVlUVDTjOK8dwCOAcjnJOyoJvzMax9dTCZRMwoS4Tj0DGTBGXgsEFZgUENAVPKOu6FDEiZzaAyoyCIRQAHUsjITZEVkETUC572qQPGoWx5z9OgUMrsaTEBQNZNHxhrNkzoCz1iJLBXUwELwKUUHDs2j1G7bYSOSB2ZHRKJABqKJnVcDVMtjQu+IaBykbipyPMZYu/Da3RdfeP7mH339D7ctHew1CxGromM3phgcOU8GSs67MPOevCNSYWJmh4ACQ11VgKlgEzy1kgc1CM4BAYiJQlJ0XBkgoCKZSgaDpAOzFxvRlJmJnJgomJkkieRKcw95zzFGQpAsiJgs1xxAYdLUdR3OlysDa6fNMMbCKahDFQCZwHIyNfbejIgcgZbmRyQuz4LZ3vzkwdOTR49vvnD7pB800MuvvLA+3ZyeXCwWV67cuHq2PjZIOwcHk6o6+uBEhub2C6+mCtvgvv+N79b1dHqwO98/6JanwYkMm6Mn48vPXwe+dXy4XF8s/+gPfv973/zj1lWvfuSjBwcHN3cmj3/w/vrhUWvuzU+/7tsazKMPJjkPKS43H759Xwzn+7PJvHVAw9ny/R+8deXgVgg+l4Jy50ggZKZ++d63vluZA4Sji+Wd23dchUMazDSmFFMSS+gc+4YG1IvV7/36/3P11o+n4CY8UUhqhmxITA4k55xTcWMwKig5cpqSEQuyswQiVQhZDMmN0V3Zu3v7+oN3fvwgBH/69LjZid7tLeY7aM4xnZ+cuSoYG5mqZOdB1V57/s3patgsT530ihbH5AIHCoqZTBQQgLICokMroSVgotJZTciEpSy61AdTGeEJzUih7FARAnmCTCKqSuyNBIwMxMyAXIzy/tsf8CC/8Nd/6XtHT9urNw52r9b7s1UePZmoWC/9ag1mSC7HVLW1nzg/r/2sZgcJABg8MQsHhNxrTjkNMbhWBSVn9IExV1faK5/5hJucn/633+s3G9OWgAJYbUaWLReCROUcEWCWiITewGMOFcdN55s6BA85M6JIAhRXGaGxrwRDlsER9X0/DJQlGDgiViAwcOxiGkDR1x7BGM0bVi6krJq1aqc4Dc3eJPeZHJD3FBabc+jO1ltXkQcwLDAXcIDGJrCFrMCWe4+ETIV8SKwWCsffyLQ0kIOaaVkbFEoiGjKVrkQxwMKrUXMADoiBCYiR1BQJnFpFHJUSBFE2nKq00g/d+lHrMTR+VrtxI7nXF1585aU3XnbOIyFvkbQSQiCELDCkIeahrReMPMTeeVe3e2kcCWzoR1NvxmPuiMihc4TOuOv7HBMT176SIWYC70JOYmakmtI4DD0TgHeErEiDpLqqau9I5LILnrdwLYIsmdmrKVNVemm3XNktu5qKY8jACC7nz8JQImQGsKRqhojEJgJFmilwY1N3yVclBPJkVlhEqKZMZABoRkCgaCrEbKoI6qhIc8W9ogZAxISoVGxHhAaFsQI/IVobAGDxmzjnRASBCsuGkBmpoE4LsqhUqgOUTjbVYq8oYFxVZMaSVTPF4rChElxnk+K2SM7x7MoC6c7mMeQepO88GJrzXDvL55tjA6kQm1D1Y29mVeXH1ImoClbcDgJjHohiFWDsjhY7IU1uDzxh1qBah4TEIYRQ+VBXpZCOmMAxEppxVf3EVgSXSPZy4rvSpUWGVvqtyBWIE4Cj8iEYiCYwRQImkiwFakSlhf2SX1M4gFAieFuE1FbGKMGlAhGm0idohrgNJdmWDAUIhLqtLy4i4/bkMbRLOjohiSoVWQQEAbPk4pC6zDOqlZbDrfiLRCil25oIUQs7S0S56BRanDJborkqmMFWy0AkZABIOZFzZkpgwXvIenJ0PI7d46dPu4yrIS0Hf+3Gq+VDJc9jzIh068qr319+fzbdOT8/G8fRt9Xx0Wlor3/5a1//zGs/NZstlv2xJVSFJOoACImIVMzMEEwlb3FKUlLMRszMLucMhGBiWj4cQ3bbTxWEmRAhp1SinXApwSKygZVDVtqBskgJoJkiU8kryiUqhEpy1KyQyrcl4EwBgSWPpkLO+eAgJWRPqKrinDcDBsomJiagpW7ILglxhGRbdxghMBGp5kKGQkIEBlDcqkeybSRBJOcRCNEBGFpi9qoJCVUNROGSlE9EhI3mAcGIXOmoQ2QTM4eXOu+/i7VmZO8RkvzKr/5N1I4ptk2tWff2rp+dPxUSx4zIVb1wVSDtquCJa0VUoPPl2lI6PjsSbz/9Mx//0h9/Mw1pdz61lEPFV3b2nx0+PljsE6LGAYFUEZmd48C8iqfzSQBfn8eNBD+dzKzTlCymQZy+un/3P/7f/8MhizLWV/be+OLn//Rf/lEze3UN3TBGh6HYZjgwMNbV5OVf+Ll/8V/+k//gb/77C17/8T/9frT4i/+H//PTb7/z4OGDG3fa5srB6eHjquLDw/uf/Ot/b5zKwx+8N3v+1c5cotzf//bB9V9Ec2bIBDmN5NiRmVMiVze7WRsI0+ravs0fu+7Eufr02eEbL7zwO7/zL6e7e+7kKdTVn33jq5/45Od/8PaJx3UIfazlfBhWcSz1nt0moublZtVWszHTZD5rXfv4yWFW4CaMhsPIj1O+vpZwuEpdDNxEl6OMo8T53u7heU8y3Lg6DZSW/XjU2e7113evvzJf7BmBGIIhIZWuMfaESAWTBohqGpxHAjVFtHGzibG3HDEND975+rDKwARsmsEYBEzFCJACpBiL1TQN6pyTrH2MkBV9EFUQqCpvTLa1zBmQ96FSVbHI7JgIkcWUkQ1c1hKzpCSJkZBJREWNPUfNhkCe+3GoK5dznEymXIV1v6lrLFwKREJiZM6oAcmS+eCipGwpUOWcM81JtSIvAoAqKauC9+2y1/cerj76qS80x/c+/PBHy+UzyJlJnzz78Ld/95//kv2V7SZ5lYY07OzsN81CMd26Mz+5OFmeHlfVbmwIYobgN8P6YH9/vc7
9mKpJdXK6CX7n6vWZYSt2Mb8zkSr/l7/3nz559/u37+zf/sQL3/32d/tBq+kVP9s1GZJoCJWIsGczG/tUzyaSouTsPaup6P+Pqj8P2mzLzjqxNey9z/CO35Bz3sw71q3x3hpUQoBKlhAggRgFtLEYmqbddmBw2Mbujg47Orod4QHsP7odHe5u48BgHMZBGxBgJJCQQEOVpJKqVFJV6RZVd8458xvf6Qx777WW/9hvCvrPjMjIL7/3Pfucs571PL9Hx34IqCJW1Y7K9+pYcw5OXeV6Qa7Z1Uzo0AyZfAVzfyjiJvM5g4EeohMFGbuhnrTCBBlg1MNrc1+BD9NhG8lBljzsdtvYTQ6OF4fXhrQduoFUgHMeO8gppz6lXT9MJk3rHJuKxqwkiJrHQXbrtFqlMU6P/Hb7wfb83tVrr643j2fVUd3ONk+ftjPchVl95fWdchy2FjckfhxWiOCMUDRm4XaRk5Al2V4E0WYxp7ZOgHUzPbt4eHX54m7YAaihiifHPpuAgg/TceyXB9f6yzNL/WTWpCpIfeS13+4uCa2qWTFPW7zBizOP82Zx4la+rcwzMj25uPRE6EPTzsfT05rZkJomDOPYDRIFr9y+8vbXf+PjH3n12kdv/dY3T7aOAylWc54dJLnIg+XB3b57E50CIVkYh+28XYa2WV2epRSbhlRttT2/cXQzwWNXhdUueoGj5aGXVPsguxRHPe+3Y7JOUFRVtU9p6qr5/MpmOyQfo8V2Or/sh9U4tNXscHH73u4bLvBm2C78UsC6FHeXsewAVmdrFiXmJAwAZ8/ixXoESsOQRNFy9jXT8vD08uzkXsM6/5t/+yuOfDs/GnfGzk5OV/PD6YMPn+0FU8WcVLPGmBHwcrM9OlxMmvbw4PjDDz68eePWpG7QoB97RUB0zmN2wN7nHE0RrST50Tvf1rOHDx8czI9iMiPVgjCqJwjLUYakceLQZUAy50ByUgTN4oG9d0A27DpAIMS2boesmpEdVYHIQRXCeZbgKwx16iGjqDMX6IP37z86Pf/oJz997di/9Z1v5RxX47AZR19XYJmQwZg8SoqzxTLFmGImgJhiHMeDg/nR8fHJ08td19e1B5Ucs/NBEFXAEXvQmefvfv0TbaijJklK5FLOgE4BvassizlBB8gVAMecjSFCUlamsvEz0awCLnCMI7MXEELOFp13kmTSzvrNJriqH7eoRoih8uMQnWPnXRozOAquyUYGSkiipqY5R+89gBI4NVMTAhQjYKcq3nslRTEyTCJgisiOQXOSnF3wvvKiJpaP5s2glA0OF5PPv/mJo+nB6e6MmBg4JiW0aVMrgCIwE3FJiqCYTkLNTNnEqTryWdDAnEPyvO4207Yd80iGVeUyasBgJgSaZAxu9jyuLwXrH/xkiDtAqHwzxJ4YiFR0RKSskZFFDZGrEPoUk2QRif0YyBvC0WLWVnW3Hdo2jEnGMWl5BGUVId+4UDU5xZhi38e6CmY2DDtG8i4wg5nmvN8kjzo0V6e3Du48efisXly5c+eV1en5GPvjW9eXV4/OzlemduXacb+z80froZfFjaWbocbh0fuPGp4cv/jC8uaVzWYtcsHsABsYtlPvm7a9e+3lDIaaLs+f5m7bnTz62te+/Db5iycXvg5U8W987avu+Nry6g0/acFUc9xsL7COr3/qdQ4qfddtdmfPns4O5ge3j0dOeUxV22pSbwHPtg9//a202Ry8sHi8Op1dO8SjCdYIWTXnvu8MGLHJ0uSt/eY/+gcPv/nr001cuMoBKIgkISRF6lVGkeAdZRTVqqqGfkvkjHypYFJDZ2g5EYFlMAe+nm1FFsuDBpENmhDyOG5XK1RwxJNmSsd+1+3GXVciSoyq5n7wu37En1xqHNGKPmUlbcREEmMB+ZSkgCPCUjKkGY1MEvLexwDIZR5GNZAEOSNkMhFE5x04r5Kfb/TVSl91MkkIqJXXZorvnrz/1/+v/+c/9R/+R+N2PL/3oNpMU+VnB5PcD5JMc0JP3MLkyryeNdzWyhiLGpHVA4qR9FnVdMxD1zX1JCMopGY+Aw8RUr8zwMP26LPr7cFq128ULiQ1tQvEJgaMkIFM0hCJMKZMzGggasnIt15VRyFFImJyNQY1P8a8ceiVKI8II+cBdjvMELICq6Gk3GeLQkBASN4JgHNOVdUyoZDDIdhkUtd5N65WqRtC5Z1axaNb7ucCQyAERwxmqKCGVup6SmcqQKm7BQBHZAYK6ACzmkFRCoCIpTRpWaHdFpQ9mEG2whAxAkVAZ6CazbR4BNSE0Ry5bCln2GaXccZ8BLnXeDGt4yR4G7ONlJPcefm1j33q41ghoDlE79mTUxNidkwQBRgm1ZzAIeF0MiOCGAfVMUP2TQ0CEnPVNAiaxpxir3kUiWLKNCF0gpmRclZE7ccOVJDA+QAetmO0DBDCfDKt2DkDZiIKamRmjp2BoIkjRDRHzp6jhwCLOg1ACHstQLA0VZupKj4f3AuqRqCYO4yIATIiiCQiKskfUysyIvvSxU5FpGMuDNwSawMiX0zjCkYlFaaCRPtOAQSRPRKHmctxURMCImIrmKxSRVdSWISqmZ0jRBFTTUXIoILcNipMay7wGkACFFMuGJVSx25aENCiKsUGg+ScB1NFcMAG4K5dQ/AXD0CGrQsMkoZx5zQRVGreQIjJV3WnEvOADiyXc+/MJlkVUIPDJsBqfHjgrz0FHhUJgB3Npm0V6mrScgilvhkMkqqY7T+050ZrLLVx5Q9AzC7lDpARHZJTNEYsTWSApGpcAGFEhQmIhWeDxRJeyNSK6Irms0dQI5gkQC6+M0Msf1U0c1lg7M/ec9OQCRQho7CXiazwaLCQsxWJnmcG9/kzkawmCK4Ek5BIBWzPaAJEJCaRXGA9IhmBiyYlOcE+2FViUmBWnDVFByYAJCBFBSjvJECOixSaU2QmA3hycraOabvLq22+d9pPDm9x0+50M479fLng4By0J31HzaQNFHOcL6p+uxliffXGCwfH+StvffuHfvS7Vu+dpTHFGCvHSFywPIZGyEWi4YL4UTVEIBZTSREQnAuqpRqslM4WNSY75wpai5wr5X5mCmbMrIUGDlJccVmzGYoKogKAmhExECDstaRSbcbMiOacUxMEFFFAJedVsqoqKRGqplLugIhmInvQVdFmC5wN9lItKCA57001awbzQGwmTE61mNfK14WGe6iWagYzcl5EmBiRRDOAFjFrX5RGCIA5J2IPRIQmkogZFQ2MHIskIg/PdaJ/WyrCuO2/+7u++/Of/sL29EPv0Xse8nixegYYhWE2XdZVdXGxqUI7m049EXnW3vfjbohxNp94xDGlb7/1LopbHrb9rkMKg+qzsycYsyZNqW/CJDhUR+ApDcNuvTalSTt58OycqCZ0XddVdeMPqt12/d1v/N7fefTakW9Hk15oUlU5zHwc7/3GN29+/2fZt2QlZbiXUQ1wxdM/8tf++s1Z9f7f//H2hRv1x948yYQv3f2Rv/SH3vnNb22ePBFBMwbV81/7crpaL4/mv/rzPx+aq5/8kR/cXj51CkmsFM
alFqMaSLF6FKAfqogw4JpcW9w9EhpStPdzVT4KhmxCwOBJ5VHZ1h53GZPg22c2C4A5AwEKBwJKachApicWE0sTQwEyKTK4AzQPRiXhzbCQ09ocABNJsCADOpJhYCCUatyIYsGxGHKGpI3PBE5XG1TFM8DH3i9UarLEchZGI3RcZO6WUa2ioc1DxuNntx5kb9oFaKsY9ljBBEogVPEVnAHYR34OgpZ8m4g+BPwMJJ+MYJHao6WVEI2c3NCiETkoMSMiJNR2g3wwn57xPOwRgFgM3LzhWEODGzmXl6VaeqJvddUToh++4YbtNHfPIlwK4lzWnKXZsCGhDxdBngdLFMrWSMk44ACIC2K7mjiV49aRQ09UeY73A5atOlOvGtwXdVeDvUkrnvYPI2NYWpZQNyA0c3VyswbodPPnruwDmPq8XsdjMi0Hbo60qqKlpGBdqqhbjsxw3KMOY8X64+v3x1vt4EoEDgpRzNGJQSWCe83mxnKm88OtxubsuWkuo8tLEJXbKqXqmVMaecRsFGAqWShiLjiPnTm2+984hZYcI4EZqZEKMjEoO7cMjF0bUURaQpTrd7m9EBHR19SqJJMJu8OoYEk/8KgNzc0BCcOag6kUzjE+0CfUBIkytlwjq5O4LS9IGbtEl3M0Oc0rS+owIhCIVJsTLHKfZkbuCKSJP5iZABTFV9YlQ7OZCbEsmUB5ycccJRzVTLLsiICO4MiOhaRpKaWMAYAcF150VTckZhKSVNzFxE/MJRs5OKbq+udVBCz6lPZbSNGkBbL0V0TJuh64GpBmoqQfAhp9ly5WhNW+/z0eXJy5vtJVpB9fVtvx07K44hcsXDML46OXUOeRhQCzJAHX/rr/61F5+eLdvFrK5zp4SLdV/Q/eLVqyZg6i+S7W1gQ238O/+zf+cf//1/+lu//mQRjq33/f2HZchohoF0HIsDIBb1s1fPX12cfL3cNbPm5d02lfH01fNhXB8c7T3aP+o3Qwiy+5T/Zc0EASZJGBER/erV9WE7Ozq8H9rF+x9+WJXtNq03fQbOdQhYrHvxNM6KzPfVAWNQSM6s4A6C1Sq7ar/2aglqZNB3UD15lC512R6zbWhGg5L2edDSSHhwcOyDZ9QqlHh//9nT22ZJBXJdiZXQNsuTu5PA3EYEg1ldbcdtbGlIg6ohytTkNZVlIu48i69pRK/B1a91ov+WSIS7Q9juVyXZvF0NkTGIszliRL5/72G/GR35cP+oqWIuJrNFCLyMvFjs/fzH/2pRLwJFR0klI5CgIBABIWAMbRXaMRUSmi/2+37brOZj6jnS3t4s6cOnnz+VqqrbekxjPWvb2arkc64CR2IIM6DZbFZLHDZW+i9ulaQKLPHBo0fbi7taKmhlzcGM9g4PN0OmMDSzdqsbE+I6high1I0kCvyVt7885iThqJI9tJGbKroTATgNRWaLh4fL+wD/CQBcXW4ijMSS0+fMYbW/JAMCKYBKmAY5vPcgBLq5uT49f/qr3/rSdnv58uT0/PzcozJU/c1wc4s4iy6WhtTdjVUVjg+WRjZW8vlm89Ef/POF0Jv3Vm+8+cTZF21M3W12G4fEzCqqVsB9xCJRZI5N06Req3rmW3v68enp1ftf+8bb7SqGtqrbCAAUG/KpRgHNUEKQisBLf3v94vany7cWGrmq2sBVnFNOBbNFJFdVVzADh+lEx8huYF4QhZGnM7r6BDSZ2NbFTEMlxQc1gRBS4bY52F6/qmbvljAjEuamag5Y6k1RDaFq4liq+dFbN4vDEbT2AuqVh+s0GBTTwgRIWM2b3NH1+e3e0bEors9OD/IbX/7et7fYVYuDzDcLL7eRbk7SLMr9/UNJOAyb+mBZzWelFChiiO3BIuR8/vSz4+bLV5dnxIyuOtjZ56/mRNvrnrGmyHeXayawkrvTM4oYajo62O/XA9e6uv8uWBi3XenX5rnvB2Je7B9/dnZRku638fLmrCtD0zYOYICxbfOoHqqU7eL8LA/97djtzZbRtjSgBHL3tB4lBLPe0c0qEBMr5yefzWLFmYwCo283A5LkTenXOSwYWW6vt1h8frS829whmDBt7tZ1W11dd9JCs1jcbbthvd5bNlUtubRV5Ku7LVYUiEvuwOLVTd/OOKXxxWl/lvqqCjRiVrUx/eCjn1qGFx99/Pjdw3tH4b13ls9/dvYf/vv/m//53/27/+AP3p/uCOQQgkzLsZwLIhaD1cHx1eW1BFksWwl8cXV9sLd/7/j45ORMqoYYQ6zRDJSng1IxLwAORkiMBAh9GiVWHCMgCDIAuiMxp5ymrV3WkjXXsQogJWUYgYgmZ9PYbZtqSYRJkzuUMVMIzaqlQlaSaTawNrRiWLGoZUQobtnpens7lu28njlCGlMaintp2hBigEJVmMVqNox3pUwhEDByAyMmcAYCciIkYUllrJqqqNm0ywfc3zua78fnz5+V0d5558njtx4YGqohEROqKnElDAgZgXIxwWDqBOCWAYKbubKWYlEBDcURjZkto+XiSdyMJDqSVLEbRwTTUpCxWI7SIpC6OqBZUh1wKnZEMNzVFgRhA1VNSI4hTmfHlMcJS2HmIdSeFAgMCqIGqcx6B0Uizzrk0V0BVEupmC1rrEWoUl2CSYxBh2TjOoikvi9qAKCl5DLWdaw4uqPCLg9vqkAAREJh1GQ2AhThOuXiChAiCbqro4OrA2XN7qNwMGczZ5ZUhmkJipNnGgARWcQSMMswjKWoqrmBouY8Rm6QKI+DFpygkpoTgK/X2ybOhu0Q2lmMAurJU9dtDeje8YPb25t+6LmOu1OC+2J/eXaWiXW+JGwIa59XRAZu1OW15hk5bS6eChMDWT92NzcpZXcbig6dxlDXUcrJi1d/Nvv6e29biN2mI67NcGvr/f3DNi5tLCcf/zw+e77YW22eP3/n618uSWEe77qxqqp2r8HscNP97J/+ye3zy1kd46LqNnfoIouGF83BYrlOivdnHsPUf6TD6OaBWd2MdpOJWREJImKKoY5UR6hDqASoBwJQde1ZSyGKsSnjNoTo6CKUGSO3MNbjUJBw0bYQ2zT2uSgCEDGhE09qnsXJl2HAHBx3CA8WmVIVjKSaEQhRp1MwUXBVouhFd7tLfz1guaIZIJMjOAoF9TQtwUwVhaeluzEQsGshtyBAtXTbLXcdzXjUVLNIqNNmI6Lg2cvgJbs7SyzESAIIIj5qQsfkMOMKJRhSNBiKVhXN96qsdPryLkZVpnbZADLD/Ppuc33eXaP3jo5QwMEUAawYEQKi9b1aUURFzM0sEqPmgITMnqmgKvRtHVuhEAKMI6OH5FoGMiDNmhUjj8yUnbIjmGmiqdXHEidwQ/BE5CABmNTVQIkpkJNqRAhIplnSyObDmCLL2Cd3DyLgRu5cBQKf1UGqxuZEEdumKaWomxBSza8PkIiIvOObgKlrATUjgOLGzLCbX3waPxFQAdRdzdUAiHwiucB0ymZDIqRSnKaKH3RgdPRp1Y7IRDwVZ8FkgtgtwXHHEd7xhX1nz4CdlaMOgV2JhVnQDK0EagfUIJEcHZQ5FiV/rX0g+kScRSQ
mUjNiVEcicTOwTJo9JdceYst1owbmIBNlBHG6yQOogyGge3Ibc+rVC0bu01AJ+2qVNzeVbcRMDLOCgRiU7EBIPDF1wQMAu1UoFVPDjFaI2NQDIIFHnqqtqQXLDk0lNaCDULPq4xyP3h5iE9pKEZ0c3ElEswL67PAe3Kn3vQBHHSormHqQsR/6evXGOHtU9t8pGKk/1euf0/YMS45T1q8kBnZ3BOcdAhV+AcnZjcpIQThEIEQGcveSCd0RnQNJnGrbzEi0VLmvytqBsrTJAYmQmFHUUFgACMGE2RFN4kgxF68ZwBVhQsd5cAsgCXHC90yul917N+WAnCz3GNwMM7VFZs/SWDWxbeu0HdqqcYVBFVClEkgO7hgEhKbqYYBdnwzA60UMTOXD+PpZMF0JgLz7rBMHBFdzm5qOphSPKSA5+CSV7oBERO7mryG5DmCqDjZxYXZtQ4jTLxHMbacv7IQmUwRHeo07NCV0JFLLhLzr51IDAHMzcGKBnRaEau670isEmOr8pn8yIqHrri4NkV/32pGZAhghqemUdtNScEIYubtNWSx0d/OdfjghvQFgNV/95Mc/ytkVLQbZDlpU0bFiqWJVCrTNsgx3IerYb0m329uTb777y3/+/JOry/OGaa+NVIqZLea1qV/34+XNqMbYW/fTz7/61mp7+SqGul9fZOwzzmaLPes3YN7EuRuoGVdCVcy9n14My2b94F6c1ayggiAyiWJGhEiUcwJ0QDYrRMIiphl4+jZt4oIjkdoUpZxipwgOQKzqgEocXiOf0cFUM7ozTe8IoAIgqimi7zjXu27yKein4ECMtBNjeQIVAU1RQgc3dcPp5jjZyifG33QJosG06QPH6Q9OjCxQt4xfdPCZuysRuSoiA4KZviaf8y4c7AgAqgVRHHy6FlQLwITOJvddfd8vpKLN+i5QnDXVZtsZQN3MhjTFEwsicRCpWQi2d3eLeXzw8DC0cbi9Op4vrq4/ubr5fFYdmum6HxRTN1zXYUHMDpbSUFeNFg0x1NXq/PbmpvA3fu2vfPYn3//Nb36lv9wIHYDNA0TKd8t2L6tuveRm/u2/8ntn/e2b98JBu7i6un5n8e2sXs9qy9kVkLGkUs+jO7dVefazn7z39lfreexuN+vLzf7eYtvdjikDLU7Or6uwK330XfsXwi4hvUO/T4eU6bm76aDvaPVg7/nnP/3Xf+c7p9vho08//fZ3v+Uppc+ff/pH/+Tq8ue/++/+ezhflAl9B0ST569kQu8vL1ZP7m27kxfnr16cnb37vaal49/47d/6L//e/+1v/ht/60c/eqnFUxORZ+9+85devPqH+0fHx2+3P/zTP+6LN2bHh/NXQ77ddCpXEni9zYFZBIIgZSCq7u3d+3H/LArCJFAD0BQxfe0fMviFTuR/OYT2xQ/ffWYdcCq4KrPFqlsDVzGNCYQjVYf7x+swDn0ibNt5c315Xcby+OGbP/r+97/1zS/92Q9//t3v/JUYmqxYUYXAhhpDAKw1KwWZz5fZtI5ClutKYg1j8VxKzjBux3lbl5IAbLk3X/dDP2TMJRJHFgi1gxIgcYgVciUcIxCZGhiB8diXMqbAdNcNe/vzu4sWxIRDW/tqPi9bY8eDgwPUstw7vEvmxu1q0W2grfZDtQDKDm5DAvAYpYkyjMraT6/Me+89CTGevNyMo1dVLp5mB415NeTcLA8fHT/86OlH8/kKF3t/89f+6p/+N/8lNTUt9g9i7K0FquXB3v7h/ueffNQSGkDTVAGqOe1f9S8zZNf28qo/7ccXH7/I//yHQHQ8Xz7Y2zt6eLS3v394fIjCoSGDsmhHipy7AQBdsMt9M1/OjlcL9OJ4d7sZTi5m87ad1e7oalXVaMnIFBcxa8o6ZuPl4aE0lQgGkX4zMKAQlVwAgQObFSi7qk5zN1DAiSwJO/4+wHRQMEc1R6JhSE2oAL3kbTWf93lc0ArLVmxTBTNNXpw9kNQImdCgHwPOaHZg7SE4aQFMRLNZFDUrbMqujgT7S+oP7p6d+JDahuh0a2ev9o+bbgiPH3/j9P3Pnv78Xx59+zdHW5YBu1zdX9VnVx8cPlk1M9y82kKz7YvUy0e0PenX1yENgSxUEIUtxH5I/djt398nlQh02Cy2V9vR3FmlElXfbIasCbX8G9/57tMPn2LXb7YX7739rZu7UzQ9u7jev3+8Ws6vTs+WywcztKqpLq9P9u8fU1j5ze2inR+uFk8/+yzwbL5oh21Xt5S9PLq39/EHn7dRWGRvtdys3ZSbUKOrq6PRdky5QNtUq6Y+v7rWQVFpWOejBxVG3gzbu/NTN6jbukCmillkuOuJOOnY1DX0pnXoIDFKvx4j0ca0qeqD9uDq4kyzjQMYcTeG6g4DeHNv1if2VtJo6vD4zWOs8Op80B93Dc/zLf1f/9Pff/u9L01XQbZiWJxAophr0WyuVV2J8IvPX77x5HGI4eje4csXF288vr9YzIbt7Wr/aLyDPIzCAYWDV+ACDpoHwmrCMVSxAsQgbAAx8LbbsnuIjQDmvq+qys2iBHIPCKOmuopgOIxrJG7apavnsYTQLpd7l1cv2kowUrJszKqW81g1M3ACgu24rmjmEC/W1w6AGB3QDNS879Nk82bGlF1CEImMEISLEaGYQyVNKVk1TXUyWlRLmcLfdajUEhLMVm0L3I93DVV5k957681QEQZGxSBkRQFQ3YBrZ0Fz5sqSWilaEEQtTyiGONiAVoP26GQ++aI9CBMzA4tM6yZjJHWaejgIEUCJzNyKuxHozgNg5s4cHDIATcm/oi6AY05E6GjkBoBMOJYtMSPBaKWpFgyoPgGAgJALZCKJwABRICCKxFm2QlaIQtentt1PdGs5VXWsrFGk0NQCPAWd1a3mZZ+GafkZQq1aTA29AACjgCNBFMQyVY7hdIB25poA2dl5t65UUyZGBBbJPgaoAgd3MFM3RyZ0IKRYRwPrx46Imqqqq8rNBgBmSsmCIEsQ4du7u6Eux0f7yS3lPA6jq5esBjulpWkbeu0qKrkIB8JcCTlibOvcjyEVAjQdm2oWlR06TMUd0jhaytYXSE7CaSzNqg579V5VvffuXDLYhnBGYgELCgIxZckYsVnOoWvSq6unH36+vbi5cpkd3+e39kliKp77AW77V3/6k7uzszffefPk8vy69Kvjw+Jc2kAzGuecUMIqciAozkhIUkAZgwgVV82TSBSmTB8BejHhRlB0TFJSKYokaMgyNY/nwOSloIiBO4CVDCU3TZuLWxlHtSqKFnAnDoJeaFpzG2pxIiZGJAJwIQFwAneYhANzDJMUxFN9izkBl5KYgmkREXXbEbCnUW06AINOXdMKaIjA7AaWCpEPw4DgTYiKaEURyNyZxWNkqzWl4CYsZoqahcg4BrVYeoaiSo6RYwtliBQisptHoHG0WTO/RZ3dO6LFgkZYNPtciqbBGOPh3MY9HIaC3mdL03ErFWREclPzAgzOGJL2FJlFtO96QkGQIKBqRoDA0ugQgDN7oOieuwoZ0KTCQQckCjFUdaOWZCqOTAoI6ojKpTgQSwhkQGpKbI
GKmggGd0ua1fuxq6L03TD1Eatb0WLZmElc+n7TrJYgHGZ1mcUSWVASQmSWwMBEOx8JgLNntyn0WrJm1AJg6LDbVL+GpyC4ITO4m5ma27QMh6l63R0BaWeymEiIbm4GZo4AZibMzEQTW9l3HUwkxEg+xSfAaXI+AyIzArhZdCIEACQBxkiAQiwsdWgSc8RgZgrWxjhF+A3ECUQEdFIHGADMFNxel60ZEqhmMCC0PKaAjoGAmul242Y4AZyQHSfMbdJSsm778dY4U0ihgZJ0bSnuVYNt9ll00DFZQk2Oo5uaNkgJKDJRSi1RcJvCUBElqQKRmQsSmgeQoWR2BHMPoUevFnvXNPPlA5zVEILUAOqmXkmNjk1oe1fGiucPke6821DJVQzZ2oL7zaOjsv+w1LOEBKZSH+Dsm+WypZurmHt3ZdglyHw3Kk1s5mnWmDq6iFmMmEiAp6olCxKJmFnASCgwqTOS2Yxd+0tNVwYNzB4yCxIDMiJPMjC/hiQ7sroDLZBqwLtAnNQcgVkiA6IbORgwE2AxVGPMiEXXzFqTMGVAUnMwGZVOE+MVfbmeV9KlnBlNCFQ4BA5kPoB5cQvMjK4MSEgTPWqX65pa1f/C9OTFkCeCPxUt5lPCSEx1Rzt2ADdEMmR7DWkyNwJWMwdllqn+GBAJaeKfuJm7AbADTPEu98k2ghMQaJr8bdc0NoGh3KwQ8ReS0NSKhRP/3w3Bd1AwoCm4O13AjGhTiFR3zk9ANstE+LrXbKr2QidjktcNSK/Lwiap1sjczCcPFDKJoZlnVLy9vL67uyPCy5s1VfPL2yEVY6d2MctZOUqfkwRmGBrMl5v1SpYfPv3o/OZsJfV+JZEdTKsmWsndkM8uthmrZtZeXN9cjA4n6f4iAEP/8lW7nbcHD92SIauXGDDnsTBVscp5TVzFprq425p37715JMJqDpP7yZVEDJSnrkMk5uhWXIs7oILT9PDZvfGEqGZI8Nq8o69705yQ1QxASxkdeCpP9Klzzl7fFwGnDDgROdFEvn+Nn/IJPgUOhgZghEhIqlOyz4l2UpSZEtL02ZhQ8WA4+ZWm86BpJnSE6RNLhGRadvKiEROrTZBydkd0R9mp+1YKAiAx047nPflAmUnNyBwR3b4IX72WiohRzW42dyEEG1LX92nMGhYxYLdZL/fash1C3VRi6EAQy7bUFM5enSbfvPfVdz786LNuvZnNFn0PUu1lR9Oy3nYAVrQAwHxRjaifn51+93u/dnF9Mo5rmt8Xwu32pq3x7vrl0WrxnNJ6+6reX16c3K63vL/aP/7a44/+0//LX/8f/K9uum61nI/b0SMzMzKFunKC3A/D5vL66nocQvP8qvJVX/T2/PRu6BL6gzcfvP/5UyAq2ekXH4ApJPvFPQCny8ocm1Xz4ac/uz67+Ld/5X+xWu0/P7/bu3/vy9960l292H989A/+3v/pu7/+txdPHq4vLtoQEJxjtNnKDEyVpJH5wfrqD0MdPv7kJ3/205/90u/8ndt1NyPokz964+3Suwz5/oP7Z8P68uLyO9/99T/qh+9+6cn5xdnLn33w3pffODs9GUdjlFlVL+bNnfXzuaCra8l5FCEmu17fBkYhIEIBLGoOYPb6Dvf6bAVffKO773EniU6/izue9+5PpVHbdiEykISKmUJgpno2Gwuoi0EhlEChKBwf7/38px9vf/bhaC3wohTGqbKRjMSTDSFWoEm1VHW0lAi9pA7RYByO5vW6L5tNN1vMh3S7vtlyAERDU3RZHewVtbEoOEYJQiGXQqGeThKT33Xa+1+9etX36/Zg3kEfGM+e1d3Y5+IIrNnJ8N7h8e3NVd+vm9gO1OSEq9Xe1gbejsm5DQEMJTCAg7maoaHhbjwgGHPqfum3fm1M+PlPfrbdjFfXN029KCXfXD67ffkRstzeXiKWP37+viskpdnBquLZWwfvfPSzn+RtPwzbppTcFSeKwjmPl7enJDgM3epouV1iThkZIwRE2qT0s89f5k9fkGEThVHqtpnN2+XBYv/wYFbXe8ez2aIydASY7VfrnszHNPQi0cy69WgOsQ55u0UzEurTCAgS8eitN2MUdU13qQRx9b5LIcpkoCyuhDiZC6Z2CwNH16m4xHx3TjczYjHPQEgiQ8l1ZNMc3CF1alAtj/z0kiEBZAcgwc3m9sFureIx4Bh4IE5pFKmkngPJkFSLN5HL7eXV00++9L2/crWxxcNDfFaNecBG6pbSq1cYVknjTRlms/nl2edvzX5LsBo72Obh7Pyzw3kL3a1kNpUG25NuzRIjVY8ePr48P7m5vAFGB8/DiCIudcEadTi9ufzKl949eXFRNfGtJ0d3236zHkOIsWop1d///p97n5f79d7q6KK7GYdOVIZB07wbAyYv67Gj2Fhp9vYf3K3XbnermrCs7263s5lsu0HFedlYW8GI55frqm5DoKHP/baQVacvz588Puy21xLiYjVbb8YQhSJsuzWIHr91/PFwEqoqxLjhG2wKeGUFm8X8+ctPn7zxuN+mpqoX+20vGCtqDg4ubrq3vrw/bjabsTvam5W7ZObbYWuCDUUX5xgGcs4h9dvRtxtPs9mss3TvweKyZMWQoCqA2/V220cW/1c/+GD3SJBgxaZTi5mzhGHMALDanx/b0fnJedXO9g72jo8Ozs/P27YtkPtRZ8vlbboDntZQMk2SAPWEhJSqQke1AuiuVgwIg3kpYOZAsUbhYegxxFwGQAIXc6yq4CAALhKGfgSXYrrttpQVqDfBMW2F58xxFsQQ1NQNAtQ1VplsKMWA2nalQ1fGUi+a0IIje3FwRIaqDbfrKxEupQAyctQyOhoRFx0IBHZGcpRQBaRu2I5lZInIBASLZhmQtpstsYMjIwFTqCKgM4W7zV0d9ggr841ISF6mI+AUdHNPuQwAOuQ7Sylr8WLm6MBqhd1yKaZeUplwmOCFMO4KONwcsruaeQjChIhehbrPnYMKcxpKEDEoSE7CboqI/bBpaslqhmXiviFUMdaq6m4+uazJiucpTg9ORIyE2bKRl1KaWDFTLr2ZukPVNC5oQw5cAQQDEAmImnNSyOoahIeSiJCR1Z2YUA3QBMF0UNMQGrNSAIigaCaSYtkwAIm5EQmqowuiIhI7MYWSBgQg4ZKyg+eUNtdrL2BMuVgQ2lsthm4w11DFlFXV3Hk+q/ZWi2azqdtmvgiBxLUIMzIwStHStg0zlpLx9bPAkcfetaA55RF6HWaBcxlZyECtjH0pUTTWywDc5+KRmEJoAiNLHHm/iffmX//lL++f2frHt2F55BXU5KvZ/quT53Ub1RoHjqrFnBd1O+5Vcbl5cf3RH/5k76tvto8ew+HB4htvvbjtXl2O82b+4ctnY9k+fvigWtY24uHhwd141ZdNs1hpyci1uat7qCvNY7ahqubj2AkHJ5LAu8pdADUEaYEicwyL5SjsxYhIjZBFtQSilDO6IZPmIohTH6K7OTASABi6T6eOKSLhphwZkN0NWcAntK0Ts+aMgsBToyIJRdVhR0oEnhrWi+ZAbKZT+MLciBiJFaYqEQJAMzecY
hUIpsgICMUzg3sxMMzZEUFVTTOyZOKmbmBMGAS1qJmLGKATxxCGNBBMvW6I4IzaAjPhWFSQtqkPR3NrBIWl6yumg+XB6fV5yjZ/vIxvhsvxLp2cOQEBqSkIFrOiykQk04ynEtC1mBb1HohNxFWQ2UyjCGUBBiDNOpJyjVgsW1GYOpC8BAXrcjDn2CiAEAxuQzY0r1myahVIVbP2BQNxZGLLqprRgQjNqZBwlBHRYqUloQiTEiI5RgoVkRL17sJUBEvJ4iFIwGmexF1LihkScU7TcjnAxPrbHSHNlNwMjYiQHBWJyYkFi071yw4gzO4QUKYIlwGaqttU3QRAbGCRAoCzIxgQT2EaAPcpbWSALASmiLCbwGiiujA6CgcHJhYoFpmJXAgIFLwQIxFrTsReLBdTlAigAE6EZglph6lWcyEWp5KHIBwBchlJUJChdGXbQXMfqZ2AX8zTIQoYA1pJaSilzz5ihAS2sa4HHaHEiLJcSBO311fSd9YDG+ZBEbEimokYZDWrmMBsagyzrByomQLUjmwqgGClDbItZYGgZvPl3hBncfFgTSEGdCyAGEKMxAEASo/b0+hjFaqNgoR52+revD3ZBlocQFimWdA6UKxj3pICYkyw56tvmJ236w+CdlHBtNTUJlADM4AAYDu/DZiDTXBvkcmJQ+RMCJ6ZTIAUo0IfiIA8ClG3RdwaZpb9jFPfG01Co1CwSYkoA4ATiiINNBuNK0YCE2RAIqCaoEbaIDpjsizTrA5GQC5VBjHXwJyKsnmNZAGLzD7bFCF6r2bMd81MSF1RQVGICnm1325QJ0A4OdjUWQYTe4rdbRI5fzFLMTqSu4GpwyTGmJlNN6spZYnOvrOlONFUIOVOjkTo4KCIYGVn2ZiKiKZH4pRIcn9NAXdwL440RUamEiYiMnMAVFd3I2ffOfXQEXcTHqKpIQDAVPzFbuZAU9Bykm6JoKgiThYJZ6IJ/uZI6FC0EHHRiW1SJly3T+glQEQ2VQNkCtlGd/CJAOlFqH51eQEhWE51XSfgbTcg+HxWDeOIEp04lZRNXbMlqMOqmJ+cf/5gNbtX1V7G7ZijcEr5dj30BtOisru9WjYYV4vTzu7Ou7cOmkXNDOXu9FWctcujx3lMmnPKGmOIddtt1xUhoQ/Zbjp+dbl5fG8JnpEnhjxPNB93Z46ljCKCNIHaaQprE6EBOJFZBmRiNpscVLvmBQASiaUkxy/eHUf0yXs1jdXuxsxTZ+v0NRNuekqBEZObm01oaSTa+b/8ddLaJzOaFvXdEpB4YsCTqRGSue4QSICw+3rQXIhlKq1Ty+RGRLsjzeu/kwB0olaREMtOgoQd/8gnNQSAd/4mA999X7+QisAhjX3XrZezWSTOm6tZs/zKl799erWezzb7+7OS+8XqsIrtvJlv1+vrkxedbQHK0N2enHalGwOHfkyGWFVV33clZStFVW/79dHh3nKPqcXlon70YG9vrp+n7f033zn9/DZttrOaa0HH9OLFZ78iv7eM7bjFX/m13/jjH/5LsEuuNtUiQE1qpqYV125mOXebTbtacVWvz9ZvffVbi+bw/ltf+sE/++Nh6J59fjKuUxl0u7l7+epFjKxadnKK/wWVaAph7kJoaIZ9Xx68ee83v/nrmO9++Zfe/smf/nB5Xz7+yR/LTYf63m/+2//u4vhtruizP/vR8smTYgXcEIWAIA8sQdNVqPFH//g/X7zx9snnr+b/nZYrJqyXR4fV6u395beev/+DveVhyrBVpTq8vPx42fzVn55+enm75f3Zxe26bsNiNVe7I7V5HRAyYQALxKJZ53U9jolqsQzu0wxgk9HRfRc3hR2JDP+yUvQXf+wCahNECwDqOoSKQxWlaTRnY5ot2mbeZgXTPHQ37XwBtLo4v4yVvPj08+Fq/6A93ttbhYr7YSSzRtoR8gCIAAEAAElEQVRtGsDMTaYrp50vcOxSv61CK1V7fXH+n/9X//cPP/qsDLq3nL/35K3Hbzz+0jtv3NxeqeFm3QFiXdVQYLlaDilJVceqGTaDm+68U4BlzKGKdV1XdaVZqYpgOp+3TrS/PN5ut1EakYFCqNrF+YtTpkWUG6n5/tHsOiXKGoXVoArBnSeSnFjRqU8HAADOr2+j+5/+0T/lSH/nX/vvXnXdjY0//+HP5vPIIVhS8tFU9/cWCDSManl4/snF4Wr/bPODlq7vrb6a8nrVyMB83Y1kNpQ+DTBv5lUrV5fnt7eXoVgxE2GAgkAQEIgRKKmRpdTrUPoXL1+Cg5tXDQeG+w8PDx8dtfur4ikulkeP7oXAgNptxlKslJFDQAAO1MRQxmSekVRxACMRKGkkRwmI7Gbg6sT4mgAB5tMBD9XMsRBiMSNHBMbXxXpsDqVMPYyWMrNgHtDUDah5YNICJVUFqIhaJzAkBCx9t9lsj994a0Q0B47RjAwtSK1d6l9e2+mmXN34qtEWm8dHlz/5KEjDoMPFeraaL/aqi1dXX/ree5/84cXTP/vD8JVvai2hbvLd8Ojo3t3m/Kh9eHtywu3iydv3nn78wzeOG0NxgKI8Fh2Tj6x7bz6qNAN6CMuZDW9/+QFqOX91enz0uM/P28bLmGzEStrlfG/vjXBx9ukvffNX/+VPfhRic7i4j7cdGZ+fn7vlvfny4eGTD55+1pcxBp63zaKFIH637ReLpu+67W1HRHWInL3f9n1KTR0JwuXJ7Xy2N2+qofQYlKf+9EoArWQakxrBi/NzpWGvWZ2cnMS62d8/PDx48mfvv78XeLE83tz2xfyu3z54tD/2nand3mUMsejQzmSz8VSKj1tACDFiS4TlqrsDDF03KmMrzXY7gLnTIA7bfrP3xr4tKu233bVitm4sKd92/a4BjZG9QGCZ4vl1DE1dn7w6m83qUMnxg6OLs6uLV8Pewf7B8V5321UST189ffONx7FqLI9lUBZONoRAaOTmRJRTIkAkZGdGQXIIVApYKqUok0wmlzQMUQJX1BgM/dhUzdB3gbAKwQoUBoi6WtS3qfvw80+/8/XvEYVh0D6PITIyZE9tfewYQYCwEPrebMaEwLPbpCFUgKnv+sW7j1NJIVYAEEMk05Sn5+tok5UWWCiggyIAYYyNloHZQoRFaKLUKZUhdU3VWow4g1gzixDy5HZx1JrnLe8LNmCG7q6jW3Jzy1PziGfLxUb3YObqIBxK2iCiFS9W4rJKZeuOImE7dg5u5uzuahSk5BKcAlfF3HIxZzAlRzA1VaH97IUpmm2CiJYBkK2YGmSFbEULxLoxVWYfU1fFmSogCUk0L6kMCExGybQJVc49uMdKUKiAeTGXWBxCVRkoIqQ0EjQi3qfcRELUtqoQOaAwYc5FJBDT2A/kLHVTijEU2CEnwMgCM7GMuiUE1R64AmRVFSQmLlayaxVq9JzKSFNMZSKZAFDgZAkAiImIMMjopYAhQrdNqo7IWe1m3TvSwcG8retum63CWoJnX6fN0d6iFLi5vnXw7XZTh+r18iya9U0Tb7cpj5iLQrH9BlaNOFddhlEVCVRp8JzAoA6xqmccKmem5V0a0xbuXpWVzIpf
nd+c3X/nreuLkzBfzZv2+uRn9/e+2z546+r5aRtmccY3L9d79w4ZFg+ZytXm/M/+kQnjN7++2l89+s1feX7z4q6/WX/0sgMqJVVVvR5u1XVvvj+ozWKthMKx5OxgpiNkp1jHEAHYDbRM40bxwCKBm5aCsLCHFJoax6TDiMQkbdEenB1ZQu2eahbUwkGcmMzTkDywFndVYSY1QOeqchLnXbZMS6EgSKTmQIAiAEbEaMVNzcfXS1ogRyA2UyYCMDSd/PmTTufwekZCJCdH9V+AGxwYs+UEZYro9kCE7LoNaKhqZVTLIIGEc781VyN2lJz7oiUwxyp2fbEYoarKGIsDmDKxxEoBPI9hFubLeXSkyD1t2sX+sR69uuljtV+aEJaVzKhcZyVyJzMDInVnsIjioKYKHBCdmKdy4uQKUjkSgjGAWwFwJi6AVaxEDbNDjgls0IKMauCEVfE+d4ysJSeQAqAG6G7gXBAFgUTcIQ1FPZsXgwqJXIQgJzPXMItjSWXs2RFBs5plQeQ0FFnMK6lrDNkVmGqsibx4AiHi3YnIEkwhxqzIk3AHikjgioCqQCSIgjaFYkogVgNGYAIHCyHGEIWrmivCyCRq7m5gxUyLTeaGMvWsIpMhFXUAMFD3woiArGaGu5GFwJGQhQExiMDkUgRCFqqhRgxGgagSaqq41WLFBaiRuDEFBEZ0kglY62DoCIRuxhIAHUtBc1adbvwAXjRXQp5LsZvQMEhVcIoYCygKuOdBh032TcJ1z5vLoR8wb1IiYEEcBRI33cFhd8PzluLoo/VM4IPPY9hAJ0yuIIhNYGGcRWFVERDG4kDEWhTRGZARIlFGgswNSRyD1MGodSL2fqIaY0HJ6uN1RIwiIs6agmSzjc8e+f3HJQSDzKCgGvItAkloANLAAWURKFa6ZVQUzq7shpPki47uwmRuu1UFgQQhnFp8AdkwMIISEVGlhQE8ogMwhMM+vyBv6tm7HlqQ6O408dTAyB3JnAk9TrP4yE1v7b50AE7ERIwOjWFjxgaJXATRXDCQA6OowYhgDlqU3IIh6JYgGsWR4emQK6geSC3ooObOfTLIoKomTBEJi7uDEDjtkkPmuBugvmA0AUzEHncCUldmnvJaOIk8EwxIC6NMwsCkpeLUlrRTSN3UkKYgGzk4IsGOGu5Tbki1EBAS+fR3wJTPtKkKjQHcdXc7RN4VNNFUdOUAgBOtBg2nHfAkT7hNmHg3AwBVnSwkk4nJ3W0XjtsxrQkQnKyUyZvkjjs1fBd9UnBzQFVDmBwrmQShSM7penO77sbNXc9NfbPZOjqyVHXdpwRSYWhKTg4U6r1nly/vLs+OW/nywWHAXjB3llVtNL/aDneGLvFmXFduDxq610aswg/uNtbUd9k5mt1eLxaL9YuPZiQsMySwZN6glqxFi+V5Xa/7HqT5+MXVcrZazaucR+QgFADALBNSKRl5VzzPzAik6tO7gCQIyBQReKqK25XcTTerqSNP2Kww81RGr1ocjEjApg673UhOuw8P2tRYsrOu2USVQmQHUy0TuBoAmBjBTW0XOHOc7BiuU0QQeIKjI03kPiQmElUDB+YwbW6YpJSCu1QaIvD0qdOSnBkcEBmAAZGwEJGqAgAx+2SK0oK7fw464ReCyU4qkhCJKDSBCVVtb++waao/efnBb/zW/+T25GdRx+XiYLF/hFSpwZBTvfdgGF/dXt6g08nT68hLxSHWbR4327FXz92wVS1ufnTveP9gVrWsWt576w0wv73cRI4nT58SzFdtI8y3m02WgKtZcY7Vss/2/MP3MdkHf/zzdv4IQjX261jF0LbODBAKJhQxV/H+4x//ya/81t/4l//fP/j0g5+/ePG8z4N6j8LouL65DciErJYRgHwXvfJf4Kx9IsAjQhVELL378MGv/+7vXF33J9dnZ6d3OUG3zr/1W2+dPTs5/to3tKY89kFGHe+wng3DEKqaA1y9eLZ49JU0DPXynWq5rpq9L739XgVeMaXtsN0Ms3r52Scvvvr1r91dvMRSHj6cnV28PLh/vE7j0/PTg6Pl1dW6CW237meLuFrOsqamDlQJkm/Xo4O5FVJYX28CMxrgBI8xn0olVPW1IuSANHGt/yKTyb8Qjf4Sx8gBwLJWUjMSA3MErhsRAQwsIbYyV0NiId7erauDeP/xo3uHxxkaDDQFzUMVuzIaqKAQiapyxQ45jYmxyur/wX/8H33y2eeKAM7geHd988nZFXz/B8s5vXl8/Ff/6m/zOC5m8ykGXtdtckQkFrKSN+tb4icKruBVWztgNZ/N9/cLYd3Wl5+dzWbzu/Wm3atHS6uj/cvzqzT29+8fnLyqqtViNhzaAGhB1BrH0dTMiiMqkYNqLqlALorj9JJ85zfeffbxq7hOYOM/+mf/j8X+8a/+9r9++cmrTz784Etf+/LTl0/ffHiUNc3n9dXlzcnLF/cePF7Nlow+juVbX//V3/ne7/xn/6//85i29WImISAVYouh7XJON6MM3m+GwLUzGQEoTFUa7lociDAwomUbJ18ygZOaabZPnp38/POX5g4IEqOI7C0X77z75nJ5cHB0OJuv6jaWNIpAHkcvSuRj37NbVosiqqUKkrOikrDgxH8BMjOm4MBmE2LNWRhgZzR9LY5PnR2ECMruIeiQNaW2DurQpeKHx3dVjsFBSx42IdZjHi2KgMbA5qCAkWBbkoVaSRUNmdGpXc7KRbT1xtkGFI+zwydvRNPONrP5Sm/K4YO2OoKhOwNAL5ty9tRM+GCWSqlinceU9pz389XNJ28cfVMUM+Hi4dH2+Xk1C7nPsa6qup3X8frkQlPJjF/50sP3f/Czq7NhsVx98umn2TK6Y5FhsMMni9vhdntajtr2Rz/9QXe72VuFYeyWy8psiHvt3e3t7c313fVmGLVZxNW8Pr+4bmT/bj30Q6lDKTmPQ7dcLaPJtruumNo6mtl2GGdHC4k4blXJDvYWrpJK6nNqOAx9f7252j9ciMN2ux2hHB/s3a1z2tir7bOjptJh7Vr6vgfiBw+O0XFWRRsLMTTLioJfXdwVhe26kxCGnHMhQ27ms2CDstVTiIhqJa0l6DYHprqtLz+4GyCVlOehbZqq3ZftdkPVjk9hanWsTQ0ZLfs4prptSUK3Tg4+W9TH9462m+3LZy8ePXkUYmSmPdhf92skJmEUlrrOfUbAcRzdnQJ7KRwbdB+3m8PD45vtCyPv8u1ydgA59imFnNWs7/t2vyk6TPjnPvUYBEC7sUtZ22ZO2YWJg6w3d58/+zTWoZgRg5s10gSM7gU8abE85kWMd7fn7awNFOo2aikBKW3H6fyUU64leHED1GKxjo5GQgBi5kA8XRSEgOQ5jyAQQ+1OjAQM9XwxDmOsQtPGXBIAMImESnOuqkokSgDEVFSr2GpJwAygwG7uwzCCg5MHrtQU3IjRSiGSHeqoZEAIISBhEBmRA1dWjIlQgQADhaxQQKuwl3JvSIABESXEAlZIo5tqEcGcrIqxUGEGIgsuiMJQmY9Fc5B2alCJEmw6soCxCzmn7ADqVCJFoomyGQulw72ji6u
njYRSRgSYLeZFgQIFCIRctG+bedcnNyMJO3aJuZBYcXUa89hEUzW3wLEuZQDHrImZkVikUQPGYKTmo6MiAjo7olkBCkQMjr4LwAAzZ/WcS0gpEAmhp4JmalaKTQfvwJzGMqSUtJ2ztIuaBGehfnV9ls36Qdkx5aIAgAFfD8n7R7Nc8+31cNdlRLaMVRVig1UrQzcEb2bIxdRCACCd1dWijgJ7HFbSbu96qme9VjOvKiPVNJuJR1s83O9hszxaHHf38RYTdjyWcXudwVQhG1zlbv+tYzoPi7u1XG+7f/JHWPtq8z2L4+ze2+2Xjx3XsAzgZKmQSz+YiueiwtEmiKUDSaU5DUOCStwLODoGQBAJxQpqwr73krH23hMEZkKOosktF5Ewpg0jgmUAg9QH4TKhjImZQiZi4imUykI7xiuyu8KENWF0VEdG4YlhzMxFC7oBoXkBAKJYdALVJ0Q0JEAGR8Sp7scBUR2IBN3c8lT8hzt5EU0h5VIwp5QisNRNccA6VqA5D0QRJIoF0gJeOMZS8lRQzyKZgyLmIVMlGRXImaO7V4w5GQBEkYodnOPgbArM+289oKp699GTix98WM8P7mxb0LkGYTBAEnRVQieayC3TqzDxJVwYmWiSxqAoMQQRJmREcCjgEDjnLI7WaxqdBDHQaJrJ61D345YA1dNYEodd5MnAxS06BWRzTmpCMOaRzMkJjPKY3QyZXTUVJYI5Ut8PVgxUnWzezCoALDkk5sHInYUbdsKpVNlj5NdnRwFHLQbAtqM2TLkw3M3DRE5ilsixrpujxRyralrJCkMM9WK+3zTLNsyrajZt2tVUcyJ0ZAaAVLKDlzRm09Esq5mXYehz6gWBQ1CDpCWVPE3PLkIhEIkIuxm6q/kAWLzg0LfIjdeCWBGlqQvamYHQp2ooM3NGcDNwF6JiGcFo6jJwm5wBgMhMY+oIY8nFc8awLd5xc+hxVoBNjc09F8zjkLe3uB1l3NJ6xCQKR1CR8byKt/luW5IK2nLvsu8XTJCKpaJlZEZ/TReN4HuRNWetWKbRTHUifU9sETfd6VpEMPScxma8qWoxeW8bD4vKVJFLhhV6P14R1kNKCBgNqN/CmBvBcdxPstcjBBfXutLApDZuESCIcDmn0gVAxgBM5qRqMP0TdrwimFJaNhVATbwmEkRAzwTKWJImZAoxCKOggkPGtl59PXscqhrEmJCAGBmQwcBJkYgpgHm25AwaQ457Dlfi2VyAAgPUCK0BGwGbIQAWR2eUCOLICMUxMyBRIw4cAhiYoEdGDdcDLFxW4mRjywKcOQBE7rkAE7DAVA4FZNN3xkDAoErg9kUK0wHA1aas0OTiAXBlEtOdHQmQDGyy+ZjpRAjaWTMAph40B5+MGwgwZbh0snUQ+E6emkoAp9nU3BQmYR18pxiBEexo41OBmoMhye5/Au7CEm6vxQ1wV/xi1MVdoW3RTIgAqKq7TbljVgUw9wyv+84mBWSCKdm0sZ80KHBwyJon4nxD9Q8/+LPbbRoVB6NxPd7cJlBq2haEXMkZ+lL6QYNpWl/a+uwolDdXM82Dgt+styJcVZhGhRDM5W7T79X8xqI6XlS3d/nzi+s+Qds0Zch32WzmTV1ZLjevnrar+9isotRQvF+vSWHWtt3mNsxCaIG1fXV6umjvMROgGygaEIrtxmAiItcMTEWneYwBSU0ndJrhDt/jYAAGO/LUlPlTgAkStUsUTqqC7WI7CETTKzcN28wB0EtRQlRz3sW2d51Uk/PM1BAnvrQRAk3xRjcHmAKhr/U/m5Jx7o42qRdu6kho4GZGHIgFEH0CA4LBlI4UNjMCJGR1MMvoOrnhESYw07TLQHdwIs2KBP9tV5ELh1mDKbtpKsNGYdyO4/r6H/7D//jXv/G11b3H/RrH5MBmxNXBnpXY32wvbn6618hsUV9fnAE4QMRcso2GlhWQoOu3i4Ojo/v3rm+e1lUdZ7PtWJyWqZPl4tHnz07bxez8/OK7v/yt29NzhxqLIqRmtTy92/7mr//GzdXN8dtfzYGCVT6O4+a6Xh6QMVTorphzf3377IfP33yy/eTDT84ubtabdVhF49SnMUZxLW89Plr3uet71emCn7oMfXrXp0vCJx8YQpfs+O1f2iQ83dz+yt/8lU8/+rDcdB/8yUe//b1vvPMbX3v/s9OvPVyl69N89fLqp/29X/s9ryltepkBZeeqLkDmh48eH3322fv7yzYIpL6Qjh/86T959ODbz97/8Ve/8+7v/7//YO/BY57XH334yb/1P/y3/sW/+KP/3t/+197/6X/2YHa0vesVN8vVynLqbnqkUEngwENvTEGnlVo2dmzncdOPk6RtNmX5JyHTdqiZ13eG1wij3e3uC/r19BtTWI0kcqiYghm28xpDaBctMld1zYyakxD3BcihrcO9w+PDe/c1V+QYQ2UFYqiLJqY43ZhCbPp0pzk3ce/5q6f/+//o/3B1u2aUSa8EdJ2MdAjbAT58enr6X/xXX37j8e/87m/lnANSykVYmraKVd1XvVqZfHYl5RCY0RFge3vDIlrAMzbc3JxuV4+PxuJ322G22J819byeNxLnVSyM0rYVze4tjj5fDwkdmUpJkM3T6GCEQS07716Sba+5UCl9EyRS+9knZ5X8wapt/tbvfv3p1bB3cO/uetvlMtymX/7W16hZpKyLetEgvHp2/k9++JM/f/9H1QwX80bHDFiAeD473t8//uBnP9+/fwi+JVybmoERMoAHkWLFAYopAyEiIyJ5ATeFCXQ3PVxrRgZ3IEFKXVpvrv702bmrs1AUmc1nq73F4f2j+WLv3uHecn/hWOq9GsByGvuuK2nUvnCgwkaBWXZ3M0TFiaJnNpGuph7PqT4RgMANCU0zALg7gzCHPHSeDVKGBnS+j7NQzIVp3HSaSpzXpZSKyACnM76qIRIyIOuoKUoQMJgxBXZPi9B6RZdSvvIb3/7wxz84euvxarH/4Y8+2OvnfR4PHzw8eCf111d0tr18+tn+d75c1/PrV1up5y9OTnLbfOVLv3byk++vKhw3t5tNb6krwyZUFc6EQT//9PM5CwHosM2bvvQgNtdUSuoL+HJvj0Z48ODgu199++Onn7ZhhYjzg3By/nHZeMmmbT69PJvv1xgkOI29N/PV3v29zc3lo8cP+213e7PhOKvnzWrVhDj785/8WKhO23VsalXPQu9858ndZgu53J7eCc5yUxFTYQtVPavabnOGGuZh8erkBbv0W5gtiJjbunp5enqwv79Yzm6vXxJgKRRn7YvL87cePXj67HnA6vTsZO/oyXa4KlrGkpvFYtZI6nNx26wzB7nur5rY3o2DDmMpA3PjxshhnZUhVokqFOGqbIq12vWd6xcJbSipEJIWUy1VVQ/bPkbe31tc392u1+tZ0yyWi7qKLz97VdVx/3jVNvX15UU0J0IWRgJwzbmEEFI3VNWspE3xEojzMOiYQhBIXmEbMRRPqn0bWkchqSTUph0KJQ9YMGAEL1aydgkENPvps9PZ3t69gzfAwQrkfqirVtUjtiSobhlQkD3kze1l085jqF
I/ILGDatFQxd3jGTFwKHmMMbjvyI0SowGQUi6qxdBRzRAxVJUjINK42RrRlASvm6rvOhFEjsQBERmBpwAODMiFwL2oE6pmJMg5EzhQzN2WKQypi/Uc3a0k8/j67NCXMS9xaTap/25eEBLRiICEEVGcsKCzCKVBGEspZiVZclckzJ5ibMyKm1vxpqpVM7NPHS4EogUyZUAb82YWGmQDVXN2YgBjnpDbqZ5VRFSzIAoAqObABNBXAULwyLWbiVAuQwhguhWOCAE9blOPRgRkDswShItbDC0532UVD4HRLSu4qqo5OAvPvfTujoRmiZzBFFCFyQyIQ3GNoSmmRFiKeXYgVDc3QxKOEqrgbnUVTO32ps8K6iAgJBgk5ALDoDc3A7m0ixZHuN7claxIWNTaeX13eW0OaF50JxUd3tvf8poAzq63215DVc+W9WLJsQED061asr4kQGyqqg18fDir2NafvDy/9r7L1b39sFgqyN7R0Xp1WlJvdRiGEt1W+37y0bg9v7jXHorMb7Znbz55++mtb3E8+Nr9br2R2cHxKq5/9EnThODd5vs/aGWTV69eXff3/sYvt08O83oQIk1FYpQIr63EjgTIEghTGiNNroQsgF4SECEzmAlRACeMilCtZtAEsBRRkuecHLmRqvWhc/BcbPosGzNMfcjuVQg2bAXdCEgmgsoupzTBMSb47BSIYHB1M0UCAJzaZ2BaZRNHs11ND2pxdMQpbyQTaZXcXocy2MwJCNBth7eZsFpo48hgTpwVQwykhYE1DUAOQpg0d1uSCjigDmAJS2YUdGDzUkodWcfRiyE6YhREr3kA4wKYvY3N3e12drhATspMy2rxcL/em91tK66qxV57eb02Z83KTMRUciaQ4sjTlQZOIoSoGYCRiG1UrsQYMUQncASqRSJ79nFQy+BOVswInHFQtaGriXQigAsgOCNMhoIYmFBzGUOohUG9X7bRxpwVBKgwZAdAKqUgKpubGYKruwghEiMQFK5aCUiYAzPyCLgLmaBCGXdSEUuIxIw05FyK51QQyQCdMEqYuBsIGLk+XMwfP3zw9uM3q8WealF1MI+hns2WIrGdLyW0UwfemAtP9esOzGIctRTQlFwVfEydlTQMg0MRoqkBzxFME4EhCbAAMpFMZVOqOZf+JuVsA56fVZuOFCBnikWIQoimA4K7eeCAxOjOCDrlq8GZJuOAIpqCsmBgBtexlCiVW+VanIwopTwoGDNiINRRALWs++H8Ll32ld+msStAVFfuQSGg5G6sUKAiQ00hWl2PV3dNExkJork6EBBhIGa11nRWBWRmYUYEAyIydXePQSaiOzjVgSOYlS7kDgbPw3D0xlc2sycbJEZF663c1MLgCMhB6hiVe9JxbLSDk8/2F/vLem+QB8lXkAdPt1YjkLblsoaTBo2NptggoTGRghPCNC1PAGSduDzECMjESAHRgBE9m/WEAwUhridhl5GLKtZz5GjojBBI3A3RgXZsICKZRurIkFFNqiIHkJ8J+WhgxA4GqDOSCrAnAfbihZjA1LQAAGIpuAWZJa7AjbHiKgCyGaLExL4e5hUMyj1hDgshhTFnZ0ML6BaIAKZGHkbHqeaLSKaqmN1Wfco8utFEINZpjzoRiGnieyCKmTIJ7KYwnOgZgPh6WT+Fy9QmvQtgCnsys5lNaoKWiTOYJ/PIFP+wKefrShgQ0MzMbOcJgJ0tCadW1AlI/MUMCI5T3hNwJ4dpIQo7Vs6k+Ux5N0CziUDG7k7EuRQh5qn/DDmXgkQ6BUuJATBrISYDdisvXz0fhoTC19eb7JwLjkmx+GJBSEECFqZcwjyuLj5/X/tXD+ZVQzKO/Tj2SCAsRDAM482ga2lv7tZ7Am8uqRW6S/58sNtMTahCLnvL2Wa7Sc4X130twexahKoZD5aJ5pBt0awydrODZtN1bJEMt4MPHddNIdGJ+BM4AoK7vq53opwLM8OuD4qYacpbmhfi6QLNjo4TEgoAAQzEXSfTjk06DTEiEpqbA7mr0Rf2IjMHFxYRcAMi1FLwNe3NiQlBy4RJMgMgQNAyKem7zw0yorqjObgZkCBOtwibkOSOzsTqhSbkGjq4WsHXf9KmjKcjmmZ3Q4mTCvJabeQpQ4COiALoU5Yc4Yuk22upKBefzFbktJjtpVSqNhzP4eLi9Gc/+dOzx3eH9750yAsqJenIgasmruD4rSffuTr/ydXVs6YKdbsPnrwMljdSVRELYqoPZstlazasu81s8ejm9vYr731pzOM777x5evn5W19/99mrz2fL1dnl9Wfvf9yu2k+efirt/B//8z+qEB3KkMf3fv07A5SmwvWzn/+Lv///+b3//v/o5fl471tfhypAG9ev+re/+csf/Kt/tr59eXjv+OWrk8uT09XxvBt0sVcLyfFy+fz5xcWUq6fdNbu79Car2O4KBjacz/e9zCnnOeHpJx8dPV5V9+7v7T9YvPNLHuPREiBt7PpqOT+625yal6IKgTDq6HcKhlR1gwnZzempLN+YH+ybqt7epD4dvvGNTz74+PC9J9Vyvh6Gr771jQ//8Sff+OoRqD/98JlqfvB4/6efffjW/UNESOqVNE3TjGOeHmyEHqMMKd/1ed7Wm66XQJpdwMvrFRbvONWvhaC/pBPBFyaiXyTTXrurxqGEUJl6NWtCxAwEk0c8iKuGKBTC8mh//aOxGVcHRw/JeL3t7x8cAIFaMSjCMtH0ARRBWcESndyc/G///f+dFwemYgrmu7veTsiaWHaUs+8d7SPjbH4gGWOMXkbhUHJhguurq4nzFpraxtFMtaTbmzMBH0peHa26i+18tTg/v6YKui4BMM7ml+vNam//6uKcMcSmLmgsXDFDDAm4WHbU7AkcMYRQR/M0vUanL6826yQ5jgjNoj1SKYN36fbuZn3v8YN1V+7GZAFvi95u+u/+6u/9/d//R2Hb9dBThHsPHnTPX9UcKSioXl+eLGYHcd6cnpweLPcvz19VFkMjOhgSKBqCF9PpJh0mj6NNsxwicSmG093KFF5/TK0UzchMIXI0IiRAcLVhs767vvr4089Vp+cZoPB8NTs8Onj46N7DR48XB8dvPFigd2nsu74b+g0RmzuJOABNtxNgcKXXufRiNlEMcbfZgqqq+5RmE9IfayvjOBbdEwqx247HTRiMSrJgWDlA1u1Nx/OGmtk08OZSqEAdG/Oyue2WGEZ1j3NALiPWq71ntzerL39jvV0jMe/VsZ41Mj+7yrP5wxc/e/rNX/rOJo9nl2cOt/Pl4b0HB3/0yUc8r2/0qs93nsygPr3zL73z9U/f//ne4VJm0A9DixhjRWx5jdstbTdjzpmRFDzWdS5ADm8eHMayfmO/CTF8/MlnB3R/OZ/ttfuxOrodrtoZP7i3f/LsZaxkvre8vU7ri8tFFe7WV7mU5fHekDA7D5tUz/2r3/722cuzx2897q6uY5S1OToFM+vGe8sWGcbNQDXyor4+v97eXHS3azP69JOnXryuGuPKKa7TnVQ8m68A9ez8ogzjXrscjU00rmZDyswEWmqgm5PL7WYLArP5fDsOs
aoMtK2rIY1NvbcsWzdryOYrOb/e7q1mi+WDl0+fV6FezOO4HkSYmG83XR5R4kxK2UlFPM0Y02EehpTfWM1fvny+mc+W8/l6u91suqbSet4+fvLG2auTs1dnh/cfzGeL0t9N7dkGxRFRhAlDi9ltVykhIBUn2xrBttswVMMwIBoDVCGYKpIO46adV9vNNo86ny3Wm5u6CSDsgTd97+TSNn0aDu/fi5Xc3Wxn+/HyetOEdsg5BjGwjGXIKVKFRKUUIDRA7Udnc8JtN7qRq0uQieNXSmEhA/BioSJwzyVLCGPu1TVWMWVDCUiqClXburFmBbeh3wpXgmheBIMWQxJAF6YhpVw0RnbGrCWlMXDUVLQUxF41ARi459wrcNGiPjkp45i3VQiat2PashZw1FGjtP0wxFg55JQLCiUF8EIMQ74wUBYvCupFgHLuQxAkRWawUgyKKkBx9DLZ/HDitGgV6lx6IkfG4ja50iNVo/WHR/dPzi5msVJXJGWmCmpC4bh0dOYADjmpRHF189zOV2ksuYzEDuiDpli1yDHrltWZxA1CaMW1TCwqKoGplD6G2mzaTo9mA+JUuKOAZKoh1infgTMQGYGZGSKLqGoQAqBc8gQARUCRkJN1w1gMQ+SGqIzp8cNjBGhHLeq5aMppQbMyvRHss3nFAuvtJkYOdb3phtjsAmgjtCmGhL485jFftU2YzykgbgZQnCUuiiOCRAPuE450/n6/XfeNeR5Vs8UxJsL5/N52bU2I6+u7PRRwTgYXV91otO27fcgQOcybLBjm1aBrIiOJWdEfPrLrtH5x1uL8wVcf0Id/cvr0orrNmw/O519/M4M72Gw1kyqOlogpm5mphCqbMmCoaiBzNxZxg8llZaMZOEpMxYFa91DU0EGIx1zcDAXMirkDaMWBPTqyc3CwGFi7HgBVM4KqF+RKCX2Cb7EoMkswS2hGE9cadFqUT3sPJi6qSGRA06TDKAAA01MMCQBdp4cdgRmBgxaYsB8kYAUACUDNHcHADV1QW4kYGy9unvNmBHUgKqUgYHEibhxFwSXMy1BAPApoUpZYsxSwKoRe07yqlDE70qze3q2DSHLzbLPFQVNXhVJ3t7ZjO3h8b/BhTBmQAQMz1xQ9KKACupBMhzUhMrC6rRTc1AJzKcAVR2F10FGVdErxcGRQg2KjAsA0OKALJx0EPRqiWi4ZiQDJ1VAFs7tBISB0BDUfWJDNWNTAfQKYEU5sRyQEp1xKKtnL1JtDbkU1jahVixaI6wBNKDY6oVlhCeCehp3PWjjOZ+2ijqnkoR/HPqUhJ/Ci7qYsgUzbyA8ODt68d/zkyXtvPn63ms3cLaXR1YiCSOQY29nMkYlcs9Zuk1RERCHWhoJIriVrKaqqi1ySakECQSKgGKLvqoGMOTjsip9d1c205HHsOrVB+03Srkua3UXryApoubSB3V0BaaqvckMSAjRH2/k+PBCyk6OCKwv3XZdTms+PNttNQDAsgUIw8Zxhc1H6dd1U2cfb4aKEbYppqzkpjhSq2AQjzJu7zaaRqsuaK1utmv5mQKbZam5jJiER7LpRiBCdkOaIhwIVG2jhWE3wWnKc8McFjYTJARXZSASSaKExxthwFzbPmRZhfrQZMocVi3A4HUdd7R3fbbbG6GFGSOKxHQfyF3b3fMRPtH4n43wz0p0fIp3F9aez8bYFIhAAU1AhLK4MWNxxAoo7OmBgBiJE5hAQaCLYT7jjGBAtqw3EAlbclMibyFqKUSGiaRtKr+MwQMi7SMf0znL2AiSDR9UIlIlDRkeEQDgjqNzNQRFE+DVRXQlMRBkywK2hAM7Vg3kwgshsSmNo+uq4p7H0r1rKPqi5y6zmWYhMAVEIrBji5F9Fc3BEMydTh/zF9KRFdzxjotfmIHJCsJ3jBsCngzO87g2aJjEitKno3hSQAPj1RL5Da0xwmUkfRyIAQjNw9wmgAxPUxglx+o/p1UZiNRUKkyvZ3dBBTZGQiHed9qoTagQApqYsZtll1t0m+BECAaBaQSBHzCUhssKkGUHWzMRqgzu7ZkTyaYJzI2YD6LuByRWtsFzddI647XM/uhM1c+EqbPvBwON838fF+fPnYeiOFm3VOLj1o7kDGYKZmg+K15nuNtt9HN9s6OG8OlnrydYGoFkjhNT148naZoyezViSa5/627sLY2v2V6rgKWAlZRyWs0ZmizyWVEry8OmLi6+8u0RXAGASdQN1ZnI3daXJo4UIKGqGr1vpCJFY3M28ICEhFy1MNOVSAWVnvJg+idOpxYyApuvB3R1BLRPKFCS0Xd20gTPyjhFvluEXCHkiQndDJtOiXhxQWNSKubkXwGkRI9PXTwoUk7gVAHA0QrSitqNY0cSacfTJGjiJjDuLzJSdnKxiTuYFwIiDm6kpEWoZkCp4XSrxC6mIUL3kMiSpKgOMdRxtm/PGGDZdd/3xB2sISvGo3WOSbtuX4EM31svjlb/bp9HGEak11TRqty6cacwJ2R88fILs/dDVYUWBm1UoNvh6vYTF5cXm3jfu320u+hHr+sHY/ejRm4+7YVgu9zabfPT4+JOPn0XE+1Xo1h2I/fgP/1ASnbz/TJb3OVDKxWz84Oc/+o3f/Td/+M//m9/920efP/t5OMG/9Wt/+wf//L+er5rR0QnS3frxvdWLq7NumDoQwRGm98MRppUCE5FT3cSvfPfr+/eb9cXp8qB5/4efPD46+v6/+LNf++6vWtXCqKu9JVu6+uzPH7/91Z/9q5+8/dv/JpmPnRpFhEqAkbTbfKZ3L8owxFVgYQK6PX+119SLo2owHQffXF7u782k25BW6U6H0z4NN3tzCXA1q7MwtpFTQVYJVUglu+O8naWUGOl6uy2l7B/vj6m3KICWukLIU65xkl/+YuUZ/gJl/YV8hLt72F/4spxGCbGdzaWuqpYhaxAhYgARySHEUkopEGar4zfeTuCQ/Pb2xbxdbcdOiMGdiRnDOG6yw9APqn7Znf97/+F/QOQJv/Bx7spOJwDB9JMhbMvAZN315d3tsF8tm8MWsPRDqqrail6f36ADMk5+P2JkoeP7DwERXWNTnd6tSez85dO3v/nlyPTy/HRfl/vHe8PQM8Z2HkuxZtH0ZXTLgVjNOQYDYKgRubinIfuu9xEO9uaILXSdzeKrcb23rC5uNk2sn51fjtCH0LQr8kpOX24//eTz7PTo6Hh9N9QcTa+GzbCaLzcXd6OU2LTHy/vZbXO9yZ3OFm1oGxjYbM1Mk3sQDByVgCaU2FTlU7QgARbHqacQjZgmawMAgJAjFnOzouowQdimI2bAmhlBJmuZOqaxfPTxs/c/eDrqvwwVVXU8Oly9+fDBm+++9/jhu7OG3FPXb1PJaRxKyTv0AIsDUuSp8WA6kjk6MCtYdu/6bS20HQa3uqr3CkctWgUBg5yyOY2bLlYSiK9v1/P5/aZdDaVDBjB0BXHKycTBkfffeRv39rMN2+24mC1L0SD1GhMu9ufHaoD1QsYO2+X+W1/69suLZ8PB4uHDJyd//ucz355dfbaaibNguVByFhnW2XHv+m4NrEO3YZXZrNl2
XcMxQ59BE1cj4Gx/OV9V1xcXIVS3N+Ny/63/6b/zv/yzP/g/3pTtrUIusrnuhRp1f3VzsTrEd954+0c/+mx73q/uH3kgmsW37x3cXV6tNxuKBI7DXRLfCzFsx8FAYiWuqWCexVj1dvr+C8HUUHV1dbNYNe6Yb3rcNJqG7fWmaeeDp2ylCbPr801Yeb3kalEjMksx18ihqqqbzTjf3wcNh6v25uKsXsb+tl80sR86ElG00aykBNmIZOy9UHtv8db+qv3k809ypz3ianUPSzo7+0zd110m7ACGRE268TF56p3RvvBbV5WUnGIdwIyJNQ0lpftHByevnn3pS1+pqoAAwLC+uZov9u49enB1ef38s+ePHx9D5lI0bZNSYYJx2BBIyrmu2+b/z9V/xcyWr+l92Bv+Ya1V4Ys77+7d3adPnDkzZ84E0hwOwWDYMi0ZpA1INCDA8IWcZFGAYV8YMHzrewcINgwbkmCDgk3RoChKzGKa4XDynDk5dNp5f6m+qlrh/3+DL1btnkPti27s6i9UV9UK7/M+z+9pG60VMDSLdLu7arolOeFcm0uhzdBPgyGtj8+u3rxeLO8gTClxrVPXte6mBrlbTMMupbZWQ7QAruPUZVay1MYmZpW9Gk/TkFOaIHGIR2d3yWG3v8mRPDWY0WKdrDS5K2UMAeu4ocBu7HXA0BGFSEmtGGDgUNzJFFW1FgY+gFJQxcUjOnHDC1fNsQUdCcmxhhAdjBBVa8oRmQwcBQI0bgwOzISESsSBvQBCQGd3Dk4yuVYBQyCYdBcSWeEYW4SsWgJn5ujmiAzIbkDIiMGhMrm4ICVCAKC5a1XZOQRUr7USsUgPQEW0wRhTnGwMkdzADAIl9+rEXj2GxEDEoMLOselOtltJIYHVQO2oJca21MFqpdQgogLFfDT1faJlhS2gpLYpVWJqixnrNDeDEFCxPToCJg5cyuSgRA2CI8RSa2BERAdliAEDgDrOZcDurEwIACI1zH0loPiWOmimZj5NddkaEtzu+qGII7YxuOnZndVUVEXcLcU8TtLvp7bbr7p2MFocL6SOMaRalZAYiRClHgTT3DQTrqeN3A5Tt1i+c9p1Xdxte9D2cj+UWlIMiZlRZSh14DooWhhEmDG1OTYxLFo0u3P/7qtPP4vgq2YhWbxW7TcIfv7ufb7bitg0wquri2Lb1dkqAsdp7BaLGjF+9XH71Ueffucni/vv7p9+Mk4/Aq5myl0IkFgNGWIMKgpMhhAxEpC4gjoamtQUsmh1A4K3JnJCEVUlCgtO68i3VUYijCFXlxhz9RIAkZK7M3rVaqpG83rdORAxlVIDk6g6e8gRneZ1JficZUB4y+swdYxh7pk1B6IADrOzw9QRwESBCJgPk5OhuzMTEbrUOb4mboCz6R8AiYKDVmZUsTJOX7z/BBANkVNCwpADdZ0JBKs+bgOJmfE0MFSwQ16DUCFgVQgG6J6b1tQwhbxeLe6c3vzkJ2bSpBkeb03X1oI1eFq0bcO3Uwm5qQDccF4Fx8zkRYqahCYewkzsHDA0yIEZqOxrTAEZgR0BAiKqggFHwnHSiczUqoN5LcaB3ChScjRGlDKZBQDkkIsUKLNZGV2MA0RO5jZn2oZxDxpMQLUiUQzMnMZa1MGBRJ2QzSoAxByoibzMuEo5JwSvDBSyAYGWSapNU50O14KYc8pNbpuWcb2oZT/UotdDGXWeWzAC3D1df/ELX3z88MHp+YNmeZLb1lRS28EhKkHA5EjEgcgDc5kGcHU3xoBeGBQp+HxKQwyMTdugKxO6E0EIMSHP5YwHR8Nhtq6T1VrGkcETwn4Ey2lCC4mKV6CaKIp6RFREVQT3cGBwOKDNzdamGogDoshIqAhigOo2TMOyseDoVjDQVOdiAbexOtUC461st7C/nXoJvBdt0mJpKnIzjcOKFw9OjkUcOd+wbiY1IkBfdMm7CGYG3jYJ+cC7RcAFs5aiwIAhoPIcAfE5HO0EnkwXgGRcsYF3fmbXdWDEm+s8DWn72XEqC24LNUOtVTtuGgPMjU/DTTo6EylEwYFFrwLVND6F4aNGwqr5Sshf20kNw7QEZDBHYiZ0c8SIAEhuFgARgBh0PgARfS5lpzkEjIETeSb0yBAIDRE5hxDQ68EbA2icyC3OhN1DETg4BA7kJiEERGFDd4UQvaxcR4yMlMRLBOuAlgABsM41YQBO4CEwZA+RrFlSqSpoQSEP5hNOZJQAxX0fgiOtjj4Yx9soIwd++KUvyslK6yBDQXAGRopzpM4cgczQdRzJR4C/P49ORIQ0d0sBEqEZIJjKbCYBB1UJc1oPHInMjIjB4RAT+rws4cA5VjUhZkdyPWTKZpcHzoqUq79tsCZEBwLEeb30NlgGM6sYYF7vOhCD67z6QrD5rQGnzwMmosqIcx0YvE2VHFCthwoTR2AzICQ3rypMqKo+xw0O9QKHnIeBA0CKXMf9dr8bSqlS27S8ud7P3FxqO8g8FWvo6JSOf+sHv35c/Z3TtFfpi4B7VQsx6lSL45vtuBPYTbK06ZsPmi88Xk1e39wOmdBc5+YLTnRV6nK5vHO0fnn5hrsGMez2ReTa1ZsldXmpu117nK2YqtSpgqITX9zcfEGPUhNnKc3NEIkADeZ8FYqpf74Sx7emq8PXKLi7CjJHYjd1mGNIDACmlWaC29w9ZAbEM/hKrc42XyQCRzeXWjlw5CTy9rvmdJu5M729paHZOUsczc1NbO6n84rEM54RZnOICTMftCBEN9cDLCSYGyETEqgioqkiHvpEmYiQVCscGFhOjm/NZeYg8+fN3Dkkd3azEOK/IhX1fd/kplt2UxFTN1UpZdz1Y60pRrbxxSffunz52fvv/sz5+m6k4NVNyYmguffgw7Pb61c6XF1ffaw6xZbHcVANbVoYp4qD+tCXmzO606XGh/H+yV187e88/tIw1uD+8OG5wg2vKK9TtWjMD7/wQb+5uHfnOBGbGLqZDLvLy/Pje/dOH/7w6Ytz/dKkEoRye1IT74fx9uqFBfrV/+6f/dlv/hmsL17/nX928Wp//8mdcb//+V/84Fp23/ruy1KAaBZ8AZHMQNUYiZkII2L8yld/fry6+vTFi6OHd7D6V3/xw5ubq3vvnecuTgWnfg86cdtNjk+++M2bz36yePQkLjtxW919Uspg6qtlvriwozuPF++8t58gBR52r67efLbZP/3KN3/2xcc/PHt4JtN4dveBttw9frIr++3zq9vb/cVm39IyOm73PTMTwTDsU8oupg5Fam5y2+biuD5dP3/9IlIU8hh5EgCfddA/1oV+KoH204+8/ffnLWnzmYYJiUOTUpsJjRkcgGOwomYeUqQQrEzr80U/3a5O1tfPXoYA6GBVTStGKjqAY0hRQThQFfm//Uf/4TTUqrMVbhaJ8KefIQDAYSsRvvTlL/2dv/Nf3j1/8m/+5b9cVXfXpVnkpmvqNC2O1wbuqu447ftuuUAMqWkqmCJIP9y8ue7H3cnpCSN5jIQWJtHt7vGDs4uboT1e9bv97AUtdYqY3AoRcaBA0R1qUSBIfDgYSMN0W1aC+/2bnAv
FdnkWa1+Xpx0Cf/jw/d/63e+FdTpf5Yer9voH3/kf/dv/zt/+R7/zl/7iN/+Pf+3/tN9M24nefXTv577w8De/8wdAsTgtTu5/9slPREu3WnaL1bOPL9DdTANHNQGd8+fIgDNPjAO64zysmPscJxY4dE8A4dzIZGAecBaEwQxn0oOJSw3zXh4BBZaJAzNAJMZitrve/PbLq3/+O98FwtUyL5erd9+9++jRw5zj+uhOTgRsBFVlqjJNU0UgqQaIgpUzF9mxtc2y299sjxapFtgOGFK7G6+WyyZR2O0GbJaco6q0Teihpnax1b2g5dyakKjI0AOI19r30+r4KCyST2b7ipyiuRvuNv3R0fHRyen+zfNueVKxFpc9IT18pzle4bo92e7efP/bd+gx7aZh2F1fBJAQ10cNG9wOr168Io5lnGwot9d7d9hsBrEhZh77zdlZO+52wzAFQhXLy5MvvvuN7337d6ep91Kvb/eLxQKcYtfGaLa7XeU73/3979xej4tmlWIwhJNFunz9+s3r121sZcL1smsi1Gm6f3Jnu9tNrzaBYBxqv9mdPj5enLW315t7p/efP3226jKa3Tk5u7h8PdwM7aJpj9qxiEIZpz53a2B0VSuly6EM/dXV69OTEw5Zqrbd4uio2W8uvE8Xr6/fv3+vl+2mTNiQgTHHDLn4zP5QNQCqL178QGRnlYJGGSjkxfXNDXAWpZBiu2prHTDh2Dt4CMhepiJvIWbuxFiKpByl6mK1mEpNTWuK1zfb4+OlivT7en7n5OriarFan56dSrXtbs+G4JRSUlWKGRBLmZbL1X6/zSkVEzaiEEPbQEwhy8yA1qm6ewikDuqeUurHiTlNdsMcUm6G/QgiKQYP1uV82W+YQwhpHIamy0Md1aHWsmhadOhtv0hpGqdi0rVdk3LXnTLTNA3c0P52YLapjkgopu6EyLPp+ODZxjnvgTpVMA8hqktKqUwFFWLIQ73plidjVVPhEJCCuYN7TAEMxTWF3MS8GbdNzmqqRk3Oo+1m+Mt8cwmAYsWsmkwCAdiqFgRw9HHceqhtE1Kgvq+m1V3Gum3iotaa4/EoUxOXDmSiKTVTHUMkdzCpzOyGgWLAWGXPxCoCphyDGhNgwgaM1VzUkA61uyAFyNDRYd5RUaaYQyTUIiNwCjlPQwmorIrEYhpyQymQMkZEh9TkftqbCrq6HVy6gRhAY8xSFABDyIxMDsTBTJiiG47aBzQidifEwNTMVRzuPjtbi0wEAcHAmTAgsIMgoYPP3WZdewKEuYkpUTWdC19T5EAoxUAAg09j2fbjnbN4tM5d06yXeVKZpopOhFZxkklSCLubbcpp2B/g7jLd7i/H/eZ2d3OTMG4n3BfVESxQCI265SZWlSoKHChFqlMZSqnTIkYy8alfn3EZx6p2fHos1VAoMz//9NlRFCnVGsSOwwRYZdpM+81wdueh7bX2AyB2d4+nbRlKWX/pyZgXzZfe0U8/Wiz4djuO18UbYiZVMRV0EKmp7ayKuREShzBXFQEQEqkKhYAGxKxac5MXy2zkhiRlFC2sBSATEiCBAZqGkKcyEDGHgA7EsU4S3WUYU5sjRQ7sSkwcKIGquYKRzzepQA4ErszJAe3tdh1MCZgpiArhoRUd5yubq5kjICPOkAdwIw5VJyaamat+YG24w+w/sswcMZyf3y2GRTQxhUBIoDJ2vJqqxDZpHZEjBFADaFtatojQb2/DcmG1NpH7bWkWC+fYr5vmvXdOHpy/GXb9x59ydVIPywYAA0c3SXlF49R23atrlcJiKXYJOCEYhyimQOzFgiO6IRvlOAGZWV4SUvCAEEkcbBJ2nGFoiGBuOWPXUd2LESOQ2OxAqMgBkVIIYmZaidAZwjywuTKxWFFDA69Wq1KC2EQWMidyEwJh9FoLijMAuKUQHB3Y8zKl9ZJiiOARDNDFTdS9TMgkY/HxEDoQEUdPKYKph8CLZrEMOEyb/WBmEfioye/dvf/OvUd37tzLbRdTcgRAAp+PWafAIWcOiUNUre4WUlKx2Xwxw1DQAdxDbOeVu88nAITAnHKOsTVHx5mZMy863VxUycjVdQa+aDV0sqrozilKlZgYEUBF8GAcmI2tB9aK6+E98FkbwRTzXELFgdpuqaDElNNip9vAqd9PbUhWddIalG/KG205tYGROKSoTmKumHkRqW1Te7F51TbHRDE3oWS9HfZaKiOpKgKoGxE6UGBCteNlN06wA9oNJSZsE1WZbd2HiqNMaMWRQBd36oOfH5bntfrydJOH19P1x9Z/nJpF8mvDjtbn+1rFtwg1rc8sHwtRbI43233XoNWNXf3+Gt/k/gbL79Kum3o4xkW0SjYZCBKgOYAFBAUnREaYe5fgEFsCQpybvHFmFSkxsaOYFVRhTggcidgcTJAOOjVRQEAiMp9NLowUDQApOIGIMDqwxibUgSiEA1wc0RyD49KADSoSEDmZaWUMjtxbjdAsqy/NoN8ZR4mh16DAgSlyRAwjOoa8TBHK3glP7z3RRYemGQMYxpjcnCMnTsQhxFC1gANY/3ZSQQNz03l1ajqXmYI5wJw/ehuV+FzumfERBu4mxDSfBAlJTAnnTBnb4fZDD4O6w9yqBm9/xnwpJCSzA2x41mgcHNzAwUDNdX597e13merb92j+kW5ujuiAaj73IKm7is7QRtWKyABzVDgA4CTCxH7A8JCIEMHc1zGXbLuBmruUJvHtsH/+4morNhUchwrE5N7kRIxlGhNlu9p/63t/dD+Hk6PYLMLNdTXAWoUQHWkr+no/bEYzQJjKg9O2Xa3+3h9d7tG/eO/0iyt/drHZOs7nEuRmP8r17e7e6frm5mqiw8WH8EbU27sBKe13Y4rrcRzLVAJQaok5jkPtujijV2cc6wwkckIDRz7YJ9RltsTOgqYfzho4L+eU5gQaMkXxQxu9uTMHmEOFFOapej4jzcgXU6EQeV4UuBk4UpgpRWIyY+tmRdDNDHQGHcxoNQScC+xmgZuQ1RQBiD4v0UNXgQOU2uejbG68FRWaM4xEM0v+LXp21v4QKACoARzaZvFzXZfQnJhVnJgOguTnUpG5l2lMITIFFfNa666WvlQZaOmMRFqmzfPPfrTfLe88evh+e3qeUpqGqtogm+sSdIPSx2hvrm5v31zfOX1yfnzEXIWs7LRbnBYpSHBbt7n7cK/6la/84m9+9N0vP7l//fSyFqNm1awffuv3/+AbX12fNPTDj14tPnhsIhjJAHdvNr6fdjfPn+HvvdxuvyC/imD18ubd+++uCKbrq/c++PDbP/n40TuPP/79H/3C17/+O//8X1bBYTfUUaab8Xx5skqvByYRVZ3HAaAAxMSApUpe8tHZyV/4M39Ss19fFkUbS3nx+s3XfuUX4jjYdOuWcmqw1tMPvjy9uXz06MOPvv+7R4/uK7dkBO2RO+doMXm7SC8+u3ny6AtE5NOwffP69U9e6aVub9t3nrzXv9k+fnLvx9/5/oc/94sfvfhxbhafvProzv3zz15sPIfUNiTY5GxoQx3X68U0DXWwEBJBmPodAIN5TmG/VXKIzC
JmszvxLQ7tcz3Gf1ot8rdS9NsH5xweAOSUKXDMKcTgXskQFFwdHWIMxSQS9r01Ma4W7dXlfrFsNomQKMbYLroYQxmVAxNa3Y4q+Ht/8DufffpyTuK6HxyT8F97anPg0wEM/tpf/8/Oj5Zdbl48/cmdBw/apnHXOShbxm1AKqpSqzsjxkDN7cXrGOJgE4MM+416CTm8vrx98sHDd+7bsJlOTo42NzcAOedu2o8yTJlTii0RNTm5eS2TmwOFEIJWMznYrX/86bOVLP7dv/I/ffyFd//ff+P/8o9++x8/eP/9XT8W95bDpy9erBaLCereq0RdLsN/9v/9fzyv8k9//WoNudTSnK1vdYPw3n6zj90ihyCyHfT68YOf++Gzj2uaDA0Dg4DPeDxCnusPIVTxyEB4KFskJCYyMzWb28gc3NWA0GfoGjPObTBzmh8dHJiQEMQkUDBVd6sCSOiTA1FgXqIft9mRzfzm1eXNm4t/8et/gOghhZhbarr1yepo3Z2ftsero2XuFk3LEVOW0LHaJ2OxvUCIqR81xW43gdeob8Oy+3E0W7DFWmXXDxDi8uTMOUz72xwyuW37bco5kLjV3TCGtolugAyiWpzcXaHjHFVQbHuzz/fvhmi7un//F59859MXZ/eP33z00cnpnZPzh1gEhk3EIjfMdBTdbqdbjk1lCF2LKW9vbmMIjFSHqW0WABYLIck07hMv3ODmavfOl97/1V/5+ird/qX/3v/8f/1X/zd5/SHJNPS2WC13/dVXvvju88+eJW1OV8vNNJzm8PzVm9W9u1OpwC3FlGK8enUdUxxVnt1eJMdu2Uzj2KW0hlMrZdAJyBTK/nbbdW2K+eXlm6rl/OHdq4sr3YtQEpy++tX3L29LZVq3zfbyYrlYFfN2sQKCsYxiEhHMJRC2vrzdvQiwLPBSrXbQEqpMgxg1IfQyxpwjh/Ozo09ePweYrayMGGQsxK1CQjeo9sknbx4/4l/9c1/4W3/je0jL3X7qGuzwwKeYbzAAITZpmqZaJkDjEO7de/D00+erxYeL1VLk9uZ6f3r33rOPPjk5O7179/TpJ0/bFJrEAipSXc3UUbQMEyOVYUBnF1dVUIqhEe8RQM3FqhRdxJX0O+bIxIQABkwcU1KTFIK6OxhRqrWYCmGihKnJDtak2GaPbsTgroG51NFdbLKRdapDl/B0ua5VTG25XE3TTHdPpdY2Za0ld6mCgalqr9rMSBQ6UFVMvYacmVNgNrEunZBCMK+ixDQViSG1zXKqPSDk1Fr1YpMDohO7iqCjzMhPACZMZq5SQT1ROwyb3J2qKlFEcJERCRkjVC/7HsGmqqaeYjvVKYZogDkmMKUDPEHRsxu6VHNnZ0Z0K05OcJh/YEYFA5IjUqzjGDiSY6TgBF5A1UwtdbPIzIQs6H0dQ8op0HaoRhkjl2nsuvVYKjfZVcZpIgYiQoVpqikycQQjN3IXsT1hJspuBIgGBkSOHiMWGVWre6gusUmmgAwYrBQh1xnf4GrEwaGAY4hNnabIi2J7DICEzFGtmpm79f3OqkSc+9CVCdsmBGRT6466ah7dmXi1Om66LgdnhN22piYlxnHU9fERkNUiFJhNmLFbtPNRMNUtQd3fvG4Rg3vZT+3qdHnyYD9KbjjqRD6hT8POq84AF8st56ZldLYpyjC8/Cy882SUmDHvb2/3++3o4G2zPOkuLz+ro9qkddv7bgzQNeFomRb91C+7s4vL16FJCPXkJL8Z625Z7n79g/IP/6luq7dDVKYYTaZlswBFr5WJ0cDNHXQmdYqbmbqJmwaMDDhJcYvgimX0MgLpoFMGb1YJb1xGUwVWdVNQg5iYE5gyJ3NrQp76iZApElGogqgATm7sh1zPvJIPrkLgiDRXawMCzuXNHJzU3EDn7l4EMyDnwA5ubgiGHP1wKVFTc2TkAGAMQAdX0TynEhKjICCJ+/mde4aYAjHH0Cy9ogphNVfixZKwqYrd46SOxT2uOhdbOHikIJKbTBtZr9NUd3SWp7YbOJzcv3f92TPghBDz6ghSwqk0HSPFJvpRJBhVR17kdenAOYoXbGMAUORxLK4eCVQFc2pSqrVkdAAVd2UIKeEyBnUTo0AcQyR2qaZCCWNkdHYVBYZZyGCUWgB5tBpSYoYy1BgSISpaIEQKCnk0T9iAmrkpmhEDBi/FtTCCmKC5qoUUHBQT4yJT20QOASEhgAnWkQzIqlSV3TDeHLx1ZarTUPo8dplDwGKAXhcZcmj7vkDx89XqwZ17pyenXbs0AClDgIYwqJOrqEkTYmBkgsAEwFLV3WJMHH3WBZljKcYcKGbmaG4z2BpcwYEMQI04Q4izE8XVRSs64WzVMENUVVc1x5BTHPcjUJJSmAOqBw6jk6ICOQDGt0NgYAYwZK7TBIEiMyISJ9HSpAQy1rJVsSY1rKYyrHICUUNXsOPu5EK2hDRpQfIk4tMuh8YAAWJv4yg7amgar5ftoiUaAt5aKaZ3l+3udsCcyrAjcEA6qHUuEaEhvlURJVdGIEIAohCAzYNaIjfO6ehsjCHh6MFDy7Q4n7rgt0/3F5+tujHms8phZueVcYhNEKxhcWys8aQtLh6WdPK1/fgm6I99t4Hb7x/BF6MmssA0n4EP1V4IIGqzo4+ZZu8JMyIghghMGBj50GVYpiklBijulcLKROfMFM1vMyJhQECaY6R08O84ghGoGTsjBzd38hq953TsjGDuDHRgOB+HuHStBkjIxOwWDAAkkKMyTM7TFEcAJknN2NwbHRDZwKkhQa6GIpBjiszg7iJuAIl8TtESxZhiiIRERBRaQI7U/tQd0cyBQEDg2RkERsSEbK5E7KoH/dF9vpM6oBoO5rFZj3QEAgdCOlSjo4IduNSI7MAARoh/LD+ZAaGDzo0TBwq1o7kGnmFhgIfqVXlL3SVzU9EZsj93AhAGx1BqgbnDxmZlidzBPcxrfHNHUwBkApqtd6bmBvOXmRsoAsH8H20itDoMz5+/LsZTsd2gRKpm7WIZczKozGG1WP/gB99bOLYBNejz3SWhRwBj2gzj1VA2YxEHRyexR8fN3dPFf/Wj1y+mXIyfb68/XMEXzlcX22lbtVRJKLei25vxm6tH58f3b8seECnQNFb323UXMzvkZnurMWCXwjQOVi3mxbOrV8cnjyORExKQ++fxFjQTZCYic0cMczoH33b6zF3hCASOoKBWiZCCgatrnZtYQcXMiRDeYqIdAHFuEJ6vlHpIpCGZqCMEovm3IxCAmRZEZmZTYZobMmVmyrkpc5xffQebkzmgMO9j8K2ATgewlqkUn3n4gG87rHAOuqrPMhSBHTgwOIOxOLobmAMRiAAigJtWQEY0lfqvSEWBKXAYh4m4TtOUmAI5Y2jzogoWq9ZXB9tNr8fb68ubZ6vjO+vFO+fnj2IXDa07Ovr06R8+f/nSQdWNkNwZgGSSgoWIQTUF32/7brHOTZvunw1VsmlOZr7f7xENLp5dnq2O3jk7/eS3f/3BMqnc3lxu7/rPOlHoonSefOG2ePbi8nj98Nt/92++996Dixs73peh3w9Mq8Xx9dXQr
OPdD++0K1x6ev5yW4tMou+89+6/+K3vmHuTEBzEOAUto+mg3aI9PqJHX7r39YfvX3//93/20Z9tn7xHqf2Djy+/ce+D2KYffevvfv3OY+paFaLcxC5/9JOPTv/8L3/09//DD+QXVRoOS0ws48RQb69eMNPYTzl1roWDxVZXJ4u+l/XdB5P3F/32aydf/6PvPXv057948/pq39/cPT/CZffDF9cVdH3a7a72/b6fpJhKIOqrMjEid223bXomqGaBqGlonNSKvo24H1Yt/rlj6JCY/JxMNJ+85liaH+Bc7gBgjm7Oc0ejGgamGOY5iWLwvRuiV5FxpFF8N+YUy1gYWMXbdmFq9JaRDAgO5f/zt/5mdQcxcHdEh7k5Y5ZI/Y+fDaC5B6Lbvjx5sn55cw2RX7x8LUJn9852t3skooRqI3LKiw6CaCBIdHR2JuPg6uYylenk8fHHL69vdv0TepyWq/0wPvzgyR/93rVplWliYDVFhjAPnAiEGGNQcUSOyNXHOeoJAI+fPL7+dPd//U/+z4t3Ftc7PXv4pevrXi0qw00N2+3waHl03i6//eqjcZoapvcen/y5r/7K3/xn/9XVFozS9c0omfX4neXxk7p/s99vfBh/9Rffv9iM7z5+/OLpC26z7HWOYAAQMxoIkiMAx+hmomZuSKRq85Yfid29miEBMam7IJobubLj7CJ1nDs+wQF0JrWbzZChg62SHAGsCoHD1M9aaUtICMuWmEjBzIZpP726vvzUREWZQ3TucrNcLO8+OgsZX71+s2zaW4fTVTttb8/vHe214P62WTURvcpWG12v7zhai6JjPTs76VZBGLoYQaujRYKWLSdGbIbbHYC7FjWdwWHdsr15fXVyuuj7fcqLlI9gTK4yTfVmqyfpqDx903FLecGLu7tPvjXVobIwLsfx2vp6s79958u//K2n30c1syFmCRBKPzlg2yx3m8vdflgcJQZY54a7ttXdn/mZL9xZb3/rX/7ex89/uDh/IsDd4miow+s3t+frcHF1dXN7gxQ2E1gMGOnuvdOh7vsyNG1adOv9ML3/5P2Pn/2QY5yGvlkcQbJpO2TKMeJ2HCknotwrP/jCOxfPX3uRIhZi04MNUh+d3nl+08ewGPtR+r6JuDpuFFbg7GNZLzup5XjVoeFuJ0Pfk5XLenG86F7fvm5SXjTHF9eXzSKGEHb9pu2WyybHGLZj/+r1ZpExhvjZzVarWdRpUFPJiwWjIliiFqH5F//kJ21YaMwZLn/+59d3jx9+5zd+DAAcgpnXIibmiDG1wzAhluXx4s50/vTZsweP7h+drK+vNjdX10fnd8a+d4Cjk7N+ez2UwuQcIyAtVicj3vpsH4QI4m2zLLsrIh76W4d5m+scM7AaKjcxx2aow9SPIYSYo7HopKTsBqmNIsXd2tzuh9ExRGJRIfCOAQXAqJY+cQSHNnZ9LfvdNqZcRx1GMa3mstsOy+UJEeqoMcUylshQawF3NYXAFLwWmcrYwGz1w5ybahUInVy8sDO4m2nbNbVqCBEYKOQ4Axcoc0T2GjmlkNSwohIYAjAzoBVRjMQxWx3Na4qJDV1EXJkr8lz7K8mwDCUtcmwSjRHdzJwoTzKl1NY6MStjKFLMGRSYmRTBXaw6iTqCB0UPHAzM3F2ICAHRAKsScjaAUsfcrAjYHQEgBEJxKaXpUhn7lNIgExPVSQg556haRQeGDAgKSoR1miKHFKOZ5bbTqaBZwMAMguzuotVMIydRKCaBg5mH0KkWM2FqrFbihJDA1R2ZmDA6SuBsUh2cgInE0QOnefsx982FyOZEBLVMizabGjg0MeWUt7u+n6oTWXXz6uBNG/sigcOd85PdviwXzQh1318jtUQx5bzd7tfrBcZg5XAt2O17AFgsqE0hwXJ99ojiGnFZaTuVUq2gWnVTo6ZdOgYkJJGAjmCyr06UUoYYuntnYXu1u7zsb248L0/Oz4G1W68wJxNx0c1+d+f+2rFcD9sQQmyiqQbyEHm8vmqaZrBxF7B7//7wyRuxMdSClJTQEYgRAzuho8+3cm5S1cyVOQBGDKRVQD3GJKKE4OZ1GE00dFE4D5NlDZkikAIYc1C0CorshGSuKhUmxEOrDoEpIKpKyq0RurkBMIcZuhE5zEiG0DCgu2qkoKKHIAYzIrojUiBQdzVwJAJXdAdg4uBaZzKHAvrMVRURc+QAACZV1V3BHGsRAy3jhNPQdp1PKkWhW+HJqSwXGDo+XYOaW+B1qv3OtVqMIBqbVHWEYkMEPloWq4iLuKLBYi2O1EbO7gSqBkqJpl1pm1xrCRFMpmASmNVijGlQIWRySoyT63IRIjirlRqRWA1zamNGImURJ3BmUQf20GQjNKIQI3oIYJKKjmrFC6BjQGdRI2c3CZG6NrZdUtXQgCsY0GClRUf06iAUEkcKVnwAThgCIxE6ThIBpgJ1hrgScopp1eGirQhuYgRaK4dgNZReYPLb661V0uGPDeDjVDabrbS0XOTAjGbsxpEsAQU+PT+6/+DRcnkcYzvWUqQiR05NZDJAgACzkOPsIOZOxMhtiIHxcAfogLFhsPoWJMSBIzC51QPJhgBQEfgwrZMzBlHEEBlyaAQTm2iX83hzRUREJG4u1WohCHOYBykQEyiICfjnzBcA8BQjolNArYXRq1iTU85QZQQmzplkAjUM3E9DpOzRjCyFbBxWy+Or25ujtASqRj7YWC1OFEJSEo+Zr273eZKmaXOIHCJNQzRniiNzYK6iQJgDinkOhIQUQkJQUUcCQjAHhMBIYMxhVOTFqXFCk7aOaFY1lvxuenAvhdaH6zre0uKUOEDI0KwKW2xWxZFCQAAtWjE3q69taY+wTvUPcLdZl0+WbKSFCN5CcpHAGTy4HyBiBw/UHCPFwIwcZo7vDNzBkJGVCSCwIVMICGNAJmLGUNwcGOYo7GEmJ3KoXgwIiMFDRFRiQR6DbtrT83KJYAjmbkZIYK3JWvhGUZkBgFANENAESkJKztlxwci+hfGFOm7gTgKSjFUnCA0BBwopctskZHJzMBQ5QH/mLjMlx4hzCaY5VPvjLfeshbuJmxOzqBDRjIOZA7ZIBGCI7G4+cw3dKDCYI/ps1lDzefBSlVlAg4MJpb6tPJbDPT3g/CoBkKoA2tvQ2YzDPpBnZ/rYWyYRgbupuvnbb8fZ4TuzkFV1xkrMHgJzfdvpbg5oMwMZ5ugcqdQDK/UwMxHiWzfefGDqwAyfvHr1+ma8Gbwqqc9pegrE0k/dIlFqvvdH32vN3n98b1+v97VIJWS+vN1tVC+2O3WXIimGhvzBcX7Y0OsXr45SvHWf+nI92beqXpftk3W6kzED9CbLtt1V/PHzN8smnC6zu+12Y0osJhevFdxPHt53MA45E7kqgpnKIFwhBJ/AACnNqGZkcoB5whJVOLA8UE1nEdARDGxuGCckpogIiK7gb32dgsiEaLPShzNNKIiUz129YGpmZkoUiGg2ms2+CTNDPuSq3Q2AidlM8S1iykwOEaiZoD77gz4vsEJCnK/9YHaI+zqYuwH6PFeqCHEAN9HibnP7irmhzyrh
wcIGjvOhPzcSihapBTG+LeD7KakIjIuIg7eUIha3UVEKVIcGiKQKuWFwdQXU3e348vJVF36yWhzntkuLdnt1fX3x2W6okcmttsAhxDbngjq57PtRWSfxuFpe1fpyGL7w6EHqFomjl/7m6sXx2VfqSM8+fZnXJ2+e7z767h/effeu1NNpmAK3stfts5vMi6/93Dd3r8uH3/j65tNPvv/X/39f+p/8e81J4qV/9PR3f/HPf/nbv/Hdux9+KTQnxfO9+w/66fX9dZyUc6ibvv8f/Bv/zS/+3Ff+7//xf7zZTDh6wPoX/o0/YU1+/dl20dK//j/8y5tvffSV9z/oMI/1OixWi7vvtOenXQwBJQVThjpUd7/95BUTQYtf+zP/Wt0ODBtcJehWhNPti2eM9dXzl0dH74bQBgaw8fLNs9Uq31xeLtfvfvzDP/r6r/5yX4fu+OjRV770w+/97vnD9Q9/8PLx6f39dkjtYnO5l7F8+Wtf+Ee/+ev3zx/OnxUOyVyrmIoH1O32dr1a3tSdhQCoc4nfgVA964Hw+fkNf5pJBG8vj/P53f2PvwcRkZmJMKVaBQHAD1WmFGj+Z2gSNyk38zLBF017dXO9au9t9teARVVMp8DxW9/9zvVNT4gG5m+5/2/bH39Kwzqoumim3TKfr5sPvvlBUXejtk2mGmMUca8iY8HEzDEGHvcTc0yB0Cdzvnn2atyWUjC1Te7tzdPn69h1XXdxfdHkFpT3u9sm5aYLMo2GkhLXMhHHgM1U3QClVDP9nE8RXR2mIaXb11t0WCzahFjQU0iiiug3/eXpyUlBf321fXx2/19+/5Nf/KVfurm5dsCxjpJjXOZ//If/+E/9yq/+0Xf+i0zx9UUKctQ/e7EFO1m3F5v9olmM+7JcZzNr28bGXmvtt715BA+BEc1x9qASw4yXBkME+inP5FxnqAiuc0jNCbCaMzgjINJsU4QDARLMlIhw5vGhwYzkYJqjbUToAJE5ORHhOiVAFAM0iGyy37z88bUhhBj2o+wV+t2w6Na7feUHTfESnLXKMOwevPPu+aMnY91F9tV6MfZ9lbGUqkW0eikSU6Oq46RWp+LT2fLesN2lGLumEbfiOto0TmJSlm17s7899gddajfjrRuO/VS1xtNm6/Xuh0/Gm++d+fGL64ujRx/86MdPj5tTe3X98vkfjnKz7ytIenD/zpc++MLF66tPnj+/HnepS2ycjzocFze3JZ+EJx++s7m8+Jf/4qaO/uPvPTdLxyd5czNwonXg3MGby5cP3z//9OWWU/vo4aN+c52Tc4owOiPs+t044SevLzHm5WoBFoZx8+j+WbTj3eXe3Yfq9+6u1ElUT9aNjctxLMgp5nx0un5479Gbn7yGmK3K7XXfLiIT3lxdN+slTEjOAahUH/oyDMOo+OTBw6ury6nW9Z3Fy2ef3WnO+uk2JWhz2pvlvHQgQizTYOiLo9Wb66uTowV3uewGdQAj1GjVaim5ax59+b2Xrz5pLAWG6ebpn/vlu//O/+zP/5d/6/tvzxMEBomJCNB82XU5p1evX0WiO3ePP/usf/X85b37946OV7vtvo79jFNdLNvr1x/nGDCimRpwSF1IMgx7dGMmQ6kygnmkNOiemaZhRAqIITQLGccIbZNigMCBx2FITTayYVQVMUCiTDS5i4OpS8BgrqUMy3aRA/X9bRdX1ECZBBx3/W1MWQ0A1QH2ux26igkG4kRmCqoR4+QYQprGPTqIlEANKVoRBgYxm1RBQpMRkB1DYLHIhAyorkTGEcyUkMyKucbABqLiy9VymG4n3RNTCCEQTDa5igNKrRwbIq5SYxvFzGxymQADQJ20SBW30kZsYnI30+pWq4g7Vp0wduZlxuNSYDUFiw7ilB0EXByUGRGoiqpWDJEpulqMDUAZ6w1y2zaLcewJKYZMAWoVplbFKRzgfTEuSikxZYHKlokCA05lRHBGRA85tAYO7EgKXpy9jPvcRmCoom3TiRZwZSapHrhBJwJFxEDYMDtFqwpqKk6YZJrfUjQDRndQ9TG4g4sbKM1ofyWOhKHI6GrMKGpi8wpD7p4cI/PVZjtORQDFLObgb8mh4NbvtyLVa7ta1JCwSC1Su0Uah33Knbmvj9bFdNxsu5TmY2DRxs1m264ZrV3ku82dd3Yjam+A0Wsdt8Nu3BSHJsYaYmTG1QrGCa2S1rM799ruiHwVObmrIiLh6fLk9avr1fnSiuyutiy0enAvNU1ztDp5eLr9+PluGkIJmcpimVHR9/Wz3/io+8L901+8Iyb5wwfbp69zANn04eGxm5hqDORaFZ0ViajKRIBo5ohalYI5IwAh0pyeUAdDgsjEwIGAU8TMGNGdKTiz+TTfdtJcmmPAHObNLRHsR20jo9P8Q9HN1ASR2BkcSqXAYFCsltRC07SmXsw5uigROTN6AYiqEtxmKYrAI0dEqgZmBgbEQExm5ghFJ8LgQIxgpoA287gooOvcRGNIaLXyWKfNTbj3ODx+UKhCRMvsuz1gLlMP0zaKt9S54Xi9TxFlVG1KPr0zbbXe9m08shAXDQ3LDtGdvMtRQUPgvhoiVDfIbUyO5BRixiWONYKiGVRAtwQWGSMYE+WY1FkxIHGcS4pCRQIkZlBn5ybP/UgRQdVUHQi5IZFJp8LI6F4ntRoQg0VERxRsOOVGyuiDQEwdInkdA5PNHSUUYrdgDKZqZhGhgBexOY5CziIW2ga7JTaJE1B0RJDq42D7m1I2lUZBa8Ec9FD0MfWFHd1qFRpVcgiRSQ3MxVXWbdt2KWTCCBxDl4OaxdAyBWLUeT5BBwgzsXtGHdh84+dAlNzMVNyqWXGfmAOHaBjm4i2EmdQhYDrvwR15poogoRMBx5hb8iwiUoam68YQOJCiE3pAI9eAwcQEjJ3Ig7omjgg2h04QzcAUJHJLjhGCRw8E1EScCjiZeUxJq5F5jknUONJ+um4D9mXqN9OSF6Ew8+mk0Maxn7YKoxSoU2k5OKVaYN1FqLsQwnGXiKBqmWvfA2JizughgAwltjFHCuboc5F3cFc9tKVBDGhAkFfAuak73b5Cm7Q52wd3JOzuVQk83oR9Jbyh/aVJ9sWyhkzLhQGg1owW0AP1isNG4rr9Zh5+mHGXmRHQDOc2dPWZpyNMOO975xGDAAITwdzqTW5zGHTeVkcnYShAtcLIoSWICIpgTBQMkIK6kDIzoxkCyRyzoQD+OXOFEBVi2Mf1YEcr2zh5BVB0As+o5yG8BB+QFap7TZgJPIdI1ZCTswlUxHJMGuubpE3lR2PMRjpvITCQyoAcQ24UWKuquauEnJkJEaUqOKlWmn1V9PZ+CFFddX5dCM0MgcFRVYgYgV1tRprMlmg3R0BiNjMEVLM5t4RgfkAOH7rFANwATIUhuldwcCBQd4A54zZ7l2ZeqAHMTcjzzze1WTfyA2NI6HDux7m8GMzM1Q0Ig2g9LOpNZilqBreZOcz8Y5xvJ8xMwOhgYJnRx2Y2P+s5X2TFvQbUq93mxfXtbQFO7W7bO6XQLlNn6tYsm7DK3/vxpz4OXzi7pyCv+uu2bbsmP73Zvrz
ejg5VNbg3TE0Tx77v93I16KrlR+u82NSPVd5MPnr8yR5uZfzKKq6akKpOXnOCQulyvz9d5eP16nq7EfPFarntJ7q6zV2bF+uA7oSO2uVYq1Tw3V5Ol8ld3eaGeQQDn7cjNrulAB0QiHHWRNHVOHCkBKCIOGttyORWmImQDzlKpMAEiCbFXS04zXhycDelufAbCADUZJ6HdT78Oc2pG0JWq+5qPi9WAcgRDA8/w2aH0OGWxsH97WyNBOhgAoiESEQis54Hs+zOHGzeBLq7o4oAceBgs+Q0fwwOdyg0CwhqdYaRITGYfC4oHKSimBZFiquMpXotw3ClFapSdRncVGskZ+cqFeaLuft+/3q/fTWMIzEGbtandx6cf9On8Xr7YtzeSo69ilPt+y2Y1ToxslOkuHBvEenN1UXT5v2gJ3cfes7cxciaMt7sd+tH95s7R4rN6nQBTCHG5Spdl92ODO+cTC8+ffrb/+DRefvZRx+lr3/l+WevPvzyh6OUJ+88xtWKKHs8XXXnx92bf/O//6/9p3/9b/7Kn3j86Iu/9Oxp/gt/8df+6r/37/6dv/2P/t7f/a3Np8//F//+v////Nt//avvH8VyO11sv/lL3zhaH4+3VypXT772M2fNcS22378J6LuXT/N761pKt6Th8uXV80+8HZbvPDKbiH0aN2nVTX25vdg++uL7N09fdvfe95CKVK7qRTAha5Xri9/+B3/zL/5bf+HH//zbj7/4SxfXGxmpKo2j7Kf96rhFytxkdry8uIoUM8epTmAeQkgx9/0+JTxZd+Y+DoUQRSq7RY4u7gfF521H4pxxfmsp+lcRQW//fL5VmdTFCYmIVQoQmnlANDURFRFkVrXQNIaQcr7dbc3LXrbDOF5eXwzT/mi5NIMyjAD4z/7lb4r7zFmb/XuH33YIvx38RDO6jYik2pc/eGgyfvLRJ8vV9me/9LNqSBTXR4u9aqAY20YAq+osXXGK43578eazcPye1TKOk6l0Tdr4ftgXAWnyInTt6nR59fKGNcA0HB2fTFt1tEkmpDipo6q6lHFQMdMS8yF6c/3qZrngzbZvYhLV3TAimFkdtuMiZwC/Hvsff/yjL9y769vxajudPHznP/hrf2OssFw0aYXcxtVxO10+/6UP+eMf2ER6cnZcUdo2jv3+weOTizebly/2KSVmjm2Mqy4/WJyujr7/3Y8+fnm1DPm4bbGCTEXNGSqYO4K7MaADzYoxAKEfzuFzC5qp+awJAcos8x3og2Dg4m97T+cPCYC7YwxAMOeWgcjV6gwYJXAvrgBIRAAg5BCJAYFAUVAFBFRif7u7kQAa0zCJtKyQ4jJe497Iitrggk246ffYsJQeRNenx2Lh+nKzXsWr26uPby6X777DkXelDpMiFaOaF3F3venWbV/2J/ePJ9nn5qhtzrp8tl94LVujIBqNMS6aha5WFT4a9O7X//TF5R8hxevXr5uUx4jr9Wq33f7Rd78toufv3r/74MG3fv2f5LaZq9yJkYjPjo9fvHqz72WVu4qooHLl4zDt+/3pydHDk8c3r65O4vnTcS/T/jY/v7c+efrZ0zZ35pBayl0Lvi9aT9bdfrdvMPl+X950squqdRhqcdvd7mPXNYlud2NB9BTunB1fvN5Nt/L68qP99RCaxbJbkteqJYbETG3uduOQUnS0fT+sFu3JevX0R5/e3txLKe59dKd2sfKqZ+uTp6+e2nKBAKZKjGPfU8CbcXh+c0Pot5td27Vtl293/WLRooChCznDtLt4ZjuB0LgbyeKTj4b/6D/4x0aL+Shg5hB4vh2vosWkXXYnp2d9PyDx+fn55mb76sXF+d3T1XKx2+9R7Ppqv1jSYnnqug+JyygAqnUEwpCYIKC7VnHCmNvt/hY5EjEFCciTFJZgU3WiaXJw4xhy1xaZgCgwCZFDGErZl+2yW5ZhnL3i6JJjB4gxBkOrYBjISwGPFBkYAs6MRmfGMpmji4qacYwcs6oTB0NT05y4FAFBm6z040xzmzkdM0rFzWuFEA7kSHMwJ5UJAJgiuLu6kXMkamiovQdX0zlKBeYih15gDKiOIiZiwaHWwWmWhee2WjeRlBMSC1UTcSnmXqowBQTj6OgC4IETmLtJirzvb0JMroog8/wOVOfOqdmXTqDmCmjEkUMU9+XqqJbewdtmpYZmaDYlCgDAHNTqNBWI6IxoNk77RZdD5Biju4oKQHWbUm6YcZpqarODjGUbQkPBpE6iFQjU5xM/ITOqMkUzUDfwQoyJAgGLQQgJ2cnJLBqpmXOIMK/7TGLIqhNhMBXBMgeZkVhKAUJBc/D9OIr7vh9EraIFpsDY9wMRrJaL1eLkZnNrYCpyvdmfHndjP6BjDMHUxEXdtAoxuVopB7v1zeU1J16fdvsrm8bNtHkjtqCh5MCr6H3dRZmklGHAsWm4zTGnVQxBmTh2xydtXpwcPSzjEJ0FeBprnbS/3k6lnCwzADarRBGEaXm6GMvt+iitjldvLjaPHj389h885TYfxWU/+Pjx5vyrclsHXSRetVaL9ENyRUARYScACoFF1d1CYNBCzO6gqD5jNFxNVV2JIxOAAwU2nFckhWEmiEAtFQLgzNREIg5mFcxSygZYpSJCCuRugdlnQjQYEdos7QCYzQBNvjW/84t/8pbS7Xe+fdpfZcbAQVR0bpw67N4BD1kAVzM+XJtmVoKLlBmeizBXN4S55QdckdhU1FWtIrCFXCdISySQKhPMvIkcncFLxUlrv+tOcgjLzc3WBGKzkFHunt673Lw247Mnd59efdqktW8hNUF0sAIibNXKNLXECBgCpRiaptkhmhEhB8KCpjTjuRQBEwKhWQWPSZDmJuQcIzAjmgEg8lzvHYAMCYoHRxNVEbc6jgVFCRyqdU5u6gBVhJ0JkRwSBZgMA6pZdDAnEDAEVwlQ0KoLGAcOMSOKqrkTESB9HnwAsRgJzIkJ1NgoTOrFputdv6teyQZFRXNX1bct4QDmw34aRusyTmUE95STAiJiC96hgRRQU9EqlXOTYkYkmz9wqiIFFN2ZcgiY5ukJzMUFzACqA7rNPQAiqiji04icU8rMjiZ6uIM15IoAFBpEViCb7fCABqgz7F+RkYBQ3Q2d0MmdwVTr7JgEM0Cnw0wVEMTdY4gz3m+G0TYhi1bRSaUiGCqO29HZAXyc9st24TZxiKq6btr97SYw99O+eOgijVNRHD1q2+Uq5hKZjNDOjk7UtG07U0xWloGux+I+Q13AzTI5FEkxzXVZh/pzc8RDHxOaBmJAxCbtsQ46BsQQM0yFJR2j592FXzxr+m2row3VZGIsedS4XIzLO8PJw6G7LyFxSIRSZKLouD4rN5sl8SIBuBo4Mbo4EdDbGAC5zTmZ+UUDBHUjDO7oBgRIxMSMWoxY0BAKwj7zaQUIqUlqBBCJkHUwREoxJqklILgJhUUGNqtzDEe9MiARF3ZYnm/1eiFbJlFgo4iG6HrssLSpADkSIyEguwZAQyBkiGniAtEbHFZph9uX+zELn2haegizS58RUI0gNiljchUxcwAQUUJkIjWZ16xMRJ/7KP
0/0tlrnMeb1Zhb4NIToXQdquuXLcVKPXL1/IMKrZ2frcmaRxv960leTpV57MY5JKGL1oWp/FNOwC0/7lwUGQqgCoRru0JzAGtEUqj+ScA1ADrLmCaKCAxqCkBepSXEFHCiE07AKDISFHx9ErO44doKtiYOiJCYGQRGx54QEwyxWqmlCzPldywi72K3aRESAdAikjOR/As5IbkoDnuU5uP6yeX+uPfnH48PNRwhi2laMCARIBIfJCunWSnFZnFbUwLLBPYjBWZRGnhrVCMagmRWsRRKpzlbk44DpIHgob1bG4zPN9Iq3GhbddvDqjtmk2F9SsumY77EqgpsxZ54qGFGJcb5gCDZmGjNzMRUmso8YxSynMrioG6qD4MmUqpmJt21Tyrt8GF0gQZ5x3Nd3ZfF3rXiADFwqFggArqSiRY401q5k3YwQjAtUCqNWqSKrTkIchpymXlPI4j3c4jXgcys09DeZG5ol4cqG4Ms6oALnSlF1V00qmnoNnbpjWwUfQTdNenZ1fbLbr2G2a6FEdqJViVcVMTDg6ahtyIYRIIIw1OPCB+75r2pOCsEjJJYuqmS51LbPFZ1+LVjFJaSDOIkWqMDMQkuM3/iYSgZJz1ZplqnWummtJJpXQCElF55TGYZzn+ZSiIAwLV6nOJU8pjbmmVNI4TcM01XLa6zM5JCa3uMXLPA9ZSspaBFM1NYq+9QQO1DH6QEoVwLRmreJVWlImy0TKfKLPACJ4QqeAZqwV0wSm/n5/W6ssIGDybAi5JkJquk2IfYxt0/XYNha9MPm2A0ZByZaApFjZpXuMjsmhlDIcUMn5QB7JARMhwNJiElExY0THDtEtKQPSylY9ACOYaRYQMSSP7IoBEiMFz23EhhQtJ6hCC4FJK1gV0yJVVRdoigMBrSgSGTvvvfPGQYGNmJwHomV0UqXCQkZDZnLLcUEXjgvRkuUUAPSe2TlmJDPSjENhEQSVKqVgyVQK1kXdqaLFkXfoWnYewFQAgJmYIaA0TgjG63T9AvZTjCoBZvGlhFI5O7Y1QmzK+CQft3nvSNiDoHnEYOI1BzCykIiOFI1WHgOpeSMmBsZMcN3Unzew90GtJpTCXkMbV2tARULiBQxtp/qFwVJkBztBoN/krE1VReoialyCJ3RK7yw3hSwooOXsfUIjn/Ij+OZvvYF1oZkJmC7dn18zhAhMVVRPzaOlJLjM9OFUGzuNhkRFYUGzIyGaiEklVXca5OJChUcVrXVpzyksLbMlprLQsFSX2q3JQrhBNCuTyXg8Hu7uVSlmE0NbqDiOfTVFtND4LNI4x5o3fQfsDBpSvmjiRXRs6sktzJ7Z8yfDFMf5Dy7aszoFAnQNGjQOe+c6wJUjAos+aA2x22QrV29vREtAbpsmhuCCa1pXCTi2kV1j2BBHHzNEbC4fnb/bsAvM+zxk0L5fhbC+PDuLpf7hA357TYcsP7+bXx1L0/joULQUtSInTtfuWD7+5PVusOub+7/63p9SycHzYoNz7NVMZFGWLOUzRHSI7hQLMtAqC43LTtcA6IKLtWWWrXR6Di7qsYU2qWZy+gAMAVkX0rTaQiJbaoUAwMTLd99CSCB2cIq+0YKaWtjkWgsBaM2oFVRN1MRMFs4MERGxQ2QTAwMGYkBcIOsqpnrCAxLzshsglqpWFZCkVq2KRsT/k1ERmgEiRxe7ZnXWbS5caGO7afuzpj3vu4um2TTdJoS2ade+6WO7BvCqliUXs7nWXLWaznlf5Sg1a8k1l1pzzZOp1jLXMqXptoy30+1zS7v93UsArlWAwjiV880qhJY9ri62X37yQZqvf/rZj1aPzzl0uarrNv3F2lxcnT9cnT3uu3WpNE3DmA4CaUqva9mrTXfXX2SdtAxghVCZo4gO04QNbh+u2Nmr4/1X/+7fnls/b+ff/O5v9pO8Rd+S1/j3/qP/867ib/zudybHR8vN083r+5dB09W6lZpHxPjoSmJz/fwLzeMJMI4ISGJgomMaU05hsxkn2Lz1/pNvvBtXFNYOaX761uXt689CtybvG0cvfvXLs/PVl59++uLly9hGZCfCY9bjPFbWrLlvfRMCIczTTIjRc6ql7WPbdcWgiDx6cHW2uogUKc91GOcpdf0KjeYxk562Pst86H9OLDoV0wgZgRGRKbYRfSh5NhEVAYSc5pJSztlMa5VaalUFInSuXXfjNN3d3pacEGEac8k1zWU4TE3fNNFrTU+fvvvw0dvb9QZF01TbrgldULDYdN1q07S9C7Hpu4ZDrABzdZF8p88++fH64i3XPhzGUh3+/NPPXg+H23nyzh9vDz64peFpjnKupVqtOs/peNxxcCGG+7s7p6bHgmKbdYssOc8ixj6UIj46zznnIXhHACh52t+laSARKCKpGv56h1ZzrvvDfCx1n47jNDnD2K6+9u5vG2qVapFycAXaw1wn5OfHNFkg37q2B/ZVqgKYqjNpY1vUVusrqTlPBwcQDK8/udm9PkRPBgjcYXMx0ypjPJbSdB4d1FylCjk4P+eHD8L17vnmQeSNO//K2XrLfcC8G7TU9bpLlqvqYZqIWUyXUu4SRSX0BrjUjpeNAzHbKR9rqmK4NBWWL4clV7ysTQwAmMC7BdGrakCM0ZPUTABSAQ0cEyA2faPE7Eg0mVZDFansGBSo4sXD87N3H+9m3d2Nx5vcxs0//d4P/9vv/XkVqaJAlMeaR/nxX/3q9d0+bJvHb11poI8/fXa7P4DT+/udYHP2W199+t1vbb7y8JhTqULOffH8dUD/sFu9vr4+jCNj8P22WW1RKE0Z1NZ9q2ks47BqumHcp5K783Ulck139uTR9nIzpd3N7tPeB0sSasrpfhz22SBsLg5Djp299/7DIdX9OL717lcuLi60Tvv9rUPYdK1DcQR5GsCK85DyeHd/APSTJGL1BvMwPDw/d8BpnlXsOAzDfA/eoF9xs91eXO6HOs9126xuX90dE801fuNrf+0HP/1ZrSLVpFZ1Cg7YsXfOx+7Vy2eeoGu8DwiOlcBHXugdbYxkcDwkESNHROgQtYgHWl2uPjo8/8HuJ/356uHFyszqVD0QOoDeLt9Z/5N//GePvvadf/b//Yv/13/5z4OPuVgVvbs+sYqqLm87CGYUCBlzkZIqIjFxKWJgF5fntdbjOCpQv16XnNEseqcmgIS47KhMTRbM0GnzzH6csuMeqfEuSE1aCpKNx8Nuf8iG9fSmZeYN22amRBFDHytoSqXMKYYmGQP1wUfHaFbmOkySBBUdR984di6GKpJLFqmucc0qznW+2+2S1rlkMK0qVXWhWLLnnKda5xPlE5BdAGRgvzp/ohzBNUhkYOwdOFBAAEcUmLyIxtgyOgT21Jopc1RARcg1lzIa1SoFmNhHBVI177xgzTo5b0AluGiizjVFgNmpCppJsb67UFN2oVmtq5GPLTvvfQSj6BtArunIyGhoRsvYi8E13KJS4JYdMRKaIplCUa2gUEqu0/i7v/vbDy+vPv3s49d317FrkszHPNwc73dpWJ2tQ9eHdq3kXejA+ev74zCm6TBPuwmzsMGXzz77r/4//+C
DD77/9J0nD67OnbBDR8S5SpaKAFIFVJPO6AKCqyaOvFZ2GPM4BXSmRuzVuQrWNv1cJtVCIKUMJtVqqTIRqaEgs6gBMCKJZGaHZkiUpYiKmiJongettY386KJ/63Lz1sXZo83KE3VtALAhF0FC9jE2hCRVyWHftsGH45Sy1LaLKjUG162a7cUZec75xK3z3pv3h3Gn+bC7e/n81QtzbrPaPL06f/r0UVif1dgfUlrq/RXh5eH45Xj/8fUXn7385efPf3I4flzyM8L743jz8vnH6CBNM8fgungsZUp5KnO37Y86xvPu6r23D2VynW/X3VQUYnx9v7vfD4V0UptMS5V0TDlnbrl9EOODLnuouCyr8NHmqtRsUNExBZ+lGFhFy7WGtlfEuQiid64h9uQjOs9MtihvzZaJHDJnqeB8BlwqFI7IpKJUtMqIRA6RCckFZyomxsY+ttS2h5Sy6ZDG4rgiz9VGMdtsbXMu6KvWbhhe//EPbr//y+nV/OlPn91QfNGvh9hS9EbGjLSgWBAISE3FQHRZmLPY4hIiZGccTJCBAjv2fqGhY3DIi2kFQwxqEEILwM67EFqHTo8HnA6a5hBp0W8Fzxnns7cuxcSsYgDrwJrq1g4iJcmbq7V3cry7QxEwzdMsRTm2SkyERtJvQtv5povUcNc6rlqTDrskqaYst4dpn6UCRHSS0jInAgUtxUrWVKyq5+iIHVAEIrIsKeVZioxD3u/Tq9fHV88Pw6s5PU/lVZYbsB1yDpZV5iRpSsOBSurMXJ7XLjTkSVGrgkDjw/nqbNWdB79ah9WGY0e+AQ5AWuo0TmKmjhRMrDIbWDUVMS1a2RM5kjelg1+fcQUAmeyETbXluFEIh1ymaa611lJrWbIvjtmzj843hiRqpcxznqZ5HOcx1VpP1nU2YEAyxMUJBRzEsKrUOg/D/TDvUh5zSXOax3kc53HMYzVIpYjgsvyWmodhP4zHLCnEIAaqHhR1abIhIChYRTRFWwRerLWz2hrOAhkAGO1kFdeqamZE6B2rzV1sHC00Zyy5VC1ggmaOHBQBkcCevFfvChp7ByZgVUG6i+1tvafOPXl8jnJr0w3nZKkS+SpWRdiTSMXF6SYKBkyoeNrtqxoCmUGtJqqmxVkJZAiWSzLTwA4MARxj9ISmVU1EbQHWLPMdA2fGamgKkkWzELgk7sXMPx/p+4N9fyg/meqNYUE2IwVRqM4RSEUAQl5SNQRIyHY6YS4bSPLeMfESV0hlFlYhFmUD1uVnERdQJVAEh0FUi1VgYFSPGh2TWFRoVVqeary/ji92caqMwUInfVt6zkzqS2Gf5FGCt4oEYFGa1ZS5VDGrTEZsA8yDU2i9ByVJBIjIglYMRkevOrjpoTRF3VypGLI5VyxXzaryRgbGRIzMy/n8xKNBNPvXBTTT06z8xANahkNLLe3NS/YCCaJTP9beCKWWdylEZFmscyK/Rkvjm8rHm3ekhZx0GkAs4ip8Y+Ay0zehPj0pz5fxFiLCSXyhWuDX0aRlwkFYVauKgqrJm8GWnUADJoaKaGal1nkZ933yxW3irhAwm2depI0Kyow+xoLNl3fHvt1E9i9f3B73w2YVn2yaJ2tPKClVEqViwP7LMed5/s01XsJURIw4V9v0MZABKngyxiKS5sQAhFbUyCmTQs0OCRiXqnnnnZgSQ7UkJZWazYcduY/G24PpjDalwTlQ1JnRde086RXh76zp3R4U8Bc304v9TEhdE0xlnOcpFwMtRV2z/uVnL4m9TtNf/as/KeOOGWu1UiqALDt1ZEQiMxQRg8UvxqfijllVWaI5Bka4sMKxSl36hctsXU1hkY4hAyAiL0MiBUByizvPDE4ja8TFYbhQtAEYkZejG5NjdohkAPU0QmIAWsS45PwpEkhoqouh4wQ7X/4igp6YQstltUy9FEAVFNFOVx6TmrgY2DkDfYM8+jWrSFVy9S46H5yPzvtaXJ0mNTUQIq0lRxeQQ6nJqIurJ6G78t4JqmNarnIUcdaYKkIhiKLCpoyRzDN4AFZNjFpLIiyOWE0QwaHzBljzqy8+9Pl2cxaOv3ixWa13d3eP33m8P47j7jbPd0oPjWo57id6cXV40TjbbGK6vt/05xNSzndn5+GQRlLbP/s0ShYasQ0ttYihsIudJzjeaTn77t/+4T/+h1/5a//Bb/y7f/e/+E/+4z/4znd/9vf/s//1g7di0BevP35v9bXnrz67ePCN7p2Hxz96Ne1rSwFwvr776H7Yn3vHq9YMlmbrcr9jQxSoWW9s1b18dbc9e/fJO98itG/+G7/zL/7f/+Ab3/7qB3/6g2//nX+/FGHvf/SXf/S7f/Ov3958/uL1876n4XgwsKkMTR+O5QgeRaupitYQfZqLFDu7eND3Z+Z8KfmY68NuO1NmV9ugRe1e1MfIYQTQtovjIS9O3OURd4JjvUkXnZSPZkgops2mG3OqOalUYmNCE1GFKrKoSIwkz7lpQx5mT1ymMh+ziiK7xbnOzqnM5Exlik3sNtvL/m1JQKQvvvh81bezFXOYphKZtutehZkjsDOmft1++OffPxyHfhu+9y//yf3udj5k8MDeffnJdbg6T2mIEE0VtTKBKUopMieokmtSUMmJFXPOrvXvPv36D37w59/+xjv3+10MYbj7cj7c9t0jrBjQ8ngTqDJHJEcYhcqcpwTKvvfeRf9rYhf5rmO0MU2rzdnhmDzk8fbwuX6qw3A/pXYTM6qIBlLLVqxOUvum29f5/NF76zmRG3fH13mYSgZTGOQ+rBxOLSCqZREnGU2cI2PX7O/vGdV7vXh8CZKb3h3vpQISeYeiVb/xlbfGnA534/3ufhu642HmGHzvomvDaMPrqVdHzlvJ7NCYlsTjkiklXOwTtmhhCWGB7yNTXRRp1fwbdBoDe0eO2MACI5CGhqfRCphKDQG6zt/uZzDzgAU1RHZZESy4eJDC5BQslyQE1Dmo8+7F0PRXq4vz4914LBk9FArbWtbnq9gEyRKdO9v0X1zvq0gW+3L/nJ1zl6u52gTyN95/98MffTKV6fwb72dufHRPn15YsfDg7OtP3v38J5988ep149tIuD/u7nS6OrsMQ9Nedsf5+nzdrt7a9pd+ui9/8/d/85/8o7/Q6oYa5tb3TXt7f3z85N0++lTnR1978Jd/8gEqokDVtD+Oq/N3Pv7oNTt8/5tf3e9fo8PNus/liBxNZJIJstWZD3OKHRBBqarV1t3l2fn61ecvW1gf7qpmXm37BiJGQq7vP33/48+/KIOBwKpbPzhrnr88PFhvfnJ749fbz19/tnn6aHN57tndvD4Yl66PTPjsi1elQN9v0AZE2ayakuZ5nDZXa8KYRXMGUxqGjARtG4uCE6U+ujaU1dq+9fv/x//4P/mtyws0vz7vJ8ve8fuP+H/3f/jrx2H3f/uv8MX3n/WOp7usXbu+XLk6QxrfPDVMc2YgMwVFR6S1+BgQDYnMNOccQ1x17W53rJ6kStM2jtAh7vcHBkbzjkmdVjMKLrbeQFdn2+FwjBbNdM5TdK5tAkFRlPGYinnvuq7vEWoaBqbqWK
dplBkb1zTBzdPssDmMcxvCONzH4IJ3koUXhUfyVcYQ4nycgosOKI+zb4MQTCWD9xh8cO7B5WW6q9H7A6LzPqfc9Q0tsQY0YjQtWki1IhG5aNmaZjXXTIyGqFpFMpsZFwURyUgOVGuZG99KqkQtCKgURAaEEJucUnCY54HZpZqBMGkK2DkMqUzsHImRmZWsuOyBvGpBqM5DkSKijrGWjMTBNYBcJRFa7BpDQ2ZRJaQYOillOS74NuY6BO+rYbajbzoDQ4Tgg6jOmb7+7e+G5uNPP/1pG2J4ENaXDwFwmgoba9VpGEqtJha8a6AyhqqSxV588fzDX3zw048/iNR+9etfZyMV8eyaGENs8jARMhKqAlR2QLXk4AMyM1AlBWDmCApACnUWnbFqQfGu0VIMlJ1TIDBdhMRVs1REDKXOjCFSO+RCzfLCXdySiFdouvXmwdnbF+fOeTLLosaeDREcErQRyfw05ekwBu8b8whYyhRCrAX2x1FrFcG7+935ZjvONc8zvOFTjMO8WdH5tiGcVe4JO++6t999hw2KzEOZP78bZRr6mhny/lhGlbFO7OTlMH1288XnB//oxbQ+Xz97Pfpy+Ld/67v3Px5ct+bNGhKWJQ4aONeCoB6w7mYuNJbKY/Gxefebb93dHXxH54+2yZx3wbsI7JRZSOY6aimOkZE8UQFZzu+aJuPgPRMqIpkDwGqI0zShkffOuWBYoSoUBRFAw7Zlx6ha54qtMyJmY3daRAOzASHSG2kRAWI1oehDXJEi1UJVQ/S5JqMGfVAAETMV9JwDq3dlpN65bQz3SlOW+Na5fedt7Ojuez9pd9oiVClLKlYVInk1NSRFqWSoC8LFTsF6YEMyVAGrakTOqpCJ5BGU2FTnDN7P8wAENVdkDdjJNDECoivzZGp5LNjwXLM1CMD5mI5lojVNHruwyhcPXv/q5Te+sikApuiwYxjnPN3t91eXT93hmBVZjTo2BgPSkso8kQNEJYapFmXPHNoYHVYpUwQ85sy+q2JoClmwenAe1HmzWoqxKmlGGSTlCgDIVsFQM+AMPJqvrXMR0KNfjuLJ1HxwpdZQEjGQAiFVtZQrNyH6BrwLBlXNqxQtc64l45jKlLQ4FHDsQ/WsiMzAaOTIal7QP45cB365C0RlAZuqCp2wNbiwjg1sLnk3TK9eXzdtH9pIHhWE0InWhQDtvENwqlVyzZZVMRg7QlMRJNECCJ4DusgcmElNxnkqec4p55LEFmSwkoHWVGudzZwLHElVBUWkzimXUskxkIwpN46BqkIiBgT3Zh2magBqzIBkjepa8pE5qzkAb6YKpkXNkNlwWcqbaCY2QGbAPIkLDYIsVjCp2oRmzofDNGeU4FxAj2whdNlxquD7y8nI5Znn+41tDVgJQKyqIBqaOgJHzNkMwBMSUlFAEwQg55dJgGMPoAjAKGyiwIiA5EzUoR+LAghbVRRkIuBSRM2qihgomJihWTUphnO1Q5YXWT8VuFGZVMxqBHzS1t+4vHrSrVmzh6PHTGBoJguFdzl6LrkYRERgAgzMwS/PAhNN85gxY+tQsFpVRGA5pvu2W5sVQNCqajWDqEIHRECkQEpaUrQ68/5ar5MjQucy+0xgvQEgGXoIRfr58JU8rmsxZXRrxlqlOANnZgpCvPc8B3boGpUI5hAUl+oQZ/Ij1AjVTB2AZ1eBgMC0QFVZGEO0GJ8J4VS5eNO9VX1zSFZRxBPgdTlHEZKBETo1RQSTisQLH9sMf70wW3J4CwXmBMMGNARaZmoqywoNF8GkKS331fLxL7NNWHRmpxDAm6mEmoGYIogZmFYAekMbIUBcBpGIaLoEQQ2BFy6zWV34RAROtYKhGamZWS45OaD7+3KcfWUEqSrA5IDYQIGIiEX5sC+UdNJ9mWU/prPzzU4OVxdd0MLAjishDEKvhjzvxj+47N6z3YqwtF6NCwE4TVoTlIw0iW5Y0bRxNAzz2aP46uXB8bqUBKJiVcF6Ym+yqNoCOzJHyHMpzFALNU2XDC7Pz++PObIPeFit2tfHpne4xdIR/mSEn+zqp3sTmB9vQhf8MWUfHBIe8+yPsHbw2SefP35wCWP6qz//i+989w9js8o1Oz65xkzVEYsaMsKSxyQGREZXtdJSV1zMZEAGVTQzEuFCzefTzAgX+R2e8EBwaj4SIIAi0puIGdjy0ENAXGZAdLpIDOT/z9V//Oq2pOmd2GsiYpnPbXfsteltVVYVqSKrSdF0S2pJ1EQGFKCBJGim1kQzaaa/QJoKaAigJECAJg0BLUBski0Wq2krWSzDqqzMysx789rjt/nMMhHxGg3WPpdsDQ5wcLCxzzYrYkU87/P8Hin3iiQQky8ZM3AUUQAksiUKh0juisiwyFSL9MS42AKZ2Ezuo0bL3ENmWEiESxrV3d1NDACIo///SUXMjE5t36d2zaFdsnoxqtY5BE6A5Eqo5tqmQM06UI8KRkAxMDG42r1zjrRmBEspiDBw68TujmpOAi6EvlzM26YZ6BiaVCbVNR3ffPHTP/vj9y62Tf8er3bUtoebY70413VXqzohJ3YGWnfolj97Vcx0nepYGdPp7uby0eU1x5Q2kpWVMDTj7WndP6kxKOMkwnPhTfjN/+i/315tH3CLw/7w+UfNB++++dbZf/x/+N/98s9+b35++No73ylv5jrZ9tH2337yUfvosa/6XZ/iob7T7t4063UAKJnbXqswRVBwREDoVxtQTByO+9OTD77+ZSEW9eayuXxCj94JL8ePP/3415/85jDOabU5nU6vb55T395Ot3fHw2GcUgjzXT2U4fLsQl1DalQ09c2Sbuu7vm3aw/GOU9vGZhxGCNr2DSVmo6p6d3vUoquuTWGsdQ4h+Ftd+W0AzRc+0ZJLQ7/XGc83q76bqs73ArZbyZVi0FoBHUyIaD4N7ZpP+3l9vioOx9Mk5gA6j3NoY5VqplIKuud53F49BooYjRDOrs62dxPNMxRjW6FJYC4lb1ZnSlRB5uF0+/ouTzbPnprV9uJhovTi+WcB/HJ76UVQZBrq+mw3l5OZqRHHxCGEyF1sGLyqGFCuk5tQ01XEDJ5B9/P+OA+x7zCyg6kxhSA639x8kitT2AUDxoYggiIAlFyWVbA734bQ3n72+cVZv1rHBAmGw3/wW3/zH/7BP4kRuz65QwwYEJrom2376s0bAq4l59FSzKvIz59dNzHGvuUY7w6HqQyJHRhO84G6YE7atycVr5NMc3RGNQGLSnUuWiClEJDWfa/ztTpaFGZskH74rd/80z/52ST9935wFejuZ3/+edOttYFW0mEekFhMESkighm7qykivSXtAQKAObIhUgwhl8KAgNhQUPT4VWWeaZu4YTADVm0QmalrosroWjdN0KoBCAGq6NLACmIJvAmEiBzSoCXGEBj3z48ffu/Jfq5NR2ePz5+9ePnu4yuZujKHIU/TYTjbbQF1t43gkKccmK4ut69eX7chTkP++Z/+hR20AfvF7/85AHRdvHs1uHty+v1P/kCP86ZtH15dfvb55z1gACpeLcDt6ebpw7OO4MH52bPbEnH7D37vj
xEjp1AcY6H5drh79bxd9fOmN4jlo9dETUeqaCXvHzw+P99uP/34WorfXR9WhMPt1G6TKg+n/Ohy++r6yxQFHZmkjavdevfyxe1wym2RZy+/4MoxtolTmfPZNuXjqSB0sXn28ovkuYG+2XRvbuZxKkX8+vb2wXmKXD//7IuLxxccbMrjk0e703yaawnUsJECz4dDSDIdpouLHWGMnaeGTZWbGLsW1MrBwYDF2hguLttm1T58uvnP/4t/fnV19b/97/3df/pf/m6R+fxC3vnOw+vb21Vn/8+/9/exCdt3P/xrv/0bn/7+H4VvN6+0P+Hu4z/43V2491OY1BiDmS8eVhEhDqZLHaIDEFMQVSTs1ysRqaKYszeJOHTrnVTFgHWqHHgeJ+ZAzBEjijUxqQhDGuYTYgohHE/7dtU1bawnHffXIuvz8wtmJcAuJl9BgFinUjhEjrnMw3Q05wgpMoBIANv0q5NpVR3GMbYJwbXkru3nuQBzHjKZB4yPH1xt1mG63nfQoxgCLjXVQGjuKsoxmZo7NC0nUKmamjb7qCpE5GYilTnGtLLqgAB0n55xMgoBHB0IAXI+GVjT9LU4B8aspku/eBjyvqrF2IGTW3BndzQwsRmZ1IRjq1odxUH1PuTuMQUjLAu9mUxNu65TQzILnMQyMxUphJhSd5Abt6jqwUE9uxWilYgREQInDjlnreXJo3fOzzbX188//fjPHun3KTWR2qFOOYsV3WxXvtlN9eRMt6+ff/nFF9f75/vr58zwYLdrmlUTQiAMgcFZ1LiKK6h5QB7KkVNIsVFTEXcBBzNxakChIDQiHiioSYwx1yGwxRByntHNTNHcTDgEBFusUoEiY5NlDEBkWHRiovtN2/litfnNH3xLcz4cZ6nqFE5ZHpyt2hjGnPu+BaPb/b5UbUN8cHmxv7tzgK5LCGbWhRBDxNByAKTAABDTvcO0TdS38Pjhxp3Wj6+Qz/N+ZePdfDwcT/uXt8dX+4GMs1rDDBEPRV8PAm5aKiN+9Ozn7D/bnrW83n376VMUXDVJplKzdoHbJt6jIUStVCQ8Ho7r/dxCWsV2uB3CtpFxXD1a77+cLLmJWPV1f+5lRPMmcM5T2rTjVJK5ayUKMYQpT6SIaDVXJA6Jpzk3zYoZazVF8Fq1CLlSDBTIENxR1JCQ0AE8Ba7zxI5mJo5IHENC8rc1y+bLoFwV+3j3/Mt8YDpf+67XUkK/ohC8CocmITshPtxRGeMKX715s24CEbRNi+e7z+faXW0efe3D+uef9LOmYCKiFNhRSsGYvM4AgBQAHJEc3dXEbLnPG7io1JIXZGeea9N2kZp5GHvxOo8Uu2bTT+Nc8wBozToCaZ7G1EcFnKUCknbdUVENEVvsEnXotY7uux/9dvPBPDS5ylGM5HbM00gtbDb97Lm0vbe9htw01BDhlNnxZj+ZYeya3jw5hjZppEiESpqViLvKYJbVmKK6N9SgBxbDbG7VSfJUBAuAsDoR+jSAuc4GIhEgNZHRHaqIzUXQwB1qrUxk80xNklzQgAMFjhVShYAKJGJVZC7kSkB5rnkuquJOQcSzNdwRYqyOxGoEmFJg0RqbqG/9FIS4HHGXzIqZOTgtYEtiI5ql3h3uzo+bbt1XE44NvOUEIxAhB2IFBHMEUJM8D65lKbvkGBnDAlQlr6ZQJatqzrOqIcUQQU2R1VVFxQlKraJWajbXpbldqpCDihbLbYjTfEpmTAHMHM2BicLbASkstzZG76Q2HmaOHqADicTui+HMi+XqxU0FZ44JEKacJ6ulnrKNAKSG4orVTKugGQVzd3UEAoKx3k46z4Glpi5Rq91we9xCA45msjQQqQoAkEMAioiiIshE0dwBlID97YEM3NDtPhClAhQAALyaa8REDgbKTEWqmdzzRh3NobqKmpkUtcnorvqN2jPzl+bTUsaOOCl8KU1KT2jz+Lzecv4cdXKtCLwkmwB50SvM0ZaWTgSLHBcyN7OZFp8FdFGOyAw5OSmiqWqARc4wQCNAd6gm5kZOKJaIwOFQxtdatW07s40VMpjqgEmNeLKpo3A56kXVSI0DKblAQQ8JEKuRw4R4DMDsjXkyiMvv2dydFINQqzC3jibaWcvaOLeANJcpqRAGdQAITG8jGIuBRxUQ3BbwM8Dio3tbHm1mi40IfOkLAjNBpKXszKwCMP67T4dquuTUCGhJDhGSmS4V7IRk5gs0xlwZw0JIWiJPy5R/oVy72/3qI168UO5g5ojsSAjL56zmDsBLVljdzcVh6c8DBDBzoqBWlsybIzmYmIK7qTDg4TC/up6UIqCjORPDkj4GCKmdVV6/ufFiLdple/6rN68hhOM8bdex1uomRJQMJeDzKdc5f7fF92BckTARBxoRgMDcqmobQuJoOm/61FOYRSKbFnUlKBIDBnRg4ECqTu5SaorgqswYAXaRrEyX3r6uoiFmd8VSFBLkKsmJY9uHcnfF+hvrMI35meCnNyO4Xa1iQLSsqY0tx/1+brZNTKv9cHr6/oPT3c2/+Rf/4kd/9a+0zUp0eW4d3cyX6xQtkBe4VxLvdUAAR0I3d1QAdDBHQCJdFEZEAF5If4uRyNwIl0fKVRUB+J417wtFaCnXIyTA5d5ORFQlL259VXvb3quutkzhkEBNAJw4qlRmWvA4y1dLDOa2cK/vL4aIyyNFi0nKl8Qaq4mpId9D6QhpaUGFrwJoItZ0bUhNaFep6zDEpl/FPnFw8IK8YL2t1KLVE7WbmBIjA6EFFdDqksUsKCQMvQI6IDOHEBc2v1RxzWYn0UKBAzdSXecCxWLTgYDM493zF+lkNMaX+/3N8eZ8063AAvNUKqKbChA60tmH3wwaGTvjdrXZMrSe92lOdtvUgbjq+OpZv1qHFDfbs6bZvTzND//63/7mf/u/g4197Td+DeXU6Yv3ry4biN3lU3qyvvrr/9HDX/utu5fPU/A6jobpG9/++n4/I1+IrQhX722fzj+9bl9PMJc6DGaoVa0ImC6JY3aScQIjzQMHunx8gWyxbzaXD834wdX6P/u//aeb/uLl579EY1DNxzeIUquRxwAcm3Q43SbiVZPclDGIzA7VyIk958E1d00wt5Jn5KaL7dluRxjb2MSQxqkAcp2UEJkXSBZ+JQr5W2rRW7LaW/XI/XR37Ls0Tid3dPM6VzS0olIKqEsxnYupoXmdKhjmU5bsOeeaMzFiQFOpU8lDIYS+7wiMXWGaQeHR1bmUsQ1hs96m2Hapi6Fx44DUxHA6Hbr+jCms+15G6Ve7R9/92sNvf/jkyVPIeNafoTugIVJIDWBCoMCEKoTE6JGZnYLbeBhPt9LAuWoNMTZ9S2RdwpKnbrfJtbTrNraswmUc8+FFPj3T+Uby7LnWearTPk9jrfdSkbmXKW/OdtOopxGniqvm0ZvruyZQrX4YSs5lPpZEMcR4PAybbrPr+ovUtQgtah5PXddjYNcK6IEphcjEqQ8Y7IOvf0AttH0zm4Sg284+ePyQgdgwj2UcZqhuYvMwH+6OAD5LqTh84wdXUl4drn8htXjxn//ks/ffe7zbpiDaLD2pcH/M
12VDIybixAQAau6GS4VnZF6Ea3CMyLzUf6gQAjEAAQdyduDl3QLrriXz5M7oXdciIhPPRQHYxGJMjNEhVBOQElDZtEFmwux4Z/bFYbwb9j/4tUvVF493ti311R/9cl2h1R4qzcXnGeZTHg/TcRgiIQ7l+OxGrqdyN61SAlVv2BN9+1tf263aqGG8g1XcQaX9/hjb2KzaoZ52l6vN2WZ1dnHMtV2vLs8e7FY9En/+5c2Xnx7a8PWzeDnlkut09/ILLsOaNSEGC123XbdXz3855DsZDiXn+p3vfvvdB5svfvULIs7mqW2mmt/91pOgiBXkdLx983oVzkC0acOqbYrAZ1++MtZ3PrzMqXz7u19frVlsPpSyeufJ6t2u2YRKpd30Lsff+MHX7+br/TQ4l6nk0GK/IqmnOt1oxS5s9/vbJgarOUoGmd9cv8rj7IgeAbtEKcwiwzyExBiilEpSE9FpzBhiWvep62LfGJOn1c3R37v42vgXn/zTf/QPs9Ux2npLP/zWw3l+kUTkxt59dDEMt//l7/79OX32d//H7//w8tX/7K+t/tP/43/y23/168sqmMu8jD3cXNViCA6mqoGCuZopOjDiPI6mQsRdkxjhcHeXS2WmFNmWg46ZgXBIq92lE6qr1pJiimkVsKHAChCbVR4ruYHMZdiPdzd5mrhrsd+tdhfoPs9DTEzkMYaca9P2onPbhhA9tMt2N7exzNNd3/Yk0MV23bbj/oTVfaqUs5zGVcO7vgkK77773uWDB9UEOYBjk1rEgI5SqoktExtVXWDVoA6mRB4iU0DmmDgKVODlVe6BQhURc4aAGBiJ2ByFIzqY1qxVXI2dAke3ykghYCBjmN3VxEVUnQwphJBl5BDMbNVtOHREbdu0AQIoOjAFNrCUYoy8VOiWejKXxEwQAoXALcWOI7LHYJ0ZAFJMO1vSqeZeFYoGQaiap2G9Wj95/P7VOx/UfHz58R8f3vzq1Wc/Q8gc/MXLz3/1yU//yT/73X/4X/znP/5Xv/fJRz8bDsftenN+ttv2qyaGyICmTNC2Eakxs6mOSDFQCBwNi9sk5ajmxGmuM2IgRuZAiKpFpMTAIUYHdPOS1Y1MDQGRorqKCQIGYnV1UzFZQtORmFyXCCFxNIQ8jVoKgZ+t09VF/+hi9e6jzaoNuc6rVTrs969fvbIiVlSrzdPUtg0g7w8nJKVow3x6fXdNvOQlwdSn+f5d4OzY4JiHs6vd2WazWW/PHj45e/iUmn46lusXdzLlabwDBGDCgC5Fq+moNCHNqdx6OfHh4Bi7frXOh/1pOKYuoUOw2gVgdDfcbLbGob08475fN5vx9c31F8/Pzrrb29vx5ji/qXVvLBiJ5VgaTOP1VGdom/XpVGKTjMkpoIUXX9weXg9eYD6V6Xqcbsfh+gSF2tCTU4xNSG1MLSBxIGYihCLZzJhiIiQCCqhqUoU5AgXmEEJwQ3MFh4BMHJYQSoyxzBIuHtLllWNIvGJYxdAFCDBnGQoqk6lZ0cSla7TrS2Vum82j8699/2u7i/7BRbd9kJrvPPUnD9XAi5MROjoxMC5TzaUDhx1YNalHCGREhLFJgEt5FqGhZulSCxq82vzmFUxzww1xmKtKsZabhhoUt2FeccuVWo1rWvXabqQ7m9LVCc9vxu4Xz9p/+9nFn39x9enL1f7EZKvL882Tp+ury/XqnLxlaMw1BmQzRFqu9VQkOOZpVMI50OAWOtpswraRi1AfBnkY9Z0u7qyemTWzNpVZAziRG2nFXOo+Bwk+O47ajLU95e1c+HbAO4Ej48ie3dBHP2qYQzS04owS0ANWVCBo22AmijVELCXPJYOb5mk63eU8iNRprtNc9sfTNGdzX7A4DWCv1kpttbJVLbPJ7CCjVYpRquBbrQJAl9eAL81P8DY3A/fpKVE5nQ6H29vh5uZwfXO4vT0e9sPxUOdJ6gzq6MyhjU1PoQVgNyhV5mnWqlYX3DW71DqP0+k4DuM8Z1NDoBBSarqu7WNITWr7vk9NAnLzOk/jPJ7qPIJaQwuemitQFmUmAkpOwRjU3BwBGJzcCAwMXMGrtFXOtDRaqkEGKKZVai3iiug416qKanwY5DTrROEapld6ugO58/rGj6eUTzGPUaR36Q0amOQ46uFUbhuCbf+IaXO2vQje2jAlgEQBXAIvYMcF8OhzKSqyblKMEYkDB0ICc3Dlt0oNACMsL2OipT3dACk4ODE64axezd1dVcxM1ADJwM1dALLD7H4yO5mOpkYeGBJRAGwR28jNbt1/5zuHp0+Pl+9UjugQKAKSeFlgl0uEj+4rttHMnSOmBpHAUbxWuAMrJDWAEZIRKudJJnTSWlVBxAkRiZxhtrzYXgiqwTzh6U05ZWS3iMWolGQSI80BKjpr7Q/X7wucKboZBkeoASmaB7MAUIieq2PsWqCu1g4QQYGpEhQGoSiIzDF4cuwEesM2hOAoMQQz1aVZ3E1VFw/lEiNahIClX+ytYEr3og28vU354lUTB0fkBQEKSAhMxIR0D45xZyTCQBABGBzsbbB2qUlzACTWBXrspPcwooXP5m+REYb3FeiEXzGGEdGXoOKSXbpn5cC9zoj3DxoEMAADdje1e0vHfZvb0v/H6O5WyDVnvT3OxWhpySuiDrAQbphjETucJheJLlzqx88+eb2/yXV+enXmtXiRrgkcuTC+qTIe9t9g/0ajHVchb1IMiAnhMkJr0kdYk7cILiAOjo4oXd/lSft2LYYxhRixQU4Km5giYWJMxF1MAN4GqnlaB9qQJ3CMSRGFIiCftyut98hDAc5CneiPOvhW5+r2yX7+4nYGDk0TzV3M4yrdTuWTV3e3x/z82StVu73d/+kf/KGVIQUSc0RkDsuzge6uCgDoyBSWKrqFJeV6z3piYsawjFUQYWmNVBXkQMQA92/ge1w2AAIvadPFyrOoh8sDZua0uI3AzYUIccEXIfqy1FU48P0TQwGX9oJ7IQnd1N0Qne/ZVEYU3Bcj0v0uvtCUCMOCSFat4MDMy4ZiqibFXf5rriJz5NiEpkNatilnImAuYKqlQgwhqWjkhISByclECgQ2UDRHB8c0i9c6UR2xznO+DmCRgUOPiOoVOQTswBFwA2ihw9h1c5mSF5+vy3R4+sFjS+3uw+999kc/blbdg/X2+OrF491DohDaVoFC6sf5eVvnfLrbnfU2HIchf//v/NW7nzwf6932nUfz+YN29zQ1XcmwW7XWcAhPt+/9hnz9r2wvV/yHv98++CFr0vGEgZ5//qvN5eXNq+vz86dF6fndm29uvytZUhvf3H15dbE+vXoWcVXmMXSzv/l4+viXby7i1+SW4oeIgJF9MasjU9s0V5dGGBkJJM+5GlSE80cPP/03/zRt+NE2/oP/x//pi49/9b0f/s7+ePfZp5/N+7vguF1vp/l4tb188eJLbrpa5rGcelqn0ERMfYNDGduuLTmb5nEueTo8vHokMrt7IB4PExqcDsPZxfrhg/OPv3yNhOCI4GqA96K4L38Wehsg2IJcIz6exuqx6RuTmcMGGasqIFJYQDUmpSB4LhkYci0
QuKAqocHyKwfiSJGnUoEImVSUUCgyEHFKu02XC5rUyEjqKYSL7bm7SK2bZmNZzRSBA/PxVIeT+Awi0sTw8vl108UUkgdXKURK6KJWRWNqRT1Pc1Fdd2m1SjcvDrvd+fX+RrE6mRSVQ9bRN9uY58ooeRynodzd3XGHlcBtTHQG2EQ6U8RaJfT3k+Qvvnz14XvfGE8lrTa+3T5+7+L2s2c/uf189fCiVaVSVh1pKVXKze24as4aaqbTYZsgBSTXm+u7drPq1u31i2ebbRebkJpUx9Oqjz4rlWk+jVRh3YRf//DrH33+i/38DJK4IxDGtt82/Xg6tn0ILcZNQ9jELv7hnz6vM12/ODw43w4HkND/3j//5bTXvlvNZZilLptJQC4OkYJKdVdzu98GzJc3Pt8nU8FMAyK4LWV5xGhqROgqgb1vUyRAcTQnJBUw9b5hqNI0MUbiyOpRsvDiREYACKTqSgWykuVhtJi2Dy6e3R5y4YcP3n316TTdhVK6jz7+ciryre+92/XmjuMgkWEax8ePzpWb6zcTGuVSH1w8WDWizWo8Ho8o0CcbpNulL29uvPH1eh1Wm9e3t50GRsxVtM7tNmw3bQySx+F4mPpu41bleNrbs6q1Cd36fDepDK9v3AOIT2OWaZQycTBBBOHPP/nicHhzvJkun7zTJzjc3HKAl/sjeAwxXDzozeuvXry8fNjv747Hm3Fzvnv/ydXrVy+7tm2POA/7rufZc1Ubqd6OJWh5eHbWh9jszl7fvG77pHXuVw0h5lMOgaaXuWkCEQNDl/pxOty+PqJlqTZNJatTVF+al3I04t35bj6M9ZRJpG2CQJnzhEgUuar3Xfv8et/zjo85Z/NcaM3aBeniZ8fp43/x5+vN46sPHtjLfeQP9i9/PrtPA/6f/95/1dbh29+8OO77ZvfO/SXZzFTdzQEd3REChSo1CyMFd9VqTqFpVyZ1v3/Ttu1qu269FRlTezYVd4BaS9/HyOxS0dFRmVJsOFcRm0KisY7uvul3dWk1iezo5vrm9auzR4/a9Qap7dLmzfAqJQ7U5CoE7C67zU6rzZI3bVL0MhRVXa37ec5EYRjuHq0fbbf9NEyPHl8+f/5l2K3W267ULg/jxcPNPNg836Wud6lOgAtCqGmXLjGrWaQgOBI7GTYNFNW6HAXR0CPzQihQFzM0RCcgoJyHpmnHchvbNbydbLp50/RlnEPrc80htkXGhsM4z11aFUTRhbcqCBy4qSpopmopNloyqYjWUuYFeBBjU90UlIA4NIAJKc35GDCig6PnOgFTNRHLXWhVMgbmSGaVOYpmihwiV5F21VYRc4+xMbKnH3yrjIMkunnz2fMvP6suDE3g+PCdS1QqWpGhjJO5RmZFZArEpIYxNmOdyLuYWkcfZFzuQRyjawFyBXT2lMJYTsQE4IlawIYDz9NtjE4OVRQAAsesFVwIeQlBVhUAdHUFJA4IfsoDMrl5kRKoK0XWm16xypSRaL3ujqfBAY9DnQY53I3EUHMJXdy0azUYxrLqmi6lvm9EXU1Acd21kcNwGpuuCTEFu/dTvLgduEubthEppGoq1akYXR/n43gKMZLZ6nxN1JxKLbUcpYqKq0ajpRxX1BgDQDyNFVchraloBYBSZRxySI27mVZ3qGi3Zbo7jbvdOZBBF+JufRUSBNg9ORvKDABq2q87vQ7ATWgiEBADR6SWEHh2Oad0GK7Pd0+EI5uHEOc5KzrH1KXWoVQVMydiGSeoAAruSd3EqVZxdQCXIujUUFhCDRBZHFCNiZC4FotA4D67P/72t/aXvQ1V2bTkCtB0ZBDnWgOHtm9HzZRSPDsP/fYY9jWaHKZXz15N26jUpaebY4jdN742fvLxTkgBltppX/AfoTUnN4gQHMQXhqg5EEldgGYSXLvIieAwT1Jq2/XNKugwN11uulDFrEScTpQYA9SJzIEg1NMN66ynG5Lq+0ME6Jt2Ezh0acqn7LM9ndePHlUrZR3S9gpDA4C12jTm1e5BLW9cayCoZZqRHa2aG/mchYAje2riEqnbtdFEEyGLl8PU0HqSMuSZY29IIOZAY6luUFUjh1rN1MR0rmrmjtUAMKGGkJquOpuYAxcHdWRAZqxogxRmkjxUrAycANkqa5WSB/e5aMnqtYhZdQUAZAyB3VXEcxUOzmYxJnMUByB0YmKyen89QFgovAvWhHDJxQComxO7QTGdRWsumktaEQJorUVtmo4xNE3svQWMoWlapEDuROhugLSUe4XYIqB6rVrnUtR06UsJHAF5Gb03GNDMF9KKzrnMqrVKSSEwmQMgw6hlcGvARSEBkJuBArIBqBmCIbgB+f01m9l9XYsYHo1y4OTagDGauRYrFdERCcigGsqpTiVmRVd3ZBCvxg5VEElNZhJJLQdjUooUse5Po4do5nmez2IigVxGMDddkLAc22YuAopmpWNGIFOvLgCIHO7Z4aaKyMuBHpwIEXgJYpoKcVQRcERKqhWQkc3EFFDMqpq4T4qTeHE8iEvk1KbGPRQtUgNgAIxt8+4H73e7VaXNyI7HDdZrUCMgxPiVVAEEC8QAiRABm4SREMBMqoxGhhRUTMgAYzGEqCCz1rFZ7tjLkNqhWGHwUieOMaFXzy/0emjMkRNAsBoY3a2QTAhJ/OE0vadyKRiUKHC2TJyiGYqRsxPvocBqQ259xZUF8qCoFnBGBSA2XRltKsfQ7IlyCBFjFQ+hogs4LVU+CG6+4NEcnRzuvUS2mISX27HZ4kszMyRe7tl4bw8hwPsPeDuh9wX74PcJvoWKboS86ABLF7WC4T0pBv1ejXJEdjBzo/tKKlRzB1sgceaygJENAEyXnyoAIJCaLNBRAFIpAHgvCQE56PJ1IqABLs5QJAY3NUVw8IogZvLi5uZQqDJLFSLkEACdmQWgis5ToapkSlUZ8eXtDQZ+99H2eLxDg8BEitX91XEac3mf6OuNxjpRGwInMRCEYnrVciN23qVjlVo1NjG7BfGiMs/SGBexrJqJZq8mpYhnykhWyXfNSmd1wuxe2/Z1nq7y5C5DKYiWYg9Wu64ZTrVr2GVGgC7GljChrzm+sflFxRen7AiPd11gzFNuV10WG44yGhVK0zhcXp7vX93+ye//+Pu//aNudSHqhIDkzLSgzhc/jltFYiK0BbKHaCZMvACwiMhMFw71/eJxXXKGZuJE5rqoiua2dA0hLEbqewvbQqByM3NZkGtIBOCq6k5E92wrAwAGU2Voie+FZl/QWPcPo6sKEDjYotIuVScA6m6EvODNHRERlyfGF1mSCQxMF6PTvycVNW0XU9c2vWNkJnBgRAWKnBRGN1N3kYzuiCjlbh5fArVSY0BWd4RKDFl8LoOVI003iWd3SM3KIbuD+Bh9EzAAoqi4F9AmNo1ahEAcgcA46t3dl0M+ODfzqYqdxmFGTVY8cjSt8yTrzbur/un44otOIQqN0lB45Ecenj1Xunvv1/4Ggo5vXm7UbBVR6+nms6/91g+w272emqc//J326mmtp1aAxvzLH//Li9/5nTKfuE1ofPXtX+fVxf6TTza0xWP40Xe+8/
/+J7/36x/85amxjz4/Xfzy03aG4dmRhdAViUGNiFQcIKx3lyEGqZVU8jTMp7sXH3/0a4ibs7Prl7dfe/L4cHj22a+e/s3/wd99/eYILb+6PkgxMxtLmUWyytnZeSBE1ya0q66bhgHciYCZiIAZbm+P1cHVSi4QUQF26344jV2XjgMa2Iuba2Iwc7pXhBy/Uoi+Alq/VY8WOGFx7/o2tqFKZXAvBQFNwRwCARG56kL5B3cMWGWqtdVSlwhAzTUQe615OhIqMSMvDykwOzRhs93IzZQiFawUA0JIqyAayly7VXc8nmoZjsMprFN33geG6+H0wY/e/3m5jfsQu9Xd7XDOq65pmxgZlSA0q47ajkJLLjrn/uHlNE3ENk7709S/8/43TsOkSPP++PrV9ZP3PtBxLyYhETVNiE3TBZFTrXeOGZ1rfUPcclgf7+6nB0tq/Tvf+9rHX7ysYzlNX0Yq6/PWAGrxx4/eWzXr6c2L1NLZw3dOJ9V5+vLZX8SUHjy6NJ8ehYtTpXbd9LuLQGG1TXd3Yzn57sH5N7/+7r/4lz++On+KHJv19ievjtD3ILy6Wh/zwI4+VNEZg4pIKeH61Zw2TbkuXtlri5swDEMpqBpIPWAjItAySjQoaGL3Yw+iwEU0RBYT5OBoAsboDRMRmTktVcoGkai6tzGOUpb9rEGKhEzOAbUYB3w7vRAEWK27m+M+kkCCXAERAnhDAUzJkEI81aoBgrmMlcsU1vHZ7dxytH1m7WOTjqcc193z1/v59tWTBw+fPnnwq/3de995cvvR9fHF/unT9w77gxVdxfAXn/3FxTvvqcOLN8/PHlxud3Z4M6Sdb1ccACHKgyeb+ZCjkQNWmd559KGX/d31vpSKznk8asPf/Uvf+Mkff7rbbtFk3W8/e3GIHpsuvPPk4WeffWkmzKDOzXpVEd+cjqnpL7/xFMbZaz6MeXe+0YK1TEi82nbM8Uy63YOzm2eDFZlO8MsvXv/gu9/87OPnrrKfp9r4d3/72x/94o3X44Pzc6NoExQbAVCy7Hbvf3z7nEZBwyLzzTyYwzTk/YQXJa8ajG08nY6rNmar3CYMEDjk0zS4cAoQAkK0Sl696TuPp35HcE1SSYZsLkWh37H5KQ+n/pynO6dV/90f/fof/uz3XWM5lKz2u//6L77x5NFP/uTjhDbMeWjSK2r+l/+rv/zP/sl/1b6U56/eDpJNwBbHuJkiAgbCXEcITCFqhZiiqDBRSGm3uSxlON0dOSYwG2/33XYn7HmMbgTuJtZ0u2k6gruaE3GpwsSYUpknV0ldO08nRAjs01xLLjev6Yy6Veq3FxdznQBsGo/mNM1T06b12W6eh3XoAiEg1pyJiCMgkbs/6HZtq13XAM5TvTu77Eq1eR7ahpuzVuqYVivVCm4B2QFzzl3TArpK6ds0TfsQmFMUdXVNTRrKTBADg7CYSCCac+ZmhRCYkcHzrGpqLoRgUqwoBHNjhsBg1QajTKwcUpaKRsFCgIZDcpljIEh8mkvCxkVDR0UhSy0yblabMp/Q29j0CibkUhWZVNwAOYJ4DSCGAhAUrIsp18y4wAg5pm48Hskg6xSJqmdiDClWHc0kxU1RCamtWk3FOEEIq/OHJqVr15SCHEeV2q95Ok5NitT0B7EYKDWxiBKhaA4hFqmLfd6tUCQxJ2N0coMmJbPacgcYRSyEUObi7oHjkHNo1sxcNeP98dUAgUCB2JzVDN3VNXBDCOoLg0NCiObOzOQRwImBAMBqdbu9y/vTVEvNtXKKc5mrlIajkttcRzUOofV+E8I8T1i4Filz6bqQ2t6d1pseA8cS61u79f5QtleYVW+v79BfhA73Zd7f3WXL2GI95dC2qe1CiCctd4dBqtSawSEQoWFqmxAiOPr+6OcpdDr5WE5HdBBXohhDoyJQrYhCoPN3H9/W0+W6/dnPfv6jh0/LYFDBEsY+2FgIZZRhk0jzHATnfYmrBqaqRcRmqrJOsZTj+W7LCdSEqWmazkZHtFryeLCma8yBUiijhLaLfU8UVCohRsYUQ64WmCigAtf7G6mGwI5ISGYGoGQAgO7iMdaOS+PN+oxmcKUYOwd1KBwoEmnOggNTzyHOjvBw7W3aYPP5z3467rEcmt27Z/U8DVc9Pjivz18RRRN3tcCsqqDOCGYmIhwiEAAoxVhVsRiIeC1RKFBbPa0fvg/9Km0uu9UqMvB+sE9fh8kAopaqgikCzrnMpdZs80BMwTS2CRtG9LwfTqNAG5wBW64DXFw+uoWsMYpqng+r7W5/Ojbt7iRaQphRExIRiy81g0gVIsa5zBTJC1BkRz3lsQ9Q60Rem8bHeSjKxYK4amAkIHRtqeYK7nKay1zEbJaqFKqpk3LDwB5TMHRFpsAAAcRV1QADsVOYAdCDFeiZIgC7Wp6r1yrldprEOaszLkkHqGociQMjkzEKRoSIhg4RwFNMFd2kiFS610uXO4wTgDkYuJnSgmFzJ1ooRtjElDis+nW7WlHbZpE8FVUoedYiOZ9i06l0qVkBcaKGOBIYkZlYdQdkBRNCjtGNmQNBcKK3Hg6PYTF/iBgieAqM5gEdAUx0gRZVqAKY1as4G6tkJwMI7ujLaPPeMLJAYTSgdiaENUP72uGEvtdC5qhCCEVqg+TuRAqlEHtPGInHPJhjogX3uaBpqEv95IUDckzFPZko8kXszhIep1ssGCyEQCZKTk5kAAjEkatAiEQmBBGJmFjsK2qyIi3jPUd3RADzJU8HjuaIwImWSBKqoyELUvZaHU28ihaDYjCrz0iV3GMT275HaIfJGdyAOaTt2dnjd5qUSmBSxqYFikgF7kG8i2OS7juVDcARmLv1mmNAR0KsIgqC6OaYRZUcCKk/r9M9H4JCBBfVAgRVMklKwGSOiHc6XUM2CNExgjUkTFapFiuplkeCX9d8IcLeYogVgCmSOYuTAMXmxuYpxUgeTVuBhoIaK8fsmhk6wEY0anzK7diEKSDEmCpEHVHEbAoYF7OUudO90OOIBPed8u6A90gVAHBYjs0ItNSig+tSCOOgb1EfjgCIqFoXogMhidYF3O732s19xOjeCELs97CIRbRCQHJVhIUwZACKSzoTXKwCoTsuSYHlP3WwJeyG4HivK8liZsGvAF3mRMFMFqY1OCxBNF30LzCrig3eDtN+kmKxVAF1dFLTVd8rBasyTrXMRfNU5+HqYvtnH/2qup33a0aikBK5VpsEXh2OZcoPCb7R4YqVOKaYInsinxSqo7sp+KR+Kn5C9CaOpQDBqksuQmhcldwNsRqqoJpV10yQCe60VOCcK3Y8IgMAozeqzJ6aMA8VrUKIU1V1a9dJhhISOQALbKH+oIUm+Gcn++Rufj3K+2fdisM8zDFwv91+8ub6WOxru776cb1u9c1efvzHv/abv7m5eFDUeKmMA3QzJvYF6LJECOEtIpoCES+iISMZ6JJnBFt+X6a65NLI3JnCvX0M0U3B0BGI30YL73VHXLz5xGmxuDHR/cIkImdQtSoOHojzNB7r9UX/mJDc3F1p8d3aQjsiAjIFRCRkRHcHU0Wme8IVOhGB2
tvO7HscNzPb2zDyV6yiGDgCuIMgJAJzVQImTt1qq5JVDARLmR2h+mFScyRzCExiBqDMS0kcLXzQJq56ToRCTOTO2Jub67gsjxDIiETFqUVnqXR29WAeH5R824Vwdr5dr1oRe3h2xaGNyYc8rGPj7LGN0/FlbGJqN5ji9XZLP/gPzp79NJ1drnFbw7mFKGSx2Yz7g+Tj2eP393ev3zlrcDy+2b84q9de4+F03I/H1fZiurmbfvn506/9JjXN6skP1483H//hn128ezUOefjyxeZ8eyg3g/v2yXmgAWJYPXrX15fm7FWbVfJqAACWgUBI1eTqcf+rf/Pjs6unsT3MQzZm8FoH/5//r//3Zw+evrq5O1vxy1993HZpn1EmQ4B13zXRmxjuTs/W/Sbb7EDEYZ4zclQyJDtNw83+9YNHTzbNDhxC5OEo4zQ2TRzHcb1uEeB0Gqt4YHIDM3/rIPavmtAWMRvfQvcBIDJ/+MGZaYkpEZMBE1qes6mKI4kIGBNNpVQtomJu0zBpVSNtu9VpyskAkLNWI69S2hDMEQBMlJAbDpEDEMDlRnI5Hk4P+wtCHFSbmGJwSvzgwfnx7i7lKZV9ByIjoGNMxGDEvD7fcmpqFTN3UETSUlwqoTfrLpuIKQc+P9/O+/n99z78xU//SFy1Izxrm4uzm+s3u1WPgY26tD6bfF5imI6zKdTyoolrsw2n7v5HpPryk49Or1qHRs0QpJTSbjardadV33z24kBgeWpW8cXrV1Wgj+1lv2LT5y9eNS0kwFVojne3Fxfn8yR3++PV44c3z14a4IsXr374ze9c30zFtAU88d1733j0/JcvpsO03vTjkKXqbGJiaDAdhdCqFCkGROs+YOR14juZx2ny7A+umkKWQoxOGRWMwZ0Ii1YwQ6BIpIYqAoYYERzUnAhoYe25OZiaOIAoMGAgbFN0LTWbsSK5mYNWjskdMFBEn+dTw6RZIQAjxhQWS6sTqjqC1VK35+ujzTZjnS2mJJEefPP96z/5aDy99paadTx/9ywhOuucT5OUL16X0uyw2f7GX/v+z//tn7np2dn6MO8fXD1GRxJ50nXB8dmvPm0t9j3vOL65vl5tL5wQpkkcOYUPHj2+3G5/8csvq1m76lhtGvP5g8vRrkObAqXjWJrkD6+uvCCyTXlqu4QcLq6uvvjy1d1pSOv+8unVNz9851/+7o/1MPbb7e78vJSaq6x2Ky3GHJjoansx3QwwlkfbVY4pNLY/3rzz9PHx9eu+b29O+7uPbpuDpTbYIIEiJrw73X79a999dfvq8xdfzlIvdmdffP6CYUIvbd81FIdSH+0urq8/7gKvgnfB99Nch4zgiBaZUIGaJIDzKNMkm/NtLSOzv7k5TdVCG60Ur2K5yklFbwO1itR1zel4/P1/+XvRLBdjjNNt5pSGqcKbHFHe34QHHz741bNP5NMXP3z0tb////3ll1/cs4q0VmKw+3g7IoK4p3bj7rVmt2W6SGriHDjytj3b7/cAWLJP+dCsd2bAAeZ5JiIhajbnaT7U6YBEbACmpjV1sWYdyjHapol9mWqZVItToul0MHgVr87b2O02u9uba0PgDjYXu2Bcx3HdxU3TH083m93WtJ1LadsopZrUlIKqqemDJ+eihZmHQxn2Q4pNu0pVVDyFEDhFyZVDCMS+dP855lIxJHMLgecyJwxmQJSM1VDcMTZNKYWa1gkwkJu6O9+/btHdQmBmFyf0yAHn8ZYac4RShGJEka7djbdHM3YnCMlBwS1QUhG3bDojGAdGYzMhhAdXl58++wQQIqEgBgwxRQBEInIUKxxDjFQyAFgikGJmmcFLHfsmGaBIMPYYU63FlLV4zcroAKBLv3JgAAiUTB0NY0qxCeIotbpLWvXrpp2K9etNrbPaEoCXTdepoBt0aQtaEClxXECgkVIuM4eIkXPJEbHWTJE5tiLVXFNoVERFwaGalpqZcK4FgR1A1KpJz5ExELJYJg4LrcaWI5r50ipM7q/f3N6cjqaOIempSi3MBKWYVASYhqzqfds0XeOAdZ6G4AR4OpyYohkfjmXtkQgispl5qfKWW2eSnn8+9DX4Q/K7m1aCgU/7N2XOyLjapLP+DIEqILi3TSxuK0huSO6oSCEA4Hrdb7p1Fnpzd/Q8g9xMw7xZbQ0gNs0C2mU3BGiljvtD2j3a7rrTcbAa84Tri3Y+DXk4ah7bNRexuO2AU0osFNBxlWIIQaRsdn2KsahYLcxBTWvNsWFVj20zDsNsBTGkJgV3r7VMk1tVzMCg4A4KCOK6dPFWNUJMIRKRGhgxIgE4MaO6AqSLx2G9kaJdim65zoWZVEzyyNwgakwxWEOOOFcruU3hOBYFqBbKnE+nm+Ofv1p/P03rLZ09iJ8/bwMRurlaXfCfCoTgwi1JKWogIGaElDR2zdnFZtW/c3m5Wm2wOgeElk217F/WMW/MQgaZKTRNiH2BasMIRROAe2p3qynPwDhUORwn74hWK3h8IYnSatUkkM362qq0CdoOYiK12LR4GjRnPZmUWVEVFK2ohZJVxec5GzOgF0OpArnGFvpVyF5jE6voIdd9xsNshnH2ud1dxJRElSKDKTmBLPkWQ1AGdDAijBEpIoGEEFOM7FSNwYGdiMXdTaWaYjCEMJj0zC2BquU5i1YVVVcgSl30al4CCXpEjAREhujqgSAa2pS5aVx0GYpb0a+6n8DdljiO+nJNBSKg5ZCnIMIcI3Ibm75p2q7DrqdaCXieNddcZPJqUXOtQwin2HYlpJR6QjdFU3dHii0xBWIIgR04hKVr3MARGR0i4z3AtZYoVeqsZAQBzJCommnOGBbjNEZOkodIrlrcFdCAAzMDgLuLKyATMpg2DmTzGZUJw4vAIy/kWCXwkKjkggFiIgIQyVYmYw9oQx0QQ0ZoY1PLlJC5upLmUlDalNpE1DWY7t7UYVqNXeQUwAJhcQf1JZgkeSKPogoO0zTXYH3ql/ohB1SRZU5My7j3Pq8EAIAUCFqGCqpgwoTZqkkpiNWWUlsDE1dzh8UnCOixCWGzGpGZqBEFQhGNzWr3+P1udyEGKaQUG4HgFtALEjqQuiRmuQ8pLL3gvNArnQmqm7rIKDYExJZTLuQIAjLp0LRJ1Z3AVIgs64kgosKCVCG1kabn9XVutCOONTPkgDOaJK+PvK7rtCt47jEgG/FkmYiDOZqAKGN3N885hRB6k9IBBw+u7m5OVrUiEzs0huRQ5/x+jCf0Gpt11WgBvBoAU4D7J225GJk7Ogg6LG3yi6n6fhUgqCoujeduRASO5oJIhOxgAE5A6npfkHFvJdKl+8wNmUhc4D7Qd58IUjdYTJRvP7NbBTS8/4+Rid1AfaFWOwK/raxhhQpufm88scV55G+bixDRoC7fyUJBEisA92xER1pkWBUhVCDa7w/PX5+yhuWbRg7m2DZdBS6At6dRxdHMVHe77Zevr8exnO22m81KSlm3DVrd13y9H6dSHzO908KObdVxLehAUmdsWB05BHIrCrPQIDpiTQ0J4iTarVLRcn7WEkPbkqs5M6RILUEIIVI5zVvSxAAJ
ZvLAGCJtWnrgQZhWyblAHVWVnJCczJQYEpCYnW26mvPjqgLVevzVkW6HamIfXm6+9c7lrm0+vj48vy13d5qcnsRIjcSU9renP/lX//rXfvsvnT14UlyrGTMAGjKjoxsDkllFBHRQW1xjDgBEZAsaiGxJjwHCQi4HN0ACk/uwoLmj8/35080MwdUcFzulG7gukvQSTzN7+7iaOQAxA7tXQ6SUwmV8FJgcwdwQEdDVBIgQCZwAfEFXmymoITkFhqWii5YcpgD4kjGH+1q9BXH8X3cVcQyxSUtOk5YEghRzB3TmSEBWT0wxcFVXlVkmBQ6AUKobkDssYhtHRKQGIVchDAGq1TFgoJAUAd1ZvG06pIF0zvM+UONCtQwynICdWJMfd0Q8zw/OH+U3t5uuLcc3yfM0DvnmLmyeDp9+2unYXDW3x/zO3/jb7eU6hs273/v1v/hnf1gnZYbVWdNDajwwrXQq17/45dO/IRqgffDhZvV4/OlPmio172sxvun5YuVTFoO4enjz+tXdm7uLr13Y+2fvf+9Hzz76o+N0e/XuU6mNfPEl3BTtCnFQXzYXclBCBud+e4XMEPzZp7949Ud/9uRv/Y+sv/JAfqxdWustXP7lX0tn/etf/uK77394eqNNopjANKww9n1LlBhTQ8EBI7OjO5KoNYFjXLVdF2zWXDWPJrUJzIHjkoBlGKe8XnfNqvvis3xxsT18eYP37H29lwu/SqC9dRWhw1KyC15PQz472wBGraYEMRKag3vNiqhWDcDLmANxACYjF18QBq5EgE2MtU4xYERKsUE1ZNI5c4yIeHa2VUfs482buwLcXZ6jc1WRaiZoGadTvTw7D5y2q807D8/H67Ec8rjXVdOvdmfdet2uNwGjVatThtCiehN7EujXXQOwXq2efXFgJxnn0PXTOO/v9t/74a9noabdPtw+/MWbPzjvOyiFR29tO9RiouIaGV0EwKfpBCQ23i2roFmtpagiiRSKMXBqYyh5rlKc8OzyTKtNcy0FE3Ns07rvXEulyAaPLx9+9LOfbc6uIgTJOu7venOYRHK5PH/67Fd/nGN+8+Lu0eNHf+d3/so/+Mk/fvLgned/8Wa7blWLztU5YGDiTMG6Te8CuQgynV+eWRmvp6kNjaP1u4QomHDOGiNRqhSchZ3cwBChmiUKRCGxGWKx2lAwk0AEAMyoBo7kRALLWwwA0IzEbJ0SAAG5Wm1ik6Jj4LlKzy24tLG90yFQMMJIZop9txKdjWjJx3rsbsrssWx3582MRVTIsdWKQpEcA2ySBP/ZT37+3/ydH47HoWvXF+9uTzK+vHv50Wc/e++7D5999BlxcfOmTRXg7nDsYtjfHR3ASfs2FdHd7qIW9oDElFgvz1Y+HP/sl1+YYU2Qsaxjsqb8D//mf+Mf//M/2q3OJIOEvo3xvXee/vQnH/VAVv1i185zJa8R7azdVakb8z/9F7+vx7xUSg/DFEErqLRRVGP16hWq90jUyHe+8/D1cX7/gyd//tNPbqBykBHowcNzN5ikREzH6/HhxYPR6ve+94Pnt8/Eh/Wmw9mL5dgxeLhc7V7vD451m/q7189T5Kn6MJUqcTyW4TAWNSgipn3Txiat+5SPk6GsdnTzauw3W3JmRRbMOZMZIYEaqiJmqURRUQoVdWV05GCi0KxS29EwH9fr7TzL9bM9z+3//f/6j//y935nuGu//e33fvrnPwMAqeKuZsaMS8bW1Tnc1ze4W52GEBMiS1FzrUJt33Hg1aZ78eVpHm7W63NLzVhzE1rJRwJvmvV0eJPS2kmbLhBYIOq71f5wqjq167N2t/IBzXNsUIY83T6fWkjnV/3uQtTu7u66mMZcCKgOPs1S6W4qVvMYU6yuoOF4O9Wc+76tZqVMm1VzOJ7aps1ZHejlq8MsuWlbmfTBg29hXKCSSIhEEEMQ8RhDlVqrmAAjmykZMbq5IWCMrbsAhxDBdAI3DkxA81wJ0B2lVike2U1NPLcBa65NZK2qXBhDxApuHIN4QOIYoqmaitUSqXFqUYUYRaFdnYuGAHA8vnIqjCy5uBEGRgxLeTOGqHWm1IoYEeeSIaAKGFGimEth8sTgzoTkkNS0CgDEEIkoAlRUR6RcRwoNYjQpIbRuwsDqEjC5uqkDeUQuCAaLId1Fq5ot6jMjVs2iU7zvekYwQ2UgnEtuYiJiF3IXBzXXEFgN1AycEMjdmBaKpy4HaWZiarTWpdzFTBAiEAdEwoiuojUyqaoUyVlNUdxBRVWQgCPPuS5Y1qwaU7x8eKaIq9QMp6HpOw5NGcuq647HYxn0dJKHF+fzdKAAtLC6AQDg7vWwavS5YNc2q94RyjTOqUlt0dXm0UNhCCtFOBSR2NjNNTjGjh0W1z82qY8ctpsd43pmmqD5xjeefPrTa8ZU5n3frnMewZSbgGHhxzKMliC0bXPxaHN6Pe/vpjrLfKxN6MVUAyHRasMyz9urR3PWECLJjAweQyDEgKwcI1vVwOCoJg4uABAixxRLqVDV5iy5lGHSaYqkYgBiOldTRCYGChTqkvlSMzR0RAr3vhQ1olCypu3OAElBT5mrgaILAJKLI1m1E9PKgSK3DAJi0MWpnuhy894Pvvn5L38px1rfHIdfPd/8Wrv55gf2xef1dItlDmbMEV2cXMmyOTDD+ZavHvG6j13XtquE5ATkplXGaQ4oUOd6KBGAivipeuwdwMHGYZ/HUUulUiMCxCDVB60FLMSuXJyFp+f2eJtXbeEUUtRi+fpl2LWYEqnbLMSRLQe05Z4WmraYMYVhOhFkiKgWxnl04FoLoxEgSCmmIJANZzcmnYVuPe3VjUCyRHCcJzMLMYrMhC5eueEAjRUDmdGXriEMhGi1p2R67/BV8+AciZhpuUsgogE4O0SmpoVa0UCdFINGD4EYrO+ZzMdDdo5hu4FVtFrZLJiAmKrF1KhkD3GqExmxgel9AA0WyqUveAsEI3UEWyoGEQCIqWnTYlyPIWFqIbbICQndoJS5lqmUorUGylLmEFibzmHxUXDTrRokdEYmRyAAdkIgYjZA5IBITI6IqOIcWoCaydStOqWIDiLCHBiMHRHUdWZSNEAzMILFz4FE6OgQwJbGD1RA8wZ160cPccaIMXrDJYIKILcW49EOHDVACVAbRfUKjAE7BwAt1SRgbD2UoxB6rwliICaV44axt+rHELEndLUaAqOqlIrM6BRCUkcHC4FCDEBsAC5mhAy09EcY1KXgmpABHXxxMSiR0v0R3kTMFgnDXUXFwFRV3IEMzACcw+wAsc0YsGtrkdVml2qpVZvt1fbBuxi7yImYqzvGnjiSorviEggFcERHICQHMPfKIW62GS0SoViF2UhRcYWETmDO7lO5WXU1DxPAKlITKFRXFgO3oseGV5jCi9N1JiB3UGXXJjQqCiB9HdZVLiSG4kakAdQ0kZNpUCUvQnxjMqcmxURm7NqGCMBGQQzBDF0DE7oLGMAcHNZzuQhxrnGlibUEjogNIhHS0jEH4ItxCh3AdBmuLzzhf7cI7pnXS4Ts/i/LJvlVcAO/+rB7YwgiuqrY2wr0xYi
0UCAAkZDcdCkZ0iV0hmAGRIsFCdzMbPl3VF90QEP/yux0z6J1RwdaDClq951nCEjIdn9ZBVoIi27VFBwIGR3RBYBM/fmLwzyFcu+8i4buqCmm2WB/t1fRJsZhOK36zlRf3ezbtlv3fZMCo4+lvDmdjrWWUtcEm4CGXgIoIsdoJgkgGAFYy5gCZCHm1AVx94gwoEdmpyC1IoVqkB0RvEE/iAWkgIgCOw7baA1BcQoE6h4DH6asGrSoIUvWVYp9G3xWKA5GpUqIbRY5CyweTPxBsEfnTcvll3d1qvCz6+H5ae7BJ9WubTcpffrqZqr5A9uaxcuL3X7Mf/IH/+a3fvs3dg8f5wpIyBhNDQAIGREdGAEJ0QzdQVUckAMS0YJgIiTHe0FwCfuoCgJ+pVG+zRCaG7gpE97LN0zuCwWbzJaL/HJcBTN3NaAFte0hEt7TYBqzuoRGiRiJlv4zRFqcbhTZVN3vGd3g4GZv+Vv21b/AffhxySa95XV9JRUhkdp9TrOU7O6KhDEhtCDVzENahehMpCBDnqqqOYCqmajDW38W2yBAFJkjYxLro7FK2xDJLJJBakMtORBO/flVJFRRq+aG0zG7htifU7Mb39zsNu14+5pUbu4+3XVeX36+Pnt3/8nn733/aduCHue26xlS3b7frQDnLz763Zf2pp4K77979vrzL771rR9gBQsNNLyiyNOxjNPlBz86Xh/ys0/KfLx+bg8+/Prro509fRBZRPP7v/nOdD2lbzy9+ut/rV5+fdZNjY/KzXV9fXznatdvN192NByeBzY1ocBIgGGJSiunWJEK9F/89Ofy7Pmf/KN/ePHD37g73ASKX/utvzLf7VWrlenFZz/9Sz/6jcPd3eE0gXHfpvXD8zzN43DoUqOq0zBwiCmmPIxdas0NQa0WqSVSevHsWnKMkU/DxJHbJrhrNVtRBMMAiDESocrC6PzKPPT2L/dUwkVwBDRomNG4Te2bZ18+en/rgNOUSy61VOQI4LPULqUqxsTzLKaeS207dodaM0KdpsFrFRVRazmqWmDGGJ3AANO674oWt916g6t6Oo7Dfgohdv0KkJvVOq63t4dZKKq2L788tSGuzzc/3u83XZfLtDpf5bnGSG3fcKTZvO9WcbUNXb+92Pzqk19cXV26GaVmmKfGrOa579e73dnLNzcx+KfPP6ausRSHLP1qdUlPTm+K59M8jwIg1QBLiAheTe9/QpdX2+2Ds9bws1d3+6FSszpv8XD9PAQsmmse7l6ewLGjNZJ//cOL65fHLz9/xilenHefv3jRbS++fH378OFDA37/a++/fH692z5ESB9/9ClpCgy786txr//o9/7V3sq/ePnT6G1mn4ZaJg1t2vTr4fUUIrSJhmoOLFaKDF3U69PLdx58+wRwe8jbVRdjPx/rxIddaADRnM3qIoE1kVRNVGuVpRON3IgQ7S3m3AwYA0ertkptjDDkQuSMKFoTUZNCLcbISMTMCCqiibCqpRiQoInNNM8KJpoZzcQiBQqhEN4xUWzYV0gY3Obr6+tn10Vssz57cxg7YznY1dmTL16Xdbv74iefD/mTqwfrNfHhbmrf307HIfXBKIxWoaF+0waB4fWbzdnu60+//ezFn3UhGXi2Yk5NiA+unhDHT559GtteS5nG/PB8V8q0bTc//lc/0RqMm91lV27vyqgf//KZedt1u3feXf3Rv/39bXt5fHUzTBWomCmWcDwIxogpbC8fHK/v2o67TTuOU9+35DYVuXrw8MtPPjpO+cXdKVd98eXrh5cXx2l6/OThx8+PLw5DCuxdLNEfP30oswiUX10/j5FiWM0Op+GEzjLPzBoojuUUYlyvVjWPqYshhXZoD7fHVd9uFI8yxVXfblempZxO+7vpYn2eOL25PaxWXR4yRXPRLKOrOSqEiJgAzSk0TePgahndmvWqzjMGatpuX8vFWffke48/+cVt6MM8iAX6X/xv/pM///EvOe6Z7/0U6maEZqpitJR8uTq4O3IIih6IkEDViBecBAJBLoUqnl2cI9k43mFom7Yr0xhCKFX79cXp+FpqRXQn8ADFqhGuNptTnm/n2y41fdfPuZgak4vMtzdvisenDy+3O5OaXaFvwmrTTJGO+0Mu1RGOx8HNjCoYaFEH3w+zgU/TeBMQEVSHWjE1aSynaqVrZB6LVGLAmKL6MpAjdjdTp4TMRFBUOQYCq1liCIK45MndSGRqYmtOAORG4kChQVBXAoQQuEoG7sldrToqeoxhJRrWq26cbsgbDmRZBMDZpVYOIcRGRRU0EEfmuToZigqtVqdcY0gphFzVGR20ikRMDhBSb+DI3TTdrOJOHRvuBBFc2qYRKTGk+4ACYKnC3IjVyGgOQKBKrkoMxAxgotVB3JG4MVHAEFLIU1XHea7qskqrk/hchcCIoy1jM1CRgQgbSksm0QHmOhJSDKGqsqOjGRTVxRWPKqqqbk5kJvdFMMyhlNkB29CKzQgAZGBKFB2iuatUcEMHczWrTA0FZOfYJNbMgLnUWjISmHoRKTk3TWwDNTF0sd3vD/vT3PStipuolFoicxPldEyUrvfXOs+rbSsAsbmvCW9TIIaxyKR2miePrcWeEDpsqI4xRQixAK8DQEg9RbisQ855nouUfrNq2xS5BWgMWqVQS7wd3kCcynToEwGimqhYEFfzWqu73Zz2Z8NlUZxlBq7bizb2bTqF689vHh7fbdvu+subi023Px7Ou/fzfCcAIrVbNfNQmek0DpETodRaALjpulJzrRVcOSZEJtBxmKxUAqDAdS5ePFdRZuMA5kAEAmoFFSIHQERfICmAiGqOy52WQ7i6OpRKgVIkm2rwBIYIiYKFsOB3ShYN1obsdSwOqm65R+vw8QdPyl98SbXSbHrMx27DDx/HelyBoyKuthqQ21V38Wi9WmnqPTJQ1DpaLfV01CoLmBPFiYgCI7BowpCQFGDY3x58PIG5B7BaAEM6Wzn4TFjXq/lsU8823XqH252wZsoWcZrGUCFWtf3zi7MPqsxuoKDQJKyac25SdxgOXepKKeAgVRPD3d2dx+gKQGiEqkUNXcSKAfrtZOAYIk/V99JaF5ssUOaA7AqsGkJYsBSIZIGJKSTsQzCBPBczdTUttTaFmphLjrELnNSsSg20CCtkophCTE0lqAGCuJVM6EAQU9e0IYIHFCXoNmtWwhSMMSqzagSopRCzahEBV2GQFKNLfauXAsAStgAzv587LtELA1FvIiEQI6eQEIL7gvXFGJF6ZI6l5FqyirgpISKzqMkw3t+NOISuA6Kl9MANlhpEXholOTAGJLzHwQYkJ7KGTEKsRry0uLfokHXMgwA6gSOou4u5gWpFRnMCq4SBgNwN3RQdncCICDogsHoxFXfMoQnJlfw4F4hskYzNVFdNiLOoMFPLRDUP4DhV49R1TdPKGJUe777285tPpfMOZ7u95bELYesq6oJgVRTRKZA5mEFo0qlkTgFcli4AQlJURvLl0rn0V7kBBgdzUwJkXMxRRaW4GxKiACFFwlyyiyAs2ZOlqgnAXdw4RIhBU3Tmbt26QrvdAn
M8f8rbc6dAHNyMHZAYkZm4OgAQuLoBMC7SCTE5oLcNpri0I842ZRwwxASNlciuBbOanKAk+eK8eerVBGRJ1DGT1tokBsrP9fXrcCdMDNXMGDXUuJKwlvpAYyuWhN3RkKtUQmRzUgUphe3I9AZlm/pgilWWHIxjUDAjqLInLMGIIS3l9IloZXY5zyd3zBAjmSrHSIQhJgLikFzFFjQ0uLndG7jg378mOxEtyS9EMFuktIUMvcCYXHwpLCc3uy+ddmPmf08PADddHmZ4axNbzHsGCzuc1OqidzogIi0lf/f3NliatZY1uTiP3G35SDa97/dbvhwzM11QM6ZamJLdA3AWnQvUqpkERhH7xecvp8pFDYnAYZpqu0kpJXWfxjzdTW3TlNOpj0wIf/Hp54C026zbEEjxNOSpTnclI9HO7WuRtqTHqmctTbWuQ2wDs8qmaT6fh5ismhJTVnWHdeBapNUF8MxTFhOaRGdtNVcvFsHnPEdEFKgqxAxqtdZAac6CAZTMIScKCNGQgb1WYVNmINAYIyEEBJVKzI68Ytqg/uaWNoSfDPhskr37gD4XWRlYrU0I10NdHysAIear89U45H/7+z/+/m99/+Kdry9nMCRnCqLGS1kd3isghORAsCjqbsRspktsF2lJcMO9+IJfSYrgBgoLvhoAEYlNKwAsRXsOjmBM7EALXvre3UYIgLaQzBHRadnv3JeZrpv50tLGIYE7EpirS0EiJLxHkdA9+0xNFiGLOahUJEIAkZkowb9z1r2VimKM4EiBTJUC11rVljZBQxe1QoBgzkghrinutDkXRC2zlCJSRHOVeVlbVUSlTmqBy8AekQBLZA+Bm4BFRGBcJSt1nqfbJiSD3PbpqEceR9I2bp+umt22AR2Oqd9l0A8//NrNp5+sdl/ftquqVou0bZtPYyQdD186fHvev3l592p6tT9dful/mB89fjQNEihePnnQnPfT6ym/fDnk6psno+WXLz8q+e71p8+//x/+9etnq/7BAz0eTsO4e3B5/frNhz/6bz39jb/67PW4we63/sbvfPSPP3dscLA/+7Off/+/+7f6pw+4P8ciVgxiACJQsjnrOMQGX//8y4//0T/7ra9/77N/+aff/Su/QzKP4g++/f7f/7/8vf/J3/47d4fXx+GUx3meKlNsY+eCqWnUi9xWZgImNlLTcd67i1EE8KYJFCNHH7PsD7Lut9vt9vR6atp4OA6piQZ4OuUkQBTGoWzWq/1xMtGvtid4u5l9pRo5ABKqIzR9v70ah9v99ecP3//BfJpUSlVRB6jimslhHGarAokcycxBJVdHQxFYr+M8zm0T1cGqoyGqMaEjMkHOFRQCc9d1gxkjM1A5KGGIMdU6LxPcSVUr3F3P6Btze/HquuuSSkYJXUjdqmvbHsEZHR3APA9jYD8cDiGkNu1Ox/n8vC8+K/Dp7rhK/XTIr69fPn33fUdst2tDV50pmQ1H4EKAZKAqImPVfYSE6FbvawVO+5vxcO0K3Ke/9hvf//zzT/vNRdNenu8enu7ubk77H/3Wt8fjBLx6+cVP85u6odW3vvkhBC5Qb455vX709OJdMn/24rOb2aa5xBh/8Dt/6cf/+l95kUOZOEtwfvbmddxSH1uRMk4zkMeezXQajoQgquP+wA21q7jZ9jXvA/E75xeN1pnzo8vuuLdxL31KM9J207yAqSOqDhyCueLCFXAwBHdr79MlFDhUtWUbQUAxI3ByMS1nXatqmzaK+LK3ciAzJdCWm+C5CRHROUBgAjBVAUAhUpeAHgNKNim1AsS+qSCfvXl10Zz1bbtjfNzDp+ijlqYN3/r+N37xiz/fdd3Lz59P/eM+nR+P12Wkecp1po9/9mXXP3COtG4PN3dJgpws78c+bns6v777vAldG1bACSwzNqvV6tXNzeH2tqE4D1lFzs7P8liC83/8t/6n/59/8o/b1er29bWut7HdlOPQBQeZ97fT6LBen5OnAPDovas8ls06Pr9+psH7VfPB+08+/uw2ACC3ZpG9oHIKFQIOw1EAFMPNYbi8PDeG65s7AtCpoAIBuzjGNOc8TrriQOtwezo8uniyr/u5lP6sPd2OXRu6Po45n6+2Vxftl5/f9nEVJpqsTmMp2QxpfzgFoprH/a2fbSO6rter2HfT3djzFpLn+XC+6XhCFIQQDVAMHByJxM0Z2J1j1zQtB621AIYyKXThtb6oaeLHm74/Hw/Zzf6z/9fv0pHaVffgycWyCqQUyRMzYQVCXIBVJoawnE/xnnSqsvhj79MKbtM0x0BViqt16+TqViyFuD/dXFw9uHrwwScf/cm67QMGYSs5BwyRed33t/s9Fl+361XbT9MRxFJgq3L36nXbthfb86uH8YvPP+q7zopVm0JiMQkNBgzD6dC0YR4His4xyiSuip0XNYoUOZSTCNZqNaRoVJUztmAmMQYz58SqWquE2Ey5BAq1WNbS9R0zZytMwVwIwEEcITAHZmUkxBjaOU/MIU9D13WlDE61lNKEHlzcKjG7CkGsaNUVCNqmO04n5uBMGBqgAsgx0FiqU0AOKgpVYttx0/C2M4n7L4aeEwEAeqBYtQCHgAROhMZWAgdEBDAEKGWOkaoUYibiMY8cQ4gNlMLAyJQQi2aFkSAomgMyN2huVFIIJg5q6lprhugUAnpFd6qV9IQ+E7IppkiRAE0d1dBMlZgdUN0BlImNqaKCQ64ZMCEwWQCrwCiSAYAZzZQ41Kpq5lABXNSyF9XKHNQKEDuoIy7WfXFHcnJnCmpqKqIeEutxVgFT7VKcRAVc1CCEYa6RIPX97fEkqmqk4iRW67TbtDmPTddttitViBw9sJhNw/zvvTMVnK4ePAirK6E0VQZ3zXMidpCb/TH7JMjrzVnDDW8uHAHUmzw6WGQ2s8ChX50xro5aKhCk9ny3TpTalp0JiSlEdqxmMfGI9cG7DyjFVb8tp6ylTkW6qUFo8psKE7ft9vX0avPw7OY4xyY6eSlVs00303AzLOAkUEeKBIBKOplJicRVwaoDUwiponCk4XZfp9I0DUBJfUsxUTKppVhVNAYEAkFX0zZ06guvjDECAcpclHzz8PHUJFJDN5epnoY2nFWtTljRmCmXOYZG9oe6v7F5EOe2S6e9tobvvPONEDcvvnwR56mPMVxs4w9+EM/XG9eGO9ntJlKqnoc5lCxv3uAw43GIrl5zIkAMRSrHwO6S5xCAoXIVBABVnzJMWWcpAtx3bgLYHqvoupvb3t99LO9f4kV7ENM8kZgeTymFLTKYBoGKK9QNaeuMCSlQj6iTjtVLrUfKfbWltBDnLLV6WS4DKAYUQMc5ew0qZgABw1SkCerABszIANa1LYEjOXoldVDXUp3JYyhA3DBShSztkrcEaULrItgyUS3KxVQEBMnRHWqMHLsEqYkUo5u5eEBMqUzZEIli4BhciCmXqZo1XQsBZwN0lKkqAjDmmhuIITWiDuaTzJEQ3hoXTA0d3J0WrAmAOixxBlw66hBB1VSkVEQHRErRl2YcoJjaUutSzrQkekyrm0otbtp2XQgJOdACfiUAW2qAT
CVHUOTAru7RkX3xRhBT7GJPqkYmqBnJszE7d0DRLFIgV5cZjRHYHNA8IKAWx4CoBGigjgHY1MwNWeECEbNez3dh1QjmXbsZDE9SKmAuWYGSpg31JDKWPRbpvYtkI+INHFaoXdzcDa9DDLGJlw5W50hb9Zqa6JAkT0vFfLVMANltGeMVMasm1by6oBKTuYFDoGhmCz/c3taSgIMhOoKYRLIFQcII1d0dAlFiVndTUKS5igYSByQXlRYBGYXRm+TqtN5Ru27OH1O7bZqkovfxmCpLaU3EqICgb+8KxGaFHJwJ2x5jJEYTyTbNXshD42eoOOFoUBUckIZyd9E9FXX0GSoRRwSLQIY+xvnVcC0kQGxQQ6I053WxXT6dQ12rg5sgIJq4MgO7uairVsYXTXou/qA5TwbJKjsbOJgBu5GImWll9GAAZEvhmIMn1wsL7ZwZIppXDNXDpuk5BHJmRGNHUUO814lM7W0X+r+nmJo7gCsSIyHaYjIyMydmA1uCgmYGZkSECAa0AMHVDUzAiSkA2BLpM1MkemsuAXcUM0IEIDDFe0joIj+hLQvQjYDf1p35cu5CX6QrNHNzhXuTkdryCgMnZLd7Bg3cm3nRPZtXSvjpFy+Ok6qzARjymCdMyKlrUvPm9uZwN3Ztl+dp3bcR4Oe/+mgYh0cPzhNzlZrrNI0DuK3RdoHe6fAx1r5JLy2c9fEMqqsQx1xtqlURyHEWaThO2Wb3iO7uc7GuC6NIm5IUaftUDFLbig/M1Kaw6rpqdTCuyTv8/5H1J0+2Zdl5J7aavfdpbuPda+NFmxmRCSZ6FEASLEgsFkWUihIHMkkmMw2kiWb6F/SPaCKN1JnMJNFYakxWLBpJgQ0AAkggkW1kdK/33u+9p9l7r0aD4y9ISj6JwTN78dz93LP3+tb3/T7OY01tiw5C0G6a4WZ40He7gpBWs+ybJjBpFzCkYDk0jg7StykbWoK2a/ok7bj7jeM+iu6HemcKRKumMSl3k6y7JgV+dXG7ah5cXd4GtpNtfzuWv/yLn/xOc7R98F6RzIHM3N3UnJdW+xBMdUFTL2SfhZQOCKbKxIS0mBkJyUAR3c0AYEmELS2HiK6mapWW16EBIoYQVMVM7l1+iyNyuezhPdka7+NtDABg7mruRhQciAjdjBCYY5VCAKAC4AAB3NARENx18SSBVQdCTgRgJiE2bgjg38Ld37mK3N10nmYOrACmopoBqtcamJGCWeEQAFi0NqGltBVqsHNwVDEFqZpVq8hoUEVGW2o7pWYkM3STuch+1sgca17N5Wp3U6ep8A1NB+SjgPj04w+e/3ivtlLx2LY+5/50M/lU37x+tP7g7i//9OTR09omOdiwG2t2WAdzrADTaL630wdPwvq4EXr+4uLsvRNex0Hq0Xe/q8PnF68+H84vu7NPXp9fPl03N7F99smzt+fj0cPvrPtYxt20mx69d3K0PtnffDHdXkXcQIO4jZfjeHk1HbfNtH7v0X/5P3a8m2omCiGwivp9MYFmHw9Smrnfzjzsyvc++8HXf/nFJ0+PwTisjr//679z/fKHP/urH33/2fdm0wcfPD16dOTS72/GeRz2+93tYXd8vDW3YZ4otVSN7nHoisQxhTzuHSUGFvdJlIhXq+7mch86Ptqszi+u3j95X7Xte7p+fm6q72DW//9fS3TOAUlNVeur168//NUn1xef3129cermPAs4gYdA4zDEriv7wRRkrthUjKQqkROAiRXDAERAgAGBHcghRTEDQFUnIlVDJFdEjtM0rE+P97s6jfO0P/SbNN1en188356eNkdRGzh5+hReD9dXh9PjrmZZn2waJCsWjoJqLblA6Ax4vV6NY6YeUgzEmhIr1tSFlBonjUQnm7V9UT988uTLFy8RjB2mu0mrjPvbzaq5mLBJ693hxiE14WEudQHGLV+l1OOTzWE3zJfDX/7xn/aJ796cc78ZDjruhqkWhptcCsT90YPj891eXQu4+fT47GyV8PbiSwUnkSYGLr5y2n3zs5/efd2LYRuPH260Zpvr3cXdquUsc50LNdAfb+tQatakniKDoyq6yXrVYs66L9Stm/60CE5FYwsUMTTeNlhnH6YZEKoaEARmNDBXJkohVJN7ip0ZIgGWNnBgZCBALmYcsGOaq4bohFZzdXCIUEXQDRxRHVW22wZVYopMgITFjBEm1wy2ismsIpGhEXEEG6SmbcNJBWA/TQ8enD3/4twzHW+P3r65OqXt2zdvPzn9znGf+r65vHrVHdMHzx6/evGaWUvJH3302eX1m5JHhjJe7yljRHp4vF31acyH2HRorFm6Jkai+e7usDuk2LQpSs0K4ebmrj3qHnZHP/7xz+ZpfvjweKxsVkHmUqZx0K4NqUmX18Mn7/1KzXfj9VsOs1OtY57306ZdBecXv3zVt00D5OVg0LL5PI6ekAh2d7vU98P1Xd/1L86vu5a72NRsL18NkML6qK8HvTtUaunNm6t1CmfP1kerVb6727+9NkboME8DQUCk/f6QkK5watZdzXa3n9K6U4LQtrvdPE1VCYFsuMsnxw+bJlIMoYlt5+z5cH1o0oowdEebYZ/zrCnG4Kg1gzsTA5rkzI5WEABS03PpNQzH7+/+R/+Lx//sH/9J6o6e/+guFTtbNcPz7NRZqL/4/O29VFQrIOr9dWRpTCC87+TwKhJCQCQK4OC1ipuZyXJglaJIUb3s9mPf99wIordtm+eJuX3v6Xdvrp4bEMcYtSUCd45gm7YD82LTpj+24tDUwzAIZGd88fxr+vi7D/rV+08/fnv+3BwBMa2TB8h5NvTYJrUaYkLwucyAEJqQqyK6qjEbkFYVYqy1Nn2PdS4qwTsVRyYOScWAiGPjlpEIqgB6rdUEHFBVkdhcmcM9dRSCqqtpBCeOJkjcVqmicn85zYIAVQZEcaJcqnsD2hCwiIAui0Z0R07tVOc2BFUsSh1EBwEic2QKoDbm6fT4weF616Uml7HqgETL8t1rDomqZMTgbLVqw4HTwqkxreZ8IHC0CiaMrj4H4Ekzx+iqMfa1VAYgo+UuQoimQARm0DW9mDk6MUvNVWu1ak597KtIqVnV3S1GbEMqdSTkrEKIKZAvYXsjAvIlK8FoKlIzhnuV0RXVZcn/MZOaIHgI0QAJ42LkBvMsBZndlSkFpuUSxiGAg5ZCgGUaGQkYSi1EQAhihuBL6gEdAvphd0eU+tX60emDw/6ujSkycdurKKvnXDanG1MoRVJK83ivFlUpj4+3Hz59YOxWJyEs88yi01S8DjdXQzFu+y4gtKttxyjOERbsBLmCKJgT5ZlYtwHrbsgNPz46KvOUUxsCNU2jg9RZUGqgMO1uH7/3JE9ltVohQOrbMI3cMBWMScfDYKvWqy2exGk6tG27Ol7t88gJDiDbwG3XlVKU1N1rmQEUAzdtJ4chpEhMVYVDAAyh69qTI7264di5oeqyPjdiNicEYmIBY0zVFcWYEQm9KiGjaYis6p4daq5zoWJd0zGSuUzznPo1AAORmhOgCjL2J8cPRzAA6DdpLoKb9tGnT5uj7ur6brU+0663
7alS3d/cya6U/S7kkqrC/tALrBx9dyCRmmcBD82qcbE6k2tU9ZLdKzlGDmUcOYY6z023RmZoMKR29HaIbfPJs6MPnswPNrcoPs8BIRm10GADhnNMlCdJqRspHIaJkjkjxwZBa51n3berNd4RgLRNRMZSpE6HKl5Tm3VMHAkYDdjJCIw9j1OhgMTGER2lVEDv2sDmXrOCmUMt6kqgqq4VwGJ0ZAqcAgCRStaiKs4A9W4GjkpKkMio7VbIBAHbNhISIidAUXOHSOiBm7YBwkqGgEWcAVPTc9QQk5G7wrdvISZmCsps6CbF1YoZN4n5Wyc6AgItH1JZeKd4jwB2QDFWh+oySZ2yizK4gXJAd0wxmStxWBq+FrnDFyGaJ3cLTZtiQ4BL4szBFZABlo4h8woyOgcEQEJwIwBHNvaQEGpFcS1o4ujepYhFE7jWshgxxKsu2UlarFBiasx8j5WD5cACIkLV5LJWK4B5EGMJWEHn4Pui1nKCirWaeV0hHGFTBFpu2SqiZmCFVBxAZiYKU42aZOoROpPZg4lX8+oO6tWlhhCYYOmfZV6AH+6gRIGYTM1BF3MLmCMTOjBHcFVTM1qcBogUGEut5BSQ1BzM2sCiFRJplhjQEJiwIQocIvNsOCkCxeP12psNdpumXTtFRFatIcUQGjBlBs8KxIt25IAIoKYLCNcR4mo9+lK5bmZFUQMCSkm0JgqBI0ElKGKHnV623YeYSwQ2EfNIHApO5+XtyCOQokHDliStM55UPxaMXgzYMRqS2cTAqEsJk8xst6wXkDi0K5Om1oisbsDsbuoFzEwBdGngYUJ2QgckJARqAXtKYqLGlQhS4pQYeXHmLEYb8GXMdlNZTA//ntgF8C62c8+CWVxbS4OVurkbIfg9qGhJ5xiAqy+eGQJANyNCNXdfQpzA71qpF4+YLw4RRIMFcEPusICpAQCBDey+Y8jNkfw+KAS+RNIQFyS3mbnWRTwiZL0HFpG7AoKpA4ibtA1++c2LN9cVeSUqClby3DYRiB15mOvri/2mi3WcI0HXxh/99c/247Ra96sUrOZcS8/+KIIZzVWfVPkE9WHPNfHdXHqkhmBXVZQCEgKkEJAIhMx9k7r9lFMAAA8hEGBkHEqJTKBSSwVuvUk3k1wUvnA/tzS48a48RBDDMs1uMZPOkQap4FM2P+S4JpuKDSUjR1QeZ2tDAAJxqQaUGBD60LaY15E/PYErqV/NcJPVZO4jxRDQvYrOM3796vKT9852+yJVnz09nYb8p//yj//23//7iER9NLfAZAvb3gHcichMFrx1oCAquLxCfcmO4lK64qCA/i2wf7ERgZPa8vz40kYFiA5m5kRoDuj3uXZkXkR8N8X7tP89yQgQwI14QW4vLnhSrQDu94CggLyoQGruACiq4I5EFMhc3RCRFyCkmzESEpnK/6+rSNWAnFCtZHcjsFBmK3NyJQIyUyvupKoOWGtmzYqYmAmJAysgOzu41uReVXvSasaq6hhVLXkW1aqK4IbzbrgKJjF047h70Mh49RqLqMRVe0Z3tytXqdg8/OCa0w8+/bXXP/n5xx9/8uVP/6Jdw0iaUjPthu7hca1uYBVAwFHms09/RS5o+vJzEn/63ferjmU0ubp6/sN//pv/6B/tf3Lxyz/6q6HoJ//gb1wdrn7jv/jDIX5ne/ykWfeFDWpw7LvjZ/Xm0KTeY88I/+z/+U+fHj39ycXnf+9//j/4Z/+v//erDCvQ+c3dw9Pj/dWr1XufYEu1TD7vt/06NfWP/x//+6d8dNY3fthf//Dzx3/zYzp6qGNG7x8en/1f/viPf/fv/YNcRzJ6/+zJl1/9jGNEJVRIlBoO+3m+uLrt10fHxz2YVbEYCCpYrjLl7Xr99uX50/dPDTR1qVRhIkY4PVrtdjdN11ycnwMlYv5WEvoP5aL/KHeL4G6MjIqq7ZPvfv9wefP65S/ee/9XpNTQpsPtXbtai2gEq7Valdg2YCC1aJWQGM1SSgtoq0ziRawW0IrmFIL6TIEYKSSOEvIsdRZ0hvubUQFTspKIjrdbI8HgU72bhjeOur8Z2Pl2f3v06AiVpGoMybB1jJwiIdR5kFJOn25fvdpXt5Si1Nq08TCM7Wb78OjUIPbrI3XI07Rq2+Lerrfc3NzcvtHMcy4oGEJXClRHS9uqHrr7nxMzD8OsUonBMGRsHj95cnF5UUWhVlYf9rsmBirT3ThaVW49Z1k3eHd1k3NFotg3GHgapuCIgCer9eE2xyZQndXg/Y9OX735+tHTxjRPhCLl4fGmXXdXWRiUhYkpeFptOqB9H6JN1tOJTpxVuyZSpWG+W7ebakU91BJ3eUpxhYUDu5qaWSL05b4P7rTI0oqBzLXhCG7oiugJHRDVtGtaRnIHqUAxhBAwmmVLHEJIjBzIDVkFXAwdKUZEDYCO6O6lekSAQEOtoWksm420aXuZSts2N7tpP9QgqeJh3dMvf/nj7zz4eLrdT3az7nudbo7WxzdXr3WeXYWEPui6ry/fNtt20zUyjh4iOoHn/ThhSgIpBgxNSE28ePENGCd3E/XkouOzj9/7wQ+e1nL4h3/rt/+P/9s/6Zo4TDugut8NxqGN3VG/ynW2AL/z63/j3/ybv3j64Gkb+lCpgrfr7W53c0IhtaGNIbWJZrE6R+5yruIQPY1TnueyXaXjtj3t1l+8uMhsQyMxNKDl+7/6iLZgB6O3w9FZd9iPt1eH1AUTa4BabmerDXUa/fr2dt2edcRtgrvd5UeffXS4vgSj8XAYBll3W7GS+lZVmWy77VR8X+ZV197J3aTzB48/++lf/fUHT05uL26w79ulpUQEvILL4kpVrczsVREBGCUf7vL1s+/tvvfb8uK//mL8a+9C2ByA2b2WwFG69sOP3mOo7+RkJURQp3cIfAJ2EHAw8MDsZu5LeJo9GHMjVWJszKqoNW1z+fYVYUWbmRyRFovkOA3b0wch0osXX0dHdiLgnOemazab1WF/q4JVcoqRA7nofp445LHKxdVbhkePzx6dqbx4+Xnf9SngrIURhnHouyaGTjwDSRf7+ZBdvfFws7s9Pjo7DNeu0MY2MBXJWhQr7u9ujzbHosqIroJIpoogjGambZdyLoSgVUW9iSGEWEtd6HQEZpLRMWACB+LAlGo+JIIiFsPicK9tas2SGYI5O6iq5OxG81RCXNVqliUiDWWOjlILqIAZEbhTaqODTuO4Pe4ON4dAveZbd0OP3AY1ZUrVVWqNTQ/kzjzlsUmrqR6qj6t2M45362Y7l0Jh4yaanRnJFZ3JEnOcfSBwjq2KuolaXShmStVMAyUAYAIRixTFnThpncU1+OgmblAdAgZRqzIQGjjWklOMDuRgKiVgUiXEhNgYFLWMTEUzw/0JRchugEBmhoAEjBznMhKgmQZmAFYxJAycwMFrAU4LW8cBOEQRDxTdyB0QYhYRUURExJJzzrWY9NvVdruds6XA87jrmlTV5lyQMM+5TBkRply0CCHFGGq4vxF98MkHD49Pjx+cHOpUhn3HOVGzn+cpzyXPQ51y1kOZ7mY53eRVYubQNZ2BAgZ
xcFGus7oIc3O8ffH2fHW0nc8vPuu+9+DJdqpjKyYqWhXM0WzeC1Yarm43/dk8VUPpugA6o+X55qbc7kQpWdSDYA4tdVDHw92Y53nTp+1q62rzNKsKBxIRAGVktWXhbC4iZrXMaNA0cagZRFQExBkiO4E6FmNKAI2Vam6Bw1KlnEK8H/UpIREFC+vj1D6y6j45APTtVgo0HEPNnTHtTQ/7vok5iIweK0yz410JKXJCD0GOuseffHc85PLydXM40HwIq5Vc4/XLi/VUsUJ72ME8RfagijkbuA8DqmOeU2pcBp8GzXN1qwCxjbFpHVmqOzdGgH2fCWaiyYyadd0+yB+8V56c3ARUyYbUApKb7mcMHVOSPAJgxLYMswaqjAGDCcLsE12WMpbZtyfbPNfjdMTwOoYknuoQD7t9SRijxwhFs5k6siEDaOyiUogcXRHVmoYBwAjFKmJtY6OmuTpz8IAA4CpES92wGmBoGmKmEE2yVgPiaTAFCAEBiAnjUdNuGlxQZEBelU2IArmLSRNZ3YGDMwAHiIToJCgggQmrap7YkIihGkQ0JozBxRBBs4jIt60ohnBPOSQGUACsauRkAARL1svVYFmbLnEaX1QkYjABBKLg5syMHMwd3VUKMwM6x8QhLLyYhfoaiJcjZ0nxmBqCuhdAAwJ3BDNwY2IL4AAQteSpijkbuqsah+BFQ0BzQ4dlooaF5gJMTgDIWhyqeWOgBm6IBN5HzOASoYY4ahYwhTlyVM0IrZPHwHW+jjB3XUIbk+pRYN+sbq9zSH1V4YbXR6G8ukjzCjiu2lhAAlNdflaAGILUDKApxVmUAPoUmnDf2m6iyOzgYg4IzOF+r6tGCISMRA1AVXFUBGQkRAjMs2mgAGDqi2oWSAwNCNDZIbC4A0ftVtL13q3nuH5w9pQp1YpevYkpOwQ1cmMwZBJ1pMU5hk6ICIzkgBZiDQ01namiilj2yB6w4VWiVngnZHHBJcO4t2+AunU6YkWswCEo040ON753tCCZkSBbEt2K93luwYjdzAkdfGyhOJB7mF3nBl6R7qGL2hxD7KwGBCN0YEdyLEgB0QOYm4IHVkJSX0I3CGqQqAH16FRhqaprY0zs6La01ivi4uJyMzXVZWDyd8QuXxBFCEurOACaOROA672iRrzQiwHQ0c0KUSCkBTazRPTd0Q3BF50gACg6Ln1TRAEczRXvmxsAgQEQwHipMHe0xeW1BEGJ/R6LbIupBAHu5SJ0cL23nLi6K7irgYMRBjNAmNVqZHp1cfXqbXXo1S1XAUQKpGoRg5R5t5v6vrVa2obaJn359Ve3u/16u9n27TxOfRP7vjlr03h3fYtK5BuQbUPXLtfWjBRV7TBMoYmA0EeutXiFGBIzBubJqrXNRN4TW1F2A/O2acFqE9rBbAz9xcUOuMkQWMLknsVvgY+auLM5ICtYrXI1BOJeSiGO6s6xzWDY0FCmJoa7qTzexAUBi1aFMTvWCqbBhFjL08TdKvzytn6z11Ixmh0HDgQOcHeoLy7vYjwhl7dvrp48fTjM+f/zz//Ff/kP/8EkEmJwwMCsi6QLy+8X4N58vpiAAJBcHRxMF1sJm92vx95Jfr7AwNwBOQQIKsXBDB2AABHwvtwXF4qQ09JOiESLvccdaCkgpGXZR+BoBo6gKkSLTIVm9042ACAkouWdY8hsKov4SEgmlSjcG4fMAQyAvnUV0bshmcDNpNSS58Ot5tnLzFojap2uoM5W5nm4q/NkYgoAkWNid1EpboVBSDJrTmBsEkzJncCaGBqiVUirbrVdHW377abf9s2KU6cQp7EeDkNRi21Lgqp09uxh0x9WrW36cHHxKvUtaxEcJHG9eznvXsvdW7A5rNLm8cnBpDs+Moe0ehRjOj198uJnf/Go1aYxOOnH1Sozvfjm7dnDD7brByF0HJsp39zsr99e7PalYHf69vy8TPuu4WF/g0TT3d368TGYhWm8/Mt/d5LS+slje/jo8d/7O/xwlV993aAljtHrn/7j/3WsIkYe4tc//vEa0vTF62fHT4vlcBSOvnNGq5Zg7YTPX15sPzr72dc/Xj19vC+6n/j2+s7VQ7JHHz5wr66amjhPo5a56drrwyAG5iAiMcSubVNsYugePn7KKXTbzhEx8GE39pvO1BBw1fd319eRaB7zPFV3wndBs3//tQjU7/r5wJwRc6kSYLffp9St+1WMIZdhmgYFy1IM/DBM05DnXJZiRbDl0Y1OxCGOh0lF1cxRa5mR0ExNi5sAmIMS6jQfUpuA0BBF0An6457apIFj3x49fhpiDIi725vtSdo+PBZyY2i23Uzanz7oNmtVLeMoUhDMTYbb21Lr9e0+dKv26Ak1jZmXfY0BH79/Qql5e3XZd93524sF2ehZgWCW6e3N9aFAwTZDr3zM3bOw+ijH94bwsMbTb2W0ecwhhNXJqqY0KLy8HYSiipwcb7lPo2jbPXx09oE6d92q69rtZtu0q0qkhJwaAgqB3Gu14uxpu7V21Z9u07Z1GV5+9c3rN3dvL/dXN3o1ltXZ5vb2pUxXmxM7ez/EHmr2PJOprRPmeZyzmFNxscA1oHOI3XpGejWV6wI7t/jwSJddG7AtyWpHRFZRAgBTN0f34NBSaDlGosisqsyETLOZI4pqdTBiAVN0BI8xVAAjjIFqlRCCuvdpXUpl8OjkWYOBmE1FVSDFBERdH7ddCuhlPpjUaayHw0yxG6vMVa1pvnnxVgWpabk5XT38gPvOEV++udkd8jjBfpafn798+ORxctJDwSwc5MGjkxkEY9N0a+IutceO3d3dAAAONU87gnr24OjjX/30g+++D/Me9m/+/Gf/dkw5rtdGYRxrSN1qfaSmYPThex8fSvnF8188OTtSrRXD9Xg4Pjm7HfPJ6YPNZoUh3Oznm/00m84+3uVrpdp2EVJQxnaVVEokfHt9jWRKooixaQypQc6HA3ARnxw0NLxep7v9oVRY9w+AQmIYht2k+emz94zixd3d9VTiyYPbYagOTg5AjPTq5eu7u33bJCnF1Io6ABNQ2zautU/hMO5D04LbNFbycNodr7ltmAMul3QUyUBAISDyNFQwP9ogNl/9D/8n/ge/3b75ypM9TXba9Sdh1bSnG9zEG7QaV4fdvZ9CTJdCAxW9b8rExV/jeH8+OTI7gLhTCI7GkQ3UAZ24uh09PEtd3I+7w7QT19g3nCIRVCmYVk+ffcwhGqIzh6Yd5gk5rjbHoWunWooroIcUDCqSMeXD7eXl1fnlYdw8ePzwwdM5j4dxR8zM1PVrd86igOQCTKlrVjpDF1frbi1SQgoYuUipUgHRREHvNzVgBgS11mUdWeusUsFAxBB5aVlGRjFXXzYzVKuZB+QITM6IzOYmmpmZQoztpnhdFuCOLp4BpEpxkBBJLcfAUmdl8xgMGIiZoqk0bQsMHj1bUVMid5SmYxV78uzjMU/dticmM4sxILGoOEBI0cxBsQsNOwWOgQMiK4hgLTAKqQcFNAVjbhZAJKIXGYnQrIqIuROH5WQI3CBCCIwUFO5N+KoAEEPsYlo7IHFExGXacWQzBM
qgfFpCkhokeO3/r40wzQKL+6edj0/e7h4c0v/uTJ0+5yc/nVz77RokcoqpAiHg9HOBwt+INuz84vGEMsOhWNJ5lKEjuKtNjF5bo7jNNysWp3BXFm7hu6gxsA2SMsDZTQwXvkeXeZ58ZuRm4MWcmZCNDAlQEDhVFKHyHGgFgikUnJMZdSAkVTNSQ1CLGbhhIAWrOh1bTsP3j+7P44/s6/88M6PgwlJg/O2neLMgz3x+sffO+PKhzeff11CHBA+Y/+p3/8kz/9/+ScGLuAPdm2X/bL+OH9NWeycthe/yp05WqSHHMMXfkHf//Fv/zpf5OuTv7656XUfjj6T/5iur2pVJ/+w//rwx/8O3/7zfTjw+4QIqc+EUCIMLYSAizC6mzh7+7fhm6NYZLxELv04sXZfniIq0RBb3avNpcLb6Sgr968+lZ3SkqBMwKIiA1+DOnbv/mZjnuPAVGfXK1/9POvLteb65vXz58/3yOXBt7466/fNdfv/+an//Kv/uzpxYvtNw8fPjk7Pe/KcXJs3bJ/GEtoUd2/eXsHTHmxOpYpgyuLqD4MZcPRwJ3IkQLlmd273w2OwRqwoXfLnYS8WRf15Murk/7l9S9jn+enQrQSJGvIQg7dZx/+AHFILEy47BeU0v31Tbdg4Hi/P6zOn7349Pn9wxviuDm72D48qLjUx0lyigGszacaUIdH1hm6vR+smYIz4qwq4qy9uikSoPvcvQsYiaKbo2MXYkDjyPU4giqYEVNkNoTAgI5Syzi1i7NzDuNhf4RmhAzoi9XqeBjcTWoDhfGwP7tY9ct+HKzjXOu06BZlGlOH+eOP3nz55rgd3/mr0C+XXR4PY4wcU4qJGGPK8e52MgBX4xBAjIiYWUTm2DYSujnZXK/rZvPJGWRWiwCAAiCpj4TzrA8AwVSbTAHcOSDE6EtSBdcYoruhMwAgxFJ3OXSI0WnBYWXNVMecg1sPSoRetHFaKyoEBqSAIcR4c9hr2pCDuZUy9ryYpiNYZeyHunWwwCszjwwQWgAzGFXGamdd7gMEFgWokzIGVjDGoDo5tKrFQLq+b9rW3cl2P+UI5CZVmzY3Y0ARBSrk1McTlTuxwQFL0z70OSRyEA/9sh8Oe/HgEAAVvA6jBGZVBedI3bEeADyF5OSBWUSIJkQYWmU2J6IQzA3I1XSSoVqdq/RCSDbHA4DVWmkFKE5t7BcbAARicAhEwEkMPnj+7dbu7h5u+/VyP8LNdnw6HD96dnZ5vlkrX989nF2e49LK4QghwVSmw5EDE4aIuCtlc7pxQTkWRwYE18enQIsQ1xpQ1itfPknnq5vx8OrljSiP1jNCF3IikmqqYmYRaBJipACJIHizubhJhZBUREFh3B0Pb253X9wO50+9eIW9c5Zt+eJHX/zGH34/p4iYTk7S6/3NxebsuC0Nhs363PFoDmgWDVAQBcfWUs9hFZMsgL3voqgAIRJCQDSgQNqs1hEAXT1RVGkiqqYpci2T11KbNHNFNA6cu7KfMEEFACLyEJ2MoiEQsRAzkYADGKMLs6S0h/AXd+92C/WLSz9OU1X12oqAYL/M03DgEJ3wYBXQE1DXZXaFx3ArmJtibQjqro0SpyJ4ey+l1NMeTy/Xi2V/v9sdDpViOjQ9lDJYHVy1aqzOCGpKRE6EQExIGB2czcgpEQdDpMAcCXipCFYV23IZanVEGNS6kJYxDsPkwNuxdIEoh+aWOIhVwEjIjqbkMSbFoO4QrQUjiqDuzQ56CD2Yh1UDQzIEZTTnRCjYwMGVwNAUhkFMvTUzcO95eZ6X54uL8+XZMq3PlieffFiIr28P37ze7koUR1Tw2qQWqdXGYqOYg4KjMwY29kyhCRhFCDRMuiikgtY8YiKwyMYxGUDmQFasCbOiQ0oB+1jn0m4mBg85W+6N2cEQqIiiS2AQfRRMn3/wrE4TEporEpbqom5mWLPWyVUDQoiRmKHWxO6GKWepxWpxEQjhvT3FMeAsmIM7P74/8f2BlOA98Q2R5jvRXDU1L0dwcwBGBNc5CAyPHGzi2IGLm3KOTq6mYEaGWkyZeHNRwrj11iINPk0uh1o5cTMZXQX0kmIuRmZkRCIBvJo4OjMgVDMbm2oCAxNtyCmiK7gxohqHBSoh9hGHYG3mERvP+2FmQkLqcje24UF0r/o0pzWIUXOE93QATCGA+5wDADSwOQdN6GDgTauDO5C5AUApY3VqhgaAIbQ2qeON4l+X9rnKwV0AHM3BaKZwOjSAO+UHtW9Evp3z97q0ZkDSIh4Jg4OZVUEidKQAGJjtvSWGiYqouCp5iNRFJgRRE4Q4b/YGYEaBijUJfoCTRXUmNYwVDRkRWFByiDl27K7QIhLVGq2RVDcwb0hMHEEEwedsjLvN2RmeGS02R67cyIDIEdyYnMgLa+00oguAMYKbClpjEKAGQBrVEMVDRgUjRKPgFil2XbfEMJvXEAneS0b4virayABdf31lMjMAe/wAcAIEolm7dPCZJw42m39QzdDNkd5LAABAc5XQTOsycwJSE2ayxwbmRwHA3B8T/W7uTkjms3Y716iBzWaiOZfkMIP/wIEi//Krbx6GSQmZY5kEwEMMCrrb7zuOixSn41HLMRE6xtfb/a6Mq5Su1uls2Tdp8wAPlRddVuS74+HJOkIta6bzZEsn9YwYQtSO3CZFgJh4h/62TGddCKhDtUOjIhaYpcGoFlIYhsM6L0AUDKq4u6OUsWnYPH1bY+HlSb/Q8WhqCHy6Xo17ebi7OTndxMVirENIqU8dhfonf/mjP/3zv6hVACACdpS+8+mHv/WDbyGyOrQymFYHde+WixW45Rh3+yGyBTRRoLAKfkQ0jonhEYIwFFGjqoKBirZAlCM0xKqSEExNqj40o8CnapsICJbWCZucaei95oVI7Laj/tWXN/3pqg9hfxgQ7ez86s2bW4rp6cefcGDQKmKP7TD/WhCcb+JgJvSIGppBowhA4Pb+N496+Rw5Q4rzn5AQwcwMkR8h5xxnGyC9F9xVBSkQkroCsCkAOFGYKVfzK/RRCQV0N9XGzLNgNM9O3ZGJ/y2piNDn4ozgCN7cKmrLPT3cDWxJGizywrTFDsiJATMHFKm1ERIGjoCMsFyumXPXL2eHFJCXNpGaGsWIxKwGiD4Mk4csZEh+fnpmt4c6HnO3vN8eW8jp4qrlyis0wxYJITx5/uLzL3/5yQ+/pbc7b+2RkA+AhFrFiQTZubu9ux8P1x9++/Tu9nVufTI7POy0Aq/7J5++OFbtQcfDLncMF2t1nQ6FY2rD9HD9zXJzdny4CWvrz16ICHEyBBMftvfL/XB5cjFOw+5wH8/PT5fp6Yunuzq9vX0XNpnOTvIy3b29+XhxQWN37l3GANxiiudnH3zy8Xe+/uLtx6ur8d3DInamoJTX5x/tb+73h+PqfLF7d5/OTxeRV9x1y7MPPugvnj5XAxBxVQcoIggYiLbbHUcC967P929358uzN7g9tON+GGtpGPLM/yR6NEfi4+seZ7A1vCdcP76wHvnWgFXIQUVaGx7uRTm2NuOqIhgCserInLRMAg7Mh7GG2JWhlgI
nJ6fDfkcExJS6Xt3F3BHNnGMEwjYWBHZjolhLY465745T6VedmvZ9ng6VYz5MB3CSUqZhyH1yhLzMCDAc9+6Qu8w5o3ba5vEhDz6dna9vbq4/ePr03ddvx8O0OdmcXJwMw4EBr19dr06XTz44mXYPyxwINETa7w45JsHZ0w5zfj4CxcWC44JCgsfHQJY5qk1np/3pBbSxHrb7syfSFvHrr+82q5PNKuyP97vt/vn5i6cXV2/fff3h86vb65uzZxf7YeoT/Xu/990f/cWfXyxgsV7/9Ecv0fmDJ6ev3rwpkqdRq/n5afz0+XkiWKUPf/qTw24q3/mBvn5nEsrm4skwFAdK/YLh4W/+3vOPC999fXf7cjxfdv2Gb+F+Vw4fbi4y9DfjoR5HrIWZq4M2YQJ3z04OzoCIjK5uGpgIiMwTR2dTn5s3LMC8CTmCg2MI0esEoOASMJVmQCjgIaZqRkREAGbWLKXYaulz97B/SP3JJM2bcBEpk40PQWURebfdDiT7drw6P/swX+13vyjTWPeH/nRz9/Zm++qL9Wr7x3/4G//9P3k42Zz+vf/xH/zj//qfcerTwuo0nKwx5/L93/745c9vd7uJ4v3Xv/zmybN88tE6/xTpBk5Xmz60lB8+/d3z3//d8y9e/ivOEkNYrfrX77abfjWMO8CcY1/Gw31zCskNUHy1XIjLNJX97tCmlpbLYS8pxX7JJnC1ANntTGG9WqZI4IYQHuqUO7u7PiRO9w/7y65fxH6Z+y50WXBEur15GwlBtByP716+fnp25V4WqxBi3Y8SkYdjnWp9+sGHr7/+4mrRiworo0qrB0RenSx3h6FD6xIi2DSNIWHuO5Uik7oYOfQxWbPTJbftPTdhpofr+0IaQgCxwMSBeMb9uCPhMncgR4XJPMRFHKtSm/pFzomlCSO41TJu9/shxrjb7tvUmEPf5/cHI59HZ8TgyK5zjQJTeA+9xLnUwwERkQDN/X11MQE6qVQAnLcAJE45ACI6GWCp4kCqWqt0i05E3QwJOcYG7swxp9JKjAliNCYOqZaJA+cu3bz7VR/Wi83J1I4eoF/35VAbsLvFmM6fXsnr68Ph+Oqrr548e/7k8mK4f1j251ba7d19yMvUsQOoUex7cFKdS4qjOhCxgJopBxZtkSMAz4j3wAEQZw3lsa4EQN9Xv6lh4NxqjVJMi2sLGVxGJFdtiN68uAdkAiIEUucKIjBTkAJSLK0G5tAtijRGcKjEJFqZYgwh9bm2AQ37tBIRM0fn/fHY9Yvm1dBctYmAzl5mRwqzIaHqaK4RQq1jyomJRIqoBGsp8dRqQHLFJInKiEmNFIM4xdKsg5RCbjJm5ipb86P75DUmiEwJ0LyNrgLCKAqlFBRCW3W5TENOCdCGOlkDAQHzhJGR0BxRh7Jl6jgsm9RAmbmrw1ZFiMDR3CwEqiKIgQGqqbqaATsyogGiOdFclg21tkRxtdrsp3uA8fRkqdORrK36fns8/ub5d6DAP/uTv7w/Hj47DJdn58O+dCmFrj8cRnQy1Yd9KUWYiDCWcRJt6Mj0OFE5Xy/AwlClTeVXr69/eX9fXHGsqCoR2XnUNgctvFQAQKLErFaxObfJtIAZ8SIRo3sttQDZIh2k/Y0//P2Xt99cPb36i7/6q9+O6NUhCgW//uYtcLfckAyjnq7EBiJxmJjc1XSqNg3Hm3c595SyAYYupBhUGyrNdx4T12IQyM3mZxGIVYBC34pjjCYjc6rSaml9iD7W05OVEjSwmIKFYK6AFKnXOiFgYHd3bQ3IKSQP8RhCyfz17u6VtHFNxYvtRz+KVU0xUgi8SCO0sFm6iKsu+u5YqwBO6qsYtFbmEJCKSp4JNoAOJMrq/vKb2yJlvzAjOjnFh305HiejqZqLu6J5wDjb88BD5BBJHFSlaQOAmOY+daxSDC2CuxA7HceBgxsDACFpLQMQqeOhqgME92ViJMAAnFi8ALoaiGAXOELoPLAYDgceenBCxXbU1mDSkStZYJiEl44hRsaIISzyoqNp20RkfyhEUUsVlZAodQFX4cnT05M+ovMibZ5dffLi2ZNR9OriNoUvv3hX7veTTsXq5PtdOxY5NBdkotjnVo0DT1UMDWIsLokoGnGVDEGcGZnAAB6ZvUhkRl3IIWhzsWaREZAciWNGF2AGdG0VnTVnJXBQEeX06LM+u3xRanU3YA8RS3N1Fml1Kj4Nrm21yDknR8I2gDUVU8UQI7iaCDRBDjSXPM84jlkPgkds5nwrBjd3n0mpTHPa7H3xzjzsBncAAyPgucodEJmDqWBgnjlYDvZ4tHViYozW2uXy6s3+65HIY6qhVDYLNGElintrTNyJZ2NUA2cACIwQaKqjaQ2maA4YIqC6MUcx0CYMTBSXXTe0YlacOjGIzGCl6dFJQ+wRgrgTcYS26fNwnBZdD2buQI6kGtBw9g8gIgEZzfE81QYIaq4qhqjmDuauJrNF1QkQXAmgqYjDttlfHKcvHSs5EpAZ+tyy9Ni15DAzrvHB4OdjXYbwLcCEDgiEEc3gvcuhGfDsKGOcHcSuHpDUXCMpVLMqEBEjIhg4Aiq4AwZ2JlBMD96d5hz8OOCxIaHPcUJiIHaBR35ziO4dGFkDn4vyAsxcdjOfuaE2W3Xc52usv4ecIxu6EDQyRHQPDBAtozYCICRHBgjkikbVPEJ0T1VcEBUwRHZHrcjOgRk5zEtlRmMhvl95SKqKhPPnD/C+Q3qGVSCJNaQA4CJCHGZhCwCR3AHtPfVDbcZOP8YnZxPcTB0iJABnDuDGSIioao+k9vmDwGf4zOxPcm2IbKZmBuiABOSOBoaqRojSfH883O/HaoSMrhYYuxj6mIZhSkiBQcqR2nCx6b+53n9zdz+UtolhzXTa5xD5OIw58jS1BVFH/HaYIhOraKsppPmQkZGHqsCUGD2laawZKAVX7xA5mGOKwF0ziEQBIBITkqG7GTO2aj1lKUIWIPafH+ihP0HDaZoWXUKM+12Nue8WuV9cVhOsDY26lL96+dV/+Y/+m6oADMQIjoKw9/Znv/rrn3zx1588vfyN736mQ3GpCCQoz64+fNjfmjeREiKGSJySwnGojVmBsXllcH3UThzM4gyCttn/hZEYydnAACcLbwUjSd/xomoEoYAYsjftYOwT94LfDPBnf/753/29b/eZBz12ebHerG5vbiXEJy+eRkRinTHSZuZEs2bkj5IPzkLwLB6ZOaI6PsL/EUmtgc8fSTP8AVxm6vzj2iRy9zm76vb4P7sjGBiYujLzfExHJHN5fKcizOtQTecYGtFsKdIQI+D7z+Z9D+B7V1EVAGNmd63DAxOqtDoOIWdAcjNMKYSuEgbGGDCScYeO/WFs00FCyH23CHmd8jLFzt3cxR1DQBMLqNpaaxTjUgSZM+Sl0HHaDX3tU59UGqKQWTvWRb/UYbful7ubfV6n7WGEVRpvh0UHYRN3uyI6ESZVcTEEBEJBb9Owff36eHOzWcL99f6M93XaLlbZkRYUhtrOnl+NACFlU+pXZxj6tCCdtA3t1U9/+ht/8Mf7mzfnubOym7bHRIUIx4
qrk4vbn/81TQcjQ8VOJ4qIXQx9toCrjy/5PE+7/bc//bjvUHb3w/VDd/bhN2+vlxcX0RMrLiXdfX5vIy+W50fyvLxokPt+oadn+7Eulpk4HreHLnVdf/7Rpx+uluvr169Ol939faMUuetk0BDJxJjIFbVSDnG7fRfAmTFGmpBDCBRm+B+o6uPbCt5Lh+C/FoxmzdwfxwbgVfMi1zKl5MPxuD45zdHbNCEZo1EIJIwWEFPEgACt1vXZcjwK0SMkhdwDRan6uPOFWA8Himwi03RYLC9TvwZtrU6r5Xp7f09I681mGI5a1EpptQ7748nZea1DpJhSnCYhd8OhXwYXz6HHgKXh8uRk3G51ane7h4+fffD65c0nP/zdaXyH0C/C2Zuvb168eLHerL756o2NPO1pmsp6FcatgC4/+OD79C9/QTTPDqKrhRDTYpUXK6d+tXyUivqlr08Sqbu2NjzEgJ88T9p2NMDVRxe/+sk4TdAt1xmwDrrdvZzKVKZycbZ0VQ+0Lf6nv/zy7Oz0lz/7ycnVWR9jLfGr1we1cGhTXnZi+Go4yFgP43R6dfHkk6f05qtPP15/83MtxAjVzYYq6vKd7z7dvr3ZbXdniePVCZFzcAz9YbtNS43s0amheLZh3DaQEBAdOZC7gau4EId5duUOjE5I6sYI7k7IMB9SmNkVTQ1ADIGNvIWYRYRSMiQDACJ1SZHBTGwEJjAsbYq0BAocQur49f22Oa0hbt9uQwSEdsZhw+ny6Sfv7t/ev7s7SQkBrp5fmKfv/mb/sH3gpnF3nbTevzz+7L9/+eEH3Z/9sjSnh/3hb//BBx9/t/t//Bc/uR0XhdI/+Ae/9eHHf3kj07PnCeh4czudrtqXL/c17v7wD353wIkCHPdT36ddqn3upBYx4dQvE41akANDRDdrNkmrajGgu6ccl6twOIqqA9Jq06WOprIniEWqKTTVy2W3Jn/e150qAF5dXk7jeJpzn1MtMu6H/myxOA2b5clXb35xcXU+7iSl/O7NSxjk2ersUB5Sl/u+L+OriyR8lmRsb++G5WXfL7voOxcdhqO0Ehi3262rSZUYszubc63VHUS0SUX3LrZlhPE4pLyhXOq0a+ABHTlGJI6JQmylnF+tLtdXtRzPznsEVvEgoO6UYoh8d/NQVD+4OBnGvYMRyTBN56uL++ub3D0ejHKXQwzmMHctuEHg8EgkwvnQDvRIxTN0dQCcGx1c5wg9UVTRGIM2IfZAwdXF1R26VceB61Dq0Ppu2bSmnBjRpDCqIxp6tbIIi8XypIx7Ctyn9WF71/eLF88+vb2+75ebZy8+fPnmi3VY9t0ClMcytlbXm6WB3r69Hh5u7lxCxD7m5uHyySe1frk/7nUaOfWojmqtCTPPlRJuiuBkhJBdlDGiMZoQo5lzQPU2jwp57pIgJGQAUxETYE5D3aawABez1iwYgLfi4KIKFJAcHFAruGurIZ+FLmKJhO5iWiEkUkFGBhMEDtSDQZf7YyEOfdelMhYxEZPUn3gb0VjVmhXCQMxNgDyoirJwIAQzsRjS1EYOC5DJ1RWaO3fdojYIAYjwWMYF9dNw9KYcsZqoTQvsBaCKugnObStyAHdOXRuruiJ585o5Ac9pVSbqYwDvuqkOIoKqJhpDZOLocZ6yijaAaM4pnBFiU8+cWzPnxzQfELsbYTRDcGraFD1EdnAmYuJjGRhY1cyUKODjsCtM6lfn5/v7CkDXr2+qhvPz+NU3t/3my/L1vWiognc397v7Y63l8ul5rVJkutycuOrDINz3rm2sYwyU+r5MUynvG9AuT6b9tOiShTiYizqYh8RomAKCQStu0kjJ3d0AXItUQQ2O5hXdGETbQbGqkFI4VItpfXfYvnv7+rB7l1JYnGxkgXrW/+k///xT/92Yw/WrL58//c3j3iNDl/j2+rbjrCoQcdxtaRJ7OPoZWliW6lNzmNekISFyYHB1cFXUVsDcClIgdJextFodAVxbHUz09s3DeplPIXx5e7dGVuPkGDAtTQzQrZIDGKQQnSPkUN1GCq/b/uvh+hbGRgFDHkXKNC1zBDRKjCEf6rGMewrEIVhrpdScFgtmBECyWoQoNDMGJ/DWtOuyahOVWpovejqNYPRQx1J1e78DB3EkohDYm4E2BkIANRW11D0WqaVIcW78RUDyiKEiaCBzENElhUXu1adqgIaJYog4oO9lyhg4d1XassO8jiOMaCrsTm7MiGBNBEJBSIAQgMoYkIYDHI82FfAQjdy9TSCYAjRWsg6tSx44bVLeDVAVpmKqBmbBPZherheb3PUxBcewOFu/+F5/dZWRwu3nH1Tbljf7qU1Vhv1B94d6aFBmJgypSEgJaM6x6UwT1XLsBNPMUJu36Lkv2QHdkEJKm/k0bta6mIg1RrLAkAIhFUNE7ZSkDK5NUh6grvvF+0EynF89U3REN1PVxjHNKsZ4mKAVIEghRGYK7DK4t1YaugPGFAlcHy/781zXH6vBCZGI5isZPU4xSUHgsfQnPEI5cP6L+bT6yOQwdKIwF1ARIiDPmS9HBmIkQDAmVMAKWPOa15dhPCyhTFjVHvqYeg5qYOBdiuN0HIGq2gK62ibyeTAQKPfDQTqIoTWpFrmzFCEvyB0MAZI2NZFFF8xDgIgGVSczZlo2Q2I3aG7g6q1JotwTAtVNzz6VLmfUpu5qhgDWJAQmIJEK89j9PVUEMYRgZtpaDRym2hRIANRgrEURd82/UvsasBKFGVsDhITmczfX3JX1uG8D+pHwx8N4ulpmgYyqwLM2MU+YHFDMIgdRQwCONHvNquAwNdk9WFJLGFNHgA7m5I7GjK6WCEfiQ6AdmesDLpYSsiGI+Ir6yD0DojTGQG7slexAWoB65E5B1Mqv6VQIxIDo4nM5/XyHQWREBhX0Cjon5lQN3MEUFRFYHZEsBujcDbyQg3tTN5dASHE2JxEwYYhMhEQ2ew7Mzf0RrwJgKm6oomDt/S3Kae6NcgMCQgZHMyWKM1AG3InI3ExnAQh8lq4evV06l/z5bHsicjfGmfdoj7d6hEdP1Xwz9/daATxqqPDrbwUggTmiqc6GSgBKOd599TAOakoibm4UiJgDp2HcxRC4lg4Nl/GXr25eH6ZpapebLpsuIjWTWi31iUxzYFZ/OA77Wp/38bRLo9bBQUNyNSQ0U0Z0MTeIORIbMt0PMDUg8+IymjSICk7E1SwhAYWQc6slBgqErh677jDhoVtozGeLTkXXZ+u7m60Qu1tKzCkNQ+lT30f+R/+/f/yXP/8C5643+/XrABEIiSeEX769v3/40Q8+ev7kajUN25zy3fabuFhZcxNlSodpypn2++E0dKYTOoWYiBXNUu53U8GU2lRqm909HgMROAENtagBMR0dfzH5UepvrvMCrUm1vsshXgRHqeAWMX+z1f/vj7/8+3/0fYrw5vrW0JbL09s3b7scX3zwomnxGXmOiLOcg4+l5LNnw9zQAByYw6/pn3OuFpHnBTUvEpu9ZDRX9c0I+Ec3HCIjooOZCRFyYINZdDV//wyZNwcnYnOd3ZqBwsypBHAEYg6q6q5Ig
eaaw39TKjInVUcCRkKYAFHdKa0jhma66heMiBSqVtcqwg3QMSD3qyX3/SKEHCgj5Rg7dCytOoZaR5ciddR6LG0y88gdYK5WKuj66cXhy4cPlldyfI2QXK0PuTVZPXt69/lPlqdJ2raWk30ri08++Oa/+D9/72P8/M9+vjj9EFZP9+NDXi3NFN21WUQf9vdXnz45vG37tztQS8tuGMZpGuOz85OPPl1BdxwO3XItw4HRgTCQN1Vpte/jkxcftdaWZyfH/dGnd9d//eWL56eH+8Pp02exC6uVvL4ZnnznN67/63/Ch/u4XvKaN+eLd3d3Z9/7LZR0eH3fEa6Or+9uBuzCneyKH5t2q6fr17/8q2HoTi+fnnz0dNRCMRyHXUj5eDelZdy+fb0+Xx8mXC7XMpSrk4vcdW0cx4dtyDYcRgAGYAdz81alTHVzdnp3v+UYS5N+k+tdba3EyCY6DYWJZyw/wlx3ju932vf60OPG/NhwgUy324eQP2y1EVIK/VhLTJmqFpmMFYOqNdHGYFIGBnUE0VZEKPelGXlIKdR2BGZHMPAylDK2ACZiauAEHAKQSSFRXS2XY9nxbLQEScwdxhz7LnV1GsyAOaBLqRJXS8gZY67qdhSm3kxDnxF8ETL2+ezJ1cPNy2G436zD6VV+vZ02Vx+9utlSjk8/ePGwPV49u2jTgSkixyoWmRMRhWxOMebc9alfAjJimI5lfgqktePD/uw0mXtDZl598c315eXFzesvP/pw3WrllDlQv0xjnWpzwLw9SB8dvHz04Sf7u3Ha798eYfnkw81ldzcOfQkG6hQN1Pt4en7y9cs3f+O7H9xe39li8/mXrz+4Wv345+92R6UlHY+FU1hkPpSHse5PFrkpPDtJ/9WfHFarDaFiZ0vuW7Fd2QPHpseTVZ4OMpdvqktAQCJXnBtqVTW8j9MAkbnhjJchQCcwD4yOru8HImDMHBFJtUFrCOju0iTkoLVySDmmAqDeFimrS14u9jKOLovT1bSrl5snOS1My7OLJ9d3v9ofxy+/+llYd5Bi5S5yOJYjBO9WvtPD7/zeHzwcdm/vjsj8yR/+7pMP4R/+i3/26Ucvbt6UPU7/3T9//e3PPl4vwle7ohcJFlfju8MXP3no6Wy1dFsM/SfxAeHPf1x+9PnbD66u1icLJ0pxISZmY8cpdHCsewWDuFwvzg+7O0Mf6jSWdrLpQ+oXHed+NlyH87Orb968Sl1qEMo4gAMvexXrN6vy0H78s7di0G8CxuCihFJLM6vX17cfPP3u2GD38max6kPk7cPxy6/fLjeLCrg9aEhYplImXW2WX978qvdFBcAujOrbQ1mtTqwUgQmbrJbd/eghJ1WwZl51tVjdHO8JGZkI49TGqbW3D9uTzUYMDwiFMKQMYKqaiWxqaUGU4PJsPU3bu+3tyflnoKzqr+9fXV2dSZVjDUZ+/vxJXufblzeLbtNl2nTB0Zxb7tfzU8CRzY0QmNlMAcAA3P3XxRzwyHtWwBmsiO6GSPioIKErhjDP2QhwNreDgYtqiFytYcCp1pU4h4AIRN510U1F1YEod9Ps3+boVhUsrTpwLYrdov/6q59/+Nm3L6+eH3cP1o5hkYIEUVOR5WLZLsr99bsm0+uvXj59/tFytbrd3l08eQ63EFNQ52lsKXV1mhwBAis4EJg2dzR7BHYQPmJdHVG1OHiInamBo7aCyLXVSNGdkZAZQugdFJkopdYqY5KmuV9I3YKDqsQQpbZA3KUo1gKjk1eRFJe11hC6qYxMiIS1TsTMHMdp6vvlcbyVOrq7aiMO1SYASxwNjCmqQLVKzBzZqqlVcK5yZOrUjEOapAFA4r7ImEMGZ29zz7OrN6GIXSIDRSEKOfZD2TVrSJ1jqnVaLzp3qnV0MzBb5EXIoUwFUInCWEfqojqoeer6YbcPMZgbMoICukqbYujMoLbCxKbqkJBRtIpojrlNAzo62DzINfM6N4Y4gFnMWZqK1pn7wM6gTpFUmiMFCrvj2G+W97u7Oo2bZf9OKDKfrrrp4VCm/Rdffb0/jrRAxVpKVfUytVVe1dJc68N+ayp9Xq7T+t3t2w+fP3t5d4NMm7MELwEAaqtKQBTUPQYA8jBrZ61GCATmWlTFiYfqzMHcK2p1H3WGtgZWQWLRRkw5sIEfc/sCth+tXgTaHMsRZX/96itYX/ytP/zDQxm32+HycvP2zW3zZEVsKKaFUQcRXC8Qr6hvKSfHtlicHMdx2rshYSIM3gTKWL214/3oCP2yR6QAbRpHcJjKYGhNZdkvpMl4OJxfrqwNh8Pu3e4OU2qOFwCdio9TiJ0i43JR2G/qdF/LfTnelaMEP5pAhtAlVG91kiaZsJU2jxNQR2FVAGQ0dgO83JwPtVxcrLe7fZvqHCXKXVZpIRACzwwwRE0R3QpF3G2PWmW97MFgGhszznL1jMI1ESCChOzBycVmygkDADMDKJgGTsjRc5KpAJETIHkERoluXGvJOYiMgLFhsBRz141yRAlICchyCJMWFEdGN68mBkEMzVueKjk24UZ4EAlENk5aa17kygN0mVJcpy54Wy6X7FCy0CTW1NC7nFLwPtrF6Wa9XuUQe6DlcoUO3jSQ9MgkrJMW0f1UxrH6UKBohEiEMQVgjCkDZ3ciVCk1iaPYIvaRZvoimiEAQCaOjK7I7NZQramSByQ2stx1E6AhKEIMbLVRE5taM+P1CS+Du/h7SsvV0+eTiJm4K4DCnHlorY9JS6NIIXBOGYBaHaSOKTWazQ6O7upSiZmZH/cFoLlhxcweS83wERLs/jjkn38RzWRqJMTH6yHA/K8c1Y0AZhUazYSY1dUfuR6gphAYUtLl+diHTbeyfTlZdB9tPmzVbnf3OV/utdxBGzg9aO3DMUeDkagaQTQXQM+LtYgveBkj7kCmsKrIoBoxcM5cdNIqXheJM1A7Hq21QJnQFY1jFpkYAZFTWKo39cGpiJPIvgvcqqkhITN5zrm1piZmbj7XqLm7ECLF0KaJ3VVdTVShmiuY+3ywjltqv5yGKaQEjuIzhWTOK81sQUYwx0cqswMBHhFuHE4BMziBReaZRmJiKWAfGUxCijN7HhCn0hRpaiLD1mqx1HjJmJbODAAx9CYtEBnY5MeHEHto6yTEIxgH52CcKEUIaIIIbg0ACAbVQyB0UHABbIjsBsCMgNoEke2RHuj4WJxBM40fEAxAmCp6RaxkjVpiAqD5Wg2uwT0gZDaA0NyIUEFNhYgJyKWiVqkFTEXlkedKBA6q4KrgBubaxN7HMGdRZ/ZxqAkgzphimE/OQDBXWQESoz1WmNF7I9KcFDN0mN3Z839oAI+FUwDuOpeSuuvjUQrxsfvMfcYVzT8OhFlANZhNIo+3OgQt9w/XgD7PdBExRO4C7/ZbQk5EIcA4lrvt9mZ/ZKZnpzmbozsxOfIwNQZntHXMxfXt/uCBg7daVLRhCsVdEHPAaOiAbkbI1aSpG8XKYQJ0s0gYmQXBkJoamQUHABhaDYEfyvFZn493g6T4Rgw2511YBeJiZTdMabEEkeNwnIE8CUPf5f/bf/mfff3m+vHt
8fj14uPIEg3n+B/StslX7+5OeiYTAr398svzDz9wQNRx3I9StA9r9pixRSJRQISAJKRjaw4AZnO/ZgyxtDoj8pjIiQTcHZu5c/p5EYnhj074jB1RpmE6SXRQrQCeaXT+5bb94z//8m//3qeJWUprcTg7P719/ers5CQtO7X2GBB3N3h/8QJQMyJmmk/daKYz9BPQCXGuT5m/fHOnWb4xm9cB4mwmn6VJdLc5l000t1wS2GOpmagSAIDP6+qRWIzoagaKgDrzJWZXKj5WCczW+39LKkp5odFDQG9VHRhjTDoMI0IgZIYI6iIaEqtUCkG4i3EdqAFDphRCQISINBPaE8amxe142L3bP7w97O9LGR3Mxbu85Ji968fSZ0/LzbN+uRj3v1glqg831ce4WrStbK42L+s1GPJpX2kqD1/sTs9PF6d5+UyffADEM9NLTQBp3O3lcCAiSxoSYAVoLUb95uvrxYvvnvytv9cO0xe//PrSS9dn23NeLTiE0KUySsxp2g/9quaL1asvvu5WT4dJauBpanG5PgzDh09PX5bpZrftu/yTf/bf/fF//Pc++OH38O3h/LNPVx98sn93Syh31/cnv/jpwzYtX3znh//+7/zT/+P/Keb+49/5/qu//NW3fvg3Stgp7K2OgAtUT8u+LLPUyZrtsRYPq7Pzn/7Zn/Xrk7Ozk3dfvV6tu/3wAG5mOh0OfbcENMOKDBziyeZke5jGyYZxunpy5mrG1C+yFplhIsSkRKA6Byjea7Hzi+v9mw9mF5upUxM1d1FvtYo7clZprbpUZwIwbFNFRA5IwcgsBiZEVzENrdQcc+DEAR3cEVVUFSMQkaGxN3UpTKxSuxxrjrWwC7rBs48/ePflu5O+e23javHi4aEhQJ1aLSLVWmzb7W7moDDj+fnG0Thx7vvx+k44xUW9ffPm/HQzaJ3UP/udP8obnb7468Owl4hvX39xcf6b24ftyfnFs4+eenLqSYpxzOih75aBU+BkACai2t4/BbxY9t1CqrXPvvXZX/7lV9XiwfQ7f/Ss78xfN0c/DpOANJFIkQAcrEpzH7fb1zIZUGytwQTrfrnuXFR3xzKJxmXe31VvBx9G9MMPv/P0x98M65xKaaP4+YvT25ujWBYFUY3Ob765S6fhP/gf/eYXX7+qFKaRAlkzIYhF41TFbQw9l1FbNVf1SKCgAADq6EhAQOoh8FwUiBiCgzExoDcHQHKYa8J9HokwBkUUNwIw94gcZoAkIZioGrDHFCdLALDoV3d3x/VpdxynXdUjNMcwjuX2dk8B3u1uQUZtVkajENzaOzn+4Lcv/mf/8b//v/3f/F/+7r/7/eND/Vc//tWLzZVS1Nb+5L/90e//1gdxyljKb3z25Isvv/nwt558+aN3ZepttH/0n/7pn/Dd7/5N+v7f+dafvLnLpuvT8J3vXf6//u+/lF8cf/DsY4GDIjLRMO4TcK3CzKtARlwEwep4vB5KhYib82W73xPTMLZFTiF1qq5u+/20jOsc8+Unn/zsp39pY3NIKaXrt0cGnlg5EbmItsvnT17+6p2IRcrnZx8u4jK3env9MElbrrNoAW0iEjs+DtsV0ljtq3evP/3sKsaEGBXG0xSG4g4sOhWtZ6cnFHiYJjFgRgSIOYpUxWDwKMXua3MKHrIoLHMe1Ha7B47oAKW0hIwYAi+Wy82zTz5tu1tEOb94JpBbm6aizz/6aL976EKajrU2YFjevbs9SetylOViU6exeludrmN43AsWy8WMezRT07mNp81Ze8TgqgDogPjreTEgYHgseAA3NaY4R7QeJyNqCEDEMXK36tycKKg4IIFVbRoY5xqQlIKAuTZVyjkEyJT7m7vbGIObmApzWHbr++t3Fy+eLTeX99dvGT2vUpBYay2lXl6dk+v+fm91uH39Kn767W51Mhy2q9PzXPfHScpYupQZ0RBdxNACZUI2q4DgoOasjo8kFFMkAnMTURUEjpFVNYXkOh8dWa0xxVaOxNHAmk6ZsjX3jsxqpGggoOAI5lbbLuSLMha02WUl6joPNN1sbgEjBBHp+9VBPaWeQghgHiJAUFU3FxsppoBpPi+Sk1R3IUXiGAk1xlBaSZwBqWpTbVatGcbUuY9uEEII0BbZx/EIEJjT/bDvIotqDKyGZLhIC9Gqhsis4pGyuhY5MhNGbmVCaGrC7F3uPSiHUc1TCsW0tBEokxuCIuYUO4QYMYy1QuAAABEwOBtOxUU8E4CjW3OAmDIpILKZqzZHdzcwEIcud8xcy77rs5p13dJ5GRQEU7dJh1amUfjNu2dPTt/97BerPmCORojmzCEm3h6HUpiBx7ExxS5gm6ajtVpsuy1TkWmc+HQ1PwVlmorAfjzmsEy5M+4FsDVh1dImwNbMTYqCCpAIO4ETGhhgNG8N0KCxAwOiRxNGtwbwxe21j/r7n33nMvd+02Lrx3c3MPnUtd/++Lf+7C9+1A6x9tRHePjVrd+1G7/GvjfOXUnpMKQV17vjcEjLvDzeFjjt8CMU0Vbb/c1RWpNRdGrhisFlOu7WF6fLPn/+s2/Wl5cn69XXv3hFZm9evvr0N741NHx3tOuBQuL1SX/irhIw4JRWn9fDl8OrO588gqk5oy88hNCaGfm4L0sMJ/2quQHRvuwsWFMRLSESMpq16GHZrU7j+mqhx+H+gxgvzl/8+PXnwrSfxkWOpWriQAjDNHJkDtTMZCop8FBlOxZs1nXJXcyMHBmdIyOTAQwifY6BvQsZIbpzF2IDZQRyVFHUyeohorvhpNUAlos+RK4a+7wK3LpaMqfS/GGYgD0wHQ4V0RSQBoDcNa0xI6MTKLmq8Oj+sJsGGGyiIqbudRJA8WCQqLiZTF2jipQij86MpDmmvtrt5AZFmzOdLZcgyMriNIW4bwjq9TDk6A/X9y+/fnv99t31dn+8u9VhimaRyB0FIcaQFh1QEo+GkVBz4OAacWnGtU3MIO4YMeeuEWAMQRu5mJuZIKIjTjaF1DciYjYkN/emchhVBI0icDR3AAEwfWy96U9PWdVMTZuraGuuQjHllExEDRAphDiPx0MIJtW1mTQErqLoBqZgCgaGTqgIbCDMwd11hsSBg6mbzTG0mdWrKv5IL3q8I7kp4iNu2N3mPWgu61H09/4kRfRA4Ag1hLw+qa7r9UnHvaKfUT5ay/60w36gPdS3wHthG1O4a8MqBmzRFBzUqEWcccdggNQzLXg26ClEcHKaRAtoYVH2BwYPFgnWk0zWJ8cOAyBqnI9bVVNe3m7fJodVjjAIA3Yc0S24uykBGlEgNFcHbCKujuDephQDOouYqiOzulYVN0CHZrp1L84JMaAD8cxeFjNDdHeaEy7zN9TRzYJal/nOZAjpPEYwdUdmmluOYmA3jSEGTo71MTcUuWit0XEZmzi1h75Clz8p0Nms/SGpInEMGCrEgdaJ98ysztEjGGZCMiVyQiS0CMxYEQriAmkOdDNhEJ7pzxY4mBsSIdgc1CEMCGhAjhyBAhm7GooQNkZhYUR3dsK5qzQRKLpCU3QLCYFm+IvPbh2AaRgZdtXEVeYSMVMFNzN
0V2tFW5My1VreGyn8XycnFQDmylR5X0AONuN/H4fxFIhnWdNMHWcwLM3RuplS424EzMTqSsj2/+fqz5ptybLsPGw2q3H33ZzudhEZXUZGZiWyCiigALJAiYTMRImSQZSZZKY3velBv0I/QA/6C+KDTC+i0WAwGgWqIUgRBEgUgOq7bKKPuP3pdufua63Z6MHPzSryPITduJ3ds7dv97nGHOMbru4uroS+oJoA4B3PammyWrxOQMSOD9pfpGDLCGaGoIf7t21aWdcZgYgO677VImW6Or/UMu7L6eZ+Px3nVRcQoUOLDsOmm9VOVd1gnalHQvPv7o+z4wXRWQeJPfd51wQSz9WtCjMHZm8SGKUBIxAAMo+qj7f9i3kE96bAjoGIiYJhIiIHdDKkSbWJTo1GCqLcp1jHeS4nzqnLdDgezjdbZFd1kfkf/aP/fLc70DtvlQMQ0NKo+KAoP/zABeD1fv/DY/fxk5WRNAxff/Grdb96dHUm5mUs/Wo1qRWSCG5kcy0xUBOLgci5NEP3HFMTA1yq3yunzpmr1siW1VVNzF7O7fOIf38dyjRlNKa8D/zdsb10E0e38Edf3TDQP/jdn41zET/FHFPov/vq25/87DcA2U2cHuifCOiuiOHBBAq0NErC4pte7j3uZkrEiKS2/KLiwinyd1rRIvygq1VafumBQIOyCGHgbh6IzR+yJMuUbu7IhMjugrS0ZKsvfWe0fK4VHnyUf00qAg4xLgXbzIHd1cFzn1waaTN1aSXmzMzS6mp9tloNHCnxSlTcAxL6A41fyKpbHXdv726/e/3y85u33yloqy2kLuUeUsKGMefxzQ5H55w45On2sMrQxgOQxfOrhmjsiYBR7XQP++uu4vvPPvjyF1998OSnr1ZPmIO7upOIIEcQdZPD7U1wP7y+jeSnNzehtq62uVRtYi5NdmnVx4G2n17E7dlYRnIKfYpnw9l7T067r3evvwLrrt+eatyW4qEPnng67E8b4piijPPrr/w01uOhlAbTGNZ5+/Ti7S9+VefTd69ePv3tz9JHH/rZD+43mTerdLZ59pPflP2sXRoebe5u3677eDhNfe7QPa1zKLDanr86vV2dbdIq5vUwat2f9k2Nhnz/6qDQ8iovpLXj7kQQYuRpOg65I2hlPI3jfHZxtr8/yCzaacyRA6vCQ5sPPjADH2Rp8AcpdtHI373/2+0AVtbrlZSKyENen6YpMYWI7g0j51WHBLXMylZbEbVsLqC4NE+kzDF50NImB0UkcUF2dQEmmYuiNivcbTF14lYrUEj9euUBmhtk28k+b/ub8QZSMFMHZKawiSetjPHu9u5J36N67BMzhcjDtjOwNy9foHMX4ma7qWV676OP3rzdvf3uph5PTx+fd5mevX9lJCGnOpd+uyrzGBAj8VKWFyODe22VmZkBHpKikDMNXUhBynz8xc9/b5ufvP/D97988+0mrBA1MprIODZIXEUXc3DPXY58HMdyLKQxdZECxD78+c9fJshDl4qQkRP4mjnN7Spvf/X5/i/sBKnD2uqxkPcM1iap4Ks8lH15/Cg9ejy8Puw+//Mv/vzz5ym8n8jFbH2eQL2cSg58OkoIeZwWQwCqSmYmtwce5EIXQjTTwEyw7A9A3X0JEZkyQQjx4RvxB6KiiAQgJGAKpzZlIw5ZVAlCn2KaBGrLOYqYu6lUNGPm6VBIHfNxlgmBQ5fd4PIibzZ0fzj1kRq4e/u//z/+0+1FWG/n62/1y19WfwLsMJ3Km2+6/8+3r6YTf3c4zXu4Hdv47T6cfHd97zEahQNufvkNvnnzZjqldpzaPf3hf/b2g/h3nn+pI8y5j1qqwfjk/ffG42lNayGZAQGUApm12hoT5JgZIFrjitsus7vOqAYYmFzP+hQD1v3NOkWkdHeaA2LMvDobRMyBzq7Ob+4PGPg4l46AMiuK+X48CCki2vF4LzY9ef/8ZnfUEFfvD1Cnedr3/bAatl5Dm8dpOg5dL202TH2f0bvxMB6OBzCb9qOKYYbc56nNKedh1c37MldpqjGyNTXMs9TdOA1DVG2RQjcErW3oOqLV+fZpxlCbS7PYxc3qcteuOZzqfIQ2pRQb+9VqvR7i9cEtry8fnZ1vVp/f/lnoIhgy/ZrYhUikTZgjuCmoATARANjSmOC4MAWYg6kCvtsS+8PNR00foC8O7mDmyzIt5uCqKUYV64YMAdyolWoKSweOg7emTNHFkAKxEeLVxaNpPoz7OUeMfYwp73a7t89fnj/68Or82f54baC5z65erGr17dnluJ+byf5wa9/zez/4YH3+uJ523uYuBTlzARUVMyNG4igqjiZWQ8iqy/rOkFkMwWPmvtkBAZiwtYLA7k4UldqyDxcpDs4xgQKY5TyYgqCM8ySqjMEAnCogMSMbLuVlAVPu0+6wxzRQTAKWAyMoqEEgWzIrbliNlNXErJmVlNYGjujkVKWEnEOIohYjj3UOKSAlsCJSyEBbEyjE4MQhRgMwpoY1p76KAdA8Gzmb6OQ6ZC5l6mLW1jpOczlxF1srWpuZDXF9e3h7cX4xT/sYsyogQkrd4VA252emMwMzMAQ4tba0vRB4Sn1TJQBOXasNHAIHNwW3lFJp1ZEhOBLKEtoHjoFbq8uVJM1NbegGsYIATNxac2/MVKQBgCmed8P1y6+R6/F27wDj3F5d6/19ubgYHr+3Pn5/d7w77aty35+d961UCLDu4/XtXYgcFg+/8dmwPezvQ8QU068hjve3OxUvtU22T8PG9az2g7SJp9lbdamQc/ACoECREBFzq9qHBBTA3UADDYiMCqahSY0moJB4eD7uv/ujf0WH/d99/9MfPft0u+oP4xit3h6eP/v00dWzp7+4n2U9DKvu9tv6/mcf3I2TvbybXx3k268unvydfW3mp76Pp7I3/PDo7Ga7qY4LLTfn1aZv2BiU+zjXWeqEQHevr0+lnm4PAam/eryrrhiefPbDavjNX/7xuOmO46HVcZT7Q3t5AG2hGco6BQJ1JAQybzFS0bYADERhrlN1o4CBgVMsreYY3KHN/mz9ATnsbm5C8qHvxuOotiMmdOsiI3HMsZZqpl3XGXpgLkVqNWkaHAMip5ATNxFkZGeVB2hLH/IQO3PoM/exO06zaK2gzbFqy4gyT4TubA9JJ8SuH5prazCLdjkYGri7NnMOCYsBiM/S+sRkpm5a0cEMiAKkRC4CVcRgP3IXvYtJi6mRgEdkN3QBrhQBmaiJpu0aGzSpLohTyebj3CCCEFjKAjxWpxhuDrXyeHF50DRbKy9fvfnu29dvX73dXd/r/pgxJERgQGBMA3JSJ6JQjQxDxwmBBUZ1TUgpQZMZiJCiOhOCibtjCqnYHDgQh9N8oGXFV8eYhxiCI5k0BMg5zKMQMpiWhqHvY4rvDsmAxIFZiSgk5QroLmq1UFAvFQDNgXzJoiIyYcgemwKQgTgho5uaqbuBOwdHQLFKxMi01GcCgC17CVgsMeAAThZCRArvcji+oIUBDIgRw7s0Dro2cic1U0VGF0AO3G1Ct0JACtytesGAzA6H1WpwtfN+o9FE/E
0bG4VGUclyYCklUQYX1BmUDGMx4zggmiinmLSBtQreQqTcneU6t8Pr8/X76KxKIa8OINWLAACjWXO1PtHcaBU3q6gXXZO7XZQ0lxnUHayVRugxkiyCmPtC0iSnpg3AHZTACMzVA6IiAmF1L00mJyeO7oRgRAgI7vyu5Xh5/j4QxAGYcHB7v+9K0eKoIplR1RYELjHWppEdyKTNQAbmbm4YHEmYulXXK/s8abuBcha6Zw1IVAIiI7nhQLnCptru6C2FBmpmsae8cKrMVRd/catoRv5AAyRogaIA8jvzMiKoq4MhOPrD3nMxXwADmyUAAiRncFT0BhIQgsNDmgbR1YNTj9yYDB1dF+QTOLiATXL74vkh3BqglMndQBW1IYiKmLZWZxFx0V9jPP6KROwPNCNxWcqlFzIsU0BAdSMKbmpu7vqORwP+4Aqy5aSB+ABpAzOARW1yJAIzeKiqNkQyN4Tlv4iES0jQXM0aApkLYkR3BgdGJHz29OrmNO6bNFNy9yYd4rBaYSun43E8TFGBc0I3kbbqu5hjMZlEq0MfMJgkDs93x68Px/eurlgrCjAHNlxTyK7izhzJUZqJGjK4g6iGFIOH29q2EVQrYSeCgVjVuq5Xa6CWMqfZc4zcWhj6t4alX+VhQI7n2w3v4pu3d+cX20ePLw/3JwebT/Vf/fkf3+0OuLTNLVMpPMQlH9Sid1D8ZYdpHF/cHz64iK3W7dVwMJNxuteyvbhgtLntc8LTWNYDE0SkQBycysPLC54DllrPur6oNdUUIjo2h8mtSYtIjKgMYv6mFDkPeQhi/PMD/HKiNxIEzVwYSIH/8OvX2/PNv/s7P0YZyzR1ZyuptUxjHJKhqzuY4/JphSX0+IBepgfaKJotd0RjIiT2xd5BD2kcB1d3WgZmRAc014Uc6oBErKYItAiORLS8ZkRkogT8oD8Sgi0g48XqRKqyGE3M3VQBDB6Swv99VlHK0RSbGjM7R28QuHd2JwpGtU0BQ20tEm7OLpmjuzMCgjNic0khllZcNJDMp/vD/esXL7548+Lz0kYgzMOj9eV5yqvQb7q4QjR3DTodr3+1yX/fMJ32+yHAd9ev19v+++9uFC2uQxSDUbdXF/vb3fnw9OaLm6fbqy+fv/i3//f/87s337338XuqutQILiUM28365ud/JJazR/TEs9qrb+diFz3v1FcJWVWlXjy9uN83nU0R3LG6QgirVfjlH/ybT3727/7Jn37+O/+T/9X1n39lR3n83tN08aFg/+ii//1/8v/+W08+/eLVv/nmv/59b8e7775/9pt/czX0f/Jf/se/8xs/e3v50WrzseNWpru7X32eq+ueYbex2baXj4fVR9vhKemK230XqB1254/Pv/rll07gHi/OL90aelkln++uVZQGRo9DWGOf2jxvz852N7fLBXaxOX9z/VZdV31/tz+BLs9dWK/Pqny3ZF7NHJbeswdv5YNNcYm/Ppzm8J0ga8qMtZVVf27uQ99NpcSUTtNx268dg5Tab1alVhVliqdp3JxTAGbuHMKqj6pTYIgUUdWqpdwd9rfEXJu3WegMhtXG1Pt1gmaE2K06Qzkcdx2fZTD2/uqyu76+G7rV1BaDRFCFbT+cjvuz8wszlSZtdioajHe3ozS76LrDbq+M37x8ud5c9mtsX7xVkdgPm83V/n63PTt7/4Nnf/B7L54+fdZOs/ZDa6IayaiPkRfCojmAzqejlPFBpEatp7uMFBJtL4bD/W622Mfw/Os78Pls9Uxb3E8zh3i2HUi5yqnW4zjLsB5CH+vddDrMmhIZumcmsGRxwHkPkdNxd/Iu9mfde0/f+/q7t+aIAXOgVc9392ohkykxPnkyQNszdTHhl693v/Nv/cY//72djUUAWijrrd6Nc/Q+IkZMs0w5ks+1i1HmOTAxB3RHcxFhR3cDRAefWxm6qOCiTYECkbqKaiBkCgqs6M09MocYSx2JGSlUsyqCROYyyX4IPAH31B2rdBz6GLrE87EOGaf7OfX8d/7OR48/fnL+6fu/99/8vt3e7ncnz+XHn7z37fX97tvD9mLQVv7lf/EF6XYdLwiGwCLSYn7y1Rc3cUi1SN+vP3j/7O/9b3/nn/xf/qvz1fby6uwvv79t0E+HsxefX//k07O/+XfP98VvX9jq4lGxb/MKckejIgK4QIfMaf3ieJ2UrBZCXMpwLs43Q3R0yts1gL+6PT17+v5uGsnrhx9e4mzTfnKUb375/frsar8/otv5sx+gz2dnV98//37lYTqOTeu82zFAdWtVVsPFRx8/+df/7A+1ZoP5/PwiZqs2O1iM1EBJqRwlIL99/SbF4XY/pm697Tf3u+fOkPqOVnx/2L2600+eXYAdYpe6jkUKgKcUJfez1ZhIESOnu93pgw/PAsdyuDdp3jR2vaoQYd7029UVAp1ub7UxeJcp7o+7ahbCMAQ6tttTPazPN0MMN3ffrM9WUqmafP79L5upO3WKtUwPm+Sug3exseUrMLsrOBIRLBXFS6TVHN5Nt8skhIiMZKYIbGYPlQ8IAOpKiej65vbR+Zk7dV0KGaVCzHH39sQheK9iak0DMa86QqMQyjSlFLfpnNVN2nyaOMaYI5ge72+uHj86u3x0ffO6SY0pnJ9f7g/3zPny8ft3d69i8tP1q7fewoc/3JxtJ7cy7lf9pnlCILMamREQkFSUMC7AgAWzaSAqC0uS3JU8OQAvE6uaJQEERUlEFAgauzpjkDbmnI2get2EXOfqTIzI1NkC7QRiDil2bbojMsNqYmM1RxVvIAUcU+xUJwcLQ6pVREVE1SAQgxqBOIg5iRhycjb1GiF0kQFU6pEpG4CDMaIq5JBFPTCP9cSQU0pqqt5QWIEiB4c5saMZUhan2ipUj+TSjsiBECNFM+j7c44balUNxcgAmnhrcxdia9Im12oaGBAJGU2QzIhEFLwFRzM3E2YCcFHRim62cB3nNm3CqpTCCPKAZnQmWuwEIq2BB8RxPKw3l0COgAIaAySIWca3hxOif/XNF/3qHGBarzfj6fjdm9tTnadiCCxqqc8ceD5Z8ZoSYgxI3vVprrUb+tlKTBD72O7bOD6EDmpVbUJobR4reDGfjmNg66zIOBqAqJhPOXDOCTgqhEjJ3GlR44EDk7Wl2QTRNTMAOEe3Hq3CNOF/8l/9ix8//uU//Pf+3odPf5A437287R+dt9MhzYf770e4uW+n+fr5dzLvgtRB8YTAbQrzscsDzBXHtkk/vOq3quNmvWpd5ymfDpOWkjnMh9GcCaOK9GdXuch0/+bTjz+1uQ5n24Tz6eV3bbJffP9nx9Or2zrLaXSVjpqLBQJTX2p+pXokImCRxpEyDwfZA0ub786GvoyVOAAaY6gYxiKEjJgO5Q6lxiEAhuZ+Qnu7v019ZG+uphWBFowtNZWYmBEitbP1hpn0VPuuS4khWClQRV3JPZTaiDpGmlXdbDy0xtNyNvZRU8piYogxEKEv5YaIzmRgM4c0R0bm23neJHUIU2uGoKAx9qY6RO5yPJzG1pxCIuamNkvZggdKrskEDxVPhJte3UBJT20es
I+IXk9EHjSiJaMwRu2sRQZkOk0j1hbAALkC3gusA/ek4+5+Kr4L6fnLt3h2dry5/v7b5y+fX1+/3nvR3iMaClhOKcaEOQtHCAEwkQRyZ68NBJfQDYEDG3OOMaZgrhFZ3UGlSNMFY6GVkZkDYzJzBqraHCxEoVpCiGmTkVKLQOtOAaQ9+KxVqqg7YiAGJMRk6M6WuugmxAnMpMyq1bQhB0fmEBEzgQfAoC5mSOSgSGwmqAYAS+UqIQOyuYGpuy3DqYkua20wA3MgR8RfOy/AF3cSoDvhMsguf8zRiZCXqgKgyGmAZQdGjOjgqgS2opl9xd0qh3F3+uH2SSt7VCH2UncBAyaQRbvywmFA5NSlCRxrc3SPwcAhJBRt4OxqgJg2p1ZQPAEyZHINwLM2IzdAMSNWRF+lXE4v2WadStsLiDrHNGQKCEpNTN1CiIQuUh++K3MxRcKUopmzaSY28wruZkyhqLWl7xYe+pAWIAktySV3BFA3QnIAV+sSdzGcGjRgZgjoTsgLYpQ55xjI9cG+4RQY3ETdzEVMPSIlQtUyQjxxro1WiEtfF5F5r4yeOPTgEV0CGBknSoEIkZZ/IbtHVRYgzETJvAGyujguotaC2TUnMkAA4uXqXuKMzEAW3NkgEoErAokbAJIBGhCQAosjmwZgEHXgnqmBKTqAuiMBYrPT2+curmboqibuoPPkrZqZuTooEhGg/PXeejNAJiJwF9NFFkUEWKrrl38GABiYu7sys1l7GJMczJQXi5wpAhiAWmViBFYVesh7+Ds6yAN6xtwR6KEua0mlqRESAjryouXBA5aKP3z63hdff2WTgpfNkNYRWFUdbneHUmQ53Qc0Ynt6ttams7R9U3e/6CLX+cl29Wo/fb0/EcVtCmmeSoOZGVsT9fNNxgizeM6rpnWGMBkgkouQgquPxrGLpDarDTnOVkfTgAqm6NZMFyaUinZpuJmcztbD+mw8VCc8TicAaMtXmThRbftXb66Z2R+wystx1hdg8/KW/BrbvoDs1WA/2fX1NESbTsePPn3/zz5/ScqtSu6Ig+IkuetUqzXtU5jKLE4co4nEEDKTVhWHWaTrByitmcxOGkKgQIpFHRADozO9bO7Kb0f9ZqLr4hTQ2jtCNkKh9E//6FeJ4R/83c/U6v1+lzm+ef32kx9/CPJO5DLzB3C/uhMstCGkd2AgBEemgEjLVQRqgE4cEcHNzcQd3A2dgMDBCXkxX6rpEkZbwEaIaAYLPGGZxBeTaaDFO2/gjUN8sNwhLjSoRT5WNQP/HwbQTA0pZM5qYsCKgG6MSiE4OHlorcU+D5t13qxjWHGIDlhba02QcTycAEXLOJ5ubt589/rVF7vTjkO4ePRpyJeczro4IJADcghEPrdJzHbjGNfr3f7myScfv/qLX/70p79xevX8jOi5hyZKmYUdt2fdRz+qv/z+Zvoq3Gn+8Adv75+vN1nMXd0dzUzn8XC8BQRt3sXw7JMPvjilsDnrUnpz9/r+xWvsL+eZXNPu5X136RyH2McytSY2z36Y57NukObfffXLD3/8yUEOl+8PxzKaAndrETzdvPn4s4/i02fDT967G99+/NnHbR8jXK08yfH47L3P3rziP/v5Vz/63fe6MOUYj+af/eC9+XBz8+blDwJVtQpa2gm1YZu0leOdgsd+A203no73u5cvD7v99jJ//8Wrx88ex5jHWs7PN3fjkRkctMzTZtiMt/NcYeg2Pk9IsF11N29vU5+7Ie33h4BsYkgQiAXN/dd9fMt954FP9FciOQAwnaY5hDiPNXEDDodxTwxNpJZax9N2eBQRpJbNpjerIYZ+nc21H3r2cDoefT2oVgq4iFPIAVkxEgZq02xgyBhTPu1O/ZBu7t9w4rlN86QcchObtXbr/nA8InI/bMbTXGuLfZ7udwhtLpkyUyAvgMRgi7/TQ8B5msU89/08gyf75s+/3t/tz58+PU2Utuu4CvV0/PLbb3aHXak6DDlkc9K0Oqcuq7s06TMtVUat1Hd1H7BepUzpfn8/XGzOt30b92r10dNVOZ5+/JOP/uhfHQ6jY4jowEiBMSBEQINYNLpA10WyRgOTkVHKnV/fXQfqnMJcJHWZGMrcXr98jc1qqV3Hf+Onj1+/vBOft+fdJvWHnb29L0OAr5/PEjGe93/69XXuuFTpclittNsqXo9gIa/yYTxIDJoV58aGIQYXUVMEE22EmKmr2h7QhgjLeiMggntgLq1EzibqLqKNOXBYUIwK6JMcgcyAVIhQI3MMeW6Fg1SrY5M4hObSd4mO5mrK3Ke0e3Nze7x7TPL1q9unsSs6tZC/eL0f5+YKNIEIfHDxt96+etVHf/Xq25T4N3/nSYDy+PFwmlph/uQ3P57wy3/+T//5pz/56O6r07FMl++tdvtyOu6evRee/SiW8e3Fo/defn/87s1LHrjKPB8g5xVgt5sbiMzji+KCEPucWpkJaKzj0D/WWlNP56uNNru+n+73e2D1OtrUXd8cnr+8fu/qcb89M/Oh75sDp3Cc9bvbFymxuU/jvFmthxhzwGp8ebE97urP//zr6YR5WD99fIVmb16/+uDDp1pa1w2n18chhYvz9c3d3UX3dKq+GjaTyq6M6/UmOjWRsU6S4PLDD/f1NElzRe+4lgoErc4GhN0wy+yIMSRHyV3Qqk7UdymtuIluuj738ezsrEtxPM6hR6CQIXZ9nmpRETc71hq73iz066en+frsahsoa0374/1EjXJ6cvXJ/vobfBdTDSmZuenioHZfpkF7t91Z4KRLtvuhTnHZetGiS9oCJzV7ADGaASIszBAK3ny3OxIwBdIHYCOpk4sRIYGfTifi7mxYWznGHBOhq4ta6AfS1lQcnROB0TSXV2+/f/LeR5ePnt3fvml1jrlLIU1T45CG1dn9/eu8CqfD/avn39qz91fdUObS6oRZkTQEFjNANYNIqeqERsvhQ6TSwpFEIGQErq0YtBi6BzqKmrkHQ0cxE4cqUp0iMZb5qOhItaoYmro4RGlTzn1pB+IkdSaAwAkR+66fa3HXB1M5kKOJKFE0tRCTEBkhIhMxU5ilALRIROApBHNQ83V3NtaZcIGkWjfk4zSGyEQRzWtrbg6kOUbXqrU0aF0fqjhgCJQBZiAs0kSVKHBItVofgqkQOAc4TZPiKYTc7ASEAYMrpxy0NRNMQ65lNLR+6K4P48XmTFtljoBeFicRkpmL+6ZfH6dD5BBCEHdzHHK/Gw+LlQnREbDMc8pdn/vSCnBMIcx1wkDqwCGJGTKoNGCKHHa7+z/84997/9n7q6E7u7jYdOdGjCjHUnIXd4fZHbvAZ9uOUQ/347OLs6nq4XRwd09xFFqfP97t76W1J48eH+bdSC61vlsbgIBPh6PW6qViaf165XJCazCO5hFyFxM5AahVaxYIKBKF5sKcHFgdFYwJzIu7RBzccBobdh5CzFeb7Wfvv/XwH/3Rv/lg+MX/+Mc/fZY2K7lPTZ4lqfN4kmNI9Xj/cjsMbCLY4nn37TffB9b59HbzZP14WI3f/snt2y/K/sQpdmdXvL5Yc6KuCzmswkoJ
aynzUc67LOPsx4lPn/s4vXjzBnQux92rMt7t907N0NVcm6+YiEjBSFmrhEgRCJ0jB6GCIE1an4MEMOTqmrpsLqY21wljFwhD4A5B2pEJu/Xq/nCSWhq6RZqadTEEtlKBEDgxAJFBzrnPgfuhSGP3vBqqyu4wYSRQL01bkxjdzaTW5ayAFBGp73pUFWncByGkhiEsw41FZjBYbQc1dQrrzWa631sTIFGEwEFlXg/ru/vxftrlaBFNAB5KtUlVVAFT7lRqyp3UpoIAiJFVScCKSWAyahDQsASjhLFNp4miIAwWuQvToR5Hn4uZGjQ3glZKO82nqcqkh93UDtNLxPvXd+Pt/ddfPn97d3c/zTHFhEDgOcfcd+KsCIIgZlgLiyYiRDHy5tFCUmRG6GM2ndA1gLE2FZ3LDMCQBlEFLTG5ewVw7jvocOlkZIDtdlVOBZBqNQoBCIBQ330K5nlaTmptgQYvttIYIwcwS6kzV7VmBhYCLSUXISESIxCiqZqqgzU1RxeRpRWeEHPuzAQhui+AGEcEU1m26OaKyM1aWNrMiBARl/5od1N9V2br5v6uRVwXgYSQmiGGvLiODBEJQaWKABKwWwhFYZO20aYrXt/bcUZSCoweiKsJU8AQHaKReiaNzAEHca1TnwcIWV2a+UnBmq1hVdU6p9rcZaaBXJRc0DlygmhGlaKXU3Xl0wRYsI8Bg5nRJOJdAns4/ZqrmbmKIgAwRAYgU0EERBR1AHNRQojMiTkYMBHBAgogd2NaXgx80BCIaEnqmXNkYNiXWs0DIdhC56OOODGhNfJg0pBDCNGxuTszLXtHAgALnPqYSZpIOYR4HfPQCAzMOIApQQS1wgkgRyRyZQ+MwaEhBgd3UHRhK+hARu6OHBfXiMESQAdTQ9foAAa8mC4cF0NOA3cifJgrEA2NwBiMl/hXAwOjyEstmls0QqOlstXMgd0QRGudT/P9KymVyIm8NKnVHl46WzpPycwCEPK70/FDMtLNAJGYlnx0MzfEJeJs4I6IouXdAQsAyU0fHF5Lt4UvnmtBREZeIkgP0g8sHbL663aqh+IpRAD0B4yXPUQv3yFEEAEATVwVh/7svadP5usizd5/vGbHu8NxnFuIARLWOscc+kgMgB7UYVKZW7vs0sprJh9r/bO3d3vnR0MOogiYQmBGMAxEIYYyNadYahUzZwaKZt6nRClS5Vm0mgeHnoMQBg5WXcS2nFrTVexv59KaJMDbqcTVee43fV6RJ2nzxeVVu74Bt/3NnZYKqn/8y7+EQKb+12bOxW4IAA8YnQd30TuVbWnsaa0Nm1WdxsP+7tHlejusX71+q+TPHp0fp7LOUExNCqMThiKu6Ia0QPuBsIqAYzXtYxgPE+VBHcUxI6hIJEwh7Kv/ixsbBeqSiEWjJS+IZLbokdwg/ZM/+uLyvct/+2efktW5VIixiTRRohAZ1ZaSMiNkJEIDfbBMAhASsesyGzsiI5K7uauZLv2PxIv3mR6URUREhmX89gdBTZdk60K0X6xJbshIgGoOauZIxKYmUpGImX3BaTG5iyNwWCxy/31XUQisBkjEEJyAE6JWrbqEOcUQQ1ptL1K/jt0ZmYMTqJkoQgMTafsyn27fvHz74sub+5vmevnkh/3qvdxvibtAMSKq1qUeR6QGonh+VjrYnT7vYYBSpze3z/7dpwXYuaWUTjcHLba52o7mNL59c/P1/+Y/+Lf/7D/7fWNa5YyMYIaBdZ45JfS2vXps1rquy5m6YfC0nVhPN2/heHr+x39y/tlv3kyHv3H146t1+Nf/7L99/KOfXj59hK0xYj2O3eWj1MerR18e97tP/8bV7/3Z5/mzrUi5+/rlo4+qmr35+ufP3v8pu1xeXtx+9cLkaGufoNn+O50Px/0bhHpzffp3Pvh095/9x+fvDTmJ2R3G+w8+++j29f3760+73KELhTCXMSTsQjThbjP0XcwBX3zznUuVueewLlM19KFbr9abIi5Yx+k01WkYzus8pXN0wu12FTksD4mlq57AV6uV0x0gNF9Ss/AOpr/MvQ+kInywuz38cpPWat1exFJ1vRmmUkPqtJ5Wm346CTGGHKSKtXa4PwIkUEghmIrUljsMARtQXq0Ru+XTWo77ABA4J9ZSq5fmKYMANjq+2cehm/Z1mvzp42enWZ+994gII1EOiTC4QquVM9U2Zxq8SZ0bh7janAF1KSRCIZeU6Pl3r7fn54bR0FOO7B3R5unVx9Pum+vne8Nps1rVWS7XjwhTParm4jvrryKapRjQbdzv5uloqlZnedf3Ucd5NmnW1Vv0WcH6u/3cn8WO+8NO1MgEpCoHnmrJIaBDcFIEVfPGx2Ndbbunjz/49rvvUBxjGobhdJiYgrlgjJOW6VjOhq7bdMXtdrSff3OK5rXqOhXVsr64fLtv7On+1OIKGSIlOHuP9nKMKVo7cOE1z/0qbdcXclMq18cf9uVtPHzZogUgdjBxSSEuDtgA7BxAhQACUZHCgASOYMSxqK76zhyaNDDJgUQUwYkDEjFAjhERGWMEbK2mBFwLhF4qdylKm0GxzrA/2V31n/zg/eubl5vUr3r70WeffPlHL55snkx3d/fHO0Bij8dj2Z7lz7+506mSoCI1Cvtj+c2fPf3zz7/AlubGv/j2yx/9Tdq0829+Mc97baEpE4hBIl7z18+/+Z/9r3/y9O/97ne3/91Xf/B2vVl5rRSYOag5Is7W1GtEQoNM62YWcNisV1WBXKvQVKUPEaAzTZTs4vHmq69fSoUhbIHCaPeI0YVjTGy6AggxpH6Q0ogI3QX1MO26mLVR3+cyN9dUm3/3/PbJ+eZyfTEemgMdTuUHHz2q87SbD+99cHXz6noYztzMRKtgSnkuM1WtrWCiw/2JdOpWfZNGjMwcc1Spc2veDWfp7LC7Lc1Cx6B+fXtNyIiEzoHyxfaSeihNwE4x5X4b71/drrqsuPTWyMXF9vrupu9XQ8jXL19Gxm1Ot3f3Zr5vhVepp25/eOOm9V330+Zsa0uzCSEruRDAkjt7mJuW28ki3C7gfNPFvfjOK0y/3ozBMgOZCbgAxLOzs8P+EDhECpFJtTBFB+QYkFm1SmuBulrKEEOrgpEQQcVVgNxSF4nNTQCCK6rLzduXV0/eu7j64NWL77zJsB5iasdxPru6ajpztOl+3r+9AcAf/vBHeeiqV6YA4AAGLmiIDm6MHsm75ZmvJhzjklIhEDdjIHBeqisYU+RVmcfA8XTah5jnaQ6hd1UzoRCq1BjXLg2xMwCGKFbAkSinMEzKXewLStMSAjOiAuSwRleECGR1LjGupFJQgFp6otmtiXlwA42cAQCI1ZtTFG/U5qVemQOBk5rlmFRBwMU1hJVIISQDbFqJM3oymB1VbK7FUB2cGhiQEZB6Aeai2kwChFZnhdKAA5GrJA4iYtpC2piSGSJQMx26/P2rFymuXRXADX2ax9WwlaqGbi4CcqyjgroaEokaGs61xHeDUGsNY4whVVFkdbMmJSC7Wpc7twbg6qJFI/NU5tRFQwAcXlwfq77ZblbXr79PMa26/KNPHk2lzEc
9266n0/Hp+RqYADtQN9FxmgmoqetJMu1DoNPxtCP2YBS5HxK8epg9G7giGGhTtWORepsITvMEig5RpmpDjysistwHVWEHZBTQwL2nnooRmJuyAyJXwKINia3OOuMwrLtH67yOymcvvr/+/33+J4+NzlP/6aPLZ48vEPP5drMO3S1M/Xq7+/JQp3nVp7RdTUWG9XJIO5WdyJv75JkZrz//msK3CkYxcUoM5AxE2I7jdW1Q5rK/x2AEZHf7YbsaZ50LYO5IlNEMbEhdsnY6nbarjUE1nHPMgekwHokhhbhK/WE8EGNhOJTmSDlEbZ45RQYnBKYuMlkD4Cp+M56aVAZ3BLeWU+4iMURAC5ExgIGRMSHMtQVAFROwmaSqGwSytB/3ahAIrZlJY4QYGICI0UHdjawRKQcUsZQIA8+m6gQxpCFPjE6pqslcRWYGXw8RSatj6Dd381jdBUEUI/E4T4nYyBAM2JsaNAwxiXjggOboiGbSvBEIuBIoaiCMBMzuXgnSbKhK9WR2qvNu9PrukGeeEDtinGtTKtVqGdHkcJuO3Wo8lLu5HWcDDtVNEDZ5qRmGwKmYmZuYBlD3VqQRQYpbpMBIbACq7siobS7oNml1IEVuTlqKguZgzWofI/ZoPYXoGETNVcwipz6JOyci4mowz9Xm0/Is2N/fxxDBfCGxikpIKXadcAhMS/s3h8QmdZ4iIaeY0sAhAfECsnYwM21Saykuo6mauYEj1hiCKwIyPJRpPsRvANBVPaC/s3OAAxG7ybuV+yJzISK6yTu2AgLy4uJTphCzPWAYDGCRF4gxRBIG6jLvDzsjZSBph6FfE0fkDsUNi7MbkCDtyUPXI4bg2KQETQgMQuzcoc2GFLtoIVjaxNXu5o5SBvbIJEoETApqFJiaHUKgRvnm7u0jIa1GZq4SmcCQAgOwmqgpuQVOps3RkUFViVBbc5ecswPGJoioIYB7RFu8AASAhExhOUmqOdEDOAcdDdDJgbEQuuoFh423wBaBjQhdTWUdOZItECtwdYDi7oDFTFzZJYBTYPIc0hp0TlqaFI1Z0NWVQQ3IKFQcDLeRDz3PrMCMARm80dLJ643RwBQgIvKiYoETLUrK4tsFQvMEHB1cFwkMDdyQAJAR6KEwDB1YyRu6oC/VGUBgpggKpoSV3JkjGC7KUtNGMVeopYGX2a21Vp3QKYooAS/nI1ciJghkf7VddyICRFWlRY6kpUqYFt3TliQPLVkNXiDtS+RtiZc98D8AEZZvFsR00fXcDCmYCtK70rMH5PtCICZYQlKLWRvp4S83XxATjkDAIaz2p3p3Kn3U1brbZLrbTe6OzEA4lZKCM9K6z7W2UymTmpt9cLaxVqJrfxb/9VdvDpWYaAAykZN7HJKphJQP5l9M8082m6mWBT4ZQ1CAk/tJdSWKzap5MVSD0owyGrmrkboRGLujBlKskvu8P7Wqvup6a/Nxf7g4W9VaYo4pURtFQc305dsjhOguS4LvXevZr5uZHt6Uv/YDMvBZVT3PdR7OV+wtOXRcOWiCbMe67aPXYwhsMUSiRa4TcxXJzEXausvBQwgqZg3M1LUZgRN4TDm5W1NyFff9rCrGDG5ACGIOgEwE5oTgJoFIPf4n/+yXj55+8Dfe7+8P+5wGAOAQ0VFtWbayIwOYq+ByIRG/azRbbBG0tAw/GPaBTBUfEmG6nALMxZcoI/LCHUNb7JnGiwTpzkgG4KbMqCoITsTuQAhI7GZE7qbuuLy5buSOywfa/+pFficVqZo6mruZmhqTIzqSiygxrdbrxCH2K069ttaqhJjnNoMJUG3tsL97cXv7/Ob6+Wl/HC4/enL10dBfMvZLoxuhAQiROzoghkAohOKby/dGm+FGznFI/Wr39Z+f62735s/6UOZy03Quh6O6fPT3/55x2r5/9fSTi1+9ebl7+fzpb3w8zY1zQkQ9ze3ueX95NZZqjl3fH4/HeHGmJoB1c7U+TeNHTx9P37/Zvb37V//kH794ee3x8ebDMfddYH/1F3+yeXR5+6Z1l+/vy2k+HjdpQwHTeV+9lfHu8ScfXNw+xTC8+u51fvLeWT0e7u6hNb9//sU/+0NXmAi3H34UusNhf7r9xRc//cFv11Jur6/Pj3ep719c3/72v/eEqI7HOwqd+hxXXalTmU/3x+uy25fIavTjz370xRfffbe/+a0nn4xjPb94NMnsZLVoWkXqMyQKXRSy0OciRdzKVG5u7i/5cSvTZo2tFnI3t4Vm/SBLL263X9sbH3gisNyiwGHcTyYaCUOkGHlux0Acsx1ONQac20lNU5eq1jK2lGIpZb/fI1pTvbi40gUKB6DmdRJnclVOcRwP4qDupWlEyKtQ63E4G8TK+tFqaMFblWn35AdPxnGmEJxwbKOjYWBzX603hsRd1uNorsSIIZ5O0/n5apqnJroaMoDtd7eOcXeg2mpebe6vb9rhLg3D7nD/kx//+NWX37x883z75Gr7+PHMkDbhOI0gLgKtVjb1NrHqtHvbnW0fPgwE1Of5vq7SkInfHEfqN5n7Ouq3n5+OOzAIXeauy0ctYy1MBIHKVADYzPrUt8m/+fo5ubv7XJUgrjt+dP7em93N7XH88WdX6xzee//qX/7LP2dlLHi4qxdncVjF7dV2v79LwWprHSfsiAY/TtPRILHHAG9vT7HL8xTRwth02t3mDB3WXGsF4Ri1BloeKqYcAlIwEUIANEcw86aLAYRMVVXAAVzBg6kSIjERIFF0qzmvRBubsQsjErEZ7sfTakUxAzDj7EAQInPklDgEDtpevnnec10ZPTX/7s3rtY7H20Mro7Fs132OwzzW01QL6b//D559/gff8S5XgZvX8++3V0MXoxNVp/n004+evXd1fvjLu7fzoZhu1n1HuF7Rxx9tX784ffvL758/P73+4n7TnRFQzFHNSxlVBepxSF0j7jg3tel4qqJd7z0TTIeU4Gyzfnu3b6pPHp+9eXPbp/zLr7/vOtJSY5e+u/n+Jx9/vNvpYayJ+e72Zca0WZ+X2nLOOcS74zXHbQBeb9eI0l/kaHTcH2PwEM4YA8Zwv9/1m7VHLrXc78ZxKjFGcQnZwQwMVLUbeiktRhRt6NjpTKpHFUAngibTZpMQIiGMpXUcNPfoFjpEwlUaoMhcbTjrc16nrj/Nh1XXpW6YStPDHCJPMvkMidPZujvc3zEwGM1Ty2nVWhvn4qiIkHM436ynm6MHSJFKe0fsiomQW1NMD5WJKkaBlr0ZPGhGDw7gEFiXRZ/DsvcFJ+ZgKvBQjwwP0z+7kXEK/Wpos9RSOokpBjN3dAEHZkQchiyqp+MU1zkwE0BVi7HrQtrdvO76vrWjqqIDB07cnabD65dfP3v2k2eP3n/1+sWkJSXOMVRpl1cXd9c3q+2WQtkf9t99+82jR5cxJEV1A6KozhyySzWvhKRQzVoMkTCAP3jJS6u+DOPLhsUNgMwVSMVmDsGAMGQTMVADCZBdGhGpjNoqhY4cAybTxsyijcO61FG1cuBaRm3mKAGplZHZkAHNmUk9UMzRVa01FSdoUoyAMGhrCGpSCQNzMF
cwHXJfdSSOAC5aEbCqOjm4Rwh9jEepSN5R2J+OfQeCbmTFKyY6jceUA1NCCwAN0YBoqrLpMiOpcYo5hNDmKXcdBQY0A23cKta5FdV2fziJSiSV2kTUCQN3oLA0uaScHUBawYVaC0jEKaSpjOCKKGbedVmkuSuH2KQwYiRSkZiiqLg1I4xobhCQVrlHREBdxSSOHrsQu5PNMJuXCUzXm95LmU7jat3dnsapamJqUyHmPjBS2K6Gm929qaVhe3l2frE5e3nzYjxM1j3MRmZOXWaprczmLiImajG7h9YqgasrmqOK1WrjlJc8Zkr9auXANfTulAC1aqslBQrBU+gg4HGaI/SxhdCxgjHQ5nw91fnaXVcA9eZXf/A5e/7o8gfHfQnvX+1mem/7pFtDsaNfnvOQbq6v13lrctp8/FGa8Hh9WK03w4r7SODJlfow3L9+rVCG9ao4yDgCWb9duTeZ2+WTR/N4GPrALuOpTqLVNFFoAlIrRrgvhyaKDNWrKlFiBegoiDQAb7VFStsYBZniQIRQLWOoc4s9Bg2nuSBbUwMCEyHEPmRFBgMTA/fEAcDRnSLNY3FjVe1iIF6mbR9y3B3nw2FC9sBI6ITAITBCl6O7i1bE1hrOqhhYUarXuXnCTg0FqQIFwjaPmXvmJGoEqKCIBRBKOyYa+khjQXEQg4ihqc7NCJ0zonpmQgADGmsZ0kBoqIaRkSwg9MACICDoMUCQWomiaRPHB/CKYxtnkhbDYpjDLschUu4TFGTQbrN282oQI3sXMSUiCAQUmUPo+q4jSJxESQCJM2hNxGikoDH1CMBg7oLSyLzVGmwGmQChNFEH59AMqxoQYQdpTXlDacONFObiRUlRLc2MYI0ih5QdSY20tvvrm+VTIK2CWUwRAUyaaTW1OtXcryEOC9MQAzkIMYYQOUQOmWJHHABwEXvElYgDoEsrrbbaAB2ZiGlpa1ogq0wEFBbnBi5OIiIHpwfvgD/gaMgQeSkgf1hIuJtJWHAySycSR2OAhxM3Oi6WVmpqiOSA5rFaui9T6lf5tAVEY/AAtRzAhWMXUlA1TLEBmNZ16CNldiA3l1LmY+wSQUO2qijWu4Z8cYEBq96ieUBmJJXqYK1VQtsOW0HCHlfN929OLAHUhhBT4KKyPGfdfl0kysRUVUyUU2LMKk7MbNAFnrSROYr1BsFUOQYCB2BidQCARIjuBKjggGDuYqjuiixqTzu+ZFgx9UDNMREyQCR0A2RMHABUwQ1RxIjQmpg2AnAgzpENbDehTNSaYKAU3AFjMhFe6OJIYgDeIiEtuUIDc2BiNiIz5iUdhr+eDRDITB4a5dWCelarBqKGS6EGODgsFU4E7t4Alw5wM6JmwkjgHgCWte/yiHIpCIYYnYKhE4OpNpFpQmhWSjW3pXyeaYFSMJAH4uVC+7Uq8euNOzGZASA0EyJ88M29U3h0GZAQVRSJEMhhcYItprkHGWipjgV4l+1BMFsACo5IgO7mDgoPfbIAgEsBmvmDJOX4a9uRuxmAB4bDNO7nOgTc5m4zdGrp+m7/+PLJ3fGuiyG4xRRrqaVVjqELHDsY0Iq0oUt/8uX1y4mJ4ibgmi1zPCo1kyZ1NoTUjwAnhEltkxEcRTWnNBGqAZh2udu7hUBd5FiQjNw8cUCisZTINJa6BBrM3DHEnEXVTnPmOB3Km7uXlxeXjGQMnOjm7s4AUH8NTln04Qc7Kfz3v5b/J3Bwp8CIXEbp164FapEDzJuzdcJh9/bmbGXaGsSECJxCE2WkiBgXN9cCkYSHGCwApdS5QkcYmNYxTqUC0VjElu0oLZVhju88ZO6GiIEW1g8Yp2Px/9c//W/D7370v/yH/4vUn5tPS9XdwyGcFzz/Oxj6Q2bUkRbH2HJ4p8XU+WA0A8TF++bvbntuzMHMHQwRH0DUCL7oo/RrdQkQ0QGR2FWWi23BuQMulyE/vJoI5kpMgLTw5fAdvfEd1hqcaGFMGIGC2FxOABC6nENerbbL7RykuCO5mil6U5uPx/3t21dvXn97t3vebZ9cffxbw/mzlFcJiMyZ2EAAlucBqpubIjgzBcJ+e34Y9z/78d//43/0fztfDS/+6E/s+duGBxHcfPoZ0Emu36xE51e3w9XZeBhff/N6nta7V3fv/eZnAsiMLtrP06+++P0f/PR3m52Df0/hYi63H3z4o3p9a0eDPpT5REml3P7xv/xlp5WI4vkqna/b2KLK9Zd/9skH/9MT4OWHPzzV67dffzvI4/33xycXjzkN8+k+5A/x6vya+6cfvH+1Xb/046/+0z/gOLT6vNmx4PnIc0nehgpZH18+on59cfVIa5jenkSwUZp1Hvf3LuoBY7eqquiIEEDYZ7h5dYOAT947G487rq1Hfn77cnPxuNZJ5xoDITq6zGUccr9ZD0x4t7dhvZYXt9Ms43EkU19jnWsgrAaEZPjOufeuEw/elS4+qEWIgBCYTmNJw1DqHDse52PioPNJ2okpzeMpriBgNx7GsTXzuL7c6nQKcSAAbfs6HQmGRCi14eAUggFAgNU6n14eWvWz8zM10KbH07zdDJtt/cV3L3/04x9+/+3NKqfHT56sV5vr2xK7zGEG15y7Jh5CVLHwEFwOKfa7+/3lszBsV1Knzfn67nDsUl+brIaz8Xiqx1am46MPt/fT7fX186fvf5SwULt+/fLLzWp1qvMqhGrTdP/59uwDx06P2Mabvt8kOZLrs482TR6S+Z98vHl9c1hz8So3I1ZO2203zoccgamj7NacAGopAU1ct+tNc1lv1gDu5EGCioPUuTYMpLUxuQq8ltvaat+tbt9Or8vhT3/++SqcZ4wJZJWHWuZnzx6d6rS93K679CrZhjl1ur5K+0O7Ky1vL2oVyB1TmCdl5Ji8TmBlSmscbyo4xcgxspghEnFUUwREJwIQEUQC5iaSIyMAETFwYBQDp6TlEJnNHhrOzWiBS4KhmDbT1J+VsSIFNa1Vuk5XCc0cgbq8AhvbWCKTzOXis8dff3/jv7j97Ecfv/7294lRveQQ+rh6dvn4m+kbcaY2//QpfTWVs+EyBn91d3j1ZvrJ++erVf6Lv7zRw/m//Mc33TnFrhuGEJyGjB3jfCrffn09XF7U/uLFn96keRDVal5dCW0zbA/jCTiKAzCc9IjIT588uT7uNNTzzcV4rOjAEJvoNnRvdneBre2Peho9b8Nq5ern8QrnfNy/LQ0S5vVq1Uqb2tHFnONh3KfVtjSu4lorx/xoff7ly2+25+sf//DTf/Ovf7/fnOWu70TXZ1uI2KY2n+T80ROK8vSjD+rcQNu261XpcCjkPKz7BoYB0LTWViYR9hhxaruVDjEMjNgzWB21lVU/9H3Y3Z10hhT6dNVtn17Kqdxd3+aAsctOfW0jFVrn89nn8349jjd9fzbvsRUb1r0qrc4vrm+eVz26mVNWtdNuP8SwXmcifMd2h34YpIgDxEDSAB/ayX6doV+Mzyiq+JCNAgJGdwADVQBaahRMjQiXJP3igtXaAkeNqZS6VKSDCTrmyBQIwREgIhp402WH7yHFpk1dIPBw8UjrsevXh
90NKGkTZI4xlVrur797/PQH7330wXfffceuqUtyVArdZnPRmlw+2fDN/vbmZrNZbbarqU6IoE1iCO6C5E0151XRkQjVBN1Moxsu3R3LNGlaUcjJGdNUdyEECEmkBQ4E4j7FwCoRHAIAWnXz6Mk9GCISGLiZmcxAJdJSBE1uyGHViiIvWyIi8hQCEWkBU4rcPQgrSFWamLkLOYhUprjAfYhT01NpRd1BqzHE3GsVNs+ZazlECsUaOlor1S0zk4Q2T9Qjdb0KuRE2FJ0pkMuyRPBN2qAVIoalrEggYBCxYiVzMmsEFjktnjK0QNQRhxSTtAkMRTGQqSoTm7Ym1QFiiK3OhAoAgGwmjGjGTRugmBmYxTg4GpqXZqZOZjFzUyNjUzNzTAxuOlps/G/91t/45uU3x8a397sEfnGxJgcALWOV2loTdzOF/djWXVanhJRiHynVNpsZB7rd7ZjC3ct9v+o350HKuwCaqBuYeIqpjSd0Z0QTdZPAhEiszqjz/jjfT7MjoaeUnHD7GLu8DQ5BSph3OFVsJXY9OGMy9vVl9wNUVJVaqlETrQl9c7G6fXu7O1T8wbNNxzGsTkgz640cPtmuvvnFN7dvDj/47GMpb8+36f6bL9LVTwcO08sX376+3Vw8e3tz15/FqSjHrVQgvUeqMZiZTGW0wGm1OZZ97nLqsozzkGF3fzjUCutgJ0BLKgUTqrprCwSQkUxzCgBQxdOqH+exQ2X3nLIBrDmcVLGNaDUQJ4LEnUCrWoywmSu4ag0cAqdaWyCLgVVN1EN0IipzgeqRqCEisgM/1DcDnEqdW4mcchcookoNzAhkaqIWiRCo785EYbSCKdsQTrj3BhGXLj1qDaiLQz/Upp5Z0WPMtbTMhBTBMhJrETJKbtpaAxOVQCSkwS0gMZI6mROFTsmQKwTv+iUvE1s1UsgeyIMwhrgyj+IlItg0SvNAmF1jphDjOMnQ5ZApJsRh4EB9SGmem0g+v4BVF71sLs/m/U1rU4eQYupjSshonCkThGqeLBCQQTDigD1ARVWrM8qJTCMAmhRp7lAVAUnNxURVA2FBT5dn8WzjXlYBnHScmtegDsLACTerjmJfjq0eT2U3y/hAbyzTBLlLKTKRM3NKS1O9qYIvsh6Bh5TPAIE5QMhETOgB37UTAaA5uROgm5qqu7lba42YVYDpYZf9rkocAWFZUC2LcXB/2FIshCMHAFA1QHWgJZWCQEtQWhHdESggZwWk5WH00KIF5MhgDABN96X6aki9Z06oJ3BXNs+EQoUqYNEQK0VCD0v1Ac8MTsFEISdMDBNNk4zOq9Cnu8Mh98PhOF7kzud7zgOyQmATMTOkUGvBJkPOl9swhoJIpi5VW1JAqrXlGN+VZrmZMkCgwDEAgpqmGKa5qXqOiRNLmwfmKw4rbUciAEAzBjTHB6eJKRIycZOWmAENIyf3LcETws7cHRs6Bk4pBAQxAQACXhrjPSAHNm2MjgSOSDE0JKdgnImz6MzoBNEBmNCX99hbIBJh9LMgLaAvjCJHCMjkzsYubcEdEqGjL8ksAnAiMEBEF48ckjZyepgJyM0cAdjRzYEeyrzZnWFpyEQ3YCc2ZwXW4M5A7AhuIAEN3BGaalAOSEhBtJmhABBzBAKHwASAbgauQFhqIXggUyxHYzdHBEa2RVx6SM4tB3+EpfTDEdD5wfSo7+qpAJAWZvkyPoEjY1hUHgAIxAtixh5AkIBAD58dBwdy0OV30gK9dqd3lVi+8JWInSEM+dmT89im3Wn6/np3dnV1GMc21UCYYm7SiCwHYoRVTvM4Bqmnuf7l28O3u9ZC/3hIME/oNPRpP8oQYi+yr2098G5q1yN2FmbRnahTGNRnoKvVxuaRglWzo1hAmJsM665VTP1qnMfH2+393W3us0ETxCJ2LCIIfdcVlX7oRD11ycVnnaQUJ/j8+Qtc/DB/pQgt+bN3qTOAh6DMX/MZOfhyN0pDEqBpLqlbT00d3UlmdVcSW0hoKOBq1gwlEDMHDiimhuAy1YY5zeaiNqk78STaJcOAoMvcYuCIy9v9sAWgB4Y8uC7exsRY6yfr/H/6P/7vfvzZ1ahW5YiuzGFRcNzAzNwNOaK5qhAHXKR1QFN3AEIyVaJ3xkhf4FROv84eGjIGB2AO+s4C5+BM/NBetNz5AJDIVMAcFz3dHJEJydzMDYwecm1ExEHEzBzRmcO7Ze+ix8Fy51VwWwzXWqdS9inBsMpnl482Z1fEGSiJwek0T6dxmk+tHMbp7m73/Yvnf/nFN3949OPm6Q+ffPj3Hl19tkrrBMwAgRFRAXSBdTm6mpiD2pJCtHKaqBkgo+dR/Pa0f333Mg4X3411/bPfvT17eo2a1v0v/vTnETZ58xT7fjffTdNOgJuTuXRo5c3XX/7xn23Of7w7lVO9JzbUMYz3Nh5P09hQrm9fH188pzffHF7/Ig/l7DJvnm0hRtMWM6rd912vRpZzqfXq8up4uPnZ3/7dF6/ur54+/e75r+o4v7nZPf3Rb9yN8PZW1pcf87CeygnatOovA/b7l7fj989/ut1+/f/9LzLiZ7/5k+NYu0189sHZ7v7N9nKQegDQfpW01el00iJlLJePLok9JQK31Sq9eP3akl+crV+8ukYPp9M8TW0Wq02OdweyFGO/2WzRTWpZ96FO8ziNZrA/zOM4m2iXM1GgEJrK4uPFvyZ4/nWFHN8R/UOgKuoYELiUihiAsFYxdVNJOQHHrt+20i42fdXxdNr3XUL0JjWFVEdypfX2HAHD0ImbmJrKcX9g0vOr7nD7ery/DYFC33HqlNIPPvis7zZdDh9+8p6RqSkDpIBuNUUOOZpDDIHcpZYUk4PNdeqHgVim03HIYX93X9Sub28RvdZxPF2DTU8fD1Xeci9PPn5moV4+ffTy9vXFs7OXr395PL0o483++VfPtqmfv1od/uIZff1+vt3I151+5fXLl1/8q1df/Ony2ry9vf2N3zr79CdbgAaMrerbF29QD//W3/vhNy/f2FLEoIbgm3WXI42nORKy8niQocepzZNWR/QQVmc9RVxi0qUUFT/cn3ZHc9qinTfL5r4e8jTX6nB9dzuOhzZPp/1EasdR07q7vduBtm3wgW2Vw6rLxG4gR5KRbdJqjq3lcUawLAWlIWF4ePogOdKyaCJw8Ic+ckMSAHcnxsihtja1CszqJmZNXd0BUEXcNKZMmJgjogeyHJiJc+4INIiboDkysYmbQiCfx+n6+v5Y4L7SH/z8F5Ch34RN12UK2uzm9joAzzut3P3i8+vHV++/eDMDB+9TdXx1X754vUtnQ5fX330+3t+nyfwHnz7ZXob+PF2f5Obop9odT+n3/stfHr63wMFQKQCCdymiNBJdxQgyq7X1tt9uO7XTJkBPPk2jSmPk/d1JT1JHqe203fbiJeZgpq01dCi1vr57DWA5U2tzyimnxORIOHRJpZ6tuzpPFPLFox+I+5vX99SyjPzVV89Xmw0wA1HgVE8tKOqx3jy/Dw3evni5GvJ2fT6dTvN+mg/jfJrHuTSF/Vy/efE9NO9zn3LMIQTkdTzzCofd
gZBiwGFI/dA3gWk2hMShS30/rLfTOM11DgFDH67vb8CFqT/rn9baxrKf22FYx1evXrVS1sMq9Svv+tf395vV6nJ7Nu1281iR6AdPrxhVx4nN+vywNshdT8SttofOIV04ZIsfFg3A3MR12d+KNjBTrbbg9IiX34sIzIQIywW4VA8zM5CHzGndh6GbaxNVtaVNQ1oTB2gLIBDAgJqUKoWZQwoCgJEFsIr3q/OYuzysAAkD9cOmNfnu+Rcc6dn7H85VpnmmgFYdPQMkprS5Or969vT25u50rMydOcScmlS1pstD25UpcIhmjsSG6qAU2YFEpWkDImR2C0TZRJaloLloE3ByQ1F1N3MxVVUjTu6CqE2qOTSpASNzNvcQMqC7SArZAIHI0AwMmdVc0dUFqJqV2uZaKrjWWt0t577UhswOFELXRNWkSVlGKQNfGn3RkyqYVmuVgWqtZq4ujDzkjTmBA5gTuLei5RjYEMyAmXKMm6Zo4G4iy9Sg5moMFpZIiBsgaGtzOa1z10rLMakaAMZEAcBNqs6ISkQcogHMdVYXJDTzwMkAOaSxTXGpPyd0YkMEYo5ZXWutqmre1EWBHBNxQGJAD2y1FpXmInNtv/rmy93pfshh2/dPHm3d7W4/7acm5ucXw9l2RQCOtjnrOLEBFLG74/Hl3c2xzH2fcuTM5Gybi6GY1nnGd4KptMaIgaNzNMDapDav1ZsAeAiUCKDNcwILoMyw0AFMTYoQxYTktV6//vb6+uXxsJtPh1rKXEvVIm2SNuaY+5hXoRs4t7Hcv74jw8Pt/Kd//s3PX7z9Zrz+Du5uN9N8Odqj+UhF1M428Ju/cWby4m//O7+dn12N7qfTSSTsDzphxotL7fFk+329qV5O7bTf31y//WYqd5jpoPNRpAir4Hx/lFMZT9PUmpAHZnJtMtc6ORB3PXdDdVyvzyImE+sIZZ6qe+A+QPDm2Rml4Tx2Nn9yfh4IKrSjTXtr93U+ajF2DOAERl7BIOKok7gQQgicukyB+i5vV6sYOUfqIhGYNJnqgkC11RCHDgMZM3LAptVcFVXRqjZTUHFTXeVAqJNqRY6p02br3PUUTZBjFIYZxbJKZyUK99SwNq/9MDjzXKW0WqvEd6fiHKijBGruy3nWmjQn5y7yakirFQR2ZiXPiaNJRgjoolXc5lbMEM2jeRBZB9/2uBqCB+3Os/fQbcP507N4se0+fI+urvL5+XBxoSkTdYlzx2HTxfMhb/s4xEjEaqhAQERAuGysEdFLtBqtBqlZSm4nLDdY7qGdaplPsxynWmpD9ByJg0X2zIjgilBSkGHVqDcMALkpKzAEDn2HOYo1dZ0Ox3l/4HfnA1pefKnqFmJcEmLLXqpJnUWKqAEjJcLkGEzNpWo5yHTr9V7LbR2vp+PNfLw77m/n01jL3Forc5lLGedSrImpgZubmtUqgMQcOCwd0r6cwM3fdag9mIuWn3dVMQczN9WltdkBgEgADPThXAQA7rTkd1wRkUNUppmaDJ3EPFsDoCp68mJdkAzCiCE5YsfcgyWt7lLkVH2afNzrW4t19OMBd7f6uoap8dHW7ZaPdN5xopQzozM7hRBDXOVuSHmdu6vLs7xZvbq7dXN3r+oGaGYI0KfO1R5OxLRwqfHd9221FQQgc3M1xEAdOSbgK6BHEJJDIEqMATUH6AJ05H2iPkJPsI4hB4yRCfAx+c+6+FEOm8g5xJxTDIwIhJRSyinllIiYOYCTK5iB6FLGgAgYOUAImDrqsvgJ5ZSBwdmAqgm84xo5PBrrmcKAoIulKRIFNAA1UEAGJKRg5giIbvBA1CVFEHBV6d2zeFBCJwUXEKOayKMjOBtgA7cH+o+hA3kAi+yR1bJ6MoTqKBAVQAoBAqCZGYCjI9uSV3QDW9zQQGikTUqtY6vHOu9Ox3GcTqd3dTfuS7kfLaVmpu7VXJZ9z7uzFaIjUwQ3cHMwf0CTL+wYW2KC7oBEDrY8ENXNXNXVwGyJEi2oalz+qC7T1rvT2/I3CKA/lKy5IgEzAdjF2XDes7tOVafqXR6MyQJRzkgBkN193feXZ6sPHl1Eb+dDJKB95V/d2olSDnjRJyJMKanjKK2qEkJCPQ8WI00ceOhCIGRWJKPQfOFnYRdZERqxGxBDM63qszWNPpaJAisSMjN5YGQCN6UAjngotV9vUl6fXV5aUzboUjydij8Yb/4HX/hXx9i/bi/C5QOOzQERIsEqhiGHq6vNWC2ndPVoazatc0pMpgqAUiUgx5AmsQbYHBBApZlDSh0AuuPcWg7UMSHSKDoXEXFxdyRdYNsEQGAICqBISkFj0hgLKI7H//BvP/6//p//Dx88y6dpFHUmDBRN3UzVxEwBATkAIhIsVkID+6u73IPKiMslYUsnygKPoaVRxhzB3M3cAReM0cP16Q+hIXWFBedmioQP/ZK0kLBNrSE6Ey1QswVbow9yGC7U0XdyHMBfBdCkqbq2yhFj3+Ww3qy2kckRpBQRaXOptTFZiNTqNJfy5u2LNzffq+hq9cH26Ser/iJAl+mB1I6IHFhV3disORAABgrgZGpm1hS7y6vdOE5l3J/KR5+eU9cG6V8+f31/mKYXOx1vP/idv/2DH/7wv/qP/59/83/0H37/8y+m0ykFf/n5z382/wNK2Stgrb/3T//Jp7/1H2j/VMsvMoyo11eb9Pb5N0//7m+n1TqfP3nv09+p92X8+qtVbl98e3/26W+sHr13up06YGwNqtbdIZ+tZK6RtwDD/c2fHm5vnr73dHv1KF9t59v9ej7c/sl/u3n8Qzrf5OFT6Ae7u9XD/euXt59+8uE69Kfj/RMP/+K//jcXT86++uM/W4Wo+/3u+xcX55vreZzu7+r9wVXarNnRZjCFm8N+quP2rD/etruXd+HQra4uiMPq/OJwfb3N/f3uzt1SyG9udlKdmTZn58fdCckRfD4dATB3wRA40jRP4zgu65dAvFDWzd+psPjrNNqDTI2Iph4y7cei1ZiJwFIEd0OwEIMzA8+iU7cachdzwnZ/PO5Djp0FPtbjs7P3XRAQxI3RobqLitqw2vp48/LV990mP/+Ll9St3/v4cQzx/u1d9vD18+8vVp+RhtNxijEz9+PxbWKdjvXZs2f73fM2i0VBoy5tztbn1/AqYMcpt9YGiHXy+/ubPp9LBzHG4/70+PH7ATnGcH+9Xz8aZJyM/fZ4Xefj5dWT5PrJ4+2ji+76+u76+Xevnj9vFSBFU3P12mA4W5NSq/t3n4L0i7/Yv361J1g/uji7f3tAiHcn+W9+7+dnw6NZMG87KVrdnj364Dh+GRN99Ozy62/evPc4/d3f+eA//2d/ArbmnDrud/M4hGxqHJXRi2ggkQLVqEuBB9KjTUdTh8i8vVy/uXuZWnc8HQqQaRNKsR9gOsUEx+kWvC8F+i4OA0zq6349jYciwslWZ30rBjHoUTkQI4hCoGAGjuYISMsTZtGUUZrGGGYdz9MlEZ/msspMhMxBHZnY3TAwOhgYonHAuRQXY6acaG6TKpcZuU/c5W7TFzMxRaU2+e7N/r2PH1OG7bP3X7z9ZRv
FA8lkVht6Q+TPPvn4V6fn//4//Pf+8X/0z86u8Dd+8vj6D+/YbXc/9jFXOV5+sMpdffTJDNfdm/0tP9oHCL2uQ8PieP/67nf+1t/BG//TP/++qKXAGNJmeyXT1CW/6C/vZDaglPoI0nXp+n5mIr9UMvTATmEsU5/yZtWJHgDbsOob4TTOIWG3iYxoambad/3r+/3l2RZDd7HaTPPY5by7vdeiAEyEp3rq0qYKXDy6qjJth/NXL14w0Xa9moo8eXL1zeffrM/Xx9F+87f/zts3N7/61VdP3n/68Q9+9PM//LOYIvV8fXN9drX+4Md/65f/3S/GIsTMFBCg77sqrcsx5EgZ7u92/dmGOB3H2dlDl/oh7Q+7Oh+6rldvpfmwOgva1j3P484NVv2Vqby9uV73F9cv78/Puzf7u/PLi7PYBxLP2l1eVM1n26vzi9imw8D97v6k+sAqWm029TSBGxGiAzMjuGojJKSH2pnlqb2YtB+M1Q8/5+Zmqg641KKZ+UNd7+JoFEEkDoS+dH8qBey6qGa0VDMTumOVVk2HwOBgqiklVVG3nHurMSKb7A2AXKTOwBhCDki3b757/OyDR0/e//77b9arfui7+Tg5YJ0bBRnWicBaq5tHF9P+4ATMHENSMbPirTgocGAkwqBWyB0aoJELuruRmiljB9W8ibMbWN+v20ET50KziCFnlUoUwcQYjCwS1zZzzIYEEJbTT9HJWF1oaexWWIzqtKx3xCpGptBRJITggcygqRFhawKm3iRwrlWcyX1JgVBrlQMnzmMt6qOZRUpNKxEDJbIc0QlB7Nj00LRLKSpwjFxpipkcbDwempCpQUAKAXxuWgIHpBZozTGVehriRlQJU9M5YMDlXY9UdFb3QGaihDFwh97UXcQAHTigGyG3Vn+949JWMQRDF3NVYwyIYA7WKgEAeuLYwAH5NE05smoZ8uCmkVTRhjjsbooTpjho01pLxBw4zu7Ho9KxPHu0lrkSOMZgbofDITJtt+vTVJzBYzhOZejib/3s09dvrj9+/6O/+PyLnchms4G3fyQ6AAEAAElEQVTnAABu5q0iqIg6kDtUABJHB+UFkQObdYfEtao7T8UCQOq7kNe1oU4uk7YWl4NDFkOkUkeTMSBHhwL3kVKKPSbGzfb+fi+nYyZu8/j6+rS/7cYn69/+2fvpuM+n8OgJ3RR8df/Lfv8kpXhUL+sSh/Cs+3H85e7u7dg5Hr/5ltuuu7y42Kzu73w+pSFZv3FtNXbGEVIXTAK1enbeySQNZQ5UwEMIyWIVDRCcygziRrPMKJFAeoxo7oxFayGPFLqO22w59ZuLixf76+P9HYKBwyhikRVdAdx1aeG0wIJKQNz1QtRUAL2ZhBSqeKugVRiN0dyMQw4xVmsOzjFWKQup3UyIWNEatC71AYgwCmBirK2AmJuQgpMGDiYutXU5cDYg3J5vDm4iCoGIEoUUY7wdT8ZAK7JG3twYUgxVi4ARBjVrDl0IbrIINU4hICozp+hl7sgRtGMSMdEGQEiLb5tkOnWAhEqk/aoLMWltgoTgnDmkkPpVd/WIIU83h/mwM0IXI4c+RMmpSulyCJQbhRCiiKHDwqU7turgOVAMZFbBJjN1P6nOCDxNpXloC2CVzMCtHRG9z3HReX0u4+EQ+r62YAdC6YnBog9nfVwhRGtu1aHO1ZbCxYfjgdo8YQxENI8TEnBA0GYNIUapJYRkbhBjSEk9tDpO88mtLeEioodQxTTNol5LMxVXV1MTUIdsTsERfDngEAV1Q6dIAZAIHInd3VEXncQXlyBHJDeTZaA1UzQAQGYqDxyQBL6gQBCRAMgQRQ0Wn7WJmXtmQu5i3gxbabcuIk7VCR0TdPM0IXR9vDA5WhuJu+AdQG2lMXmR0wl9BLd8dlQUk1nGY63Ptj/gcGaCqKZzKT4CRzHiCMxY1XLqJm+b7fZUVNlSjhxAxRwNkJjIXNyRY3DAeZqYOMUcY2SimCNMUloJaEOXbZovGT8d0mx6a24L+hsW5wsxIb5rOkbE3PQRwG918QeIa7SIlGKMgdTMTTFxCKzuYLZgBgMuFXXITotpIhCqA7i7Q0wbljFoyS4NgxGqERqiIVESO6seBUYCBTRc1C9HJ1ZQCpmqEjgAqTYiYMy+ZH+YVZQDJG9nhgf1GU0AAJ1MAzibMmZDUzRgjuZEKASN0RzRIAMEAjBXCoBasQABuweIYmhkhq5ohhYig+dIhozJUWoTg9ltcg0A4M7m8K7ow5cZxsFNgYCQlmo+dzdbXnhaWu/VKyIgsFojRF9CQkhLTR040IMrRonwnRvpgWm9FJMvdpXFSkfIi45GgObLgR9oYfuBM7GZE3Jrxc1WXfc3P/7w+Zs3p7mFHGNTcHfGs6sNNgWRdJ6lNGlVpaJbMfn+ePrTt2PNnZTqrod5WsSIqjoBTmLimhKhtEQ0up9EO3ZCJwcwBW+jYEZutQHDrH4eY6BGiMQo7qY6/P+5+pMnW7YsvQ9b3d7b3U8T3e1fk/nqZWVXBRQKKhgAEkWhQFAkIZNoFM0ISSZpopEGmuqfEScymWQGM8mMMkIgITQEQBAACwVkJbIq+5evf7eN5nTuvpu1lgYe96GKMbsRMYh7jvvxvb71fb8v9ftaiSg6R2JxjMinXKe7W1lf1VlvX11rrbe7HTAKBWU/lbqoYnqvU3ztH4J/y7b+Opd2n1FDQuoAEyA36IkatqnNvKIglMf9qmew5qaYUqDUytynpKUCibGcco4MSQgBplpjjIA+EyEJe1HzViuiGbhRWKKeyHgfKrw/3Rq4By1Xkb77zfP/2V/9nf/Jb/1GiFJAhROjEDKALxBPRECme+4V2D2j2hoQLfxhRCTH5dIiAAO4/x23hSX0VjEHIllwcEKICGoG7urGHN4KPgT3nWj0NsNm5qiquMgCvvQM8MKVv88QAGirgLIUUf4pqci1IFqXRAL33UpSctNcm6OX+bTYIlxdrUTz8Xj75vrF9d3r2G0fP34/xgcgUZZBRxsBMaOZgS23EBAG9YZ+r1egO6NQZJqdAsu6uwH4xnq1gfHw1f7d7/7GZ3882Ue/evNP/unjf+f76+8++86vP/7qR/9I5+vPPntO2/N895KtEgbmeDy8wLP+nd/9n++6cPvqi4S42x0opP7xGjYbevL06W//hU9enNZpS+A15+Pd+Pji0bDpHUJH0Rp++Bf+V62/xNMuih/vrqfj6ThO02mu2s0e2p2/vP1qvel+9Pf/27/8N5+SoGJsD7cb3IL5zYvX37zcvn791WGart45f7UxSlA/v66qsuo0pLRewbj/+A//pVd99epasV9fXYVhmPdHLUdQkJimNp1dxP0pH+6OV48uTmWsnu8OL5vOjurgZZoSSxdFtTatKYS7m0PXBXdgRNMGDLW2UtTM3ReB6C0bHr/OA38ty+LbNCaISFPwZX3RSp25ag4pWZvUNUVWVxEk8uN4kj4QEwWIER+GC7PSVEMnZS4MU9q82whBEjT//Fc//fyTn7SiP/1Xv+LYffDrTx5/89ez+Yzt4nx7HCcHev7izWrTX9+94Y
AAcg0nrIQaKXhscPtCkPB8f1jXqZIYKvZ+yIF90eAAIxBoKbh0c4keCaJ4IVcUQv18J6bajSitxCjrDAICJbsxHxAtj+YepjEc7M6zI4HFa32npB/gejIkkdQHg0jIhASeweBsohu/66z+NpPtdQEnQgD4Cmgs7ILCkiMPqMfQAB4N+a+cKRkd0U0d2ciGopSNRaYSGdl/vD3f7hY4LbaV+3m/Lw9VcU/HQ6Px3LH/zsH3zz7reboV+mp2//7S9vLm+//81vBWYsCvN0cfXq/uuPf/aP3h7vj2H5XM/ksz1MOtBXH+tP/tH/bPjr715/8pPz1399fn9mxVJhWXxI3cf75zjVp1++r++O5TauPr+l3PoUr193pwf89m++/rN//J9/891vLtSeHw8dtfvH3/30j39+93xojxNurwS70/uvOhB39oaZ+5KCmDafve76/upHrx4/PJ8+vv/2l/96vBhBF3C7uh1/8Zv3Pxp224ub/bfvytlUmyBe7LqP96fD8fzh6blOdQo8z6VUaxaS4kVznIWYsLYqmq1e953kdHVxoQ2JNvePT0WX+/0+3gdhAMjhfN5dDhfbHVM3DBsS6bLcvhoJXWic5jYtxX1hZDBDj0xCjKrtePx4Ph0Yk5k/lec89CnLkFPqc0q5VQwM7rjPAyGY2iox7votSVKc06ZrUcICAfMwchrKfNJSAIGZy6QsCdCyJKQEtkA4o9gCAJ77nPuLw9PpeJhVo8yTiHCFzPni8mqa96H64d1XqrpHZpGynLs0zueptQJQTf3p4Wkc+8s3V91m+8ohDwm/9FbqaV/saZKE1nw7bs2bltOYts8f7ykwpaStYUKnfx8zNSNJydSXVoCyBEv1X/3LX/z9n3Kp06vX+eH54S/+4o/+6//uK86dJqyK26FfWrOlbbebyRbJCIhKkvKwd+wlnZZlu92dT5MvRS43gjam9PV3x3/yf/0X7x9je32pV/L4jefvP857O58wdQmRqmmZi4JJSsfzfrfdThphtgC1XM5xuN2+6sRfvaZS6vOjpiCPkJQinJjdjAnNm6mhrHC0td0DiMjk4aqwBKJZILmZqUffd33uSzmpKnDX3Kq2CJincvnm1dP5+eLyzbh9pbEfuWtLG3Luul5Qtn3/pk/1m9+VvcZS/+Ann9zuXv/r33wHxG767tff5debv/9f/N7X3z7Oj8ex31rmd/d3VxtppwbVvv7FEzvsT0dOdRSZCgwbmpdQdy01J0byMWOobvLF8/6eZWCL4/1ZIwG4S1y/ulSLp9OUuAOAlLmYRpSL3WUEPJ2mN68uynwUSqY2TfW0zNefvH796etf//rXbz65Ph4On99+WdrD1SevosSGfNtvFl+qF3VD1GG3qWGdgGq9eb357pfviEfZYASfpmW72WBGj0AmTB2m1CcUWLZMOXOX4P3Hp2GzaYjH/Sk8nNb1QwSAqSrYp6/eBGmi9PT0gUh63qkXFMwiHx8P4Xpxu212ng/Wy2DNlOHcbBhuHo+njcDSypj7LvMg4lYMqOA8bHul7vrm4un5btz2Q+73+/sAqMu8u7hazhpB3Th0vTx9eOw3LwU0NeeUtHmbl34YmQiIiMjMiQgh3IxIiNlM1zwauPlqNACwNWL9AthDc3PzNQ27NukJwh3K0rRaT1BrxYhpqdshEyICFmuBJEyEAYRmUKvWbBTkTZt7lsG9vEyrwxGRUk6td291KQiWaJdTV+rUTAlT6tiiEMDQbQ/HZxZIIiQRiq6eUI7Tx8uLN1sYl/lEjllkjkUjDJhxaC3CESNadSQgYsS1K25ASEgezbw6AInoYsTSlpmcEcWCmLLwEBZVLXWEAmDgWpDQwkIjDCNgGHamMyAtc+GUtDUBmI6ncPSQpam7g8WYe6jVmMyhVUdEEiEWjEiIEH4sJTG26m7AYeGWOjF1ZBYBjDIVXQ8rzZoFZmG3qqaI2tqBeaeIlEarS3id23lLm6Y159FCCcVdA0JLu7q9dIKE3OdtbYsa+/rfSSgiawqJUDzMXSOgyykQTY1TEqalNUCJ9aVEdl2fj4kImzZ0zUOviyLh8/Fwffn2st8ez/cYAoENHBjHfiCU8FBwa3XstpCwahFJGHgouttunp/PO06BWKZKFjnRxeVFPc+77fB0PnOS5nD56uJ0ngdJXd/l4eVT8Cd/eL08LOfDCdA+fzvebBMlaQbI/W3KtQOd2zw3dweQlCl1YFRKxuNs+4/HYJqzX/QbYX49MC5YHveP/bjjHMlHBuE62TIEXcSQJphLxZyt2WEqc7EWwSSuaiAX24ubm4vNzZASzKeqZeKUBhjaPE1Fz9NimFS9hTY1GnIvQhGN6ynmU0zBBtF8JQwynUEwM3AkUIHQOAPxxOjpiBEdugbNBR5Ooec5U/bqFkyIjBCgqk10YYTzQdOYDMvlJ/k0LqgJeei6tBQVxMPzIb/dcickuL3cXbz55DCd+qGHolqXUJch0zCc5doFlof5/v1v6/F5222ItyS51Wq1AY3VjMh6sR79ApYsabEGmJu6mo9jN9YcMyCQB459vmLOKXpSihUmDadWHDJ3w+Yip2Hsh+xtDunT7vKi3zC0spz7NGx2p/dDazjUPFRUg6yPe3XrkwspcaDYINz1xMZzP/TTomVZWiQiMEcLdw4Zgchzij5z7g2FiZGSOfI6Jgeq0IA9CN3NTAFTYJ+2I10kYY4AIYpiiVg1klkrwEXtDFWPzom4JzFUQwxEEyYDDkwNXxL77HE+nCAMrGwCs+RGjOQJsAfRgF4kr4lRhDprmbSWQG7F9qzqqWPuwuxv8xQMGFYDEgB7JMIMOQMxs4dVeunZSAAQ5yBgQgRATpj7gFG11LlEPXPqMG1BEmEGq1COHE5ma/YhDb0MnfSDpQQWWtS1ATiiARq/9JgwgsLBwVPKnDsTbOhe1bwMELo8d1TKorkfuopVfduiWQvmKt3SHsFasgDtwlNAdVSM2gljVIMlwoAENdxASBrWBQ5jP3rt1hl+ENV2DkyIQwQhZQ/SVoFLhgKxLKUApZtLeGinELsaNtwmj0JUMwV5Ccu5S8CBBJIYPdyU3DElZDB/qSMhsaMDs0cI0RqqcndhYgBbaX8RwuyBFriCSBwDAbMIRnBYGErq5qXIMEzurU+XP3q7EbFFz+/fx0mrN1pvxgEknUNk6Zo1sCCkQFStFcCl67aDRzDSyEhmi+rj6VnzZyDbIFQzCOuoL6sXxykQiMJNDdkiCYPRHOv5CLqV1aIAmjaJz6a2dm7YiWwNr9WzPr87ff3L+zu1vTkam0hUpA96PCKMzmHGaxUPGzNExieYp5EjGyCHmSMFIgAD9mm2L2Tz6bDRvv/tcywuK89XSCI81HSe8m5wbxQUrgQGYBD6t88FtPKJY73cEYACfM0Wrcm3cF/PN+Cw+uoIMV4UH+tzeRARhsRaCgIC9wAPcEJycPeGlFd4KAF4UIQzMiIEuJkBEkCEK9FqNIYA19UQ/ZLsZgwMUAA3h1jNWT+03VamjbmZ6pAlNBy5tLI/n/s+l+YAmFnC5whPgpsuHU71utcrESSGQlM
1lO5wPsOFiCAKImZYWjNLXSfMgBiYCMGZKnkwMQ/zoltkkpgAemAkZI2eMcgWCHbZTyHREyxp+eqGh9urz79TODY8G1xyllrGnpbSSGvuOrUqwTeOV7ZAO8VcKg5Tq95vj1ZAug4QSuXoFPRkswJ0QWhGuLpZiMnVXA2EkNzJFD0iDNAJ3MyHPEib0R2YqwID+VLqPLWmXSRYedIegEjIEEGI4KsvDIgSAgboGpJBwoCgl3jOqgYOEV4B8PBDAREBiZhQwqFppfVVfOmoMYSuu/yI8FBCQGRkDm9uQeAQqwyN3RXD3TzcRYKIVq0eIfuazlSlNRW1KtZeImmISK6N6WUyhIhEWJvRvxeg/TAqAnxRz7yE3DBeuniSmIR5E6ojXQ4YFgURQEPbCaKanpX7LvVABrCOvBHckCy8wepdQlArEWbVCNE9TGubSmtluj+8//jh+vUnecjLcpzK8fKz269/9/Gnf+cf/O7j91Otl1cbRSHZDrv0L//Z/0NCn/eTzvPmovZXNxZznUHVPvnpF9mPz199fZz0i7/4H30cr9qba+7O/nC8vO6mfUCfyV0uL47zBNQP3RYT1zz+1W9/m3ux5TzNiwdfbobHr97Fno6n082PXmvsrS1Fp5++/vPffvUbRR5uLub3wpvMCVOHwzDoMU6HU3/ZW+D79/fL01OK1l9dheTf/Pp9WBpuLz7/yU+Pj/ebuhzefw84by/G9999W8IPz0+nw+F8nl3guF8MoEU4g0Gse1dAsoBBOkjtf/mP/9Fn1zcfnme1ZUy7r776ziT9/Mc/+jd/+U//9E///OO778Vos7m8vtq+ef3Zh+8/fPmTnz49Pep5Gvrt8/O9MOSUj8fj1fVtq9NSzbQ0bcVl6HZO9OaLL07HCbRdXl4eToftMLa6IK7TTydC89IPPQD2uRchlE4bYBpx6FpTwUTI/WbbyqIW17tXp/2TaiME4THlXFtNqev6QbvRwkqd61JY+rla1bnfbM20LpBzv724sLpMh9M0f1fK3LSQMGEvkje3V287ufvuu93NRT9202kPThAhQpzxYgdLKeFWWwBJStIJg5kDnU77cA8PrWc0RfCi1m36siyqL3PTeVmGLgkLikJbc5mQID1+7/+myvZq+ITyHPYvfvtxSsmW2lHK3ZiH3rPt96fzGSqiYMoJ3r7+8tvvvjE94pC3my2j9R1DSt2QBGXcXliFZRmTmIL81a/vhbfuerO7eLw7EjnnJG6t+VyxG2R7c7MVBqvW/KQf/8f/2WdMcrqfHj+Un/39L//V//eDmjfDWAWOSItVAUJ86YI0DwwjYjcf+qHVmRAhwLSt5zpCWsO6BlDN3aHvuqIGwZkyg0s/gNdt33WJHp7vEtJ51qVYYqhF05CSxHxa/uqvfv0v3v3qf/O//V/9H/7X/8dlfvPL397z7UUBsK7T7fDXv30szc8V08XN9vXl068XP+jtp9eHfbu5ffXVNw8KdHt5/fjxYZV1YiiFd8xJKGpxxFqWR6cw7lKaTlPmfLHb7M+npS43u+35OEHAxWZ82p8rYtr2TEAkQBHmZabzuanHdjeo41Knp8e7676/7nMH9MnrT6b5BAFvf/zj775/d/Xq9XI4Ta2OY//8uM8s87S0hMG+/3iH7fp4LJ9++Up1IUIMd/eipblr6O764jBN6ow4VD1HNDe/ublprey2m6++f2g6X+bLVuL65vru3ePQdf3Ybftc1Ot0yN1NuBzPre+5F865fzx+9Xf/+M8nexCjOts0+c3l5vl4utpcj7vtw3JETsNmE3WBaIdSHU1T+vLHP//mF99I+LmdQjTMjoe915ZIzsdDB4YoiJxIGHAcB8kvE1NmcZsjwGI9z3hAWDgyIZKFApCDuxoSRNjanV5LyRZGRAHgZkRoqwpLCInM1qTzi4OhG7enw6nbSO7S6TwnEQRwa7W4A2oYkpDQsjTTAJKlqraSPddmFWizGSSrWQEgC8JgzF0CVj+7txpnV045k2M1bbZwEggATLuL22nZN69CqdZz4h4JBrosbSLkPg/T6awIHrGUc5e3LVp161IOVWJZdRZh1QwjgJGZxEyFs3oFCGIgJGyk2oioBSIbULRQIDJVx5KoD+kBG1p0aSyuRAheTUsiaIum3AcEC6oaILMkYhlGmdsM7uiOixPkze7ydDoFOiO0VmoBQJKAMXWneWHuhzyUphBgqu38DG4qgsRaA71ZeDUOhNAWZkPK89K0HpJ0HupaBsw9ZXdlHoopEczLnBIBBKVegcIicdfaVMtp6G+QRLGFOWFoK7VUTomREZwAzF1S4pRqVeqFAJbzxImb1gC43OxU1cwkcazGTzdJdHVxc3Xz2fuP70icE1tTUOs3fVUVmlLiZbGyNJaXwSU7E6CZR4PXb3bXm83u5vIv/+qbxdQDHo/VrITZcb4H9pRo/zQFwthvRkmv316+f3e3fgq2CYYdZpHt1W6T5WLTh2M1d4xLwpmA0vC+1eLemDKwgF0xC5G3qpUWhMVgqe1KNCGW6VwVsL+cNG/6xHFO7GoHrdMFX3ltVaQuFdG6IQe189LwrAI4XG03l5vxalw4LOx8nJzBYjnXeZcxadPWxvFtMyza0tBLFiZoRY/P7aEEdZ1ItaiYgoGZOMt4ZBPihIQQA0hzaCbuQ5dk6Dwkjq14pVY9rIA2EU65L9aAXJu3eR6V0pCNEnTcj/jJT4aHr1qrDCRChODzXFp1Sn0Ja2k4UWINfdo7nBik73rOwhiblBagNmmbcUYHGXjzCQEZXYkfZ41JTxmTSDKsx+UelmfHNDeaJzODo5apnhWbhXKiIfVZp20f1+Omi/Q0U0875esmTL1xttR1As3DKEMwVoJMud8wlLmcY8hqLg3oHLAYOm0xfJe6HjEMJKeBWRImwj0g+sLUAUlzxdRBHhl6xZ4FKbFJRhIEZmBxJErIDKYA6IFG6YX6BcHAibIDYuobIjFlQpDwIK3GHhl0Ws7qDVWzZMKR0FNS1yMRMbg6TDoLpyFnnwo27VTbsrBAmJp6Xh2I1RAxpSQiHqtUMNpibgQOCBFWl1qH4SJGa6W85CnAAMIrnpYDc+Jug4iaO+Ie2TEQvbF49QUC1KtXJVO3ljvKw6ZydtlRvxHLPk8I3IIdJdwA2GpjQBZM0qe+x360JECUGMOX0FDV1Y6AmRjAXCOAE+c0hgcPPXZpdgdxOD/Y/OhaoOOcs7d2KXA1pJbbdcjDbI/ZkFKzRuESCXCrWGdcEjNbYEzV1SMzblLO6JeOrfq7oGjkIqmDjdqERCGp2iIgThHYTBdDTd6E0FCixwJnTO16O1b152lRSqolcZYuz5Vy6qU3FCnNe4DwhuoArtr0hWAVklhrAQx1FyRvBUwJ0YBW2EoguocgEgnGWrgfgrhCEKJ6S4gRUUpxTpS74fq1XF4v19t20Z+b5m3yWufDCVUTRCfkbgYQbjV8fRpsFghBSQAsD7m1JpkdKUpdynHfavBV9SEkAREgcUrAshJuwhpZYzcCCkwVoEKNqIwiTKGuWihREFfsQUaqs0BACwJjd0c/5PZb9l+241
NG0EiE6aWmxUeNO8AbouQABuQmHAxmANr12qWghuDojqtCFRlddgFv8jAkqoQ3G71f1CARA2AQIjGQFj08AiESn85LnA5ayt+6nyBWgRQGvjjvLVxBBRndEARhPQJhc1uxhuvzNkR4OK2SWKTVaOZrwzBW3DdBeIuGa9cIVu61qRmQEJC7RdjfwlQJ0YkD0bVFOL5wcIgAbRWrrbRRZjP3AEIydyJxAzNfHw/CIponiFo0UI7TAkBhjoBrxak4qsFhKrucb/uUo2LOSxQ0aIiGPJU29P1RF899JNwvE7umcAaq1ioRdfmsiszeGjUTZgqfEgTlQ2sR2mNWDxcGTOC6NFRniJq9csDWdObL++iOkHoDbo99zilgmc85MEU3LPM1hM9xmenjacKUjFMxc+OE6F6D2/08V2pknBFEGCGEOQBRq2AIcVhjog44LBb1TDAkiY4WACJA9ygrLTSjtub2IpdvFWId7QWAE4LHi6j+h0vlZZD0osB7SQatRfsVBrRaxl5mMiuuwVwhgCiJpAAL9DUg5K7rljXckZCAwMHD1h8kZkAAInRcgz7CiVEM3MJeDGlrxmlN/gOuokQmDkAPYOawl2vMAzBW05+rGqxxxf/IgLZyjJCRnQwAvBhCxJqeSsiSKAkLodfZEnK/6TU683JuzUWAmCT9ra8tMNwquoG1AAsPIQ5gbRO4WtNWSrEGAJ7jq9/8+vd+9qen4xHKkljevf9+c/VmyJ+k3F9sMgsQG3Yx2Xz1yScff/mrJDFc7HjoLHPaIFFzU2/tVMru9nJ+9wH27y3/4fDlz9MwPH749fMJhrFvAtClIrjrgHp+f/fbyebqdJHxbtpTM3B8fHd//Qef3g7bV9eXT+9+dXl9dffdXW7x9Lv3H4ZftVa6YcOUQhfzksdOPzyq5PmwnI+Fcyq1Xb/aHY/vvE4ZFFy64XJ//s19+/7w/HR++vjIUqcyT09TtaW0u6fThHSv8jdPpQLOs6GgewBRMxdhF4wAI1yW5fg0392f2yw4bEJTtbJ5tTXSkx9/9pMfffXtb4THT25uBHjc9Mix3V3O0zN42ezGp6dnhgFVOPHN1evDdD7uH6+vPpFNp+cTYV+rlrYwQddvM21Oh0kbnK2sesWsaUVxRETz8GjELixgjfpNXVrKKeft6fm51WLRgUPqOnOjNF7uZP98aA5jPwQhgB+e7nPfby8v4axuutluXJf5PE9TTanbjH2r036/ZErVaxKUJHnsL3eXh8f9fDos0A5lYsrmsMzKMs6nE4Qzp6cPR0KvZemHvixL6od5mnNO9TjnQXavLznz+bBYm5dl+vD+fasKHYfa/un88mGQnIfueDrkRF3mhIkJM2Ct9Ltv9Q/G1xeXn/z2q7vDh+eOIA3Z3TE0akuEN8MtxeCYz+c9I/3OHiSNP377xf3dV2hhYMUKkiG6nw/LoV3eDh++nivK5jW6kpX5kz958/7XlSQYzNQBEAO3/TDPy6yWeqpEbz4d/86nl9/88v3Dw+nm6s3+ked//vE8LX2/YSStHuHqmphXPWZzY1yDDwCBzQzDiNnU+zS0VpDR/Qf4fQQiqlUKw0B0aJBPS2TUlCnACHw7pu/unrohTYt2aSitAcLusmvvnHIqif/wz//+//O/+2fvy7m/2CrlZb9MU5WOy9wOD52GLaXdfftxOR7P+yg2jwE1+dPTO3Tp8+XUjIeuTqbHBRF2u1EQlqVsxouyFKSaKFf307z04xiuEyyHNu2QU9iCoOruSMjF9Ppqx6bLMqeBbz+9+vqrp4vhouHUZ0rjxfNpOu2LHfXq1WUYP3w8dRsikPPDCTV++uM/+MW/+Vdj7kJ1ReMtZ999saGODtO5b5fnex1/PhaEcGRVFDiUDzfjq9pM0AlpGDZE/Xhx460QAjO24mUpT8+PBL4/HFPXaVkMoAD+/Cc/Aj91Y24ab17/5Ntvfpe3sru+Pj/tv7t73uSr0/Pz0c7Xl5fPzT/99CdlvuMEwn48PjHRUkoaJIKGIakCQsYGd3cn493teAuxbC/9eDxUPb+5/eRpf8qZQ2vqmSjdfzy+erPBLLW+PB6kxMxC4UwIEJKkqoY5EAQ4gCGimTELgIf7GnvFAHeLiKB1gQZIQQHE7O5hRohmioimjaRj5Gbu5tbcm4fFGsIPryS5LqrMK2cRwlaOXvUiXQay/fPH3SYhQSulS4MTOQQgu3vOuS4nVURgZmbaqk5uqu4pp7lMQxr7bjwc73K67btds4KhInSazh0msNhux7LMSQQRrHni8Xx+5CCi9b7PEJQpuRlhNKuM4GZInDDPdQ5XIiR0IgTmLD25OkTO0pq5GqXQ8OYNEKwFCCAyC1adU5ddKxKRiEUDwUB0ayguScj5PM0iAkLMEk4VYgpz95RG0yqRM3elTa4KUR29xDlQz7WknKd5GqSfq4K1UQZXJYTMhIDVERxKM4PYbMbW5ggXSYFggdUXYIFAVF8Fq6WVCGzNlqUIEgtm6c3jBxgikiRt7kCZBSKI03KeGcjQa1NJ4h5dFo/gnLW0pboBBzVrigrN27DZEMA814fzQzh0HXpTZpYkamHqrYW71zLlJKnrJiuYBQOzyGbYHfenccM55W1P7z7eA8TiTknmaY6IRJgTf/n2y4/PH17fXrRq4H6/f2y05PRyIvr0i5vnd+/7bQcc43botpcc2BnORYe5QWg1kQSKaXEbw6EVwfPbjYzx+jSlfWnP9mhs2lriYfv61XB1xZc347jl2Ps8TfB8Wp607QfGRJr6qyS0zAFIpVog95uNVH919ebTV59N/nw+PYkuAgidnLRql/o+YdEORmtmAePFteQBfNbz0coZlkz60guRzKkDgOBoOdV+GKrG0gozkBkqQ6hAztyl8JTpaptPgac2WbOMoTo7W4loi2YeqinlROmFXnOa6nDZV31g6cAtD6m5nbVOD6d0s8FstUwP79/B9gKIKqfNdtOF4vxINYrq/uPd/Yf3ej5uN9ep63j7xhskDe6vlvkcmM+u++InPx8Mk2moGvL54fx8tIdTmaZjLWcBADHMU7eLnKzL3nn3Cd1U2pps9wLR64CVrLO5kUPxk+7v9eothEs4VuQ8JNqXx30wEUGZZjYY8kiFqGNJkPlCmHXZh6rAmgoAVWzYl7Rdcp95qyGVkEUkZQIiTAyC3tARjBL24RAgs1mJSgaMXVDngMjszEiIjBZIFGiB6q7G9STtEet9540qEE5CGaIAW3iohZprYAErXhKSlgXaDLqAEwI0XSzcCYAJuKN+XLkkURf0cENt4I6u2PcZhJrGeakvoAyAVuaqhohuXkw3UTJdhq+sUVKdoHmAc8pIZFp1mb0uoVpbtPPBpcNuBBiiKkXztqAhxKStgJUEISmljlPuAzNAgkBv5u6gihiSkLuBswAhWAtLS21E5GyUCAUCMRGnLD4yoLiOhO5I/RBJbOwcJXaFOqPmscimcinLntjdlyAPCMaOuQV4UyObRVEgOBwY2RGciCrDwoEIpWllySxjVGZE8yXAhJiikXqxCXqqUVoLoHI8HVykG28sYK6LLS3hM
AbVWkfJwswhQiLsUFsLI0mw1qBckUlbA+QAZIYVS+zx0i7yQGaB8DXvlxDIawCvdame3cAgD323ObsRpRivu1evz3rOzTYdc8+16yrwBgfBlhI7tOph7hwhDBmZ1dRNPWr4tuuFeCBgXZZl2k/vufssyeWCycARQThx6oJYkCFCHFAjFMwpkDysct9jibBgqFbMKiFBPwZvQCLBPlQHZLNWKB57+DXD3wA+hhWjRJ6AIJDQw90JnwgqJVoUCzIimnJkNcfUC7CFYSAgG0IgO2Zu8WketzkzU0/42S6+fVoO0EdTBWVBRnFbcFZDDZZ6XPA0e60BL6miWPtE7giwHn6AhDBBOCGDg0NEvIR+EBkRzF3NGYMAEGL9HY/wCFx7ay8+dAhY8z6y9vfDHQOIRM2JEJAh1rwQWkQAvuBrkAMowMMN4CXptAKzIXCFg4QbIifuItbynCKBI5IjCVACZAQUcxJJrRUkr7r4qtECSFmGShcc5/P5atxS+Er0JsIIyEmoFkQE4mHM876w48b4QkjWfXOW03L+hIdMvIN01nnTj/NiCXEJAwg1hZSIyVUOzRrLFjFb2HJ83SekZW+V8ng61S1pUk0RrI0oTobj2Pu5uKS5oURitVzPiGQWCDZZtaoYkJFNzRhbqBBh0FwXRGPKBEljATOEcMCZsBG1oEJgrQFCYmKipqjoTrg0K0WHXiCQidcl/FrBg0AihHWqwrAyrJE4wsKd6KUPFhFEtAaLmNe3FQEi3Oglp7YOlyCcwtzBgCQCAwJsDQKRhyEQOAD5i00PaL0yA4xYbIVkB7kbEAfGSrZCoJWZDQ5rQy0iVjLRD6AJcHeMQEI3YGZbLzik/2BUtPb6/OW4vyLcg3C9Y7jawuAdQViTTBSEqhebC4uaqBaA6roOpsPDA8waIAI6MQlxmGGgRQRLa7WVpdYJOIIwov7q3/3ln/7Zfyr9iFzvPrz/63/3m09/5Af/eH17PexeBYB6HD5+zPL68uYnNf6bm4tte9yzEUSOuQVUou7jvUbt26KXF2/vv/7u7V+c0vT0/hd/00s3DijoXj0qdkapzLWcl4mrMTq+vr56GHoUIxkuu6urP/7R5pMRxsvHb7al3/3sj/7sm3/9L+HszUwuoR/4/P3vDvcflbrlpAhcZ+tQiJyd28f7D3/zy/3dL4/7p+nc5mnWcALwBnm8cpA8CBDvD096bkFDGse5LH/1/rspUtdTdii1OaBZgAVBcISQvLq+/ZM//pM//IPf25HXaekutpvLq8ePH9CtQzk/H7rA33/9ZWn4+vrN8enQzt4EyqQ+eiytJb19+wbacPf9Xd8oyRzmSfHp7q7bURYS6iK0I2K3Np1qhJYCkq/ffGo+I8ZybpgGzl2rVaR/3j8TWU6UU9eNHUaOsKWoIe1ejZiReTwe5jC1qvNcEAUQv3//gcC6RJmhHI+nj99paynzw4OFmUZJKZ+Lupt03Erl1OVuU0pQzqVoad6WlvvuPE1aGqdAovn5gBRg6hHuA3OfUsr9tu/74aJVA4lEkBDt7sNv9r/9utRD143z+Wm4uqA8Ru4Ld3m8vn4TAP8fAEiCXuvYd24eFApeizaNsd9Upd/97vB8P9Wqu2220sAjNDzodJrHxDcX116pRBsFzZytZZLHu98xwbkuFqroQXhxfX3cf6M1uABG9Jfp9352+2//8ivV+fHdx4cTWYBqFG1ETIDLcaLQLvMXn1z9v//th7nW7+7981fbPvrkO12efcFPb9788rdPZB1EIAEGEiEyuwcDCYGQtFpRaHVrrKlUXfcI4erOBgHgFmHhLXabTZmP293FI7bZzLR2284CSrP7+yMamSqE930+zpUYmvtxrj+5uqzt/D/9T/+Tf/rP/pufXv/4N1/9wqCBExNsexnH9PC4V11ub/tNlvuPJ0zpy59+km333/7zf9N5vL4aJ/X9+SThUVSYmDFZUBeccdF5aWW37Y6nMyFxBh6DWYBiLHmAcIsAffX2EpoDwaYbpsN0OfCYu2WZnu8eqVppZxaczqXOd69fb4b85uH+LmfGhmNP1NHu8k2bjstU7t69Q6u7bX93/5ESbHZDq1yiTKfjzW7XK8imm3VaoqJDWFtOp6v+piNRQcfW91Ka78/LuN0+fXy+Hq/O+9NSdTtuq5kj19JS7sp+alO7uXq9nM8MlkmGtLt7uhs2XZ9TXebjcY/Irz957bF00s/LQka/++Y3u4t0e3NT2zKOeZ502c/dLuWc6lxqaducMjISZCI/HJdymvTd5cWGFcp5GoGH7QaHNNUlbbqT2jKXN5++OR6e1nuBmxOjV1f1CDTzIEDgl8VXEEQgvfCtgBiR18wr/u1ZidhMwxReTPIAAebGksJUUnJwId8MzJkNwgmGoTN3ACBhCyPGWtU9HEDDM3FzHYaeE0cEItemLAgkDmFuxBxM7pzyAIitNgBc6kkSM7IIqlsEJcmAINJfbF+fzs8p95K6eZq6Lm/HndbKQhFiK1kSqZSZ0XoZ5mnZXu2YaD5PXd+FwWJlSD1KClgnaEnDHUmkR9AAQGQLTRQUoOHCfQIqEdKPGhHGHNCnzTLXCPQwdcx9HxoaTb0Ehs4TqDPHeSmtWakTkhDT+j6wUG2ngDNLdmgsAh61lYyuVRFJwTU8DMIDA8fcU7CiClKYap0RDYGQ0ypoJaROmAi9mdUqDIta05qZk2StzcK7LoOHg1DqPICFhdm9BQRGMw8No5CopWklhGWZU0rmnrsMBopIw0DCQ+LptE+c3BwQmOh0nocN7bYbjWhLuPNSF0EWYA1NWcZ+KHMDQkxEKYEDEHVJyjIP297b4kYWYQqlnDJDLeXb909WzdHqtPRDHjfD2bE2XXEmX737JiwSMUbs90/Xry8QZSovxK6uH6+uLw+H+1LObJL4KuN2iZoTWFiKNIOUUVQSeuuiBk7j7nJMPGyGmypz0zmNhsX2z13ebdLt9tWG+ty8soAn+/7h6dHg8osv8nA9pHSaQwhpgWUunZM371O+oLja8eJPSzsAh7o3s6cP7/NmB1jn4onFfMkio7D5DM2wNQfrh87OitamuuSBOHgjW3DOko6Oz4vNGj2yJnE38AAIJhykHxBrC5HUpzr2nWKNcEzSwB1ZUjYks6i1dSq2KFNLTJg94Oy2WQshRAROh+fzMMe4zUs55k06HMxIcPtaawgRnu/AnvfTsv9wPx2eqy5FbTSnsIQioXNrHKRKJ4uDlQeavrU564L1/Hx49Id5mbRWtLlQ3212u4Q8Cu/G1CVGQgFOQeBGAysss55FSHDcjBeneX6ePhTTOJ0242ZjhvtJp3KaqnlrtZVShUBrFQTghfqeSUwnjw6RtAE4df1mnhfmkbvtmbaeB4QOkR1MMFMwBTElBHTAWI+6YWYO6IAQFgg5gmezAMwIVJo3JQp1R0Pm3FPS8NqKgWEGcK/lCCFZdhHz2eNye8NloXNYK5O2TnbuSS2FcXEgrRyWUAgFwNfwhwYiROJE4tFqnzJmcGQGEo65qpu3ueIPNcxSite2LAURINTaPLbKadRVprN6jPOAwB5u
pogRFBauS0UAIqN5McWAYFYmcQ1blqRQajUK7pi7tbgCdSnqjgKtFUamQJLMSSRnSqLLFOgpGDGQULqch9EckmNMc+5zRLZq5NragjJf36Y+BwIMJXWzPi/7Z6VGTCSMxr5o040kscZRmbsEmAATCkU4mlmVqJwwdMKUODIoCHQcCRENThZLoIYfGK/IGF2AMvVivjeIczkYnDvoYb5HnTiIPPoetzk2ia2VTX9Bx4iA1moCQAdChtUWii9LOyCM8IhgQg9jRjfHly4JEmBKadU+AFhiTJQBKSWsdXGAUmfcjBXww93T9eXVph+oNj0fYZunj09ucpynuS3bLSOXYbeDFhxo3jK6QDQPh3ACpsa6MLhOZ9VJ+kSpM3ImIkwi/QvPGpmAvS5Qy3rMJEAIbi20NOzMbHZIStYiBCOsMTIFJ0iUolrTFN+K/yrB96mbICGGtJYjwsEsrWRxz3gkPWKlBOKSTJBS0yjBJD25AgOTh0YQOSbyvGn6yeUmd9kIiPx222672J8NCTBAVTEqNIUGNE9AGC3EASHU/IfKzUtLB8JXHcmqh4kwWHVU7msAe9VFxctfI3cVEg8NWG1oAAgr1dHdCNnChNh9/ZIIJCCUsLryiRBwzWL7Gu1AWne6TGyua6B7lV7BChgGNG/4Q1MOEczr+rRv2hDYPQIBmS0MwpPAPC2louPOPAJ9npfwEORE2AFlpKXoyDmam1bsOobomc5LuUCSgNq0YDjnjhMBS7OUiSKizcPl5Yli6GSeANXfpHGZogFcbLq5nYmSNsNQDACFM8gdxnXo28y1NSrQp3oLYdN57HuY50/G8ePD3dVmdPWDWiM2oQJ5bqpAB/SRy1xUqzKR5I4wxbywECAurRWM5o0A24u8rIE6WBMgrTWYG/IC6E2RKdzNHbVxzoHYtIKHNW3NrK2LwPUdVM6ISKbGknwN5sDLn0KsfTEH5BcJ3spqQEIMX4d+AD8gjijAA2CNhgEEM0e8GIfcw9yYsJkSEuAqkF/B2IGMvpbgfrDLmAcgkfCadkJG9JfQ2loRgFWgR+gvEmwLX4Nr4eBhED/U0vDFWvMfjIqitSb8MsQiTEgG7gC2XnOtzh4O3hi5S0OXO5SUIFNyao28LqYOEe5qGuGIREDCKZMEBYYbBgCDq0W1aT4eDg4YpX738fzd17+9enOTcz0dDqnPH7776u31l10/YECt5VmXNhlZ2t+9y8je2pjT8+P56rIf+qEcH93tJz//o999/K6cjmetjx/2qeHxm2822cfrq7vzHfeNgkPyze72Jo+/vv86Sf/2y7cdH6fngj6M25RIPvl7f2f8oz8q41ja8vrP/3SZ7f27d+nq9dVn47EoMXVagvw4les3b+G0+MPj3d23H37z9XT6OB33dSrMkARROt6M4/VgBuNuV83H3eVpntCstrvxaid4+bx//uzV7uevPtUB/vmv//rN7cX0eGhNu6EHSJcX27fXu22f//yPf/94qhhJ52PJIr042Hcfv7cyX2+uxgRHjy7nh+cTGL+fvu+SXFxe5a4r/tAPr3fXbz9+eBdG4c21DhdX++PDzds3RF07TNfbrrbp7v27y93FdrdNfefFxu4Gto0yz7Wa2oq67zZDvxlPzx/3++/HfsgMKKha6uTzPAsJBJC38/1h2F52m2HLLdB5GLosp9MRABP2ZTr1DBLtNJ9ZqMzl4d3D7vrGtE7L8eb2FQAfz4ceuvP+yGnY7lJO/WZ7vT+caittpX2jR/KTnvphGyIowuBRzDw11e/vv/nu6785PR1LLbVOAYpMF7efbsbLi4trof7i9ovbH2+iv1JOstnODgrDwD808wnMmjcnBGC2sAZm5toWbTCmYX+op8XGcUlCS6tDToOMUBon0nCDikyL23bXmfrx+NiPw7DbWJlByYER8Lg/jlcbcuiHrJ7vHqZ/95ffocYwyPffHx0GRImIza6vs0rPKdHbT29/++3vHo/y+jJth+7shcduf0zH+4WSSHTLycnJVZGQCF8ShWGMJOvQ10yYHDwQDWH9RAeBmiFiYKyHEiQgCIiwMF3XKK3N1l69vj493+8u+6raI1LubD4trRk5haVuc9qfk6Svvv/wJ3//1T/5r/7J4f7c+e233371+mZ39zT3m3T1un/ze5dffbW8uXh9//0HGvun0/QXf/HHp9O3//Dv/t67x1fffXMeCZBo3xyYdhddhuUPfvL6229OQzeA0HSuiVPuMk2TkFxsB8y8Px6GnLeAGfBhP3vDm+t+snqezts31wjuLR6P7siXu8H9gTNuNoMQ3T3vQbX5WbXOe+rTMOwuL9/e/tVXv319sfn8Rz867w+7vn8+7cEITBFxDvzTH//o7rd/Mwh+fP4Oh41kEZbjx2esJW2GnHsHjWioDt7Qyrtv7v7s7/7JA9bjealm0vdLUa3WSu2GHEQG4Qm++PHnc3tk7H0io4wZqKNWyul5SkN/cXVxe7W9e3ccuk0no7Zz30XT+Ouvv719dbFlWc4TWZyeDptNjhanw5EtsuRzoes3r/nhUGvt82hFLy8u6lRv+qv9+bnPuQs/HJ87EoLOSq1zffkUgK/IGPcgZFqdJggB4LqaR14Spx6BGI4BQCs3ATFg3VREEIl5Q/VAAAAMCvM1ne0RWaDPgabVfJNyGoQZ5yW6LM0AgSBIAGdz4dR3oursjGqdpDHvtMU49O4FQKzMlAmFETEcBTtgQMJlbtaOIttAYfTwEE6tVSJkkkw5UULJ0COEAZA14y4RSZe7UpdOuiqNyYgYWkzzQo6JcivFIzi4aQMIQ8l5U7QyEqEQgUdRL2gCTG7NFD2ytnBzCnItjCQ513mJgGql7zZuLtwRACAmyW4WIV3eOTZiDNMsfDyUGrihjI6UKGWuZpuuX4q22jpKGj72ozWodXKHlEQEm0FZShJp2joGTtS0uiEQmy4Q4G7oToG1VkZhgAQiXb/MT5yIwMPCWlvD9Na8qaX0Yo2UxKfpnETCddP3VVvO4oGtAIKssrhAIATOUrU1iG23ae00z5Q4+4tRbrm9+Xyq02bYPj/vU6Z+6JeyuId0iZlradVrIqpL80x9381TAwtGDovb1zex1Ot+bM2rA3mcjjPshAHGQZ6WZXEfNrlWOz2fz+cp5y7nFK0BoXQM7olTl1JrWs4V6GWH1g8Xnavw/PHukH2SckiCCjShDZs+97IZUzvrATh7SkoBmLsUjLnr82Ijbj1vGkfd3jiSe7eM9V4/QIEh2XR6etea3n5xcfE2d5c6z37pmLv6NFGLtz/aDa9eJ7GL03fT4/25TQlrKVUXL7MjXFQAEDvH0vWbYdwMue8yP5s2U2hL3+PkZT/vQXHDPIdoGhz7npC06xyTVSkaYcclmLhjZ9SRhMFa9N4NDS0L4OMBawkiyUK2rifJE6NqQnF0TniuBgPTZTe83dm9srOVljkJ2zQ3V5I0bJLsUocVjudKovu2n+nQH7+1cm806Gzbze3kmzMINO9OpysUggibGaFzq4vSMnnMi02Ph+fzdFxKaw8TNYiGHWUh0urbQcDnRg5pQJIsmwQ
pBS92bzphWMUBWECXpICtVX0uc5TH/UFrcvLFtAVmd50kIwmVxDY2vu4tB3nJAagnXWbQgLnVksJAiWfewHAjScCESHpKAcEYEMZOSARISJHcMMJ9XR4iRg4QXyGjrfp0qtNkSwsPZu7GTeqY0d1KQgfG4urh1CVKiUU97IJ7bBW89oiokIC51SjQqrUaAMlxXVqDoEBUIuwlGXNEWDCGqE2J4XKXnEAcM4Vt+gNZwaZl+mFtAKohLGbFWjHBMh+5VkdhYSAUJtJolZqbAaBVb7ObwopKMQNvEYEMGAokpg10QRdxl7QRThQCtUZUiiweociJKLFQ5pyNwjmFJBk5akM2TolIACVJljBVwwElUzMkCyaIEfKuo7EhCWokhusNfMr8UX0ONugNVAgHyK6EIKbE0jGBrGJoDANTb5CksRFsHElhAo712yyiOTw5OsqGo0OwVT7TYp5cD3ScHLrtJU9GZQJrA/VBma1mPcWUY2refD+dr2LLnKwtSTJBrHwoQgwMhECmBtjUETATubcIFEqIBC/CLHR3ESGRQAQehv7NsT7XqsEjdhkHos9eJ+Lvnk4ZC+9n0poSlcN5+bCfDmckKA3a2ceERA4USNDWrk1gn7u5KqYEhF5KWybGhQcM4UimWgFDJLsIIFHK6GzhCJViJjLkdahCtcr+aJ2hZAaIBB0Cm1XEJt44KogatDO0O8JfQnrHXJ0MTUwlGgJDdO6BQBlgMQuu1C2L1rG7mJvOFJG4RNcSWgCsyQ5KjmLQcfPPUz9mdkYgtrBt7n58639znJE3Bg4WEB5g5BSo4eZqRAzu9gOxKwDcDYEAAn7o/njYypHGH6phAGhW6SVhYi8jG1uxSBwvqR8L93W0FAEQtCY5woLXTBAEAHmsIKQX6jEihTszmkcgeriDE2K4QTgRryTigEAkAogIRgJkcwUkc4u1/uah/jIuMF+JNDRNi6mGRzNIfV/LaQWf1WWmqLvLSwoMxL7LNYCYdTEjrOrDODwU67v+vMyXIsdSKpMSq4fkzWRhkiaJiVwYtCxMPWBUi5w2Bv3WPDfalXaRu3vVBWjmbraYmpq5t6Vn/NFWTqfHzTDW5TRIR0G16RalayehOBh0ic9W95kX1grm6BtCRlWzThDXWQxLde1TnspCw6huaE5Aqhhp8FQIoTkQw9TKrt96c0moZsjiULok4V7UbPW4u+HKoQJGjwAnFgBadXfu4RiIRMgB+JIeQwh3txW/44gkLKtrjpDC1f0FRLVeS0hrFi18baoBEglCEMSL0h5eIEJryVGQXups8PLz8QPpASHAaY0juQOzwMpKj4iXfmJArMkqZKb1GlJToJd59X+MtQ5XxDBvRAL+wm2icLNStYC7W4EAQcxJFElZMAQMkJNAGChGuLdwFwJCJkorBF2IISC8NbfqRJxVbSmnx6d33kCIAHclmgJJlz/cfbO5vG61e/2jH5mznS11uBl6IJSh7+q02fU07cfdztUvX92cTycJJ/anx69RzjAkIh4cHp4e+zdvMKaPxz334QPm16MqVjg/PmM1fj4epOvzpmPufvSf/Q8vLq+6iyvtemBJaOlivL39vW9/9Tt2o7GLoSuqr19dPT7c/82/+aDW3n37mw/f/vXx7oPkTmvbXmy6TQ/YSyLT5kBGXmsRYRChBnMpKDm8Hs4LE2so5tys3N9//fufXv3k9/6TZa699MtcIGVbfLsZXJdpXt5/+OgN3GUcBxQqVQdJ2Thwk7y3udQKIDqXqe+2LbTrEgm11nKX3UBrkMnT3R48NkO62ErgJiXY6/m7D38l/BkJ397cJKLT8XApeN4fz3EAiG7IMuwo9U19Weaq0+OH2eukZWoiGSHQGSl3ue/k+Hwwi6e7u5Tl+tXt/bQ8PD54xMPDU1E/nU/u/P7u+Xg8UaJQO57LAjQtBWyFvAMiEnN4pIxMzuHdttelvrm6uLncXQzjlz/5dBzSclpkIIJ2nE7hdD4u03yYdXn+cM+BmyEnpotx6AJ78TTGOCbI9MUf/nwKunn7tln9m98+bHYXda7QgRyPiuTejF+2B4BwcXtzeHxAwgiry7TdDNW4Vgh0dUvCQ2fbMdWgabIAxIAlpFSfdWGhaVpAelDkMOw6R3C3MYshXg3X9493y36KBG+uLpZahKCj7vxkHaXLN9d7e9je7uZZlzMMfQaA5+P+9k03Ne2Zy9J2Q7rawU1HhT5GJ2PeJtl+9/4OB5Gc2MOsBa4iprU7aJKSal0H2+aORGYOFOZOZowEEcj8gryHCHd3X5oBYCsONUJ8rudqbV7ocrtt6puLLTzPKQFjAEJHeF7abtMp6uvbi//2//ZP/8v/xf/8f/+/+z834FI0LNIAP/3p7tym58Mxqu92u2maO6T9u/fcLQ+n+4N6dZrOOqkycDV5PCz/gz+8JpiPyz7nZEhJMjIdjqXvx2VSdbD5nB1sadoWzhkoE9BxKeeyyJgD/Ppmq6cJQZmSmt6+uk4QTedI3dB16ASsn33+Gmo8PB1cyqu3F68vhkQ0Q3s8P152Ms2GhF3u2FmNw/F4Pt/2Wwj80Y9+pKrhYa6vrm6/ffzw5nazlDMA7A/7zy8/nWrrE7vWNlfHY0o0l7p/vLOlAGKr3uyEpL//B58v+rgy/293rz/ePb3ZvQ5aFtNW4OaTt6VMp/2cuNfqtc41akqSMG3HXbjMi7XaBs6H8wLRMqYs9NnnnxwOy49/+und3bG1Q7EpiM9VGcepNZ2PTni5ueiQ5vPHvB1mhcP5nHL+27WB6QvgZs1LmxkEADKQAIa5M/ELlo9ovXJgFSesNy8HpJVtKT/czgJhXa6gqQJSdY/gZVm6LgeGh2bpXg56EIBgEEtrAKEGgByoTRsSoghJ4iTNHFeABdIqhQAhWxQdECXAkMhDVU9uPacuMNy9y7lZBaJ+c7mUJWpNlCMasmx318/HfWJiTswa4SllCFdtF7vtea6uNmlLnWibAzDnwdzcjUGaNmBGQtUiwjltlzYPw6Y5Uk5WI5GYI/e5WjCHNW31zEk4p6qK5EzmDcBUWDAIufPA43lBEJZ0PD4TsSAhp9asttING12B2wTgAACqi0Na2qzezDwlrs1qa5thC4BkFFqEpJYp5REIIdL64NTc+AABAABJREFU3vV5qNMZkbvcmyqwnLQV8OweQdvxamlLhK7TwNowMKZ5Hsce3AWJEBW4tEoIrS5I0lq5GC6XtqQ0bIerp8MHDxVJF133yfXN1++eiXI/bpa55Jyzdw7OgHVaCCXMVYs37/tN0xgS9L0ACWJ3c3GjdoSIToD7fDpPpZjMlUP3h0Nzf/36rZY2Dnm7I6vt6fmITDdX22Wq5Vw3m77jkZnz2M0zn89103VCaZnn3WbkTI+Pe078cjDqxnY+AXSO/WJxnk/RyRKsJILdmLcd8Zn1QWm/Pyl3KV0srgQhzJeXl+g9JKzWZDMYlONcn+a7Y502ebM/nNr8xGmTd2/H/kIgFZoFNJoOyfPY3968TbdfFj0u5XCIu9nauZbH0yTeQVBgcMrGkSMFjQC5zxKupejTEgOLevWIsA
xWkZwYG+BsLQIzDI+n6Rx2KrPVedxcIUd0uBv6FrPaHGCKYVqnU23TUs+aMlPAkBi7tG/VO2BCdBRO5N6lHfadcrbtMzzaOnejRBtP7jofTq/pGhIXRwHbjT0l3l2OIjTky3JcQMbM/bG0IDl4zHNs48icFzR3PC1znU71eG7TY5ewZxPqKAdB22NtYBqwABEIUBKJgZfJvMMuj+PRqBK6q0IxqNXQu66M26qtgpymubhiR0h5macl7bpxR9k3Q0pLf1qWp3mfLvKw62NETB0vi56naDVaFUctDZWR+hhG3WxVUsaOkRImAELCAHXQADP3Ri7AhBAWBWBRI5BVLBFevBmXczud59PkJsN4HYhACSEFpqoKNHS9Q1iNQlRTBxmXUt2hYeqC07ksixbmZNrQhFNvWjmgmUdgCyevCCHcG3BrHkhABIaMSEJkJhyXm36ZTizRC1Sb+W9LB12f+xEIyzKjVaQAyaVZWGFHkWwtojXmnsKbtVpnWxZB9OYBiAxEaB6ZxAIiSPpBumStsjNLn1IGCgiPpoAtZXGMyJS7DLoygsMQhJmJibq1axFAhGhWCD2lGLZ9ayrdpT0/U5mv+sKjJwFaA4uJyPzzIX04LodiNSUQ1nZmyOAEsZqREBCUYo49cU9AzagBNYRMsgAKRM9kEB5qBJizm5NfMmyCWsXqZNbFTLWhYh4ajMNmQ/FBoGOz6XRibNTtUuoqlspZDQcEJZfE6o4RDOFmGKslQhEJEYk4gFiSBSRYb50vDTQkUgPh3Iy56yp1i9cnq12wa0ppHD777LHz/lI+fd1Lmznr1e34/HBcysy94uKHcx0krcvE0zyzpO1mdKhqDqt/gLhoRYxoxeuZO+i6cV77KsAKAihMQvKisAxvhA7oGo5BSGSQ5kDUkRe9St5hc10oMarDUjumjHZM+ETdrwK/Q3giDAKuiyiyowFWwAAXhASIgeiEHuqQuXMUJy9UjVYs9oIKGBTuBqTYsQ9DOfzo4lWwO/2A4jX68c346cfHD6cWRAyqzQKBAQgptPEa8Yh//5AML+JzjCAidne3iowRGIiIFN4iXggyHsEs6EFAgSu0kSLg5dQU0dqyGpYj1g1cOAADuishuzsAIJGbIdFKPHbTgHB38JUm44RMEOoG63gizNbTFwAAeoSHYSAhBiKjmLUXdjI4AjJgEC2lBcAqvWLEjFSCHBBMt2A92TZLbYtG2e02BGaGagghDmxYh8wxawpkQ8fgnOZz2Q3d0gqRnJcCBM9alJO7Dx2v5tkzNCeZtPRJTJtKNFZOMSZ0B0MkEZY8KdTwj6cGwedj9XAm3rtNwAVgG0a1msFuu7s/LT0hFUuAKKlqCwRrBogMaqaJqLlj85Tzsc4j+qUMp7n1nJ+WJSiGTAnQq16nDs0NsFUYhr7MVYgAYK0UAlEAarPU5SB0BIPAAAEwU3wJRwIRAph6C3CiZOEvI+A1cgQYAOZtFaOtCjOLtgKlf5j6Oa5ZHkQkdnNEMm8vU6d4mSH+baNt/SUArAa8FwNaIL7kl2LlGAGR/zD+cXcIBFzDQ7zm4JgY3Ff+eoCvLIn/GGtt6m7KIr6mRSGEIDwAGUCQgglTGgGAUx+UG2UmWX03rbWqk7mScCIIwCwDIjIxRSA44MqTb4JtWuYyzwa1+NkjQjZCdrd//+bzz/Q8dZAEjbqOqPckbWldL/dfvWvL4eJq+/Gbd73wdrvTk443l29+9mV0vU8TAWz6rQ2vy70tx/vPfvwz2Q6nw1KWpbu69LNYLNtPBskjjkOJlK6ubj55nTYb6rIMQ06D9qml9Pw0j6G5k+VwDLB2nqY6G4ATL0+PU6K5tNoap9SNI4WPt28BuE9I4l51LtVIIFMtBU1AUtd3gExgJQJTh93QX93oaal12V3c1LKctQBma87RRYVN3tVab3bb1twoXY09I+MY43YbQYlwuOjO89T1fJ6Oc6MOumJt020/+/zzh8c9inzy+ZsPdw8XF7eGfP/0/OXv/WxnRZVyRvfatAJxK3Xs8pdf/CQPPRNjSksru5urpnhxdSubXSJ4fv8ebJaRylLzxqFNkuvHj+8Ph8Pu9mbx5Xe/fXeel8en4zKfHx5Pc7PTVLVqrR5Eta3QGDDAQDBzWy/6dcJOFEHrl9IqGIQIVF13bYQqHHhsOVF5vr87PqYkv/7wTTsrBJJ4m2dmQGubLg2Cu133xcWw6entmyELPT8cMSfJ2OoyDLXU+vDN/4uvNg/5DeqG+FVRhywpCwUIYTDwD/eDV5fD3KbUiUa0Wft+GDYj6fzlT99qKl9/dZ+gJwtrMPTyD//0LY/0l199uOjyRdfN+/2f/9kXV7ev/sUvnj/cHZdqXc5hLY4zo4ojFOUW7mbMh2VCXAAk9WxtROYP70+cup98/vn+7rdkNs/H2uqP/2C8vOnuvjl88Qdffvjd89PD8vx8/Dt/JP/4v/jkv/q//6YYPj7t33y+fWrn+sxMEA2CwHw1cREFIAFJAmIEDQdkUlck5iRmKoTgQQCEaB4sYuZ500MsCAxIALoZOkLox2H9gtGlQW5LWUBy7run5ynOxYp587xJ4+ZtzPlf/Nf/P9fUVKFoNR+j+/ZvHofb3e3lq2nS++fnv/cPf/ZXv3l///Tx5z+5Im/PD09vXn0+3x92fSqIGnJBdH6ePnl9MW62kiR70JCfn88C1FwNMQgtXIRNcRiHUBBJirUfun0JAq6K+8nF0tDlfpv2z4+3r97cvXuXmA00CW+G7X5/wg0capXMp1P53e++3Wz6n33x5ldff4uowBLkqtrxUFr86CdfNFteffp2+nA8zu0LxmWpnojda9XdeNWKOiAlGIaewvuBD+f69HzX9QMjttZabRGoLzAXKVVTv3nz5pP9cR8ODG6Zx5sL4H6yxy4PF1eIXm+3w3xaLjY3H+7vOIHDkvKWAiDB2KXS6MvPvnz6+DF3ebsdP9w9DPkybW4u+NSdFzk/0qAyJu6GeMJFy3C9oWPMJR4fi7fmkC6Gi/k4taV12xc1rLAghgi+SHvAEIJI1jVEAFDQS6SWABDM19zrCk0EQgIKdUOw1QeMq+BkvfcRiDAQHqayVBtyJ5mn03nYbMKbEJWlCsmQual3Q67NpnnZjDklsVoTrw8NEOGm7gEiYuYRBuEM5BgOzsxh3nWdW6u1kLBgpwCtVUReD15EmZBUK7J3YzqfTjltcurDMeUBGcuyDHko81yn1mrrdtv9/ojoXSdMPJ9naR2QoJiqgTsgrYiCAApHtUqcEMO1irDQMNWZHKHPBs7IQh0JsEsLB0ThRMStnEhkrtP2cihahotBOuFK/bid6xNQtLZQkAwDCEcKXUonHcK2THPfZ69FrVCSBkwwYGVwb1YwQAjn89PV9tPJvNUaWQhJIzx88YpsYWChTUu4GXgaulpmhlQVVWHIO2F8PB2NKRF3XRYiWHesxEGozcbNxiOWparqvJxbOHsc274xck79uAXQbx+/dwcROi0FnMiII+bDKSWRjqwpbweskJGYO8B2Pi+EwBxPhz3D9OWbz+b5g1UXoof9Y049PJXUi
QGlxOdpVm0goZMsh9kUJKPOC2ncXPSbi+3q4LvbHw2YiM9TZVyqBqU0TwXwRUoNAG5YJjue7UHJEnZs6AeQ0b1Z4wk5IQrglchwfbnU6HIHsCxWA4ZZGWPMlDCqVUVqu/z/5+rPfm1b0ixP6OvMbM65mr33aW7r7hEeGREZEdlEKqtIKBCCqickQCUhIR4Q/xJ/AEgICR5AQggKlYRoJBIBlY0iMzLJjM7Dw/32955zdreaOaeZfQ0Pc1/PLJ7Ow9Je2mevZdPMxjfGb1C3fpnfn9vVIyL6JG924+/lmMVl5Oz9w1rfk/aY/f7pOTejm+E0L0vr83K66LKYDT6od86AXIgADBkSA4FHq/3+Yb1YelztTqiksa5NKC+9OSWLqOBlys/L9SpWA9bOu/KGPG3si+ZIMigyE9blZPO6PszX69pnHxuXMX/++rfyLT2gPmAtS90b5zRmUZpYi/k+8x/I9+9+GRc0dUyWBsozXt/fz6fPYrdTwZtdKpbf3L3OP/mpQ93V0a775XKehqW/W91iVQBlbG22OVzNcXU7z89zvS794TDc3Qw7nt4opqelH+X9+fz4fDpfzX2yK6/Q6sdAe2YN7y6EeIkqgNVOmDJKmu6OeSx4BQ3MZbSwrjX6NVNWTi483LwOQOIylZlv0KglaoUJ+graBQiCr1d6uHq0AUwwDSXtoIyJSqI8gDAKIRhu7Thpuzc6pBbUVSNi9iAIajN7gIW2k1lvyxkd8zhaFB4KEQBid4XQSBFc3DpZykK9q/aFY84U3eu86lLbJQzYwBq2GIEDWTb/C9FqIZjQnQBcSZgEqZoB2oBEnBHAwyN0qbMw1royMgeDj9sqyONEmJrHOB7Duq7VXblALmmLULj1COjWrNbaZmsAigovmj8xEEsSBiRiIGIncEQeIqEHJHB3N0EpE/ZATDl4K13rXXtvGJnTbszDgG6IzAC9d289LDy6CAghJU75BsZS7BnZSo6SIsCR+aVkmtJd7H4r031vHzw6iGOFqAgmPGQmRapMosEBkysuinBZUJZIxSMJGmC4IaO6kyQIRGsCFQ0ACfLeI7qsRj3nwTv6eu7aqWMqHyUMSYskzeOurddhl2AcGura/Ho5vdoloRcesQMK85ZDQQjEIIwtm/yCm6Tt+PzSXiQbaNLQqXii5+eHaV9ykutZw6g9PK9PZ7nujxOdHq4plxPC+8upra1GfbDlzc3O5sokY6Zm3tVPc/MAhyhDikXZcMhlQUBxNJDI2jHIwFZyQUIgJilITCwejgBoCq5qHSPAKUBdhhPIood1Hd5Im2g2a7G27AXMT4P/AuCXau8ptyBSBXYBGIEgRNEaMYAxYhiam1EUxASMAAQBhAquGICUtsQjoAEosQEPq/52ylPGmgiIX2roiYcUv/+q/HA5Nz+yWgKPgF6dkMShe9suJv4jTdrdt1pzd0fArfOPtsasTb/ZBJ+N1Rdu1vE3ffaI7oERwhkCw4O5IBghd7WAEBEGwK0UDTnANtgNE6p2QOzahBNGmCohI2I33yprEHHrX3MwQkGw2MJs4IC0eZfc+qYZYYC5wfa7moeHq7feeDcFhggJ4rXVCKCwA+F+LAlUUID0vK4JrSoNkRzR3J2pNlWHMF/WftiXpbehpAJkrR8O++fLclMwO18oAMOEHaJrr0QK/oRwJ0SmygAYO4HirVBu2lNh4rR4OwOfHLm1Y0m1mZhylmeAavbpkJZFa9C1RQRI1R6+QqQ8BIKqEWwuLUzMTKhqpMGchZysL3VFj0RDoCtRB1hb31L2RaAGrKFbdXsWarViiId6q9Z4E2lcK4EhvFDdXkQZepnIAwZTtnAERkIII6QIRKCIwK0wZtNPzJ0CgLZuMgffqvS2NmEPD1Mm2s5Z8WOf3ovXB3HLor4gi5Bi+zbAy+LYGtS2l7frYES4b9FRAvwNWhsdtlK3zZYXTARAwLjlAP5LUtE4jMyJCQM9zHtfkDAsgHgcjiIM4B5ECPTyLNPZOpqFddW56+KIGIyAjETh8FKWHO69txUjAqH1pbdlXc5zO+WRWm958vZ4/eaLL//g9/74/v6yu3lb+9x6q+uq6CGAoYMEwKjXpRgwsuIqd3n/k5/A65uuup5XmQ73zxfc3ez/8HY/UWNeuZc3bzmUk0h/A0zjftQOARw8HD4PMtQOIEMaht76/P6aSzoAsLdUgbwtp3NbFmZyhDobZWoaaRqZdkjCIsyEDMtpBYSECCxlX15sGdUQc4Bf50oA7ro7vJlXLcNYiGpbvZNhmNqUxmk4XJaZpvGnrz/91Q+/qBJAncfkNCYc92V3f/4asc1LryFrq2btON2W/etR9s+Xh8DWa63zSoYCauc6UE4ZJ+U6X4DCPYRhHOX0PF8vnsdCgGtdDodcdlO3xGnwuLDsSor7h/fn+/fr5fnhm2/qvIR5dzjcjd9/9wUn/uWvv/vq3XPj0mvtDmtVEomAQDJAxBBhyGQBQSk8gMC3xGYKGcgUGZEZEUKIgonJWUgQwwIxbo4DO4nQMI3T61eCcX54nobSm/Z1nfu1XtdtX8pZGHIZM6jOC2i11qO283GSCLDWmV94WwAQ1+v0ZjdNcL1v1hLA4GpkDoDmxsBBL66i5+fnw83x6d2ludy+2te5Pz+u0f0X3/zwW390U23WjuNhGKbp/t3Dblr+h//xf/iHv338q3/z3YdTv/v0cH//9PhwOsrtZ3/0d/78V18IhkX15QJhVXWuJ2AqA++P09Py7j/9Tz/75v39V1/Yd1+Z1X03xhT/4l98q40Q8WYqp2v85NPhl1987ev0/a/eWYXdNEgevni//m//8+8nv5mm4c2rfveRwDv5dlmTZSLcHhkOjgAIoWphDtk9HDAQgYgtgrZiXZLm1V0TpHBwjCRCwkjc57o6EIS77ctuDa/W8q40x0u9IlNdW4S7ETMdXh3ff/H9TqY//ZM/oaH9+V/+Kg9EMATFm106DPzJ7eHX353uH+vN69t/8A//4Otvv6bun37y6stfP95/tw548903798M/Hs/+a1v//TPhfM0pGG3/5tfX9jZenTrScjMcxaPSAyh9fXN9Pxh1grjKApVe4uwcBlzypg6MmDcvH79N3/xxb6lJPz04fEw7lyVIffan67PmPjx/oKU33y8R3+CbvOp/tVf/qrOy5B4P+YVWJKMefrweP3JkNblGcLU2u3bW6cOQiI47YfW1mnczesSEEYhA1+Wc7WWx4yYHWseqfXVtO9f7+krVrPoboZ//+/+8Yf7b7Trftx9+umnPzzPXA7z8jivS7otQaG1a+Te0rNdMJDdj7uDrmvrNaVktYf6Q3+fCsSy6BI7zgPmpStjLPdPy7srj4VLojBearcor8c8po7w7v7y6jhe2kXOVavV2sZd+lEyjc191puGBwAhUzggsYMjMKBvFPTNggT4GzNsAIKHASAR44tLLcx9G1NsFAZAMI+66HeP189eH22pda07m5jIQUmYiBmxukIEcgwjr3U9loMDIGFtFRmRIOVcW3tptTUHg1yEmZyYiGzLiRNDuOvS3Uh2ROQegAQIvXdi
FkhrvYbAfn9c177b5XleHQgAmcXCyjR1tfPpseym/TT02uqyYmgu9MVXf/bTn/whIrhaTgXCiBAkIwUS5LLXru4wlrxcr5e+AFhYiEuAae8AoL0KU69VJJspQZAgCVLHnJO7Qrhpbb1p7yVnIKzzyszjUNyUwJF9afMO8pYJZBFfjQwFeb4+5Jh6WLkp83wRStP+TVBRgBFDLcxt1SqSzFxADuN0WS55LOERl1k4A+acctc1p9StnZaq7sjYrYV7bZWY85DXec1pGm5ulzp798TE4+huhcXUWXCUIUmar8uQkIAIyc0QyUHcgjwkDZ+8fvurb/6K06jrMiTp0NUWDM9C87IednLYj23up8sjmInFcrlKwpSCkAL8uC9CNM/1el1u3hweni/QOQ/pkzevPty/D8D9YQ+Jf/3lu82qf3N3MxBZaymX7Frr+rys+zGj/LgKrA4ZzPD+bE01W3t9PAg4AjfvEFf1OAy3i6lwKWnf2wnTTfh50Vp9xoDRd2OiMWVC7BDuNZMTGxITpV1q0n7IzKIFvKW4zHaPAcvzgnaydX5oSxEpsdb5iQcphakHIWMOC4sex3wkPkyHYXdQPc25uF+qWVQ1rZYKUpaEApJDmhADoQwwgCzN9l2yQkJ3dIjorefgjmroXbWu8/n8rB0FC9eYEH5//7oOa+eYYZji/IaHBGlAH6fB9nGGc77B027oi2+FqSXxKtZbfXr3YfzsRqYMmd7ubn7+6Wvc5ebYcTQ+du1RWy7lcv/IHaFz5dUlLd6ljNV8dS/D7hMZ9nlKGCUfMQ2QgPN0eH13d75vUeeuG2W0Oz1X5pxcaW9RcsosabxtEMN4SGWXndBU1VPede/BQuCmqOFDTkyYEdNQ1rYwQGK3tmgPloMiLA3aMz19oIdnzxA5gU/UMTkgBQlmRgkgQ1vMkDdvEWh023RHAwQSxBSA7hS2qjV1RAmeZChMSSgRKmjvS++8tiAcJAiXdeHWwjt4b9cFtWttANTXVa0VGdzQjIqM3AcM62oCDgAYwkCCFGEB2jooCRETkulK0ZnYNjIHABAWYhAaS4EffdbajZl3hxtAWNfGiZmDk4hIRBB6aFuWtbVLrUu4EpN1ICJKzIk5MVAIBCK4A4k4g3kn2JK6hoCUBrBm4YQJwyAg1Nu69BbdkEKwz6BCzG6mAH1t0XtKknIqA7MQMASEGGQGysyJSNA3Dgazo0MQuNyKvJX2aNoohzCDUTQJIs41TFGy+6H1Vycuj1Gv588+/pkdP9XHL8cEK7d558/FqpC5skMhFnN0DAPjUJzXeHIIVyVPKdhUu+W5MUDLQwJbsJ2pX3W5UqzjbooOBAjaNkzkVrukuqV/hDBcdbtkmlsE6kbyEt4cB0Rs7igk46hORPn17vi8PgCm6TA4eYL2EWJ//8jHbO+XNdrTYm0HN6+n68O7hGTdC7G6Lt0YQLW+bCI5AeGQGUFGUA2PcCGcUm5QGSG8UoxCQ6OEjIHg277vXVyjW/IM4RDETMpuWRrsL6on15+XXY579PmM8TdoX9f+a7bKDKACINojiAMt2IMVwRAEPBwNwBAjPAeQCzqZUVAwJyRRc7eOkQzCgXrkZONunT+/vXVKsRXHQUAwIiPo52/l9rv+TgPlpYxBO3SNAKDApa8i6d8ZKhA3FCMyW4SHMxIgbZlLdyciQrQIZnY1BGaEqlU4/TgKVwiNCPPGJBvgBgIQyFQR46VH6+V+DxGgZkTsYVvhzIsegbHlBQLAATZ5Ln5TzAYYSJvs+KI7hNNLgM4xELYXgOq8IEbO2BQxYJkbh8PW26FKEZe1eYytG2SgnB/P18NuWq7m3SZOzXztJgKErmAo1Al7kHlzVFfLQABBRHXtanC8u/vwdL8v07osmBMiKUGFOAgF4LWDQgxCba1FuJsPSXPi3h2AhqGctVdvO4ZMro4YIIBKGDJ8cASSKfyqvs+JADpYIUrM16WVcaBAAmDAwuiqNXTHDOEhfG0XAAzKPWLIOTwCdEzj2hsxO1Eklsx1rYlIuzf1DfbkEY6gpokSEJkpEiORQyAS/FhyxkQA4KZEGBAB2xQ/mHmDT22SUHgQsSOFu1kXyRBhvpnRAhHDPMKIZXtjdyfm2B4YES/la1ulDDgCOnjEJr4AEG6Of0AgJHcPBMAtRsfb+T02uWY7xiMQvihKP2Kz8b8kFaVcRApRqKqHA7BZB/ckRXAUHgM7A4FrbyviFmer3pu32bUFYiCaKgITJSdHBKMWYBAKvbqtc1vqfJ3Pl97m8/yA7Ewh1qSk8/MDikuG03zNg7x+cxN6fi3a2vP87v0v/u2f/dbP/7hbg8x/8A/+a89t/p3/4O8/az8/zcdEsMvpdlcNhpvU3bqgYxAyeEkyBVIeorXuwMQASLUvGFByGcehtWW+v5gpAdRT7auCOSCGe04JJH/44bu827/55LPWZvWsCGUo3k3r5l10GQsQMIFtdyoUAB+nzCTu9To3QChlcoJpf3h+fmq9HY5vl+vystEG1eXM6rW2X65Pq1diatfLeLjrazdfv3745ngstq77vF9mL2lfkoBHx35aL81iN91I0NqvblH2d+NwM9enhNR1ff7wtS2/m0VYSrM43N0JUUATTCsbQ4pG63k5n57W+fp1fLn0x261rTpkMTDJ5a9/8cWH5/PqvUUj8ub0aOVyNgpEAQNmAN/iu0zIWL2nRJwFCNwhZ+RcMsW0z7vDuDHi52WdMnUzIFjXJoJE6BrCWBKoypvPfvLxT//2OOxL+rEuz9uXf/ln398/kHk0vSkyJuEI3vRRYQ+81DCENKTEkDJKqBHMrbvHbsTLdRmXqzZo/DsSnBjNIsBzKWaboQ8A4O5murvdrdf1eSHBYUXWRaXEd+8+/O2/+9nd7a7ODN3Pz5Vx/PZd+5//L/7ff/DbH//yr07PfXf45CiQ0NvT03d9/aG1OIxZdjyv59vb/dIUEUS4usV17Wr/9l//6meflP/Rf/dv/x/+T788vc82A0VZl1hXw8Doq2j79lfn/9Z/++/8n/+Pv8DoBdO165TL9ZK0SyOYr2tzbQ4QwqmwBxJZeNe+5RQguGkwkYYzEIVHgKkxM6hhQABBEG4SMjoCCLODRbNUStdIuUTAsjTvRoGJU+8nB97tpmr1dO0DSyqptzYWUNP/5H/8P/hf/0//csfjuenlYUkT/PSzVz/9yeHP/+yb49tX4914OrX3754uzxqNf/huXlZGL+daP//ZIa2ncVoTNeHUavvq2/r6djdInNd1jcCS9/vSuyngYZ8SxrKs5urIDuToTVtJg5kmxFB71ssf/4O/c//1w3QjDJwoYfi6qms/7HcKa2AMw24v8v756Xy9Wm/gBKzVU1Wfkrx7vFftKTGJ7Qbz5Vsq+bBP356ePvuDv61g18sy7EtrwZEkuPdrVy/DLkOJ1rz5lHOfl+vTnOg2gMswWjhFCHN0H/eFSM06SbFI989zSrvazgTrRx9/dJ0vYdHcWr06pCKR98Xb1SFhknY5ke/z6KmUbr7bZe9weTqz5N/+7Y8e+v3HH796//U571KtUGsvGQr
JdNy9vk3f/+K9KhOmRe3nf/uzb/766914BwFlml6EogAi3DrvSVhEeoWwHsjI7K7h245ItLnhAQjc3BDwpYMD4GVnQoTwLb3v26tbZ4d5IRpTul7am0NRlL7WxKWk3L1HADmgY6jnxNfrWngM3YbtjsI5lPilds20E/PmqgU3tYpISCMYeFPiSHl0s16XDJzS1E0jVFJxB2EEpl6xry00IqLkZH1252l3QKJa5zKVsg8UfPjhu7ubOwzURW3utOPz8xnfencFc+INXapIHOjuIWnkXHRea6vdZwjIw/F8fn/cHwHEojMPra1EFg7ort5pk5okL7CG42bpEpZSxnltpuHowegWunQlVAQkSFm2JKz63Bsghvce7GUsN+nVt/ff5AoJZa4mSVZ4Og4DOTJod73bHeZ1piBQvNoizMR0upwyU1t7BFpyC4+ubn1eV2JBgqXVfSqceKktwtMwBiQLQOHWOhcOBDcoOd/e7p6XRYhCtewzqa9rC4csRMPh/vH+br93gJz9/fL9tNuF4dzNgBLi4/X59c2dqt7dHCh8GA9zLPMyf/z2Rsy//PZrR8zjuF7mtvR8l8JtECqH/WF/VyI9XNqy1q++f++mZbf766++xyJVg1BuhpI4ressjK6WmSijya7Ny/P1tK2CdW313J4f21oDCJ4JglfupEjCaWQh7ExVPbV2YzKm46vVA2QV+64+/8rXR0/72N8NaJkQcAgCHgLc1C8B3e29z23Yv0ok5+UR45tdWsE4Un18uvbTRfb7MVEpxfTmgnU3clTMLFQyGjKPQkOwUzGglgcrU2una4spA2aCnKCMgGGIFwaaZLQ1DJgAKCChDxRqazAkGQoa2yoioJ0Rk/iQhiWHu0xpmsqwtOVmN90xfrg+s8eAOmYKjxQk4E0Y3hzf/PzVu8sH7LzBU7LguvbzV9/ffnqXd69XRqWWuBK4UFr3H10Xxgnt7GkUSc2vT321WZWng7IwuQGm3UcTlUE7OA+7AWTImcYgGAvocDOViFpbc22LNhfW3tMCk/RRMYGgMNMwHu8cywHuDkFmADRoh5RSSa/M1+7IMKIBLhVTHmSPqHO/eFdwz0wUvTmcFl8ucHkC7AmQNUkeJ0hFOJujGVqghTsEIKoraA817wsRMTG7BLEDESBA8uBIA7I4BA8OLAHgZr0tAu7uUc+GCVyQIvuJ5/f9evF16XMDyEEDUEaPBCvEUB0TU4oBkQEgScGw6HUkhDDXBkhda063RpwoBSJRhKq2a3ImQEE09eamhwTjDn4TQMvIEWGNRIYiwQ4QLByI5hTE4RDkLKNwr632XsmRyigllTETSbiR+ZakijByyijJFE3ppT/IIhxCAQOCQbWrgwMHAQBBxPWZWFMeVGnuEEqMQoApSRoyIoAAOkg3bmtCow10h8mjA5s7ICOA7Li8Tet9kS8jCCiRMXpGGCJ3F+89db/rJb+X4/mVLR+NeFPPWua3onZp1+Nrvvnp9E35vvIGFcoBK6AFJkQ3XiFxgPfeWURCKQ+Ht5/a5bJcvkuhBaCd4fXhQANcYnEYMA+YRKO5WTiUlN1ig6AQgrozUYQTobkb8haAMjSP8ABzNkIaU03cNEZs5Gt38OAV/dlOn97c3eTc+0J1SdDffXie605fTZynt29+cvn19612xoABTm3eDWMpRXH7v8HaewEk8onh2rW2SKhOK7ClXBTMonUhTAWAERlf+lTQrAF0DvJw4cwcYsZWO8KV8kKH705f/+wG4zb++vrhK06PLVqIgycIDhN0g1BAQzduRoAMpEaAgVv5ik1hDA4owaSwBRIdwNXVIlwwkAAo9/o5D2NOykTMsc1HX7Qg3uX8+2+H03fNIhmCaXeNplHN3TwQuxv9eEneiL8/FpZBBGKgaWVMiKwOtKk1rhHOyBAYAUw5NmkngpEjHBGYZbOlCLNtN3gE07Yl+tU6k4B6hCEAMWHA1psGG4zIAYnx5bavBIwIHs4ACGbeERiBA8jNN4ZyOCEiWiAgIqhZQDjGRrAOJEIJ97X1kkszR8ROYokeGz4uwY4fjYeUZVfYL+dBJoxYunvITdp/fT1R4YZ9DXegTb3YHQ/fN29JVuSrRQWuSidiUa2+rgHCGT0USYEqRhFGD2SylJRYoF9rRcyFQsNHFmJR9THRVoabHJdlpcwfaktSjswHIrVaIhJEBQfCRDDuxkvvwNxNzfpBUrirZAUoaENKl94Dqbr38B2QuwGye2hEM2fYAOYkSMIcdRs+Ujhs3OgI9EBzIBbcWuURCWn7uMy24xxuNh3fnnUQiNvzycPNIfylPYwxbPto44XtSeBASLDVDL9YkTYs+guKCIMiNunwR4vQxsiOl0b72IqJrSNygAMZoRC8FKLBC/V6M/ujO7xQz7eR7+bAg5co5r+TilgSESMCyUshXmtBCZA5HFRboMaLBcrVmrZnb2eKcKvugZLNSM1FJgN2V6Sw1l2r6+K2WD9Xm7uuS+iMC4yTR+/9sq7XMe+/fvfV0h6mUA777Z99/nxa5uvTv/jn/znUdb8/vn7zUzU4X9v+zasy3uQZv/r1r5u2AUklaW1QxnUxG8DA66URBaWsS13ciRMlRoe1VQzkzITILK22+Tov5ysE1GVRszFnU9jqupio9XntpqoP9/cV95mBmJtjrI0RtVZkGMa0RU7NwYncmIJVFcEJOnIAMVDmsg/k83wZxqTzvndNO+q1SozdsSsIFwa+aq0Q01T2aSQqiQw8wmGSm8PhZplrGuA47c/LWc08nCiVIgqeGW6OZTUHtA/nRzM7PV4Eabe7efrw/X5/FML3P3z4+ptv3ry5uZ6eSxm9wbyu+9c3CfP53TNEm/bFmk47YWhfff3rv/zyu1NX7d2Bg3CY6NWrwU6tP9dAB4ENuIWEhbh3g/BebX/IN3eDQ4z7RByIsFxbTrDfQauVCQIjsXqgawcOIXCHtcY821AoLPpKON68+em4XDrvUqay9ghRZJjP11sqhyGPCRMSg7u6EyRiIlY3jHhclBDGLJlx2A1M+5EQke3c+vM6pT1a4yyAnRAhwuzfy2ICPDxfhzz95PNP5i8ePnz7fn/zZm7Lq49eXWrX6j0qYmnVqzkTU6Rrj8j71t8PU5ahnCz9V//47/zFf/GPFwwVnK/LDrgkGvN0gSZMBG7NA/zN/qinNX30kV7h0zc3H37os8GYSAAv8/XVzatcxtaH58fr/+f/9t1h2t3e7K6nxoruvRSiwr1rr5GHw1dfXjkTS/K5egRRMBAgJk7VLABEJAi3PSw8iIBeuhJMtwAzbKfZMHd1Y8wezCw93N0tQoB6QGZ5erikobz+7PYv/vkvEXbabXaXpcNaXfiX709RXv/Df/gf/V//9/+Pec619mE3mNr339+/+dndfX3AAQPwObzc7B/v63Vur47HzQD57Yfr3VD+2S++pzK0FjLkpv5Y9eO7ou16c3tsHQxo63I8Hnb1fGFAZvYe2lsizsNBpIRVYFb3N3fHxx+er48XrTWlKOPgiE/3c4B5vSBzMAODo+13ZciTTvT0+CQo/drHMXeAMNiS4d
f56hgf3j9jGhZhj9LWyjlKkaXWtfaRxE13qShqYOpa+3I1rYQRhih4uq4oEJKYIADDHLz/5ONPLqcfQnsaxv3u1nqb+1PKDA6tKSE79FTIgNZ5LjAdd7cP62nE0siH3bTP02W+pEHyze33777nBo50fP3RQ205yfn56bnNcjs+/7CkYYKBXh/Hy3x/vfS7u9vL9x8+/+zGw64P968Oh93N+PiofV1/XAe0oavCfOvLDEDc/IPhP5aDUWwVsu4BEbTdQAMIwDHAAdACwg2BYjtnBZgZEQYhEu2PYzxfc0nTlO4f2/H1jaoyb62gYKY5ccoEzsNQSPi6rpmxrX0YaBjysixm0c0QOZfi2pnAwwHCvbeGnNmVXt4w3KCv7ULeKe/AQdUdTDUQsUxja2rIEW3tNeVxWS9NO6dhPJC2ioRlV8Z1f57Pu8NtLsf3y6OElWn34fnbj3/607q21hox9LaWMqg6kBCxhq3era+HaVqWuek87g4GZg5M8jLGgUgpdTcpIzADwryeS8mn5+ebu9tam3UsaQd41ViRIeXicxynm+d+QeZeKwEBt7VdCifViKD9brp/erq7/VjJG8yDW6tuLmmU+8u7N8dPL+erghIEuZk2BFq7Jiki2d3KmC7nK0kCdwLOOSHA/cNpTKIRg0ygAMAIjBAQZN3rcrEhp0GG3S6gD7lYQ9NoraF3DCrj8NGnH/3w7dfuWogjiCJe305jEibhhHXtKOygaFCkhPdP7j7CaBSA6pd5EadxSt146daeny/X2cP7UsMcAJ/mZSr5ILIu9fruvqnf3BwvFzpMw/dP96fn89uPb94/L8j+O5+/1cva11aIhzG9vn31N19/GcAYDg7j/qX7qTlcFn+a/bnhCX1gMmnevYXmxIecGGNtV5BXkt8MwzEPR8bQy/ekAcBnfdd6PEY+jOmWZSqHEe9aNMOn6/Vp7XUoa5Ic5+chj5ZEVxHZL225tv7cVNVfH4a9RKK8QiEecvIs4hCccJBhNnJb9jc3eZ9bqxo55Xr7enw+4yiJzSDTQgFUKDGmofterTWg67o2heYbXoCIPAI6UEIgIEojAHa0jj3QyCwjJMTAdZ55JRxYuOQagNQS0dou7OjQeVemT4/2b997s4zZVBkFvfs8X755R7dl+Ond+6bfPZ/2TfbDzuoFe2OnsUzLrDmnxLgsV3dCa2amgMrp9f64A54oHOTSNMiFWbq9KsUzrEDuWhJYOHVdQevlnc1Ps2mRKXEmKsIpHKax7DQ42tL1arTILUoQlcQAvSegHQ0S4OGrrmbVzd3JSbRqAaxnBy8iwMkjMIA1jdNhBxTYtXB2Nwtq4O7h2lUX1Ia90Tp7KFAyzjjuaZiAMrmhA7FYcNNKgJvLn0nC1eqq7aKXE1KiwmatPn+fr89era2OeQ+S07jjPPbaNKBqMIl63xGp9TALYIfkaBaakjC5mmVkQE1cMMy0u17Z3V23a48juhsRN4sIOuyGl0tyYJjrMuchobv31dSURYZBLaxrqG8dBOAuJIDGmSUXHibIiTGg1wgz7+AMoRohxGEzEiFRYtRo7qZeCXOYuXsPcmBVB0Co5t17F8CGlDU4pZwLI2FQaHQEIEiB4GopkSAH2UttD1KAbdxZAkbUIfzNIN/Wbk6IggAaVCHcMZxST2NNt7IDVR6kiTkjSGIrg3q868B2+OhgfFldIQlmcm8kY8N1Zl3L0CCcPA+7PQ528fnhewYElsgCaXTg2XpSDcZL7QnGxXQamBHBQM0SSbcWtuFKwhkRQLVvQAFEAhAHACE3MoRIbEWe0KdXB1CXlX/+87/zV99/KXd3Ln4Gj9MTns8XbecWl7pcxC7v59bPw9prqwMPDI4QDILB7oy83WQxp4Rmg/DEOFGoWZ6SAwAmdEzkATUQFDkhMASYMphZJyCDpAGMDAFqwcS9GwIRQe0LlRSZ3ou+G+hZ3RklOmFI6wiAyA6oAIgvPhmIH9M9GEDMBBnDX9rpt88Ng0g9DCwggLIhFo/9uv70+NqDgiQiCODHkigiJAL/vTc3v/j+4b2LEhICcPRq1E3Vnch9+/MDbGCXl6hm/AgdRgLe0mFItIFjtv6G7fRCTO6GREAYurWev5SjWxgjmW/9rgBI8aIP0salBtpSSGEe7p1RAAJ4S/2DQ3jYxqgORACK0BejGYp7AEZAOLxk3yA2vHK8SAfmwAhEKSfCToxNdRz3Ndy6mrsFdIdzjfvzMhRKGU913QkuvWbGbi1RUoaVcYkeoebGiNUUXBCcKBZVlTzPnUMBwUjO1g05S+JKaAgQjlopWcARMHsA0qLmPC2uOawwR9COcd2qMcCY8bLUaRraGskj7dNlWRIkjGhmBp6FGL236hiO4eSr++KWkAbmDMCIMhRVvarrsuAYa1fLOUt2jUHGpV2GIZu5EBaRaJpSSoTXroQabq2tHnv36F1TKgFAzOEeAYy0+b8MX1BTsDGtEQAZgdwVCTdpxlx/E/4ionBQ7fHCw9qmsC8/bK4RjhsoHclDEQQA4ccWPCKKF6Y1IJFvAcPYlKAAf5n6/iaS5hBuiohEsi2r7RRq7kQMAeYuJO62nfLj38N1/egqSiycIsJbJ2JDFhkTCyIxcaBX7w5E3lu/tHqy5aEuz4mpdwVMnIoaEWQjtSRMZtp7u7q21mvtV7e5+lNwDyQplhERht57EOX9rrf1+/svj/fPrv100kz7mHicds6+uzmktAuQrnW+nM+ni2lPKTlgY6rtHIhOz9ZRLthVuSRkkSEB5iQoshPJaleESJKk0Pl8Wq5mpoSJib2rGqSEZWLgFIqY0pDH83qecknlNjyfnheHXiiGKedd1qV5SpSEiBNLuIfrOOy8Y3R1b2a6qvdlNUfwvs6X/f42D1mgAds6r2bcg019FB4O7Gp6XYn8dsjuNHI5XZ4B293+FXW+nD48n+9vdnvXWGBlxsOxJJTw8sO7e0ysAZfLOY/FWmuxjlPOo8ynU+vt19/+6uZ4l/aHus7H2x2kkKlMUykYH6VBxt3z8/Obz3i+rh/ef/fDhw9fvXt/er4004uGZorkYK0INcCnea0tmEIIzEmblpLCoXYDRFAgZlV/fJhZ8HK+IjkhIVDPASHmJszA4eE5EUT0rplJq3tjU1zdcBMxeofKKaUMI/fA3oIFxnGbtySBLDxNCUwzChCRZEIHJEHksAjvNRytuiZyGWSUUeGCKEun1etOtsyyJ8kRHuC/qQME47uPbz67uU3T/v/+9T/+o5//ffP5V198nRMcMpBYvhvwqvOjcRKe+HmOf/k3f/nRJ8cvHk92r99eTr/6DH7nj3/2//y//MsBJucorwQizpcHM7VIJcunn35yPl3LTf7Z3/5b//pPf/X/+qfPQtPcA3dpaW2H9OrmNkF6flwwS+gwpNT6ch+zma6LZUqGmFKeq6FFTmncDWWQ3qBCCw+CkI1d7V0IN9i9mge82O0ZAPIGX4+thhOQcRtYQCBt/6C7AoSkCO83x1c/vDtTObTgoPTtaXGHcJPM89OShyLhZTfAtcJy/Yf/wT/6X/3P/rPdq7c3MoBwK
nRzm7DUP/6Hn3zx1fftdvi3/+axpF1gpIG6Lp+/vj0vi60hOX/z/jJM2aDud/tlXtfeLzN0gzENvS7L0pDQAJdLZSBTdcNpGEaO2nvX2Ilel0Xynhh3N3d5d/udzX/4+z85n5/Ozyc2m0oyykAxt85MMV8Ty6v93bLM2pbdoVzXFhRPdbmVW+FEERjYqw37Arm4YV0ipTztpqWruQWom/ZYV13APJFVXy1MJDESEE/jeK4nQj4cjkufdV2BWc12Od8c9qf1HICmvtQ5Qte+Bk05WLu1bjkPSNnquk8SddHGTLheG+fMMlrIuD8cbu/ePX3wCnrlYXp79/bzdX16tafraX56fjre6C4LCxlPq+JynTMcv//wYBjVW+F09/r1F7/4FhKXIS1LfdkSGMG9JIntVsTMhADg6kBEtJ3UtnHZBnVECKDAbYyAuNW8RoS/zOPCMDAAkBAAXQMJXP3uOID75TIDUUnJPRDQ2lLKKImX1qwbhO+EQ3A1L3lqa+fA3hugIyR0FCJGZnKPjoDjuHt4+jBlDjNEcPMITUlg2IXF9fI8HfJYpqaBGLXOSQSRSsqXc5WEpoukPE1DX87InlLmJG2ZOcXrj9/+4i9+Ef44TpNRX84rVIbBe3MmVgSR7ARCpcNlXddhBDfgrWghyjBCXxUNsVV0tB6cMIlE2KqzmzKPKY3dlpQHJV/W6+3wafRVIM3zVesSbq7o0Zdeb44fffvND5jyRImZLUyEM+8i6tLr03VOsavnpytcX93d9tqGUrzydekf3709X87XeknD2My9NsDkGNPxAJ7Gkr759utyGDmVIDmtp8+OH/dYLtdnmYqq9d7HCMSobTVILImDZCgA7fp8uc3H1gwlleGmoTrUAL97fXMsw4fHpw/vv19rG4fp7ubNDw/vel2mIevae/jrNzeQ0zyvbnQoe9Zoqk543O0qugHc3I3rsrTrPCLOp2evbb+/HThyOA9jd/PAcbpZ+nPPaW0oQgA0lJLyqG6uvi4dNDLy9XJF7R999PH5NNfevv7w3gFLkeNu/N6ehmH4jbfOPCBlkKFZ3M++qELioeyqY1UIt85M2ScpN3lUhI6cypB8iHKg+ughszuaJuYsqdXazcKtRayY5tYl1hlZYjVwAG9rW1p/330VT6zipxtIpXy8Djc9T3cHmdCx32MsmadnkCQi5uull2BVxkhjpjjASOhNkViTUNmX4UDGEEzeXZvWmcNcr4Z5lFSYMjEPt4fjmAPrdV7aXFPw5HtdE8YU/e2nJR0xoJZUAtoJ+6Ut2a6HlI85h5MFX0+1J/Jp7M8rumk3KJIHMbXTD/f65jjsBaZarH8yLT5M6rqoKkIaEufkQNVgWXoz7u3KJbPiqze7w5DfFhrcv/nwfLnWaAeK3ViOW+Eo000SCotZq8YMEMQLuj0vixfjbPsJBrQSdfDqGrXqeb1cvF4EiXgkRO+7ccpwFNuxtWYW0DniwBkTnteFNWRd7Ay7TvOih900pxgPt368GfdjAEKrGcKooUeoq3v0pfbVdUVtyVpEKItxkiTYwjgKFUBCYoKSQQpKrytjeGj16L3ZfKG+eH+Y71ewDvO8XNdEk9rIMkI+UJ6kDI7JPYH2ZEZ6dVjE2YBbax0l5ZuAcAj3a3CkxD3EiVM4hSK4elMNJQx3CaCuPCRIGVgQXlxFpsDhIqR9RWttmXvrZiTyYxgZNsGRDruD+RTAXJKMgwdFbwwNMCyQAtysttU8DLWgRRkCxIAhOEiAS0dGe3F49IBuwYSkBACLqkhhoZIH4cQipbCwovVSpLeK2rg/Al1J3EN/BBhbBGy5HkRipiK8z/Ca+IdznyNnFAbWvqLbJJkNtEtTS4OtMrd9tmGqocWUp54t5NyPKUu6O/M3SrimtCZ0aor9QrhSsmjnqO4pK0g/2LxzPwZ91fRaC6JjQZowpDBjDFn2x7yuMwDnrXiRkIMCIBwdtpw0bd3VCTEwLEK7MUsYcGZzqquV28N1XjtEUrl+/yEPh+l49/FP7+b795PW5cMHTHG9rDYApJVBzg9zBKDhouuYKAJyEmFcTVU7JUglvWzb7mgGHYSIGZAFgFQNyBGRmIRlw/EKUbhh6BaEcQTkvF13kQQDQyEhIs8s/dkuX3W7rxToBADqCYNBIhIAEziRbVEbhjDHlzAlUCAIhQQGMFLS7Yaxgb2ICcIcnShIcoefUp6YXBJCIrCXeA3BhuJFhNsx/c4r+eGby9InXNeCkZBJoAi3iG74I7YONlaxu7+kydwcAcDhBTMciNuJx7cKc3dj2rzUAIFM4hYRAIQRQMgI6OYbogUAmWRjuwOCWQ93YTZ3odTdNt/QhpSgFzIxIQQjmxthMJLHJp4BAuELv+alTsRgCwxsObiQJBrWe0fEJFLP13fr+vHrWxGuy4IUTEAW7v5qLPscNwO8b21e7G53vL9cmgGQVeHniM8IwXXiBIApsCrkEAZ5qhc+3hBTTjx3m8NNvWBBSoACAGiWGRe1nIpac0JnDmBFqK2RwEBorTtSNQSipXYzv0mpORGBcHQKRaKIXNJc9ck6YgBxzmkBPAMCwGo9Zwlza5YZzNBjJcGztt2QL6ZpGrR29EjgtV+RXHK+XuYNyK8WzDZ3pcQGEIRdnSWrwjAImIf5pr+5u2EAApGEbzk+3nQ89wDYSmDF1JCAiN0DgZkw3APDw5kkwgGQSNw9torqzddDsqGnAwGdNyUowiN44085uJsRJzOPcOYUEWaKSES0ZYsRgYg2WvZGzrYNmg7kgREOERbb+mCz8O1L6z9alv59qQiRPNzcEdG6h4FrWETO7O7dtHsYYavL+fnB9cnqB+8z+ourzjCB05hLktS7a6zhVvtpXefzdV7aZRg40joUqbNHENUFUHblIKJrvagCwrysz+X46vDmc3JykHEccIzu2q4VUEFQxgEjpAwYwYxJEg7oyCXT9XoO991xn4eh90DKDgBeTdXUAb32C6PYRbvVYdxnOkSkaTfW6zWo7O5G7XO9tjCg4OpnNfO1OmQm3+8ykWCAu7brzMRM1nsfdhMidg2MaHWuS13nExC1tmSZwiPnbBGcUkCYtrqcta3zec5pN4675gbWu/a6NOJcl4WRAeSs1ZTTML4/P0PXqmoBcBulwHI5SRl6xavW86mezufTOodrSXgbo0RBbIpHB3paH7784YvDmxt2+nu/80eX989a6+Pzt+NhWK/fn86nxPb0fDk9PSlDrf279+dv7+cW3N1EwMDLCJg0cUiC1lQD0q4MNakZOuQsrmaBJJgEzSFlMdUI9CC1AMScE4AT4nKxYScisrQGIJfm6DRbJovlAtoDhXPmUhjI2+Xx21//y5ub1/1wmwy793meH796z4EpUZgrdFc6lDQwXXoVTGDKiXLKqLGqSSLv4aQoaVWFVqd9ASfJO4Sb5iEIiOyBmy+C5WUxIJX5Wf7iiz//r//3/6OHh599uH4vQUykAX/xy28++vTjL38492ppSr3W/uTjjv7uP/pssnT614+w1t8+To+//OYEMKadd+DMvevdcXh8uh92Zb66a1xO17q6
dvzTf/Y3Aww/+8mbX/zlt3knueDiDsgRCEzN3chQ9NRpSqKt5/1wXueAePVqxwRmzg7Wz5S7o4PvWcS8BSEEqfeIgGAiIoDNV4GIBg7Ipg4R2za7nU8wABzIA93dNOfkvYKpO5SM467kwpCgzh3N3324ZpfIIkyWhQmYU0S/G+gf/2/+l7nnBLmdOiea9rmpf3hwRrw8Xb/7LtBbjh077o/77x4fMMmHp5PWXqt9/fXDMOVmuj/ultrm2nZDXk1Z5OHp3KpCYBAxk6Mfj8W6zL1182baWjUikVSGYRom19bb9cPzKkN5ffv2V7/880Ma8oBdwczVkTwgeh4Kefru2/e7XT6dzq9e3R1HmpcFiRFxbctAcLPftdo5l3fz/PlHn60P5wDwkObtfL4/HG5up9u1VkJYdXVTyWR1ERJE6NrrgruU1Hq7nFTr0/2HQoA55yE/Xq5L70MeJA/qDRnLUGTrf2DZIIfPy1Ov9bbsRJJ6DNNNqwpg2tswZu/9cv9+ff+eUajsxkzny3cUvjzq6bTejK9KwHRzvF7Xd+f72+H27e0Bw0Xsdto9vj/1Mhxvx8NdyUW6+rJcf7wkB4t0bRCAzL5Zny029uZG1tuARA7OQIgUsKlGHNYBwPyFCLnJQxiEiFuwHyCYycFJaD+keq0p5WHQrlWQPIJyNoDugcBbj61H9KaJs2MYuoX31pk50AOdhN16uBHh9mha13qYbokjQq2pu6MjIaFIydHXOcIARmIRKgiBxIRSJsawZV0kQc7Fupu3DOIQd6/eni9Pgvzpxz/79stft2a7w754mU/91cef/PDhw93dK0ps1pdlUVUqmMuAwG41YQ5O67oM4hjRtd8c92udFw0AsohwE0IDAjPtChi1XdM47Hc7YKIxi4TaCu7C5BCE8cmrN62fPALVoEgzE0qEtK6LgRpFdNtTTgK9hXej7fPTykLzdWspZSYqeajzEkjdbBxkWdeH6+X25sZJHVPrcHc4tH699utlue7KEGaI0XrbzGWE5IBvPv7sL3/xZx+/+mh480ZhRQoLv79c2CEP9Onbz7/95utzv5jquBsKDxDx4f5dYkLmIWciIpTosbZ6OB4uzxckzgSAEu4AsR/Th4eHw/FQjoe6rvPT5dNPXn373Yfe17tpF7OtS93vx97bfHo618vx5vjJm8P9uw+X81OtWut8mHZra02VKebLguRZ0vzDhzAUsHWpJbN1f7h/zkniN30fCHevjhfwu6TPs83a1MDNJ7etdMR1RZ+HUfYpiIhyotqktzBPlPe7uxpyrV3bMmPLDTNxh7i29Xm+OqXea+1L2S/720Jky+U9ucyrPF18vtirTOvan/JS5MzD4e3ruxHW3C/u1UOZ+54RoAO5wg6BnMUJmxsQBsR+n5iyydCBES0Q197Me7M1i5E6SxamQ+Zd5pu723L3+TDuotdH/2FhEIZbQ+319S3fjfzqo5wmigZkq6/XeVnAWhGt63OvyLORpMg3zDoe0vzlcyoljVlh4+DzclrxwzO93V8Q7rkzrGYjRiSZAjwAkzALpSyuPl/0aT6N++Nhp7ubmxv0Xtvj9fnD44fneR0OUJjRSyB2WxllyGXMTIEwpItWLsyaeofF7LosZ0mvSr5F7n5BvnMjC9cwcw91r57QpsQT77i86XrS5Qoobl26JLeh3vW1i5eOfkEA6cPNcT9NngbFNFAwOARFr9ZnABMNnWty59airczBsfqyYC5lP8J6IhyrRWOMYAsjIAo0awkQ3Luq9ghgdgdt/ekRmrZlwSBTooQsCXOS3S5zCvMIYOAAs/WaKQkQMqtWQhbKhrlR0bCUB0Zrppx3CdjmJyJBGdTBRYC4tu5qR5HaVbsVJH1pCYdcEgFITt0ahFoooCdJZubgQSZEFp544lTIEfPEQ2YBcwME7A2QVJs1cwAg0t4RLdAILFPqqmEAknwDXFelCEJioUhCiGDetTMiAG2jRNrsNQhqrj2wNWpXae+nYZGsiA5qG8AVt/gNATJiIJEw52TLW+kL2VNl8EyBo3fxGE2TQfQgSV7UeKWcNUwT8oidAF0yyPgwsyMe4nyjJ+51yM5hepV0Q5GR2siSJKsR8X4cXv2HP/n7f/Lnv+jYz/2S/OgY3n1PVEredZsyBQyoHt1MLboRgDB3dyY0swAiYMKAzQsA5uGAiCxAjMSXy3U43h4Ou3o6hbTbV+OXX37z/t2Xu2/3+93Rn843rw5Pp6f7pc45e3e0GupGkpimAbW2GsiJeMAEFNVKlr7UlMR9I6QAOoSHO4T3YAoE8IZ5dG2aTCihAW79FtHDVvCGmAMbGoQDOroGNE+8HD9a83E96eXbS1+IxTu78mbwBzZIEP4bieal0PsF5Q0OEBEFYkQEDUcACPNQgCAUJjMjAnMgwH2nn+4+BhoMHV8q6GNTNSOCAgAYyH7+Vv7ky8ennkdCNCOEknmQYW6LgK3df/TWvRiLfoOR3gpf6ceYzjZsDgR1RURkcoiNiehugBukGBDA3QkRiYG289GWI4NAMldG2Uwlmyjlm20EApHMtj+AuzsheQTSC1/IQyMcEGNzdkNA+EvtLBI4IJJteboNukRsjuva0G1IvKg9nOqbQyIk90APDKrm6kEQyfQtUh3HeW2Z+NSsZImIq+GltcM0VF/RIKGEBycEcOSEXQekLZOWABOjuT3E7BlWa0dMQ8hsoSM58bWbRnTciEucwDOxUiRAIcjCbkICuyTXVldTZPyggSk5ycPaHDCVIbuamlEsEUtAAmGWxBRm4ZZKXtclp3zMh9PqSK6GrWoSIcIIiCBHep6XhGDhANtAdHNpbZm/aGsTlt77OI7q3bfdazvhwsv0FBEDwt1i01uYzQxoG5yGICMK4kYnQ3cl4u1HkDDcAAW3TmGkcCPEF2sYRLgTvGj4EeiwWfoNabNPbt9ttHDfnJtb/hABMTwcDBwMgHDLpuFmRAsPJwzcsO0vjn5zd+ANasf//wG0CA+H0I4YhIpeCd10Viyupm7N3IiX66n36/X6rs/frvOFgJiyMHMaODwqEZOb67p269f5CQmaqXqA52G3D03Rq+kyJK59zpLn9ToNd69fH/70X/zjj+Pmj37/jznJbjxez/N6XViYI6SUMh1quwAiBAolU12X1hgAKKKdrlXDUhnQMawwgK1ri8rUVMFdASOVEqGt2+7w8fHwURi0ti7rAojDND09nUri/eEGEFjGto1wzXtVQO+qEU5B67USI4ggRMrFYS0JlmqJBMK6GvHAJaXxyCFtvSzrvL997ciFR0og1Bz20+Lr+Wp+xujWdG6ah2OET/mQtk4ZAQgaUqpz3e/22K2BPsxLQiQVXetV/XQ5o7uCUUnXGrP2p+8/YK1/6yefI8fpm2+5P95Zk+vT8vzu1+en83m5rE2pr7Wba1vmlEkyU8FfffNDSqklayXmpXJCT0Bq197IYiwgjMyRM15Oa6skSbpZERJOXXUc0gtRi6E39u4YBBZIiJBIrEdPjHNtrTVJzBvvPUgr2OJeERwJoHmvvd/elLXOjw9frdYerw/ZcX8z+BS9Xjx
AGDMLCUpiSsgiDN21Fqaw7mgJ/fYwEBM5rU7nS+3K756ffvrZ7W7H1+WSjjdIBUM33Z2IHOjHERoMd6+HV7/9/vv79rx+/vr2l3/yw9/6+U/PZQjKZu+Hsg9bhl0ZUupzB9Cqa3/o/+Ivvos2XOfOQwbiw20pd1O96CFzu57PgBE4ph3vGFrvzS6tvi43rcn1sc/wTBnRXTw9XSplqr2R9Nub/of/4Hc/uXv1v/vP/ot0cxTy6S6f1lO0Tja2ugin3vuQ5OO3080b+dOvT4I5hNzdw4mYSJoGukdESUm3HBGJWYiAayRBwvAAQQ7viBgRiRgAkDkqCCYCh4D39+effvzx37x775A++ejVd19UZl/W2hCdLJf08OEeGV6V4Xbc/dM//cvd3a51XJb2pkx/67df/eKvP/iVPlStsN8qglttS5sHKsc0Pp2fct49z/7mzavz5bqsxmDAeJ3XaVfyKFE9ENQNgMFjzGitjrJ/PF+amhkkUBABj95No93eyP35MWeazP/w7/3hv/6TfzKmXMZ0vT4zypSHIedLm1ssr49359UH43GaPuJX2vt6rQR4M0zQaptnmMZZKzNLKol3x9uP9GmJRfXcsMDAA3UIWpmdITORGROLJNHm4HbYHU/LZZrGZv35eQaK3bT7ob8HkJvjESJ2u0MoEubDdLjUDzkVCOpL44kz7Rlxrvf7YWIKZjJrlNBcCeVwfMVSlvpDu1wTS1j/6Ld2N8SPtf7089/55hd/fVpX9wVcqj1C9J98/mnotQeMEpm5t+X13Zuv352P84Gz9VrXbin/Zi8IQkRAU3N1QmIkp4Bw1a3SEIgQA803R7MiYgSQEJNEmIMR8panh22f2xSmiEAMhwB0VWv97jidz+shlyJpWVsSyiKtK4uYKTOpKUQ8n9aP7yZk5JyESRURgIi3W6ZHM3cRzoRJEB2WS50mDu9ZcjdCj7rOLJmzJLRaL0A+5DuKHFDrMpc8lJJVbbebWl3DkIy6Vxr6kFL1NYus1+Vwc/PZp5/9za/+4u0uj0OeBnj/3ffjmNvpcToeH06ny4fn129e5fTiN56X666gQFq7AqJ2VW29VsCNhml1vSRJSOLaMVMSvs51GEbtlllC13btvYcTqFsu0gPW6/nN/u3D6T6JOOJpaUj8en+j/T5CkWJKw7mdFCmnLJ5CY6lqGINI65rRESnnYt3WtrjZtN8TWFuvHrFG7PeH1h6v87Vgnsqu1gXNxzKOKS3a9+M4luPj9ansB5K0Vn18fv6D3/3d7778Fgu/+uhVWfvarbfoy0xBv/rqi+LYVt2/GhlJu25AqHEaalO0iMC823vrIw1gVhI9Pz5O+/0kdLg5PD+dW6xvX92dnp89QeueSzo/P6L3vcgo7ntxgePxFqzVD/e3+yM0eFo/DJkDuKRsgUjQrLpDqOUxz4rIKGFzq29ffYRDj16b2lDK+TS35SWGmZk60WEot30AsmWBnNNqXl2jhQMNRcAkl7cRo/XwuCx1Jr1gLBE28DG5pty1a4YhaW19qRit1QRCAdwNVkA9r8vTUFper4IFapmeW1RJQEuir87Pd6kcd7MtT57LfHrGzbsOa9c2yjSAMPTaFFOGYWrdNgjLNKScBkhTM+7uTuFWm1coblCJfELIyckux8PHn799K8c3AbSS74+7+VrpqqM3GuDNndwcUh7cbUaQ9bzEUtvjfWZprE7SO9oCBmuaBNzGG8lHqb1TBJcEHqpWhtwfTvR052yWx8vy5PlEQYf9gZjNBboJcyBY7ctJwcBwvt3TsJxozWvYh9OqTrvdXZ72JOhYL+7kzewkAMFCckzhRAa+Mvg07QIhKML9fD3t3HAixXbt89wvgpg1peCiKwE5Ow0V8wxsROCmEhGt6fKULIoNGLC2znl48/ZVP9y0NARlr0bavbdAMQSyFcNDV6oNDXLtOaLPa9erm4ZZD82cos88NEpNQRKncExB5ACQEB3BEptbI71YW5JHrR6RiHOAK0maiu0KTgWMB8DC1udz8i6JrXVDVmvNGgIRhkhySmGEoepAzBHsZhSo3ZMUzhLmzQxYGGuzqhq2rjEvrz5583JJBkBJiqgehDTtbiIQoJhBt6a65swWTiwOadjtqUzN3UNdu2C4Wa8tAiln8ECUgYp7B/EYC0gWxajd1MIVQtiYkJk5J6iBbhZMzMJcPMTcKWMqoL7WBVoP6dLm8ySn3TSLLcRAlAjBTYMcwpkJI8IMgwiwmyHEq4y6w+g1IkMwoQglUM3kyFQzQRoWvo0gAelxlUxlKPNlDYuYV37mzDDdlTNfr9BX7wIt4KNEheKBmRLU89yAponP//rXfzUcJcHOrd++uoOH98fd0NuqXSO0X5cUJkhNu5AEkBlYN9i6awndkYjUOrgBMQYQsSNXc3YH1SmnqM/j6/GLx/e76WZilrvX9u4RLh71fHp61lF+eJrvmyk6Ny3ImfgwpDCLUCd3TIsHm0eAIGjvskuOQYKAWIgIoaoNtkGPvYgARYcqoBYEFgxA7u6OZqYa7kAE6hAuMPSlIglDG4YF07s51u/bvChHwtBGAYxkCE4aGAgWYQEcHowIwYQgrhzgXCTw6F6oo5BCBIBunN4INyWQoAFBRo9PXSYsygxEAMibRLJxn5HQN7sNvTkMf/hb4/e/WlUB1AI0k2DgfuCVw+nfYa23ZnNw2/BZEU6UMBDCAhwJzTZWIyASemw0X0YiRLOtuQwCHF9QxC/pM9zSYi/d5rw5nhhJ3dABEc2NEF+iwg7mii8kYttI2x72okoBIKDHy0gPEMN1A2xvcHRGVu9mGkGmFgiJ4PO3r//q2++/O93vp49yKaLdIBwdM8/afzKMCL50WwGPmS9m4zAguCD2wJxFw1p4D5yEra0llVkbUm61qVuAIHFolMBzq7GfrHdEHEVqtYmTIZ9C3w7jpbbKSd0LYjePBI4BSO6aiEmgq4HrjkIT5GG8tjWCugYU9oAl0B2GxC0s3Pcopp0QMgEjkvA2/BHEsQN43A5sFhjYHIBERKC5mQunxFhY3YMAomqPSMLuHhHWK2JguIcjEAIRMoISprAOuPl9XiifiBTuAbhlZV90OveIurnrNz8mAIU7bKNWwDAP3NDj6IAR7upIvLn0ASE8aEMaxfadJAL2rVcZEAMiXFg2YdXdPF7A1kiMDoAb1twBNkuREiITb2M5xCASCGSCCIuAcMcfV8HL9cCtArDpalrDNbwDYgQs9ZpZADy0Xpelrtd5OTVbH54eqrYyZKg1akUSIXQ190g5QQ+A5JDDQ02m3e003E3Hj7RqGaHXr07zN7PCQC48usZBzB4e/uC/8t/I5RAYl/Pi7iQkRQgQiDVMUjazWnuLSGhE6BRIUVsv43AYx1J22isZruvZ2pVS3u+PZoEU5rqfXj08vnt1e8sy9OW8rEuEcSLVjoFvXn1i7ojmsWhUYHAN7XrY7wCtVrOgnEoqydxRspAvS91NN5yGwyTelAiKlbauHqjB4VyGw/HuzbA/IMT8eDnd39++HY9v3hxfffrLf/WvAnopUtt1HAuIt77uSrZu7g
Vd9jnr+Fisq0q/M0jjtEN+OC/dC96Punm/7Clm5YRqGEEdoUrYK3Oh+zTWFaNYBJRChAARCBRZophrGkFGHHg1b1pTGu6zRke36NaJnEvVkZZwcD6iUB6Ibpxba/2U33+4dfQpx/6xv3b2+A+pxX836adveCbj69+eorgWrevnpoofXJ1faqv3AYz862fV4lGY7TfXY+Hu93x+N4fFCrwPhwrB5sj2xqN3cEYlhv+OJ6NXTCJKWqB4twqbVZtVZr9YTAQWaLaY8AOoATkqomYdXWcUSADJL6Vcfr7fk5v9lNxWr40mEZRMwJOSl6oiRMbkEkC+TU1NRial6BOcBmD/SUpZ52zKXBFiYLAyIAYUyMSRDBm1sIW3hiAE+IbIabhE7BoI5OCQ2pMSCBMbXqCAbsnrh4eIR59EkQQr0FORIEAJMkWSgj1MwdgDM7sRtocUwsCdFqEtQepmMlF2Lu0hBsda5qEQ6JOfFSo3IHT8Ls4IgBzhJdz9as1kUU3gMCGTmzooNjcRCKuVUhv7zsDdp4vA8hjQ1qWNen5x9yOz6MDyliM2yPFVaEeZVB1R2G9dAi5mLYpJlcb/qLs9U3n53p7d1Pvnq4ua3PL6c/fPLs/qvP5NETczOs5+r7MmHkPtE4jaknFt6Pcz9kSfKwf/jbf/onb97eGyBa+Kyy7Y5lZnRKqc0+nHc5EGcoZs6JGf7u95/s3t1d8urd0/67v3/x7tXuJ7fzi2++eJjKcaB3r2c9zsD0+S/e3b9+eIiORGcmIhxLqzu934//7b//7R/9i99s03ZEfVMMgs5eXI8Orcz7Ztfb1XFnetcIJWeYxgpI4R6Os7ZY1iyIQEQmtUhMjuAWXU5zq0xk4QJLg3W4hZmLSHUHj5RFmyFSyim17vPXN8/OKCjfjiMwNdXmTRSB5O/+2R8+/b3+gyfbl7dTfLCJ1bV6un/7YBrHhjUgnHdV7u8eOsHNhl69fVUaMjF1aV/mv/zpu3/3z14QMQCmLhewfSlaysV6VYrnvis6Q4BwzggdCwmYlg7zxdlZKzcc6mCSZArvhk6bizD3fJyO+93bnB2RUzeYQykG3vr15SdPP6R6O+/3737562cfvLjBh3GajMAphoE/+9mPEKDvukP11HfUD9PDKFEurs+auSO1sKKNid7d3rx48Xy/v6+tEUpV71dPnl995+3+84WEPc2tmQPFYZxXA/W5u9m/hT6Ns/EI4BMhRGtGcSjHs+1autQmcxvysKYUZxcXh3nU6I53x67vNlfbWUsgMfrZ+ap5Gw/6y5e7zdXq40+vx1oU5M3bdxnh4vrZNGlVff3mdeKoTTFLB/2Xr242fPn8xdMvv3j7wUdP5W63O97rFB88Pzslyamv46CznT05L60+SRd7oCSdazAkU1t8NogZKAIBEM3c3QCQAE5VNQ8AcFx2S6aFPQuLP0MgnZwcwBepbI8AAYZgt3CD9WaNIloLIQkgAbk5QddaTTkDQKtjkqQLsqyGzoGuTQG4zPPQ9+rQTK0pd+v1ZoOB81jDbLXebrsNWgsWV3fEpSANgLHU21gwyNDy5uwPv/8n/+w/+79/9Hu/f/Zi8+b1uzKrOBHy5dXFw+tbUkKP/f399bPn3EktMe/Su6/uN9f29MV1aZ+WCr/3e3/wT/+LP5ezF5Ux1vHsm5/+0d/5/uuff6F9rtyXclfGu+k4ffLtb59tzjZXa7v13f3hglLf9W9u7wJtk1df3n55dna2vX72wx/+xX/6T//86mz18fMPvnhz+OXPvhh6vDg7rzV2x+Of/snv3zx7cvtwVB3t5QyJ7g77/mx1mA+ASqYUEZyQBNWm0jLReKxFndmY4eH+cLYahtXKoiJQTv3V9vyLl1+lobv+4Om71293h/lzuLk8P0vD+vPbVxfn55cXT968vSGJ7fZsvevUy8+//EpYZJ06SWdddz8fFXF3bJ2gWjO18+36q92DGMxzYcLV0J1vzrOk2nSzvby5u4uIvpNxbmVqr292pfmxzF21Y2k90n6cS9FXYN163Xfp7e3R3UYNgEZgItUZDsd5vV0NAx7KWJRWnc+zP6hyB4c2H7Uex4kRrp5s7ovf39z98e999K1vPesGf/NKxvG0GWz67Bql1pR56MUNDz63ClLRa2uljiPXwKNPOu9ScrKRGpKVVubWzCMXwA5QdTJKjb0R0mZzfrXtUX3c86G8flWmnWkhEBKEjGZjCfOivj9OIoHc80rIJRqWaSrNmvls6qYeFEGtLVXuWEwAAcjcAqdMGLXQkIycMlsmR8qdgPBEISgAlFjDY5vWiDaEgs6lHJq6COTM4ywuHXWXBUXNDZO2iom1oYZEXB0mDMKs0FCYkEUIdciCGArogaJAjZAJwzyDAdL5BQ2l3FavgQ5tbnlgIVlvV1JBAatrEUdAFjZg5M5y34Bb6BzOauQRjigdYYeUQ7YKZB48q+noMZM4UYdo4WVRgEAAbzMgC0RKAR4tg3IycDIFt8Uqpizia8IIaOCJGQK9ttKcCJkHBdDWUC0UrChichdAH1gOD28iLKeuaIAkEqlq3jAaAg/crwygzg0TiINZ5cwIjiycBmuBSDY2jAZaKMAdAYg8dJ77VYrmTrYU1gRzgHMgG4VHq202L12uOQMimzNhR5IAPUywEWYAR0bi3hI6IXZnRIgRzjBqSZDZtGljIs1mCo5YnWQ+8Sm69ZoIszA0u7l5c/v2zbi7yYLrPkGdSTAHTfc3tTnLSjZb1q1h4pAePPqVXr0Q1Sizz6PNBRnX5xvJfSBGm4TYw8HDwDBKm6cUiNQ8p8mjDZv+OU/ziGBNteMslNQW5RcEQFNnQgQQIKalahoEweAEkZKs+jzkjhBqq3fT7dSaNXfzgJq7PqpgGIIAYK4zm/Z4Nj7I25t92Ugkztv8+f3BU+Xren6ZdnNcHXU9AdmWA5EGNIfqhKjYThydfF1oVDD3YTeahdNqO2Oqvr6dp7WsSq3YIZiROwcFkrlDRKKUKTUjNwvO1R0VCXhSKqE1pdqtWrqEucvKqwzhERpmqUvX6EKqK3QOIjs0PWAnMtRbvRkwrRtuNZ/fdwLdQ+4bY7/pj+mh8X2WLL52S4SHYHcqltkThicAKgnnqMDkFTMyKYgDIImn7OuBul3oHDMxMYjpgdgYwVE5SFBOUoaYCUSjJSEAC20eaJhaq4zAPgESGwGkHhJoRXBCcw81bOHC0oCICZnRUIA3IWvLEBgUfcrAwELRGkMAklEUcNBGY8mW2BAwYTBWStHjbDl1DQAw+wK54ELeE2q6SLqESKh2soow9ZNiVwAicAC4KyECLI5UC0hkhBEQBAKL6VSccnQ/ZfUIQBAKCOEBGBiw4JsBQYgeDkAOjgi4EDqAENkjCGkRSQo/6ROfQKXFyw3AWiViDCDAcEckAIKT3swCOgACIIhaiRPzxSGiNaiRRCIlyznPql3Oh6k8OR8Sc4TPpsp+3vc5ydQckxzK3CVsXi0y5NRm7QCqBTq4ebSGEEY0mkkEkbSqkmRpyBARRDf3zESEgWxoxS3nPAYawlqkx5XqkTNBQ4iopkZ
YLDhJ0+aCDmHMozouISRyIDp48yAiR2/h4ZZTCrfGIMx1VgxH4bm1dc7TXBCc1FLumNOxVsnrZovcEmmbEkpzTJL245GIfZEwJwYIIiJkD0IUhAjH0EAQd0tpIaeCuyMuoCEQLdVJROCFl+buxLxIeZ5QQ0AzCwIAOsk2ACEKQXgs9aBT3xkCQiAs7wsAYDclIlcDRGa2psyMAO5+0jePRXyImQgizAIXtWqgMCfiRYFhidsxlj9Lx5myJAgHcHgPOP22AQ0ciRHTMAgiJT7v+yeDbDiIEZgAkQJCtbrO5XAfNnUCPafpsAsBdVsa61oLFJq9RYQkUjc17Vcd7LDOGkX7Tl5cX2zXOYFPZZoO+6bKCHNrsF6vNsNq3T1/uv383cN+qje3Dw9PLpOkfjDpYzrOQtVcd+Phbnd3PB76VfcwNXemdNF11z/5+W9ud7/WqiAE1tyVI2SRLSPAcCASAaZIKSACE6QEnOnivMtCBNiKtgA3n2oBCwGUlBPG0q8TtYEyEYkQIQRyFgYwFgGvCJG7lHKfUBjRWwujnBggpqYA2ElapdQWqSoARhJCdyekoe+CYEmv1NGqs2AZ21IUBDBmDAwKSF0KBA6QcDEXjp4lkASJW9vwamxu4VZLklgNXY0mSQC5hCEZmAEZMyJBFhgArVkwMvpC7kEmMxNiZFmkDgICSRjDa/TMDDCjz2Y5oQhkQQ+H4NqIOZdpAmRhwMUYCQK8MUYsUKw7IeaEq9UQ0URQBYWpOUYQhDMAupsaAiYmQqcEXaZMENWs6MObd5cX0a9WrRkhKXUV8pr8YW7rriPKx30lxE543k/9sJI8QPDTVT4/61bbzsbdr7/44su7+0/++PvF5D/5v/6fEfxmPm0JEDEdS04SQH3Xk8g4HglRyVgSsnSDg0VyRTJm9jCtjQA4QFog8OFhr8XcYr3uEfHdm92L6yeffvvjP/nj7//v/o//lz+5TPrwANf8B9/+7o//6qevv7o/3Nm3v3HuWY5t/t/8r/7H//v/w//zjcGLj4f9OP3+H1x99m++GlawuR6ututYnd3MZb3Cj59tf/HrN/fzw0ffunr5ao7ZGaLPNI/T3AKRiNAdm2qSjI99zsIYqrgoqC101JNuXoQ7CuGy+nICr8QS6lqtg7BoV5vtzf2+56CQl69ucpLWIiXOXdq9O3z8jTNT6Ka9FjxfbyV1f/qP//Hf/tM/+H/8k9U0zki8Wg1EMc328uZwMZxja88uNs/+wbPPPx9vjg9zmzuP3UF/9Ndffvz07DCGWs0d2VSZsO8Hrcc+c5fleKilOQtpNYToRbabbn/cdZ20WkvTzrK7J6a+k+M011q3696CU09AfJinaJpYuqFDry9/9Rfz/m6zGtbf/Pj25sHNd1PpJKHN51eDVhPunjy5Oh5eWy3QpE0FAUMCA6IqD8EZo/lAoGXU1rbr7M2wxmrY7MYv2/FBCCgnv1PB4IRIUOc5MyF4z93mcnu4v5/m8exs2/dsXq/P1+NUDOzp+UVKaWpt3YtP+7ev3j19+vTDDz74+U9/MWxXF+eb/d1eUgo9AoaIcF59/I1vXub6y3/x400e1sycqI77cS6XV9fTlDqRvk/jNJ5lKVPp2Z3a+mJ1fzh0Q48GyJS7016gzVqD8Xi/OluvV09N1cHVbSlQ8GKcQ+wRARyxLBHgHiyMiG4KiMynbrQItJMbCCIiEuNi34C0VNUQUD2IGZG7XIeBzaa+T7EQTllIHACHVaehs9niCJqkMzWPQMmceAGhJLO6BsE8V3cNx2Gzbk1TolCSTAAKbB88u6YEIBCOCIEEhMTCbAnc1BuhQERp1uXVn/zdP/vBv/0393er7WY13t9fnWf3kXDTd2ygT55c3N1DHIsdy1TLN37vD15/9eXu7e758+tPPrh89eWb1sU/+O/+2Q9+/vOHe6ViXvxf/tN/ff/u7uxstfur8cPnl9uLs/X5+XE0iNbNeduvlCTlHiI2200p01THjz75cJzrT3/5cw863k9nq/Vf/+AX42zn58NUG7gJyc9+8kW4z8e5v9jc3e3m/dj1krr01cs3+4cHEUEPAq+lMmVyTtES2jwd78bj2SqheQ+xyenVzduzs00W0TI/7G4HoagtQw+GPUOC2I3HCPvm8w/e3h5e7259rkFe8PjiajOPc0dYDdebzcO7u8RjED0UvVhvEmhtGH1SagliyMT99uL8bDweORBNvRTaqDUl4nF/jFZLtQmdJcdcHu4P0nEgHA9js+DMtUzgJmCQcCoWCB89v7j54u5snZ9//LxA+O5Ym5FHyuxO73b7y2H1MI5kgKoff/L0eJj2D3MGfvv27cj0/On26ZMryAP8638FAD5XEcbWYNZmcNRG2Np0XKExokgCaDQVarqRvmmTlFEwTBMogeU8oMBcJsqduXOXoB+s7wzi/uZhevPGD14fgqF3My+11QojqWurStKFMFLOvXRDBmvzux0KGgEREgWeOB+IHh1KVQMQ4CCUZkEdimBGyllMkAQDnAgShZZSCWdwCRWhVe4RpoCY5qpa54a1OgYJUk5SQ4CYKQWHGmZhJo0a0RogAVCEEyIjLCbHK+FB2EEUEYE4IAkZQaAjEfQpd70/zGX3LmokwSjVKc83ezIc8qaic2JgcNckwogoCQOtlojQUtE8E4cbgGeBbZclkTvUOmsdEWqWxcKmAngA1AB3AHdtrTUT4EINUpqaedchCdaaw5NwBAoldPdmKScCbhoMyE4YZkUDwRU6YkADd1Oz0GJGwAqFc2+mU2nIAojqxoTcC3VZVv3stdWorXFxA+dQShFg3TCMU6HozGAuR0E0MM5dls7cqjkARmsQbar3GbVPvY77XgazFgpqqgjQ99BhGjoIZHXSAF20TQIsiBBQtKlIIs7BQMhaa0ICAA4OR2hBi/YGEAI2BZfOuxPDtO9Y1caHu7vb2/vbt22cEgZLUm2m6Xic5lIJk1ug+eTaVLFfmSkTXnQpzlauAdpN90YEJD2zkAweQJ7DapunpoWZCanV4h4cXhs7gwGESB46sJaYbZ6B0N2ZyMyYUBJBLI7w7hqMyBxdwqHvh65jQTV7KPvd8VisqZubAkDKGZD31vrt1fF4Z9o6gPPcQ50Jru5u76ozDPzxB5ubuG1Rx+Srp/2rV/f3OgJecsZL3qQ6M2RyAEEmIBbFllN/L3hXbxvpkAfuwHIrATbpPDUAcm0JAaGpVUHuWAxJeZF6YQ0NQuEEIICIgqWVzOzQKfdjdLL5Vs5d3s99uE1Ts6nj3q3LvEVpXShqzSjDaqMCM68u8qqHshlvP9L+07Pv3Ppxvd18Me8h+SQTcoKgBEm9Y3AKR2EIV8QkpGCQMTshUpc4HLgTD8MAFmFE08iSG06gLXWSOEeQIMHSgW4NgRnQVMPNsBKfBD8QkcA6RCKkoFYLhzAxRrgbUUhO4eQWDOwBDAgeiJDcuqKXRhd5gzPkbq0RHAGYENhRkmQXLKRMqGoasOKhjcfQaqU5CGfRCCcHVDdHYljMr7QRGCdyirnUxKlNM3U58ylLxpPR+NJFhh6OsRS7Al
AWtH5pKQo4eVotbwAIBFp0iBF5aVNbFGIW2eOTUzQY4mKABgBIJ42YcFMPf7SyWuInRkQzV9NwR+T3skSwNDKeYnpbcCJCMm1Ltc79BMyKkLn/5uXtXIkB2lyeP7uEiFbNS4HaOohBmNqMtYHqeKwtIbKDKTMgYXEnggTQi7SmHCBIFdCIwyMxoRmBC5hAKELVhh5dQgboIKAWjugD15mRaO/RRcs2i7Y+JyYECyJCDIbgCAjIKRlCMRVOqotAtBGxAGYmCCcnYpSU1I0wTM0xOqIGMAy91lKtCUSXpVYFgjGo9Fe7tG0+raH0qE7Mkuex5g6HfjVPR/DGwgFBEZmxmkdrYAa4WN4vXRrsprAwSPxRqBPisTiKizoVxOKftygFLZLVgYjMvPjrLZrpsTQHLkShcGQhIvNHD/vlIQkAMGaKCGIiZEAgYgg0VwAg5kWW9uTYtzw6i+vMgjMxL/ymhUYVAItM0kJNikA3NzMiICKPv9mA1hQw9xvmYrWCdXl7tro0dRlYmGqZEQ3RwY5a9wEHgsnbodgEiA3QAVprbobMzFSbqtniiiyBppA496nvcyq7aQBGs9qaNytjTV1CgHANotZcDrrloTfPDtzRy3d3/dlapzjsd13HOePD3cN+nkKAhJBRzTXw89e3h4MeJz0WI+FanImEO7fWAsy9Yw5XBmyBNcAZc8ddLw4+rAfIMlfnnilJJj4e5yAmDmghKRn54VBq81WHkjGlEBERUQ9CYGIArMVzNwh3Maknw5BwSp0cqxNAOKrhPCtTSX0izqotEDkAg5fDIBCzIeViungNOWJd5HfV05CIgpMgclCUVhaSGCAiMRG7KnN2QhA8jm3bdbmLROTQJRYNoMC5muQkTGEGQMzo5M5ECLlLWgoiorAhFg/0YEqIFgCC5ACSrDY1gwbBidSNKMY2YUrUo5kBcBrWjOlou2k+ZknL3hAAKQmiM5NFIKK6E4kjqSoLg0Fp5kGltAElIlDYACihCK1WKWXJaXU4lsTepmm97twjWGjYPnn+/KrfsMf11RMt6kgZAZv6fLi83ECZNwOtJd2+fPn2i+OvfvHlrgBdfLj5+Pt/8Rc/vGC4ubmH/oQU7cdpGNabrYzHslrRzd3t1ZPn724eUjdcnl9/9dVLxxh3RaiPaIYUQd68VU2Zcx50nqsZMQEsvFNylh/96uHX3e766vU/+ve+9/mvX7fDPT/96J/8p3/xybPz73z0idVffu+75//8s4f/8i9v/+TDL/+X//P/6X/0f/s//fDLd2D9j355pIuN1HZ4c/Od3zv/8hZX3/jo3/7wr7RtMKKv/jwPI7e74+F73/vk9tV9e5ioy1MxckCAJEzMrg0B3cMdMJAAiFgtECUCmBiBWAiAmpah7wSiqCZM1ioAGuL52YoSnW36u/0+Kz959uzlz1/PFrjqdY5uWLXJv/vJRw+/+rz75N/Z3aU3rXzw5S/urjvQ47c/OdtPON/Ufrvd3799mH3VX4rWdy9f/zf/ne//q7/8MufV/cPewb/4vOpxOrs+f/nunlf5arOph2ilgrsT3OzuLy8unUhB+zwgQbhmyYmHUo5qRhApcWulVgthFFoPnRCkxPvdDJgwswM6ERIlZrDIAgCpVDgcS07dvNsj4NXZ1X53e9blz798vVoN9/udI2iZE23TkL2quXuLq6uLu8P9/rjr80BI03HyWXnod+OMPAh08+F+verudrWMdSkzdN3ggGi2PTvbjwXUA0arY+KotSQhM9tsn1ffC+Bw8fzd23cdZzAExPUwhM53N2Nm1XF6U+ZO0ALK/pBTQkrj4eH1Lz57CFtJh8DFG5S4WnXV1UtZesOsRRsBsQ2Mb98+gFyhdCywG20u9duffng+nNKDtFptBadDqVU5iVrDcCY0d8SlZdoDXJf2W3AAYBFzd9fFsvXk9xlAQAiL8yvBIiMNsJiPYsRSqwFEdINwsNiuurub22G7ouC7/cicJXNYgwAhmuf55OPT2mJDa9o6yQ7o5gguEfNYTb0/y+OxhIerkxs1f3d/WH3jKQaVua3WnZU6j9r3GUIRwV09iIXcoU1FGNOqL6VUhbMPPvj4dvdv/uW/eP7xxx3gxx9e/+KXv5ydBETBm5KHeNFVJ+t+eHj7KlEe97vP/vLHz58/+da3n//mJ1988Pw6sc7faq9/+uuvPv/Jl1/d992A9JS6/s//+Q//9O/90Vd3n5v7H3zyyWc/+um3v/UxdSs7jB88e1GRrj764PCLn33z0+/89WefpZ7N3EojlLmFI9wdHvaHIyUCBSG2qXFC7zkNCdO6Fn379uFyu9qeb46HcXO2yYpO1KxxxqdPP7g/3DKvV8QAdbtZl8Pcd6tu2CqkxCln01aBPCF99dVv3Ly5mfcOjl7vHu7vdoec5GIzeNPWtE6VBZvjw2Hal2Jz7SVy3395e3h+Qb//wbOi0xe/ebU53z696lVnBfrp56+HPl2crx/uHi7Pr4C4H/o85DYdzjerM0mvxvnq4mLG2/Pnly/f3uQubc5jN5bdWH0uZ6snQ0fr83WpsJvH2X29GZ5u+3npxDrY8+szN8ssEeXZ9pwSFyuSuA+/f/eAQCK82qwgwlJaP/n4xz/9bJNPs+DyIk+H/crGuguUzjUHcoauH8C99nJGPk8Pdx5d69A7iW6AHpSmlLIfjkAIyA1gLFW63OVue341OvnuMI/H6TjbpJI36tDmJh1LtlJnTMKrPrphdXmBjDKkplaOEyNENRdihqBIIkw8ThMqVJscBVN2spRJUoKcmIlJ1A1T5j5HLaTG4RYQKc3Vho6bNYPiTvNclfNxVGB2ygHcQjitITJ5iGQGCmWzhjxAz1DngAgPQkZGlmzMCCFdhnBBJqbSCmdQtkBgYFnlSsFAw0cXh/tdK3sKXq0zIIa5TcWOXddnzlIFgCBEkJiJFybjPM9iHB6NSJlEaJWlE246H6rVMiJZiJcwB2ZEtZpTbxFO4W7NHBwIXKtioEs4sEYTJ5ZE5sxMBE2dmNCDwlM4BrXDnpr7PDNh1/WU+3BMXY8B0zzWVs3AEmYmyd2QoalNpabcAUOtJsOA28Q9hzYhaGUObcCgpeYkUSKAOEfTymFI7O7hAVrNjSUD03g4dNxiustYwmofDNiXNgMmJKaEfVxw4+YVKC/klEBv2jpetFBGBEjo6BU1mXJKfS+ZAA3AKRGRzjMDUEptnjgPGXkConxKkvc3r7X5w93tNI7c9Gy9QUFH6ATdzBDB0cCFuVt1xCKrZK7e5pTYjpOFa2thUUtDROlWsLQ3IFokYABwakvlFd0MsM3HeVYzBG+mZkBECK4GZl1iJjcEI1oQQ0lpobJaxNDJMHRJ2AJ3tY6HY1UrrbRFdBYdBABxjOpuxzoNwNhfQkT0cnu8O1PxcXrYH+1iddvu4DhDBzgeN24Xs+12dT1vpuPuTQVi60ikbRNJt94yhM1VeDVXeyhactp2GfRd9uyR+u789dsbbCkAx6pn65U5BrkF6EmnRgzcwhwpp87NqjZIEmCRMWWOBXhVo3b0CNp2435+ev3x8f6tVe8gX
6wvp/nYyo5YrrbP1XcTKGVwLWey/ubZNr/bTbxXnnPAleCxDlvc9tmt7M3vu/wkWian8Bw0qVVFAYbWWpfYo7m3oT9rEGbASJxyneachqvh42lszCOYBioxeFNiUUczHTiFm5kjsZkCpEBDRA8AMLOKIQGEIQGgYUKwRKpLGw17tNaABIQhrLa6QtmQ9C1a9RXnpejJSD32i4+YhWsrechBfLN7aNx5tBSOACLpWEvH6SQeZBMFRZ1PWA4BIrYyIXdEuZqteD3Ok3ePUBGB+8IZigAIdySJpTEVGZbVcFGTOfXW+8kEPcJcwwEQwT3cFmMpAFi6KNyUaVGzJg9fqCKmDYksghDdkZaKO8Sif6zaIAIAiZYKWTRtzCfrkIUCswC/ZraYZIU7ERnAolFTWj3MrVHGLPM8b4dMiMA8TnpscXF+nm9vEDlx1+fBHSkEGibueDEowX5qsyOZRwS7KRJl6twKIoQjsxCHNkBMqiWEMRBQLQgjJtXz7cq1RURzK6aOkqSHaeyJ+xDC5ubEHVkAguuyrGVHM8CxVEgrko65qdtiQ1VLY2ZBVrNAJA8hXktfbE5ECXhsddt3izRXHniS1dvhxUff+wdf/fKvpvHHZwzzOJ/1KzUkNit21COGAVKXkmpbnO5VtZktoBsQNVV3B5QF9FlAP8STRpZ7A8CloLJgQ4DobsvdXnAmRCRatDIjFqhw4RAhMIm7gS+4USBRQJipEC/ENUIKAFdFXqSOACKQKMIjHB6lq8yBiCKCCBFIrQJA0AIj4mK0Z2a0iNRCMIuqxVIShjBzfvSEPUFFwcySIEnb61xq8LQf70QyWqgaEdRWvU2uUxsfuuyuR627MNXWFu2Tvu8CjCHqVCIA+3zcjwl4WA1pk+/myeGulLLuu16wHo9MDBG5WyTeUNY9JVJ1Bz3fri4v17/+4kbCHx7qb35pT64uCAjIPfxwGIHhOM2Bfng4WNGxmMjMYZuLzaRVrUooI4IvvjqUic2NmQKhVe26RF2YWrgxRQpfCfcpEUFr6u7CmHihYGmEeUTKKUzXnW37EAzXAoIshB7enIWGvgM30Lnr1iljSkHiCLoecmuhakKotc6giYcgPjWwMueuTym5B2bSUs0bCyOjWaO8ZFIx9Fxb6zMBuKpKFmEBDKJwq2HBYX1O1tStZKazHJkbuAtnMyq1AlCpmjMSoJuFGqC3IFVvClnAWhAvj40BEmFgaCfLYvfIrgMgcGJcrbBZLFXNOkc3uDCHeSdRZp/nCcJTSgEQhBHQdxJuhMiIC/9MwQgxM/VdRgQg1ofJA/skVpUYMAAxrLWz8zVTzOPMm1WfktYyj8fpAZIkpY7XKytNVnh1dc6c+vXZbNER6mFcrXutY5vbfDj+6M3bu4e7mzdHTv36+kl3tfnxL3/er7pEdvf21YeffmOZBURk1VebvNkOWuqCjvZCqD7v7ym87Mbp/nj15OrN63cYzR36IVMSVy/z5GpCuN7081S96lwaQ6zOV1D1Bz/8xfe+9yyxfPTi/OF4eL2bL6434/jm7//Rsy9/9pf//r/3Dz/+4ON/8a8/e3f71dXG/lvfffqDv77Z7erTT5/93lXOpIdp+vKL4/1v3mVOD/elW/t6Iz/+wRfq3fTQPv/1mxWLqSto16U2V4RY1MSXCgbi0r8fAdHM3SInJiJTNdecMkYkDPYGLixpmkchAIo2zhgpCYxu81g0J0du3poGa+Kg9Xb19te3n350/eEzuVijRxWy+voH3//Of/8nX7x88fETh3Y8jO0w06yX3N1++e6bz5Mp/LP/4gfQ+O7ueHG9PTtLX/7ilZKYgzANiTgMw8/W3XiYrRpU4wAHBMBSWjd0fT+0sR58CgtG2KxX0zR1ORWdgNDMy2RdJ9vVMLYRmQ7HMYBWq6FOVQEA8O2b29Vm5S3mscIKRdiqPRwfur5fNLzmOvM4IuHzp1c3N3cecX62xcAyz/vjfi7zdru5vT1szzdZRFZS56ZTc4/VxVNXMrfx4WCgJNmmNo2TsAgLBHJgqDXQpsrCx93hyZPzT771zS+/eIuBwemrVy8vL843q/Obl69D4HAcxzn+7t/5/k/GY2k6Fbt+9hxa293ePfvgw3Gs81dvjfVBbX119nAz7g91WHXUdU9X13f3h1VmEd7tjh6wPVsFlvAoZR6Ppd/080G1+mE3d3BKkrcX59Ntgy2NdVarBDkNw/E4YeKoqqd6xOKsCRiAhNaaRxBzBCAhLo4PRLVVQFr0sBeTWTMneiytLT3Z5otvompzjDz0hqjh2jTCctfX4u6mptNU1utVEAFzUxMmElE3DwcCIbawVo+CjBQoXKcjBwSGRl0gbwSQJGZ6nMbdbr5+ctl1OSJCo6kuARwIRZh6RcGqLVQ+/M630nrz489+kIV20+HiYivQ396945xubt62YlmklfLNb31arN28fffN7z77/KuXr756ZVrPrs7MFatfbVf+4vrf/4f/g88++82f/+f/r+2T6fbh4Vt/NEz1q1df/UYyt48vedP/6stfvXx148wvnnwwtnJ1fY718Iuf/Xy3n0Do7GrTKH75q18R8/3dIa3zbpxSTt7g4uKsu0imur/Zp743N0DYXq4BvGpdr3oMdccIauMog+Se8BjVxqdPr1599bkGPnt+sbs/sDdkfPPu7unFed9v93e3mHDVd3d3Oyf86s3bYdXtih7nXaiNM677M/dmbvd3h341mEVxS8pDTp9+69l0tO2TFw+3b9/uXl2sLq7PLon51e4G2G93+7SW41ymd+VyM7i1aTfO83hxuZJ0Aa3u7w/PrrYx7XNE0/nZk7OH3b6OddXlmaLfDA+748V2XY/z8TBePrmqEU8+fvLx08s3b16eR2pVi05jaRd5O48TJqgPaGofvnh2dzhobUBGGuXu/oNvPJ0O7eef/fRsvR0P+2UWdF1sVuntl3642R1He7XTp9eX/QUyYpiWArMqMHvVLOJg1aq3yKt1xx1JGKEeCkAaEq+GfrPZUG16nO0wap3QU98PNfrEedN10zQpRDBD6ruhT+tVZDKzqRQyR/Mg0NoEknQdhlo4onDKzRUdESP1iVGImTpZ0pEABWEnAFdyA7PWDPvcVC3cUlaI/VRBeDaoYU2yRwQFUmqOaq0AOEmgh1pmipTrbI5iyOEe5hZoABEcIMzUgE+2LQQFqISp2abrEqG7B2MTlg8uN9aOTfVoIkGOZSqEVB6OaQAcOOdcCDil8AAWb9ZKbaUgk7vWCq3rpeucZCxlKvVQq2llZgIMEiBxCKeuODmEhRkQ5h5BvbYIn6fRI1Q9kIKTr3vq01xHlNTMg7nWskjehDZ0s1pBS1W1UryrOWXOqInd2EAbOOTcDFZJuo5smrq8ah6NEg6r7uKsPxtyx7vbZmoEIMO6tanLq77L4ApEARZmAVCbpqGXfoA258TEUWsRovFYO+sp83Y1RG1mFBCU7NhKzG4WgZJyJ91KLSKKRWAYchCBBaQsc5nbrMiZOFNnKJ1xciB1NHKWbKoxqwd5NeAEwtXbMgvu3rws+9FZ+i5zJxqGzAAAphSe
UnYHyZJyL8OQctdM3YPZzWprOh1Hay08am0pJXOX3PVrcfcIpNQhArC4q7mFNy3zPM1zq2a+6GRaBEiCcEZHBOGQxe9J5NEwCoUoiEW4atmPbVaba4loFqGhAUAQKTOET1NJuUOAnoV15j5R109N+m0/f/Urmw+wbTu6GV5s7+eJK+Ou2zQbdL3B69km6dLB7RXWvtTr9Xp/OPRMqyAi6oHGcpyP09D7ed81PddZ3ZO22OTNwzgi+XrdGagg9sPg2qI5YtQ6Y4BwAqAsjIvdNePc1AGEPVrjvLoYrubZmWy4WJVIx1oQmd0uVjnX0tocDJjTQdsa+DLnbeJe0sp4Ht9RyruoatAnfs7Du3JokyenOXLAbDRy9IJctQBbomAKYEIAJoQQBG96ZEkBGmbujZF13mN8eS5JABpoCyUgQnZHBEiUBD1nrLM2q2sBYHO3cM1CHu4IGIYQTMCEajXR0pUJFOZqFJDArRahDhw6TtnjDKU3W+WcGrDrkDmRkx5JMmIyrZRzQtdQQ31998b71SAAZgkzSJ7qKMwRnhlRHQlFRM1jERIi8HJwznMx5xRi76GicKMgg4VGBLj0cALA0mW2NES9NxlGdPeTpPGpWwwW43MkBFyaehbFJiTCAPdwNHJ3YjJrAMiLBRYAMXp4mAE4uKvqoimxkEqtNSZiWeSL/bEDyUTQTRf6d7gBghssJh7IbGa397vWnDGuz1fgUWpFYFMrVTsiJGpWgFVRAfOQiYBYMNzA1LVlkVZmksAIYgQEtZkRSmkdISKYGTFoqwmCiEbzDjysMXMQVdMIZyRG4gB2c/ecEllpOudwJC6mSMEErWnXpeJNPdSsx/BwhWAKCoBQhBAOQl+EpYW550Bw8JoJkCKjrh6JRq1pHtbKWbvVVzc/i7gfuiA3JNCmFsBE8zR1XV7axNzaQpew2npCMWPi5q6tdql3bwCMSETkrkCISKpKQg4Q7swIBAHhrgtRCIAgAE9aQotJcBCRg4UDL4LUsQA+frr7AfDYzOhgi8Z5mEMAEKgrM2tTIkKERfZoEftb4uuFi6S2OBQFEnpYBDhQhC+PtLsutJOmlR5TRYJFG/1vQkWZIWcxwC6v0gUfZ61tzzxMh2MvYhR1mtt0AGih81in0MlKYfRWChEugu1mIWQA6A4wTW0uFpATDqtV6och9/V4VGgGhgm1GagysyQq0yyZkEIYIVHq5OLyfPPmoc71MI1WtVpFIGKMALOo2saxSMLjsWm4lhpNjxD7I1BgomRoCKhqLOgRCGDmoZAHRoQIT8zDmpG862S77ZgwCB0gHCGWSQqSuDXVVokYwAmaYHR96jbMS0dYBCcGQQR0c2ZJjExmfkTxzBQtXMIoGMFUm5o2dwSeWpczChGDumbEwCDmLvGkhoQQgQLuKkLurg4pIS0CRbbQOIkkmmufZRH7R4jElDK2uRJ5iHGmxq1alGoYrE3DoANiQe5kLoqE3aojNYIAcGQCALRAAyJGdA/zpQOWQNUiMGWxuS06aUCIgpICHAggUbhNzOLWTBsyRQAJQXhTI3QCLHUR4YOwqNbMmiRarbtpqhChqoSoS/ubOzFKQrAGgMOQrNYyq1VjoNdf3WzW/Xbdt9eTAGWst7dvVt0mDUNVYMZ2LGzjy998uX84Wi37Y+369PR68+n3vvmLX77O3AHg5mr14vri4tmfjRUA/g0A6Ex5Q9PcSEhS2pzzYdy1VrebzeEwmSsJvLu7316fBwkRujVzNAdGNPc6F2YuiVt4x1iLElKH6TDOvz4cFWKe5+kwffLdDz78xmUt9Wc//aI+rLbrjY2RrZCnn/7yfjOU/94/+od/+Vf/+ZZ6f7v74ubd1d/+zo9fvoos89v95dXZR588ffatJ//Vf/avOuyZu+2ak8hUFTuGFoAKbGERABoQGECLFgsGOCEAOuAiehfESBEY6g6EwIIGtvRqmjZmYkFzwwjJnDuhLDf7OwjthMHrcWy06byv0dXzs21JflMP/bB+uLn/83/z53/n7/397/3Rd//Lf/WjaX5pjThlYiep1x9ff/Lpt/71P/+r66eXkx8j6mFX1uvcdanfdle2qWOZHkY3oyGpuVoLi3EaT+cd6pGKhYMTL47trhjUdYdSLXDpDQXwMJ6KYs6mxsilqWldcHIiPOyOgFCrE5GbopuqGrhTHEvphiEipmNBSkUrmZtahFXz2qaA2lpzjvV24Ey3t3d9yn2fJaFkvDu+rlrAjZKomTtxNyCiiPRDN5sCk6rprDl3eZAgDpK7w269ygEETNNxBmg3ty/N6mqzSqlrpf7mi99gxKbPa+Lx7j4hbYfVw5vbLnW/9/GTu9uHcCxzS4mur9YRMI7HI6hBpC6lLvUa02706mdn61Lrw+2dzkYMWm2V5PDlq3Z7v+wFs427Y62FV2dX0FInm3acWBUBE+fWipCYOZ0oRSkcRMTMTmTWU68+LzqOiGAeYEt8EwR4CqggIBAc8KTJx8LgWomYkjQ1AuBwwKAkgQDEwglxkUciiAU7XOwcAAlJiHOutS0bnzkwCSCH6tAP89yiFmAGQJbUDeutE5hhAAcz8/JcQDgJgsfiM7rq+9paNwyffOdTB/iXf/6fXF19N/cpDEV6zvxku727fchE9w/trz/70eX11fb8/INvPG8Wt1+9efnzL55+9OLZxy/+8I+/8+Mf/Mja9ObLn4y37/7D//B/dDse3vzq5Tc/ffHZD39CZofd7kf/8p+nrkPhP/zTv/Xmy9evv/xp83h4y4ygVVPfk1Ats3mUqaYup01PHV08OeeAMlfzNo8FPdYXWySEOc6vz/a7ox7HVlQGHtYrp8aBmHMS3h/HvlsjYbQ5k1iNh90xdSm31FR7EVWtVdWQndQ8SyIB1ShVa1EPSInDbRyn6ViSYOqSJGqtffrtD+f93I714eYYxpcfdW9+fbybeXc/D11n5uNcAvV4nLZpS4AMRLk7Tk2ChtyHRtEYKK833JFMVQkZg8qkfR5shatt//LVvTfthm6utll3V9cXV882x0nv727JFSJl5k+/9+2f/OyXrIDEZ9dnq3V68+ZeWy2uRdWKbs76Mo8McH+/V4POYzLnR1YRcT/qoTjsDqUq9Bnm+X5L65r6zeX17s041r0t9CVgjsjK59veXTkwry+qWxnBq/ckT6/Ozq7P797uxLTMjZU2w8X5k8vDHA3xuJ+wS0hgAevNCoGnucbcWqkLC89azUmIORG3OhNhltzc1RbUFjfrrus41LKwn5h7kZNYhFkBDWstEA3QjpWEsmAdGwolkWmemU/FRkR2A+kxiN0hASyVakEKBAdaDWsFV+lbbeGLwWYgBjMxRAcWEYxYShX3zJA2mZpBM2+6Tat132fpPvj04s0BX/76ZWKmiDnqqLv5OG0rdE3W2A2ZAgKEq6nWOh6OoUZCRIwE6C5WHOzmYayHagj9kPuUtAUHajPEICaEUFM0F4AwM2vWqjatc0F3b8pJnKX64JoCOcq0cPw1otVGEKHO7jC3zKCthchep74fzPvUDatuu4rVYb8nAmLMibNkolxVSStHpKG
/3K66xBDBwyq6PkxJUmut7zJBgGvqu6IufAAIRChz4bYP82iQk0CrGePsfEDVKNBSpzqpKXf9oU4OpBGgFax5reNuF8SLy6Rpk8QVAoXLCJLJa2Px0o64DhfN63UE565r5in33mEdjxgQAAKRq31wuf6nAACwm8btOoWl9Xrr0cQtFhMlSMKEwSml1XrjkCh1pXkpLWUxM6uqtuA80qz1uXM1N3WlOk4iXZcEEHlpxWGyOjezWqbxeByPo5mDQzf0gIQsTCyZwREAmemUnD/WG6o21aZWq9a5NiLWCEkoSRiTAwB4qyXChanWWVvjhMBQxomJEgATFkTaxPnlqkE54DGmKSX59u9/unuznx5s3mvGs9XZ+eHuXRF/2B21j22/2T283abOKpwPg/a2XeGhTq4MqUNNptIqlYJdvzZTRkAhbwZMAcKIAMFgaMEki7AxEWMQIXY55yx5WPUqTls5fzYXjGpS4+nV01XXvf3Fb467m2ilaJkf7lbb/tmLK/Y+FZvv9gOns9xjqOW+lQOYqqqkGQfM7BfU7edJDXE1UCLXqlgps1oTQSClxIxu4EDISWIxowjs+k41anEZAFLNQtMM6E6ETKjmZJGYDDQc20lj2UyNiQHdw0ArLJ6pCyXGDYkZ3VuDiJQYkdRUmwMyElotCZEBOiQG7YYcVgUlpwAyIGI5mYsRCDuSgQSMu93tq/vSr8+2Z+LYU2aUaI4OIlwIwSIMmAQAwcI0ECPUFIhy7zqB+107AaaQMjRDRUA+kUYwkMiXp3FJ5gEA8KQQAxKxVKbeo0IAQBGwzLKl0efxLQ4nPwmKCKQUHmYeQYt5FTxKtEcAkSz9RGYWCEjiER5ACO4Q7kvzk6ojUISBRwS6BwaEgeFiRkL7Q1tJ1moICRCZxCMy42w6NzdFJkDHWcEIwoFpcWIDMwB39OBqkpgg2DEHmQcDihoSLi1j4EEAqE7ZyEMCyQMpCADcKYAgCJCRhrDQ+ZR2W0QEITkARrBHYJgpkkQEzo0CYA2ETKaCsKBjHCdBYGYgMEJARgJ3RCaIVheFKc6paQ0ELbWTuzretcP9WYa5NHIPBLOIAEdY8DswBFi87YIIa2tqlYXnpZ+1OaMgMi58IqCTcrnIiYtGFAAnJ7LlBxAGxEJOg8UQBk7m4ECL3dDig4Z4gpMQFj/Jk2bQ4xPBS+31pD3lIJICAuJkc8VEAEEIHrFI3CwPIz/CnaeHDxkATo9kBBIt4EM4ugMxEqfK8PX4enw9vh5fj6/H1+Pr8fX4enw9vh5fj6/H1+Pr8fX4enw9vh5fj6/H1+Pr8fX4enw9vh5fj6/H1+Pr8fX4enw9vh7//8apD+3P/jt/jBDVLbwCoRMjE4k4ICYGRGTs+sxMBriIxmspaMWbhjsSSeqECUSCKRASI7gFQco9UYpAD8wC3mY3FYhpnLXWVkoby3FXyIUMMYIR3KGaA9FCpmYMBJAsSRgCFqJggLJwU3d3C1pcnAKACINBBB0c0RwiANSMhPvEwoDAhFyrl6oLBY6ImZMBmQeiC0I4ODjL0sR3klqzx2aJohHx3pTOiYmIEBDAGQAttIEpRZCahsEgvBnykBMxVtPmGhCIjoDmSCmT5FCrtXgzIQkFBRhW/fmm06Klec609H86xNyUiBkgPERYhB0AARNhhC89X62aByaGUC9V1YFYmDEhmaokUURHIAyzpmopEQv3SZqqRai6AQaBtgoALJI6ISICnWvziIXEttxhcwDk3HXn61UHoFoPTXVxzQMXYVPblwZEFmHNtn3uJc1FSzMRXGX2qhpICM08JZlbXa9XHfM0NUPw8DJXSUgYTU0DAxCZm1WKQCFGVHUUZMIytiQMgbWFO861IUJKSAxqzoSJqFhYsANRxHTUtfB6M/zFjz4HgNWKs7Ke5sSjOwEAnEh7gY9cwxPndelHhvevh/d/jVM3MWDEwj9cDrNQ/d7/dSEmLk33vz1CYDz2Ov/O0XE58Ontv/OBpwMBIAITLFxDB3QHh/BY7BOAAggXyylwAFvorQEBQAgEsLj6vT/mchZLD9HyfZcXx+Oqgb/7fU+ndBLoX964XILfHhEiHr8InOiUvnwRxPevWxqVlgPi+2970o07NTgtn76QefHxsAALhRVP7qYIv3NJ8bFZCRYrvvfvWJqh3p/p0mC+WBIsqjuYEISAIICWdxAisnBZWjshchKtWprq0t33eLVPVgcnSbvlWVh8MvDx/p8+njiEIGfEALNQC/NHEw1/7EwnfHyg/LGb+XTZf3vOy93B01X8nfvzO1fi5Mzx6MUQAIg9Qk5yM84A8L/+x/9IgoKJk/TDClC6oW/mRDFN+5ubl3/1b3/4y1e3U2A1l4RZsMti1Vq1Wr0pWPiJPwvBskha43LDl6u9WJ4JU4AjgfCivA3VMJiFlu517wXcfC6BTIHAhF2CzYaEuSlMo6kSIjBFAEgSYQG3UhoAMINbRGAgzs0JMDESQWYG8LkuLosnZjUgNHXTSElYiJgSI1hE+JBl6FOtNldLggHobnN19ZDEBFKbb7Ncr7sPrs5zTl1mDPxv/K2/9d3v/YED396++8mvf/PZr38zzqPO81wK93ycDpyJBPqNbM+GdU9JeD/VcWyb1ebVm7ux6IvLVXbdj9U4dYnEQh0iw/XF2mo5TGPX5Tb6w6Hkvg+UKLXNVQSePzs3QAJ/eCjHYwULygwEh6JPn56Hxn53cI7c5ah2vxslpyfnKy3leGye88XFRpCO+/lhms/P+mdPtsI8l8k5EiSEROGvX90O62ym59eX10+evXt7c3e/n6uvUjob+uNuKiRDQtIYp2oR18/P+24YmOf7/ecv35w/PUeg+/sp57RdcQDJutM6J0CQ+KPvv6ij/frzm0UA8sVHT8Srme2P5dhsvT6/uavDNn3jYvXTz34B0E0Aq/Vw/9C++cGTcnf3b3/4s8hrLZWY/9YffueM8DAVTvxsszmU43bTu+L93JzhD7710e71w8M0TUXHyR6OY9ndUSf/9We/AID/4X/w9zoiYKBl6i0scAJYFqVHAvdpGgEu3QcOsbTZNNWmTV31pO6FhMhMRMxEQiiETKeF4SSJGaduBXdz8GWOkhATMxMzvh+nvQOXyY6/s/GcWO5w8st1d19W7tOC/vgpi0+On9xWTqsmEkPQskcgIDMTMiEuB4RFPyFOzi/LUU6tF+HLivp+nTlpdsLSgsqMtCw8iIt//eLl6x7hcRILOS31eFruT2KaJ3MfAHh0hjkt3oGnG+HLlrKw9mkxVvqdlW/ZV37nQ2MRTTvtp4s1/cmlmBkZYWHh4+OpxOnzFiuZ0xHf7x+nPToed70laKBFhmTZQfDx8PF+6wv30371fhE/6eGaLcs7wHuN1Pf3FpZd+P9nTX8fURASEb0PWt73LCyRAS09Eqc97bQnLRuvA7gHRLipmRGgIf1H/+T/AwD/s//Fn3Z9BCPz0q2TcFnXAQNR1XZjM0qb9XnHKKFuS9eRn4wKzMus2rSZqpmaepi5ehiEE4a7BTgiEbPIQJzDTwZPTWtr5aT/epopyyZHEeQO4R
HmJzVNP1kSQZw6I03DNLR5AOZBNufdapC+YyZiAgzX2uax7B7q/UOd5lA7PQFdL12S3HFiEkHCYATCRwcjQgAMYOAOqWPuEEmYiBEhMLSVsczjOE4tiFIvOafUAVE4m/k01eOxjGMBYgRLCYWD2EkIxJEcJDgFZeTMKTEv5jUOYRQW6EiLdogvuQ6f+roB1DXCTdUiwEOY5GR4wyS03L9FHIZJmEUkp9QzZRHJkpYwJ4gJSTgPHXcCjCEEDGFVzbE2cwc1cA/1IAwIU8cARPdopZUpsLW5uTsheJgGOrGdHNwhWkF18DiOtXm01hYZBGBInQiFLWsMeCAgOiVarc6G9SY05jqP8wjokijlRMSARMSICMCBTEgo1HEGx6phy+Ji7mYBWJr2OTOFqoqwpNTUDofRtPW9XJx1QkBMkrKpjuPEqtNh+o//4x8AwP/2f/IfrAGo64/j4c3+WBRv7+4swWL0VubihmpQrQmzmKPD9fmQwCN0e7HaH0vu+2pAwTe3O0zkRG9u7rpezNRqNUfp+DA1cxj6BAGMhAjW/LxPT3K66tOq7zcXm9z3TLTOiRmbh7khwzg3ScndtHnTaKZB7BhTrRyREFkSIDpzEJ+t+tYqi+yPx+pBq8EgDsfp7GyFFlP14s2shUcPKEDnfYdACoCY9qWOddw97Etr4LEdhvP1dr8//vDL1wfVUGuKq8RJYJwrBKy2/XbT3dzshKPL8tGL56t+3XfGwrv7w/hw/M2rN5JzaVrHZiCb9erj5xc983Esr+6mo2qX0/ZsfdHz9UXvAPvduN+P+1lHNQA8W6WBuENeJS6j5k16frlu1RzAAO+O031taSWeoTLwWeoGGTrOQse5UOr6LleNxJh6CYAwmo467sY6Kk9lwBRTJdcn69SFXqz78/Ptfq5BsZvau/1MKau7A8zWyJUBw3C7Hu72c1XdbmS75tJac80iTaM1Y5FF6WUQauNcDacW+2NVha5L27P04bPzaRz7rpPcPdxNb14f7vazBQrFKsuK6en1th9W9w/zUXVWrXMJ9CGnq6uz1A19J+9u9qVpa5qyZILatMzNFFLuiHg8jrXq2Wb1/GJzvhqm+fjm5kEt3CzlvB261bonSXe7/eevbm5n69C63L3a3cB7rSLaQDUgRW0IBCKAiVCQJZGIByQGZHcABwTBANBqZN5KEyb1Omw6FlIIFkKOJKAWiYWEmJM5MKG3YmhIWk1BvLUGXWCQqGAlrJAWFzz1LFI9HFCQPIwCTKNqAyZkRoTEVExZcDF45+BliUQMJzJcWu/APJApgJ1xjugXG3v3YKSOMyNoCPFjlhXL7u0WkFDdESkI3JwYiYnAIaLPbA7WkBDdgRNZgEVgLGZ5AQiUqTUMz5moY+pyQgdtHoIVwN0TPNrJY0zLhsZEgIKdA1g4Ipp6IKVMQAEUiNhKAwSLcA9JgszNnISIUeGkiEQs0nOZHVIO137gY63uykFq3os4YphR5iB05BZuAAOTE5pZ6rioOqIBBkJiosQkYt6MPDgQwhFaVSEMAhTRQBCXgTOSjS4ArczEDB4pMTL2jOoQc+07JvB5mpA5OBpCQ3TiOpZVn6JqIDCSezhT4NLLDiCADIiOHiKsgerAKYFVRCdBllOQSgwo0EqgCEZ0IMzgYQDBCQNi1oZIkbh4mEas6IAuOC2zIIkAC4djQIQjQJxiM3zUB3ufh4f/Nht/RDNOwWLAY/iOcNKue4xbaTnce0QDlsgDEWhxKAsADAwA9CUixeWz4/RvePQzhPdQzd/ABPAEeZwgocWdAU4PWvDSwwqnYNd/G4k+ni/9NrAmRDqdIC5yMnGK0x/nCp7eukweBHBY9MjhPb50AmlOLz713MYp6I1Fcu2EKuH7n0sitFhCLxH2429PXb6ABBBACyCztIjj4r1+ip7xb16c5Q4QYgDQKS7/G1fu8atg4CncP50voiEwni4aIITH4klBy/O5zHoCZEJ/jyM+Rvqn1AwJwB9hvuU6Pd5diAAMZFpSnSVnI9AAPyWf77OWx2+/AHqnTmYIQAJ/rx29GHL+DlQUJzsEePyBgUG/8xWJ0TgCdXn9ai2uyKlrzVRbzmzmrbbUp9StknT9qktJRjUIah7HXQutwqdbZIsRA5CFS+LFih4I52oASIQQwUyS2DwCMCd2NwRy8JSREk+zLl6xpUZYcJIgtBqA4EjqMB3HlLNkcULm5E2JYVh1puqEYIS0OHKCLFqqQsIUAf2QwVUrIGIsGQ9TW9IRYhk4AFoEamgAQXRdntyPh9kjLABbEEIrBoDE3BwR/OJseH7RP9uuxTDc+7wWwnGab/Z3DJJJpvupHKarJ2dvb3SVcT9Nm82K0EDifNuJOIKNuz1AfPLh9V//+O7tbbnYZrDSzFCoTfM6rSICEFd9Vw/l3c1dHtLzD9af377adB1lmRog8dnZsN3Qqktu7K617tYX/XFXmnsrmoS11Wlfa23AhORjdegH6WWcK4TPhlhsnmoEjqrDWp692ERRVQ1oZ9vVw7upzpUYWnWgACcvjlqOx9IcTJVXePZ0UyJ2+/l4KGukfpshZ8ndX//819/66OrpRfchbg6ltgZNC3fQbVdv31bR8uLJ2qbp8iKVw+7mq9nuxhjy+YsngLK7vY3w1692su5XG6hex/spaXQ8VJN3Nw+fbJ+sLiJE9/v77cVmXzl33abLK4xDnZX1j/7gm9PLPVF+sr78tz/68dnzjyun39xXGHfXL67f3Oud7WTV19zdv7pfZgFLLBJai7wAMf7u8gO43BCIE8L+iJUvQABELG7yEUEQjgGLAnQwAEAQLCKcCHBSEViW7GW2BzP4aQ9BpEAMQsclQFhWjsd/fnetAogIIgo4SR7A4pYSvojNnzYzxPDls8L9tKedUAU+6dQHnBAeCHRAWTaFk0nCEuSgL3vYItJ6WnvgdFHwtBEtEA4RPQLvj+v56fWLj9Wi3nAqDJyQLwRfwH18v79g0CPmhQCIfgLMCMBP6+2CgeDjHggAYaetDtHht29/1PukZa30OAGCJxU1OgF/p31/2fj8fd0I4rQpnZRJTmA9nmD9Zeda9DJOmyYELfIsj3HBcnqLtAgAnoxukDwcHE/VHjidDz1u5MtHn644PqJdJ5wIiOkRFsITnHe6ChiIvnzoCedBJITFlXJ5gBhiMVcm9nhvVQn5LNBBmBZTS8TgjsEBPUwdkDSQpAsAD/PQBSdCWuRQHTCQ3c3N3TjULUIdbRHBCgii90+GIzYkQUlhgMosKdhORisAHi6Ebr+F7YIiwkONMAIXRZBFI/Z0CoEgJIAoXUKiQAwkIARchFIQhZwBhIGXyAUcEYgc0YHssa5GCA6xTAJVRyRzZ5Kw1oJy7hDAzVhwQRI5CQuZelhr1dRazoLEiDgMxNLlDueq7rhkFEhoaMToCESBhMTIabHyBRbGANdAJzR0c3QPjcWhiYgjHDwIw8IC3BZR9uVBEKYF7OFAjuBFttIAAI3cJo8axOYIhghsHkxCjjtwdxeRCMewiAhgIEZJDo+K12hE6CgepLWhW5tHiEIAzESCILRsl
ESyhDaUiWpB1SHlZgHQqaqjOwYIRrg6ELMFIoM59qvcDZIyGTkGYHWWIDII8ggWDgxmdguiAGQhFuYIyxSB2NwQDIWqecdcp9ZKcQtAVK3uHmEQ7jPNd3eMJp3U1lgImMxVp3Ka7/M8qj7pL374+uXtYR+IpbZxLIuEzTKFuU+pcQraivR9L4kJ4/5wnA/zcWxxLM+fnlP1iyHfPuwr4bDuRjNT35yt3GGqhoIEURwEAcG5EyByBzFY5dXZ+booiFPf90Go6kgMFrW1WhqJIKBwtjqvez4Uq606+JAkxooRBOjEKgTRiSspnXM/Ee6nOEzVmad5FpHoQFscxrpOKRz7LBwULZ5cb8dqkzZITF1ed0KBT4bVduiP40E6NDfp8MPLlRARhYwwNp2s1b0h0a4UKg7tzcV2uD2MBWid80r4+9/7dJrnIH73bl/VQ213e7e63g4JGYMgVGPaz89W6zBlgu2qzznhboLajqpvp5Ict6uOhjQzYKsCKzdbrfpp1nVKhlDMsUJITMUaI5pWdO6otHlujZkbsO/nCG/VXfFwmNBxjXAc57OUXCOML4Z01afD/RECKFObVQipw2pwdz9FeBTddKnLPM+1Y0mJauC7gyHFqk/jXJiYGVICQtCpEnBR28+xPyg4ppwisDT86m46WyeXfPt6t+o6Qg9wQwwkQ9qebxgEFabjhOvOPLjrVr0MSXqRLFCtKZpjQCeGMJamrQGCDOIYh8PEXb6+On9yfbYV1un45v4uBMyjy7nvui4v8IjWME80dGAahLbMghNUNDli4jLtITAsVj3nPik4J2AhByT0CF8S2qKmtfVZSJt0ydyIxcADott0KYlaA3ckoiQkWTXAAjy0FKDqYnNr5lUFUH2yChwiGOop5VMkwhiOizy+MGOEA4XDsZq2cFVBA4wuMzOlzEzAYV1mc1UJR6w1mod5EIIvOb4BRqC7NSNmYFKMnHC15jBoClYjENGDk1QHLf4YlBBzDB32GbVZCJhTKyEI7hFoiESBrRgRYVCdlQiQ0QElJ0FggggHjmJ6MEMwYGQIMocAGtYeUYv1QitJzm7HomPdz7DarEHwUJpFJIYgZEmnigohUCCGgQNi05aZgqLWJpSGdd/nDsW81nkuS3gjWVar/jiXRNwWAghFcUssRgBMqZOFblJqxSSrIaM6ECFTIDZzByNEM+dOAL3NjQMgJBvVQw1GANBWEBoTUuapzZByd7Hxw5gCE2jPEomm4lUdmRVAEnXrJBklpbBoCodDMYm+532ZDRGFd/O06gQQDD0PHTkKY1Q309zL3KxWI0QQrKrc57zKtdQyuTA0s9xxcwfCMhoLEiJq1LEgcITv51MI6E5CwUvw56cI+hQEA0IEETxSTx5rdo+4TZw4RwDv48H3geUjJPJYpFzQooilQIYIELRwVh6xofdsGnofoOKJsROPWf/73y9AzW/ruoQE4QiLGOkSghMiE/CS2AB6AGBogJ8ghxNwAe/PmXDhQ50YTwgLO+kUvfvj604Un3istP4NBOaUOMRv/3sxYluyjKUkHXHCcU5ZDzyiYBh0QkIgwk+i/KcrBHTKK4IfUZSlau6PFf8TbhHgj8DYQuuB05kvF/JUN4b3RebHWjG8/38BFlADETzLolTnAb4UEquBLtkC/g4W8ztFbXis+vpycD+d25JUBSxqdu+L3Eiy7A5LuRnUAB6ByYUEdpKdi3h/0vF43r+D3f2NR/d3bwi+T2jxBOoRBoV3wPh4TwmTLeCbkIU5GlJ0q07VAbgfNtdPLn718m0P1GqrahCYOnG1WhwJmJEIzQMQFlOGUo1lwR1BmLSpCBB4qa4BGggeqSMgRI/FNkK15U4G4TI7Cc1FIWHXS9HSkWwvB8lSGx7uZgY8P0tZiADuR50s3L0jTl1HPSBgzI6C2kwDOggiLKqSBKp5oAGQYAAhcaiZxdCxOQBiU7NZzWOeGydCADMTRmux2mRiDofnV5vnq3zdyzc+eDJNJQ+rruv2Dw+jzXHRzxNcymY/zur++v6eV6nO2hBdy5D8bNtdPklH1SL46v7d5Xr9dvcWOfUrOX/6/OoifvXLXwTg+qLfXm2O+1LK3J/n49sxcNgX/PEvbjPK/TSVw0yp+/DZ1f27tz2u9rV9/vkbC1YKBGutXp2vszOH9SlizZwTMJ092bSX++NBp4hdOXz04qKxbbMMeZir3+5nFF2vyTrcHabq0TviavDk3aZb98M0W0a/eHH1yy9vbh+qgTHLsdWm2uV0dgkvrp/vX716+vTq7f0IZfx7f/u7Nw93797dX1+eydp//as3+4epFJvDN+cXa4GPPryCfPR23Hxwef/w5ko2q8tzHVY/e3X34vnzMh78zFzieDzM+8Nv7g93F/Of/dEnb35198mTsxingvJfff6bbo5n33z68pevvv3B773o5YOrzf/7v/716my4fzgKYVX/9ZcvL6/Oqh72rXu7u0nHw81uOs6hJNiN50Pnff/bOUO0TBLCRSSSABdP2UfD42UBWwhAhABo7vFYJ3gk4CwUQEei8HA0BHb8LaMTTwnmssICEoIHQhA8rofLqoIYy2axIB9LweCkQ4lLYYNO2pn4fu+CiBNCjqdzgkdQafliCw0DEJlokV8lokeKDCxevnjavB45nh5AC0vmBPEsHB9E8FMZ4QTW82nSn46wOPqeigd+Ail+u1jGI/gVsZCJTrDYI2Tz+JffLt2no8XioAiEi3HTe/7Sic8bAfYI7ZygMIBlE4twXFxmFyA+/IQCvq9PPG4ocfqoBYpZ9tXTvr8gR75Q2sMD3jOeACCCljpHEGLg40EfL8QJDEMCCgukwAACNwQkJHc4gV8LuhcnD+QFU/PTA8MQTghM9IhwLeWgx7t+qrwAnhwBHq+dOyIjoXsQkpOFLVeGmE6bgQYkIcdYfFfMwyJco2OsZrf3e5dtJgmCYs0oXI0wwE5UL4sGiI5R1Iq7NUVwAGdhYgK3CGBZhFQDH9V7AYk7RAcEbmbgQcLh4GZIREigAYRazT0WGhQ+wnrvFYZZTtEQIjAD86IUix7Bgm6IhMQA4EsEhQGPV/dUbDvdyaVw6w6M4NFOfBnQqQQqJiAmQkEMAnIAYMYgZAC1Wg1YLJoaifBiuZV7lkw80/FYABAJKLGjBwVSOEaDgAA6eT0QsDMTS4QFBEcBVwg14YTI5oxIqYNW1DxmrY4pJ2YDFmBBh3BCR2VhIGtRc87CGVDRFYOEBTXCkJAWcA8NytwCeK5Y1B+vIwEnD0ROGJ4FhRHcAMWAtJqV4m1yq11O/ZCnppAlDR0mzn0HAK7aOYA1RmMMg1Czpm7gNSKImgcnZyE3DwhKIANRh5zIQaNWFNMwbyEAFKzsRIQYyOQIYK7NvBa008I7z9WR1F0NiARZuAcBBg8vzgsOTDYMTIGpSwiYlFzdIHSyU3kA4OjY9cM/+8Fnb0uDnq7PV8e3JZC0lSycRMygM3h+tt52fTIf1S2lm3H8ap51X9yh69PDy7dD0MdPz5/I5hBxX8eN5FLII4hwvc7rIQDASJ88WddD2Y1+3E1PBvnw
xaU1QJQkIMLcJUSExc5McDpU6bvDXC43W/QY0qBmVRsDNMK1MAHm3M/zCACj+ld3d+e9sEfP+cX5U7t5t75YvZnHg9cn683Nw76YTqYDc5hv1utMORmhUTlOCCFJAHDougx8tV3nTB+8uHhVxpk9h333xaZLMgz86t3005uH0YICSTglnMf65Nk6M745WCC9vNmXcf7VuzcEsVqvn11cPL3qY6pnXSr7idC///1nv3j9MB2DEw8bBm0QslmncxmSUFft8O4WEuWU7/Z1P9aOeZSwV2//6Jsvwqmjbo2d3h+4lT5nGfIPb+6mmdfX66nNq16wp2JKJNWwS+LVx10tU5SprrpMEe4O5t50srBNP5NfPN/sxzKbUc9l9PlQKsJRvVvJ5Vlvx+OTZ5vjpMfJa8DBnJKUqu/mybyer7uBobV5INterN1k96BKzGtZ94yONw+zNnq4bfsqz56m8yfnq47h3YNhKDPnfDA7v9ysIjTg8tnm9WFqiYZV7oDO18OTq+3t7qDhqUvFKueuS+lQ71Pfs9DDwygSw3rg1D17erXJvLu/v9/d3Y5zn6VjGTJdXK0ZIsxzn/rCCJYIs7DEqZniBBVZG7ESAXXSESMRqzklTCLqLYjN3aoiATKgBVuwOjcDdfRYX27T+eABJBRehUJyCkdHNouOJayFlnp8qDpyAistICjQXNUaE0/jMVtWUxREQm0qIpClNQVi95o7VoNekiu1mdA0PKJRqR7mjpHZJHOgk4ehqzmwJEYPz4LmDRwFEJ04CINbA2/eQl3IdJJuuDgXBTrOBTWoABolliDgHjA1wra0hjAQEFZQZMo91WZhFoHg7o4R2EnHKBKYkRJSmM/NADwoploboiGoxTaRcEC4q4KHuCPROB174pVwU4WAMs8hYQazOWQB5IQUEBZOAU0NXB1AjZqZImUmwcgi4SDBBCA5w7Cep1mrdmupszJKFrCmtbbAIEjhOB5qHiKRjtMsQEBoLJw6jGgKxWtFNfBM1FqDQDNDQu66IXfifbmrh0Nr3s7Ou3U3zKWE1lqhugFgmT2Dr3sS9PHudj1sz/LazGfX1/flfN2vO1ZsieFy3eGhHsOBtEv9qAUAS7WeOrBQUzcXWsAcnMYxSSpFbbFgYGaBViunkKzuc+pJa4XiBUM9XA09vCoD5tZNU0REC4j0mCSTobOfyk5LeIW/xRoIw3zhhCOCnzCgR4zhESNYyGKIGB4nhjfgKZgBAMQ4pRkLEoGxAB8R7zsR/NTPAIhL5XGp28LC2/B4HwYuBKZTtOmwdMc4QBAjBhACM6KdoCcMJKaFhrfUQihA3R0eExfHJZwChBOFPmJJCZZsacmUHuPgU0y6BHlwwh3e814e060IWJwdlqrvUpc/tR/AYzYE7+GcxxgXGGnpe3B7pAc+plVLzkMnej8iwOIKsVQbLcIR/TEVWa7S4qfw2Nbw/rb9FhR6TAaWfAwEUR4xQTXQE18HUC0LijCYd4IThAdqC0BkYjeHBZT1kzfBCfpaPv791fDf3uv3iY+1oIXIFoBELBwKqg4B7vH+3WB/gwf2CN894kOPv19ymL+RiT2W8vE9Kw0gPJAQzAIo+LQlcFrIOJFF3KFMLVPOXc8JyzT1q/V6c5U9xmahwUhqZg0wQoSFoTn4o4GsNfdTloceEU6qdZOTALh5J0CGptE01GwYkjUF1YBggyhWFQgpSzJzNS9TDcSpBDF2FkT05HqFLPNh2iQx95SiBaaUIeD/y9V//VqTbHli2HIRmbndcZ+tqmu7b9++Y3pm2GxSI5F6I/RXCnoQQIAvAgSBIjQQNZIgChjD5tju6bm2/OeO3SYzI5bRQ+Q+1aOqi6pbB+fbOzP2zoi1futn5lMVQiJoRz4GZpYy1QC3CHRHwjK5BWjAdtsPOXk1q84EDDGX6rMFkUMwEgGGexZhQgKoNdj1x7uLX73Y/vxHn7959eZvfv1d37+4fHXx8d27u8eTpzzPU8f973/7lc7mFcfZwOz2/tPNxfrNm/WQ6zifDqM+jtNTxOefffF0/3E10OUlMcL4+PUno2Gduq7fbDenpyN4udnQfP/xdDe75rnC9noVXMs4dcNqKvDtu3tA2X+z3wr3OR9drrfb94+Px8nYTm+v1heX3ZdfveuGjSC7xjowAUZ1A5S0etwrmG626enTx/769c2Lm9cvh/F4eNo/gWQFftqfqlswp26YU6ljTYSfPnzLmfMAP/3iF//2179eY/fVV18O3QoEyFbkcf/du0FoB/3a/PffH3/24s3Vy5t/88/+l8urK0YZq56SvLxe/f2f7v7w+3//+ZvLlfnv/9W/r5B/8ubt8XHCbvjij37ym/dP80Psbw+fv1z/7tffHB5K3l7sD+Xr28dp2m8YfT58/urtx9t8q/OL/uof/qOfvu77uy9/9/HTg6SVnmj/afrTX/7oP/6737gBF/vRj158+e9/5yBEHIJu/Wcvf/zlu399+/BRID0/LYwIizoJ2r4YS4LsImJt6EHbecK8cRXCHYEgrO3Y6o6BERAWBhDulM6oNzMguAfDwkYJD0JA4gCAUDiTbZb/ISw7WDSAOcwXHiMictsKzrsYAjUQJRp6AYCN/7TseOSuDbgH5AiDszx50URDg0MCiSOQiAIh3AmgvWYsUA76guyccRhCOtNmCAkDCBkBA5GQAyDC6DwPASR3P6P1bVfERea9nCDL2dK2rqa7Pe/hQc/MyKW5X7bA5d2Jo3kGLECCL+gORIQ3sVbD3s4THKJwCERmRHIP+lsg2zOf2Nph1mKwG/qFaM9SsfMaNhVaxHmPBQBCiCDAACJq5gXPDCfHljyKDORNDgbLnIZaBC0CeDgiejhCNNISAACxe3uzpZYAJCB/Zgx5O3LPyFY7yQgAkP0ZN4NAQCJ20wZELU+BtEhlqlqG1DMnC0BEVb4/7IunYVhj2DRWBguhBhwigFdFCJ3mqlAn82qIiLFI5JrGrjk3EAYTAaEHmMYylEJBJBEAZsKqHhAhIq0gQ45wClyQTQTE0ACDM+vNGz7avqyIYO7qIAIeLEKIwQ4OxCaSMFpAEjQadfhSXjNgOAEFIQVYBEWYqQNhhDmYubdHgyAwAqPhbhCBXc5aq2DUVuWYTTrnlB0cCaXre0m5l/E4lWKlBLeaQaJbCbAjOBHbbETccCzGYGE3JEmEQJjRCXCFlgHUrYAjOhJkiy48GIycEIQZiUnZUNwlAkJzASiUJFTFOTAxsSkQZ7dwDzcLDodk2FkgoEQEEwcRADoBAii4qxG4kEVILQWqonMoOUJhjcwoiVYr4oTMFBV8Bh2HVDpA0+KgkXia62lWMDCWudbUYcLEEkEte145Ajxq0WmcICqSIwVyJgFHpZQCCwAgkBC7YQSHOiK6adswILzx6cwJBQMZqhEXIJjmQ58RuLI4ZyQWVCenWdEp67gkoE02Ph7kwatLHE+zu6tairher19eXWamzeryer39+P3X1NNhnt9/eDy679GPXvuBy1ED3Ka6d+j3T59v1ivuHz/dHzBqMEs6Ho7X2z4Tg3mXBfZBs7/arV90q34
8ANr12xdl1NWqb64sGkERnCgMO8njXKCqjhOApb5b5YTBbnZSFTMhzkwk0DmeHA9VuwyoxSiku+tTOZxGrWPq+NPtXVGrc11LsurOdJhKBrvu8lZWuN3Md5/WYqttRxrgmIPKSTddvw76fLV62fMFpse7p+4yveqILzLlLDl/+f7+k6I7B9lqK294q5BXw+zlYpzK4TQe9no8fPz9d3Az5KtV9+OXl/V4wsfj54nSm/7bu+P4eHj15iZ3sn+cU4oVp9l0y/LhOM117pgmg1J1ND5NfvWhvByyaUnrdJkIuJ/GwkrDhN8exnLSi9fD+FDdixJUQ8RuEKyHuTzZ/OTEPLPNSdI8u/iuTxTw8aAfjvVmxP08PUwziExqgKhzHTIi2ON+/5Ori+mwf3//aTtcEycvZVIsakkoMB1GVyzXa/rlT7a//fb+/X1/OBBlAQqi8CjrC+436XiyGUFJZreYxuvd6tN9acqZ1692pc5lqrnLiWA8lDnRZy92O+Qesaoe5hYHryvu1VBr0QKOwaarYdgM/TCsXr18+fTu/sPH++/efQyJCK4nvbnoLvohM0OEGZg6B/cscxgE9blbjoL2L1dFSjx0CGQQjZmLiLVWJyjViEE4VAuqoUPv3gFAYl711A3WcwHsMhOGo5jXQEEiDwy3cLfj03S4O+0fnUCECIAyBCOgby768WgsAGplhpgDk7i7oVmtETAdZyRIZiEiImhKZMLUCK/CXMw0AJgfx9qtCMiIKTMoIAKk4JTADTmlUIyWjRxETuBORKdxHjaMnRUwYJYugm0lyZKgUrCnIQwjAGrVQNBaSoCzVAIEdXFiDLOOxGvTx4abhwI4hBsiAFNRNbcgWIgNiAaobmAGlASCGHmpFKGlvIqQhrqHmvX9EIEOQerRBEWCEVDViEmtqhtJHqc5Mw050M3GwiJI0nXoBn0izh0xneZiaOqKHICQkSwcQOfZCoa69UMmo0o0F+sACc1qrWHYURCysJsHI3BCYgiMMnUprCoxFC3IlJhdKAis6Mmn4CEleng4bFNIl6tan3QtACVmQQ0NIgcqFA91LAxBpRtWIOXmenX3eICEjWfBSEIBZfJAnTznrGYEID1PpWKEqZqW1OM8j9QBr5gc/ABzIFBEBZEUkwlxeKwHKZUtAmyR3lD4UmsthdeCxTyPFJuUrBXfjXezQDxnMdCCp8S5YkVAOIsJEMIbu2Tp7hui1FIeFyJrICzj5UWRvgyKEc7zLmhdK55RkoClieGlJVi0WmfbIWQE81YzA9NzTCIQUTwz4BHw7FLRZimLigAXL5vwZebr57s6dyBLHfwM9cDZHgLOA1A/x0U+s93b6/0tEtO5vm/VQThTE2/AuTmKZQAe0WbFAUB0rmfbmiyvEeef4w/8oee1C2i1w7lBWNb7GexrkjYGTERJIALMHBEUmyUCBkIoOESXECxyYnOzBnVRkJ+VDWeOUZzbznZtzSECCJmoKUMaMYgaEcyh1iCiJkA9D42XV1p0Lm1Z2pfzb3PBftCYnUfW547rGUTC595rWbkgQAtnohLQnZfz7KSEERju3ZDVdBqPklfIybT2/frqerf/8KnvobbP31pDjK2BWu7PHQnMjRMNXZ6qzsWROSVab7JWnSZ1CFMQWahVmCjChSjACfk01kA6zYbgwtT17OAQeDzM6XpIKWyuBJCZDodxqhaIPacyaUuf5ZRO42wEqyHv+nQ8FTMw9/W283BX6xBNIxExwDyWxIjoalGrQUTXi0cEITdJMgAGmAUC9Oj/1d/97H/9D//ui6vL24/2OMnbn/zRKfhoU+o3292pjuPtH95dXVwj1PWG80kdbA7dbVerixVtun6oejAgmMdaAd69e9SJ2Ojh7jEC376+BoRSIks3nhyQhl4Sw8N7VcO8SY6QN/zw8LjedZer4dOtnqa5G3j7crNmUo35fjodS3K/2ax6gOk4n05j5u7lm/U82TzHYT9StZxAVtytk03jkNlAX7y+jn5Xy0xhtQYnyRfD/d4zu41jjnT4+MAiX3x2dXg4jGbGdbWWD/df/uT14MfRu8SMOffffvcpqn/x2dV//me/+snrl//X//Ffvv3jL37xD//i4/3h6lfT/Hjr+6cf/ejadruk9f3X73abq5//8S//8v/5L0brNpcXj09ohdJktx+//I9//WVVfLvrn+4P3aoX6Segi1cvpqf55vpawGud//Dt70uZfvTZ63U5yfFxz/mz16ur6/WdTfO96eT/4d/+/un+6frisuvSl19+v1t1gGDqkJxi/vX3/+724209FTlLbxZYe9l823+2XWOxY4NFgAQYjs80n/YEOnhAIPoZFH7+q2HZQgsd0yO4eXotvTo+0yKoAUZ01nYhINKZ8rDsd0zki0A52sHV2DPYVLY/7J/uQNFIlYu1TuDyXtE29ThTVmBhLzKEI3HbX7ztmwuLdEGp44cd7nnrX9hPjYTVsK1lsHE+TAC5QfdnydOyZov4dtmvl33qmSl7ZlI+79bP1/CfGuo9/wbAgsucBxrNL2UhqnI7XwwgGvu2HeGB2PTv1CYivmykEbacyvAMxFE0wBAXqdfCnAJ0QGpUKz8LyOCZnYTnEcsZtT/fLQRAOC7i9LPD0d86wv4WdEYAHs9/dKF0USM1PZ8B5/MUAH/wzVuqhWdy1pnGA94M8HxBuJYKBtSVmZsou7iH10Ah6r7/cP90nK8vLgTcbUYIIKiKguHQJjbmqqWqVnD1qNoMNptSOpbKKYCWM42QCciXu2iTMw8CocSEYGZmyICOoAjuAYjEHrZgt3EOtYZljc1gofIStiqFuD1N7q2OY/QISkSCsUjHF5Jd07YBcQCaOTIiQDWLACQBWoolDzev4TWcEClakUQIKIjMQljVq3tEDWdBJEUEFHJSEsq9SErzBKcaURs7LFRDKIBRrUpmA2eSQDBkXOK3A4k4sJUBGAhBakEIbhUa3uk1mCgxMDoBECKTkhs5JXCJYHYMYgYXVY8gzkkDDAITAUOMVoupFQ9p3y6HcDPkdgQ7ETCRuxdVMxWmyGTqItkpqiBk6leDU0JEtxp1EjBhYkcW5iRqXs0Ia04wh3sAMqi7V1OKLjEz9n3XIYcFaoBBNWscKwRHsJyYyYUFPIgzBYW2tHUMB8WYbQ4SCwWW9jMCBncMB/RSNYg0zCiYzBACzQXB2RQxWKflKfj9V++q8ehuHKU4qiJiBE+VHyc8TMfDt49QZg5NiSmBsytWc0sUVqHvOchkwHqKx7kOPL59efWP//wv/i//3382eo0c3CVhfvvq6mIlX3//8PBUy1iznlYCN9v18VgEDqlfqSJnUvMsEuGlWnOeHRh7Y3fDxvAycwRg7FKm8KhaAobVDlGGMtuQ1WsiIoSn43S0OtWyXmUD81KTu2TOSNu+S46guJ/KWrrVRS9anybez7VPhGHCrFrV4qlOyB0ZhGOxoJQKUBa7WbMRaC6vr7vjXu5Ox6lWQtyt4DiPl5cIFQ+P6MYAi2roabJPx+NXt6frgV7thiHh/sPjVcrk+PB02K773bYjSfNYdqn7YhXFvM
6V0G+6VJQOxTDl/Th/tsrr7aqgbi46QvQHgKDrlRye5tPT9GDarZJWxdSgXDxOo88KhtkYqwNhOKz7YQCMiOOkOusU8NXj7KqGTlgB3RGWxo6JPT/tkYAvNq8cbBh4RozZgYIZCDs0XUlPEX/1m/3tgQ57H4a+WAw994OsN+nj41jn8aKXPqVhnmzyY5kPY3SEu9zpqV5eeId5pjjNda52tekuh+Gz7QYmtaLzOIdZGjoPZkxCcf94NAoW6ZguL3e79erpaf/91394+rgvriZa3AQgMd/sLlar1ViNRYJwmqZxngggSXb1TAtGtPyL+56IJIsgazimVvg4Mnigk5s7ANZaSFUQEgZaBEm3zjVnZSKCGpGIEDFJAiAg5ABiIJ8VxrAiSMGMCM5mHBGuXrVVCRnnI8QEQkxEgVHnSghA4AhmaAAURhhCQAlqqQFkARTkgJC4RlBgqR6oIYEMnaDWQIgIYsmCQohuihgpGB2n2YLBIVQhd5EEFOpm0+noZFwRIDsSVqzmal6TQAUNtQBqDt3OEGB9LxJExBWgTMEYQE4MQgwBsxu5abghVoAIzAgpADwUOWUxQMEQgJTQZ63ubl49iJMCGLATOXJE5JRA3d3UfZq8Af1CaM5AYBCUBQGmOu/6AT0CaZqqSFqvBZM4iTe34pglU4RNc2XBnPg0wrHUhNinvlYAj0jBgqXYPM0AkJhMwdypQfjcsQgHep10mjjEgsHgNMnscbWh1MtYCiVJjOZm7nnoHp8OnYKE67xfrYYNkk21WEXNw6qfy6zg7rHOrDpOqBlzn6mSl1lTQkKZrQJSnTXCGjvXzTNJTlzMkaPrMg2YUjpOExQPiIJQwwFdGQijuxB0Z7IcoRCheNJyLsmavqdRUKAVkedqEeDcmCNhG+s18AifKScI50KwTTCbI8zCF38WDS2lM/7gUvGMXNAZQgJfMJHWRCxvQYjhGNAU54jPOAAuKnuE2kAE99T4D81imdGDHEEDuF2mL/U1MXq7smbbSo36A4TkHggLHgRnuUHzawZYhFz4DDzE2XWjQUHne4Xz9QG2FfMGHoXDMxMGn6v7c5Ha/FPbOi91pS/XeEamzljJws9a1goRiNAD0BvisVDD2qdisVCTzgylVmcvGkAkcghEzIyCwA2QYjZ0ax9ngAdokFUnIsnIADlB0TAgjwa/gS89BfrSESzfKAQMBBZp5lV0RmzczN0iMDwkIQSEuyO6exNqICJnIWII12qgZ0Tq/GVrXURAnIHA83ueAaSlscKFCQAYdJ6v06IFiVmXJhmBUu7QgFEUa7iCOsQoXTYiZBl2ly/f3Hz36ZNVB0JmVq/UfBcgEoqbu4OCE2BOlPtMTS3IqNXGqH1md++SpER6Kg1GNHONIIokCch1tpRTQFR1Zna3MBCmagBEZTbGYIae4Th7sKAyY4BH11FVqAqnqQZhHjpgmizmagDITGrOGTCcLFIWDEJ3Zqy1ujoRJyF3iPYUhyfG1OdqbupAcLVK//hXP/6v/rNf6Bi3+7HI7uXbzy1lv79Ls4dhNvr9t7fkc/KSmI+qk1Zjv7t/2Gxzl9E9DoWU8zwfBKc3V5uPH07dcHN3PCLDbivbK5mfpn7Vg6Snx1NHeHgcy6ynE+2G7cXrm+8eP4ynAzsicClVay3z1GPmLP26++bhFpwfjyN4bIbVx0+HzSrnjn/66ubpfi9DP6zS/d3D9eWljOOoetENB43UJ0j06vLi9mGmcV/cQlBLwUms6rE6cXJ3goIlYsa56mQ0kIRNxzKDdJd9nqv/0U8++/Bpehpte/l5kR993F/+D//9P6GU7/aHnDZ/9r/5rx/n8gnsj3/+JlT/43cfuy5l6y+6q3/6P/4vyfoSeLNed2nz7v3j/VePv/v9Nyui42z3p+OU4S/+4S/vHz/sXrzcfzrspHv4cHp1szuOoxC83uVO9CV7tXp5MRDi/XfveX/cps1K+HSc3lyuLzfD7959v7naxlwO44mDX257lf7pq2+NGYdhEIQnAABCZqKmmD0TQxc/X4oINF8es/DwxfelUUTCERbD6HhWTjXqSKPhWBhGYghwOhvRLEMDQG7dKp4JME0E1mie5w2VCPBMjUQ8s4ligeHx/JDb4q/jbX8z9+blFo1IGgsxtbEzAsgDOP7WfowAYAEQyABgEAS0cDdg6ciX/QIpwpfrW+x92sHADU8iasEJ52kBnKlQ5wMiFje8dlsL8WlRhQHC+UCB5Z8O7Q6xfVLPx+jy02ekvr2J+TmPggIW6yYIwvDG8YkFHYf24TUKzg84lIchnBGH5fbO2yngMggJ8HBozTuE4xmQQY+FgAQOhEje3PmWQqCZ+jSLo4A2imk5CBERC2XGzycsCS2AHlIsYmQAAGoORG0B8ZmfFYj0PKeBZ3Py5cylhmi1s6oBWYTkYe0ullPEsRZrXytDdw8zPOnp9lT6tA6A0/hEqEnIG8+JsNU6BlDcNaComoYDWK1IAGBBjgC4tLSL3hLAEditEhBhCnDTGSgACIi5WS2EURPSGwciUbCboYcHwlmaTecGDsDbOQrBjb2/rBW3Z4kRUmJODACMpOCEAESO5EKBy4eXWAwcPIA4MMDdABILRDgFgJVamJN0GUIJiNv6AyZJBRwcLbgWTQHBHqA5ZwwND8naZxguUEbfP5lVY2KkpkH0xoUhaEYhEhAeFd0SBnLinBRTKHqJ5lBUrJqbRUXKgYjMkZCyEZEzgQAKAVWnQEHHcOAgcWKEMEgIHVKi8KrKHBggEGEAgOrggEhCjd9GzXAzzIPOSlSMMPcSBRJr5m67lvXKJDORa0V3RhOwjNFJJoDwiswE4BHVVB2KhREGElkgk7FgEpCecVBt5llNcItGgKFnHtaibA2CAMbAQAy3AAdDyV2NRiqnQEEBYAZVYvAKFKQoQSk4Y6qBCsQEgYG5z1Ujzv74GVOm2IoU0yJwsdsoyuwGzF89jRP4DDWn0FNNWntjcj1YVYTN0ANSnWczJcH1Knf9cDT86uP+DaQ/+9NfvLt7/+HT/esXF6/Ww/h4PMawXq82myHV+nj3YSXy7YfDi4vL17kz89xlbDxEjMSsWlKWCE9GhAJIHkgogcJaCA2EISKQvakNKLokk3nOmdU4cF/0gFiJpv1MEqHtrHFwFes2uSdW7oaC8vXd/dsXuxe7PH3/xFV6kTaV7dbD3eNTt1uLJZ2n43HWgG8PdWD4yeUqk9/ef9x22zddx1veZJxUWUhrKerbLssLVCLzJNVWu27/ND2UOFSfS3zz/dMfvdh8frXuQi/74X52OmoYzRKz6jjPXuw68wwgAKh63fdPOU+j3u5P9vpidzU8naZ5qk+nyTzWa37TdYL4bhyfRi3TXFWrYeYQLqAAkb2qAGRJRWuQd6ug8NR1lPjjYRqJq3kASE5ugU4IIUL9IJJkOtZJVcGKTjkzHsHCCRDNEYjYAhw83+7rYe7miYacpMsBnlZ8V+dxHw4ZQ7cJhz5ll4nTt4cynqbLdU8Tbq8ut1nCEUICZdTjbruJRGUaVyljGph2Mb6z6uBgDGOZkNN617+5u
cLT+PR4uP3w6XSck6AIj5M9nqZg2CS5vtgKJ0LOfTIHAiwGBLhbdUezYb3ls43FGSoSJmYAsHBmBERTx9apqEMYAEzzlDSkHle9XKxTUQ1OGu5h6tFxIgAwcKdEpBjmhYUgpmk8Btj2ZhPzanYcpxNw6lZMqHAyciimJVuJEEiG7AbAMaRUXMd5diLmDBqluhXn8C4xJTSvm1WP7iyCjH0iNBu1pgGhMYkpUNxVO+yDGdQYualVqqtDEskGiki1GCHklKL4MZQpdV1arZiomBVGe9xXrWSTF0eRrNUkILmYewB6YUrsQpgo3FtInIO6gS/YDDATEZLBKpBU0YECHZyBEFwIUmIPrxgOFgKuoQRAkljAQc0a/RpVp9NogR5AhLnPUzFDzqmDUgFcw20yduwIIDEie7AQJckOfCwFgdQ9wgg0cYiAWQCmiAjjGpRX/axHrTqXkoVFIjQSUPVAirDIeQjqvFQLDIAiRIgZeUUyFXez0zjjbKvN9lSrVs2JTuMMJOQDhAmnWeeoFqDDSkjjsD/V4sMqE0gn3gmVgGOpyg4EIGizgkcQKxgBbzYrrRMgJpIh5/2pGOA42WbdMzMizfUkLIhxmPR0UEnYZQQNcy0IASGDDDummedRO+fj3dJht7p/0UItVgWtGm4VPzicFe0A0kwSzy05AkKgLdoycANvuS1I5/nes5TgP+3hz77LgeAR7o38soBNrWVdRGgRrd+ghQO/0JlaeewQzbU6ACKcHZoRUGtHDJoJhcsZzGlUH2qyKAAiFKLzhS02nd7SWiLOJCOPNk5d5rcLRtYK9kWqhtg67WYdtGAYDTuKsHPqCy490sLbWsp1gAaytCtuC9TI9Lg4Q8MCCmFD0WAZXrfCvb1vnJunOM9Yn9d/KYEX9IowmslFa0eaMpAhGJ9Tu1qqTov8gIYEBeFcnTCSUBbysHA3aPwxpGg+V637WO60WVZLEukySWKgCFcDd1WLQGvzUNMwCKHF/xmaKoSpz5mTaNWG2T8DQmeWQeMEwPluzzf6w+/98Mut9WUEj8YDWv7keZAMWmd3cMVip/BK5POpTPMo0ymtrzl1U4Hd+qIDBHVMYqoUiA5mDoGO7gYBCMJFLSfIjHWqVg2B2tB+PlUkyynVqoMgExU1x2BGV5gndVXXKqlvlpqSCB3MolSVQVLuT9M8DChh8356OljX9YllM+TTPOWBbz89gFPfJcMYNhxG4/0pPKr5sIrVwNVPr2/66ZGOx2Dhp2ORTG4aFilR8x7uh7wZUhmLaog0a7n6q198/l/8vZ/9+M3rMB7DIvfD7ubpNN3ff9f1wDZ+8+33tw/3n04PQHp6OPZXL6eUPswzgCkEYAU4jU8xDhzJ+gzXq86P/qa//t3HU7/1F6/7DPawf1gRHw91tnGdB6h4eKgBtFkNLzfrx8e7pDMBjlDc/OExwORHN/3nL17/ze++e3gY92NZr7YpEQWSxcVmezjNn26n607+t/+7X/1//udfT9Uvb7Zk0ilSwOnxSQQd+QTDb94/RCXqeHd99f2H96fRYj4Cx2l8ePXi7Zub3e3Dh82weri9dfXc8WcvNg8fj3OSyPK4P253w7vbu9OjDp083n/8/t3tt7+ryeYvXq2vVvSrm+Gf/h//91XSqRTe/aTvN93AQgkg/e7rr9G3Tye7+Ox1Uf3ym99fvXzJE//xL//4X//6t59fX8xVt1fbD4dpHg9f0Gcvbq6+//7u9nbPXfrqw/vdSrabdHf/oeQVRLm4sKfT8ffffb3OP7p4+fOrF5vbv/7X25V/9/ih26xfvP789O23P/+jN3Wuh7ncfnx0c0OwWni1Wioieo4jcMb2jYBzpAAulKAFg2jyM2vqGApjcMHw1mpBQETLK2jPIweQB5oTIlOgOxG1kUPTNjUxESB6s0tpUEOz6j3zfoAo3Bpq5IDLfrhIo9r+Ry2sygOaQz42LGNRcLmDm1uT7bZhAyBHc5hpB0lTv4X637LswTPsBWeLvobzIC6+1207oYXC6dy2w0aUgQBcVHjQTPsjsKmd2v87S6vOBwE18+w4+z1FQMOk2ql8BoXakR2wPLhnbXaDm5oZzUJKcoSFshIR3s6QJhlqvFFsg5hm6BPhzSyw4T3Nf+dcOAP4wj5qROBlkRZmCuASmYcOgI1bhkGIssiisDFsDBYxYpucNP1y+4XmbLVsymfYcEG2ntlJZ9QtANv7xjK6WWjEwS0OD+GZ1ttgySZQhIUJA7YAKfH/51qoXoIIDNUioRKQhX31h48kW+/gWE9eRyHoOmHi3A/hYO7grhalmpmWOpuCtUlYAFC4K7iLRBOdPY+azApIZ6rNRBsgzC0IiRgBmdjAbJ7pXI2BV8EgDD1Ds62PBm/CvsVc3q1iY/0AuAMiMCGGCSISdYQdY8EgxPBFQU+2MJUYpSkPApAwkCE8Uk5MjBE+GZiZz2YdBsgShLx4PEqSLlV1CA1VoEDXQAFz5xTEAAIokXq4uMT1VX/a1+OjGZAbUkizizJXsAAKwLQAugEGNUhUIMzCg9BZqBo758AIMmAMjiAFQSRBDiUHjiAPMsTAxBxtUioRAKEYiZCJBhbzWghCBJGjBoaHRiAyExI4QYsoi8XrABAIHIIYWKKAyWaF6wH7FQCUqtktilrUjLXvug5Jwy2aplAlCakjxzSVEsCJN8NgDhaQJSNlAAqrWgq4MnrjUYFw85JIARDGlMIcwAk5EA2gea5DEII7hEYgeBIMnxHM1a0qOiCHBIYqpiAAMEVANrHqMJvrYuh7mfO6Sy+23cN+mosFyL3Nxp764QV1+5gjD9989Y4p+j6lDDHWHL4i6cMBoTJN6im4c0uqZTQVf3/rJLQderjZSYC7/exHb9/f7Xf9QMFvr1b5Vfc0T39Zb9/Np6t5tcop29xJcoq5qkEIAogzSbFABGB0DbUKAILkpszhodVQWBQAAHvJsxXSSk5elRU+jhMyjhbbjjeQT0/3Ly+2b68ur+X622+/1aTrYctqRFnnEIutgqsLQLWZBk9OQ6KPo3ciaPnm5uLj48PdsQwgQ9Gf7vLP11fzoRafTRwCqxqI7Hb5eJrR52mMMpkpvtymy+SfvV2PyF9/d/AERdN+sm8Op13HOpTUZQEDmF/erN/N0/i4d4MX6wzbzi3m42xVN1lWlA3wsUz4EF2X1n0/TxqEn+6PuePXm82bl5d/8/X7p7keZ48SmHDWMQszEyKhwVxK9agJPh7Li00PAY5AvZRpXg0SRubRJ+qCheh6u/1w/DjN8zjWIzIMdHOzq7PujyO4A7DVGuGudbeitzf970c9TdDlJAyE0RNx8ZX0pzJZVD3dve4us+ZtwgnssWowz4TbPvPQjcjH43yYxvUmr/rVRV6vXgyHTw+AGUn3pzkRPY2HWUuX14z85uaqmL3/+Pj06Xaaa7eSLDgdZ0p0mgslDKLqkJJEwDxrdGkq6qZWLXPqmGKcuJTuXBEtUFHKAomX8EoCj+roGIbGYkwAqjW5d4BIycL3ZeSMqSdMAoSJiIARU4AQcQOjkQFgBlJD065XFkiIAalnAQUo
VnXVDRaebO6HiA5scnMKcw0zghrFEodiKIJCEJkQC/IqcWJmEo8efTqdMnHiqFFIFIhMsPpikjf0XU/9VBE4JIFwzDMEUZ+wCHhg3qwwTA2PD6BHJWbJKTFzT+5R5jJp8RPYnFxTkIDQANNSQ1q0U2murqwkQZlxEbKgV/AwBjFraVMMKAzAJExEwOFOQYk85R5yUq9Bc7Uaqix8NGDAzIxY2R0pyjyBg5q6MSIzJTexiGCm4Ewyz7NCINHjaOvkSJiZAmCcTcmJQoIMcbSy15MAMIaWiTgPeeW0LrNDlq5fj2VksgAFiaFnrVCsSi8o4gEVUcE4HMFrTDj0IALqriUhKMBsAiqbQr3xrrtST8xzLe4aRuY5Bx33plMtKQcQz56OJS47HiQbHEstwQlTOgVUi8wo6wGsALIVAEzFeda06jut9Wm04owsuzzo5LMiKlQN6aGWWg0yp+TaR0hOU2AIziXcQzNGivWuu789LsVfLCz1OEMlZ0J3wyrwuaYLAFqUUK0MxAWvWWoxiAhgsgjDMICGHgIAMhCQL8X88rOFB49LtejLZC4Qmh/nUiIuA9BzUdgubJk2t5f3YCBrLYqHCCA6A7mHhDcTaz1rENp0EhH4zKNsY+9z3wGNkm6L80xAAJ0hLVimds0tMhodqym/2uWxozoA2LlKDw/w57hjeB4eByG2vL+lOMUfVru9XKPx4/kDwjPqRM+IUgQEWAPVqPUVAAs5y898muVuF2+R9sOWAwMLWkTQABsEBEcQovCwgNroPRTNFFwDPKACucWOXYQTkBVHoCBsw1kMWByplnE/BgAJc9dx7jl15BTuSG5Ww93NzRTPsi8DgPDm78NJur7vcoKAwAY8n9lEsPh0/i320PKtaI3U8++dbwsC0AE6IWwj9ham2LC7xaoInh7ukbtaVBiFsOskDzhr+fDdb/r+evfitQCtV5vNbkvjvgYBIwYSoUb1QAYkaooT5oxpI9W9X+VQ2z/NRIlzHsP7nG/HCVWZuUIgQWIiwUnrXFNOPYnkrnPTqDZXDY/UiwtR6irR0KfVGvuUbquipFqomB1PxxnM1F++3HbM06TNLtbKvF3lqRhTDB1cbGWqBcLasLFicMejxXZI4uDAhrBZZ2E0xAIBTOVYfvp2/d/8l39+c3GZWR7fnaZYf/w0yZpyfwo7mu3pOH64e/j1t7dff7ytMP/pu/2f/uj6/unh3/3mt5MVD715fblZx/o6lToZQN9ffnZ18c1vfnd48qPDq5t+s51fvd2+//7p6aA1w9M4gw2HO5wLzXX9+Zs3b192dx/+sN6RVCHh+jiuNsMUcML0Ucvjx+PM600vP37Ff/WbD32ftt2WEGc9pr5fx/DNXfxP//z93WF2YeOc8qxj2V5kgCjum8vdb28f13nocgDN3+3hhBLrG8EAL90Ax2n47Uev3h9YDocTEueO7vZ7sHhzfXPw8vLl9d3Hu6FP6wvZ74+roX9z079/92HV91/+5rc3V/l/+uf/+umh9tcwH8u//PWXly/7zau3a5Dx+FAB//TH/6t/8y//8sPD7d2x3j+4SdnvHzeb4c3b1WcvX/z66++OcOpX6Wr90wOssYy86m/64a5Yt34tm92bP/r5brp9eryPUn/3bv+vfvfrX77+ozcv/2RYy+Px3asvrt5e9l/+7usJ4q/ffbNbfx54MentpNqvhj/76Rv7qz/Yxm626W9++y0AMC20z0XchADA3pyfF8y8NeoLdL0ovxgaGMEMHFiNkITCGqRPEYQihAIkyASIgLzkdtEicoNAAF4saXDhUbYTARq35TxaoGeWIMAyM8CznKhhQs1SmQjAwAnJwj3MAQLBoI3NvWE0EQBgjmCBTNzQ+SZt8nD4YQ8OxIbd+7JA0VhLLSMKz6o0XPZ3BHdXUApvfneBvAQx4LNDX0O5l4AGoHYSEQJhgAWYWztAoLFzoOm7sKmQ8Uz+WhCVhU50xoMWMsniHwcQRGHQ5GcEiLyQoRrvBlv6ekQVIGgUD6BlWgN+BpS4QTMO4M3eCBsD15fvQWPvNq4rUSx0WIQwACekZaMn9GjjtqZWi+Z9vhyPDYQKQAqPZldHuBjnnAE2QF6KjkXq2DjH1ELQlqONYQGrsGWsA5Evk5sz+rQcwE0BGXA+DOZSWVKElgmEJCLev3tXjyH9fAiFsEHICB0sZ/aiiQQNwbmaa7CGVQBD8DBzIwQ0CDAisJZlhqgABTG1eU8oQJCDqwUbCVU3WDhrFMiYkylGgGoBDAjzsADW87ysKdw8sElANeBsVSdInFN2gkTAKBiG5CKYe5QSxRBa3GFYDQfHtIjRGbHhJkAMOTHn5E4A1gvVcQxHYgAEa7xqQhIDZ0KSDtisWYSrIjikRAmMCFJH2KMJKBux9dd5d03lILcPcTi5okMAOfaJgSFAgUVBABs/AaoiYAWLrhOxAANMXTFobhgFa86Zu2Q9U6PqNPsqVhAV8S512TJHj7FFt1CjyCnWDDmaY5VMgJE6HSEgTACBkQO4VXmJHcPMIQIIvXllCfTYd5LzesvcMRPXCbB6nDBGitpx5gCgQCbkDtUpgLxh01ENKzAEG1NNmLuEXUci6GDggVFNQ8w9Irg9/SmJkiRKgRld0BnDPZwxSLgaubs6AA1dykCe0NRVQ20BeRdE0gGBEhCCeFSlBc/9wbDr85sbDo/5dNGvP56OT/V01NJt1vB0FLUXQ2IL2g0P4zhpVa29hTs4walU4hQQ2816xWszdSvdwP1Ft59mm2mu8zhO2/Xq9DQf9RN361O1ju0+IKludv32zeZf/eHd6Rb+/k8+H8d5G9512QyZcJUZrRIFr3u3gIjgqPOUJAQMzQ3BzSGJITqJhtcKUMOjADJCrFheMB8ZZiBUTkV+dfU5UaKj39mHFy8uh6Fm7vaFHk7Tl+9LBpS0QiyGIbkDAbXTIJLRj+PoxQBT7lYrGUetXz2Vk53++OVwc9m///RpfCp0mV+9udCwaoCrBFq04jF8sxa6wNXl+vbbe1J6k8kRu3XqkOOkZfYPD2OFstsyMX33cAudR+opxwRBAEyxvui36348Tn3XH2d14Uqs1V1o5jA37smI3t8/znc2TRUttv0w8eIJ4xYG1cwzMgBIJ2lIwDgzGcXd/ijbbRIGs3WX5mpeYx9KBo8PHwjBNDhnzMKZRpe51BI09N14GrkbEsEK+acvtp/e39Y5htxzslKrz5GZqctVcbRCghe7C0csSO+n/R7ynI0xfTyU1dVOLtan0Ccr78fyk35Llb9/PHR+yp46jZvLi9UQx48KaaUk1y+uymn+8PT46dN9mU2nGZlssmrWe+jsFg4szpJTyuuemRmpaE1WCGi2amoesCL5QcL9A6uInBgxApnDHZmqGRgIEiFpmGk4xskqQwhKHnrquGiQGQH1RFHGwIm4IxRzVahMinHS8ghlprTCvA4FrdaDI7o51nZ2zO5zlCkoMXeN5R0FUXKbQBkEhiNwBAIJ5swYqHMcT+MTYCcoQMep5AGRg5PMxTxgDhXAFVOYRihFQkVDiOocaa4O7IlBGMK8jA4
1yEmguRFb17lQBNJJYTqaFSJHDA8rrkoSScTBAq0JZKBGgkVHAYiIDtAS25EcAAiQMQSKojkFMnMOAgJ0y4KZ0AJDupSzT+MUE7qnCKzVVAnc3SBMmJ1BklhAEskpN2Ktaw2vY6NuuoEpZ0RuoXOVWZhJqwnHPM9TzGoTOgYRIRoU1dL3A7mHmAg9HT6uEhuYJJFeKCwLkQokdoRanRNkYYeKjhAJc7LqAsAB1Ss6MXH1ev94EJYuYSa4XOcZtDAEp8kMgtWUCMIVPTKgGj09TbXLFz2X4pTArVZAFCagCIdwYlplthJTKRR0PJZMIQiBzhhYK0fzssPccXVV9+QyHwKMIgH0CdAhI6oFRim1peu9/Hz3/l8dzxBGnIdqz1TtBd1oJWU04+QARghzpqU4Q0DEYEJZ5sMUEQTIHo3Kios/wpLL643eBrAANkvrH7TYMODz+wKEN3Oa5187k3oCllK4RSFjG2828wKAqsG0SOSaUeoyPm6za2iVOgEC+XLPS1Rv81RqE+Plxp9NNwLODq+A1FJZJfFiNQ4ALfoXo7Hj25K2mz2Te/CsF1tAODz/x9lyaLnTtoAt+QXghwXA85R7WenzkDYW9tNS/beq/VmsRUvtvrzjQmhagDiMwDbSBXA/D7IhgAAbCYuR24A+LKKxvwIqhhAkwmCsjRdlC6+njZoBz9oCAELKzDln4RQGs/p5FLYQ1dwBKc4DYAAASdKl3KWcE1upRdXMFouPs6fQWYexwEOt7zmDgctn8qz0IAQhyNC4BaQN5CS0Z84DgJbiXiklTllYuEuS5boTRPz08XY+Pe5evsxkfZfco3pD5iKqAYAQtQkvSyrqCQFmb9kzUK0XcYBalQlKVcmiTdjMLCRzmW00r0EQ7gqhxQDDKYKQgMGt7i7WfZdPx+KquBFFlwGvUv/wcQwID+8zO0ufunIcwyJnebh/uNntDvuqRfvtsOnz4eFYlGtAGEdgGXW9zT0CqwlE1dhtu5998bLa9OnTQUt9sRv+zh+9+eUvP9+kgY0kb/sV338apWNhKuOHh4cP/bYW1L/+3ZfvD8oXazySH5zH8uVvfjM/PcmK+026XMcqz3Z4uLm6sDqXx/rV+/txb5T6FHp9RTfX62++Le/ez7ttv+pjPB3XfabqH0eNfvDO3z19d3XZXb/c/OZ335+ONvQ7Avr0dMsvroeh39BqtZLx8dPpcLxZ22nUu1PpA/s+77b9Mcps+tvffP3m84sZdK62e7ULoflp7vpOp+PH+/fXm+v9fry+2r7arr+5H1nSxeXlN7/9XZd7ggTA+2mONMAE/Xq927DZ1AUYyfQ0KcDt4TCP8VCL7uf10P353//VH373N3/yn/3p/d3jYSM/+Tu/WH32D5Km/9f//f/8iGUGfLg/oX+6e3q8uFpl2v7+q/+AtOeiXVq92u2OD/t6spOWFQ1/+PXXl313dXn98tX297/9+u7h+MXli4uLfr5/ukzcb/uxPD3efbk/1r2VP/n5T45f/m7Dl29evpXp6fFxDLK87t59+1Qnhy4+u3k5dFcw1V7wzasvfvfN12unP/n8xf60f7x/ak9BA3AWGcjiNmOtkY4ABF8yYT0aQNwMUZbN0dsWF8INkl3g27blJiRB4IbsnDk4AY2hgovzEAbicw78gmw7BEUjmjbVW8MrloOq4cy++CUtU4Rwc3CE8IUbelZIgbdd2dtsAvlsnN/ixheQAjwCnEiaUq2djgHPIMZCcDzvNu1Om6DVAILOR9wiCSJyt3YmtOtvbxi4oD+IZ0ro2eA/kNwNIgI8mmPkcgFE2FTYTRHtCM8E20U4Cgu/EhwWVtLyOi3Jvh0V7hFOJO1oCFdCRiLw5XocHMIWFTaFgyNFQPN+XhS/AW0m4QtbCs/1HxJEi1kngAV1IuRYoC5r0x9zpYXnG4AMC/DfIDxouXuI7Yvm7s120ZocJ+JcRGDD6YARztJA/EHGR8/TEVrgp+XIbTMfo+dvBTxrIQEAqlrRCOM6cT3ND4+nMtbNdgibYw4Pr5aBE3MyhoAa5AKCzRrZ1MwCEQSiOKJrVUQIt8TkBKEOZCApwDSMARUMkDmvAcCBIpbvLRMzo7khAXBUMxLQsnwtGtksIiwMokWnNXkdUMvHCBCzDIIO5IgLdEepG5JNqSNOlpRMIQJack9i7iWLDDllESJQySwcBCA5FQePCoSdqqshGLoxITmcHQnZSVBEehSw8AhHV7SDGwcJQgcQSJSBHaQaRSS9uVy//Gx3/6nePj1N1R1FQ9EVrIZ7SkkgRURAT4xzTJyRgrNRzFQhJFIfNBAyZKEBUAIRoSbwHOTuho4RYtjVrost+wZ8iJiDnILCRiDg4Eyp6UUoU8o0zxrQpoISAUhUXVNqXlEQSbDrKCVmFg9kwWDQCJ28VrdZ5xHr1ItQrS18xj0EEQFrqwtrQLEuABxZg5U4pZ5wMOslomiYhlub6RGzQ/Pzd2AAC0PCAAlzrbKMCiGgOdKGGAEhkzOpewmda6mmKJgBIMCQWcgbGMwGbgFWyRMBiHB7CmyuIpTzUBSSYOeyk+gSIqJLli4DI6/pat3dPh4BIxkEsKq6uwNOQKPD++M4UA2f1qvMTpdXm1GFpjRHpEzzOJ8OcwfepX53cUEQDqm4rFayXudDrV/f31/3/ekQfcp3J7uL8uOXV3/69sXh4Z5zcKK5qhBzl0otQEZRVME1AgVRUTq1UmYtY60+ZuGL9UYAf5q2H8Zjb12a4YryJigLUKbcX81uRELSZ9RePSJO01yOx51QFyCZJa1rPXau18CnsWaRp/2Bw97s1vfH8TiNyqt3Rx+nEpmB2UrYeOrXuZoVU5h1nmjFuRT9eB9ljPnE04O+WsubbX+zW719dfXx7mG/L4eJPiru56MgjKd6fBwrxWqdx2nadWnIkgIodNX183FadVLm+WAO6E/q4DFrBQRnuytlP83q3qW0G7ppLmGeGBDRAAigo7haryQzZanVno4j9nLz9vJx1l2XtnlIBFM1iDCR/dOYERGc3LshH8cRImysOUgxuULHHRkl9x/d7D5/efP1x8MERhAdB4C7z+ipTKWEg5t0gpyQMmrKwK9W62/iXpx315uXl+txPE1cvE6vt+vrPoGVQxSujAKnEUQIJWeWi27VQ19Ps87jcX+YTqcuC6dAdFNj4MvLTQSuI560rrp8mSSpB9o4jtKz1tN+qmOt1RwAu5wjIMr8n0BFuYuIusQshHEIBRIQImkoBMiqN4hp9pQzo0PHFQMTBol7YCnYYkbBHMm0kEAiszqf9neoQViSKyAPzOKKYQg+kZQK4+S1JmbKvdhspk4Sl5t1aSQGKwoKHonIokJmSDGdJteEzgFggF1OQ0fOpuGEgITFNJF1QCthLwaoCZIaFwWdOBEThQh0GJNWQiKj8AgnpDB07iD37KbjqcyThTFw0liAWpBw0AkMKBwtcyIgcCBkt/ASyFLRCCN1SEgc0kGyaoQQTC3lkAAYUYslYUHIwkrkgR7QSQcB5EpkZdaqzQYmPAxI3J2IIwMwh1CYV69MoOrUhE3CJKSEhR
xYv9tk6Y9h2u4/Wr67f/+t8SyuCD5iEQ7C6+XH/6++HRo+XR6Wd/9rFcX9lmO16u61A/+cmH8OMOfLjZDujoJqaxz2OMg8j356eXZ/Wn+/ys9v/N/+n/5p0PfqMyuf388e//nX9vffELiU4zDMOIhFN9DRcekbBDLTzVUqNjh8ymASBRORwVkxIhvIZSTP9M19ZKiLDgRQ7qzKTs4JRkcwRMiGpsaGYRaMhkoIZYMdVeo8ogJkZIyMiTNbqIiIfx8ut0GEzGn4m5hAClWogJHePEXDDIOrGIDomD6cBR3iTl2DE16kDhyeLhDflV6OC99+7uLrerVb3N9qc/++zTVzf7PiJ6UlXIKkaIzFQEI+cICEQVmIRpVnM0u+l6dK5p66pq835feVdXfLsfo43o8WRRCwEFd3WzHSXNFnVmv7tNhhYIaqL5qm5rv+3SXpTyGEKdR0sx932vqm0zv7pcr7t4d1F/+sXVejSlWtltNe+7OPbR+1pGic5iTg2GcbRd31+DJDFGjzbIehcI/azxbds2tTNl4mqBfafnx3et20VFc4JVX83z+99qFbwqdJt9HcIo4/bqKlA9DE5GPTtbHLn5qp797JNnRkGFE6JWJ6ug1QK7Yd13QxQ/b1tfNfXMffHkeRYn2+w9dmMOm62kgTgs6mq/7ZM6YJ7NwunZyclJePLl7fMvN/caf9TOXq139+6uvDu5uB6GXXaA1T1+fvt8vgpN3RpKmLfV2qfYfe+b77y82Lx6dbtFlZzuzO4EbGWX8mCXQ25cveD2y0+/DBygaqrZYnWOj47DP//nf/5sl4Drk4cP0pAf3ntwfvTws81H434nSLDGWNPL9W7Wni5O3hn2t8jp7oPVs6fPx6vN3Osb5/M/+smXqD4NfMbNmlNgiGm4fbk+PV5uNLdHdb1UoOEbX7uzj+s/X1+5s3D/7bebuyfX66u6oS5tf/rFRw/efICt67FZX++7TV7Odt/94PzZl7gMpzHB7uNni7n+xjeP/7O/+3iQBcQyRK5vRpmfzL9+9CCgfPr0y+XR8u6jR3/2xx8TNTejdrlrGc5XThDfedAA69H5auyF7rSGu29/497F4+f7oX91dYMZbm+203natFB7Xj/2FUyKmmIGBowEoCVXRYBI5RYuku90wkdCJXhdYGYGWGwy08Iw3eMqkhHkgAJSm/D8VlIyhYlz0AewPBuK7lPkGjvYjbBUH5btwyRjHzTugwvS8NDShWAAqggghQUoClBs3WhgGYEJy3GJCSauDKmVMgmdROop11ZkL5oQ1gLKOL04PDy+AA1K9AwMSpoMsThbM4hjX+I3wJxVyAUjY3ZJlJi8c1UI7By1IWqK66FO+L13f3l1dLrp+1mzonZJwUdwbFVzej94HvvcC+XdOvXbXtObX/uAV9Xzbvzg/rdX89NZfXx3uTpt5n2SAePzm1/5z//z/zimrLm31xYsRQD6ypwFBkAGBZZc1gNCmMYyxQU26fYlhoxISCUzWCRGUYFJSrPSWKogQGbFMQxGAGZSzMTTF5yWKkAi05LuK6LV4cqKWoneHFrwJua2Qklxvd6IFLTR4VpguVhlrjAZpA7vST1Y67wj5lL9QoQoB6CVKAAAOrYszIqqyKjFFAaWdETwGQEIFEWyIimAiGlWQUIELqTzEpVXxCyADEQglhEIHWeCvYmQBGRGIOSijJLmCllNRUGAlb0HR8qoRsaKEBgsgMZBLStgVnGekqgickHKF7QEqpJq7MFStCgq2TSZKRo7NIne12QJwAEgIoNlKRBj9mQ5IFfAATiwR1IwV1bVqNP+jSijlMkfG1jNUHlqyTl0NYSaPKCLWWDAzVZslG6Io5ilXmAAcIG8g3A0P0mGSIpq+3VuCZfHd/qc63AyDDebi8u4T9UyUEzOdOh7nM0w3wBbL0M3dkf1kbh57RtAP5+dvvfBLy9OT37y4R++uH7edR2xBueTIRGBYZKMpiIJAQbJ7Ko7998+P39nXi09sGTZp64b+vXmttvcDP1uGPZxGMhYEyAYoanlgiwDhkSYnfrKAdCEhAQGBB0zgteEMtLpgwez1Xnlc5UUaYxpv4+b/dAn1F4oGqJDBmNE4jqbaWRHwVUzX3mw3mffLni/61VQlH1VeXaIiMSErIhqIrncHlqsnwwMjK40cxFwWwc/VAGyZSG3V62MPLBHETA97IlyHsiIvVVV+MX1q4fv/9oDoscXT+dVZSnEHFPMBOBqx45p41bHwXvyAm1dxcjb9TAIEhkyU9IWK5LKS8soANm5ylUugDrnqzBHANlczY9nXq+52crc1nt9saduaFfiadxnzfvhdrGog3P3zmfnIdQJZu1MDYdtrBgbcm/MFxU637APrrtNvoOV59qRq2igvBUJ6GKM5uEk+KPaPU/p6S7fksUAfDnUtbVLiLz57PZxhJlnuh2kYl4urK7s57efH4fFad2sZuFmvB7S0LCDFIPRjATAMqIFzKbGiFRdvspPX4yuIcdo3m36PiZwic4bPlsE9vqij9uICDYLfIM49nF4ub5TVw8X1emRb5omcQW73avtbuxl2+aqqUdu5qvaOuEhn9T+k6ub282IBv0u3zlazNFvN521HA0zcRN4UQGCPHt+fe/Md6OkXAESBwDke4vjftd5plrUtxQH6Fz18mZ31PDd4wUq7WJs2U5W4RaMCQalx89Hqe1mO8wQDbRGL/usOd67M+daoOIhaYzjYBkFn26fCotlIQrbEcwsqm0l35u1i7YekPoQJdVEfL3etK7exri92d9ZLNXRiMBgXUrzAGyDh0zMITiLQgQuuAGF0HwIm35bIY9JUsplZ7/ZDwMo0Ik8uf7w8Q9++dsvv/v1d998+G/+f/7kj3/+6WM1dW01dqkboq9DaMLRyXK96wXcqzGGrn/zbt2/XEeh282UuZmkImQq3QfZDE1QlR2VudkYM3kFSSlhTJCTIpFz4APlJGM2jFI7BmJ0BkwoYGqkkGNKZjW7hrDkPbjEhnM2M5CMRI5QVJMYIUIEBXXBV3UAglFGIgaE1rdYuyjJtwtJg+fg2aP5DLbPg5hGgxFg9OiI9xIp9zVrEypglKwtwiCKzjfMacwBqXAKvWczc6pEiKApgyCZqmUjg+AYPakCEyAjBZfjiAiBQxx0VANC55EQEUTGqIpkQIBZMwqYCFTsEFHj2A8pKptVyBWZN7VeMMWZAI65AnNggTE4X1wrZF/tMFxxPZgZWBYxUAeZAZ2hgaXyECzH1MmBDESsKgZCQGIqMioRgDERETszMChdjzKlpcRMybEmzYwE4Ah1n5yxc7mugKuqMwkIvgpjv6NkJ8t5tbz36BvfMNS4fW5ZpkN4TLxCh1ovqk1vwxBrNiKcz8hcEjE09mzs0CtG0iziQsDgynFei8tdgdhl0FFTQh0NjtsWja5vtojGnkyyQ4p5IPMokKMo0pDEOUxZKgeGFiqfRGdNVSMO4zRc5bKWxxjV2BOhQcowjMvV7PVpfPIK8cFgZFNtPBrQFOaaImdl262HPeNh/GdWUEFf+ZPgEAIowEyDqeLDpi9XjD0AhIa
E/HooTXB4DTb5TAqpFL/CbJuhGuQpayZqpSQFCgSjDEeKxw6K5GTGAGRQOUIzVSz4UTP07FDUIwEyZnKo33j/g16Ej0++9zt/9U8/+X8uPFHe/OF/9O+//a2/1Ny7s/zgfSFAiJz7Ow2L6Tx4G9Nw+fLo/nms5ovjFT9cffaH/2T/0Sd9txm2MTBUQb74/b/98Hf/Jq1W4/a2f/Hy6p/+I7h9svv5ZzcVzR+9NyxOTh49bF++vLm4uFlvj2fzQN7n7KPMMlURkevFuRsk28U2z47f/yt/45ffem/d32iNp6er/+X/4n/3f/rf/6+6vGVPTtkQc1IpKUDRAggxMypBMwIxQONyFcs1YiR57SN4LQ4BTOVx0/sEX899p15nnLhAcOikI9MixRTuK6uaQcqwHRUCVAAzbwiwy4VuYoXSIXY4P5Y3ykFlxNfvvAOXlhCZwPMEJEfDlPUru5oB4vRKDodGLLXKplamyjiRQgAA7KvMATD5+Xz19PnzP/n4s5fbrk/EQIwiBZhLRdOkLEkRidkUVNSTtY4aJEuqaCmKAQ48cExHi7pCOq6rojBdb7fC1A3D+nbvA6EjSnJ31pwx3V/NVyfNqEiOpCih7B05hSQxKdmQ9MnNVdS0bN3x3dXF/hoVRHTWcGbYbWLwPOTYNLVjM6YhD6iAQv2Ygw/zNqRum6gcrkVsQFUSRUFyvvFw9eL55nq33g+/+Ztvnt/R+RlB3Sg2oDamKueUwW0f5+5KINOcXNrFXvKON6hp3w085/uro/ncP/nis6X51dw5Cffunl+tb292103g81W4usnmudv3q3a2veilj/Ol9w1tb/fkwtFqbgTWDc+u1wyz83snJy0//fLPt5tdDc2js1X0gpAlx9vnr1SlbajvNnfvnV1c3rCrsui+2yGlsyMfkK9u8tFRUwXfdZ0T4AHEsa/88bL5/OKLtr2fYs7D8OHnlzfPruvlsj1d3F7ctgCLJjx78cm6X6PXIPTwzbe326vT1R3i9vL2qiYJFT9+frnbZk5ZKvjnf/pln9SZEjT17Pj8wdmf/exH6Hgxq4g91pAqvxPFKN++c+9P/8VnCemte6f79ctxuHjvtN2ON1hjisNNv766HhFRk6+Wwc2aP/9oN3P3BiOjXWikG/d//uGYOXRdfuNoOexTMnOcPeIpu2HsuDED/PjDz3TUTb+9jFY/Ov2f/7v/nT/6+3/vZx9evPlo9ezq5smnt8OoNq9OHi0eP7/wkM7eOM+43q43s+XEKhKbAMdaTH/0um9wmgLIQYkBQpUCt0EuBYiGPEGRyzKhCjY9HhAJrCzVWaVMGNTUAPUAyp0IhGAmWmytalg2IqUulQ4KtYERkB0e9jbZXBAPOOvyUDso3ZOxtOhWOiWlDVRLysaKwqSTqgUISsaMVjJGCEY85fCpiEMlVFXWKyvfqZkpUkloMRaTJqBOeWhCEi15YDVTZCI0VSFCMlLiyG4k75sKASygOWwXs7r2mnXl4cHs+P175/H2xXCxvzd78F629dNfnNTt0erubtc394KwGYmgCA1DrSmOzrFBPr1zHggGSXH/8pe/++3l8kSzVCY591Xws7p1du/f/Jv/3f/kv/hPTV6YDqgG6IvrZzJXTRjvjIBMmDMUFenQPFdWAhMzKwm9Sb6zoumU3rfyrAWiybylgASmpX/CbKqYMDMl4AM/G6yEzkyKEIjlPWWT+AMlFGkIhWWOUFaHMnIoy1fJJhKRllFRWWiApijbVAfxWi2aMNhQ5k0TXpvQkDwLIqB5QCN2oMgKgjkbeheToZFmzToQJmaPbtIwpeTlUWnKyhswasEETTQ9QlWU7BxQTgTGnhCSYTYOioiIqASipkCEDhERxTCbEqhDYGbHLmcVFUBgx5gGRtOUgAGM0QxMiFllMHViSTSbCaKCM0NxTEGRURizIzQdATyCEaGIgWkhPJBJMPEmAZCNEDNgWU+dILIZqDASIRoxmCGqc75lbIhO6oVk8hTIe1HbjbGPeVWH9TrmPA5jNGLJsltzy2HeHjWLk5vNbZkbZskCUNftUXt0fblZHt3t1z/Lca29deM2kMdjBu5Tt6VasqbdOHTLbhmOV9Wi9Q0hVq66d/K2+0A/efrjn/38w37s2OqoKmq+rkvBXTH3zZfLo+N7Zyf3FnVDJjHu+2HY7Dfd2O26bdrvc85qQM6BAjhEMiQUU0AjBQNlRgrkPbMyAqsKA4GZJyBDciHT4uT8jq9A4550v+s3N7vr9XqX1JBZTLlcNiBAIGMyYwgYicWVUkYKfnY0X2/3/TqCOchM6ssNNXEoLatEMBHJWcyxY88UMFQ+qbmKQ1NVtkXskW00CdM7i0Asq9pBMB33I2ZmTylHY/jw2eO/9o3vGEgc9q5l78Mg5s6C9wgO7j84Gm52FXG/2Ufg7b6LYMqQcooiY8YUpTKJQzeMEUHlaodMWTOyXx73p/PZrDI/ywY3vpXL/e3uchy6en2Z6nr29rL2FW6TiYFma8lz1SrJOkrtoZmRcy5nNTVNhHVgollbNaMsa3CgTe0gxdssqjBnn9HQrHFM3rzgR+MIlV938cmLPN+D6XhnOd8NQI6HrGhxsQLVjeL8Jqdnl8/0BmuM799biIx14F0/zOcOgiUnWfOQIpk4odv9CGgoCRFBsqrWvkKgs+Cd6p9+drHNgYlmHjVlAeRAN9td7ONo+f7JrG2aGoO73cwQk6S+E659HPNuvX/7zoO8H6JhfPKs7xN7ahYzT8FnfefN07Rqf/LkIlpex7wd0qWM33vn4fLk6MWXz8mkxVBDJNUFueBZRSsKDvR45hvWfojPXlwN+3FeV/P5jFEWrW/bQLCQ7fDh509zFRJiZTMxaVpvirNtfxLCOOylrNmOFj6wKchIoItlha5R6lMakzN1iJbReMUVGw1ZiGFRVyja1G4UyJIBoQ7MAPskFyIL785PFmmbUFEIj1vnA6UxIaJoEjHwjhjqyqmoISrDbjM+sxsb5F7bfvThz1988tEv/+73/wd//Vf/4Oz0n/zwJ/3utp05Z0po84rPm6Db8dn1blB7cN52Jg/vzdHVn7/oP33yF6QidkhgoqAgoWIE9TUgal0bkhBI6rMljsIpQlVbOydHyIpj4tRhACEEJVBNBFCHEJOkLJ1hJlpUAUyHITrnBIQslf1PBDUQz1ghCqo6NyJntcq0JlZJhjGTveoycIPsRgYSpkRsJKqSxQHamBp2bQg3FqHyGm2Xe4dhxtSJEpJmdeYsWznosEMGjiIFWl0xeUZmGqIUW5dmmHnn1JwjMdCUHLBzTM7nbFNhFVpwDGQIkJKoZSQeDXI29s4FDhzAAZNStn7IBMEwE5GKoColw1EsGyi44CoPjnzjvUmqSuWcMQCVygUT8QCCh0pwBFETPgRIAJiAwEixKBcEhKaAgMwgVBzooFbGTN5K7RIwSiyeXsOkhkkJUVWQpwiODplmzWJxYu1R7sZ8e6NpVIBmMXvr69+s77ytkobuBuc+QRa11IEKVnMSH9GjDy4YNwzj7a0T72uKKc6rtk+dorPKBXSpV+cwQ4zo9x
qjt8qzizlpjGIZDSpehQYFwXQ2q7s07ONw0rjKOcNg6FSMSEWVmcRU1Liq05jQcOH9nIA1V8GN0ZB4SCORJ4ZdThHdy83uJIRZG3b9MG2MCMnodZH8FPuxg1xwkIimEwKYGIgVZtAUN0CbQNWvrRtAZXIKAKXTpGy/ig/bsDR7HLJFCIVuMfGuJ55RkaAUADCrmpFMXu7ijAfkqcgLjEwJ0eiAGTM9tNuIOYdmkJN4j2hQh5BEzUQUPJszW8yqKkBoWpUAm5vPH//sg2+9dZ31/jd/Zfa3Lh7/3j8NWNE2ff4Pf+/9X//G7O3zsDx20F396R/Ri2dtzqsqXtxs8yusb8/CnYfKwc9mp83seri2fRcchBBqn8cf/ejzJ5/mCvGmO3ar92enfXN3SIF8TrPT+Te/dzv04YNfOgZr++GLv/uPrn78gzY0IthJdIFc5V/tBNnu3p3X54/+9X/3f/Z0t2+blbf9bXfxcqvfeuc7n37+w303NFWdTLZZrAxvcwy+yiKOKabMjFkQTbwjQSwtxY7I1BxjzFLUIpy8OCV0BjRdZpxwFUX/w8NYHwqsRD1CEQCLEgcADoEMomGXEc04oCdq2MQkKqqiHdDV5U1TEizFGXTAWZuVfb8BADAjoTEVeoUyUSaj0uo8wVUnDavMtFUPBIyDBjr1dAPqV5kRAADX1J8/fvFHP/3kct8rEqiVNC4CMpGhiZqCAJWkJFiJlyOeL2uPDICBqE8pDqMZLZnGLmVJR8sFOej3edB0uRmCdxCoG8bTo4UpMnDr65OTs1Ajxqx5nGMtgca4MzTn0ETBAc8DxDTjcHp3GWd9znnca45pu+ujWIwZlVvvTmaBdM+Bnm2iREOsKvA0mEc5W877Gig4B3y73Z0fzSrHMefjJnz09Omri9EiArlHD++9/S273m2UeYw59YPk7KswZKRlaBrgY43rHeE8d0nTsLzXyiB9cL1GWe+d0/kiMOkwDF++eqacHWdNfrcdxyRLN1MjjORzQDMHFEIYh21K1jbtbuwXeHK3Pv/Jx79YuX6xWG27YTWvRcf1/tkQBzZ64517m323qpvPv3y16batttu+AxVfe1+5sxk9v9jtRg3z8Orl9WJZnb1xlLp4fXnDgXixB3GLVTh/934c8uUXl5efv6qb6vLi9tHJsSRMowDyzeV6YEWLbXA3++vjk5P5crW+vDheuQfL8Onjy5///Nn9o6Ma9bie7YZ0943T8WJr1bGk6qMPv3ShkbRfNnXd+uNV/eDsfnf50vy9P/9J/+o5rjt8ktd356tqEMC0bOz+m+/+4J//5OLm87t3Hz08Pv3Z55e7i9tPd55gdhxy5a+v483RMX7nV9//6Ee/OF7pd75277MfbYe9SNBvfGv58I35/un25c3lX/mdX/3BH/yMbHm7i8um+tWvvfH9f+VX/4v/9P918+zS8eyzL56vc+eX57NZMxqMFxs3DG0TGNNsAWB0/62zP/2DawDIogJKZkysgCD5NQ8IS24LIZsZUQHEAB56wsAMCm/YSmQJioJDAICg+aDyW6kAEz0sFKpgqjZh5xXNZHIsFsqKoQKiqhGVYoSiFpWuLsiGxYZYbm+cJCAlQgEAtK/cTIavCTvTrmLibBPYV7bEwvO1EuAFYyKbSgNs6kw3K/3wRFgSOzJF34qwojo9f8pfiVDkoRLkASYszZ8KSMA0OhdWx+bDkqsqMBicnM8MXAN0DtCyvbVseaOrx5ew29J6XK2fzzfbI0fjMJqAc01Gnbc1+krAMAAHFgMVkDF+MK9Thr1VvzRbzNMQ4ya2DbPPSnU7295uj8PC3Xvvd3/tL//JT//RxfUrliQqWDpCEAyMucC2VXJJgE2MKpniewakZorECAdRCMrhefoZoNGUPrcpwKegVsBMhFZAVZN2jwf0+FS8WawRYIU6raaCUF7ARNwzm3JjVnrPpywaqQGCTuyc4lSbrGcFXFTI20ZQSEkls/Y6swxESGCk5BwjAhN7R2DZeYIsqlYzSTZomxgBcyoeO9WsoCrRIjh2xAwEhrlQFwERRNWAAhsaQDYBLDz0hGxAIVgGLGOvlNWSMRIyajYQhKySgYqogWqaLFbOkxkiMXkyJ4mDiVlMQ2eoQxRgKr47RVLI2QQ1o6mhmWbRXBRUJCU2IkMG9o6AEdCBAWhgKlSmrJkNzbJyAscKWbM6MgRFSWxSiupyzkwolj1BU3GLGIAJAzGRYQhOEqxmLueOLcVKP7nO/abHChUoan9zccN3wsnR2b2zt568+Nx7M+LtIPRs/f633/vWe28+X+8alYurj8f94Jytn1xTzP7ObNxvxn2PoQqzPud9aq+zX43VsmmP2JwJnJ+/2S6btpn/+Gc/zSmbKoKBSpmeCVhTz8/vPDo9vtv6miHmpHEcN+vNttsNY9cPMY8RjcCQnBczhWyUDROwFdiFmlXBsQMP1KhDLX5qyEZGGEerfTi9+8DXGfO48mncrzc3l9fbTd8NxGxWSgLFqTCjGiRLhgiWDVKKG2hmo/QztjCv5qczSZ0O2dDFmDwFdphELKtIQoYUUxmz+cqRc1CxNaF2fuGJDSp/5BQQB9IU2AXnHDAjI8aDBRLaeY2ZBXPOWZ29fPHlnzX1196+e/VMbl9tqxXfO16u3jndv+pDRZDjrk/PHu81hWDCzhaN3+97M911a8dzAR0lpiwpmiOKWdk7lSQxr7vxol6/cVatd6/uv7Ggpd/cQtxZs08BsZKE4lScRN2PaTf2x02F5GfeNTPXZzsJeLtPY5+Bibwndm4WEuajNvhh8ICsWBsFa0GSpMjOZ0lJgRVnWY+N9onatrm4vhGcKVhK5kBN8sJBG+ytu+H2JtdeUh530a73uAruwy/WbHTvaIUofRz8DEeMrvGDCgvtN+N2n1Q0A223o5+HrF77eOL9N+4/+vTm8nlyoapWLq+cE0ZgCg4k5cRuPcTueZwFP/eVquv7FIjeuH8SlYZ1V9Hs9vZ6uN2u98Nd554CurZZLWerunbp9vQe984iDtr6q35s0SV2H12+mq9OoOWG2ncePcSr55LjuN7kvmNXG8fLq5vWtzWyaI7Jf/rs5t68evvhqWevA2iw9qgeFHVXveoGrFydh8Wi7m6jU1t6vnq1BY3BhatuR0iu4uVi5tTWuy2Bgwi1iQ/qquq6G/ZRxiQVYoMcB9vHLlTVneN57nczXw3j0I0Zq9p512fcZb2J4zsPVyce+zG5mQ9MOcltl4JhGjOFahcTEzhH6EizGoF5QoTL3bbfdpuj2YOj5Y//5OOzs4vfeP+NB4vv/eCTTz5+8uoIQ79JtM83sEldUuXK+9ubbqP5zt3FmPvFUT2dC6bzASoiNA34mtuWENTXagbdvkOi/SbHwUxQDJmRWYEMScyw36gPbhfVBW2XPknBeIhjAAEzyAYRgNHqGtVG7xAD5yhZCBwjQ8xaeyIEMXNEoDYMg8UYmBy4KlBWyYAxWsrslTQqENY1qxIBk6cElgzA+13SaIbohmxIeQA0teC4JieqfbIEJkAVk1oWVSJHzCKgwjlq1uyJqZS7AmTRI
RsJEIjLxc5ogJTB0CGCWEqixuDa4HpRJBKP5FgZCNQLuAFAwEOtQKDZQQ6mQSyoabYWyTmoyOoqNL4mZFainFIyJiZTQCl9KCmLFhFhmuBhSgJEZXxhCKRagu6EiABc4ALIBhnUpNSBIoKZD57J1wwEaCYgamiiKCpMyIxiwkRm6rwHc42f8+KkWuC12JC29x4sVyd3nV91m01T2z5uL1++lMJ2ykieJQJWFFMW04BJ98mlYLkS76oZI2TnzVQ7SlA5U+stiioHImTKSoTBOUtWVRWQ3w1jMFlUblTthwSEi6oKrI6w9T6aoqMRFLIiUiBynthVTE5zJARWERFmx6RZxbMv22AH4JD6lDPlEemr7M2kEOEBKfr6d6nwAb4CyQCYTSPE4vEpc0SE10W0dIDewISMASQEJnBoYEXgO9iNzA6onGkSSAAHIgEWOUgEkpkYycEJMn0dNBRCQLVSaOtl7NQpOQfIZdwIKs4TgQUAc8SlkliZs2DFzgBFWoNZ1m998OBbv/7rjs8uPvppuni8+cHfC/NTGfhuu9y2i3TTzar+bF7x+sX245/FZgWpb/rb3W6DMXbr3WLWVnOfXz0f+hg5NGGmmdLYzwmTGAI68C6zPN3NPYXBvN8NnYAf5qGON/t4cxuAhnZxsc+1rzFUx2++2Tz+iOMwDtkzZBlE3L6nqopHIeaXH/7oP/6/vPHr/+3Zw9M6DsPV5+3ywf/w3/53/h//54+6hV3vtudn5+3pd2enx3/4+/84pxtnbKOMY28NEVMtqKaBORsOgGQFB6tmyAhyaKODCTxiU5QQoBiIJhypKQIygSoWF79HpJIvKEc8mGQ/LHwKwdFwh7jwULMhWTfmDjjbNIGkQ+pw8gSU80OxA9hrT8PkOJpeE6EpEKIUUn3xx9EE2CrqJr2mniHaNHZ+rXwe/EsAAPB3/vEfbS/S1V7FGAG8CTtSBRdoHLJzDERZNLiJeZVL5wjhKAbOjhczVNsNsR/icj5rmyZK3o9Ux4Dj0O3GBBkcd2M2w6+9cx+jXV+Pl+tx8a15ikmYkMB5l6KCgZrFmNG070bX+MBYjxK78fHterzOdeWjKYBtN7vRoPJNoDb4ehCfE8Vu7DIlQe9DHu2sct9+841f/7X39svGL+sf/94fXkJuGTb74WafP/zweeyygm98SDn/w3/2x3+1+cDC0C65mXnIPTo0ykw8f3AkLHmQzYvtdj9U8+a0uXP56UUzn61vt5AHrvnN9x9sunG/HbWab+P+9LRJ/bjpEavq9Nhtv9yub3Q0cWKWIlf1J59eESIF92K9P7rbnlbLxz//xYPzNiZNWdqzozj2Z8ez3TYj56o+HQZKvXUpzhwv7pxVrqbGV1ibwtXNbtb6CPh8vXvj7AgJtvs8fLlpPe23Vw/fvNfjeHW7v3fnroz56SefS7cLde3r6q3jo4qcn7k79+YXLy9Gk8qF3/z6d/74h3/iqtUuDdtXw51VCGQ//vFHzy+7tvZnd+YPlov91b7r5E518sZ7d273KRGJ+OWqOb/39qc//9mqDrO62Ty7uHr18p1vvPn408/ni3kEihBGbB+9f//3/tnfrzy+6H9uIf3yr3798dP9bsztzO+ubymsRqcPvvHoxac/DHN2gB/++VUeZ2By9fx2UTc5Kt0L3/xLRz/+40/jljTU/+APfrh+sf/ga/d946vktjev/vP/8D9qER69+8blq2u62X7t/Tfq5vxoef7TF59+/Zt3Nx8/7we7vtrhzFUniz5Ox4MkmRHL48AKfQyL2DM9IwyQyEmBXCOJiOIURZ7CYfia1lOcHUWxMdECUjM1ZTRRnSqoQAueSIFAVQ/+Hy3lX6I6DR64mD8cFhcLIJjo5Gqhom9LwSCXqdaEy1MQRCoC0SGDW1BIakCgYDINOgxAVWkSxNHMJtUJ0BECoYiiKR8ibmIkMhVxAgFj8beAmQkewDeE5fccY+lmBTBiYFAxUEf72v32X/7XTsfePf9yyeO4Ebkcz4/P337nm+n6uXbrqjOzhvY7Xt/McgXrfXObnMe5Co4jhUrG0SQjETHHMRGREaFjVAN0mXhRNcY+P3spJ3N376609Wi1+vqorQXgZm3fa++9PH7/eY+S10qaLRFBZQYCqGSaQiA0SEqT9dOAyYkK4kGfh4KHkykGbiBgVngp5Y/L0M4mo+8k2SgaIBC+fuQXhbG4hoqDy4DLV/wLs4tS1Ck2YfUQkAgJi+f1K7phoSFN8eZyQQEBTAppaco+l7ebqhq+ZhU5h54JiZjRB3IVOc9E7IhKDi8j5CgSkQYFL3FMkszENGczsFJwloUcIEvx8xsDFtIeTtMVADMCVTRjEIGUmTmBkgPnoNRvE6ArqTkgAV9SReRQQQEtk3ojyNmRR+JsJupc8JigJJ8EpVDJs5oaikQih4gImnMWMXaOA9fehxDIe53ABQpqZIQqbpKKFQAJaPJ6pR4xe2IyKfRuMpUiyqkhQe0doVRErEZEhWbchFYE2Knl8bj2ebs/rvw80G5W3Yx7QGi52g9d03ftkdVNM5/N+2FrAvN2/ubD+w/Pz3/tL32/na2ePb/42ePPPvn0R18+/aFv9qkbQpdmDrbXO2lkl4aYbsfFom9WC7zvdZjXR3Nfs7rKP/zgnQdHx7/86S/++bPLzxgVUSUJhfpodXw8v7da3a2gxmzJ0vZ2vdvutttdN/Q5DuQ8KhN4xdLQB1S7rGLkgc0sewaPUrEhMKtHI1BipJiFOThy0VXN0fnq7tI3I2K/377a3FxePL+62vSm6hi8c76ugCFHwYpKqSQ4lwEZVPKexmgWkZEdtDOOM9zEkYAM2QSLaw9BQUVFVJWQ2VvTsAs+e3ZtU9fNkoAFFMaYK7UEkOfMlfNkaKqODl2QAGHejn3e7waFNAuhAn65fXUnH/mTatVk1zDXkLOGJmRJjvnmtt/vgMg5QI86jlmSWpaKvaHVre+yNE2lCXfbWFVVTgkQfEWzumKPx8dhvZObjVz0A/jZ8f3q5rOOLJPRLo2zmu8crcauOwohBL7a9U+zVbNaJR6rzCnINnGo33hwp1m5taaX/dgGnIeQu8FT2ZmKSAJTVrWkMWk0rCgc1fhiP2iKIrAbxvnctRXEIfZRomgN4cnTIbi623Xow4ASIwzUmtrVJn2yvl7U6ZvvLhU6INlnjWQAGuZVNfL25eCAvQ+OnIk2qN+4c/Ty6vazq91sPiM1SwnJNSGQgzY04tKQYxrSarkcc9xvO2AOVThe1gZ5XpGYXVy9kmFwOUOO33nj5M1+afPZZtdFkgdfe+P4vebHf/TTX/2Nt//op1/Oj5bD7TjE+Ozm+Z2b1Zvndx21y6Nx1m1iJJ6l2lMV6jhGonNJqQleIQ5qBnCVuvxS7p+dvnf3eLRUkTaLKqfV590gKSeuobKr3fqEW8t6O8TV3N0/OU5P9lXluWZNeTAkDIz+th82OUeG47qqGu66zgxsTI3X4MzNZpfdeL0dTJEDulBb6nNMg1gWEKMxyXZ9+8Fbdz96fnM7jusssyYAY4p5vmgv9zt26KrACGww6JiiVrUj
yYZuR/nq5e3zi93b9+487E2V3v3gzoNf/+1/79k/6febmFMIbrcfdrthsZi7NqwC7brtxWYfE1TL6l+SimbL4ACWx0Esez+t8X0SydBH6EbsB0dmTQBgULB9H0PtJBGZs+jHpKYQWwQGE+k1Nq6pgccIKY8xa+3RCETNE/qJCYJDlkCOA4ojUEnZBMwAo7jBfFC4kdQAVwiiW0NcLpcug7C3LMMYg2gIVDm/HYaqqhcEK7Cb/RDJGIEQsjook7vgkyYhB2ajaE42ZAh1EDPINgNySADek6EIAzBTUswGfdSKPAoAOFQDUHaudhZBGYmBRCKzB9DWkRIlM3JeTFPOZMzIw5BMqQIgz6iqorsxLtTPHLECgwUf2NdcNY68k4jskDMkFTUBMAABRMdJgA2DCRGJQRmCKBEAeMCABAYOkAnBlJmBCZAAXYQsohlAgBAsZ3UkDtERVsxmOopEZDFAA1ZjgJQTOWKENGbcx7CCMG/x/M5gp8s7J1w1aTuufNqtL28vX25vNgoQo3r0ecT9XmarYD6mFPMwLFPT8Ol+yP2YR5azGbkm5IyjKFJWBaegQBmSAs0dY7Jhr1XwMbMKzAzH/QBLB6CVJwUgRud9EswiRua9qxlZLQJV5Dx6ywBmzntG9YajqpgMMO4xeW4QMeU0X6xywobMEQgi02tRqOyc8DV1aBqvTboR6kSiNDgQXor3Zxorvz7YvwYcoTEjFn8yAoEVFFE5opejuioA4oQXOGQaCv6z7OJK35SUGS9YOfab2pRbK4hTQ/LORiFyv/Kbf9W19Z/++I916JzEeVX7ur7d9qAw9mMI3jwNZnXljiu2rKq4cLQwOKr41Dndb6qz5Te//90vP/75xc8/rd3jYb2eU1oeNVxxGLqz00op6cVzf8ybJ09eXXzZhKrb5u1lf3Sfddjo1X7xcD+7c/z8yad3VvOHv/PbL370k347oMeuG+cGDftZypjGYRy2u22D4HVdG/TPfnD9ySez73539fBdd3w/79fNncVw/+7+6jreOfmlv/HfAj/+9MefXHzy5c31E+s2Xu3J3/svb58+//a/8W/NV8sHM7fun+bjN/7X/9f/29/7B3/n7//t/xCdhSq+9+6DGH/pJ3/0z1Cwak7/J/+j//Hf/f0//OJnf3BUIUUn6Exy7VyvCqWuEUmzmoohEKKo/gV5pswmCaBA8F9DrHCyhFk5hgESHiTEcqACAyNEUkgK24SItvBYEaA3sTyqy1bOl1YiZZNj57UoCHDYVgMAZFMyNCAGQwMxEyw5soNuqSWzfHgR5QwDgGVkByVZCQjGhgJfKaMXz6+dhqNZHSUCWRzBmPT/R9WfBWuWped52Destfbe/3jGzDw5VdZc1dXVM7rR3RgJYhIIc7AgUTJFSSQVIsUwTDoclkO+UFgeeCFd+MKWQhEOhiMcJh2yTVkkBU4AATaBbgAN9DxVV1XOefLM5x/3sNb6vs8X+z8JoC+6sroz85zMf+81vN/7Pm+ClI28QwJE8OTMQNQck/fkC0bCRZM8a52WknNwvLU9yTnPYr01HlHGuq0FElfu3v7+o4tFt2rWF4trvmhIba88a8477/y4qtsVB84pdbFjXxGz5AQGZRUWXTtfrsBMUBdt1xpVDpu6TV0kck0nOCwS5hs7o2JQXpwt4zIhOUXsUh6P/UffvvHKwXUxOX12ZmfVUKtFhlXTnc66o0YWC3XoVBRy54jPj9p/8P/52mc/d+ul18oCaTIKnWYx9SDsiqaWDGm8V7RTHxfcLtY+FIUrdkZ4dnFpWqzO20WTm1qi2HRnkhUu61UXidg571bCdQsJiUGGo/HZrGMi9OQYPeQ3X5uEdnX+XG7t737w5PmsPimnw1WTsw1DZVtcHdx84+z5xer0rC1tMB6Ewi1W577MXp0DrLWuY/QD92/8uY/VR/Hk8dnRedMtbfcGVzf2rh/cWTQWoNwebTWz2bX90fsfPPhTP/nF733nUemHkqyL8cLmDy/P56nZH/GXH3wzD/PWMAyHpAJuNPjut9+7OFuGsqyGDjit84KrwZ4fL3Narc6HlXvppYOHHy5DUT4/Pbv32jtDpmeXT4ti6Lw/Ov6BC/H4bBFbd/u1j23deulL3/2uG9xYzs47Uyw8XMjR+crtOqryn/3ZL/6rL3+LCzk8ezyeFgcH5elp+oMfNCvhySTsUHzrc9cufu9xRv/s2WzrYM/f3vv9b36wPd5reGlDN4G5Lhyu474PixZOL4x8+drNvU+/8uk/fO/Zt7/7vTXaiYNRVSK0k53R+O7d40cPLs5W/VsQc2ZkMGCiK11GCTGp4B+FrhjNGAGBjAzVjMis7zvrvbw9O8gIjeyqpkpNwVQ1g+aNv6S/6avAVfysL7Lo/9ET6LBfEEjVEA2JslivPDCCAfZxMDPhPypE143I0A8M+gEHQP8lrtjXaEgqAAr9F9YroD5AjzsG6GUHAjOUPpdx5aG0DdfHxDTDZl6lJggAfdKOYMMD3vgZKZoxkJERoakBUVEFo+Yn3/rIO91qr4u8nk/iGvNwcb6+OdkZ2St67xMtSFXy7PSYzx7aqgmr4wIL6LIuWkJFwtSsEZCBDFFN2Tz3n0FnfWsksWpXIxEua5iv9Ol5kqYsJui8orVJtv1kMBrsvfLu9d3xh+f3Dxcz446HfqsYFLy9Ve05c48efm9+dIgKYJBNzYzJuO/nQgSgXivszTMvUr1X6B8DNbriGF6FAsH0SqWxXvG/ivWB9nqKbQrmzMBMRa/GFldTpRfoIeyfn/4DBOSruRWqKsKV64yupg39tgQGpgLc7wD9H+SFyTR49o6QgB0MBs4xec+OCdkRkiFn05QsRtUgqU2IQJgkAyqioRgBMgERKKOAqVgGQnR9aA4QALgvZlFBRWBFuvpj9SRARVVEU7Ccjbj/39FMNlCw3saiokRlUaoiAnuHCllp4C1ap7lTNVUxRkoGXcqFM9SkmNEALBMqEIaCGLlgdM67jYG49URkAIxdymTJgTCIhw3P0FQsmfWmMLCcIxBJ/4prD0ZC71wRPKsjQVALBMPCey5niwUjYOH29rbYpWsrf9HUICpZWlmyL84uTo14WDnUaDFJi9fGuz//cz/5yrufubE9ZYfvfvalLzYff/rs8//qt3/rwQ+++vjxHyqsFJbdetFVnYyDATd5tWiWA+22B9N1e5ZH2yO/F2DowF/fvTv99Ojaxe0P738jZUVng8F0d+9gHLYCF5a1Sd1qNV/OFvP5vG1bEQNVQiEEz/2a1KdJzBH1Hjpkx5wJ1TtkY1BH4AUsZlJzkBmUinJcjodQxES1tsu2qZ8eXZydLVMOOSKRliVKBnSEDlJWCv0FTQAVsWtXR+DHnQoAFwGIQzHggVBqs3FMYCBEwKgCmlXEDJUkBKJCuTRX+VBVgaqCEHLMkIF8jFg5dmgloCc2b+bE8RWepWDSzEoiIVQMOS8WF4fPDye70zQ0HTOVzpNLIoGCQjfYHbSP5w7dK7duFL59ejED06DB1Bs5bddjj5MdJgHYG6BCFp+yKOF0OqxnNcUcO21jHxDN6HG
RrQCYz5vLVeRZ52g2ZBoVJaC2Bo3Ysmm8sxvT4QSZhghIwVK9SDAuFIu667LHYeHNqBOxZNFMmJwPY0aLAgJlcFsehxkFrCrCuHCp64oxAUNLpI5rdd7Ki1k0o+3rg0EZaVBbXlZhe3/HL6Xmyo6hRV1UPixjFwkCcxd0PYi0jfPLbntYOtaps4/fPcjRP7i8hISsUUg6gZUZaLuzN+jQWHNVFpr0PMcm68Bh18Y2CVfcWY7L5dbevh+MVllzzGRcp5zUXhqOB6bnEk8ul+995WmKo5P3zklcqDwXMi3C0Aa7Yc+EF+sFOV/yaNWtiZ0fUCdSjKu2bpIkM2xNFzlrwcu1ni3rw3XuwL12b2+9aDy46agKJZ8v69MTHRXD8cAPCR4fr2adZeeu82R3MOxy3uZyCdokG42GbuyO2rU6FqR5Sha7gLhdOWabDqumbcqCQPxpJzMx0DwqHRShRKqYLhcdEBDxZZvuAJwvV21iT7TKHSNMSpelHXoEdjHlGBP1i47lAqkoeVJVR4s1FDiv47fuP3t2vnh6Pj85W3zsE2/9L/7Sr/yPX/qd+4/vu6ZGc9WwWKfGm7myPL+svQugtLqiN25ehrLEQekc5+DNUJu2i4qimDurlxpbzBk8gipobzxkzGoCQoykRAZkllspSiYFT4HUgYpHc448aSg8GpCQd8xKqVMy3CurLOod+mAAmAWSWYwqSdm72qRDqiUOxAI5IkvQucINHMUGrUVrsBXpcs7GbSsGIigBsSqL2CbMxgaollPTkZp3RmoqolkSG9IyxeCcZ3RGaIogJWPf4uGZRMTUAmHKqWBGNjQN3hGCZCUwZ6BZCyS1bJKIXe/TsdSi2tAxEUs2h6QkLhACq6LGiEqqoGpkWFW+HJa+HIRq4qjg1FhbYxcNs6gyuRxFVAxU0DGiKshVYETVBJWZN3ZmBXRgooiQsyIgM+rmRguoluEK9NYln8k5AkRUQ2QDMCJETCJAhEAqoE0Gl63prK4NaTIaVh67ZDG1eVHH1fz+/fcv49JUfSDtoGAWE8uaY5aUPdDY7eZlTpbFZNU0kWirLBnVmLqckAtw6oBQrM4ZgSsXCKkNCJ4TqEJ2iMWwRFAUqYiSqqZErkAA8qjskogZembL6lQDqQC4wF3uEmzKSQzEEAryjpmIPbGpOGTR3LVWDXp6VX+Ug14V2LSMAZht+tF7d4j9CckArkbGm6BPb+UAeBEjMuaNlIDW16XBpgMHzAz6UhS5ihJeHc/watJHusE+GiD1IAHs8QF6lWiDHl2kjsDn/Gd/8Revv/7Zj33s7VZz/vvuh9/8wwM//N/+x3/7WWx/8/1HWzemhaS4mH/lh9/tUger5VbT7oNR2+yMw9hXSXUA5Xj3lt/ei4lvvvPpCixezlXWzeVRvLgAnZUAkIzZfBMbORsyZsKcu3VTO7R6tkjrrk0dxc4tLgrJ7MohxTv728WN4XGzOn3yvHDaXjQui0eRQM4F7MQZoNm2ufWzc5x/ebb6lzTxHPDSFTEW8dbHph//sfuTcVDb+6lPfeHn4egPfmv2rV8vlwu9lJff+PHR/uvq03S6P8idKpyuujc+/dO6Xn3zN/5R0ifFGy9dKzPNLn7sF37pM3/6l++8/sk3fvQXTj782n/5d/6zSQjj0bhetXW3UjMMzrKJqW1KsuUqBgBXDcdXj0Qf74MePUFyBb3ehNKuZL6NnKjWG/vVAEB7NOg6IgIMPVSsUMAiqgrlXmxU6ctWNr02fyRkXgUbek1KzUwEgBG1HzdvriD9vbB/xgA3l0yzKwWU+pkb0OZXbGSizRf69Ltve/QS10dn5xeL9TJZNigHPKmGF+t112UTDIGzGTF6zwRgYo7RBUCwlGPbRYel5EhmiG7Vdd06itpoUGSF2OZd51MFB3cHyJS7mHL30p1BssumBQLpYhbJ7EISrXOsY8cAgLqo6/PLdQ0Qs2R0WfT8rIW2s5yrgkaeQZKZtat5uzzXpmZDNPbMldOX7w0/+onreamnlxdtndu4XJ2c3b21lWJ7eLSQRkIgQrAMkDWJWo2Fhe99azY7cfs34CMfewVcElkqpOCKIYyE2Bdw+8ZBvWyX8zjj+PzRRdc0A0TubL6sGwENcOPaqF0tE9qwCMMCctbzkzPLMBr4uskguWQ/HBZtE0V0UnGFUj+fXXb5xt0bj56eZZGKqF12ROVykdjBdDR6dvRIVjmnHFWGk2EY+Obikt341XsfffzhN8LIg3O5nDw9XN8b7bz9E2/+vb/3a9tbYVQi4qhZJaf+5u6WJr14dlpT+uynvvjs4dF6ubpIy529ndsv3/7Bhx/OIFX705zTuChmyzW4eRaL0f/h9x7Vy4YQDvZGgYzanNRQskNYLbpRIEt4+OgyQAmRpbW2zCvpDMfXbrxxiQ+oaFxws7YdDSeDURlGxRsfezm4lx5+7avHlxfIGhf41u23zk4eJs5f/trXDYSBnY0uLySuYT4TO6fRnvvCj755/+tffXZ8v82L+Gw9L4pqGB4/PwlzLbG6M6iefu9BWQaL1fkslYGBisUq5jybbO39yy9/7fBynbPfublza6dYLI5ee/XWw+Oj84dPuNFuvZkkZ33RuaRMjCBmQH08B3r1p4fJYR8mAzMk2my4YNr3bhNZv+qrmSoZmZn2OBAV6beNK49q32qJ0JOGtA+cgQEoqhpQ/zNfhJ03d31E7Gm7AGQGgLzp7ALsoUCwEQwMUDa+KOuVBTPtFSIAA9rYUq4Ix5stbOM/AlHqRxv9KATxSnrQXonQK5abgW2gy0zQa9+9AN43lRMAIzmXsu3dvbW3de3y8FHAbkfrP721fW1dt/cfj1Lj1itAKlxRZqeLszXIahji5G7e2vO3bhQk/msX1NUggGLkSMSAnahuDFRAvf/DyPUwaSTuJTbTjORhDUBtAYrNMis4yYEU/DLPZFpPh2WQN3+0Kfe6uBzf3d07eB3sWjGYZExf/85v/jf/7f/BmTcCUgR2pgKmL5ZrQ9f/hfWnhr5WggGAWDfOUFMw7im7/bcKfUiNrhbu3pKygRJCn1Tszwyae/kEFHrFR63P9PWCpm5spBtrsmzGWH3H6WZgAbB5Vq+w2b3tGa663uAKv3d1wHAM7DkEVwQO3heF88GDMZJTNUPMwRrOSrkjjmqA6FQBEbNFRUJAVUfgHSkpIiv1EWtzL85CjKYAUXurDhmSKFhkdJgjMBmTApiimgJkIFQTYAVQInF9NweYEqASAjIxIzO7UATVJmdjQL4y+0rOkiMjmQlRwewRCbIRkUNkVQfmiB2gI0CQJBnZg6ozKM07KiF2jhwygMY+JScqWXJ/chBQVWViyFqxG7pgMRtAVQxDBkDrUt0lAV+Uo9Fqueog7e0PfHCtdM1RI8SSxVdALjYyzyuoLxbjYvDO5z77s7/wCz/zc5+b7m+DSe5EEMbo3nn9zu3rf+750y/8o3/6Dx99+Jv3H341i8RhxEUqth0OuA25q1udXBbVsImzvS3aC1x68uyGYTyuXgrd5QcP7ntfTbb2q2rCyCl30qXVbH55MV/PF6t107
bRsXeACsl5yAzOsQsOQBlNlQVEFZkIFAxVoyE4px6ymgppduiyIpsbODepULpTgdQ164vjy4vTVb3Mas6yhgBt3WpW9oisYeAimKJA3xUZSOsERZbgulYHxEwD5HWozExMExgZZARQzUjmPAGLC+wLDCU5D67AgtEBMlBGU5N+fQ7BFcAs/fJIZMRXUtFk2xU3Ru8/OvaAS+1yFBFe5+7Wtb2FNFEahuA9W0Hr1SU4Na/k5ODGZGdYCkM2AiTnCdWSZgNRg+ZiSUiT6SClZjQYFkWVsqJG71xOeH003Z4wWbFs2vW8U6EmiwTK/cSiM3U0bxYMFqqCmAZMI8f7RcV13p4MG9UWsoUgolmbRqNSmVIWACUumcDcDGMLsSQmzQ4xp7SOerleDUp3sDNWTw+Pas4OXYhOs8HssvUpIbns+fDByd394s6NUFD1re/H3alzeTmtwmCYBKHVBr1Dg2hK1Nx6p4yniS/51tb47HCxtzX2FX9wfJbBBYegqQjexKFoGXzX1qtUV1SMPEdwHbij5rJwUFSDlcHB/o2PvvXW7HTBDs+PHj08P1u6NDBfsJ/P2maVXp2mX/jkdb5z8K9PFu99/3x5WbMLq9PGLud7w+HnPvX2zrWbp6s5oDm06Xqyiu3F5YnEZB5qadZdy4hnZ7MmJWUoqBgXtEqSID07O94eut1BAQilrybD6mxZR9PLVd1qvFg1jdOmMY703UfvB5My+PkiZoLhaNhkmc0vJaXhqIAobMCI7boLAw/OP5l3pshdGgVfUV7HDpzrgYPBOVUtStd2SZOsBe8/PWegQFaAFszjwu2Ni6ZNREWDdN50htWqiR5tWLpJWXZZuyzB8brukgExX7Qrt4Dl2azt2nc+nv/iz/3s4cnT3/jnv/HkfLVuVdnQxBNc35rEzKt6vbtdvf/HpaLBCBkEyZqu9ayOSTQr+q4F7AKKBTN3hRdqs5Y+VM5SSFAkSCDAhWMmJdVgjhI6h4qUJA4K7xmCRwbXb6koOuaAGZSIkHLsUAEZOrRGgCWriXUZ2E0GU6JQKATklNuubpOnsQuDcaGM7BlV5ouaiLucWrNkeVS6nNGhE1WJaeKDqSUTZ6hdpymSMBp2qlZYUXJg9mI5QfDIJskyIUTNCmgEYOQDF94LyngQSM0UCmaLOUpCxDIUINlB6G0mpBglsXNZ+tmjiuXAFBgpqzE0JozIvbGbsCiYvZXjkqsBoMfE5AIWnbYtiFruGIwNFVCtr9XtW6v67d360BEAEHEPF+jNL4CUVQWyioj1hWiUkhGSEXRmphZjRkTJpmqBsecgAKKKMnMS6IG1zfmsurEXChDLqbHBMNRNU1+cnJ4cXyznkdVVoQCX6wYBHGFMGRIWjF2kaNxGbSUBAjB5ELaUujYMhpUZYW5Tp+CNQys6CL6qgAuFCJJjGzUwloGL4CwmNZ1WFCMSuqpkNEhGypjAdRkIXaDszUy0l/9EuRFpNXlHJpollb4MTClL5Ys2ZQNlh977QMRXfgrVHlCpf4SChN6YbS8gkfbCPNTDazZdgki0YVRvRnx9EhA3BOXNNd02BTEGm09LQV+Ezq4s5yCbU1FfJgGbsi3YwC4QoO8j6dtwFTE4V1HxV/7Wr3761XtnXaRRXM+7z37xJ7fRvV7OyvJyd2fyZ+796HAyqQghpx/92Z+MzfL4q1+6+K0v3wp+WDghxjCia7uDj31i++2Pl+MhqnMMpOo0Zcur9RxW5/Fr3+APH/os69lKWqje3sW373Jza3l2UR2ewepy2aZBVe5e21sdn+XVRZ6OdTSu1+fQnBXR9rcPbt67U188uWxCkQNo60gIsGRiTMLQSjIKqbai2HbVsBGZvPrJycc/u9rbyYyV8yyYjTPBzkderR99OZ3NRuPRzTde41EwIk0ETWzX62ZcDfb2furP/8r6/W8+Pz4+mBTnJ/Hf/Pf+yuTaS2E0Olkehmp459VX//Jf+09/53f+RXd5iLYuHEoWBVMgA0IGE0UAkI39eVNRvfmsqWeNEAH2dx+0rL3VABTRAKW3nMFVjsQs9yP4/qkSiIoXRkltt8DAMHJmJp1yTw4h628ettExbTPD7y8bG5QE9ZYBVLB+iN/rVn8kOF6pli96iADAIQH2pFW7cgz8sVEzwE/8qc8u5u1owIvZ4tGHh5fr9fHFrGnWnoDW4L0bOV8GFFBkCA5N1MR8AbnNdaMJPSKvmxSbLjgHlNCRR4zZCgsFw5PDo2vT8c6wfH65OERI67wzKS+X6w/ef3Z6NJ+MqslOUXlzgRW5lSSeLhq5WDZNkxdZMWDJdnhRxwih8GbofdFGVVRiHVfE1M5m69hlz6GNqSzyKwc708nouw8udGVlCJ2mk+cLred7w+F63kojeRXBsSBIzoFoVBRtFFLXXuKz1tUz9F29d6vYOthDqr35uuksK4YqrSKlvDMyd53Xc6qG5brN2AB639ZJssTLmkEX9YwH1XBaxeWcshVFcTpvcturApAZV123tzu+fWsyNYDWnPD+ZDIrF0TFeHvShcHtl1+Ds+fH9z8sUBfNfDja0jS9mF3OT+eWQ+iSo/b5429liATu5vWDo5YOXr7bHM//4A++Jcly01oKPgwGftzW3cXho5hFYyxd+P4Hz6WtR35asWyVYXb4pD4+NwRyULCmun39YEc7O7tYz2vfNOZCGI88QNaYpQxhOFhdpDbHLq+iD7vDgSdezGbb2zvTqmraWFSjSTFYnZ2jytb+wdnp4XR7/9ru6PXXxs/vv2ewmnmYXZyWjgpPwOGDRx/sTry0ohTXKb4y3qlbmOzcfPTe4flJos5Kle/81te9aafrg2u79Uw8FatWcDK8dm3YXSbrcHu4H0o+SavhDu9MxtXO7ffPnr5773Pf++0/0NaXYZDGVTkgyW1s7cmT+XK1DgMqElXjDdZaRZnQ1PrOMwQDdpvuKqIrgBj0/5/2vhIwU+0LJ3sRVq+iQWaqCmKbNV2tX/hB0WjzGipS36RuCNabicBQBV6EStX64sX++zPbXPh7mXdDJwND6TWmK9urAaraZqsSJWIzJewlgx7IbWC44e/0saUXPsh+JQFD23hb+mFFv/r17tYr4brfFAWNe0ixbDY17NvYN5slARMd7N985+Nf/PjHPzOINnv0wePf+ycfrbb3n56OXB6iCxiES6yqwXQ326XODEdvTG/dWw8juAFhBYqcI3XZgJlYs/TrM1JQy32rPDBnUOiNTuy0h/WAsQtGzhRBDRTBkqEHYcvqfC5YuZ3dG06Z87qYd0IXZ2fDj9yWcl1eK+e52d57YzS9vlxc5mx9MEYRDdm479YmIlTVvtoekcj61hEAUATqh3yEvPko8Sps1g8YVHv1vg81G/bVpT3fyTbxaETqBaAN2Lo/E6DqBoq3KZe/Mh7BZmightTPDFT7SLrSValnf7zYJJqvvGb9b12UHLzznkPBwVHwXJaFY3ZUIBW9opiyMGZkzSxZOMvKSNARonlFIHCgwAKUkYBQjfqkpXjCgGhZjZERzfmes0R9Uas5SITEvQ4nmql/r/rsZk/DkoiWjRmQ1
ERUHCBZz1TyMQsSe++yUI/klGQeyaOaimRhx2IIiM4HRAuOHHgUQjYkUxQBvYp1d4ZIAZxgicLOmPrunk6TiGjKKQmagyTJwNCIAzNT5b2JBYcmKGrRVLqMjYFkX44zdsF5GdDe9uhgN97cuX33e3B2uXp4ckmOEFM3X75897aE7XK895d/9a+/++4rvsIYo/Psy2BRqqHXLu1OR1vTLT/9t/9/f7+uZxcfPvh+vVRwahddNaFiUtooNuuaSz8cLptWoj++vvvSZHJQr6Pz5cGtV1N0KcNovGOguV3HLMuzRVs3zXpdr+rUJE1qbAIGxNpz1tEgAxEwACFiVucIRB0aGYOpGidRFEg5owKSECJCMl1rmz1kYl23dV230gEqYzYw05hdgTmlvuEnRUJAAXHeEzrCQBriHLRAHPhElpMABGJ0LqECIfcgTwJSUUTxHl3BxEYFUSiUmJCcMRE6AAOIkgtPKTbqhuACqDh2aGnT/giQ06quF9en5WmyVd0Zp8HEHy8vqssTHolorDLUOXUZ08BwgK7l0S5eH+NqcXKxXMlSg+fVqmanu9NBrdogn61WAxe8UVGUnjwoYsbRcLC1VZ0+q0HxwFWKztewOxzQtn96eimiSa0onUBSAiqcCCag2OZb5eDOeLg/GQnELHwR43ldD8tqWDmO3Vrik9ZeqirWiGQFEgF0Yq1qI1YCOqKUsyAI+1btydHChbBeIQh1TVPuE6OUikMOK0XtbOA8IcfUGec0KCjAR1/afvL0IRtc36tWqUtsa/RNFgWrLzoQ9sOy7uJoOFjU+fnl3E932tV6HRclMwIxQ7ak7EhxUo0KpsE8Fc34k1/4sVuvHnzlq//4vUcPSuSf/cy7P/nxX5r99gfbuYFX9uAvfHFxt/o//R//N6vnq1tb2yPUbcWf//xPXGY6On1y//iDSekvnL937Waxe62cbtej7cvjM19xKBwaTnenUPG6uYySLhfrtl4gkAfizlLslA2TKLOrWLKeLlaPn54U17Ymu3uLy1Uzbx2iL8J525zPZlNHjWVgbKRLSM503ejBeDQpwng6vOi6k5N5J4ytyHqNnqcO33jpxvnFaqnQlJxbbepuqnpjWIzH/v7JKimBQJMzomygpWTmXMpShdA1KXi6s12NEBCs9E5St1UWA8VTJIchAaCBERNCalNBOGJqHJUhpCSz2Lpi9Oj57Oj0d78o6SNvHvwnf/Pf+a/+m/9e7NihMAdBvzXAi7NViJ01m2vPRipSyESk0AOuc+6gXVnbQrcmzMaiDs0hMHBniELWYgpiDDRCSlB3Mq6wLACyWaci4IJ4xjIUngxEfSeOiYhSNhVConFZiKSYIoLv2mweASwwQmmiiOiSksU8DDwqC87WAq0lzWKS0mKMKbNkMNGMpimGwDllU1h3ogSlC46p8MCkMXUaJZoimAdmpKw69kiBhiyFxySSCJTRsrAHUxMxZO+ZWBAVUXTkXWlQuqJJKZswmakDggzGxIQEhEkUHBu4OiYEZU+OsHQVM0IWn0OrglZkzQqbZHPKuSgDegZPjrwjRue1ZWIkSVpvQICqltWIEAgdGGHfwIkiqtofiNQjZpU+cyIqwoQqYCY9ZwWRX7SzGGQzNBQAImKy3DfQoRpTKEJWRWJXlVHNIbSXsyQSBgNPCHlRn8+evP9w1jSRTKuAHlVNUTUrmoVQSMqx7gw4S8b+nGq59OAZgwBhkFo9sICRAgduzcphJbHrwIaF7k99UyutY0EBwIXAFMgaQ9ZR5bJIyl1wBYBTBEnikT1jZxyYchRU1c5SFCUQg+AYiJxDEYhECu5sFcfDQdc1zheu9Np29sIgvgGJQi/NbH4Em2vBi5xR/+MNhHgT8wcCYAKmjevkqtSsPzP2Z3uAK1Bef+i2F61aBgAgogooummihT4rhNDDP+nK3tKbxDaBISQij538R3/9r9+9+9KM4HR9PpoWQ8l/5vOvz3ZmD373n8wvvm3wigNbxiVNh0MfRtpIfWH1fNfRNYZhUZ03bRpQcfvazpsvm+cyOFQUUSNTgC5ZYo/EWBajaVUsG/M+5U4unrk7N3HrztbL7x5Mq7Mffmu2Wqkfs7Ph4Ifd+mL69sfz5MCatj56FOcziwiLR9TJcDgOxtalEnKO2TtTpEVO5Rsf55s373/w3rAsns0WL3/kte0f+fR8Or1IEkybuvauHJXOdB3TabNqoY5+UrWr50WbO+SRFU6a0RiF5suzdfA6nIadWh5/77cPdvcnNw+2Dq7X9Vlz8fTlt94Iu9uf/9mfuHb3zre+9M8++Pq/ng7LR88ulilZB+zKTiMTKRGSimzcRPhCeQEjMCMQMCISAEOynhtiRkDSH9ypN+FvPGKba9vVfN1MDXgN5AgmwQqnGVU66Yxe2Megl3n6WbEaE27CI3DlMOivdpux8dXT+cdMAZubwBWEC8DoCmLdfzNIyPTiuQYA0JwHwwI8Vtvj1z72OgEcHR4vZicXZ2dlKcOt8taNvfGQV3U3n8994KzQtgnBXV6s54u0jiQRNWbvHTm3WLUGMBmG0lOWBMk6g6N5czHrtODnF6tRNSiD0yocnc6f17C1zh/bnpCu27pdt6kjW7bNWSPzLoOZH1AZiFujmNm4rTvvSBizsZh1besAEHNZhJIYiQLDoKLJtKzX7WwtBMVWsLppHjx4cm0w+sbh82s7OwW521N3umik4FUn7BjBvBnGBEKffPWdo5Pj+z+sv3X/7Is/8crebiW4IgZyhN6irpl9kpQom6eclMDdunX3g8MPqLL2crE7KGvJyUyRM3qqBpRssU4cSqeSmtzV6e6d6Zv3xgc7I2MOpidn3dmicf6YzUIoT1dw6+2333jlne89eGCLujHb25oOr919/9F3HFsxZvERknqC8WSyWLhrO1uHp7WG6vF7RxcfHnbLSx7w9PpUNDw+ne+ic6rdel74Ymu3Gg5utOuLRJV3w70teP7s6aquEWhv4vdvHjz8/vfZuXkh54cX6zVl48l0vK4XoyrEul22eava0nDdlfPQdePxaLleXWacDsc7NNkaj8ud7e8/u1g1sAXh5b2D88tYue0b18eDck9nz44/vH/44HmGerh3fXd67bI+Xi+lztlXVZT65v646brx/ktf/cMfUDGajoumycPt0fJi3Ua9Nrq9s00XR8+5GLauPjptwn4J1/Tw8tTHQXNkN7bC9u2d0s6r4Kxr5heH4+AefHgUwsQxdRF2dko1mc+0xunzo6O33z5wOT/67uPptWv9W8D9ct+Hp6zvNTcD3SRPrhByvTrcNw+CKSIooVqPKoPe5oIAJlf19apoIKKqCoQGIGqbtPhmM+gpxwq971SBersQGBKibiYKeAUqtk07J1yB0cj+eGW7qr3IoCkAmqoAolxdt/vBByAicQ+3xk2xO15x9PtFCIlJe0MKbFD/utnAjLDPoJlD0gzIaAIUCEwYs6EAEROCs0lRvX77lZ/5/C/uT192LXcX8+vju2+9/bPu6Ls+NrpqLXZIaFkpa2pWo+2qO/qBr2daH+699Wq9WuOjE/nutzmzYaGA
ah1Rj2Q2zZH6kktAVem/STQE003bGDuRBAr9RqtAKJBz433A4MgTSOe59D6oGy6d52vXrl+7lofbzkMo1+PcXK9GP/WLv/wb//zv63njgDIIMRn3f8ObwrKeJc2EqtBj/hVARTZQRCQD7s8aV5KcbuqyepBQn2c0QN0okQbUd96Z9egje/ELrzxBV8qlXf2JewDRBo5kgJteg55b1APFtS/G63Nn/SalZn/E19rsJMzsmJ1zZVn5UBTBB+c9lUjBFAyQKYIyaI5KHVEkEiBDRFZPapA9oxGZC4ZqIADq+h4dAEBUdGKYDJm0D9mjMaJHY8usaCD5KmOHWUVMgUgdMahnAc2QDZHEVAVLKksOIgkQmJx3niCo5c4yMAiaYUrasTokZ8LGwXFBNPAEJrI5CRJ3AKV3iAZAbAam5h0R5yyMufTkMCtkYxSxJFnUACyl3DedMGBBWABoC1B5M8mNnjXJZWdN58AVbvD06cM7t65PpyiYupOZtHDz+stv/dK7r3/0HQiV2fx7X/ntet4Mp1MpJzfe/tj1lycsCw9DAWByue0chlx3nkksgelO5d56/dWRfuri+WFX12pZW1vHTmLk2HH2qdXYtetm2Y2GS1vtyGprcj2YhnL34598ByIfHz9ZzB81i2Wbc1vXy3m9mjXNOmkyMtCsxMQMTgDNWMwMFMxYkQyJ0HqCGQCRmYgoQWIlBADirAqgzmGradbmITlIsVm0IBh8IUxqTiSaAGQ0kJyMHZEiCWgSLsEFrymbogE4wGQ5gyMBSUjoXEAQJWBANBNiQmMDgYLJI5KpQ8G0ST+aZskG2UxB+9JiyuJzYjaOOWcL5DcnoqiqJpZb1HRtr9DkCP3sUmenZ8OiCEPMmLqkq5S4LCVjTNmV9OzyvBo6MkLJYhC4ANKctU4wk5QTLmJGkMm42NmZdDESEoRy1uWlpb1yhAkx6rViHDwd3Ju8cbBzOp+fzJvzmBrTmLT0NAg0BhyV1UFV7vjQLLvSlRQGTFqCc+zNdG9nK7bd4/N64NKtyoNaBitcGHSiYj06ZlK6CFaUwbU5WUyoLOrJt43d2BqNxm3TxVzBje1wNtdZ0kwaKhOW2/vXvnFSP226xeNZGXa8umfHLaNDR8OtYclt1MTqoiufHreLZGOmssQGuun2MOXVjb2t9SJaJ1lV2C5j64jHeXUP+OD58me+8Ivv/Nxfz6H7N19743/3d351UfP8tx8e3LtV/eDXm/e+WW1JQdshvfk//4k//3//r//brZUhz177yS88vgi18hdvf2Lhv7Tzzvi1P/M/e/Rs5Zr0+4/PHtZpfny5PbCXr90oRwP1BblwfTI9ms84157w9HRxsLvHBZl0RpYJU87UKhaeq0IqrrYmZ2eL/f2tQeHSoq1bw6FA8MZcFDodVJeraMQcgiU3GG0NURlEchSkMCyDN2rXA+dL4stGDzs7a1rnba8qWbkjOJzN37qz+/Le1pPTGYcgzlarWIVggoFoUgVIUgbeDcV1726HQJLqpMtFV7GzLu142t+enHTds1m7NlgnGXqejH1B7mLdnawjihaI864eDaqOnOb05d/95oOHT//Uz/7E3/qr/8E//H//g288+r4andcZUgeAMUpI8CekosHIAZJ0CURzhtVSV0vS5FAF1ASMgw4nLkfNHWNH0BiW4DxhSRhsUCJ5URXPAyNGo6wkWUYlOmICZvAeCMSYOKEgYauK7Lsutwm6TDEBWCxKFAZhU8vEPsauSd3QDZEZNHaGVhQLlVWMLKRAXTYfHDtf+KKkGtRSli4qIRV+wGSQIoC1gG3XDUtPhB4wx1iULgStPKfcA3+g1SwAFbuAfVqaUdkbFN4V4AgRJEUFFYxZmQgYsmqKQuQIMSdISWMSdeyMCudUrPTBoe9PmaHg1K69B8lZwBJkUykAxHlynhGKQBgYlSOrspIN0XmwmTQtZSBTQ0sI5iiLcC80MAkymSliBnNIGdGADLEVdGgEAkim/WEJkQGSaFJ2qD0ehwgQuCAiympKBC644JwPVvlBwclRu1hS19KkrGNzdn5xWXeLWS2Ic1MrbDCuHHkOgcCZZUUDYBeGTUxVcN5jbnLhA5dABOtZFnHELolixYOKI4CZGiVgWUZZd7CVpQDbGXlAqtvUGOYsFKAIkFI2siIEyahKYi6nWnJn3CFxVGTCQVWtlp0IrjIoOYvQ26hNkaOiEQJHgNHWUMTK8Rir4e4wwNeeAICa0camvcmY9eKQwR/zdwDgVQdw3yJ7dQ/faEM9lgF75anvmlGDF0KCgRLp1bV+cyQzUIOsKNb/5P6r9LkBA0TqW07AiLhvOoar1L4nN9Hutd3x/q2bl3F5j1+aPT+Cojiena3T4smj++35xdY9nr5R4nTfeR+7usrr2fvfhg/eu1aQdtG8v14MVqEorl1z4xGFQDlZFBLtuk5zxNjyxTM5euyfH8OibZu2KLlg6y4u9Wu/r0BwcOvMWTs73967tvPyO+Xi/MOz09XRucgPXvrMjTTeHd3cTbNLd7aE3UF3dIQZVquFWV2x63JqQ1W98lJ5+zbe/kh1485rP/HzzexInj28f/hs6+FzvldOtiaDIhgm4CK0x+tvfmX1jT/cnS2n14vausN/+n/b/sYIp9Prt++y+aXn7emE2w6K6e76wpoGnj2ZTnwpD+ePn05vvjSdbleYfZFbkN3rWx//3Kcmw1UBq+0by/vn9to7P/qxtz/75S//f3/9t/6JQ5fMFIEIWAF7WBUhMWqWPoShBkQASEaa+7yIqkMUM+rpooAGmA1EYVNxBIZ9cZqpZFwAeoKhpwBQeJVkGczsavzb8+hfhMtgU8O2uTMA4IYPqhsPwYuncfPPK3lr4zHa2A435FLDPzFEBgCAsuLlKs/qNBoVLqC2aWc63N+7c/PONpUynA4dBtS4qOudtJdVV+tWs8Y2F5PhddWT4+7saDVvdd0CeMlM3lGnJlm84LJJ7J0QlAV/5BOvXiwu5xfry8t5Wq8HASzQou2Ozi/3xhBTWrUtBDs5nc3EU+EtJRGpu7YKnkqXW8xmGiGRSFJkDOjaJnuHDjk4btsMhrGVDx+fO7aD/b1szenzi+WymQ4Gi1UOVnnm21uVMu5uD49ivHZzR7qGM6lP6zajg0fP3i+QC54Uo6nUVdjTgvJw7KxwDQGPpsvTebuClblMgBxS7j54/L1o7Xg8unf73rMnl+WghIJaoPlFI02OHRZU7F4brk/qec4v3bt59+7OZKQltK3llJpq32/vD9TFi3o9LsKnPvH29s29H37nq0dPHpYcGH2zatUdXb8+nK/mW7e2jy7Ow1ZVR1uedW01OI55d393PNr/2m9+Q87nSAgVNl3sok12xpe5xg4k19ulHR6fbo87Z7xcNAu9uDjT2WxVVUXMcHG0vMyPx9ujtm2frBqsqsoN6rXLSr5yhup8URWTTuzw4jB0izev3yJpz934UVs+a/iV/b1VVy+a07o9urXzmqGGsX97/+3zZauxaU5ncb56cPHeaK+6ub8FasVed2t/57vfW15edMPt0St37z742iNLVX2
+Qj8px5OikMuL9arppMPK+9Mn67OnsHd95+EPT33Bd25vd+Hix378zjd/73vldOuDJSkMPjxcGjGCKXRprdGUiz121bJpujrJ2WKxaIKnLhR+uLU13Pvwh9+DqlhuyjCBCQn7IsI+a2aGhuSuuEJXZVWIYKAmV8qsgm3k4F6uwSuPjhropiFM+4bSDRy7X+bJANEQadNfSptyBVABAQNQQiRV2MDUdCMZbUSrF3Q96JepTfdBzy3u/az9jBwJkcjghcgMm5WCej+TYm+a3AwlYJOAoo3nSDd5sg0GTaHnKgOaEjvadG8ye2y7OjhUBCMsfAhO79y6/VM/9osv7X+kaLDIQBI9k2vWub1AR84PBHOso4YQyiGQh2jpyeORKXTH68Mj/O7Xh5p1kSphFKTCW1ZUB0SUIwiiKVKv2ZFq3lRKAqopIhMxIDBRlsTgHbuYFYjRkzkC1Ga5KkdDAF53mocD/uI73WjoRttd1u0iyJNHF08e+q0bb27d++Z03+qjrNRIH/Y3IEOzqyAw9Ss8A242ByDE3rtMfTqv14w2aoz1Bi9AAqQeb0S4+XQNETYre2/+MoONIwmxhxJd+ZOuxJ1NwWX/RdTUNhAlgN5NJmY93NzwahvouTqwCd2D2tWcgpmJ0Hn2jrznEJxj7zgwOe9KABABJgYT0baQ5Nmr8+zMTFAyUUekTJB7AQ8R0aNmFODN01UIIhI6c4aJTZUSK24qPnp8FyFgRhMBVTE1NXbZnBCaZaTMhBkNDBOJ9L2+/VYLAqpI6Dy32bKpVKE1YrGM7JEUENEpB8AKDQmBQEX7slDqHybYnLRYMypo4V1w3qRVQADSnASyOZ+6tk0iOXsP5MliG8J4/8ZNn4ej1XG1PJXWIvh5YyApMJZFvbdbepzzPO7defnmzc82y+mbP/7zfm9UUGyWKyD5sT/3bns+X80uy62yXTb26EHrizTammxtr5fn1tSDwQSlkWokACkmH+uRtqV2r9+70zx9eF7rZaNFUSwuxa+6wbojr36am1FMi9lF0y5is5VXe8XWzcm9yeC6GwxIXTO/wLxM69gsYlyl3Kp0qknUQLJ4x0XhwIAdAQA7B+wkGTswMzLKaqYEpOglgxSEKGJqipY1K4iYkrZgpXSJIOcMOYrzoRy4rjVpFQi6bAZkBk4pA7AJF8aFthLZGSOBaTBnbZLUqTNi7HIXHBMoobAn1T4ChaaIHtAjEDrXf+SdQiMGCiCSDUWAMngwarSkJngmZVimus6bW7KY+u1ifngRiSmG5aUBKEFYrtb7o6EboJhH4NJyslSv2ihmhQFCNkxNZs3MbrQ9qoawni9VASI5c2bK3q0lXeTl9ni7rSWmDJ7nmD+yNxk2KeY8KAfrZum933PF9mhyrxy1JPOmWURVxrbVso5v7o9Gw2HdalkNuxiz1kUpZWq3J8Ou6ZiARoEQTps4yDZEyqkjBjWIUZNAMrUciTBnrUhPu/bGa9fnZ+1q3ap3N+9eK8vF/ccLcV7LODRAwQi0inm9Wg599fq14XtP5ysoW2GD7QLFOyiQlsfigGnM3nPXZY6RjQvnQuHr4wXJM9/GsRtiJ9W4sJyaTrYmwxKM33+y/+Y7/8Ff/RsTHacP/8f1Im2Nwp9OAzcdNk9Pvv53/vYnp+PpIHTry/Wv/bP1l379M1/8RFXdODqZN2/t/+h/+Fd//2g9yOH2cPt/8s7PHX74ne7/+ZWda3769kemP/LxZ03z/GC3PrmMftBlqxsAo5fuvHn28Ng1Z8z8kbs31ov1ohNA7ZJiNk/Y1tEDhjLElC8u18NyeHK6qJeNRpsvovcIwa1W6wCBcegtqxiJenIK0hrGzjKyMRGqI9oqqvFgUrfpwXndJUBFzEBA3lHsUgL3zYcnH71599bu1rPVOrVSFcXIB5R6GsJL21Xqsnbdjesjn8zqrouxCIUry8L7LmVHwtIWLt+4NnqwameqMVsn1Da5axPlJAlCEW7u7CeFVWwHBZn6r394/MHpr33+jbt/4Re+gL9d/+79sx75Yg6r3UkZ/iTWmhjUVNXWS8hLaGonrTo2ZuyiAgN6oBFBK6risiuYeg9OqNy6FnWUiLIg+EI4IwKoEiITJpFAJFGcJwZgAFQjVPYIRKEIrUQisqgK2GWLScBBlzWrMHkgaFDZMxisVi1KURJYl9RAkCZbW6t1jaY5iTPwjJakQnKoklPlnGdnlBRQXUG9X1pk4NgZoEKMkpIhmGRBAOpLPzZ7NeQcB74syLGhQ8wISFogmjljtJwR0LuQohaBHYERMiVTAyQPJIaUAEECO0NUzQU55yA7kNgJKBN2onVdV+UQUMwrMpLnAsqE1CYFISxKyInJKkJVy2QABkxEbFmQkAlZjBCISQHgyqnSexiiGDEiIhYOC8emnMhnUhE1RQFGA0YofEJEdq4qqCrQezceZUc5diY5ztcpp/WhdBIvF+vWuyKwZisNTcklVdXCESTLpp1a5ZkIPGLlCmwjB5ez1CtgpLYlMS0LEwCJUgxJTUCg62Kp4IBiLfMZI+j2flmU5EJuNRr3WW8mZ+t1XTjKmVa1JnWoiiIACpy9Y/KujimmBGBMyH09vCgBOu8KDqlVj64CnBR+d3t8OWuKUNy4ce3qkH111kbgjQkDEF90mxghUo8EfpE/uBKPNooSXNEaNvzrzQBw8x+DHrrZW5GybX4VqIn0F4ceBwGARlecbMIXeGMSUwbrMTlm6ME51SGZrk8sN5aTrpcD9AtJZ0dn4+VymsO0VX78iIZQtuewnNTHx/b8yD28v9N043JMgwpBOSVYz+vTc6xTa7Mhu7SuAxqrFbqC9VIPP1w+vD9oQlerC6M6dRihSUmXs+GA57PVtU/c9aPig3/11e4Hj3eubW8Ndsqb1bP3H5w/eljs7+D+NPJkwKOdu3e3f+TzIUcPfPrdH66OHg2Gk8Zjun5rfGO3zZia5410xHjv3U8e3NrT97+//O1vLJ4fAdCQbVGvclfTvCvVrZdNOw5S6Ngk3J+RPV1+59uaotudNjk5sXVyN1vYBtdlPRhsN9Pbu3u3/M6+C5Ugx3knoluBRtd3m8P9Zqm3701ufuzu+NabVtnnf+Knf/dLX0qaDDGDIDITiQFTDhUghGWKjIygm1sXASDpRmW0jRL44qO3DXFiExpA2ljNzABMlNfJPKFjHCiKmQkm2XSobdCzYITU46i1T7ph/5bbiyewV4k2jxC+6Djru3kAkeQqAukINx6lPyZWvpCLui6Fwm85TCkyczEpRtuFoGBXjAbOe4fGqsmKETXderWuAvsht741iM4hYORsDmCx7BIqoKIao5OYN066ZEXlfukn3xZnPruD8e6j+8drKFZ1rLtUlNTUFzGUYBDYrdtlyQUnGICLuUldM9qrppMwi3z5bB0cg/V9VMrIZXBm1nbQNJ13LkcrvBsM/HrVloEPD09AVBGXi/V2WW5tj07Olz84PB4HuHdnb6usdNa5cdjb2fvwW09aEQCrs+mqu7u33a6XB/tVc7xw29dDcG2qy71BazGxxJR94O0qTNh/8L1TcrI3nR
ptLZpmseiGHLpWvA/E1EgKjIVnjXnb88uvH4RyvLvtzDcDz1b4rgj7uyN5dgZBlsvljTen9izm4/uzi6dnX3s0NudH4enZbGtUrp4fr6Ntb43Ojs4gYrg2XNXrLs4Jq+3Rzudfe+dLv/57fHY+nRbELCCkupKmdGV7Prs8XAVh31Lb5su8mC/qNnXj8VZKYgCXy/WNUbG6XFUhTne9y2mwvdNBUfH26r2TWKfhUHd2RnEmFmPFRslyFyVGpQSsXZYoxdmyyc2synzvxnU5W2oQzcOceN506zq/NNx59MPLk2Ubxlvr8/rr93/7Cz/9Mc169/puVz9P7erZU0odtW103naG/Mq96cMHT3PushL54Dyv123d5Is1BnWx1WYvvfH6+P3f/+b2sBrthEffj4vzFVY2Gbm9rUBF8fjRxRDLXOfFuk6Sd3bcMq7GBU5H1dPLdSj84QePMOp4XO3cuPat33kOPVbXgIl7AQdQFTer9ZURdPNGb9xCZoom1pPOem4RGhiZgQrqJuipAGJoagymJojEG9MQIm16zzdTAgOCzY0fe3PKhpi2CaLCH+03oKCERHgFMO7pxtBTb/r1BxAACXuCn4Eiuo3vBoD61BtiT0JiJNEMgP3KA8S9BgaqzGRG1CeiAOwK02MAfYYNHRZ++PLLP3Lrrc8MS3x0+GC+Or21t12l7rPvvnOrGlHXpjaCESOFGEN9aTkaYN20xo4HQ2+ZoYjRjAEz5CjQzQpzbmkZogtTUVNjMiHHoACmBADE2GcFsxDiBkRu/YCMrFeNjBDAUVAidaQmWBauHDRNBEtGbINCqgrefMNeeYW2dwfk02ptXcRQ6Gq5X40ipJu7ez/3Z/+tf/gP/rvjp+foGVDUhIgtp56AjqZoCKLIrFnBkPvA8lXa10wBCWzzLAAoIG8MQX24DGwDTNmQqAF0IyXp5jdCMN08mP3usuEg9h/ZVVcfQC9YqompbtQhBBPoHW1X6qBdocw3gKReMwIAMSGHzqN3TGAOmYAQXOCS0feoLiBmFkfKKBuUOIOIgSbnhFiQiAyUemamoRHjRiFVQxMgBEYTA0Ni9j3VMaltOmbFDE0lo6mIGiCIghmxQzBEp4KAgExdlqw5oWNE56l/YxGdMTORigiyZQMGU1IQsBRImYg2PKf+sBxN1SEAWDIlBM+ewDNTlkRICkBISTqHYICuHKxnyzabCEoibTOw/5Gf+nd+5i/9Z9Phzrj1Z3/vv7j8rb879aaj4tDc+JW7z88+qJsnw2p/6/bd1z/xc27nszt3fpSHo6bLLmh3cYnW4HLF65WedahODOuvf7v07432p3tvfO7BD97bv/cGPnj/8GvfJV7vffKL00989uzhD9r7Dxa/+c8mVfdz7755z/MfHj3+g+cXaJ4wVFxim4NkRApo7HPE04WDYug7hOBoMqwgUeMpuCLQwFtia9iidz5aKwoiWQWjZInRh1CWHrsciuB9cEQExIEZPYESYkSligKDNwsxSUrZTGPGwhshFwWEKmdlDOA0FB7BIHZoxuZVYkzdlWcTDEwMvCMjImZ2FYGXbAkYpfQeUm6gRO8JMXtPzOSdM0FVExMGQzYiIzCnGVUVVMBaFcZAlFNcQ06xjWhubtLm1rHvVCWDCPdvwflp1EuXlnyxbljz0JUg6rw5i6522bgYbPmCG9OVdIOhy00qtuCGK0spL57P3aAYT4tOMiIqscvqDJwDzFqZieR4No+dobliWIjyooVkHqWjjIvL5XhAqY0ELlCpuRswVwM3LQUcrSoFim22ro5DP8gZW6GWoBxujwsxwWoU2hTREQYX27SKqSAXiKJ2yGAELahn36gUgQLL3ZGbVANd1B7s2rSoSSdDOj1uHAy1Gj0+n1dlKaqelIEGgy3L+fqY68moERxMt8+OVqETMhkVFoqAHi4u17hqB+PhoPADc6t112Lcm06GWYlsxyGhHT1+vrszrFZ0s6A9oF/5t/7aW3/pb69/+MPHX/vq9emUBg7I3h3fuDy9WBbtq+98cnH2cH02B4mDamtwkp7991+9Tltvf/pTy9cn8997+GpapeftP/v29/7tv/G/vF2+9OCf/w+8t7xZ5kd+J+68siwq2sEmGqsISFHRZLr9yc994bvf/r3L0+MR2nBQxuXZiEnZ1FFPozDFtMwXq+apzm5ed8JYSyoK4BKCc20SzSAqy3YJZg4wrlpG9dujrcm0Zrt4vvZctE2DagGLZp0u1nUXc+F54CAE9gbLJotqEULwvO6Wt65N27ZeA7PoBORgMpgybhutlMtiUMZoGT1CGYIyUQFtjuMBD3yIAICsbDIIadE2ouumdQaahTOAYwekKYHZTLQDhnaJ4lTTb/3eyfe//7V33r777/3U5/67f/yvLfCadTAs89r+hFSkZrHL3TKvL1mbQjpDUTATUw7kmMFLm2Q8diHA4jKrAHoiKtU8erFIolgEluCUqOuSKTpEZBwwGViUnMWGnnNqmdkZYMZkmT0WY0DUNmmTeo6jB6AupazEZmp6sWpgLMNBGCS1JAOVYlDEtjUk1+ZtdU3XdGLoApYeyApC1IxmiByYaskEOCj8IBQSJeaGEFSsSYKMoXBZEhoEAALwgGqQU3YMBeIgUMmQulhHSDkz5KryiCaClowAickTFxwQcGtSoGrsOsspxgyFBwImC6FYrFviMHCYsxAOG50hNEiAjmbLpirjqGLIAspAwsgJyIcyZYCydEhtszKJPjC0AqhgGRgQSSUTGJAZoiHw5khBBIqAWQHYdSbEWOxMyr1JQUlXHcQsMdar1iPEbKKQ1Dh42h7juOLR0IpgwTMg1o5znRe5XjepS0LgyZvjuhNVmFSDZczQCLERWWKjgsMYfaGk5DKmtoNWJFNOoArgjQEVKKqix3JMvgKP0M0TZil88CnkThRdKxbPpRy220PUmJpIFqGJ1H9fLoiagdPUKRGzuOA5kyUTB4ygzlPSnFJmdaCWszJTYIySEG1Qhmt747hayrqzNk2mu5/42FsA/7g/NW9Iv70w1N+2rxJnANb33fLmTt4Tw8E29TK6+dEm4GNiYP1haHMWv5IN+vndFXm0d5lceYnsxa3/xcmvr0VjJCVEM0V2RsCUco1Qffqjb75U6en5g9vdx9JyOTYZ7gZ3cX751e+2j7/zUkyF6XqxUO2w+jBb5mXDi7ZSK6fDJtNwNMjrOq2WW+Xo4v77ebso37yXBwM0cyaVyPzpD+F8KWfzLRiOCC+aOiZBpq7Lo8Gwi2mZi/Xrrz+b3KqOzrZv2+Wyq6NHtqbUN37p89YW3/vK9z/50ZfnfhQGW7AzvDQuM7gU6+Fg8tbbneNcci0FKalIOdwCHYlAE4vls9R85XdpDSNxeR3brEwWvFVFsXaw87mPwe5uJkdPvulmF5gw1bmgkTyOgQ2QIIt3VCjVOaaL42s/PtTBoG8udM5l4AL92cOH8fR8bMPcbRWDUbl1Z7i3/e1nj1/be+Vnf+Znf/s3f20uyOzM2MAcpGuVv7E/jKCPTuLJQhg9o4KxpCukyabEbjPotR4//8fzAnDFkNjEE
QhBm4xItoXoHZY9XWIzSkZD1M0j1UcT+t/6CkMCmyaWPjUCV9a2K1fbRtHUq7q9/kJABIgogAgmL+DZV4rmeFyeHM9EpSwCARAFDsys5hDEIDnkgh0OC1RbOKW1rMRSEXhvdwJmw5B3PLx2a/Dd7z5/voiEHIhQoRwMdvcnz89nVeVv7ZdRl+u1hjLEVoelnwyK84uIYte3qrKQ+bolorbtnOcuNiLSda3ltiwxSXd50VobgikqZhUHGLwzpiYCIJTBMWJKGqoCEZokW5OKVFKy1KFH2KlGhUrKOu8ElKIxnMYur05Stkt5NV2TUEAn40FoO43q6li//fJ0a4ePj1ff+12Zt5mCHNwL05cm47t+UmpdLy7nq/p04UFDUWB2GcJwUHnQPEhU52WtHqzQcG1/Wlbu4MYOoQRfJKP55WmlqRB/fH58UQK6LcSmWS0vTxY3difZ5PEPDrtOiqJctW2qG93xO/du/vDhB1hWeweT8+eXyy4DeDAcMX3k7u7ipP1//F/+roNBcFXdRPZ2bX+Uc5wMBzthsFifjaGYTreaWA8HA3S47UZFtXu67NarTiC+cnv/9PElGzVrfnKxPjgYTarxk+dn093BICTSZAqLRYtN18TlTrF3vRxSGZx3q3V7Z/8mObcW551PRbFcrVPGQmHiygIwSxJMw+nw/uGD89zkweCsTiT5YP/uN3/wdH8S5rPLoY9vffLtH/zhKRgXVfjoGze/9o3vHj2+xAjX9/fPl5fvvvQSZPn2g6PCh/GIRmQ716tnjx6eP/P797bOjtZn6/uvvXzrENNMwJAPTxspuyi4tz0sx9vLp6eTwk8r6M4WO9v7p2frYelAdGviL8nqOj16cLg5GPUMBYKeAaxqSGSEsLF29JZd0CvlRQGS9kt+X4ikiKC9viO9hwgY0AxEeg3nKuOMZkT6wgbIgApGBqpyVcZpYH2nGm+2oyuRx0hNzZCIVI0QeqJ2z765as5CVYUrlcus70JH3dCurccpE6OZ8Qa1g5sIG6Jo38nYB7uIgZEUyByzZSNGUUDkPhonAH5Qfe5H/8LH3vgshOBKvn7j3np14ebn11i3lwKXR76oHOjWeLs9/DBcnNnJpdPYWeaDW/jKp+DwOH/rNwI4TyOGgHllOTEFVABfEBfqGIEtsaFsAMySAAKIIKpqJmbre8ZMN/XzyL3uJtnQGBglhIYrvTF2422b7CYlV7n6/EJdx7uD6rMfzcW4OZ1jvQ4FwWxuCX1M1Z2D87OLnWuvfuLeW1/72g8X898TJF034IoCDDGI4brrfCC0DAQ5aa/vb3BCfUIQrljOsOEZAaiZAIAqEPfyPkAfYTYAAEaGzb3ZXmiOvfa3mRn8UckFAPYGtT5wSD11cZMy3Gw/ulGU7Eo27GWmKyMSIL9IPzMBSmZjR+gIPbOjImAoKDBWCIxo2XK26FA8iWNv5BIlYRXLgALQG+6QAEStt0JLFkBnQAZ9VShsZEdDIuxlSUekqgbZjEQ2HjcDMCNUoqzQdSAdqFOHZqqcGRkNOxBEciZkiphVo3feABiiE0xixkHYA3nelIIKEhJ60I121neuAfWAQkyWFTCbOcc9DkIBQjnIbetdkbMOuEykouJ8uQD77J//93/l3/+bw3KXsvgOX7r7+YPdb7rT+xeXDfvRS2/++MHP/MpXHj/4kV/+qcnk3v7u3fOns/N1uz0dEdj89FSe/pAX53c9nfzLryCG4d7N6uA1eTzLTx/CwOm3nxSrs+7m9fHajQ5PL8+fP/vWk8vf+8ry8sFktPVxwsPLdixn8LjdKvcnlRwFPerariFyZVAEzdFyGLGSdRerJR3u3yxkddqNG1Zatkc60HI8GAHkrKYgWLuGY9IYVUXVzBGJgaiGsiAx1czc1+K5SMDBj0YeKoaCQSWoFlWOOXY5B/UdO++d8z5wiQlUMjopRgUxYGoTJMxNMiNLpqqYTRXJAROGgfdDdp6pBHUOESQ4I80GFiQqYwSvyTSUhZBjw6AqOZplQkQVUmMQRPMAhp1qAmIR0VxbMmeWxCJKl7tgmjOgguTcvwUBy9XaaQoVOALjzDmqgIynI2MXFTAJUGeAzvmIFlOsSi/om3WzPS2JdFAWeR0lGecCbF0Gq5MmBI0ymA7OmnmR07VRCSTr+bwAzrLuYtdEi2Q7xZ5hm2LyzINh2WQTE2+SOynM+UHQ3HoOk0E1bzsblOBpxRxBKHbaeM/Ourg1LtES1LJad5WvLpdrQPSaAkBAp0Tn6xhKItCKAhC1uRGTQVXNzyPQZJUaRnLAbUoppsLzeGskhpeprRerUYFVyteHePd1Nyp3zpdtbRqjJCQvHKgcIkwqzmtNg2B7Ls/SFg0nBW0Xw0984Se3fuTt9dnzw3/yldHJbLyy4clpPvzG5W6MX7h2dn13fdY9+af/4g6626+8+mxUPBvZ6O7t4x+0E7+NCxcAM7Txk6/wZHDynYe73/jOjm/b4d7b19+9/+S5fuNr213Tvnd58fQrHRLu33j106/Qy29+WLd1udvJquvaZ6d1KEdvfvJHgKy7OIX1evf6xdPjZ0dtc7joKu+GFYvpZOjXdTpcr88etRBYQLOaNtEkEgfpqM+sq1pMUpEfOeIYQdqnz4+qorg2HdVzcI7rOi7mjYVQEYHmPrK6NxmnJFHUxJT4eZPSyXI6KkKbxugnoneHFai1jXQtDAfOgwpI6V0U8ZVDBxUUknNABIFsQGQ7FZofcJueXCzXkaZl4QsTQK8AKus2EWIE67KEoiI1Mbp/qR/+xntvvzT7c598G4L8oz/4Vk61g/AnpCJJahlXC801OQMmDQESmgHFCOTQjGKGNrN3Vo0R0PVlibFWERh578CyatasjFowqwOAThOR5dx5Qs+uMWMHBdnY+ZhTRlIGcGBeISboxBLGOiMFNKN+cyXqstYpb4+KCZR1t1YPXLAjNuTYdayAQMQMzFl0Uo26LjLDuuuyUWJkhyUHRSqyT1h0mpPGZJp7Jl8yZgcMBEbSH+2sKDwAFI7RJOVcTAarxXrVxsK7LiobcAiT0WA8mUxG0+vXbzJhMRr6suq6TEQSJYnWbfv0wfO6mbXtgiq/XKwVS1+NytFU2KfVSdM1BaMLbtWsh+swrirvBwwAIAO07CCUPkJlzjtCLiNpqsqgSdquiYrZwDkfvCe94iWbsCMkJEU01SiKzpDFO9qd8q1rYcBOIddrbbt8Nkv1qjlZ52xJxLOWxMgeOKArgMhitihx3barpmk7MQLPTU4qwh6NeQUxAghgVZInyKAYiAqELJrEZdCUQjVYL7oQ2BkbqisZRJUwaZ4EHqAks3WnXl3qwEwxMJKPq65ZS8ypYh/YX6xqppBUDKHwhCRUYkCWCjVSFnFIxtalRFkAzTtmY4fKBF7QTAcVDyowo5J96ULbrQeDsss42d6+sb21uipIphehnU0fCfIfk4pw0xDzokt4c3aBDWISDa23dVy5/kGhDw/0VwPt/d49ZHRzAXhh6+jPaj2/DDZcmd4eAldtKWS92wgI0rqWP/PLv/jv/vW/ORTh2TNc3pcf/uv9
JpWrdoTz0Xp243wuaZ7RmrrbrUpZtNwg5g5Vy+AgmwAUo+oiZRQLHDRZaNbwje/m88e4txf27qxaWXQRa4HTFdUWo8xjFkAlLSehsGJUFLRy3e7w7s//0ll7uX/91mWMO29du/2JT52dPVkePoNb150ff/4jn+pOHw/C9OZLt9cimZyvBhJTMRznepmbevtgt2sUUjImkaxJifzI4XB7ZzbYlsUa60xIfjSUQsVDJvIv3S2/+OOwvSfzy/WzbzhpCh64osBMksQhmoeyJBIRTXE1d0cPy/bouZuCI0uxDEOHiO161Hx48uHXr9377N13PtOwiyUnkHv37toZ3Nu/88Mh5mU2xCFo0vwX/5P/8DMf+eINHv3n/8Wvvn3rzsG1rS9//3enW8iQK1/UOfVTYTKgvp+YNqmQPne2MRKBQZ8JQOovaWgIAjljDTAMVJKRAzNLGbL2zfYIKHjFRYfNvWDja6ONjglXeNle57wCimxCatADLQj7+jPo8y49OctePI8AAHBydipqajYcTouqanOKknJMxOh9UXCIBqJiZgSZIBPmmPOwrJwvkBmNL5HqxfLa/ta8my1WOee4vzUiQFuv90q3NfVvf+TmPMC3PzirnC8psJuUBAUvJxCdpdjmBBYKQnJCFKkDJ01MkoUMt8d7s4vVvCFXlSqmEbMYADjC4NEAIBuBeYcEKp0AUeqiWtoeDSxQ16TRoNouw/PLhfcIxi64dZuSCRpSdifPlh7l5s4gAZ92cwVadVJHO/r+82YlZREjh8nW1sXj5jvfenDwxvCNN3a7uN7aqsYH/kS65TkcL+uiGAdPDuNwEN6+u5+icwrBBR/QQAajYGizxUpId/cn5TCtmvO0Pp+Eba/Yki5WC3Fw//j51nqobVDlQVHqIvpRoModpcZPBqB+tm4bgrXGxeHpEGF3a/v9Pzh8/HyR1/zKmzdiW5+f151A2yo4qwIsZqfeqCBcr1tf2NtvHPzgh4/OV8nZVhToDAajAfvi7ss3T0/P1i1U49F5atfz5iNv3bk8vhwMQQhXrc3Wedt7T7y3N2kXa1HNUt66ec+nvO8sLs8ax9t7g7OTVUbb25+Mq4lYePb0FAIxX8yPH4s1Di1pvvbK6w+evbe7PSzLmAq9sxeW7cViNWvWZVEOnh2e+6LIXV6ukgyrUTly63hxthwmnRTdz/z0u/d/+LCJ65s3r63X8ezb9TTsW0y0X+y+VGw7t1qls3kTQT716Y9efPjh/DLduHb94flRM8vT0bSLWWLuEHJeluF6peXFaddYe7UXMPUvcF8cgSz9awcbpHMGEEBBUBU1FYDcs4X71HFv6ukzy0i9ExDVUIH1jxtNezOpIfMfvbP9EgGIRKDa8/6wv8VaTxW0TfMamqkB9HJyn3Iz2rzRmzIz2OTEXlhWjACBerFAr9LSG8lAN4JT39vwIrUGgEpATGRkXIB6t+pyMfBOxJuoqAh6x+Dwxo3d23d3hRZRYNtNOM1tdjqM8cYQ9ymGeiHPn7rUkeTReuFmi27e+eEgOue27ha3f0Ti97L8C9DOFQX4kblSYnRVgeRUVQCQSHNGx+QKFUYEcgYuQRZIHeWuh/CY9AFh6Ivh0Pr6dgBFZd9u3Sx/+c+utgbJgQ6m4+kEyfhsRrPHmuPhUrY8jLqlzp9Pr+3gy3dn3358/vgZHj7cu3Orfv+rmmD84MmBGyzQdq8NRoOpS06kamhUO704fbqaPx1UGkrJdU3AXWyIKgecxZDMVDZNaACmRpvi+L7UzDah9ys1sP8oe3QVKVIPPe9BUZt5gPaJMdp0IWzW9P6AoRtS1R/ZW5FYTU2u1EN8MWrojzM9M3vjKspdxgCSTdTAUBQYEUMwQwRkI2eIgBnUY6hYC/QOnShEMHPoCElVkAlAzBBZRcDIDE1R+smFUp+49BB6s7aQqCZCJIXcK00vBM7+b0gEsa+zEyQyMjBRIyIUVUQGRskJ0JAMkczQITGwByo8C6ICZYRe/O1/kgI4BADlnhUGpiZJxAGgKSNgjyNElySqWWQOzoV6XSXVtETL9clqNN4e3Xrz5/6nfwOk8LhmaIalhLvb9WQnPfj+fo7F/OTy136D//Qvv/un/nI+ePvhbFGf4rXxaDIWlcuuWwRd77z90cXT89l3vjYelt3Rscb1qqrKdz8CKOFyVj96WmiEBR0etYgql8vL7mS0fOwHxWrcdKv5qAU6e/7JncHZcv3Jg1vfOT4+G1fvr9OjVRNXuCqoS1yv0I/TznUicFRwuV1fLv/g4vLp06MfYHUTyylrOcE9xJDTcQwupowdaq+Fq4JlTQpiLFAUJTlQEeNQTatQhfHAF4GYMeVIgJI6Qc9kBKhAw8I5hA1RiH1yKtgygncFOsDSyKGC5twxO6zYj0e+qKrByPMAAEBIRYkJkhIiihRSIFlWYVcQCyJxKAIh5o5RYhIDIGQEUkMiVMuqSUREyZJhf6RCTgZo6lnNMgJalhw3AbSm7TSZiGDBKcqgYPOcJI8m06Tiy4EWIaYkCoH9slsVgVQkCdR13C9GYIDmkqbpcGKWF/V6jaklWWeLqFlzCMMUQ464VboG8rQk8hpzE8rCwM6bCzGPCk0bGQnJVVwMWEPhl5IbCGe1ecyY5i5UFHzAcuD8Ip8zknFpBqtYE6NXyCikuV4uagSRNGD0SXqgHTlUxKRYx2RidUzJ1Ls0HVenl/HsdFFOyt2hORAGcsCUxEQtaQepLHzJPD9biEEcrcrCuaK89sZL88vluo7bk8KWq/nT7nw9+j//vb8b9rrf/cf/7MH/67fq+2c/8Vf+jZ/+W3/txHLTXL70H/3KH/yv//OT3z364Pe+Pm9mL//qX3tAW3CyKA7PD6pBzFHLsLuzffrsuD6aj4eBELt6Tgjr4bD8wuefLmYfufnJZ//1/xVx1XU0KJ5VR1OTTsAh7fhIVdMMjx+tnz08mn7pZN+dvf2FtjxghK2trfXZiffDIrA/GLKlPccHl8frbn56dhmXs2fPnmWEKGsqXYR8ul4HHeYkFeGgqtbdGjkXzjEwoxORHCWKdU5PVsuzti4rLyiVyZtv3bv/4dNF0vNVOxqWoDRAoNIncou6ExVSbAWGQ++J2kaHwBPFl6ajIVibUhIrvN/dCQGk8MRVYZJD4Yz66LgEx4QcWEsK2TS3aeLCS2MeMh/Wuq47ASu9n7iwalZVcETUxVSURWZb1VEyjMoKFb//aP7+o7OPv37zV//iv/sP/+H/8Oh8+SekosVlalbQ1chGBlKMcFDSWiwb+pFjDrHLqqoZhA08O89RSYFTLZIVHTKDGDiAKB1mrXAIgDmnBgwdCViH6DyiSjBVTQiKZGIoimJcePPsUpua6OYtYwTHLklGglA47CQuBZOPCVMExzAqx4hVXtdAUo2qtmkNQUBbyUaQUBsWIMhmU++8GTFLliZ3rYnzTnLqxDQbgJXAoBkQCnZNl0gJA3tAc8yV986dXCzaKNFhJwgZtwblzXu3P/qJtwejrTIE58qU1Q+KZh1L9kVRSnJ11/n5JYbJ4ZMHpz84Wi4vzbBtOzufD6d
t6R2OhhwgxdzGqE2qywDGMvYWu5EvNamKZJWcDImKQUUaLCdt1BNkCU1Xk9NEWJLzjk36oaMaY1Yl7OurLUfthyMhkJ+UMBwYUbm3HZsuDIb18XFcpgwpJkgZcBlH7Msimih6Sm23Oj7tmlUXu0jQAQKAVL4YA1bwJMVW0Q2K6Aw5W1b1Rg4oY25iE5EMfckdJrcL3jFHJiHveRWTiG5VYeSkm69XuYCuQoRMOVpKTIDohmyNmrjVShyTmW9F1gkqF1pUW7eTYVVuOc7CgDZEU73scuFcSagpl4UHs+xc03XD0u2WZZOFFH3wwZcZdL5sUV0WOdid3Di4tZpfbl4GQr5CNTD1GHG4cmP0ru/+gNYTZ/rmqd6ygYB21ZC8mc/1rv/+5rBpurcX+YWrweLV+e0KIYP4R0mgq+xZbzJCM9sAKS3LL//cz/2n/6u/tYxqDry/JtY9/J0v7zZx8eRwqQlVWWw0ZEMZV5xyh7FT03HhjDhLdsOqKAeSciExdXXTpOBtMi6TJHt6Fg/nTfU8pewIiLkMnkuXLbcUIzseFu14TMu4bpduEKSrF+99s7wx1MmU33l1euPOubV+d+/m9t6gdCcns+Vqbaft4YPvffCvVh/7qZ+tbt1F56MI03B7Z+v84XfzoyeYnSsqHu07dmhRk1h9UsQHBx97DT9aSM7rxWo1rwmTSdy5e2vymZ+4LMuUIpT46rufWfzBb8U6SZLI4oYwyzGwU4xFhsrBcKf0q8v4r//J7sdb2n89o7O4dfHw23L0NVk8H+NWqIbDW9e4KNpVk8+eTsfDi+7sMz/+7vtfL9wheuQx5Nff/cSb73yiGw4fN7NP/9k//6nP/4W90e6X3vvWf/m//48nDClpNsq6sf4bbLgePXJexV64jHq3WG9g6yuMGEEMOzEAYMbSQYGmAB1AAygbiemqB42wVy37Xr4+odBbAK6AFS/4WaSb24gSECME7qkVPei+L/UDuYo8vnjszs7OB0i712947w1FJfdlM8SUciKgJCCSUlq39UW7XqbYFOXQA0inhpRzQjegkA/uXu/K8YcfHEPX7WyPX33l4Bs//P7N3dHr965/8OHjJyuoZfDq3e2iLFyqY7v227zPpaj0pgA/4LxszxbtosvmDMgSh4J8XfO6dutlJtKckguuKBlFRxVeG5fDQDvTUewaPxp1IssmRuBEqDFHBInJGFrAk0U3mzcByQU3GAzOji+6rOa58JxiJKdLcJNBKJUXSdQNv/7ebMhUgpu3q8//5CesKtJ8tbcaXr6ffv/Ro1B2r737Ui4wp2I6oYM7r3Bi1HZr5JqTeRHjmGW0vVWvO0PnfcXqutiOyjEGUGwuzs+iLXbu7b13fNE8aaLEvdvFtOTTI654/+nhfDDyj46eOwp3p1vL5enpw+X+1s5oVK1m8+VlaqMNxlONcnxuTx/PC+8HYz4+O7PcESEXVEx4OqkSyPHz+cCNRNYLw9Tq7IOT8WC/ZD04OIjder28RE0XFzNVUIRrt28dHZ60yovH3TCdx9hermPO0CRUii6Utw9uWCAbDUceRU2sI9KL5Ww0DM/PltVosGxcCaNq+7WyGtUpW9heLi8Xzz9YL2Ztm9ZRt/f59Ml8whOSi+PZ8WR3en6+vH+6bGToitARl/vl9njr9HC1yPjWvVeDdKGVNq0GngpvX/3yd7Ym5dNH55PdG8dLCtWkzTWDPbl4PqiKgEMxBzIsyT2+f8hii+WyTZl8SZAXa2EzIK6d3n5n5/lsNT+xrnUUNpNkZt5c100BQEENoS+36pVYNVMxBRNVMUtm0juGVB1THwzq94c/FkolBO1jRKqgfddEL94iYG9J7BEpxNbzBYF6RJ0iAiFsWtIQkXp+IUJvgzDAHi+tdpUzhU17myFekewVkMg2v9KICK/iTf1wrsc06ua/rtJwoKYGZMRSTXbf+fhPvfGJHy2x/ta3fueD7/5hbmbYkgUyVg58Z//a2ONkiK2mxemH2NU3KtvhtLU+5vOlX7VwcllmxnUNMZtI5UM8vyjK0k6e1B/+bnz4YBKCrRM0nbQnIJmDT5rJOfBBukzguCxRBBEImYiAEciBAyhHkCLkFrIgJkAFUVNDYlAxQnSODNWMOOmIdQdyF7E5D/Ekzp/Hs3OOabB/Y7x1vWizPXmwevB8fbBaFM8EPRzs7kxHhczXP/jhgKuXLVg5XQ6qz//kj0/HL+ESj46Wg3v3FrQ6fPrk61/50ry53+iyKFFaKT2nrKKb+DAhickLH4+ZMdKVnk8Gan37Gff/unEf9SbjTVOlaq8o2QttqUcYmQEgUW9h3pTabgw5vQEOEPtsPBgSXS39tkkobnjYileuIkcOkQFJFJE8GAuwZBPKjB0iaAZEJEOn5tS8AmeADNrbpJxlw6wEwIakRmSE2n/fLIJMjoxVEYHYgA0JKIEYM5gaqTMnisAKwCqQVQFAWA3U9XlKyQLAJsQgks2M0aPBRkbyaESiiqQE6JgBqEMgdigC7JgKAkfUU8E6RDNLACn1faUAXacevfPgDEUEFZ2iM8WmLtJq3+VRCCeHi/FgfGNnspqt7o2249efLmx2tvz+3Tv7oZrUl6fhxh35ZuUlbTMsHh52v/Vru0Ma375148aePH4avv/t2R/+uuKiuvsGu23cO4DzZ+gX1bW9o3/1e2XJ89lq/KOfH1L+/1P138G6pdl5H7bWesNOXz753NS3c5jpnoyZAQFwAAIQAwgw01DZpFQUBUpy0RRZtEtWWeXwh2xTtqk/ZIaiKamKJmmAosEAEQaIAQYYYBJmenqmc/fN95745W+nN6zlP/Z3GkLfqtO3q2+dc+7td/de77Oe5/fQel1dzM2kHzHR+9nxS7dOH9j6yfngqeF41Hv38WL32RcWHz60q4U03k/XSWluLuBjWPwBk57yfFXk315ePqj1QsVSx+XCqWF1Ua/urN+/eDS7++57pCHJL3N7OBy9nKa9cW+Q9obTy8HJk1PXXgiDtIGDeAYBMAp9EJWwsawKUv0EilynFjAE7yUwMIgiHyVEBlEAypBWrBUIMLJHQc2BFYAmMIlyTNFHbZPU6CgRtFZZbvK+1UlhC2KlEJljBCcxONcACEjUyBAFDelEJRZNRPTBpkoQ2thCdEgalQYAiexiEHSBUQIoYIpRAUjUHDqtUydCKSqO4FgoS7unYNLLNKmqDY/WtWe8LMEorVDm09VoYpQl4NCzNiUjzu8l1k7M8nyxl2LZy1TE86ptS7c7Hs6mmxHpQZ4BUm0ga3mv3wsV91XveHi8Xjwe91ICSbSASEQlzMixZ5ONN1W7IkAi1CARVJKoKG1KVLoWU1LGemUQjAusqeUqpGCNlpaxDJUPoV65nsKe1mjl4WwWe30gqnxQUfsYRFHUQAjeYBlj2wSO4ljqql1Um3LdkEkZEi1hkOpN7QMo3zBytEZprRm4Cdi0VJZxOmswNmzs3bdP0zQdZUV9RoPC3Eh7tw6OzfoSHL/6/Avm5v0Xf+JnPveXfvqkrhsOpHF5dnHthdvyoLn+1LXL02XxuL6xu4d3Tqp//ZXgGq
phc7/UbXZD98rTGR8MQIXgIrT1sG17H97158sP73/F2lalfZPk7dvv4vvv7BRxfvEI81FVxxZxMChkM38ui/jo1E3/uT+85iWF/i7L6NGmtYrGo2GIftTP08EhhUExOGJXvfDyaxcXF1W52ayXo/HofDabNeF80UQfyxAiSmgDJKRAgvgQ2AeJ7IZ5olNe1957YFCna74/2+wkyTAvDiZRkUw0XBv3L6abRhdr1yZZQlF2EkMac9Iq+gnh7rBnhIFFSGc5pSqkNmoAhehDmSWGSHXhAxKvTNIAhcCKRSJnqWWAPPpbg2TUg6Wzj5au8XDSeACjUxPqypDJ0LQcRcRarayIoETaBPqtdy6/fe+fXR/3/ujP/PBX3/z7vycV1WV0FQGRzUgT6hQ8hW6y6E8SYrO8DNJKU3KRgUl0YBFkiYKAhqKEzv4rqmViVgwKPQoqgSCiUDPGKI4ZDQihOAgqQa1U4EA2Y4gkkGrL6KyhxJNQJOHtTciHANEZnyWmccYHaLwoCBw3mbISMYooRZvgyCilkT03oa1iVGCBMCLUwVtUnr2TKCpGteXQOhEjpFF1r0dQJkk1BwHUzqGxWc1JY7QaJbFqUpsM095kOOop/ezztwZp4oMIxdJtoufQtkLKx4Ah+BZZY+PrTcurjTQMPrPEHKsIyuLOU0fPf/F73/p1A/dS3azB5dpuKI9J/6wNJvq0rvMk72WJTjIXlSjlW9eW6/GgL4lLGExiqvPWxcBRotIxitJaWxU7Iy9y4MgKgAg4smclSjk0AY1WSBaVItCSNt5ahxRIeYwaoalbxZxGDwitd1Xb1lXVBgcKs1RrpRyK7ulkLN7WEyurWioKqIwyicQK2kiBlWgORjMhiRGoY5Xn1ippXKDAjCIsClEjoQdfWVfpXOsisWWsvCIvEbbufvBNXAcACllhRbxhZmFAHRzVOsTWGwN53wSJLlDfWrcJEkkhinCMQQEYjT602msNRBAXy9nOcFcZPUizzOa1g1HRk2azWl5uByME1TkvqBvNqDP2yHba7wqOu4/S7Xj5I7awSDfY8VXsp6OSAm9/ze+hj7Y29KvUz0f/ArYX9qusEHWtxritTgEkAiJotY3pX/z3/qprKh9DrhOFyiWTo6dfqb7xbxPwBhG1AeLSR0CEGI0iJEFmdo1oQ4Oey0b55Fa+mrazB1on5qBQAmRIoSJRoRLtWysCJCbNQiAszCZUl2ThxsF+kdnStdNT72L/Rn+XrH/3vloN5DoNRrvVxplBTjGaGKEJu6M+9wcc3PrBw82iufO17+y+Ijs3d5F5c366Wl66+28VzA8/uJft7pvbnz/+gVfJL7BaTx+9k7TRjq6pg5uYJ4A8IOxLffrmG/0bh04UBKIAXDG2mVIjZZdaMXsQltwMamGjjatqHxkNZ4GTD+6Z5T/u7Q5Ds0HTt9PNer3CPEuO02wwvrdYtXlBVUwuz/279wabU1mGTw9weMbsSNXlT/7Ax/wwv6zXsVndfuXFoue8nj1/e/c//Wt/4+/913+r3yU2EISRBJkQkBjYb6EP6moYlyiCihRenRkhBungo4Gh8qIQLUGikEQicxORAQmFrjIhV6oOQmcQ2J4ZlI5EQdQxOmRrZxMEUAj2SkHqlrTc8SgE6KoK5yP6Vr9fjPJxMRybXlHXq9Z7ROAYg0RAaGsXghCG1i1Xi3nThuBC7qMYX/mGdQqkV+t62Cs4RjWPg0EmLV27tvfh/ceVcLq799Xv30GxoHq9nkpVVFK+d/eOypLxOB2lyfn5ykduGBvfNJUra9e6yBhZXJr0qqpdzBoX0GjDUaw1pIhDvD1KnjssJj2DAHXjYdALhKnBrJdXPm4qZpP6tmGDnXbb+NbmybqOrg0tlFQo7UFrTBAJVZapyCHE5iCzBcZEokewGlMTBgNScuZLfbS/c2N0/Su/+brqmeDpja8/vvH8QU76xm5vN7eXd5+cT+dtYbmNrYqQJJsYdJYXOxNUCTO4EDkIGV1V1WzmWokXzp1fhsKbi+XiuYMby/NK++HpeaWtRuWOnxojy/nm0dHuXsPkSr9YVEAwzBO/9jZwPzcXZ5uO9aE0FKnUDab9fN54p4xjYsQi2xlk/QhMDkHp0kdTxL1xAqvTxfncGO3BD/J8U8Y1Rxdcf5TIyhPx+cUqku+PevXCp46torKu61qnYOrS7+z2bJqul9VwmDa6baN/7sVn1n597fqhobRy1cbVTWgIE0G3LqsszcqSSZHn8PD+yd4kyTRxsAfZjbMHb/Vc73hnd3G2rNy0R/Zytu4nSe/WvtvMlQvn0/VsutQWRdQgT7nBo6Pxk7qcR5hk6ZdeffV3vvblvYMkNendt5s2ALRorVJD02zme+PB/l4+Y9esm7OTxd5ubnfUym8m1zN8ZM8a33pvzEdBYdlq+yIAEEEiS8froe5qzSK8LQHrgjwsHIUJoAt8dWAxvIKLdcLRdpsAV12F2w94pctQxxrs0NGdDnDlSgXEzvEDV1f6jsAjW7KNoICgcBRGoS1LaPt9CiBtkWVdvvoqt8pbtWlrXP3o+4nCiEJbdzeDEiIB4hefefrP/LE/FRtFsRy99KmP7+9+/fXfaRflk9nl808/f3zr6QQjtLNm3nIM13dHroT9/s5uQuk7c/d4qhulajaguUVkLQASwSoFhp88eFe9/InNM7f6+7b51ltJvQHfcgjIAcjGEFTWN2lGps9AoAEQiVJAACXgAwBKiKgziBmwSFujeOTQlb2B3/5hQVdUWq7l3h0FuybX1eNLX238+X2pm7qqd4zKJK4/vItnj1XDddixH/90hRrKtp2fycmjyX6yqcLnX/n4DzzzkhzfRg6ulJiWdizattf78Xm923v/8ElTfODnZVwvz5/0YkUSOUQgjduWr27dJFsbcef/gc563AUCQWIHP99WrcYuqXY1K+DWvNwF7DogUbdcQpCtkLRtXUX8aITo8pEC3H1ivDqY2xcL87Yz4SPXWxDwDEYUM8YAFGkrJkIbvUYCMCIIQhKBHYhXEpQC3fUFeo6gMIhIDEgGmCkwgWIWBUCCEJlRdcFGJYiCCKAAgDQQRwBE0EgcOQoIESkdgCUKKhVBRCRGRuwGsaiUISAUREYEjUCERshECJqIJFpMAASBFWmFUSuDDEZpEWYQgCCROTiMzjkJzjOjtmnESM5ZUixBGUyNqheb3YPhCzd/6Ac/88MZ7Dz57jcWD16Pjx6dh0eff+2pF1+dPHj7vVk9TersyQfvHr/wueyHxjFW4VtfM4+e7Pg2OT89/Yf/oPryb0+efaF967314hI3mzjyvT/7dC+n6s1fak7ft7v7rAa3X7t1743vPXvz1Xvf+T7cv/R37ue7o8WTM9tfueAfnr4ngMM0hcvN/HKZt7xZvk0iN1955uTyoSposVj3rcqqGtRmf0jpJPmxW698e1p+9ezBCfE0gpuu7j549N79GuoIHqV2tHrSG29cqvaKm8bs5XiY9Q9E9X0bReYSGRUAo0KlFKEGNaBkR6tCqxzBRBcaBgm+tsCglFLoA0cfUClUVhCbyFpYo2KEG
CJETokCxECiEpNhz3GMIRqljEkRdEZ5QkazQSBmVgzEwCCayMWWJEbv2YmyQF3Itx9JWKMRUCCJSAxBongClCiEqvYRRHfObg4szMBKAVrkDEUJawQgBC0foeLJwSBPdsY6QlhRXNaxamPfWo4GwJTrMhpJhxNoOTR+bPJUlNVWt2FS6PPzRbmoUVufpb0ss6KgXgGFYWoNQOGYGFMJ5BfDQSoYKJWyXIbWEhhr0qbeeI8UHXK02lrUbSzrwAFw5YMEL0lmE610ispoBlAQfAOgPMcKWiZs6kqjDNJMS1RWUmWKSq0hBkHSynnHipTBNkQvwVGMHBOrnA/KiUJsV04JJMihqtdOyjIorZAgsSDIpBBi1ITMbCgMUxzkSROAdVJXwVUxcpPlVs1n/WJvsTy/ZpaLe+8ejvo3fvi13c984fsX5+PBYK/gi9/4rbu/+usToX5v6A3iIGu+92auFd89wbqViE2iw+5w8NTu5vRBRrFZLBEJldW7g7SXrafwXu+5yR/51OC7v978+rfGB/0brzy/+vDD2eVl/6nDFSduUwZFSnDoU5m2R1EPXtpxe+ZiOvfr6ozhsDgI7SacPk6LbLOsRoPc5kql6UY0at47MuXqEqSdnl1AlANrn33p2cdPzpeVc4przwoZEGPglpmx+/OUy2XdKDycDFoHm0VtVOpJfChzhT5EMratLWLOrc8IdvK8h3a3n1zOVnHjUwUjTUZCojSjpEarVBsCDp7IkNKJVYAYgQVRozKUa60ItQJk5jZ4F7w2SaoVMwxA5UabnlQRVj5sWn2xaX0VekShKbVVlpAQXO27q2mSmLaJLOrB6fyXfvWb29tx97fIQBYFMEnIagItLiqJYBNjYiy0BaPWTRRU7FlnSkEkxLp2EggASaMi7WPACBYzBZCgYWHPESMigyaQEMWL0kprZOYA2EAkBA5OIwYQRCajLCnwHD0Bk7WERmphITXzfqJj3jfzTVsyiBdg0JqNMj5G4pgpQhAj3Apw0IYBRVjTqqrGvX7UMTpWCYEDHwO3QYv0jVYIUrdG28gQGjYK0l5/t5iMB+MbT982qYkUh/00zVJjs3JWQYjT6do1sHEh0dK2rfPIrjmdXaajnouhbevW+b2nbx0cHF673v/CF1775jePfuutN6aPHlobbj/3/I/8uz97/darn/niq//jP/p7y4ePtKWnX/3MyPsny1VVjL74o5/Rofz6V796TKFwERtXeU76vcIqB1FZvd7UIqG/ky/WEjmKQkTFEQB0lKCNJZYQ2giijTGIggETg8bUdZvSDmirjUJmo8UYAqtICCWGCOKDFpm2rYD4GFyMDJGsEYVJYrRRBK4ysvAckLTBoQZuosqS6B2RJgXCFLyqm+g0CnKksLuX6QAqQvTRB3Q+RkCbmMqr6TLEMgkN9PvioCVSSpNhBmHnnEb0UVxAJmDtx2PlNo5bqWoZDgot0bWNRGmCz7RSgrZL/LNFUG3wLgYXnTJoC2sSvVm5LEkno36WpmUVi7xYTcvrN2/ePt43zfr05KR7Cowm3dXdXKk2sWOTAnaN6VegaviI6tllFLqwwZVLCPkKRrkd5X7P4Q8f/XUFOIbfCwV03VaC3VAkwoTYdaIwYYjsmyAQU9Av7h/PH76T0DDJLSzLTcnOt2rch36OrfMuEkB0KBENCDB7Qc6UsgjBm14m+9cne7d5dr5eztAOuZ/gYISh5TI0qxVpDVmujIbQ9DQk1qY7R6fVVF97+vnPfj5JipNf+f/gxeWk12MgJHU02XlwssKsV2PdGx7ZXCHmvlpAU3FiaJAuytVwMnr1j/5hv6rXizo72mvapbIWjDJJQZI109nxzsH8fBpmv1W6mSkoneTj3WcJEzUaqNQwh5YBCbzOdl/89Nm9d/qjZb5/qzFmoMzqN35FzVcWJUGVBvFt7SEMdC4SjNWXTS3jXTvAtm3LJzN/etJDiGFtIe3pIoLEy8frs2/q5z8dld4dD+nJ6vzON+x8vnhr8xLpG8PebCNqlCaGN8yDfi8mxXzZBqHlao3GXr/92k/+2T//q//Dv+JVuW2MjdE7z0JoOhj2VuMJ5LUye7t7ZVOGZg0OQKmP4l/dbt8ztEG6eL5RkGqKVyXYsD1IVyazTkyErTloe7SuMOpXl5Crq8IWlYXCjITcpSUJZNt3+z89mJBkg93rN32EJkQRpVUSuBXmwIBEztfsHYOr6kXj6yjKR5iuXaLJ9nrjvb2qqvvaOm6XVb13ongQrAABAABJREFUYy/Rcnbn/OL08tbxXvlBs7msQ5uULaUj+9zzTyVuNj2dLRdNaPFkVd3oZTF40lSyXFauqVqKHW/MW600KofEErM08T56FwwZTYo49DSlRjvPk9EwLeyyaYzBxJC09dhYCM3JfK4zU6R51ldluRmNk4fnKyYYjnuYNb2dDEFIpJ43KjJZqKq4amKMBtpABBngKDOa3MdfPF7Pn7z86nP33n9w+WSFEsmDqxFGOun1np7s3fvGN9+8rNpVcIDOu+O94vhaf7g3Cak1WaI1CmDaTzOrfBNcaKE/nDe9ctFOH18cDsjyZneUuXdnss6J2xzh6U9fNyZunLfK7JiJUebx+WkdAhqwRiHAzm4yHPeVW82g7e9ku/v99XS6uz9KCAeHkwePl7X37y5OXz48VqLdsjLav3zjhZO7d8Th5cP5SnhvXGgyZVn39goffdJTjNrq6Ovac51kOTKlxo5HabNwCakBKRRUteQJWE3StpUTiGbdxCTt7Y57J8uL51+4WW4w0Yk4qVqf9P3Z/cfV2TovaH8vV6qtOZoc2s1GWSarr+2/8Prv3rErTAvTLE77Rj+ePl6eFLSWFoMl2k/zhx+8XVhz4/rO0odN45IWi4xqF/aHdLg3fjSrv/ze1/u7GRFW85VxlGX90WgwXWwCw2i0c7ncFLRsQVaLxa0bE9OPVVt9Ymdw+eF8GJIioSDJcM/Ch6vtk8DdlV4+euy21WYMANARh4Q7cgxv2eogIhBBGNBohSwMgIiMwh2JEfD32PNAMTCpbWAZGFHBtrGrkwuIOj13e/8FQGAF2/TpVdn5VdgNQISBY2cUgi7yA8Tb8E6H+O1agQBQ1EfrjKtorGz70rrQnaBwBEZQolAkiFKopOhZacrUjixlrt7bs4Oez0WK4vD60Y3DrFdMH92bnr5LUvayNJe4NxrIaukvL+TJY1NxrFmxYhcANUOLLNEHTQBBpVmajfps62Lni7UzzTe+akNI08Q1nizavABthQU8K5UwKUj7YK0wI3sgBkUYBUIAXwMD6ExCg6EB1yADaIWkJDJAQNKJ5/Jb34qno+zZm4hJ1j9oTmYq3z14+cbq4sScPwzvvtezSpJi8Ownqt6+NgGo5O8/bh891KN0vWpGz9zWO3tlhg4H2agXmnrXplxfLh/eTaZnn9PL1bD/pRc+/s2z+19HrJaPed10uKLILMzQ+bw6mhKAAETGzsJMW9l/myBUCACdvwxYISIxM25zU8ASO4vodmEgAgSy5VMTEQBvU1ty1aoWAVEIADrQ9VUnZpd/B+60w23+
DIxSCAoZ2MfQeopOkCMER0qzEDFzFOQQIHhiTwRaAWHoCsYjA5MVAUHeutMgdOQuHQVBUDrliVgTkCIUICQGYlDSxSyJUJEynZrGGCJGp5VC4ggEoCF6YQEWRmZk6hZ7AgoVEmqhwGKUSYhAukozY4kESUswZIkSRRZEsQ8ighCDa6Nzvo7BcxfSFgHSOipjNTByy+HwlRd+5s/93I2Dzwz6Q4l8+EM/ThLV8vL8wzfTTC1pufuJTx186jOobPby2mqFEbLDgl595uzv/Pe9xwtauwQI3727eud90zgcGTPJ2sIszx9lt8Y43kfWenRUn8z8+cqm5v3vfHf3Y19s1jO/su0oAcvz+XQw7nHTsA/tYsUXRBA5UuVdsjeYpXBxdrqqmt1bO361KSNVNfdVQrNNtonPgB2MJsmt/a+dvPekbKSaPmrdyYWJHARZG27Cerr+TnKjPhwlZHYF7fXrtxIDp48enj18FGNkL0oR9bUeKnWooAeog6ga2hC9AFL0bQk+cESliXqWMmCtlFLMkaNnFzUGxsgRfGRRAhREmBSlqQHq2vYILbFKbGKQFSkU9t4BoXAACCFU4isAJhEFpKKKJUSjIbNWeY3go1cxqtBtcFGEMTBzMAyRQwwxEFJEFMURE01aRHMkQZCoQEhCdF1AHyZFnz1UFYz7ezdvmLfuXjZVtIyLy02+7NldE4Q9sJdGRFwJBQ0z6SH4uq56RT+rY+nipikH+zt+7fI8uaw2JtK+ttpBmqY6sQFimujpdGHATiRL2qgkMXrimIMHI24nST07q5GICERpHUhF0XnW3x31y8AQ2FcNI2sE1Mo1zUo8oMqSVBMLMIaw2lRGaCdVWYCzGjaMfasdt5m1ZPV5WfrIEphZxAfwXmnZG9LDh66vwZOqA01XTgMPU5P0KbOm3njUgMwGqHWusMYosL1ivomZUuNE7Q1sPZ/+J3/7fzfce3paJd/9nS/vrmd3Hi9uPPXcOMeD4wk8PJ//8q/G73zYv7Rmpz/5zAvpJ/b2+0+d/4tvjM9PNnceWlZOsPzYM6Of+3PlJNW/+Tr8v/9/sKqiC7Y/QNDtdJFa9fE/+1ObXX/4Mx/f9P/+6vX7utibufu3P/25EurB6BXz5MF69ah5cjqIZhEjfPoHr/3cv58Mkk/f+/b7v/mL7Wn51tn9HjduvlqScsJzo0yRHj7zVJaPHQTbKyTR6aZyjarLZnpRnUxPgmtNltlEr8u5aNWG4FwgRYnVlWNppJeaug7zOSsM/cIAyvl64aO/cXRDe1mU9fnaoY856d1+79qomC0Wd+8/SjAt0Oz2bErR+JCkqRdfR1YQLGKhEwWgSUUBhVogBAQyFsQLAxGnSpygI9NtlQipCjE4z4AZ+0luWqZTV/ZyMxXTOq4ZsVGJ4p7C1GBkiBJYK8ooIBDSbP77A2hRwCYqS5W1SblpkJWPKgAqxlGWQcvIAAwWSQHGOgoEUYqYIhBp07ggSKAttN4QaGEMPusV2HILXbkTAyoNAqK8h0jQYMyKRNiH2CqtAwMLE4nEMMhsw4CeCCIIE8TEWIskAMbqvSE2lQ++E9QUIKYmESJpS0JpG+8YjbLIEBgEKCh0GIwSSMlaYiLnOUk1uBh9VEIcYqoxCk52xoc74xvPPnt4fD1L8sgkANpAYrAt69XFenU5vZzPV7Uv1+Wzx0e3n7p1eH28mrpH9x7YyD2Fo/3rgOrgxm7QdP1gPzQAbK996Q//0E/8kYuTB9cn7f3vvFe4Mz1993MvXMv+2E9/98sfzNbxL/yFvz57/1/90//u79fQPvuZz2uii+HRg1/7ZzcGe/08HydycVn7Yd579uXvvXM5eWaArnRvfiXtR+VCVTXaaKM1orcGfGwMJUyEuqt2UIgRC9xI2yNp6irJBFCJq3xZtlVLRsUIkcQHjm3wESyhRAcoiUEUpYwJCE4QYkw0Nm0gZcAQkgEubQSpGlaU6yToWG1chIQFQuREK2FHzKESEuondtFGFLJWDRJbO1yumpSVVtitdBauymxfgYrRR2FAhanCgCjioiSYDgg2TYhA5ESxKhtRZEQgMUlVunUDHDUhjYq8qVcaUUiBMDiOErVRCnyRZC42wrha1Pvjvb3hSAtvqgpgGzr4yBoC2MGkgRE4Xt0Ounl+qxZ1v1Ao/j4m8HbG2zIot/7w/2m856PhbHvj72IJ28K6bRQNt4aR7hMKYvCBBweHr3zpJ7Gpf/KVm7tN7S7uPGlMPur1e5Pg9LrcFLoXn/pYlXwYzy7y4LW4XCO1zACoSEAMQKKpNxiZ/dtPLi+MUvHGCyHb591hcXikLh7g41O8f4eCixyir0aFTqCVdbtaTRurR7dvyWx2evc7e6oo5WIRWiDgxk8fPHTU6xub9jKlBFBicCZLOLpyswZfam0jYoVAo2zn2k4kMkFJBHucFbBzuTinzbpAiCAYN/tttazqiOP+rU9v2prTFEh8bEJ00blGqX5/tPv0M/7uO5ZncX6+OX3SC3M0WgtxGxRGCb4NAY24xtMwufYjXzq9/bl1QzQ/03df1/P3cDWXdYgRWCOY0Mznvvid7Hjg24I3IXn8er98YgW0C0WfBkO3O5aL0GzW83Z2mU1Q++Tm/u56MbNZalSomvL26HAEim1Sl1Gn5JJwa2ev3x+9/sb7eWIDoBFPHP/y3/hL+d6zg8FYZPl/+s/+140XLwYQoggA4VUKMUR2AEahQtSEljjGrlybPsJdyTbAKLA1GXRGBiAQFNj6j7Z4dAQRBowgxEBb7jYACFztnj/iqG+lIpVGFwgBY9AgHkECQ4AgoQ0lx00IdVOVITSublyIEiUdHyWDw95gXG6WfrUWCLPZlCHRQfl1rBzt749PV1Xrw5PLlZfY28lHhZX1+fv37lWlywZ21jZZkrjoJfo8typy3VQhNhOrKIALWNWx9I0gIzOBWAKwKIREuD/u742yxGhMbQ3aR6cHxvYSwOjXvrCD2XSjVEpgkKFp2tZx264x9/2eeu7FPlilrddamJTEYZGqwP70ZLN+wnGTxCAF2Ou7Y6vivI5vvndeKH36lbttC6GJAGRYQeR8POoP9s7O6tlJ7cjHnppuEMCsGvrc8aHjNlV505bSNmnej5JCFG6j25QmS87eu5xOV72xfmYkVVvvj29LHP7GvTteeTugw4PDJ4uq7rELLrHBRa8G5uBoxNHPp8vVak1RuxClDSKqanzZNDYfVpCJFSnbG/sHUacfPjFpftQ289JtTpcuzcSHXFEYjRMXwkyocq7Isp4ZDQ/sG3ff2j2Y3H3wYaoy6OlHi82tvYHGqpfao/3R45NNG4KEMOzbRV0N+kVQeHGxLnqTp/Z2dkhFpujXsoEspkqr87P5nUcffuzT1x5/+OatnZsS200Vbt0a2lw/uXvy1A8cLJr1o7OLzXSNUsTR+PjVT95/+/UNu8ntpxylvRHNNn5S7LYn5zf7fQ/VweHorcsz7vX6x6NQXT64O9/bH+z0G7VZk3Klb4b7B4eHyQTtyZmbXW5q54ziW4d7yrRZqvfz3klkNLoR2awbdF4b3du30u8
vH69HN/rwOwBbA4d0ReUhRLgShDx3ICHZFu+JxLiNFuGVf/QqXgrblwaCCETh7hEnpK7wEjrgEINghA7e0WWNtjllvCLO81UyGQGvGrK6Zxi7kAdJ6O7i3UKchblLPSN16bYrGYGu3CUMrESkK3Ckq9at7qWzzcN1USmODEQAICSo5OGjtzare+PdZxUZbH19OjdNeOnlZ/Mb1zyAa9vj3clx8eJ09iDP8yI2I6nTOF2+97qGvO4d6Vzr2cK6BQYPElGY0LCPJGo03yz+7n/VG+PaJDyPudfsY0sMJhGbBm1IpyAYPZG2ONxxWaGMFe8VRxKG0P3fVCBkEBlDgLaGVgFqCAFUFERBRi8QQCP3lmvr1nh6kdo06LTvm2jtlBtz67YPm555ERezHM38q1+xk8Sv3i92b7jzBbZNdbHKs8SdfCeaWTw4Sm6+Ska5chnZ9xLOcqreuffcqA3abppl7+i544Nbv/HGV97+3utWIYJIDILY5fsQCKQrwgMC4MhKqU7E6foOELv3AhERM4MgEF35RQMIkiB0uUgQUld9BgDUNfkidl113bERuQqcXemGAN2mAK4GErni3W0PGDFAFPGCBiVwlBjARyURFKBWWvsALNBE5VvkoCRuvUII4FthJm4DoBgkAsYIzFqQGKXjCgVgpZGAYnSiFSJqVBEJAQl0ojQAsUgkjcBETAQJG44U2FN3/2dpQ4gxAmoE1oTAZFGlyioiRQa0jhA5cFcw7CKgShUlNrAlg6C7mDaKMIdYV+y9q11diWBnJqy1Itc2Nk2V0n5Z/eAf/9M/+if+0qA4jstAGisG0eQDJP1J/rHPGanmZ2dmUPgoNst0pkLV1hLa3ev2h/eztEi/8uXwld8eS/CJmL2dchlnmfK39+W5g+S1T5fpoe5/Aki5GHsnl4v2wh/0J5/7vP3EF+Jyjqt5BmH1vbf5/Xezo75/sJ69ebdpQz/vC0OQJivI17P5RdmUEUgv6xpijFEwS2xmDROJaTewY5JeTT9187OrVB29sveV9+/8v77xtbirKM+AgwCWm3J+dmcIJs+eMb1DZWFM+2gUgJ9eXPimTfvKjig7SLAPDC7EoDgaH0IVGgc+OjKdzS3aZBhJG03iPftWMLax9hw8o1LWinFAIspoQ0iGVABCtBQR0VhlMBAScohKIvnWubW4JrIDiBqUC1ERbvO21vhWt5XVohV7qwwAMKDnEACRkX1UMRpGCeCjBA4hqkQbI0AYUUIkjEjMTIxRQKvtTDQocm6gdu0ywGLeDpI0Fx8CrsFXVRlLTPOk2myid8BWic4BjncnvPHTCjC398qGwZcRL6tqJNBPkgEPHGMUPjjcXV7OfIgtqXXpB/mAq3rUS4n0RoWNrCCBvrUaYdN6QWpESGmTqE2IdQRtktW81o1TpAprU2uX5coo5WLrfRNAmCHLjDHST3rrxUIwQY3DXjpwAJX2rR+mAUFvyuC9GOY8S8/dpvWh9RxFNVV8+/6Ma2kEKhUbH60iFWPl/N1p20t1z8JORiwQPYKhWthXNUfbONpnHM2qdF395f/9/yo/em2x4mePhqdR2pPNNRmZi1B/8/v5i9dPf+Vr+q13VUyTnd3DH/vc/Z6ZHU8CmPT5I//o3dH+RDd6FuLNP/XT5/tPn6/nw+svqYPvq837+TAR9u3l3FTrD3/x5yd/5t8VluXFNG5O91+6Nhscm+SLl/MHGUcop0bbPrEsp0mWDI7Gyx/7dx7V15M77/5wbrPD8UMRVmAwQ2QRPj6+Hl1UWZIWfSGlgerNZjOvcp0GG2eXFWuDmUoGCQdaN02/yObrkgAVUAjgXDBAWaJyoVEvd96BCrHlIs0Gg2EEriufREhFhco9vbdT1lVT1u8slgCcmaxnkjSKeIcKLRnnQsSYawWkTZcrQY4dKBqCSCCjAQExAel6j2KiwXkVAD1jBFGojUYmYNC1C7UHRXovM4HhQVvXgiKqr2z0zqDsD4oQ9bLyZRMZZZKaGMPvk4qKIlNWc4yMkmTWOW49I+ler+dDcFXtHBAjAAopQHFRQFMQUYkCRSaxWarJatNqX7Y2ocSoSDVh4MieABEVieMgCixoR7oiQZFCkwUBjGgUkRKU6INWopQoAU3KCyALoUFUjZME0aJVhivPdesrsEbLoG815YHQx0aY29AmWgXxhB0qkhv2wUejLbStAczzTAWxvaw/HI8ODnf3dousrxLbL2zPWpVmylpCVdWxrfnsycV8dnn28NF8Pq9Xy6yXWEWqbouD/aduTGyWVcEdfuL2pw4/oxS1S0cmoVTzuirvnS3Pp2eX0xsvXy+O93OTDk77l08WO3s7tz/38pvf+G29XKZt9fmPPf3Lv/wPbx4ubt3AR2+89d/8rb/2f/y//a3X6rb+1jqfOXKLeDQcD/OzR6d3H//uT/+FP7mKk/2bN9/7V5Pv/bf/3c2eTTKqmoAI4Fswmmt2WimjCSEgkjHaGuqlDUGCEmMkYde01XrdtL6unQAGZg9YC4BRVRTFXCChQKKUMchEGlBiMACZImvsE+fryD6yVkjKsiKbWNc6Typm1tWslO4TZQTG5LEKBFSz8q33pJLEZHmqvZi6HQbQhKRVYq3DaJWgGBL2vm1Ba62UABPGymOg6XkzyDLNThCVJ2Y/7vclUWUD8warBqInZFAEta8igM0TiqCtCQ0nxu7mcn1n5+S8NNoaJSHag93dG8f7RDxbrGeLrW6K2JW5AnepMZTQFY4Ao0AHUiTolsPAW2P4lYNjGxvrpo4tqOhqBMOPPj/gR1LRVZfvFjQjwrL1ichWYBIkIWHNP/2H/+if+oN/ZLqYHl/fP0rU5nI5X9TJ/o7K016Wtk3IdychbtTebh8+Ed5/K7z7Bj9+BGUoUt1wUCAQHIIGk15MWzVc2xsvqp294bUbdZaJpfXFjDeZ3d/JcSmrRbyosYohRlDaswkmHz/30qJRdvNk6BprVNMfa9DBuxWLtUqJbOrp4c4tleXoW3DeWipdaWOgptU2VzpxZFiJA8fBJDaNQTi40vvs+DCxqjl98NSzn+ZNRbFJlxWf4ex3v8M7w+L4GhUDJpNMdBAHJvfLypycVt/+bd+IFYitg1QLYWDjtY3WpIeHKhlJq3EzE7tROtsfjldGiv0bcbjj3qWq+TplxrABHwyG1OD6/hvqNx7tNKmcrqraMaBD4ujbeZMcFrsvHu2M90427WTzYb1+xGanKo9sUTTVanFWXZxc7oj7uR//7Bu/9VsfsLor4CZH/8Ff+Y8G/fyd73/3v/0H/4RAdvu9v/o3/1O2ShKDphWX/fE/8Wf/0T/5ZxQ1A3d4EhAgYA3ETKEb2RUCslUYRXyXULgyCsGVPaFrO+q8a90pIgTFsAXyAlN3wAQids05yAIgomBrJtrSLKCDqgIAEIHCmOQ21rFpYxM2pDq5bgk05iBdHltRQpYAMRmOd66/UGLz5OKcVnOoqrJaB5F+P6lWZ8+/eOt0sXm4XBaZHD17+Oh8bUgf7Re+XZ49OfOhwcxOl2UE0phoK71xejZdVR64Zi3ae4ltDIwlaJv0FA
rGMioBwslkXLtonNJBgVhQOuulAELaYs9UKooBnvS+++HDVTsn1U801E1jdZKlSSNy4+mDbFfXjdPC3gdNbHsZYeLEmsQe9MbHz+j5A/8oTOtpqDlEHVuDAcQ3wizOh0Gue0PLTo72By3Q5vHi+RuHO5//zDdf/90HD89r1R8M83RgWgk+BoibW3vDerbKGdrFahNgb3fv7uOz5Unks/Uem6Qqfmj0qS+/8/X37jV1aNnrmoPi7M47FzvPXRs+c/DG117HVVXsJnRjZPJCYhxNTH0aJkmKOkxP/e5eMQxKZ5bR3tssk5j9wPFzi8dnVb1JBBazuY6Mhl5++ZNPHl2khdKNPHV798179+uoBvuDQZLNm+ZyuvFaZao4GI3HB4PLpix6mYo46h9UDTy8uCRjhFkZzntQWLtzsHPxZGZ0T9TA5vt1NdfC1w8OdWIe3ZtCqs8Wm7TYu7zUo51dIl6u/NnFZraS27eGh7efOl04xnygEH310qevn5ysPnzvrUGSZ7h7fPOpt377e0Wqelk2O1mXjy+fvTaalzVMFxrwcrk8bernDsY7RwemFxpYZcYd7vb2Dm+8/s7s/BLdpe8P+vtPZane9DO4eHC+NxpFprv35qt1m+m0P+z1RjWBZ5HFuiwrH+v20d22ewpCjNt4GHd18+w5RmDewodEJHahXoGu5ZBZoiBxFEQhQJZIhAAkzNt2TBGQeNVkiIEFriysXX+mIowcscPicSdP4dZt0uXUuvfLFbG6oxZtO9qEr7BKnWi8DZ11oJsumSpbiwoAbSVj7vD2LCKiOoo1b22wW+1pqz2oGAUNLNbnd+5++xbq24eHQ+tLqfYP90HX3i9ihMRmmclbacaHh4VN88uH8b3Xq7sf5BLrvC97Y+/ArBbEjfINKMWkIBIpiqtWIU0EZdUwOUwtcvRkGMlmOgA775ByUwwRcjC5DEZRpYBEVAgyC6MPwhHFgdYQPJAHQDAW2AMHCAy+haYErTuOjgqcrAU3a1Qb17SKAuSpuTjBZuOGJg4L6B9anSUPZyaCzW5sFuPik59dffjN+OTtpG1kUUmz1uf3ClfBeB9tUjdFuZ7T/bf9nXcbTdjfKYw9Hk+i6cUf+NGqbh6/9w576hzGcbt16gYG4a51QABFqPOTIaGgsKDaThZEREIiCECRGUUhgiDjlcmsw6N3J6TD+HSOUkLpzhlf1WRidwS3AiHB7xlWAeT3cjcAwF4YOTKjVggImlk8SvSiCQJqQMliRNd4HzkEH2ofXAgulHVsGxDGoG0UKkgpUBBEoRakiDEiCBAoCByARBHUHJAQjUJUFsGQIjHAAEhBJG6zeAIKtbIQLQsriIw6Nq67FaMYS4pEFIAhsFpZTagxMDKC48hIVhNARACjUw0aFGlFHHxQXoLyTaydX62c94iAEmNI0FpNpMlQ6eJn/uCf/MJP/VxaXGuamCNaYVAoAIFEae28B487h9eUomZTg1Ga0qZq7Sh3bWDM9ac/ZV97YbGf4ze+yWcLpVv33PXJn/yftU/d1pO+1STOUF0NpMT1ic79/muvxuEg+ewXY3+cibCwcMg/9rnxh29uHp9IfnZ4dGwCxE0zf/KYy+V4bzegX1T14XgyPV+FVPDaYHzzmYffPXcxBO+ySdI/HA/G/dXiskBtvG7eOHmF9j/VFI+a9smaS4rSR0F7Mi158cH+UdPbaxIzkH6SU7ZnbphJ2pRTnYLKFaQUQJjFkomNa+qNr4NI4gMi5VqsVgY9KuNjFWN0sWka17bsBMGz0jZ4tFYxoQXUhBhBERKBGKQQHSAo0qFtxDcxVK5ZxbAJ3qNCUToCEnQ91x3nSmHU1BAyJGiQGUELJxZN45wiVKLYB4jQNF4x+UhBjDLKasAomhSACCtBctHHCL7ZYq0vLy6v7++Px2Mpq9LHfmZ7E/P+h5chxMWTzSQbsnZtE5qA/X5aVQstcC0ZtOtyNauwbI56WZvpdRlUkMQmzjmDahkdJfoi1JSoxvlV66IGbYteYbhpGxIPsQlNmuSxly0X3thMUCKodWhd8JpMkhcSDTQrXzVNjJCbzKaFSUARIFvU0DiOcrIud3IBBpNh55N10Smts6Qd9DIVRFjt7Ax4VS1CXLm2CVw7rj2rxIDGRQPPHh6//ejRwgHEYIiYmUCAaMPROag9CoZCY5aauqy1TSyqiQvHy9XHC5sfXr/93Bfq4oYKs7f/zS/F1z8Ysx739eru+fr8q+nbh/ZxmbihFDZmasbkNJtBkhzupPDM6hf/pdn4RBfjTNW/+Rtj0GE3+slAfuDj6uK+q1hHRzHYIhsCDe48Snrj9itfm//8vzW93f2/+YN3Xn6eZ4fq678WHr1nJzf0ZO/o2rK5fNJsLrP+zaShp2jn4a/8QtnMy4PbeNwTroobN4rEqsg6+iK3GJEBYhCjIVVMudGmdxIbm8DB7R3McPZwgTNdV3VqgyZNDSdW7RRDQzpTql5uKteiIkyLcS/NsuJitmJBHfwo0b1xcThMN+vVvG3Yg0bqp7ZA6WGwGhVEJQpAlAAgGAjEmCniGFBrFAC0LA2SQYQoLZEmUCipSIhcaW2ja52YQKKVjSQ+BEblA5e1X7exKpcR5fquJaXP56GuqiZqm5iqWnv2pBJUOrEqALvW/z6piLqv07eI1FYBmRWwTaxGjhzKTRu81kSJJs/MQRQREgkIaahctTMpro0TbZP5ZVkyaQvWKDCwWJSRNBORCBJ6dokli6opvbRYN2IzyHMTRVzgfqICo/McOuU/V6km7cQKJgYUgAFlVGeoFqWRFLjgWx90TpNkkKARcWQMoziIbLrMs2RZIsARJbekxKRJb3f3cFJM9o+Oh5MdpawKLSEE8ezdbDXfOF6uy/l8eXExXSw2dVkKCgQxSAObTLKEvBzcmNw82r3/7gNRpo0m3RkKGywXF+/fX1Lv4OMfz0Pyb//7v0smpNd3qw/D+sG3PqHy+b12WNoH//rrZ9/+F4dH2XPXjl/4Uvbew289c+PpT/7Ia6/2q9fq73z7O2+/9x/+h7Ja3q7O/tzP/mxjnvn5d9+5/9Z3h7X35xUN8o996WcuVuHTX/yRp8v4zr/6+fH1YW/36MO3H/QuT42yNsu8jwYkBVM3rRnlHmLQpJMUGHzbbpij94uLRbUqQdC5KESRoJUYRVoGLWAU5dawiAZUACicG6M1J4lSiP0QmOi0jJAqATAiJkXHjEq1LgJgokghQuTlUrTTRgsaIUO5TjKNfY3V1NWXbUEogbNEc4iMwQiwc533ywu3KFojgSAxtSKWQmTSqp9owtiKzzPSFtuGq1Y8hLzI3NIpwiiKgZiVULqu2LlEJcUnX/nYW299d1WzMd6i2hnluzv9GNvlenE6v2TeGk2jSFdVzNhN/xClM2aLQuwuAN1U1jW7dkALgqvuKdlKQNspDOHKC75VkegKYgRbTQlZtlcI3v4jQxcX2DKMUCI/Pd79qVc+Hz98q0frZv3gLCNVHOQ7R/n4AAtD3GpNPniNNsYQfbCafV2lqIoEnXMtMkbuIwCrtlWyfwQ3Xkhfej4Z7zA7i
U6TPdjfof0hxHb1br753jeyIvSKnOtYMg5/4LP01FO2P8zKTXt+ml8/btcrOzrK8uz08twL7t88XF6u84PMscP5pU5Sdm3jQqJUau3sve+X5To/PoDrz4MyWmiLRoiCjFyFzXodCcOgXw0G5NjduxuXUcDF0nBzuTi/h+momOxCkeS2BabLb7zR3n83C6KEle21UTc6M/2sobz/zPPNeHeNmPWO5u/el8v7Qzvlxw9C9cuGVBXKuFr6R+/1PGyC7xtjJBQWbGYTBH+yiNFELxtPLUAUThKiyaS6ftR/+fNgB/rhSXVxt95U/YMbq8WM92+KzVDEVCt98W7v8p0fHLWfnqT/5m75QVOvTu+T7I0G9s//+R/98td+92d/+mf7B+NF3a4Xi2TQx5o+8akf/6f//Je4cijBUBdjZI1kcIsp8SwMrJEUgVbAAcI229IZzT5Ci3QJmO1SGD9Cm0Qh2noJVBdhQ/IMuqtzgi3apKtLUvBRjA0AoG3buqlFYlO3kaMSw+KRwJKtpWGpAdpES4yB2ffyPbT9+eJe1a7JN0kCdVspq0lZlZq93X2vYzE0ZZDRcX/Y07PV2jCfnzwRdkJKiJrgRgNLiEUiJHHjpRJoHOfahOCFJU3Not6kqbbkow8E3NZNajIIIRW2LPujfH/SZ1PH6NJE60LrSaaMb6kN0KzrVQjQL5SiSEYLs3cuHyXJOGnFkYGm9aSFY6QIuabAgIJaaWVx51aW9I/WJ222pERAKrV/uPf4wePp0nlgnSc+Qmb5mZsHAnK5XD14c6mU/cIXPm30Gx+uXJHT4X6eKBqNDmbz9Rvnswd3753O69qzQ0oQnh0l1ITUB6PUal3/w1//cqYoG2Z+3Vw/6OWjJDTsVovLd+Od33lzdjbVOl57ZS/b0Yu539kbJvuHMM7kcvbhOw9Jeq/efvbJ3RMfQlDN4bCX2369LDXEYYaoCglMmlqAxXp6ur44SIfGJI8eXBgPk9RI0+wMi9nZxehwJxtce3g2HxTpvdPTp28O5/Pm9HQxwGtsUjapFrTkD4vsqMhmq3qxmV872K9ndOF5WW04eBvUaCd7dHJS+pZsce2Za9//7punF/eOd9P1ajEp8mXNvbw3m5V3P1xSUhzs6MTpNjK2ZRH9XtQaNaD2Z6u+0gQwqxc3d4cT2NW23eFcqta1m2vjLD3oX1xcWivH+7mG8N5FffnYrc4XvlYcQWk7UnyUhaxXVL7pDYfrMj56PG/JpL10c9msqs3TtwrXVq7h2WoDJloUK8lWKuKICMwgHIWFkYNEQelEHIaPMl9dUVh3T6ePOHVdJ9P2fQAiwgpJOGIHqWGkTgJCZN4yqIUYFCkE2maQtsmiK5MHCkDsXhSdAVZ1bsDOrbRllm0Tcyxb9xEy0hUFm7YEsw5d3OkC0uGOu28RUV3hjbY0HOauOZEZOIoA1C7cefh2kQ+va5VMZ2PRO6PJSqtms1HKIKsQwTchSfKClbvz0Nx9xKcVj48DA1Zntqrp/ILagFHzVWNo8AEJAQUIEa0i4iACJCQSPIIFSlTWZzOg3iA4i6xISKHSxooLACRAYgxoYvbga1IedADlQQIEB8FBZsW3kBYcSmgqdB4dEzAIg0QbA6AD7zMCKNdqkODhQd0bx6Nhu7Mr/bTIc9XYRabi/g4skma1tKxxUcGqbOtv0dGhun7g6oTKoBelbpW7mKcB7f5ub6RjE6vGPrNz89Tc875CJoBIIACqy3sBYtzujJA7tYhQWIAQSSEiXcmJiBRZCCFud06d+auTnGirpXCnJHb9B6oLFW4PEHQaJUDXv4DYxdKkc65tTyoLAH2kFsXuVaK44egDpkHaUgQMJm0UUVGYQyQfVEQMjfObJrqG2zZsGq5DjCQaQBNrIkAtqJCCREBBhSKRIjIEIEASrQk0gEJRSIJdkWiHLo8oQoKiCBiRQBAIkZlQocLMEgfxIsiMni2SFiJRKJojWm21BgFGcE6Y2SEQihJFoCwgEmlSKBgAowi1jbgAbRuVgCYILROpJKGqgYObz33+j/1Fp/YenZeHGFJXnn/zNy6WF4ef/II5url2LG2bFIogxqYFRa2TuFnadq00pgB+wbycLwGaL/5E/qnX4pf/GcxOxs9kzZhxlK3KdU/ZIgPdnsWT91ZP3lM7z2SvvWh291ujKLTlvDRFCqGmzdSdnyUQoYflajroF/agOHj+BbJkkr7j4Ot2Z//p9PU7k4/try0Vk1vH16fn//IXD25Oxq+O711sqgEW1453svH88gIh2Rva/81f/uO/9va3/+Gvf68hK2IDCqZ6OqunqzvjRTuZHOrxjk1S1c97uGN74HmZFiYQI4OP1LQhNOIbr8lo1tpkQDl1BY2hiqF1dRs2lW/DetNEQFBANsEeiBIyyhjLwIFZFAF2tFUgwMASA7OrxVXg1hxKpLglZEGqdBI7DyYzAmIkaEOitRVUCsggoVKQgfgcFPiWIkaGGIUYWxZm5VlHVD5iguSd8yCKJIgLIQAoRNU9BJkh19Spsv2oyxXXHMpVWeRKnAZKQ5Aste0qKGtacL2Rqqfuex+8P7bjINisvBmRAFD07IF1slqts14+ANowb8q6AGWNgdAGVLM6egmjxM7LVaF6KSsjKrDOi8lmuXKKHXBQiTbpfp71i3R6OWWqN20jCn3dtiEWmY3MmOConwGAC7gOnIqNATHTe/uT6uwJkmp9VKB0DD5EZEisWpUtkCrLjTbAbSQNPkQVQUV1+/rew/lpVQYAVAhKK4qARFGEGeelIxJWGD0o0lneG278i4uLH789Pjq69upf/etPxrsXDy8OM1m9/uZgLa6W2eXKaJ1YlAdnRX5kMrsBvzvsz7/1naq+2P/Cs8Vzz5W/dTdfZlm2Q72+XU3td99q7943O0Kf/AOi9ynfD/VMvFcJiKVcQvN3/s8MYfXOu70qswDrX/315HM/4gb9MLqlFk5le7C8C9kI8/Vxv7f4/vczuFv9+j8+e/jwcjyO+X5IqlTpRCTUTfTeJoYJlMJAsqqXsZybamk1zheL/lgn+/nSuNo7zr1f+5pbSogUWMS9YW+YJUZlhHR0uFc1TZbk905P562breuN87uFvTnsDdNktlzWvpnXmyTLQIsSzFOt25Y8cIhoFCQYOSCCNQbQRYxV5zkOTiNpiCoGpdHaVJARokgIHEWAKCNQCTlh46Nz4h1ICIGVoEDPmgpiBKnbFjmMMjoakZr0LuZN5ZyLzrFQcARsxGw4DofF75OK0kxLjKml1gWtGFISjHlmgLgJ0oAICUPUKKixCpU2SVZkEEUn0OuZvTSOEyx9ixpNSkVmm6bBSInNtXDtom+DUTpTKUSKEJIk8ZuovcYohigxmCDGihofvTPAnCSQ2IjoDXIGygcvwtooF4MCYPZ5ZpXizSZg1PNFJQ56Jsvz7PTyQpsksB/k1mjT1q1EMGkKNhmM94/3bu4f3NoZ7w1HI26D1tHXPoS4WM0f3L8zny9ni9Wyrpu6VVpIIZLJLOVZfrC7c3h8bW//qD9IXYRhLotLf7x73Czwep5fvv/moy9/43BU
7CWD6/u9HVMEuPzJF59dHRzs/MSL1U5xuX74z//Ef/AFu9MrMjCu2Lt1sdKnF2/+wZ/6Q/DK7fMyuxPx1s1Xjv23y7Wib16OB4Mfefmzqy9/9eDZZ/64HH/5vQ93+0Z6zz78zbOH3/i74Sd+9I/+mZ/4oS+9zL+ZTgbXbvz0f6z/ZPPl/+ff/t7rr1chYKqO9/LBJIMN1hWi0Za0SgtgVa9dC02IbVVWZVmJCBNE5gCRiRuWBjFVqkGFAgaJJRZWAwRNgQgFMNFq3+Qkvg1tFSkQpgrEh36alD5671WmdaS2igJKopZAoqPSxipKlQHnNrNVsxKLqt+z2mit9aKtoiBoFVBIAXjopWmm0LeOkE1ifM2obFVHIgITWHnna4npquKKvWc2hKSCSoCEE6uB1WrdBkWBFWJCor79xvfKqo5g6kVze3/y9LPXdZKC1qeL5UW5VsZu56IoWwQponQpfAFhJurgHNT12CvVyaQQu4bZbhbvIvkszLy9RsDVBV4AukZzkau6V+RtggEFIgh2YzMiIBEKKeKu7yQF9VKvr2eLclX1lbN1uawvekftKlmPyBTJJGA0qQIfNRjDJKvl4nvfGkyXVDnWpI/3Ry88EyqI79+Bsg22GH7hh+iFZ2KarJpNXmRap9Q5nJTGJLEvPHfzKFn+6r+lJfiYqdsfq59/Afq5b2tUnB8eqiyLy6WJcTAZcCoiBjSaWzfsbs7LDTYeSUNbt22JkIgPNF3h2bmfnsn9x5NnXkiv3QzaRqeij4CkNQ5v3ASEpJ/MF81kUA5W5fTi/Z1bO+7lm+3Qbk5nAxdW3/5aEkqsK5ZwEBLNmlnTuN+MJzuvfgomuU4KWTQwMHrQc2UTbTr87MvRPRWbTf3hm2Z9QecfmvosLMoxKfbNwBC7jVYQA5RLCaZQ3mmss9Q4rc21G84Wppfj8TPZsy/R/i3l1VCdLH/3V9z5e9MnH0IyGrUfl37BYnn6eP3O97hc5h7bsroZVMv6nd/99ms/9WPF3u6Lezu3X/qECly5DQrleUGoAjqb4B/7yZ/8hZ//hUxZgUCI1PXMIyBARPBAPnJG3dkDqwQisGDcwmvhCkFLvxd+vNpQM6LSHS4VNQIxEkgQFAQP3HUObwtvEKiLtNFHhBNom3Z+vtzZGyMLMVtKAmCMorXVwbfRSXQahVALKAq+9WftakMgCL6MHlCpLLPW6iwBNPW63duZ3BoNDq4Nvv8731bL1qSmbtqssFTYVbUe5ukkt3Vdt75yiKentRetGX2IjWsTnaCARW1RV9UGWjbGWpMD4ma92Sn6e4P8+s6YVcj6qVjWmaUUSYtR3PoSyypxWmfGpmy1IOBi3ihrTQ7r8gJR+kWi0iYtIE040UFJGdBaMolBYcjG2c7YLHblzf/xjVvZtb1s3CzWiEZpg6gfTddaq2uD3unZJYqUmzqfjCCz/dx87rOvvf3rX0tCWD5sP7hYrFp6f7p6XLoQFWlLSqcJCQXQyoxw6LQE2kSYVpymmFOEvmKJkyydVauzi4W+aNCBzEK+l9gy3rw+uSg31dmcWwaz09Tl8fj6xenmzrsfEktaGAT36qsvvfGd95ZNe/No5Dcr0mDz7OLsUgXnGK7fOGhWgSPtDoepS96/92i3n6+XM4Wxb2gBZEaTR/OLYZ65Ks1SvHUt3RsUb96bhQAJAcbmeOdGWUN/eP3G8TXerBZhvqjD7eERh/6H33/44d2Tg2vXlIFBWnhWi/MngyJHMWkxMon9xCvHDx6fDIsdX66ySMfPXz+d3gPsv/nO8vbRUaTSpoVvZXTUb9zu5cXlzsg8czv52r3vZ7sTjVhHX/TTsioLn0Lavz+7aFx9dNBLBgOSZFlHoQRC7O9PfvDTT82npx88fOyQsyQzSUx6pvEyHGfhYhPKsHjELuhimERww1G2t5fdu7OtOHAcELpCA4jCzByRGa5wYAixS5txRxnrXEUQr+63IqK2HwW4K7CPAIJdQFS6MrUtTHobPxXAyKCIowChIDCAiNDW+wdbc5EIqK60DKgzoHQpIhFEZBYE4quWRQQgQkYg2jL0kOgjyLZsfzfcNTMCd9KxCAsqko8WGkBaIIqAKDbJyXp58PDdasPm0XkPeyIrGI37N64H8jFU1XJtUeuV89NT+t03shBBj0MZd2jlFytqPVURRYGyIhGRo4ixSkR86xURGSXBB6toMgST6TYKEQzGMtqJjuoQMSuSZBjQMBvs0t+kgJQIiFJoLNhUYlCRwbUQPTQ1aA+kQSWcZBgygDVRDXUFwBAaEAGOAAIsIBGaYKpWaixu2mZS8t6k6veDyVbOXaxmg2Zt6g3O1gnliAAavUyJYnaY57Lydx/Uj55QVSXB+CdzTh/DqN8f7u5C/tpTO6U89zuvv603wQfhGDrjJyAA4/bnW9eoCHYDB2i8AgsBICnobBQgEVBEoUTuQmpdgwZvk+/bYUQAEBg6yhVuc2tdTX1XuLZdSW156tA17m0Tztt3gW+iZWFmQCYgdKGbcyKSBKgxxFAiKRZwjoN3oWnWdbuYl7723IpEjIZEibdMKEAkxIiokWJkZGECQoLAqHArd0USAVAcQgsKAECJQiAJgQChG662AX+KAsBBI6DVHKMmTQCklCKtdIKoNGmFmUgIwIgk3EIMBAGxFG5QM4KFEEhAS+Dok9SSa6lBIlGRkiSJUru2HCp4Znw4kUH6YOG8jZvV7M7bv/vP/s3Zg3tTqF/702c/9h//lTJK/7Bo59P64pEi0nvXOLbD4Fa/82sn979z69Of7eWHq3e/hXuj7LlX/fjT5ukd+PLfB/eg/eB/sDePqber2Ck3w/ZxFi/09WOZ3KyzgagY2rlqbNojTEN9537ywTv53Xu9m0/Phlmx9xyvN+uLc4OZTUaSYChlnKfpZnoQ1uvfeJCPD6G34UfzcUWZy6zsZod9tzPM94ft5ZxDCiqxO73TdfWp1z6emv1f+8abMxW+++Ck/9TQYEYAcHJyevd8cnSU37quTaFzk2WjxmmFLpbzFDFUtXNOxPrQ6vyA1LBIxlEwuAZ9Feq1+KptW1+2dSl1K0CKFSpEdJjmqbGFURkxKRESUOJj2xL72NbCXseAMXBbim+FHShkVMhMVgsqVFahYQ6EAjFqNL4M1qggqBPQhiA2ljoNFYTBx+hjIEXAwYsIo/cgoBoOKYEi5BBJxKDyXj5aIS9XzXoZz7E5GPf8qn0yn9sijYK2rx1ZPw+Tie3rpiXczJdaRpeXC0qHoVz7EFiZpo5t23of101FEYskjz5QWQ/TBAn7CfWLBJ/4ZevXq4CAg0nm2lCkij0GwLCossGOpypPir7So8nQh1Lacr26cPXGhbCBkKaFgBKFCLFpWokq8bJrtU3sunTcQMuOvFqFRRKVZiKkQZ7XTbBZlihVtaFmXvg2TRQylugiokVC4Beev44hbqrGovICASQEtEjRRSDsFyk3tbE6TUxGwCRYb571+Bc//+pTL96yr3z2rk8b5vHtQ3DT3Z0BPTrLJMnylJDa1dJqDI4gKRIitbLFrDFB63/zXvwXbw43JrP7jkN
sgPefyl57oZ6u8l2oQpH0MjzcMzYh10Ban/fs8A/9xObN0/4Hp2U+V2atdVp//dv5g7PbP/hKVUrtI67O4sXl+nJhVK5Dir/4a8VgyI8XLuZ10WttuH3Qw0en6JVWhiETZhedEEYspV7y8qKart9+Uj+erj/+Y5/kpG1Iom/y3V5sYhNkM9+0wRula+ejq4Z9Xce4LGtqnFLrGCS2LlN4a294/XhQXa7vPX5cOQwEzuSBUIHv29QFtgCIhpC0NkTKUkCQRFlRxChOgogm1F6iZ85RI5CPYBQawihBuq0DRIRgUAK0CskJW2XbWLchGJ32svzudF0qZsCqjm27BlT7/RRAWokhRAK0WkY961xwwnX7+wNojGizpG0iksoyu2jKJNGGxDWhrDxEsqQZIhoUCkWqlQaNvpeAC76f2dTY2aJdtaEFRUguxkDEgGJSA9FXlWbLgkFc1tcQWZFoBHTQOlhHjshJbjzHRdtkCfYLIxgDSQxRAC1JKwDeC5E1xoBiUAysE4Ut+xA4wKp1Ppg+pv3hQcQa2zVHT9GnJAAqw97e0bO3b7+4198hVBziyaP75WpVVtOL88vL6eW6qYNzAqiNisi9YVIoXRgzHo+LvHewdzDMd0aHR43kLbcGyT2+DHcufdbOP3h3unzYz/pc+qqfPf38c4NK/dZ//n/YmwR9dnJ3Mf3C5/963HlpPchv/eCL7e88onmd7eqP//jnl7dutO8/mp5WG2jqzfz0yfJ6MtZnsxtDWG9qvqjiBbG7tHnszeBFXGReN8nO5trh/jO32xs7v/l3/x/luyfPWFQXj/G3fnVT+0999kde+Y/+Jj730i/8wi+890v/30cXD75wa39Q42JdAzNGD4DR+6quXPS+9cokrY8N+Dr4tY8VS0CJGoTAh6iZIyoCbCUmBkhjkiSMyiaJSRRwiL5eQFw539atynVhTACsooiIEIHBpo2UGLSitGoZc5OqEEIbuQVtjc20zjQa49sYXUQFqbECPkuon2ezzZrI1Chz1yjSZFUbBSMQcWg4y3GUDX2FdZAY5XjUs44vFk2MRIkuW7Yodd2yti7ERIFkYd62lYt5pns2n4x3rc4yS7Nq8eDsURP9zYPh1SaZgXHLlYidPUO269WPemq7HS0Cda+fbti7GqvlijDZrZK7ueuqWmqbW+hYAHJ1t79aBiOAECIBMQDEyFopAwOrbhTQPHr9mWtPt4vLPWtmlZ/f+4B2rklzq9xopUEg0cwUgzt5zO99l57MpIoQDR3sDj75Kbp9u52u/NlD793w9q3Rsy9PodHgM6sNMIIYpTURETlubdJPh7t2r090Ue0VyRd/4BQ8CieZBaVEVAAincbNRf3+g+TkyersnCaDnU/+6OWqxVbyTLs6gCKTFdi4UM0T5SGLXLVxtua2hGqK+zf15LCsVlnWJ8G0N25jvHSkR4ccTqIYZOebKq5KydPJaM9M6yzbHZa2XTW6nwZrgtEq6V2A2/3hLzVHT2OeRgmaTuLqrJ5PR5Nr1pXcLs7OZ200xx//Yjt9khuN8wgVayZMZfIHPkbZsH79qzKtDWX13rMMF+3ZIwI/emFPfvwPV2o0yfP1HJUZ1GUIwU8Ojp/5wh94++ff8Kczyyt370mRq3Xls9CHajXJe5vl3AS+NrQ1udPZ4+mHd/cObkTUVYhZouvZyiaZAZSqrtcrbpfP3ep3qqEiJBDcgkKQhRkxMoCAB9RbRxtEFBbu6Olw5V3AK4vaR7jaLkZGgtT10yIRYWToepA6EFf3QyECC23VS+ErcTOEIBqms6XJbJqkniNCqq0pq4XGYBJ2GGOM603lmWKsIwEqBB/IKKV069GoRCuIYUNslYq9gp3bPH64GUzGEFRo2slgcny8t6w9BBWEVxFKJ1UZCRW3RAhGS+ObDK0iXTufF0Vm8sZvWDOSTpKkbqusGHHkGANo1qmmXHHCybCHilhh5Nb0kuXJym0w0Ro1rn3jA3BO+8fjJK+CCvkwI5JBb2cwGmu/IW8BKSsmIdQEFCUlpbllKsLTnzh89I3lPG76hdKZfupg7/GdD7qIUAy+9imCQGaNwrZq33nz4R/5qZ/+Xz7/0j/+v/43J+yWo960hYsqWNR7Q50ZZBd0Srs969vYVuFgMtFK+bYOlIx2i3UZKDeDvd13708vL8u0sJ+89dzXv/FdRco5fPDBIjUW07CpnvTz21m2X0x2vZnPT06TvL9eNY7DOB09/OAcSgroZ2ezzMBy462Txbpym8ZkWZ5ACLwug+r3yvnycDQmhYu62b22S0bV8+bR5ebaLuz1sycnU4U0zvPzs+XAZL5ZJz3cS/urGnaG+2k29Gu3kxcyoaWupfXVatW6y54duQBz147G+L3Xv5lYlVp6+sbx6nK2WKxKCcfXj/Z39i/Wbze1e/ve4/5uYTA52Jm0Pl7Ozm/3jnWenJ5svCPvtGraxw+nx8fH5Wbjq5Dk2c5ucT0d65weX1bXe5kkJGRJyeJs1bgwnhxUjVycTX/1a/XOTr+4/uLL13d++1//G1PkptA9EdvD2zuj5XTja3dRtpwmw93DNqyX8/O0Z67eBaET+ZmZudOGWBR1bVLcmU+7FwJL140GCB3iTkCYCAUJibvAFwB3+GnoRKPt7R6v6g26YlAGIRZE6hjYUUR1mWgC7kh2IleN6QIihCQoSNv4DaBg98W3NhRA6oa7rhprm4zeonIAOrYSAbB0TsNtRQMSdqk7VMiRsWvKIi2Iax8b9NXsESz18DLksQ7zE310nEQdkU2e7vWLalPbat5+57vDWQ0tgBKuV5rIhiiIgFYEIUREiJG3aW+lTJGIjwAQ86Tt59Afa6vpYu5XpconEgOSQqNAkYsB8yGTidEhBzAGjAWOnUInCtFYZiGbU2gwzaHLe3mH0YGrJYAgYoziSuTA3iEIRI+EAAyggAVXG/7gA4TQxHKNhzPWO4djy7G9vGea2pKGIAAqNG1sPDF7frspW3O2jmczUCYgoqawmbkn77brx4PD6wNYf+rFj71/2Uwf3ZHV2mhkCFGiIo0oLAhAHQPxqs+rA0QxAXYoQ9ii5xCp86d1whF3CGdh3lqUZdvGCggsLARIaitEXdmeryLwW6z1loqOdHU37rjrAAASgQMyIHQ5uOC1UjGGGDkGioLOi9LIAm3rncimrC5mm8XcaSTl2QKyh8BdvbBW2OUemYQ0QAf3NooIOlijhBBBoQgRQURACYzoomfSIBiYCUgAFBIgcQfmQy2KGLWxpDiC990Q18aQWhUBFAsABREhrYmJgsRAGJUC8RVpZtAd5F1pjUZpbYAarTDhqHyTj/N/58/86Y9d/wx99Z3qvbeWf/u/jJnenE9vZ337wcXtg2vPffLG5/7oH6o2q6qMqenbZt4+eX95cXH4h3/GL8rFL/6r9Td+uZ5eqKNnwmA1O/36TvFKrl/yHpy6Zj/xY+H1f9RX9/D0t1Z8lKS+PXnf1ov68ondu6HSxk2xjbPR9Wvr8xoHRTrM+3fef/yPf7n94GznqVvVbjb8gU9tYobDo9l0E8XnWcjOp/XZ41LZYZ
71rZo+uXv8A8VqsaE9WsiCzzl/elKvTxbNVPuAvf7w+r7Pe6bf12v4mefULZg8mD78X3zuSyfzM0qHv/wrv6TH3kEamkcbH8Ktm1mee8lpMNqsZ62wdZtYeYVRJyrXB9YcWdPTaADYh9pBcNG1m7pp3GpeMSdRFDvRmVagE5VYMBSUFkVgFCKHKro2tnXtSwylBQ/cBuckONKI4gEMdcq6MAor1CgKkRRRFNaRMagYICKyC2CwsJmGaFAwMiBFNkGEETUprTWgiqx8ABFwgJm1ELtOQkTmGK4wXqlKbN6u67Kqnj44fO2V57/33r27i3lwelHxRCWXD1fZToBED7BHDie7o82qSkjs0A7zwQePHx4NDmeXM+BYupCk1hjTU1j7OFuWmyK7cG1hVQEgMVqVtEEm2XF0bb1eHe2Od0fjZVs9//S1zWpJotjX3jfIDKLH433M2syiq33lfN3UyhplEqdosWlQ0biH++NRTxezct5C22qltU1Cy0DsmIDSlIjD6WwdEBMLe/3sYPfgt9+85xonzAAM4CJXClQbBAEskQAqUgA8HuVt2wy0ji3r6HsWWfOPfekLP/vDX3yuly5fv7OEa3V6A3VaXi5n9962kKSTEUyjkKp9MxwP0dW+lVjVg2ywWZyneWr1GKpaxSAu1K3ThYqhlk27+E6ld65hlsD547h4S5cN5xP78nOXcL7s5/NFfTTMYULDl59tz2f1wuu+7vlq8+Uvb6qoegOxioMLWd42l71ED03blgs1CbCJdeLq9iTywKTkQyyKXkJFExuhsKrXCspc2rRvv/725QfLEIgezRd64CTTGJq63piI1BKXgVJTFEkMYRNgvdo4YlXFzMdE68GouL6/d3567qvqjbfm0cfMmkGiz1flrAkAUAgM0qQwBCUHdrk1Eb2HoIkKmxPp2L23hTQaiIFFELVHaWK0jFqhcFBAERImEY4oEsV7jqRtcL4MNSMkiW592LRe5zo3XLoACm1iI+N00dakKTUGGAU1YQg+MWrQz8q6/X1SURSIqGMMijFGMMZYo7RC5yKXbMQoocjSBrGpTTQoDQjsnNfaxDpWAlWkTRV0qrNeEoiAbIgcfA0cKCrvowe0qUqBQKQpG/ZoUTnBeROjklRxqmWQUmoly4QjCGHJKjJXPkZWidYbiX2kFL0gC+jGC5Bkha5rrja15HaQmMzmCJoxrB23gDZPXnr2k08//SKgruv67Mn9i/OzdbVeLi4DuxjbIOwDo1WsRUVIQbLMHF/fOxrt7uzupcPxqvZH/UEvP8yK5M3ffuPRhx8sX//+AfBu4FVocyOafUNpo3vNpvjw0W/Q7UHvtetVtjO/82hXD/7lz/3fD3pGQ3wuxHpDPEr+wM/93J137hyWXu5cDPr97JWn7z9+3508bt95XL1zL1gVvIjH766a7BPPv/7em/FhpVqCOawniz/4D/8vm31qiqL/yeM3//3/raxLLOuDxa8ePnPzyeO3jj4zMeOX/vP/7K8s/8bP/oO/+3de/5f/dFTzXr/niP2mZPYh+qZqg3ATJLLUIVQe1g4qwGUEbZQgA/vUWNCqJYwErQgLGTRapaQIKdFIhdVDkuBrr5EpBTKuiXUNzGkdGMAjYjY0YmBUpNhGJk0hbsrat9GKUuj7vSwd5XPXAJHUIBFi41Md+5oGGnVmIqromjxFLQJKISezVTMeZyG2pLVN9HTjyzpqMaK1IPg6tl5AKReCuJa0IWMBQ5b4xLSbSvK00ACj/nD34GZRFDG4u+9/2FSVsDs9u9ju0DyjEG9JpN24vY2ZCQBt4cPbYJnqCKPcMYkobP1IW+5Q5yf/vQRBJzoJw0dEI8CPfrItMUZSQJoEEfbGfTvuGw15vaL1GdxvN26Rmyakti2XsSIc5vnAihJjKI0M60VcPHGvfz3e+bBHKmaQ3Xhq/3M/uAiyePuD8PCDfjkbDEfL5WX75D3o5dizyWAcSx+lUTYVQwFBENtlvbmc6rN1fHJBOwo39XA49qrVKnIIwbOrNhBBMUrjofG4dFLNffjtF1/72OP7d2IdJO/nT19rFWNVuvlpNZ9F14r4JFO+WoXZw2KUeTca2JybpbQtrFeqbXeLwabdPPrmb1+/mCUh9/cXcvrt4f7Yb2oVQSKvfWsMAAorXUWVHB7dfO2Z9mAvImO7QW01Zb5WZhGhWk/f/irOP0wFhXDZ/iA990xy/AX9lk7LlXUxuX5j/eynBjsv7PD67CtfH+8q84nnk5s/PH/zq/GNb9npyn/z1yfjSZpagUPZfSY/eNpn1glXXCjOdKuVh7jmut2kCcxdRYU6uVgMiGqU0xCkb6hy93/tq9lnXx08dcuLUYC+YQMYfLM4P2ubzcyV3/jOu6ki7CAUKNjpPSAKMURhQBaECACQaCAFCoiBmYW72yNcYa9EiK760eTqfIpQR7G9Or1XeRkghCCiALG7RxAiCm+PKgBAluYQxWRJarLEWNeWLBJjY7UJbQNQByrbUAULTisMjJGICI2JLN6JNomB2jexcessK6ILRZpNBsNFhXE0uP+k1mjZ+3snjYMoXgn6dQgBsdKoRKOWBHwvyYOPzotzXgA2dZhXm9iGhHQdpQ41GQ7OCUI+6tk+ZRNd8iZP+wFZkUECYywN8OGjmXhLyL4NlfOtcDay0c7JtKmqch3Guzup7betV2ps7cAmthUvllKTJGbiuYR000aR0FyeTvdGu9ZTlir25aifVhtZ12Fa+lW97qXK+Wa+qm3SYyX/5Nd+88/93F/4C//FX/t7/9V/HTg0qIoimaRJomW3UDF6ib4wqnauX6R120qMQLibm2ZeBafvPZrOvvOITdZ6B0t8+Pi3emgUqvU6UI6PL+bXnxkfHB4GpWbT+9V8mani+rPPVpXfu2FNLzULvHiwbtc87BvyYdHCnVnVS2lTxUJbK6aar5ulKwZFaniQpKuwblzwSL3+RHLTPPlwqNzHbgzqJZ0X+MWf/Kk3vv0uqiZe1gcj+/wLL5SXl+uFGw6KyldpZiBP3vr+w/5k10CEEHcHmbXm3vQ0Ozxsk+zJdFoklnJ1Oj+vyyrvJWQSpfm773w9zU0b8GIa9p59tW/k4mw27O3t7/WP9m/fO31sjGaMtpc+9/Te/OxtB40u8rNpXZbw/M1e1gt1vRwgXzvcu3u+WFyg5xyt6afm1Wc/c/Lo3cvNYrlqnkyb2CvvPDrtj3YHu/1QL0NoVm05XzaKcVMKquRy6aqoR7vF5GYW6gCwBgAfg1w9YoLCAIwYWQhFOALSR34MlI4ijR8tADryPHacI5Su5ZyRmBSCCDMDROjCP92XQPV7Ei4yALBEIhEEFBIGJsGu00pI4hU+SEdgQUYi0AjxKjUmANABZpCIqAtIE27XGV3fwpXDtXuBEWHnKgyREelKfsaugYoIlUZRlBhi5faVPJ9qdeduss4sDUwVZfoOPjpJ01QXA5z0JZZwdjG4+0ivvGctJioywCSUCIYOrsTCEpi2FkcibSIRYMK9nG/sx909fHwB5VR8abPC1w27U02o+33nKq1TNCrYEcQoPqKOQC2CU
h0nXKmtV1cr1hkJcwwYI3BAVAIJ9AcxUYoYSwdRCJRwEDAiAWKEyBQFXFANmjtPBrml69d0PszCMl7cd09OadZCQCDlgst6lrWuprWu2tQFtW7QAwO76EBh4Lh8sDQ7k3Iz7+no98NP/cjL//I3F4/uuLQFgaBUd7A6spQoQuiEPwTu+u+2vjD1Ubq9k3GQqDt/nUUMu8ggbr1p2/bV7r/4VWUrqG6LgIAgLIqUABB354+6uYWoY1fhR64icGwUWlRGiAJabcVFxcJe6toLGY4xOmZkjnHRNGfny6r0FFEEYwBSIMxaaQ0K4+9Z6nRnnhUmAuIA3QPFBAoEqatF14SM4kQEJEYfo0cGhRoRIwiSRiDhjthFW6g7alQApBxsyXyAqDEqJBLlOTIDe+mwTS7GED0RKsXMBAwsEhhIqaJXYBHVYnN446X/5L/4L2/c+Hi6Tr73b//O+v2zePKwSGjX6lSfHvbyvZ/6ws7//GcuG8dNO9rN69N76hvfLH/9y4s799Xd7y7LNd1/ku8UOy++iM/vxMP+U1/895rLDSmO9ZKwL8VzLQ2S1VS+9ivc5EtXy+osS1Mj/3+q/jza0iw96wPfYe/9DWe+Y9yYc86srMrKmmek0lCohBBCCCQhbOw2CFhgoJsGG7DBC9utNrCg3WrbIC/MYMAIgUAgNJRUKqmqVHNWZWZlVuUYc8S9caczftMe3rf/ODcSiIj7z/kj1j3nfN+3937e5/k97vCF13q758I8dN2iK6zUwP1ysWrC14+LKfSh9DeOzH2obt/sBLOdrfHlh2SQZz2j8ZC4Xuiyd+7car7CcXe/f3vwkV095yF1nLeTXZdun3SrZvehS1XbrE6P6gSjR55aHd67/c3rZlFdLoalG4xt9fDlZ7/jh9/xr7/8S5977Vt1SbdOZ7jvn333ZXduozXsaJd7V47euDayG4mrBNgfPlyajRRqo5JCK7Ej34L3MYQYATiXZIGsYeOcI7KklhNRECtJYkgaQFqMNfjKtwuKdUqtYEpJDUFKalAyy0FBSQxbRFJFZieaWFmTV0hKXEWKKKVFJ4rJW6MZKyEpIBobVUSTY2cViaBWigACJNwDHlgCRygQQL3LzvwUzrIxkFiXdTud3SyOw3c99XTdbr1xsrh2sOSVWS5DbzTCgV0tV02y8w5GvV69SG1Ki6q7cO7c4v7cgJsUWZfSyWq10XeZwyCyiNrMfRvCXi8/t9lP8bRTsh0tLDNmk43xYLRT4zRyfbiowEdHZUywWFUuz5yxUTAXMh6sciOdHYw06gpCh4KGqxB8VZ80fndQb40GwOUrh4tZXQ96zjC1wXcBbETjMFpOFTy0ez7Uy3YRR3m+UIxBUoizaQ1VtDYjihZxNCisKJM+cm5vuZxhtoVBoJUisyOXf/wP//gTb3+GmtnR7ZeX904Xpy9Mrl4WZ8pcZ69O+4yTvXHtq5CUrUuo2DTYibUDyqMZ9DRUFDPpkoYWLROyzQxR5M7bw3vusI4xFi4Dx9R3aTGX114uB+0Fe44bal582WT98ZW91d5eGg78o7v18sjen9ZfujmAoO3qlKl5z7P9HX7xi89d3iCYn9aNFv2MVgmPWtrKyUl/pwi+0YwoSEyNISmMa46XbxwfHqY0upBv7YwO7XxzMuTC+GmK7crIUGoSb7qEd2O9MSogC3UM6mhzOy+VOMC0mh5N9zlRAlcFECUmalLThLBsIyInpPvzlctxM7M5WUByeUbOCEArQlozEpHNICNgpejIEGadBkUFYq+RFAAxaXCUq5CAGB6QCiolBK/io9gECjCvghK0nSogZ1QMejG2UGSzuXReyl6PkDhK03RQEKXAD+DuD6SipG2XNIJzBCKZcwwSOh/a6MAyoQMSFVC0aKKebUMk+dyZjDS0nQiVFoGEMEVAHyKGIG1DjMwUQQ2QeuwqAdGmESLDmEqymaFGgzXJomQ5WqNE2rU+AnaAxmIUiKiFRVANKaw0goA1lkFRUUABwWTMVoKvMmvrZrlYLYQUyYQqvvLaS29efzVjoyKoYhwmRnGJENVjhiYTjilZC5uF2cnc1cs721d2mDMy6frJm88///JunZlp18/E3jm5inbQpFxxu+hvjMplt7BEdVG+vh/99qMnj19+x3/6fY+86+nLpv/anK7/y3/cM7Fazvp9Fzf6k0sudquDv/8z/ZVUJ7N+oqqL9tELfYlLQ0VvvDhfNieNYwxsPvoX/5s3LveXn/ns1/77v/XuwXBoqNcc3v27/+346cc8DaYneHQUU9R+nl0Y9OIypTdupn/4j+Xzv7nc3nz8P/mT/+Uf/kOHn/j4X/vrP+1Y5vdPqpMVdSGDlDlDynWoq6YLKVUhNT4FxYxNisEyrVm4CoJIpGAVSrLgVayyIWNNVuYWYkBogg9EkJFYxk7rEApEVMkzE0RLx4a1CJAiAqMSdIJqTAAYloVmcLKcK9PSxwSRiQ2iURqYPHbRsIsxOY99Nky0rLy1uLvhioy9p0K1q1pNmFkGUSJY1W0SyS05TQkkIRlm3/megQtbRZZp3TTQhp7JLm5s9IueSLp/tH//6H60CRDJ0QPbzxllSNZx/bOCsvXrKiJn+TA8oz+uG0cekIvWro11U81ZY43KAxMRwANX91tQawRVJgQAQhIVQqQouTXf//2/9we//yPYL+/femn/lc+Nnn9+Eyp/4ybmVIMqgknQze7Wd66Pzl3MCGlV6d1vL15/0U4XfZE6Ku1sFW97z6zc7pYzaJrtnV2pZ1j0R+NzJ9dfc4+9DQwm15HJM+dUJTSdQDQ9N3Cd7r8RF9PRYOxF5q9/Ax//SOxD1awMcV4WwKpJAcuutdnlrXPnN5p7p9zM73/uM9J4I9ShZVkWvbxdLnOHnTM+ghKqsw2ACcGdHKsM3WgztQ148cu7YXYvgEfKN5KXEK3NYmizrpHbdcHWWBKwbVuzc9Er9obZ+V24cqUuJ85lFDvfVNzbyTf3euXI+BDz4XjQO/l3/wDDdGjl4LnPl1tllXi4XBFB6Gu+NTS9flVNDeUxQbO/0O3r3c5GtflQsXvfHuzT179tTYrkU288fO97Iy07s2X6w35sxgIxRKuIhWQ5UYYl2RUD1kW3aEPeu103bYepV7LgtVdecwcHvd2dnZ2d1KRX791dLJffevnadNV1vu06NdYqqKZEhvnsoIkKSmcV3ZREU5JIQKCGCZTiWdMZogqtm27OzgOAqgDISPrgAMoPZKP1o/utljXG9eZ6bYRTWTOyH2CtB4OBeqUsQ8NBEzGn2CESIdsiWy58UPEsJitURX1KXkPwIonAFJmxRgxDEMyHhbHK0Q7L0lkue9mbN6bLo6MklhxjoxubuUqClLIUR/1cVU+WnkOTE0JKzmRtaCyyDxGZCZEI1+e9xgebWUTInT2/Xfb6kKAbjXtigJkVUTCpgddv3GuCFANrULsYOLfWarFhXB4yB+f3Lo/6I4N9lWI07jvKLPYBsMikk7lJIaZpjJ3B1cOXtp77XBhvjkdbk36ZK+pqXrVNIqIQ0uncAzGipOS3huVkQMPRgGL7+ld+
+yNvf+L9Fx/+9gvPjfvFaNIfluSjZMZ6xC6leesByUQxKBYpADR1nNfyjf1pvZYOtVEBTWqYjTO+8Wqk7OcybWC/R1q2odqYDCa9UYJicVp3HTy6d6Fq0927N8psGIb0gfe+7frrz9er5eNXJ6+8OR1vlGUhj57fefXaHXXYHznO83e/7+lf++wvbm8Obh7N54vF8SwkDf3CnNyIj165evHhp6a3l+G4i3X1+N5uczLtFqeCvhhlSqEOiy03WHarWeou9MfOFYlW73zf+7/8lW/tXNnBkbt96z54o8hMuXRCYCUQxDTTJjTIbApnd8Z5N5u+evvGyA2kA1f2bt1/c7Iz1CQHsyro4ni6XCyb0LZt7TEvBtsbnaOb9052BuXVR/e+8uIbeT9/+87FL710vQowGtivv/oV6zsoUJnGjjqtYs0yMCfLw+miiyiF06qttsYT6wN0YaPoBWmrk+78ZOtwNjtzFa2xYHDGDhOABOtD6dqRI6ogSQBQU1JBBRQ8YwZBUmQ8S3Xpg3O3Aogk1bOMED6wd6xjZg/60ddoInlL9T0TeJDWRDtJQEBrLo2KIVbk9UMjidIao7Ruz1wPH86oxuu/Zz6UtVCzHn6cyQpn04z1r6oxRjIMQEDCRpLIoJdlDJMBK5WXIzwibkczXnWiyyihIOb7y4SWcpfIO+m4E0YLXCCIAqbkmbP1CplSojMo/5qmRsgsZLt+ke1drHMrOxNXDOLr1zl17sK52f2ZzZCd7RaV1ZGGNtSLbGOPyx0FB4mAiZA1giSFzJzl0TDKepbDLEkgRgiR2Sl2SozMKURKCVPA2GIk0UgCoBEQAQViBAFzcFwsTrIbN0f9gW+WzXTarwJqRkpE4Hq5aFA1petrEySopVwZwTlhU414Okj16WKkTr1aq7x8qTeZPDoerSahma9SbB1ACoFjXAs+iJTWyKu1XARI6z2G6vqzWtOdVIUIYT1oJlpLKUC0DhKqKqAymbVVSejBsnLWlXEmCcE6fEbrpeMMfgfrrvr/gGzdd6YwVBqTs8nZEdogHQKu0eAqojH4EIGh8v5kvpwvalCTA6iqNaygRESIORIDWCQFckQIyAgCjIg+BCIKQRAJUInEMIJqogQkaY0tUkUREkSJCYSYJSVGXncRonGqCoKIZAgt8/pTkth6BRLLxJGMiKYQNUFKKimRoSiV4WDYERrfRLSm6romRaNxoPETP/CJT/6hvzLYvqIefEhbFx+/e7B6eFBs5O7g1mm2uxmLyfZoc/bF37IQcgd9Y5af/vrJL3y26BcT161+68t+rMWVzeF3frh4+zNVlwBdBn3rckmxGBplI/Ww94EfqL/4i90r+zZtzG+dVvP57rnd1RD6jz5lBpty51VzP8S2KUy/X8DqYFndjY5zAmRNjJSD7fsUju6a/VlD0fdHlDt7fuvydzx90N7XrbhzaaN4+FxYdoeHt3rgNFK1ODQZG+fu71/bPf9oWBy7w9Xdf/5a+/p8Q83bH5289OKrr37r2t5Du9ObXxvnk9/1zLP7r11/fenLlkx7Mmv3t5+5ePHqu+vV5vbeMw8/fPn6G1+crVbbF7eMcwA+UYAQQ2o0NOo7Q2hMhj5Ya5msKBOyRUTR1LYpihjfQUsiSJGhQ2ma1SmhQPSIsDaDOTaq4lhRISODbEWDUYCUUJMCCooRUQkCYthKJ1EgRUgU1AgWapiUACQx0TpM3ENKkCyajlXReLAizlmbM6ZYmbVxEAAA6tZXdXCCm6Mh+IgkL16/laKOJ9vPXOkd1as8MVYAOXHrZpX3Vdvf6VOdpG6VpbZCAURwtWrVcEZsjF0uVquqS16BwIiuVs396MvMbWWjalZRRopcjt20uYcgJrMCLIqObehWZV4qovcJujh2JWnol72Y9bz4LM/C8tTXnWPjmVVBUlpWzXzVsSUGLm0hXlqsBoO8TxlgFAvnL14YDslCU/mmyPCpx3evv3ytzIsLW9sjY0uxV84XniStfPQRvc8KrqdzE2HQz8bjbHpz2ofej/7477/y4Q8eXd+/9/xLx6+9alZ+nu3Lm6/tyC508/jSNX87VBE5KImYnoOuziSBj3kfy7FZdStjRUIdQm2cUwQRiD7GShCd62cQwXgCL8lpCkrEvKyGK99MbyfWEQ2wSu2bx8qGbK9brnAukQfFRz9Uxopuv/bm6y/t/q7f1Xtigz/+Pa/983/zrC6Pf+NrfiWTPh7fr8PeRsqkY1uWpqpbjREgUNsakG66OLxzMNwZXHh6Aia1s6jLCrSwISFSU7fNrGNQVQhejua1ZTXKlNxxtZpDnDhrM8PgolcVKDJSBKZEqoUzY8RVE4ixSdKJBaDMZg4sWSsGiZC8N2unq3hmjikyo2FmJlBmFGYUscK9kBpkBYkCEEQ6NDXzPPhlilVK1rIyL2ZLR05QBgZMr9c2bfS+CRKTDFxeKrCDpgOITOg6L77tbG7/I6kotB4N9LM8+NT6xualAmrAzidEViAhAY5sOc+1Ew0AGdpEjgVFJQb0QdUpm5haSaqZsW2zMpKccwkSEMSoTGa18j3L6/Ydr925HYfOTatkHKY6+RYiAwWpmiAKxnBWZs6ggDgIBGrQxCQJNEjrkRND4RyaZK01CE1zKmIb36QQvI/JWRBYda01xiBlWcbGqE9gsD8ouk4UMMsyYBo47veKZx66sMkDq2F2NDVdfefNNw6ruOvb0f2w1WgfsyfIdHWNXXSuh3W9bOr+oJyMBl9YzDc++V17f/y/fe92f6gyiMlM772naBtovdH9oNdm3evTbjvC2yZUrkI3D9KCzZ1hd3Rtlgwehvb4XHPp+95D37wjr929NOFs/to2f8B++EP+T/xk+lf/sgw0YnP8z15S83zU7rDzLm2ljbKR1XO37547757+Pd/77d/69Q9Lf/rrb77ymbsXf/w7L/3OH/mv/+efqV56OY/3X/j8F1+5/tqd1271Ot8AJQXI3KpuVyF2gIoYRYIgG0OqSSAy6hl0E31iNOyBTGlTL4NB32KaaJEySMtZ7aOiiCYmIcBBP88ys7bkBx/jWhUh8VHIUGYM+qCglW8SUIgpSGKHFtQImCRVVffL0bSuaw1BwAOxcEaSczcZmB7qTDEZG5JIikEld0oGOwjiKKUYI2a5JQJJyqjnxhsoenS6yk3foN3Z2j534aIhDJL2j+9V0iTB5LvJ5uYDqUj/vWJ0tos+O70LYFyP9taBf1xzIc+8IGsupZ4N+t7qu1ckRHmAlXkgGMF67AzAdOZ1RUQABgRypFB++BN/yE1OpLTnnnzq4qWN6b2j7O4hJ0grlSjW6GbP5PMj+covy85elzQc3C+7dhM1eR8yax557MJ3fa/v7TaRzHCzKAfUTNvjWah8fedeN75gpABXQpYBKEInqsYwphQW0/jGt3q33jC1N8SASV//6uaVD7ajfuAYfLt2Q4j3kIInxHys/VxG23h04o7nZd4YlqoWOT7lblj2i0px8uxHN3NTx5aJgg9xelTVy8HQa4gkASRYxykvS7tNk715bzU+165ee8l4ztGI95xxaFfI3Nu+kEL
tQNpW3WQLLz/qMYGIy0u21hBrCOHwINx7XXOJp8scM6c5zqfnS3f0ud8ebWyb2Zt5LsVDV3TjfBtNvnPJuk/oN2+vbl6DF18c9Hqjczub7//Q4jd+2c0xNQoms8FXX/tCEbnYe+fq4HU5uTEMp5iB+kRsC0et+qKAva1eq/3jQzkuL/7O3/OjJ6H5zG98fnn35gbi7N7hptr5vbv3DlfX754ulxWyDUFFmQ2s9+hMTGtbAmNKgqCsaEQe9OBhFKAHpjaHKIgiayAprAsy11epSFpzjgBREeN6eKtnP7TOv6yvNxVeH9PgDJ+aQN+6Sl2WF6MSDC2XS0ucGQsogjGqdGGpRkKMARpnDUPgjBRsDASaHAVpvLSrrBwXxdj2irLojTZ2RoMJWezqeDr7xuYGzVbgkUrHZF3U1lprbeZjY0GcE6Mmqh7Mj4usX1q11lYtntStLXrOcvCNNdovre3bwuYjiWiEcgTHNrdkrHOFonjrNQu37xxFxaAtgKjVxnf9jWxnwufHw83J+bKcUCwMZ1mfrEXfedVKNRjNRGvxK8JE0iCEF37j3vS+7J3bHU7GNivapsVZS2jatq1WnSL6mIjAZjYqz6dVrNqy8t9cfe28yf/En/3j//Rn/uc7x0eZsigwog8RrcmLAgEclzHWRsO5rY395bxtWsiKFTVJkgXRqCkkk5uib3bHw9P9U1PyxiBv5lU9jeQSIO9t9SN3gcwTT11ZLNp2FqRKg2Ryh/1x/+XXXl61FYr66Wwrt9CZGPWVmyehbc9vjpPi3VtHs9Pq3ObGrJpfuLBxGFJX16mOqq6/d/7gGtrT5uXbL7LrP3Jli+bNol6MeDAq8qycLGfTQTk4PljePm4nxV6/N5YYqmp57fbB7s6uN23lp4e3rheo58f9DNUBG2sdFnU3S20sbb8L2qSmtDIqBk8+9NBs0eyfHm7hTiYxDeh0fqCYzu0OD26/6SHtbpV81C0jnlbHF/K9wVa+mq2ef/11KSTB6oVXX5ovZBWsM7w30oWvw6orMqvBquFa+bHHHl7Wd+u2Wy3nw3IwHPVMn0LlN/u9ENPJyaqX4/7tE5ufcevCOl62jvLg2dogayq8AuiabP0flF2u5VokEFFCkfVdqA/UHgJARVIRBkTANclozRJSWBdsnom8+kAXggcnYIC1XYYQ3nIQIjGjChHiWVk6ghKw8hkR78xiAmemojXl+gx2tDaSKACSARFFOFOk16WKzMwGMRZWHrq0ceXiYzu94fnN3Vgtm8SbXTc4PCJNmSsxJlKW1jMbFoGuNuuWLTbAIBrVGEA1xCFEYguGiA2kQOvglSQUBuKajfuuD/uNjdmd/b5P1Jz2LLIzbRC++njc3FHn5HDWNI1a7W9vdV3ArkVEgEQKJChAaMya8QQSiZQMSVLQBADIjMUIVahbaRBUp9gDC5gJ6CmEigkhJACOkgwZkABJIambCpwcCh3mxOX6aetsUHHGdZKoP+kk62WT2N3DGJyxyWEnpFvnNn/4B/H8cPDqS8e/+mk5mnYSjrsg59sr/c20U+7n2b3DJSt3bZ05D6HTJKrI6/ETAq/HBYiyZj6DMjIirMGCZ1QpgXXzHQKiYhJdy0Pr1CSsxw2IKrJG0UVJzLROudHaf6YPSlfXoyw8w57j2Q4FBmXWNyYzNHAZIxISoau7EBEQkoUuQjSgVScH0+XJqgFgRhIRJjRMjGiJGNAqMhAKkSFCzIhFBBAQ2LATFYtJFEmjRmAAgTXPC97S9TgZSApklSCBSSqKwKwAQMSEaADNg6ydruVDlCSpCcFQBIySPHiPgjFEJFIUlCTqgxd2jp0JiogwztjUx09cfPL3/9ifNhsX6yZZx2Ynu/SjnyiPP79145s3vv7yo3/w4+Zd73ejndMXX7LPv5Gf7Ff3j04PVzQHWnQn2KSSZvlq9P2XLv/pP3Iq/ZT3mSUr+xIJSjKWoq6degZgx9f99vp1iLiBvXEvT5USW7i9mE9v+ddmvUSbvf7JvXouK2MlJ0uOgwLkuck4dZoayN3E1i41C39/CcOiXkw3PtEvHt82PW1vvSini/zy5Z0f+xHXzafPf0u8KbbyxXy+sXO1PP/E8sX97tppOV21t+8edOCnh3mvb1xzZ3/pl2lOh5ff/dh/+v3v+bkvvPqN/VOXZLDpzF3/yCNXR4+8a9bf3nvm2SgrfzfYwST5hLEm9E01TxJC9CnFGEREMpcJIooLXlExBc8JVNhXS+MsGUuEQDH6laTGUAhRNEYkRGeBGJmtyZkiqTIZQ5Q0WVDBAJiFdVqTIUFASIIJmQJSK1gApyTJC3EwjgGEkRIZRdYoI6SkGlm8ppYwZLmCIQOOvE9A6SyAZp3LwI4NDsos7w0S5VUV6ra7fnLSpHp7exJDvH7v9HJxbmDcrG5KhQ20s+ChbYR12B8vxINPEJOzubXu3skyz814nLuuvT1bbfbKVRXmbUxgSl1oTP1spE4j1F3oDLEElZhEk0A0pVWwJ4u6z9kk51Fh2uAPT++XrkgqLuOBy0pnVyGlXmaIV6u2S5pZHFgGxM1hj5tuVUnmXNXGpkuulzs03FVdWGzujqPEk8VqZzQwCUeEmeBy7p01uYMsN0zdYDJQkLJXuN7m62/eytRdeOyh7/7RP7CzszU/OqZ7t91+deXcky+/+sLGM4+avUvtwR1681sbLZf5LkWZXOy3Gpf5hhwcmEFjyS19FwxqwNDEXl643VEShFYMM3SVK3ONIFEEE40GChQtkKhGxU6TBzLjiK03Wc9h7tBr1yyOe2+s+OCIeFjrSgqh4/biIr3xv/zNd/5X/7fi8u7mT3y8XZ4+894Pfu6n//neuNw5t/ntqqqLyUykw9TLs9AhI0su0i7nobKjLNvGYP1isSKvvdivZ0uEoCqmAOcgNkm8B4NgjEMwUTB0dZKAgLkxGWEMPculzRyZqEJ+VZYuq5OfSiBsYrC5WXnxNikHY8lQLgjIYJyFsMYaopcoSRCtgsZUIaEDcAIpAVJnmQAEKfmkYq0nd+rTQqGK0qpCF4W6fJD1y9Kvpg5kpyxmAqvWCwKhPrlh3//Qzv5s9fKd5Z3jGHKXFEA8MP1HUpEByBkHDIsmYUKISZIGn8gYURJMAZPJ1JigoJkhUI0pkaDvvLHgnEmCXStdEyMqESRsIQkwtKuQxKaoPiVnDQQ0ubGiXacBoKtSgTS0udfURQod9nIbgxhxFki9WMOESUE0JQkihkA5Rg+QElpLRhIN8kxSRNAmBd8E7ztrDDoOikkVgIgtkHbeT4aFRdc2bVo0qU6RkjGwNRo8/e7Hxxt77f7RmweLc7sFDEezN49GRTm9djKppm+v8+1VxVr2oo5R+pt5wK7x0lSreraM+zgqbbd/8O7x8PVf/Jnrf/dvbsTaBid3uw2A6Qoe+33Pht/zA8Pt3dlnnjv9P38OVg1sj/fe8fjd566Z2E7Gw1Wih378D1fPvPv06Yfgldv3/6e/8cjhnc//lf/3o3/+9/ee/c6PfvQdr3/2Fwen6ZGLu73bFYdmdrLoXd1711/8a//i1/7R/c9/Ka
m9+hPfU773e6+8+2MbLz+vb36me+Nu/Of/Mr3y5uPvfOxzP/cbscy+673b/8V/98e/+GuvXVs0/+7zX3r1xrd2gFuMkCuRdiEpADOGFBwba9EYBtakQoRd8qJUlggWwKEy5GhHrkDRrguhWSgzoozHJSNLF1OQFNcTK2Nsth7CFpwj+qggGJsYEwgaTJLKzAmJJB9F2picoGqDqBnbpCm0Asb2TNZzaKk4WlYLHyBDgwgQCWE4tOUwP6xDSFA4u2o9qVqEXs59YwtooRW/6jjPjSt2z18JyQ0S3T8+nC2PXU6RKSsGwmd3wQOjkK7ZDfDAsL8uNaa14RmU3jo6PAiSrUUi/PcUgHVpLcoDV9KDeqqzRAMC0BpKqmsUpQAxAhLBe59953CIApGatuDYHu1vl7ljVAPLNoLlJNJV0Xqg5h7fv09APWMwRi64cdiOh3sf+U7ZvJjUWgskeZb1TNji0xOzf4PbqhjZjQs7rZHOR0blRCkpo9pUl4s7evAmd23vwkUf0HI7XB0efO1ny3d/ZxgPYmZDV4lvutk0Ry43N7uuSymro2a9SZZtyMm+xkXPUjHondZVNt6inYvBDrHICueMy0LwcHCjnL1SUAXBLmaHSEWWj6k/STHF1XEpbfRVf6PXTZNPCOO8w86UToJ4aCz5XkYOu+borh5sd6Nhh4YNQcIQAjerYTM9feGz1N23krtUSJIuKsc4CUnv3RRd0pjcYCMUY2tsSP5YcPyBD5zUR2P0u9XN5TdermanWUOApa+VINoBmyrAy1+DW3fsbOqX8yx4NgDKIXiSVKBK47t9r1lV5uWjb/+gufjsMJrJ733iM//s/zfbv1YOyr1Hdq+9/Nr+4cm88gmMrtNlCAQoKqRqCEETAzCwkCCSCChgEgUEIowCaZ1URjCsqhAepMnWkNozpxryeiitoPygh+ms3QgenBXP+rXPPAxrG1zQt3wOAACEJnYKPua2OOtqkiQIUTQkjwVaon5ZkjEpiCRtfbClSuhAwubk0rB3sd87Z8pdSwaQjMkISVCYq3c887C2sxe/OaWyLHKct3WIQQxIEAQSDTnHINIEtNnAGJtCCCkRweagR4YNpt7WhnN2Pptblb7Gi+c2t3c3uQfRJDUWjEnQKYSQFnf3T9tVJEOcabeqijzfHuLVq8W5zYGRzJlMBHNLWRYlzQGpKGzSBKxNM+uaFQHmJZ7e6W68cjyb9nb7V8e9zRhjCCCRyqJQ37XJqWMVMQSQdFQ6CSIJIiKIrI5Wv/25rx+cNG//4MerL32uOTnqD4mA61mrbKO4HEEhOcv9zJwu5wza72dvHiyYE0pCVVRgJmPZku1C3Br3VXya+Y3esG19age56x/dDtPDaT7aqg+ni+nCCPX72aOP7w4Hg2s3j05WXI53Lu1trJazhMXB8cz2B9f27xT9kesVB8dtIhuQt7d3ujAHw6vFatRzi0U9GPRO5vXsuN4d7lCvECLMM0aamJ3IhhXn1WxaVatWMiq7kC5e3BOK+6ezqsOed9WqNVnjCssON8dlrzSt7zyzF0m5HC9Oh3Zjoz+xPk32+sPt3moRq6axhev73mC8qX5x8971rqkydmRjIBxd2CEXdnubr3/pxs7ucLp/OOqpstY1VkH724OwWg7KvK2b0aZ7+G2bL728fPjy5R7hGy/f71Ss1Rt3r0etyl6+ZXLSUNe+iTFoalfVaNA3LhWDfu3TaJiv7wJZj7POlP237hdd94utOT+AmGQ9ClCgs5dpfZMRPqALCT/oI1+vEfBW+GsNoj67+dZODjzDzKiepYDeWlXWVU0guNaMUc0DIyACJRVg0pgIgRhFFFGZGXBNxVZCEDljHZOuQcoIoKK4jtABkigmiAhoGEG7vVH/2Uce/tg73nvx/DvTYrU6nZl8LIbc8YmNjc3aVpc2h+jVraH4KakkAhRaB1oRQCX69crGZFAVEiokOvuwkIhBDHRt3Ju0k6KZzxzzZNKXe0cRqREZXNmrG3WUkp/bIWaDvDmdJ1sHyCPMC7epISS/cFmR1BpmRQFiIMa05iaefYzERm0CMSSFsAAyDqy0uSgYBQgK2gElgITkoiSKkZKACIABZZKEhMqgSaQLbDm2ngWIE5lc65VNQVMI6z29s7h5DrefoMFo5+176eXbq5tfRqlHhhZNzV37yOD8D/zeH9XBM13sf/FLX/r65/9x091hVnrAp8YH/1TTuv2C2YomQCQgWH9bRMqIBOvSuvVk4WxHoqLrq1dBIyiKkCITrdOHzHqG0iZAWTMS19ytNcTqLUsRAOTMg8LmbA2iMwaAJAmCYRK0lLyXFGPELsC8EoasZyIICjDQWUtDTkyqDtAwAxkhJiTHBknXvzIjqqR1xwdFRRFQ9TFFELJ2rbqaxBkYy4ZtFi1HoggK6kECpAiqqtEYy4gIaGyhaFATqo8xAYD3IaWWUJPvGA0C+hAgqEEVChpTs0o5ZC6zuV88sjd44h0f/sgn/x/oLnTLUPQzZIkp1dVi87FnVjevbX/f95y89x2N5MNrbwwPb09c/+TF+/fe2I9Ztmy6fr9wJQfLLRbnJudCwqpaxHqZIadUZZqHKpUbG5pQuuAXdZ73xm9/Er9+I2+K5VG7WC4Hk9HsYD93LCm65EhtUykDA5jgOyHTtgFyG32IIWZC1timDSElAaGMc4cpzl75uX/a+4PfN/rAU/PDHFUL2Zv7i47UX9jkoKNNF/Zv5rmDlPpbw80LF40232xvwizv2Ww5m44GNHrbldmbx2HePP+Zr+5d2Pm//yc/8etfeXNBszfhtY/94O/u9d7edsV4nD36+CVs37f64v0lhp411fFpXU1T18XURR8hcQqCyYKQA5KkkpKqqKQQvBA4xBDJZVZQgKKGVjRIjCBo2BE7JUc2Q5MltMRACAQBNTBFQI/QEjCCDSoCgUFU1QBBiggAaJIgoEIEUJNAmFA0IYghg2xQEhJ16zZeBAElawjFKipiSA9Cn4KDXq+Z3ndF3nkxJXcsULiU2dz055VfStre3B268s7NA2kNGTw4XBRZbooiKC7bRkT6ZZE5u6yDBE9kmw7YlIKUmyAhelDI3JL1sc1BRuqrtusSTgosTAhx0is7Ta2vWgmqEJKv0YP3/aI8qJZehDLyIL1er2kaARThGKJX3+/lg36RYhjm+WRQNm23OlkVzlo7WFQVImYmG/b6q1rGE9dJr227Mu9xqLeyclXL6UnYHljOXGHQEjrAje1B3svunC7vVTCfHgqbzd7g4Xe9Lx9u+dlyeeOVTGpG98b9qb80Hl7Zo3hUhpPlfDbZfmbeUHnl0t12NggnI6lDaoxzaEuT9wUpZTn3bRsqqhaFIPqIYChzCAKm1DZEH1CiBSFDlABVgYHzIolXJiq2ZNHdn83aC5DvSF98Ocqru4uwwDS0BrPx5qVLx9M3f+pn9n7k4+OnHrsPvZtFeuy/+ytHn//89Po3nj2fvaJdY8oItkPKcwJtlnU9Xd478Mfbj2zxVm16QCk7qLtVU4w3J4cnN8A47mUtLcGgMwwGMssbzvmmjSllRJVgFaPJX
U6Rje1AV21tMmOIQtTGh41hr2jbKvhRLzchVW2YdYol9W1AIBVMqgwUY1RNpFSYXCUCIjGtw+pd0iAKGp11jIgURaXBdKzxbtMSRFuaZtWElGbLdmPUh6bZGpTLpq5mXQzSNsLOMphlTd949eDS7uCx3e3l/O4MUu1jjgmB/yOpSAMmER+D+KSqzrHGFGslYgSKEoNIwUQM0Uck03TKSH1rnQmjHGsfLNi2BsC8jRFZM4sEIEETaEIkA0TI1owyY2JkZUqCYKfHkmtKRqerjiKnaJoK2WK/P2LS1ofFXEF1NHKgESHH5EAFIzERAaskIFV1Itq2jSI4lznjBABDJIWgIS8MEmMKPurqdFVkVmJQkL3t8c7FjUd3rzyU4rabfP2Lb5jp7eb6nB7bWVV0tF8/vbXXHs/HnP3Q3/iVa3/zJ6uDu9kqQIztssMyl9AxUkLUkIbLMPvMZ7/xHY8/RPlg4aJyHdPI5uOL5a2jw7e97/c12x9YAY6e3v2q+wV3oX/1P//ji2cfp9AMx/jaZ7/60O7F/s7u1qyhO/e39yYf+Ut/9PCv/e3HQj/87BeW/+AbBwe3Jgb7Lrv21TfF5Rcf7rO68WOP/PoL1/Y+9t1088XZy7PnfupvvftH3rz0wXf9wt0vzMs7v+OT3zU7XN76335p5zw/WW7fO+mufeZTL/3tf3jhHe/Y3bzy1//UX/7C7V/7W3/3H9XTmIu2SVUVmVSEjJIFZGHLCkLOIFITBAwFUL82wikAGwI76m00YDpwUwkJonPGKIpQ3QUNZGxGKCjg0HhJq5A8SMIEiMhEzAHWcRlmV2hSazGCGs581BQxJNVoB+QM5ZPe0Et7sqjbgAIGgTpKKUMGSCneP1mRddu9DFY+y8phYX0ImzkT6qKpMnLjvA/Ek/HEZMV4OOhC/fqdW7VvwUJRuCCUHszQ1nkBorMm4rXhYg2ZXBOped1iywgPjuJrksW6hWa9zVrP9QTOgkFnJElam5XOfEhrUDGfwWYAgciQSqQoVy+f2xgXRsD4iCdHcH/fnxyBJkkpIwgqQqgiOSmhoveK0ER1Los+hbLMrjypw70moGOwNjfOdN2iBocXHtODmxlLZrzrqhQlK/IMAKP3yyk2s+r1b8Sju6OAMcRpWvjRZHTp0lbdr1/f7174peHbng3nHpEU2Lfdya28q1FCGTxwb0yDpIbtULMyp9SdnNbLhen1DWCd1HNbiM2zvOma2nfDvUta5LNvf7PIOzbjlOcnTSwTQuNNvcpSoMYzJN7oddkAL5wzF8/7zuNsnhZ1fePbzGK7iPsH4p/bePSpdHmiLceYRLtw943q3hu9voPUo4TDKzv3l6uso7CoXawsiVJKTZLje66YeJvXpLS7TYP3DpopvPDbd1+4XrgcNK/mnWZqnclJk3jTquyf2Psz9OBiEoTUAZMKpapTkwDzHEg0shEYpND5pTHZ7vbWj/3En/n5f/Z37kc4KR965pPv/8KLfzOqiqgCWCbDGHwky2vUBJGxBEkA0QBAIjQkkAQVCSApKGIEMIhEKAJMuEYOCj7wCbxFyAI1iIbQAdE6kIC07i8iJD3rxHnAVD+DcuEDxhEAgLMmtpLnRZMaEQUADYkT5GyS45ZTkbthL0OyxYZTgKaJSWMdE/J40n/nwGwbAka2YIxxbDJFmi2msanjvLl3cy4p+dWi87Lq2i75QeGCRIHoGM+Ni/v1ylPZAFpjiWjZeEAqnUGk6Luq7lISZ51Rg43tF1vkSoGuyAub51F8myo0viJ98fk73Up7uekXeX+yOZj0t7cH4z45ZqQCLec5UjpREoh12waTGAS6LnZdHTo4mWfXvnXYnJots3euGE3KiXHGuMJk2eG9w5BoXvlZ3cj660F2Fppl1y+zja1iPOltjHoaGGP3pS9/5bH3vv17fuKHv/Fbnz+9c43bejS0CW3ViW/bzDrInFrOMmiaeHdW3ZoHr8igSlA4S6rojCWsq1WOblLaUVkendRX9i489ehHV745Ojqaizk9DPe1ThKf3Ni42M9is5ouFiboatY6Ka4194uc6rZmC5JgmPWAgx2WLvHstMs4Pfu+tz3/zfbocE5VM/WoGc78qjcpv/uTH/u/fvWLF/Mhhnh4ezogHk3KctivV8vpcnH3+GTcO+eb9srVSxcuTCpZdR0Mz+/OlqnDsFH0X3nldYi00S991Y42B8uuXcTKusb298gOT0IgUwzM9unKp4TkOO8bBTye3mxpeuHqVjjq6iW2qVsInB53oxIf27TPvvfq/bsntw/9tzrZ2sjaVUoeo9f5ivOSH7u6LTE+/+LNvLd9GHJOMfb72yPjMnN8/zTEFCkAu0Z4kYAa3NzdWFa1gu5s9FSxSno6rc+kIknrfA4ipnU/FKICreUWeQClBgQlSkkfnLXXcWM9GzAAMJI88OshADESoAislwFRoXU4bB05AnzgZdKzPnRkZCZDyESEDEIIRICYeE22IacihAjK8YyWJ8hEqEC6RjWvzUqAZ11swKSK6zYpREjrovT1QwCJiQCbC5s7v+93/Og7n3q303R0/yTOZqN+WRAvVkcF07i/UZ8eZOdGkmcoIPduy2qliJBxSEIEuG7wVGAEgwAiqnImkpECEgiqAFjCvIjG6VNPpMsPm1dv9iTq7IRCS4n7xXZaLHpKcHvuKPgoTVFmWxdny9aNCjfa9qdLp8p5CUgMCBIBEtoMkQERk9LaqBkCWgYlFVGfKC+kOwvnAVpBqx7ZNWAraJYUQQ2rxsRrTKFQCqiamIMkssyWE6immIuBVUu2jglBSY0jVO0CK8B86u7dzGWMZe9tH/vEbTtzZdxf1iTRBXvhiXdevvT2yoxbyTe/57vb6UvPf/OEwEuC/0D2RwFBpLQmkoPg2Ve4flwDESYFECXiNRILcB2DiGuNkwBVNElSxjWAal3aqgBpjcZeJ77WFXgPjNQASkQiZzuijWGvIM6Ns+qcs2sx0IIHZOXkqYukCnJ/VpFiBsQgxiAoCTETsIJDdIiEhhmEDRoLbFCQGBEwqQoQSErRo6qFFMJZiaeQChIxqaBJKbO5gdzZPmRODSsqYZD1NF9Tip4JLTORMdYxOokBAYE4piApqWLwCUWjBEmq65kKgTeycfHR93zsRx4q7Vf+7T+Vaf3RD37g47//Ly9WG2DL0uXoWLqYrSp9/te/+U/+P3u72zsf/8H26iMby2qj3teX3vjSz34zSTlNLEG73C57cGGI1cqHqrv5r5630O198HF7eVDHivkinNo82QReii0al7JZTpeVqWK7f2DjhrM5+Bim3QZn7dGKCI1SW3U44a2Hto6PFoPeKDZtNZ/7VYeIgNyJr10qN4eLejkYZF5ClwlnXJ54/+tfK8aA2SQMdqNe6pcXY9UUUWG5XCyPxA5P5snpkuqwCg1OJo//6B947e/9m+XdkxyN994fHI/JJ+OBbXtSf/vTn/7Q+5859xN/7iU779OgWfR2toaJo8ZwbvMyrWRcptLAbDGH0KyWdd3UqAwCqUso4IMkgRTBdzGllNZTJgSjEDJOMVhMxqGm2MVg
VBw5VYdYEJeAGZFDNoZAAQwLQUjaEWlSRgWRuJ6ZIapBQgVKgUEzo6CmC8DrkkcNypixAgEwAhkDTJoMOysGJQKloKoqKUSQ9BbcfT6vNZjL4+2DOg7cIFRNAuyV5WgHUvIJpFlU2gRcLIY2O+0Qo0vICWOibBHkKESNqZdpTrCz2T/XH907Xe7sbs8Wq972+DHavXP3dlp6yTJJCZbd2I7vr+pOsK82UZhBYI2oBNyro3YxMECP4PxGr5o1Kx8W0ZdlJk3XT1IgOWZXmM2+TcB9STaKtUVKcu/OiTHkOwHpHTbtUdtMBm6c82L/uCjL/qCPIW1sjqcnyyLLRmgnfe5ZU6LmWbHVy3vDYdYfffva6986mh8eVdUqqqP3f+DJ93/07dwmM3QZ2hqWz37PB25+/vT4+P67/vifPPnsZ+GFby9u7Lsum69O4miUntnr8+XVz/0Tu1gVAdkW0p4MU6CsSGxo81ybiNw4NYCLAzYhjXqxRVM3BEnKvhpO9SmK2NajEjFLFJAWg/qZts6G3/vR+SeeuH/rWvGLv7W3WBqTuwu9NjRdG2LdjW0Z785O//6vZk895556/HQwujkoe9//8fwLTVa9sdWZqgGPGDQS14tm2mptSzMcD4Pj5KksVJkpSlMt6kyc4dOo82UXnFISp+AMrOlznm0dNAEGUIS0aGYPbZR5YZedYkFgoVmJr4IPvhAKsbJsDGgEbIgGxrZCVAdmQibRhEpEGgULw2v3BkSlKIapEy+KoMQm64QkUAKqAe92eqyxRU4+SsSqVYribO4jOMbt3mTEg6k2mXrnqCWKCaLQ7cpVdzXPl089vBmBX7l3HIPGB3D3M6kIhIBc26L3SUkydj50BjmICggjk4BLZFFBoW5JPKEaUnEWFbmNoapiCiQSMEUwoGqILaZgLUeNuTUFskYI4ns9J0lCGzkgJGyaaEvkGCgBk1FVSIIp+TakAAKMZKezmBvOkVQFUkJVAkTUJMLWMlEbEzKpgBHOkaareVkaUXBEXhIpUkjiA2dGyYz2Lg15y5Bwpe/of+jRO9987dOf+6N/8o8efPNXPntt3r6yD27cNrvLb50+lpv3fui75wcnR7vjg+NXnxiYnseuS9oheNIoGTKADBEzcm5aDS6Oth/aevFrL+9c3rt3+2R6s7m05W7/jf92auCVKVx+58VHP/oh/Pq3b/wv/9vu97xLUiHvfPI8j+pf/arceWn8rWsnK622x42YotPdh6/whcH133pxs8jmTTiYLceXt3k3n4YlEfs3b2R39981/oE3bvs2G4nAjX/87174hX/x5F//O1t/9m3dqLz2ja8c/vwvy+08uzD+8N/56ef+1789+/JvN7fu4Ks3//a//MUf+tMf+aEnH/7UN+8tpgtTQLMMHBURjSGQgGCsqmFWwISMzBFBrRHRzDpnM2czy9YgDkVmTV0nrrsuKtZVk7oYvfeCiVAJfNMaMIIghMyERIBKSIQaQxKGVj0i5s5RokFmtFPVlDRaQOsMoynzjCks2noVOjJaOqpS08ROHfTL3Khadl2XoPM9RiCDossIg6zsG46dDHuFhsRgJ2WvnzuR8Oa963dnJ5yzLQApZZlxOb81RsMzNAAQPOiDgbUB5GxKx/QW1AUIQR4wAM6qauEskPBgz4dvjYrXs2FC5DOExDoRJIbYImnw/YwvP/X0+z/8bFreybNKlwtazM3sJHQCYEgUQjQWwagzZAlEFEsbCRk0BChdjv1Nc/HJGCmkzvZYg2+6pvMVqZtsnLu1P+2lJa9W9xZz6vesG6wWM9t11jfUVfmyBh9rMBEAbOfb0+PFsvNhqIynB+2n/7Xd3FrrE2ZRO5NrktCsXL8vimqKLoRsMLaZ9Y2XkAjSvVdvjT74KKKRLkxvHxZ9l2fg64ajMViktonWCRpIgEfzElPGlkDFsO+P6PyG2btC403IS5NiX2O3mOvuON27BQf7ZQj5wc3Zqy8Wjzzux6PRQ5fbvJ902aS52Rn55KVJ85hMMXaxAaGwqBJLr8cYWti/JwmES77wNsc2WzbTl64PO6NkD2YNWVeUTHk02GIUjijCqAQh4LqhQ5SVm2x4+vDVbO8q7d9P9+7Y5eGgtLuXL962Hn1rcpuk621Pfugn/9Srd2chptnrL2rbGcEACZEIFJNmzpCh0AYiYxxZQxBiSgkRNSVFkjOeBDCCACZRATBICIkQCMir4Nptj2dnWkVE0Hy9OwcFwKAgoAh0Ri16kEgJCnzma5Mzx8MDcGqSBICLekGWrbE+dYxISlFSUfLu+ceK8R60re80SeN9lbFP2ARYbYx2+gSUlgYzFWRXIsWuaVVSri2Y9mvfunH91jIQmwxYQ07qECx6Yg5gYoqLhSfIJKiyREEHOiqzGCNjSioqKUYUx5v9EXRpc9Q/d36cjSgQiZFWfNQ2mLbp2m989VZ3yrtbxeWLk9yWm5OecRhDxcohBpu7ur57Mms0xHY1TVGIiIibuhLhZtliM5odD7Tp97m/OdiO5EPqSFmNDd5TSsvF8vRkvooNEZa5C6o54oWdna6rHn7kMmdmNV+RwR7xQ9ujl55/KbTd+9758ZN8fOtbXxPnQaKL2OsVeVkKUwptCkGT9yH6KI6ZEHwSFiyZQ91sGheiXtwdNfN5YH/1wtbjV68smk64/r7vevy3P7t86cZse6OX2/L8uOxneFJLkZXEtCfcpSgUj6erqgk+dkfT+9tbRRP87GilAhtlEaf1C7/++el8FaLvCdWt37k0YrVDWx7uH/3ud71f5se3b97cu7KDwPdvHyzbVjR0bbs3HseI5ag4vzfpZHk4O2kJrXP7sxONzWp6ol6GZa/oj8PiVKyN7dIyUTIXBjurellkZBz75kQaQQ/9Xnl08/bs4N54u1Sj/d7OyemKTaia9vze6LEnnjy6f29678a89os6NdGY3OaFc04X88YXKj0schyyHB8eXr24m0y6eXCwsbHVEt0/PC0LjlWT5S4C3T08EeOGo81Jf9C0M6nalgAAvU9ZVow2e2+tBeuHtgCACCiApDWP6Iz49UD5XY8Y8EG8eD0ekDP5Fh5AIQGRCGTtJ0XCdfhHz+I/CIRElOABZ0yRCFDAEFvjmBGYmcloIlRaizCoCOYB2h4YUIXQIEBCREURECJeZ9USyDr2dramrVvWCJOe8W4QAVIiEjQC2LznqUc/+r6PNI2yk3Jj4MaZs+Ag8ADk+s2FX2Fp27IwexcN9eJykbeNgkYENGs5QxGFlBVIZY0lWif79EEbO6ElIAZCzC0tTvN71/30nlnUXRfcaEywhdIlqBSd6felbRR8lhfocsw8dVO490KOW8En0zsnKSkIsQMmVUARSEGTAjsgVkfKDCrgUMBoiqlVQlRrIYopR+pTSgtiRAZsg3YNsVGDIphiQEOiIowEVlAAkJSYCAUQEFOlAckKskNRZDBg+GQWvvDZ3iPnmx7Vg4vF5Q+Ul7YomgsDXF47Gm8NpVtQVVHVDpQ+8PaLr77uYhPRgPpgjDm7+gh1fexd98qimgeBRyYCXXfNgyokFEQ0RIIiiQB
RROQtv5sCJFj7Q9dGNj4zncra2iYqdLYlOcOy84MtjmWyxEWWYTSGGICdldwqeu/FQ4opBN+Kr9LQ5sSIUQkACATJEalIRmgImYyCEBkgq2DB8BqhxSmldbAOc9CQUAMkL0kVBJKPqhoss5VkXCr6BpOQKAGgIJsMLEdIbehCYlDRqOQsJSIERhZNhLS2/klIIAJCGpMkTSkRmqQpaPH//HP/4MLOQ73QfddT3ze/+RLk83aVt5XkPQEL0iRoWjc/feHn/n5x/tKFP/KTBx27+/Xo3mvXfvZnu9vTY0+etMrUGamj1CtI84Amsy7rTuXb/+ibt37+65efyFM6uPLRj51+82T/9rXRe949+r4/TE8PK7FpiaNmUB0Ipro8l13c29YOlnenUIVy1GO0zGqUqtMqMy76iISDQembiOwYoGpPObcNBO4PV02t1jWtz3Lu5W728u073Un26EP2Ped4GH37Ks3unHz6V0xMj3z4A1U/HOelX82Lpnrzm689+v6PHU5p47FnsLwzVC1PV4fTbr6YgvH5aLhJuXS6eu1b0xe/lj/0kCtl5U97uQM2TZe2h/mPfM8PHkzfOLh/8PL9LwpJvey8cgyBVCWARE0CklJKEELyXWw6n0CVqWeZvJAKMCRdS5msioZNxo44Z5MTMxIBkhprjEVIAJGSBeiYUJUMsYAhZdVGRQkJgVkipaAoSSWEBCLEoAZI0DgWQWQmXV+qGEEMqEVVjATADMF7xrNDcrMIu7nrEe31HanHREoWKh+qUElDuWWVFDF0JrN2YCFXd/HCpen8WtV1seoaUGPRGuOIV4t4fTUtiuLk6DTLXHO67Bf2ytaWy1arJtRd0+fBhd1JfUJVV50uljEnbyH1bGra42a+ijrpD2zgvumxcWRDjMH2cptnxuLQ8MSSFWy7ztisUTNdNe2qI2gMil37WA3N6uVJ151Kqlb16XwlIaJtXFkXuSlrWJxWbZCYGYxhsJtPeiYZc3d6/Prz316mJAZaO7h66Wqqw1PvvPDI7mAzdL7q/LSybrS9cfH41WMTpmOHr3/xK/mdKd04zLTI8tL2Mvehy6tHh129yh49Ry9egwB+1ZAAipJR7BZxtbSOtByI6ZlRf9md6t6lwk741l1enUhog9kUngglY2sIHgRRCVRyZ5yvj+cLNxlRbzwePIynX5Lj4La3pCyaO/cMkMn7wrg5YMyL+atHB7e+9s6/+J/tn98LWjQXHj35lS9Mts9N5ke3Ki8beY2zVb30Dob9/KHhxRdO7zky3QIhpNxkkGJsFs7ouoRsYzRp08ImKS2tYlhCiokHZW4AwEAHoWX0Ic6kyvOsS35ZRfZCRHlmSdQ5G5NUdQuIaM2JbzjDIROEsFY+k0S0xtqsCSGJMrNPLUTJDQKKYWMt1alVML4jcNjlWUyhYNSuiwItpizP6pNgCSUQGXd63JBIj3jULytqD9tuUXWx9UVZnMboAkyXXkAHAwfJkeFXr/8HUhEqi4fQBGYsiiI0PqXIDESACgZQgaM3bQMpgiillJxL/YxKZ1MIGhCJk6QgEEEwQaupw5AZHGYZGt7IXb2CyocE0VhbKjULgUSdyHwOPTGO8hBS0MjWGGc60bZRIiNEVZdcbo1IpcmoZwQhFUwJACCiSpMw65Vdndq2NrHLs/5gY2O1ODEQyyLvZbxqQml5Mskgmujh+77jQx94+w+s9k9+9ad/6tvP/czr1+8Mh+4rf+ffZnCsB4Elrjb5J/7Hv3P75/+/2y/+893TF371H/6Zm4XZfO8n7n3uM1clUgKpvQFvEQybKlJMqI46T1+5c6+/iry7+dxGzD709Ju/8erTYjdEQhMnmVz94CcnH/2xG8/9D/HWF27/H5/OIZ/jpy6+521DvHTjq7cn0Afq+6POJ9k4N7p999DfPagWq43cSFE89Sd/7+o9bw9Xd8Js6r50jT/3tfOH+6/8r39vO020LN+8f7Rb9IoIfFzbd+DqvHnb3seqfi+t7LW7t5e/9n8c/clP3nzm3L1f+bXNafuhre3X/9Gru73eB6D5jdPTUNqRy4ix7ZIAG8eFzRxzSkIR1EhCzK1jaxDJEBOQpUwVAWTQK87hyDSL4+RbH6JPUdZmeOOli6qNJAfMgNFDlnNGFFNwjtqucWwIKELsFRaCcDJlsRGzMK+qorBbGdZdledmvlx0idUCMQYJMXVJk/pAQFBAEK7buNnLTOx2RmUIdv+4Xa3gBLoOQ5lZ8ZJC2L10YWtng9hVvr0zP2gyJVL0PmPKgQK0D7bzcOb0P7MNoYii4hlzdD2GPGsaWY9hgc7OEuvDwFstUrpOn53lA2AdGjgjeZ5xZwCBiJCIgAFH/c0rDz/74d/zIxsXeyWmLCWNHk8Pq1uvlRIoafQRHcNW3ziGpsKA6/GhAjhkzokUg1rh3LFha1zmJHaiaTI5F+/PZl/93LCdFQjU4aZGqhcwXYT5wkTBqKlrCCAlK0jahbRaFkWp81pCFGcYYt/YcFCBIhqD4FKMyi4rxgCYQs0m72UFRt82FSGWk950WW31tvztVzmzWVG6hKU3KXRpXmerLt29bXyXWVQrUbICNISkedZ4TaO9/Nl34OXzCZ0BYEJNxqjn/iBt7frVCg6PyIhkNCzy6vU31KV0fEsefsY+9jjtXYFBNjw6mv7mb6fju6btHHDsVAIZgLQMbE2vxu7G7UEXJvNbXPDshZvbJ6e+W7qHtty738Em715/fXXvXj5NqpCCApOCMBMqIrBw9OPh1g//Z/bJd6e8V7bd4Ve/Rq9/yZ3emp7cLzaXavodZsEyFFmW00M0MOn0a1+/d2XAN+Z+fSCMUS2RgCYfMmdB02xVo6HSWcccksYEsA6WEUYVJmaApBgRDZGCSFICpTPpSi0SIzCBgPK69gyAiaKqEKS1KQ4BEddHCz1r94MH0pEiPYifASBSSF5ALVqX2eA9AXYhNrja2drd3HmkTUPQBZNGDoYahaoLx1kUbNpI88yw4xyMQfCaOoSOjE5Pjn/rU59/5cUD5T6IVF1U78cFOasheDRZBBRRiaauPEQQHwMmZ1EhaFTDLrfGo4MsF8CqXZUI/XHZ23URO1PYqloSBMpScvKN527Ux/To+YfPXRiOCjPc3nEYlrN7FkGbZtXOmWc+tYCmrUIIBZBzeS8kDamZbOz0BvbmiwfUlqOsh4Y6bow1tjT5oGyqqG2rsV7WVRW7pND5IAJMrKSLutrZHrVtdHkxeuhCbziUw/vVqp4fz37hF37tpee+9cd+7HdtbtqXvvWVeLocDTdiYMvW5sUqdIA86I8Wd1eKoFFTAmussRyDH2R539gI3AQYTc7f3T+8vDewpZXCssm/+K3nbs5ucxGvnJ+MjJ0dnbbc39jYuH+86Hy3Oe7ZHA+Opvtt1xsUsQpb53qGZbWKojIyWQ6JSPdvHkJGxiIaY5C0S6d1MGXv/GK1Ojzo2uZ9Tz/E2+6Ng6pimwkNSquKGIOvl1cfv5L1vCVPPm6OhtFQjFGC1HV9aXcToncDdFhITiJOa8wpa+oT53g0HlhDbWiG/YGk4uWXXv
e7/yyS8+8cnPfOg733hms5zczqTdPLux8Re+883/9ff89T/4ld966jO/Mprtv+f9jz7y6osvncAj73/Tw+P41U/81gOv2n71X/hzd1Yb3788/Jt/828vjsP2cNR5bhYzE1Ns6L69zfe96YFbk9UfPnXzYInIuFq0ohYMpxgkimO0IJKUNPUctlh86ku3V8t2kNntgeuZfDH1vUXMghZdikerucNxv6gmoa3re65sRcDJzRPK88aAr9uq8Sp6Y7ryHVw8a9/92gfKYZEVeYqqCZ3jJjYx1G1db23sdm0TO58762OThCQqEvdMwWRVIUCKVDu0Enk82qibatjfbKtV27QK4hOkEHtZaQkAOAKsVi0g393ff/bOzVurbnjhTG979MCO3dooau1W0ppef6XVqx++t8m68w+e4Xm9fWmvvjuzKvdevHDnYLm5uXP1pWulAzcYH6xkVcVb047K3oWz+eHx3Wp077m3vXM53rnz+JfAhF5Ptne3r2xvff7qk5nDWetDfzCwslw2Wa/sWTPxre+43+855gziMBss59PU4mGKzurZUa9dNSWbg2ljE6U8JY2bedEcVk9+puq3MB72AI7/L5PpKyX3ommdBmOkVxoOgJEipjX2WpKCAqqus2qEtF7na4zwGmtNSKhIRGsRCNcoO9V18lkECClqIlm3XZ3+3LojYU0gQoB1W/ra7IqwnmqArm2Gp1Ca9Q62/jd2DZ5Mr2RGVVAIAZKAmIynzckzz31efHtRYHt5PJBohERVJBCBIKgCnTKYEjjWvKcmR0wQPNC6zm+9Z65LhtbzlPWghNANEM/sujfB/kSvdeCDGW4Ea8mUkCUgVAKJQYEIiYBfeUECKGRMIqNCKSV2rGzBWiALSJqikq5ZzsKEzEoAaFCCIikZQFG0lCImUJGWIJ1/hDMOq2cgVsoM5QgWq0wFmVQVOVcFyLNOvYjhIoudybwFNNh5SB0Y4pCgC5C7qI11ZVBSAETLRru2Y1ImJIxMFhElJEuZIAkw5T1CXouMKYG1/UWAohy7i1eyl2+4pvVNLLNBE+Nqf1VnHQ9MudnLTCZEhMY6ihJjiOuTeYhp/WYYQmBGUEYCUZGUNKYYVYMiGgB5ZXQgqkScUhRVIUVQkAhIqEpoEoAxCGgAGBRVNMUAr/Ap2k5FQpYNc8oRYkweA5GzxALIbEiiWGOjBuucS9EyiUMBEmRQjKJCwGoABIKiKiOTAEhUSWTsaZ8skjIJIRuTVEHYrmsXYtQkklhBRAIRMqloBETriASSaOjalFJKwZLG2g965c5G7+I9VybLVba16aslPfHF8nce31vIRuBZxN573njpjY8895+/5A73edUpEs+DrV669ZF/vvmt76bXvi+8eBI++8fL6883d2foDGbY68LJb/xC3tfiTW/iLDeY8pPbT/+Tf7S7DIDcdkmY0YBlXoXQtywpZi5PmDgzqoSGgnhr2BBq6zeHbsiQ6iV7AiUIAUabPByo61MvJyAlg0UJzkCOhEYyB0QIlouhhKQSIbaihGXB+cBhxuy0WSEZlURFT4JAGo3Gl6Iv7t8+U7eqWxeGe/c3S8sszrQe931zzKxmg0clkqF0ArIveX4x0cAnUWDLGUquh8HYrJ00ru4nszLhpFkSt9vcnaRU9/pu9P4HutuLyfIIOm/Pjgu5N3cwP+kwxeao7tLq2ic/d8/2tt3rJ+Thxs6b3vMNcT6f3Xk+rFa2cLiYGvFM5AkjirIyQAZABIkYyQjQOlKrIBq9hWBQUAXXIGom1piETKwJUEVFFINniTE2HLsEooDCGkGZkYgMsTIDIACxWmUyzqB1wgbYEqJqVFFNKYSY9JRhOsiZUzo7zpzGnWE2W1Yo6qLmaqqm3p8couB4UFYdqMByWQ97Ga463/m8KOfLOPRMJhsNXY/Msgr9MgdHGJG8KPNKKGoSkd1e6ZNYhuPJvNfPSVQZt3vFYlWPctfLXAKDGOfzRWJqfChtFimCUUYMIHneO7N55oXr1wd5Np/PVHzRz5qmspCIkA0z46KrJk2DudEqZEj9vGxTMywzC1T7NrBGxjPb/dW0NsbkmW187DMN+uWw1ztT9l9+4dqZe86/9o17f/Bv/gOG0D83dm0XXnhJ//NneyufjYZpMrfHS1aiolBGY/LgF1b80BB96nfC/Ga5fLO95+zFey6H6/v1rRtNBVj07IXNZM1qOlk1x9qZ3nbPvefN8vJNWkHUdkdj+uXfSFiVNG87tJCD5YRJQxebeTYYaGRNJSAH6aDrGA2MNxOl6Jq0nIeuJYehiyB6MR/W//vPWv+TTpocNyg6cX1rU3s095YX910++/2vvnLP9lNfeKa3vzQI8bjNW3VWL+wOn7s1P575mdaLWde0MXMm4+Uw1/a4UqLcmrKXJU1l8GxtCFKvqqzIl/NpYcyozOZBYtDVfMmdjHtlFTuQmCe7nfUO/MImKJyNTF0tO1uXXjw8bAijSsQErGDYhwCJEO0o6wOAM1k0edcs++U2QW4yAGtSSmhs0P5quTJt2y9MiJIIMWGvl99zprRHK59oJbGOcdqFAeZnsj6ntUyPwBBV8LSuCVWEzNq9S/+lVOTFoo0+oOWoQAhl4ZQpxcgARV4iBgnJWCKyMcUesQaITbvBvSa0AbSTSqxBqyOTE9CirgQVQL2PCREVm5a7OglBs2oGORSDzABIirmlHrvQdr2iqFNWN601Ji+xQGym7d//7/5etpNLDbpY5Xn21DOTb/ixH7n8Xd8239o8mhzvPHa5l+Yv/vf/C1QeIRWGiVl6bnq86CsXee+4bW+BTw88+k0/9T8VF15lbx9/7Hvfl6+8iFYxWbYGLcZAxvlH39D7kb94NC4dxK29IW0PT64vAMQaIqKV70KeR4elcC9RWyfX3+J7HjwsoeONvQcedON5rRiFCEvd2BzYs3L41f3f/OXpL/x+7+AYgbsOyHDdNV2ZLQAsGohB0Igai4yIutT+p19KX70qQq/rpJ0hODOLbfHqe5Yx+f1l3rU9ZU0J47jouWBcJ47Pn9+//8q5/+Yv+VtHk088Nd+/PkCwxoArs3zgI1qQxMpElgQQLahpZ/XP/3Q+BX89Fpo1izTCXPvQChY9CuzbBrS/MZNiOM+bu3fe3YtH3/9DZ86/frv/0J07fyRvv3Lpuz989V99JC7IHj6FLzx+54vwPfd9/S89/cx0Jy1pkXq9ug2ZY8DQxdbkmccwGG4MSlq1k16RJfTLEJYN9l1vpze6cXArYtzKi03bP1xMyLlkQHw0rIOcbGoGECHEJmiVCNC4vBj0Td7rRcWq6aRZ9AeGqEAOZNQ4GgyLouTpYvnScoIWo8ZVJURU2FETqpA0cb8Wdhza1GKyWa84lYrwlQnd2mvxpx0zrwzZ1oAJRmBCc2rNOIXgIQIpmtMeNEJQCySYchtf+03fvHfPa77wq7/6W7/2s0PTnt2z5JrB1q5R3KBo2hOvXsFlhlUiEzrK1K+ihVDmDMaR9U2AKDlzkmRiQEzMCiEhOWDrDCBwktYUPWKbubKrfO1PyBkIgUiCw+libpTjZLl8+c7
mIx/I7bDhozR9lvt0xmH1pY9BPeujMfdsoIFzuz25NS+yRfe1qyNjIUFAEKkgJTYWVZJGkJadiRiwMOtcEwElCWg5wyylpD5KULaWYxDU1EZiAiTroXzqKZmd9F7/tmz7Xj88A+ORpCYe3EJNkg2L3l7P9dqjE1tXbBLE0D3xR3jlAXf20aQmiZQGsJ241dJNbsBLT2Yn+2Y6l6ahzBpK0XeSJLfGmUAJGBPa9YFZAYABRRIahwiaAuK6pjktrz5H974GxsOKVlUzHPa29vytxZ/8tmvbPMQQNSV1lkEiESQM1daefeNj+KrXARh+4Zq8+LwevSyzKSQgRKOaEjASE0kS6YI1Gbie45xNHoxViSrkjNFgLWW+rar9qWt8P5rDFMqMgnTddIZsyqFtX2z8srUCyqhyqlyuSUOMJACdgCFlRquKqqJoiYwqQiIyAtpGaEQ60KDAqiVBwaCKC4WE68AapnR6MNo6M8wya7IcuHDsyA4xEYkFYiBFzgQQGLOswNQiiI8ri029qGLUar4SayK0hmBrq9ctmm6aogeA1HXJAqASJTTIq9tRG+hv6nhzfumc7u2dvf4n88Nbi1in7QGiwy6FrGd7zjCbjcFgejjpWdkb2txu1VUlXL/20Ycv7V3ioK9/9xvLe14XB2emdbfav3bhwtasvfTQe77rud//mB6/jGV/d/M+f1LV8xdzDTjcvPy2B22oV3efjqvZleG9r/6etx/M91/8crN5/q15BfDU47MXnjObG+/fvfd7/td/dPiffwFOVs/+yk8PX/XqbqFHv/HpQdfLdjauf+7kend0dmPwt7/5PU984fOs+Qsvt5jar3/vI9V84UbDt3/g0Riad7z6rNgLN45vfPSPHr9+t1lGgRhNniEKhhAFcsua0o3by80Lwze85vLLR7MbdxY20evvv3j9pFVpLmyWDz927mA22ziX9Sbp9rQ+mSwWVSCR3Jm2qWw/G25lEpSSLXr5t3zTWwcbuUhMnSfOAIEghdioSMll12mWmbZpkyYFj0ggzAn7+aDxDWg17A2soEoIqa26iGhCatrUJU1Itu2WGaGPCuBjK130/SJrPb507dbHPvNcZ4r68Zue/f3nRo/cv+uEH33rlS9fu3Nhe7PK/TyFtvWDfhHqRcR26VN944WdbOMLn/14/+zYufKF567tXrhvdPbMMjsx/W0b6/rlWxYzNx4PL13c3C7jtWcPXriDdvXEnSeOVpOyLIbbm8d1l1n76KMXbt+6WZR5D92q04M7q16mzoAGHRd87uyFW8fTbh7rudSr9vyFixv355PJSQPduf64m3V5Qmp5Ppe6mp3uBXTax7IWjUiB1t1RiGs7GCAoCjHL2j7BqlEY1m3LiMqkSkivTBpeMQMJvCLvAICCKiEQqEoioDUROgEQaNKIaxONEoLKK7HTdRJsXWMlgAoMogiKyGtyvZ4CsDWJaFLBJNIpAmoAMZIUFSSJQBJBC3w43b9n82LeQb9tjA/i+U/lLARhOG1ZAyAwOZicRSmqkkNHIAFEQQSZXultJ6C17UnTqorHC5GxMQ44sYnQgF0rXhJVGzRrCr9RVQAGsgCECAwIMaC1SICWgBElQUQkA4yKVomAeV3qBkQISTUhApwiu4NwjqoECUiKVFP9kq8bDDMnLhEF8coMQpA60g4gIXi1FqwVaIyzkLvkHeUFbu1AakEi1N36QEASMSRDJoakyGpyN84ldhKTdl7VGDBsSIEZER1rZ1MnWVlC6KDWtKgH58+J+mbS6+2c6e7esYrBQP/SA6/eu8gkn93/mtneG23eP95zduP6tWtPxSCklJIogEEVgaQKhjlFg6wKhKQqmvR0SCUiiKgYVfEViD6cekcVU5AuJraIlJIgoagaZmIXISGb5CPh6V6waLsxZMPhIHN5aO8GyojzpvPsSD0TMRCqoCoQYuFc66ISRgAwpGBSAkILKRoSRU2a1s2wtJa2UIwFIhYidDZYjozIhoAYQRKiqE0hdjVCEkFQAVASFIAkKqBJokBKELsYmxBYIy7bqdVVNclWq9GZfjo6Wv6fP/t6wUFul6PszAe/be8Hvift7l5+6+ET/+M/LO8+XVWtb3xZunD1QP/4c+7F/fr6fn3nCA0RaZI4LDPfxeaLX/j8ydUHfvzv9M7caycH7a/8+961k4SZj9Ib9L0kv+rsAAemjFVVZhlESp1nA4oSbeYGPakbQTMaDWM7q48WxdaujrbTxlarXBR9BEbAED2XJQ3HQJAIKDcAqDEZazQQxIgIpj8UAjCOR0XdtTbL2CAGTjEmUXJOYx47azfuhWXB/aERm8pNNWMuQ5JWEHi8U2wMu6Pr2eZmPFjSsuHBHmdWy0EyDkBgmWFqRDDrj7Hz/dFwNVtkC735K/9++8d+CJPH5W1CSnCmfPDN5gphezj9xJd7ZdkufBObuY33vvHNvbv7J8f7d69dq1/62ubeZSUHbM5cvvTe7/zw7/7Cvz188RlaAs7n7CtDViUSIluyqCFSRGTrwFkybo26XtdLogoyMCEzE4E1Zi3FQ2gZohJJTKTCGhyGRKIxRTSdAAE4BEqCnNaPNsMIyEkdu0zIapYHU4CuRfEImGKK0Z/mDULbMjKGQGSmVZdEMyYD2rdU+3DzaELG3Gqreet7zvYG+e6oLBEKyGNILnYPDHtNW2/0C0y8OygkpbaTRLiofOe77cJNfDssSh9ErTSpMz27Ak6p1SYMsmQJkvDJvNoYbah27JIts/OmSCmsJu2sjmJswZQSXL1zPUafk+HSGOoGm/1qX4LodFb1ymzULxvQFgNmHBGWXZj7uevZhbQZ2Y5AU7JELsvAEQBFxKrrio1+C9B08OxTT+XGXZtef62Mw0n18qe/sH15sHX2Ynj6Wr+u28OZTD21KRdHNvNVQNWO2+yB+7H0TGW8ebTVMv3217D/ZF038XheJO/6w4Bh+fkn5N/90sbXv3V/OcHRblKw+aD2KY/gUBNo2igyd4bBc7tMoUWLGDOuqBCDc48LwZV2pqMiDwU2SKE/8hd2Oa/5+kvj8U53uO9nS3RDGZYGM1svM66iTy1H46ykZKk8zjP55m+v3/hQlarYOzpztj+5fXJ8PMv7JWq7f+z3V3D1uBpYsEjCKgaWdbU53Krbapz1F9WyMGXeM45s6xWN9AfbbdcZ4tziogq90vbJtF1DEaerWVEUw2G/r/nR8e3tndE9Zy587frzWWHP7t6DUN458W3RW6xmYgg1OdE8y1PnSSJFEiRAieIc70lyzKSYmBCihhDKLKtaO5/Hrqr7ATbHxgDd9N30zlE/Gx5NVx5ge9jLibgNrKIApJhxRoAxBUdrVqCAXWuvEpL+F1IRk4IKW1KCMs+WXdXLCohY+2Ct64EgBI8qAKoUE3UexpBZUhSHViHEiLrS1vewqcKYs64QjCk3rB0l75tVaoMScu0TG3EZlqCl46bxKDa3HEOCNhSAjtR18+z45L7BdkHmwpe/8HJz1Ost/s5PvO/jP/kH7334Yjc7CT7ZZsFNOrh18AAWy7lsgwWSjripa+06Y4wKzNrqa1
B/8z/7u6MPfve02FzOUldN984P5NoKA1gAEZ/qrkt+Avrgn//h67HrL9uNYbH4T794+6tHYyyRISRJWmf9ss4GSk2zajFgBF3W9WU7GEt+4ewIbr/0+b/3j5c15mwcpnI6/ZP33DdI3RBgE4xyvyFFxkXbybmHxn/1Lxe7g+oP/nj+e79tV7OeQ4Oo3hgks0S3gq5tcNzLMttG1be8Y+dv/5Vioz/7d78s/+mXtO0iJE+iktRLYDL3PnLv//j/PeDq6GjhX3vN/OQ/LBcvagrL5coNe8TkfaeqwQMTQBAqijyKPj/pD0oacJh40UQ5eZPYZsu6W9RVttH6zcGDH/wrh5/8DD9f4/HkngFMX37q1jT1sGpeevHmz32kWB72+sNLw/b4137ykfd/2/Tw6UdN/sz+3SuX3dWuNRycY6UuL01jcTAobW46qZnZSwwBWA0rq68XqXFWAUwgM4ldC8mogsa9rZHUPqWu7uq7Nyu1qIokVJR9bVfWsWFGZfTi0AbRlLr66G4C0yu3il7ZhHR3cuKcC+QxBAuQYorQtEGaJGshB9lELIte1rT6yhT59Jqg655dXLdcrU1G60I0tEyEus4WiGJCAEWLwAhECKJJ1ygSJIXMwvvf8+a3ftcPpFS85cH7f/4f/92PfvT/+Hv/5J8u5rMgom1rJ/uwmGS9IgKAoIlJUozgdDTqLl4cv/l96hxfe7r56hPm8KRAdUAkmm31tJel6arzAgopBEvArszJ9BFa75kB8v5qVSfsps2qv1GWWT9X7mcLSh6+8jFMyd66wymFGlfanLMKn/8NgFQWed1pbzCmfmEVunaBINEwJki+g0TAikwgAgkb68q3vru+fG8WPdw51MltWN51SGG+YKuZmhgFLBOyJGE2qBI7JZXcCN++GY9vw/ZW783fMtu5F/fOaN3e+P2f6T/wBnf+1cdPfWZTg4JgEANyHrv2D396+9LDne1Bb5xnNhzfiZMFNEtYnECIgMC5FQNKjpEwxZQSG2AWdhgMczG2dQyzGSVBWPNdGQShS2CAIgyuvwx/8CueY7mzNaIifOrTsyeeHFQAbDviFAIiqiFVDBK7C/e6b/+h6uI9wW6UqkyXzeZle+OzzZc+hyvPKqII6XTAb1gwKs6ndPBS1h/p7qVIDhmTREcuyzIRAC3GG8PV0QuP7I5WRX3+wl5zvMhLXi3r2Wzx+a9cu3tYM7MKJBAQRQRCdQajqlcAxS5qxpgzhaheFRSFsUnqoyhpp+oBPCECjplzTBZBDVhREginn/jTLy9SnnI0AA0ABAAUDCJKbKxhhIAkgJXIsq2aanUSpQpeBY3JU4K2Wy6GuR1c6Emdjq6ugo8AYkBIgdYFSuhU6fBu2rlizmyF1e3pZN5/4emj2cQT0Pb9Zw9v36DN0o1MaYhXS6q6i32vcjJYSNZsvf1bv+9N3/E9ulrtOvf0Zz95/sGHb/t8uerObQ5wOZjcnTEODz7zxLDuFl4feN+fb8MYNvr9cmnDwZJNed+D7dG0x3l1stu7MnTlxhnuNasVHs+rP/45+6XPDBYLsgC7Z5tb9w+rxnsdqD957tfTpDo/OLP70P0nzWL+zBfe+M3fsbx+bYcI2Cg1H/jwI01YLZubew/2Lj6yG9uv1NVUlneffnFiehvved3GYyfmmasv4WDzxRt1xMIDGmfZsCMh5jszv/Hy9Liq73no8j2X9rTtbrz08lvf8Oq3Pbp5+87+IOvnPdNlVQUyMgNWGpVuOOyVo/HZ+87tnDl3cmuyqpajsshsCklSEKOCEoMPqn6dXLHWRdWQopeYQspzKoqyaaOhbLKYNV1VFD2FFGPMXdZ13oF1rlhUcyZmZ6zJYruIoW6Tbm2Mg/rVagriSYutYmBtJkpOKdV862r18tVrtsjf/OaNb3rrIxrg0196ptwYh2RfvnmCjhC0GLjz5zfurg579yGfiY9/9en773t01tKdyZQzFyBcfNWlej8/mU5vP//82Te9btXWD77lNdNuOZ3M8l5vaDfv3J3YqqauOwltvC0W493DaZYXADa2Ie8PBMK0W/Qpuzs/mTde0DiXD3dGhyeNzeKgLLe2h6nGK/fufenJF6uOtrZGCBFgDrAOZ67zmYKnTlLCNSwMcb0IkXE95dF1nzOf2nvW02hGJCRBgbW4BKe8eV0T8XSN2VkLSYqqa8TwOsaFsq7bjAkUkfTUVkp66hiS9W6DimtIK4ggIoIgEAIoclqTXNAqgFIUTKhhHX4TiQLAwOwDGLjw0Nvf9c0/duHLX0hf+i1oQGOgzKW1hUoBYwQlsBlkmQ53o+1xswJIaDghkAAEv/Y1ARtA1TW8DRNkYDK2sZVkU+iUhSCSMRECKEEkdQQEyIUQQAKVCGRUBQVBGMhgSIgJDAEzsAFkWPe4IRJRkgTGrHlMoElFgUnXvGpIEgKDaoiIwmDg4GZmA2ABikQUpeVyJDFA14KxSggRILVorSPF5RzbVgTU5tBnTLlIh2gNEzCQr1UVkNk6VBIflATJqCPmDMAqGjSafMLgDSkXVhwrE6Jw4YrJXbmhMtpKbqyD83QyIex0c+PM+75Nztw33jz3qJOgeDRNx7NZOXjuzt1rbT0FIACOURIIAxpCNSQCIpogKJEiiWoSEVBETAIJ1BKiJpDEiAJAxJJUIACCJI1IuM5DIq/x5wgpxbimub+yGxjXG3OWhehZbPKqRgRilDaGYF12OkxTMpxZoqyUGFJKmCxb6ywQqmqHhhUtBNIUFKOQoDMOyYqIxrh2GkVKZExAYGOydUQzJlAlQyn6FEGBQAIBx5CQSTUhAXJKsQuh6xrPEiPKdS/bt2dvu8Dl/tUn/+W/2vjarQ22N3x49J/+rf6HvrtNhpPkjwxf9/d//NM/9IO9mTeOakOGublzFKuqXa3AoCdqQQFoOW8oat4b8X7kq7cHh9Mv/ouPDG5NNsFWDDbPu9oz6jjjWLXBN2dHw2qxKHfOgMlDbEPwveFYOeOMAkGt4oQTFCHb1mKzQ8Je4WPSuir6YzMYS1lo2ROJMXbGZYCcxDMzKvpqacshZKX4RjmJSVQMYzISEhngnqFIQRnLAeV5pNaUo+A7gwzEGrwlycuMuuBjo6EqwUIlbDZhEJQ0QiKNqVrZMrOocV6jsSqaiNiV2faGSOitpv7ai2a8S35pii2wZw6nzgtRlw+KbU6IluvprGnkmHX80CVTz93x4fSJL5vd+8t7XiWFVeBi5+KDX/ehkzv7y/2rmxAL4OhjBtSosgCxUVCTFcm4REaJVSBJTDFIChkqKTrjrLWM68engkTUCBokREgJFVBw/aBmwASKAFHAAEZRBTEpWmZDoijARhE9c2ISJkIBiRI6iT76rm2a9Rqo28DW5KaUFGerqmmTs7bF1CatRMjZReNN543yZuH2Mkd1TJK4zILqyseDu3NkvDlZbhT98bC/qJaUZcPxwKBc2Rt/4to1U2SUEqO5enT9wvZek6JmpnOmglRJ2uY8deBbDli13WxjPFy10sspJe0XRdHnm6t6uFlWVdW3bntn1PlQzbud7f7zLx3bPEOJm
8PhcFjmGYuC96FqY9f4dZrGGNtFmLWNKm72i4Gxvg6XNjeL3M0WVey5Ngh1KYX/H1f/HW17dt11ojOstX5hp5PPjXVD5VIFSaVoCWPL2dgYOUHjR2hyA8aE0aRmAD2A9+AB/bp5A5qmnzEY3NhgG7DBSbZkWZIllaoUK1fduvnek3f+pbXmnO+PfcpAn3/OGOfsMc4Ze/9+v7XWnN/5+djFp965f7z3zINXZkeLD33kd1771f+Un+O8t7Ec7fQe9ft3Pj1MVZw3nHvgmHlMy8Wof7ZZLDj0m9xlm5uDluRk4luIVYqRlH3Ttr5f5pD2fv3Tb15/8bE//EP+4pnWdOPyzuEDa+3LRy5Vbb3wW7voMVXLGOdIyMjApApGPhQ9aKfcTkK7hBgMEgTiw6NsYo7RNZrIMzSWNQLNcroshqP+WucwTcZJMhhcHKQ6Vg3OtzbPvP/Zw9kxjPfaO/ss9bntK/3oz527kGXN88+/GZNmOdd1xXmmol3bhZC/dXicEaRoZRYkwWTcQuBQlim1LnhOCZATUbm2HlWwi3uToytbl5A1iQPKqo76RWkNzGKzOTqzuT48PDi2eDIaDF2vapqpd1kSmXWzWlwGPnOB2IlqF6ssjNgy5pAspRhz9iYIwCaCJJ10GaCBnt1cHy/quksQAzTQVtDPsiHnpTeKkJIEYkSQJILkmYkJ0aF1YIZMTZcS/LelIgBQS+yzxtFEud/f6We9ejzvAfcw1NoZuTynVqEWwuicOWqCaBs1tSkmMXDEGUWQtfXsZLoo+gONhM7T0OOs7uqFqbYiiBjYOedi0lq7fjnsbw4EtBisc5ea/Ukn6ZH3X/n23/F9X/7PX6qe3//lf/lv/MFkY6t0w8KdLHM8+NKP/9iLn/rVb/j937H1zvfNTqbXfu5nArQJCIw6xZxYESUmBDLHH/zoH97+6B+csFZt6nOvu/nm7M07vQZdkUV2SSlomzlHqb31cz9Kh++Wbn79q68c/cZzZ9R3IIoEZUiqaxuPnL/y7S++9ONe6sJB4Upvy9v/2//cv7h7tLj/xc8/P5yRA9cIRkKnPFA3AAvEkPnWUIksc5pxe25341u/rS6z7a/7Fvimr//MX/lhmMaEahYhZCdV0zND51NtSJpf7BU/8I0nu2XtwsbTH7r2z/73ziCseQ2ByLfTiDyvjr46qO7YRl4+tn3x6bPd3e+894//YbGsB4O+64cUJc8dSJJFlwQIiMQhwKDwGLtWEvY95UWn6sHmB3O8cjH8qb/UXrn68IOPQv/izg99/2t/9+9cvfaZk/qa/N6v6z399Qd//R/T9cP29sFWj9Myzk5uHzvtT966O7m5MxocHR6+43e+2+cPf+xjP7WtKfdh2dQgkT27ALEWMFLCk2rB4kqHo57rZTSua0UOhasbyYKjQEKYOKVMjo7nRBCZjSAZZM5aqFAsiwzC2LCz4PqjpUaJ0i1jb1D2+hvkipPJ+N7RZBHrRWpRk6PATOS6QMFiaE3Gy3aUMzvfJg3Ib08cICCa/RfnPZoRAgIQIhPi28GiFYgIzMxQQd3p7JqtXimrfBKR77uLj75rMpsuuhuP9osf/GM/8snPfuoIsnz3LDaL2KZu/w7OTpxJxxzyPnWsIrNE5eUnL/zO/653/rIQ+IefWPa32+c+LrMTpwk58PoW715e2J6fHi8XdUCXwKzXO957o319Ix9mIV/PB4Pp4s4b9+aPXXjMjg4Lztubrx28+Fw4uAd1i5AKhk7BBJwTiEIgjGZdWyLp4kg4QPCcYxeTNBWTY2NT0JQ0mVNnGPKNC9mD708XL5WBuvW7BVedI0yYXn9d77wGx/cw1dZ1aNGxA6cgq7Y6AiUQC0mz+/e7n//J4c56/53PEPUeWcvr+TFe+/TZooZmIUxIBIIJXVi09NrLWVkkI+lap0QGGhM5BGQysISKSmRsZm3y3juf43AtPvnb0sOPScrgc5/g+S966wS5A0U1Qsa32Z9hbvLcW0MEozuLaR28G2gwVS44agrem2pqxTGrz4w3sKOc+5z1STve2bVhFkMXbrzVzu8FB2goK4mPAiiQGR7PnV137NL6JfC9VGYukCVJYEaEIUN0R2/eH1L7nod333HlTGp5Nh1/6lNf+uLXXkvJVFBFFVQBxIBMckcMQAAApACgCkyBwDEgWgKYCnkwFpHVYCRCDmxgAYFPbWiwGnFZwdffDlDAYNALjsk7cV4gkIEjIOZkykyCVWxaQEgppq6uFsuuXkSIIcvaqnXBI9og1/HByfhWc+927IzElJmZqPBZ2yRgbJOZtM7DxnB96NFj8dlfvH/n2oLBO0nN4b31gbtx487Vs72Lpa+r1kV79NLO1u7OyD+A7dbFxz5YT2Xxysero/2dItv/cp8feGLzbK5Rs+W8nS6n00l/c+nq2UYWdHKPmvtrS5y88iWEE8vX2gAphozC+tXHLO+ZNvWs4s1Q7uyOhu+b3H9+tLUhrch4Fr70ZhcbtzFYe+pSTXu7l9cn909evfsFk9SMX5PR4e7mQxqb9TO7Zz/w/u2Hz+zd+erO8GrYfnLQ69eLl4uM5vndWHa9rcu9jTOG+L1h3tYnr73afPml25//6isny6hI4lzWG4w2N+4eHGxsb33dN37AqLj28mvJ4Ggy/tjzh1evXNo7OOHjZW+09vi7d4Zn14tEWrXFaNRwOBmf1M0d1yql6fqoL0KdxXLQT63hSs7XxpCVk3o8yMqmFsU06q0liaggHUvUCqv+YAAIzveiCCM754qQpSQptYFz9r3UVk3Ves67RXc4Pe46DiZZkVX10qO899mnfu6zL87HtRpm4FTAVDvTz3/2JZ0Nv/f7PjLa2nn9+q2Te/ebEI/nEhyaptdvLUqaXtwa7HXEZ3fb0cbXvvjG2fWeNzg3WjvZP54ezdXz9ZffOLTs3PnNl/fj5td/y/zFL7z+pbcqLVIqwjITqaouZszmy8EoryGi4vb6yEE01K3NksWbGKyHZafLDnpNR8LVVKYHywu8btq+tPeWLxgy3yKd3d0CuAsAzEzECIhv6y/NEuPq2W+MCIQRjdCAUI3QlJFWricRMzVb4ZnBCAxWFRAwUKXVnYdG6GxlPvuttQcMCVb6MkQ0UEVYwa8JVxJpAnIKgKBoaKYIvMoY6Uo9dhqGXSWYkNGpAUFu0IEtFZgQiQCR2cCVbS/f/uhH/8IZ2NAiR0lkykygwiAYkVVABMCgCLpzrhvsqCFbhKQgp5oxZAeQQBnQAyYEhFWJICCQeQGTzkpWwtRJFpwSQKvExA7FubTKaREArPTvYATIHtAjEZADJkNC4tVcGyICsyGAI1i9/zGBkgkgrlQSYikyOiMwVEoCq3i9c6ZO25aNSIxCbWlB0lAoABxKkvFJGAxR1GohJSO24CR2Ckgup9IlNWSnnCfnARkXy6Dm4gwkaQjqwCIQoHURkiNXKpYQnEHCkLVVk2UZEsOd46yO9QVxT707bG1Yv5S7b7i1kQIM+mWVF0yY0Ghkuzvbx9Wwrl577gu/uZi3K7A5AquarihSBAkR0QgtmXSWRBLC6iMEBRIA
NCXCpKamAGBovNq2ABmRgoEZAjGZaTJNJpHZSTpNFbm87K9vhgCxFTM/W1QpCIIjpjxzqhEUBFEAgYN26lzwkMyzhMDoPJAjJMfOIwZ2ZG1rlIDVCAODF/LIHFUktYaKyM5nkiiyQ2AmkC4imTpMqCkJgBAoMKioaBKNXUqdioJ00DpP0bs542R2kp8sitd+/tyXvtavoWV99E/8/tG3f38ajPKYHJJDJv/47OK5dv/V0qybN8RRpO33ChqNJjePvWNoJBShmTWjrSHn/pEzl5b/5qfvH9y7WAsk50Kmi2WZ++3z5WwyxarLXLCci7XcB5hXHTmf94fctdpSpqmpTvwDF70QL5uQ92VZZ7xI48ptb/i1EW2MzBcWAjmQrqFBwaFIFBz5LEuQKiAMbqTRYacYl4QZNZ6x66Tx5Y60ag6NDAVUnBFScKltMS8gd2JInjUqdq1Gw3wHC+D2NhamswkMB9h30lXkA4bhSpHnsz6EHKxrk55MJqNBKfd14yC/9Q9//tz7H8I80dkNeex8Vl5FjVsFLm5eX0wUepu7j55rbx29def1c+d2s+H6xTXaP9hrr7882NlC7zpByv2Vdz41Pv7mj//bG6NoAbReNpoURSGJd17JqS/JFcQuGYtGUPGoDpRQGZ1j55iJQBCAUE3AkqmhGtnKJJMgJVrpaVUNKSKAaAa84q8RgsNIjo2dMQiaIPBqeypJuzqmlFLXxm51FwzLkRgOsqxaLoaht07l0bKtBCZ110SBKCbSA3hsfXBlUHjjDmNLbgz2xnR+UnfOZ928E6AiznhZcYxNl4jvjPplmRpjV3d6FE9C1nt4+0Lpi7eW4yStMQQASFHMDXs7PKTXDm5sbazVZHVqbdmW/eFwPU8WB1F9go1Bb5gXtcg8ub2qPbhT95xbAxoOsjwjIEzSeoc7xcYb431PmDvn0ag1iV0bTcwsI8dOCBigqptzO5vLqnLEzUmztnGhzQa7D5Zh4BfXD0Z+aCfV42cfQZzeXB4d52u97/qONE9Hz31tFJLN69C0xaiXrQ1UrTtZSr9q2zlTwTSPyzZ2deTkCqcxOU3JRPtb577xw4NnHpkvawrheF5HatQqsFhkLs6OU6dJJR8UqhpPJsE759uksFjOgyz8YCAdUiOkC4yIUcGYy4Ery0UrY+/5/e92Tz5SHSyaN97sHb3GcR5Fqiq1t2cFRrXyK68v8rlYT4Mv6kqubG0e3lrODuv1SgT53t2TsL0RyHiYSxPznt/olynacd12wVnCk+kigfYHZVEiQCrKbDldGJjzHA0gC0PvrG787tlF29VNF7L1SnExWaz1MSPfxbQxHE1xwZsOUuVSlZaTNe9ENKFPXZsybTTmHiMyGRL7TjpHBsASycijM0cMFJrUzMfH2nQY8tLxrGpixven1XqvOFcUwRQUoKknS1nPHCEXzqspGDoAUsOk5JQwNLERTQrWyX+LtWYGIgwBzTC3MMLCN6bK83buHXdikIRZOakXdMBDguX0pHF1XpSySqsaooAtF2x16aXwnBdrdTR1QME1MeVCpMYM6xmvFbmheqOt3tDneQe6c3Hn+OZJtWi//Tt+3+/5i3/o1etv2LNXHniX0xe+8OSNW+M37+29Oc4gH9+ZPehyf+f+wd/6J2/pP8kB1iHfgoyYa41okGU+JUVQB6BJHnr62ds3D+IaYL59+Ilf/OL/9BcebbyCNLVy3wQtKSZHw3ww+cxvyGd+bQ7QA/cABIHI5OokkqyX06g73HvtP4S0VKBl4o4gE5MXv3z4wpyJd12AYGR+GjsiZ6reOdMCPHUECZ2ZLqu0TLqzsSUq104OY9M8cuXReOkJfuUlBlDvFqJ5EZCgPxwWrVC9vDU+GD7yyKtxHI/Gz47yzUtXuv1DBUutMUjhecDIb12v/l9/7ux7r7780suTfcleunmGefjg2TvX76W2jZaw4DwLzjlEIMxiUhWTFogBComO5jNN83QmOCwYz26f/a7f+5mDL6eDtxYvPX/1/d82+qt/441f/+mXvvorH/iWP7/7wFPdM28Opr+a67KZzB1nejl/z//81z+zzB/+fd9x/u60/Uf/dMs9MXrqf7xQvvsXfv4vAVa5k6znB16IwAAXbTSjDHN2xE4SWUccBkGlVVqWg4ItrxtAZDRbdomKfDKZFxmuZKPB+7VeWS1mSMmVYBbmk/b+cm7c7YwGaLq7ub27vlaQHnbVYbU4aWphBes8xs1+lnssANuW2mSByBkQogIkOL0Z6Lf8NwB8SjI1fhtiTQirbrOaAaKowQrVsHolAAIqvE2UUESAslh/4qH33p8fBW6qu28NR4/8ju//781lSRrulhe4unf9Jdc1YlTsbqsFPaqqiN1g4+KzX19ceIxyXy1r3Lqaf102vf2KoznMF9ovIlTx8HWrl0zgAyPlACmFbLB9rm01S2lxeG9ya3Ky9+YzO8+crdvXvvSx6d2b4eSwt1j2RVJbCwMGyr2XpgUBEz3dZxOaATMZQ0oqROjYmFYKHmJsWQmkNwogYFny2EyaKfSHg6FIrHH9StSR0Mh5Q51rU2MykWQpsbGpArtT2zMYiDmkINX2QdX+4k3jLB+NVK8lAUwtEUUTUCW0ToEBg0KsO2MPompJvEfn29QQk8sdMql0PoB6piSU592ky3Z2sic+MB4MfSxcf4hFDnVSMWACNLNV/F5NACF5TxAFG+sHAOgkMCombYzRAJGQAU3Ue18s5rf/5d8v3/2uzW/7A1VvrY1taiLmQ+97hmQGhKqAYEJIgExmVDVlupsWB514//4AFx6qiQKCpkhE3ntYG5w9t7X/0mT+yr2DzTc/9qVXbt69N1lqatGIgOTUloT2WxWeFa4EwcxAAKMqr2jWKCogAIC0updWEyxeZeUX0pX4xiAq/FaQAd7G10nVOO8hAnFQ9AAOQFQ6RpDYCDQibdvMUrJ6thBQDoQOJTbrwyw3Or43H99bvv7KeP9u06bMGZS5a7vUdbaMUZOBYmxjcPHrv+Hpel5/4fU7WZa1VagaXMvJp44At8+eeceDD/dnr61Hu/rh3/HQd/6RW9defPhKr1fTm59+ri9tdbw3TPeIjsqsd3z98/7kRu+wL7NSr72yuflw78xjGd6h5mC6d/v6q186E9bLAbiuy3qbIy1w/y72Lx8djMsHQn0yHwyHXQs5YqoO6ztvdbkeTyZpEXtr66meOFTIeo2Ww6sfWszvzTb2H3jX1Xs3bkvHsdx569643j85884PHhbnxjfV68Veb8f1njy88aXYVsXOw2ce/tCVp840qpGKa6+9Pr37+uMPP3LlOy5++APpD5N86sWvNCl86jc/t3+/laVsDzbf/fSDo1Ac788v7ay95/EL1XwS8mIxWWysF+cuXBL04BkdeO3mUofYZ4s7ow0Vywt2vteKSRfZc0rRACU1Xds4dNrFfggWK1DsFXnXLTtJqC5FMV0dP1OWO7OkAkXhU5dUzTmeLI7yfE20VdAutd67vN/fIt9qGk+nRU6AGKFTrj78wSv/+VdfhOgEVhFpQEva4FdePjo8+tizTz4xCtn
k/mSjX/Y2Nyy2TZKTIn39t3zD67/28VaocEnh5sVzvm3a0veXs3EmmNQXA69pMr3+yjsf+kDduHsvH+Yn/vj+fG27HBVZFkSTbuWDjXzn7tGN4Ubv7Hr/6OCgbmIlXTIdbvaJXBthbWN0dffMnVfvTMcnImlYrq+VQ4dW1a2k2O/3x2005HrRvr0CoCI4AEMVBTqFC/HpPUermxD1NGi0YrfZStBEbIaIK2q5EZiZrhYKA9TTwTIARFkx8FbsDFzlgRR1lfJDNDAjtZUp7TT3Z8lkBdPzQGyEK9DR6dK0wiqtFJ1I7MAAlQxQ0Al1YAgpIoChODLU5n2Pf+SBbDObH0l9UFDHpKlDXqVQJIEjQAZmHW10u+el3MDFwhYAzkPXwmr0CMBcAHAIDCLgnIVMQYEZk+iiZWIwcr2eOd81HQXHhRM0MzLIjDMjx6AgAqrGp7VrAFBkJEYgQ2+yYjG9/TuXRZKVERKRiBkQFSPSakqFQcX09K0xidp1qAHIyBkEBCIx4eBAPaSYuk7JIztsulQtKYnGZKbgPRWldg1kHg3VTDnTLG+3z6dimN24odffygrS+hiTErKqAoPLcxGIfjdtr+vamkHkPkrbdMdHtr8vbadHY9m9mDYuq49FWtbLe73eIB3cH21veeuM2kXUUKwtaYhZ/uQjH3jxq19cztq3P1gkRTS1FA3A2CmZgYmqJLUV2MUIiZF90gSW2AANFIEYVw2Ct4UchgjErKYiAoiEsBpcZnd6LlgblJkTRmi6VrsudiKJTDoikZLZMRK5kIkRAWbMZj6CCRIiBmKv7ImQibwCGRqxN4gQU0KkUy4GrriQIiYaa7Mlh8DggTIwRjZViKwdpkSr2ilYUkBLKSKYeiMgas15UrVaUt/DxvHR3o/9y/0v/MJG6veKftpY3/jwt8wtuEZ00ZR5nhCmsf2mv/k3Pvk9/70DARRfuPvSPvWdH3FUzn/2F/PYEHYQLPTK4ZWdetLR8mBr2S6Ol5AXopjMept9N+i/eTR99Nu/u74/vvbJ57avnCt3dl596cWnv+/3Lg/vT7/63Oaw0M5jF70r1EATxla11TAqlNnl/ZDlmhLk3jwLos9LQm8uYO4gojQNk2pMlDtgSp1zfQTpIQVrLFZz8ACBVDKtW5PGZQNalXCjgjlFUCVEQDUCInOSGDgTwOB6MLtDzlubhIUzz6EEDJwQ4gJ8H9ABA7Lka+tQRqeXqhvX15br0xs3h1s8WXup/CZae/93zV65e/CpL97/+FcobGUPPmhpdzDsaeYLDlItDJRPZosvfE66k90Pf1TyjUVUNFnb2umV5aCu+h6qmptW2dLQkSFx1lv6wtAjejILRDEpgzpAh5h7x6gEYmZMrKZsq0tHQVYkfqBVshHBqSmamSUAMYxRWQEIjVTAiDyQICQGcmCSophAiqhpVXoFTacdZLGiKNvl0mL3dY9fvX8w//hLb/qicCSEQCpDz89srZ3jbMiuTdIavng4uRs7SVHARmUWgKdV6x32NsvJuGvB+plvSX3XDLPQTauNra12PtvuDS+tny8Ha8d1zQHaZTXs5zkVsWm6qj03LByDMRZEm0VRON8f9d/a21srs27RgO+1Md09mFRAUISq02Ev9xyLMmxs9u4czGMTe3l5MJ010RjIu6zuZo6dC8Gxodi8FY2LtWJgnYBBs2ypiyF3ivyN3/Dexfgk9wM9mTbLWfHUY0//nh+4f3y7P298kd/m4dXf97uEso1X7lz76X83SLE42B9PxlcLyDMFMacYl80iLXxKCdSVmMwstVSnrop13j/7fd+yfOrJKDwaFMypvnYwf+smJRB05HMysG7OzN2iRg7F0COKMcwunoXNjeb1F4u7kyzfIOu1tSSMwQVM3DZ6LNXaH/296x94Uspcy352vJz+9E/tf+5XN7z50EPketJU7SIf8JNP7tx77uPDJ/uNa7eeuhpKn8veznbWzMdhOHrg4XM3J/PY1Ns7w4euPvi1F98oPE2aOkonYFlwi2WUVjpJOENk3tjue8S8FwbDYn82EQtN2w5yB4ToKSuzo9mEIgy4QC8uc0XWQ8ev3rvxxJOPM8rkjSnzMPc0Hlc1GHp0DiBpl2pL0VRccP18WLW1Sx1pjsaLtByU/bpukLqRC7UPbRXXd7bWhsWN2cm5frbGnObzkSOF1HTJUUDhlpiAAgPESACgpoKkAkCgZiZG4P5vrCKfOQbPSQuDrdwVEB1xK12/LJuoc+0WXcfsAb1odJrKkO83WiOXGXvmVAmhJ2IB09SVmVPoTNsoKfisKGA0DN1J9ERlTj0PnKLr96HuguoI/Y3j/Teq9ul3fvS7v+kdP/63/mbB1578lu/63u/5tsPrh8985/f+/O/4bp62CjIYjGgphSQ09FCch0CwAjRiI5b7vI7dvO68y9Z8iHEhQC8vDvnSGit53Bjs372amj6Act6iWtdaJ+i4azHVUkJJIAYOwNhZTIKKOTkXfFCNdZuapcUGOHTojNAzWmvB9wXAgJImBgjkHKHPWQE5HzQAMUYuygBKVbs9zPe/8pXRV79w7oPfuIiUZs3Vb/xDk5f/aq7CHn2eM/t5HdV4vedpUA63HnZ7/uxlzUbrxUs3p3WbsVcmRK6XyoVjZ2WFi5/87M2f/JWdXumiK11Wk1VNfSFsHB/P8n5GjUqVCC2MCkAOaSmduVBUy6rZXqf3Pbt15Z0HP/ex6tq1oqTp116c/uh/+tCf+T3Xvvri5pXLKXeVPz/8nj/xA3/sR976//6j+Vs/OnruY8OqEVFEd+9wvvWhJ/nB958f9e/duH3+W3/742+89es/+v/OF9sf/aM/8KmvPdg2d3rLeSTOkFPXNF2toqaWcYFIjLFOdQsSMWVseRAOaXY0d9wf5FlObmN3GBPfTXuCzbxtiCDFblElB4oAZwdrN/bnXauxtbws6mV7pr9+Zn0wQGGRm/fuLqoWCQyhE+MAjdQiTeHrnqPtXrmoUJFrifNFjMBvB+uM3wYTEeIpm2IVIEITA1xtzOGUZ7Q6tuN/cZ+tcEcGK7CFz85ceujWwU1opiPnrr388sPv7x93DVXrv/0dD06v/drNj//8YLpABtcb9M5dFLR2uTdLuP6OZ8LFx5qOBp4GealsONy48KFvgy9C3L+ZvesbWiwnn/71Xi0MpgpMJJGGO0+OnvmOtFOAzvlgzif1+dhvXvj0fvtrw8Xx5jJqNfeqquK8Z0cKKm1io9RFdiimBMgrBKYagzkEaNvAVLeCoKEI89TM1zbK/sZydlK62E6P4f69wbl3cOLm/p4tD3u9s/u1NqKbu2dDe7UZL12nKgYmjMiMUQQJuy5lPiQVRXDEIBioEEVdRI+JVqckQB+cdZ2idjEFn0cvyCAAlqMaaeokqi+8z9jYDCRhLmUpo8I356rj6Df77p1fj5cfCSIw6ZzVYGLoAU3VUMWjM1Ajkq51RJGNEcGdTpeoJkjgnBOxLkVA8JkHICm8XLqU3xvqC18+eOMrO3/oL4+t5PlRmh9bjZyIVrRxRAIkFUBnyRCQSXnWtF/7YuN8sXFBhw
Ww5f2yqWoSHI1692YTrpVa+E+f/dqN6bhbaisAjKLJkjrnRFQRehlleIrJrdIKSAJGpLI6O1DunUNVlUatMsoAvSqoiAEhmKMICEiVaq0QAdTglJQNAACvPH/9iScub+0O6pmIq5EpSoMQo7SidZKq6xpJFTtPDpyzXu7ZoMj77WFz683xl78yObixiDFD8z4gAjRdp4qxg04SISCiL1i6ZmMw+sznv4ILW2ozPN/PiibI4pknLv3Q3/v7sfLnsfnk3/uR3Xz00Du+4eb82J89a4qT219u777evVLQxpnx7ZvnL+yMttbteN688Fn6/AyPmyFLd+FWuvwYuOns4N5evrP9vX8Bf/nfusXzuDHkq0/Q1760+ORX+Pwz/sIH882zw17RTCZnz28cvHWtns6P43b54d8dZ0fdnYOT+4frs5OteOyXk/nnPwuXn+LzVwdPvne+PRw8+O3D7Mw09jY44fUXcbSx9sDVdLhvR+oP7+j8gJpFSlj47aP744VW/QuXuOcfvPKOZnDeuUNtRGZjV2Tf+v6PnF0fvH/X/6uf/dX3P/7M+d3d3UfOxfZoIzYplZevbvXXHuSQV+Nlf2uQD0uEbDGduyiFz3rbW11Tg0FgXMQ2ZFnh0BSUAyB1mrrYgKYi812bmmYaPLVNLWLJJQPnkJ33XWpdMQrezxZL0djzBRN1ncZoSKgI/XIdCYGUELNIisLeymFYjFvIvJDrqiRdW0/Hv/3Zc69de+WVu9hWKgaSoveO2DWde/3O/GTyla9/5yPvfPYJKv1nX78XPA9yPzp7vltSCkOdVhc2106O261itDebL2U22D57fvfB/Tv3dzby6/f3dT575ROfOXPpytlLDyxg47F3PH7v9sHCYi2Yj0ZdU+8dXEeXpovjk85C0iL4KuV1FSdz9CFiY/nR4mB6rZmdnD3TS4wb585Xs+b6nTcv7+56XxWO5k1cHi/YBqeFotNnvaGtWgNERKfFGFqRVk0NxEzttMqqhgZgRORBE6IBaGI4nTsjAFAwACSy1bgYM60qUqsSyOlwmhECocPVinIKvQYFBSADUAIDo9N6uwI4XSk6cTWeZrqCDIDSKuUKgODAkGGgEMk5BVao1SWI/fc+9b08abBt5e5euj9xRU85UxHPQKdP2wjDQXvlcnvmPEfHVbRu5TVwTAixg5gsBCUH5F2PwecSHcbI7GOMoTc0RIhG4sAF7HFyTroFYQddVITTuTpNKFGN0JXGDsAQ1E5Tu4YGxnSKj0JF7cCIUUCEwSGIEoMnAWPHqUXnHMRkbYfoVTtMCsSmoGoYkCCSIiATelIDjajAg1FcJpbWUmuBDD10CQDQZaBISGZmeQ79taYo3RPv1n7hL+xEjfH2rWBDa6agAikREVLDIXSF46c+IKUhTqphWM6bftkfEnhYJmtkewcvXk2pblrLirVUt/jGa/kXPh5KRzvrZEXcuUr9M2HjTLjw9MXzT05mz0mKScUQ1IwARZGNRIBJEBKooSmYETORZ/ZKbAYCSUwRlJhWYg4iVgAAZwqEQIZk+ltgcl7pUt8+HmwOezkmSwkhiULXqiZFJAAxiauZzKxogDl3GRiTqAdMwKqQMTvyHhyQGCgiu2Dq1II69WoZGCE5FSIjRBQyRUsgK8pSMHAWCL2QCJiQKLIBp5TIIUQlx9KICoBaXgZ0rmnTRtBL8+PvaKc3fvO5fM7kkkGrXf36L//i9sVL+cUdKIMht416HgzPvvOJBy4dXXudGBwHY+Riiw0GoxzuzxCB+2ttf4PPvztOnnf399vYgWfOnGZukeUXfvv7TzbPPfbbP5g9ejlrukcO5utbZT05ele/oK2z2xN67a/9BXntpiXwvZFDj2HQtSfl5lrKSytLY4chx3xoi0pNrCd+Y2gxGhh6gkQI4LIMBKBpwTEggXOqkUOO0cDMY0HEenKU5ZmQE8nY2MiUyUDBMToS6QgJVB0Fiy3lfWXGZQOLysZ7ePG9qYmGiRi1kczEmgaKLegRVHNYNs4RapRl8lm/zDeaCgdbZ3CY9YsTfPlwefdj1G70j3MnW11L9bW92MLmex6vqyUvk6/j/GScc9bt3V2kE/JFeOx90htYjG66n+/v9TPeWB8cHSuiscZeCMquLXPyWWXOMHSiKSXvvQMjFcfEZIGUAQDZzEw7NDMTUwEDNSBLCAlA0YjIAoCCIpIYiFEy66IiOTUwSmQxJTMPZm41MxDbjlTautauS91pqigLziT18mwr2+YEL9+53zk/r1U62czD2Tw749xja2vSinQx1no/pXtNW3XiAAihnjcoSgjLKi3SbFRk1oPBqDeeNmVwxyez1CZnkody0nTjG2/tbK/xsi00oLJ04vpcpaqq55j7elwTUIAADhZtrQmreZuVbvfculnsYopJc+/nCqZ6ZpSfXx/sT6b743lVtQR8Ml1IEkJcdhFsWQRKVaKcwUHqFEIGSIPSc7Au0XK5pLZtll2GWTtfXhpmt964kWWNjMehz91j57dmvdlnfr3YobWzF+awyCjSmj72Q79r69lnWmnvf+Izr/7jH31oCAGyVCVZdARIea7YSloyKXbRk5cI+WgTKylefSUeH4zvXT/+zK/yS7eudkPnPZHCcsmEmEQdhOBSO1efLLTV1av5j/zhdlS65VH88X+dfeyGNIouwzyAAbaYROzDz9g3vV/WPGl0qTKdDOB4ePExtzispocWcqm0q7SxY1x3D5689Kl//pKeO//4b39Xu97fffhq2+jucOvLr76CzMeLJnG+P64h3e31C6llOl1w5hdVFbvOIYSSgdAFV3XSmHh2UVOdmv7a8HC81LoZ9fq+57il42aerDmzvhnraCyzarzR3zmsJuubO22Xbh7tz7uUe1cAZj44TzWZo8CsAAZdBAcaGyFiQ1Ng7yTGppama8ilkrtSO0g6KAMhzKp6fjLLmfqOgtKaD3mZq4pDbLpUa1ykVAEQSMHs0TAhiZiZYwBCEen07YTp6luMWg5CaDrtbN17VB1PF8vYDnvlvKqSSR4yNYwELudcKKaurVLGjoB9HxqMUiVrJWMCdcqMYOosD265mAYsi8JDpoPcj9by2NXzNg57vklpb1pNx1WH7Y3m+O/8wz+Yv3n8kDbvnk6e/yt/tdwu2uXs02/sbZVr3sXeqJzO5+vaImgGxgA5JACMsLJxckqqkAIQmbWpGYGrQ5juT5/evIrH1yef/PgL/+hv7xi3gcedMUrpYpaRx0ARASDz1EVBPnWNOGImjNodN23uS62WljkHnNihZU0XTZTVKAAQQdQihNgpG3ZdUw5KMR4rKICxJ6HMdFhkxDCYH331b/yV7W//rq33fuv4WG985dW82HFtTRBRMACCubp2JZIburVquv/Dv2f9PM7n9d1j3uhvLgO1TXIZZeStsy6JCawPRmRDci6sewDWxbhuonv0md2nMj1+aXH/IPc5IQE5IEUSTd3sMNbveW/vz/657sGHT1ou+Pz9v/vXr9T19qic/P3/Ea79O3vuBn7wnWf+1J9MYtf++T+8cf2lra/dSnMoimLaJCUu87ATe
P7iF4vf+Lf9Z795tnbpP37mCw8ZDYfFvU/8o7v5C9/7gY/8o4//RitfDSSGNF9M1QzZoboMs8RJTKMAMZtpYC9VbOtkiUPu13pF7FKqOu+yjX4xqSModKlh5+YxOWbv/OHxeC3rW5fqtu5Uzw7XBmHIEkLuFml+a3JSqzGadeDQpwQVGkInaA7dsl4iMiOnpMR0qjoDIDAPRghI9rZKBhRwpT+n09k04BWOFAgM6G3dOBiqrfzG4FbAigLiqD+Pi341OT4++eCz70lZEEdvXf/ifP75ycd+pncYSXzrKAy8QkxWjatZhWcuP/Y+6m1oil7JUiutIJejd31bN93remVz+T0iA7fzZjebQNRkQkTocdGdjMresq2rarmZ9dz8mA7f5IM7bhktKSMitF3XGTMmZXIuC6ZGqGrJkIhIVthUE0KSrgYfAnOKrfNsqMtYpauP5b/td9NC5Pn/OD98Y72Xx5O9ODtKXZErU72IN148+/B7Dpple+em7R0zMXu0ls1WwjRAEJCYeQ+EjE5FwZyarc4FGhXBjEgRDUlXzjeC4AplJ2IA0ee+artI5gOSdzzMT1R0Z013HtYH3mcu27hw9uQrr8fjW2eefqzduLpYUmXNkFs/4pSXkhqzVbNo1RBHE2HnDRQVUorIbG7VdzdQME0MiIhRLDXi0drltF7M8sff3f7m7Xx8L736Ap1/p1x/WW+/CeMxA4MSooUQNEVICdVQWEC1AU86tIl/6/nZS1e7Rz+Mw5IyJnDtctLNZv2CCugydpcvnN16/PLnfu3z0iYkYz49TuUOg8MMlBGicRNNkZVURXHVPzeMYmjgiXPpImJDIELOjAy9ARCJoogimQCKGiHwb2F2AQDg/s32oQeGbQ4tdZZLlrO1lbRLiTU5CdbmDMysbQou1zjsTsKta8t7N45vXjs8GkdVF9DnhcsyUhFmajtj7yglp5QMVQAYB5ujt67fbZeaiTfRk/vjQWrfe2Xjr/3133dUttf2jubd9Fu++ZsujXaPy/VxPO5zvnfraAfSlad2Dvde7p3ZffJ3/cj9u0fX967Jm3fPTttBWoACLI+z+YAXs1l778V7s8s//L9OisfGt3+M714bnN3Svc/h5KDHs6PpbOM93zzTrj5eFt00TptisY9kePV9cx6Fc4+Sc3R8H1/71OT/+ruj6dwlDdzY1Yfai4/OwuCt27cvXDyTXL4x7K1fSePJydG49jzaPP/I8vWjje0nYOPciXW3pBd68/XU+pPXRuWZ62/sl5Tlg8WNN14bbDxw6cln98eT47e+uuPq/+l/+L1Iw4Pjw+roHlr0PgDZxcu7oqjkyjB0eWBwKrI5KlPXdim13TILrizKw4ODrAzBY9c1ed5PMaUknAVoK5SERGCCjMC4rGrnMiJeLGfOB+8ty/JWKgUPiIjOfBaKXttW0ZoyL7qmYXZFXi6qCSP1eyHBSiZCW7ujyfj+YjL3FFKKYtTN7dueffetuy80npTA+cxEYxR2lLNb1t3P/+aL73j0wruffOpd73gAm6O9vdvtPO69cLs9zDzk9f1Kl4mypk9i2m2ym08mfVab1b3+aNZOxnv1bH7zfJ4XO5uTWyfDjf6N23d9MUDXmzu5dGG4XJ7U1ZRcZqBFf1AvGy9Yt4JERSeDwMfTNlWhzXJUOd6foMnZ7R2mVMe67VoOjoyB/duhIgMAEUVTBuJVMM941VE4HZZ6e5FYIYPADHCVDjIyBFFE4BVgaBXeU7W3y0VAjGiMtIqKIALCSpFmakanjGqg1f9xGilCAzVJq6eXIqdVLmMlOwMQU12FJRF55SFHNDIwZHSmRsSmLYCgc+R1q39x3W9hfQiLMS+OA3qLqigECLpqgSEA6OYmXH1Q8wHVxk0LoYRYA0ZwDIkgZJhnFnpUFLE71kyztTWYzrSrXFnEXg/znJJpl6weN2t93DxHe7eom6AYrVJaK4ITEhAbEIAZIjhWXr3xeKqXYDQGADAOqmJIiMHAqesa19LOeswL0OROam1q0g4DYNegRYQOEBQMXBAGMBHpvDqoK6krZiCkFOeGtUhtzMkVKAZaEzIYvf0vWIejwYPvc3q0f+dG9vAjdZ7bow+tP3K2fvWN9OZrebcAAfLOQdJ6ErJ9mrx+9Nqb5QgG7/2ug/IsPfwQjy72rO2uP4+DMlEBuY+9jdCBZG2enbiDF6kbw2Ex2jq/AO2PhvvNskp+e/sSvPqVTjoAc7y6AsGRA0QzFUmmBkYMlkwJ2JNnYAXoVksuMgACAzlm8AAkpzP2SmgrfjkRCZKi4ukV/DaryDDGFNsmJW2a1HQCZsiYrKGqA9Y8z1yqQ+4sFzQP6JJJB2wEtGpkoSCoC4ziEGU1xE4oBtQk6FCAAIxMUdWMKZopYJQuIGaIDBotggNTWGXYOHNoqs60TWjsMaQGFClpcibbUn9ovjz8hY/1FrkhmSfUmLXL/c//5/jomYd+10cxGzqggVTNja997M//xfDGtbXSKcFs0WQ52euvSeDUzQvPXXTlYBOdm331M3B4uJxXPs/QZ/MWw/uf3PkDvye8733rwuQMSbvUlFd3sZcpjHFUdjEdtZNzv+eb2n/4z/N83SpLUsFsCmYwKh1nlmJC0pRSW5sLlPciGRMAE5KjPCizAmJWWlXD2sCCWHBKPXDONa0cHfNoKzbTjIbx+DDb2kQ/QpepKUhEAgABBDMhQoBVlTQZkLKKqTmC7a1uP2bdBCDDgJBn6aRhVi7z1C6JiHIPXaAMKKa26lo15tCkqGlZlFnH2fLOyZCpYd/fveA254t6tvPQWtUt6/FkY3trurfXkA3PXqz3b8FyIrN7J/PJDh5nD1/97E9/7v6nvpDfHctmf2+RUBEJe4Xv5aFzeXQcHCXhDtkBe8jNBFKHRMDYmTkkMeAVLEIFQBF0VcjvVFcTCUykpyG80927KURQVAVJasqMmpLPV96/qKkCsi6hGdZVrWCiEfQUax2A2ib54NfKsupk2sowZF93cTu0NQMGQA8kVYqdOOfWR9nz925XsXXEpSeJiRVE1DOmLj50fmeymLXLWEPnldKiHVHGWYZVOp5HCDkhrXUw7BeHJ9NlAgo6acep7fKc+6OeHKfFvCn7brBRLJo0XXZdp6OShr3itevH6Gh9c9A06cIwP7e5NvB4WMeDmEnbbG2szceT4bAcL+tURc4IyLhg7qTtJsPhulBvMW/6ua+qCtCdTCsE7DEZGiKfuXRZYoZQPP7eh75y+96tV17duvxMC9vVUfXg932gShfuvPj6U88+0tjkzo1XRk8/0hYbG9/1g0Plo5/41+eszfN80ZGqtl0TYwupRjJnCJxCL3ftfvvv/6FNDlXD0JUbIbhiwzPU3cKccGy7qJwVvjdqky2yB5aXh/Hh7XJ3ozcP1Cn0NuKDj6dPXM99JorYCUYlw5gVg2/7znkxSMfTvksAoczW9oj7X/+DN7/8mdH43/dIO1AfPHhojsfXf/1zPae3j+L4kQuDjY24aCaEh3f2X7uzaBid86lNYukA60u7mxgjZo4Jcsay8NpJ5jEUmStCmtdEhqCp
lZCyplnYbFYQyWK+qJqUYHSmPzGYmWQl5Rs5m58uTvaOJg89enE2nyybZn/RXN7uW5OeOv/QK3s3hSV20TlGTWpSV02e58vlQkE8h0GeGRfj8bKNVebi2tawrdpBXqrK/ba7FRskPJ/lZ0Phg3MZi4jjkAFAbqJyWDVTg0WEmVKKXYnmEUSiB1Rdgej+21QRO1e3MSMLqLFdgrEhG7FiBG69c1EFlWICh4jkNBomZx2nKnpv20NKmOo5F5yRz4jIDFrFBMLBe7WFJnOqZMZGDvuDgTjrbxaZKM6bMs/98fy5X/2pb9x94gd+6CNnt5vDf/VcA8dbAV2Xjd739Lf9T3/28PDkxR//Of+J34DlMoFxFpaNBbCcw1zaACwBMTEBoBhBcmAbyrd/4sd+6XM/0x1Mdk5mlwFqcIe57737aT8/kf1r3ImmBOgVbC7RESZQUww+aZ4tettnPvL1F55+//3rezd+9l9n872SoGk69t6zSykiGaKvGzVDYFN2Emh3dycmMS2y0K/iHLRlo1THSd2ZAYYs7cubP/Yfvvp//lSOodfb7fXz3GcS665eZp6zjXI02IqTo+nBzKHrxQFcb/JYZMNyUbcJsNcfLJtlr3Ceg0YIhWvbrsjyqqnqJutv7NggW3/m8dkYjj/zS5uDJRGhaFzBpKTFVkPmj+qOH31H9/ijFYnjlr/1qcce+Qf3/9if7tWQbfXHv/K187LW/PSv3fl3PyeOdpEtEkLoZ3lMUvR7qhaclX2vx83+//JPez9wuP0tf3770SezPDv+5R/d1YMv/NN/c/mP/M4/9P3f+7WDhz/+a7+whg5RDISQEsUIcxH0jvK8QHKBe9BlHaRFTLujErBpu4VGlxV5apv5dD5bVkhWes/kDbyCOBeWTbtdhGo2j7To9zbL3rBfjkKZNSJffvP6yaJSclp1iuyDo3xFGaYlIIPFmCQmInNIGDg2p0HT4NkBO4Skq34u6NslI/ytStBqBwVgtoJb20pwowB2+gI0MHXYePjgh96vh1/eu/ny1d113+vKAViW71w+6778q8Vh8g3ygIVS0q5ZjKv5sUa3tntmtL3jnfmcM89pUUm7SD3pBudiKI2sKEapI9rK2tvAYn3nTQU1tdPb8vLnNi894OYHdjSbf+EzxfhWj8whRY3JNKqGLCQFTNEZdVXjFQyNmFTUTJ1ziZGJYqeZcwAmAs7lbSVhUKrqYOPsYryP9/f8rbfWNnLt9OTGa6Nv/uh8bUOzx46++tnhzVvbVe0Ob+pbN7klTnWSFsACu64TBCByiAQCGhXICKjtImdZUiUAayMRcyAVYUCVyB7A1LrWOQUCSE3TCYzOx/XtRNTPRkv08vR73buezsNIJov20z/jXn2hPDnOPvDNceNSMdhpFm1va6u6f8OPx0FwtS9hRANFSYCKRiaKqy6cz0ARGiE0VLAEoErEBsjADhUInEL76peWbbP9vvfAcx+Ln/8EX71P49ezcQPjGZMXYBYF6ghUbQWQNUACUzDw1na37rVf+Xzvwns160uzzEBaaUlk98za6194GetienDyR//gXzy3cebHf/xnxEwEIZnzFLxby6lwNFvWhAwIyQThlK7rERJQLQIASlI6yAlcjA1yC+gdKaKgkSIAoYGoMQKuMlxIvwW29id88MJ9OzeXggwWSKbaSdcUvb4xN5VIo45DjDwdt29dv7l/Z14vmM0B+twH9ErOUuyInZiwZypcEggeer182SRyfl7VSm5272ibUIfODfIPf8dH3nvm/Pl6ebKnJ+neY1ce3FjsvPmxX0q6P986eeib38kYaXa9me1N7rxaL6qdb/3de/l2urTb7e6ev/Qe/id/qb52sygdQHPy2lezJz4SN5+69PXvKzce61fHMHux7PrFYrCY3B2MhHoWunqxd8P1tsP+TXfjS9Ubb2btNLZtnpXkXNUfNgIhP9frQo8DmlvbKSeLPdcsg5q2/I6r7+bCYyNw6/qbP/XXy8vnzv4//sH+3ZmkmLddvHld1vL+lScc0s7oQvXcv9t74z83F89mMLx7e/7E+58NXevr5cH9Gnpry1oQysV0hqGF2BTg2WWLxTSmDgAlArumLPspKYEAAjGTN+8o7xexq8XaQS837TKPGPoq6MDARJYt152ZtrJsY3KZB6AuRVOez2auzBCpqhunZuA4aFmOYt2wJImVJ3XBClYOrGJtU0FS9M5AyUBEu7YWD+wpZHnOntFVU53sL3zkj77/4a9MJ5+9fihLYuCcLDChCnnfJrx+d3J37/lv/uB7nr1yOfT7S5O9k+nGxgOClaPZ7fpYo7WzeG7nkc3y3N6kGbr++Patg8VhZSKVZsAvfOHVwQNnRiZb22uPF8XJUm/uHaqkIay7FNYGa5iHxaQ6PG6aVsS7vAgkcOXihZdefWP97Dnq88G8u7g9HORZ1cwaXVy7e+/caHs6XvgiOM8hz1Z3gYgAIKig2grlgsSAqGCyMmTairxmZqYijojQVoNogEgOAYETkIEBrkrgBoBGpoaOVm55w/+qSrtabwgIAERXfGpmVFOj/2qJsdVfVwNWIwDgU8eVrnJJq5bG6Rfa6WA0ACAheGIEU+OOoLuy8YBpZdqRNjCtc8496YocbV0LKzxQKNPuA2n9DHlHIWrbUVFC3YIJiIEkYI+K1uvNNzatzYm8xrxwkWKURad9z9tbdVcbZtC72Gxux/lifZpTB8YMK10WKqqiIzUjSbCajmJP7AGMVlMlmqwTICJCNUBJTB7bFqG1Em3Tp82+hlJRjRgXZEtHLXkXXXsE2iAjxg4tGnrBxL6PiqSdAQCwLWJqF5gjtDNrBWkTRNgY1FAUidURDftu7arsH8TbX1wvd4uH3zsf8n5vmPV2UDI8Pua5YteIaUJwPsO66Y73Buffpwevd8+/5oKPFx+cbz3knhQcX4d6nplZFhrGhmR46SIc3ptf+0w+u+9pKGCoXZuHsKsdr5VZCLnr1EuyqLYq65oI82rQERFBJEVJikToFT2gyeqKJQcKhqiIzB4JQFcwANW4ynjCCmONCN4HBIJTXzIAgCQxFDNt6iqmJJaiUdWosEW1Nqq3VAYsU123MfcFkWtRW5bYmjgS8q0BIpaEHsEZO/SkzqwVM2RUWCXEvHbARorkGVq0iJDQ1CXP0UQAkBD9qYBQDBTYkQdRBSNCLww5gVu2DyteXYQ4rqKnGnWaml6WbeXcVvXhL//ssZ8MH38H52cmH//EG//6Z8Mr99d6uUqHjKNhqQTxlZcGF86WDAoCavXJkQvI84lTTP08lGW3SFSMrn7vDy3Ona+FnDMWmb/83NqjD8cI09t70LneoL8UC8MMH31Qt4eZrnd7M1swA0LUDH3TVLFw/sxZMXMgPu/JSvfeIBYZIpg4IIemAICmkAeEGmJLjp0vwIQsQObViw17OK3AGGJSiGCeANk50QRAYEgAYBGSIHlDQnDadZYMsiHmOzZZUiA0EI+cF4AOcmCftIkGhszQVCApW1trY6LReq8/uP38r+d7d7afvVKMBi7ud9bE8+fWvuepe598OVKA+cn
4iy9c+aYPwdlBy+XkzSNcLLY3sqYqlwfjvf/08cp9wl4/wRuzfnDVSRVKr57IEZFXdlT0mDJgX/CqtL2qBzkzdqbMv9WRVTXTJKbGtHpOi67sAABAZKJISGTJTNTARAwjkBkouU6NgUoEbTt0hpLUUECJuFMXY4yiTVuneJqnKILr+1Bmgcxev33niQtXtjLitjWXWxQXXEymwC5jJmikSyAAmhTbiIXz7INzbtq1/bzcGmwu54szg83JvIokh6kdhfLiaKslPS6mi7odZtmkWsxibMEOuq6X5w6RQygGpZC0Ege9TCkO1uhwPx608fKDlwd9uHdylPVC0c+Kop+a7oGzm4vxyVu3l1PwSwUPKnFpSYClA2O0TlQcN6C+xAJLFMJqucHFCMN81lXWRBMRbRPtXNqZnbR3ZiePXn4k0UTy7aWGN165/uATH8ln873X7jx4xG58c/9zzz9z8Z35rHzzP376HR/6gXS+E7P+d333y59+Lrz6pTWzZFovZ8FR7nIASqJ13WnuYzd1M3WIQzfA5IEyI2TqEiRQQMox88Gn1MzSZAnFWjM8O/ym72930uCFF+x//zHdLqt3Pb1+5X35zm/qXSZC7RXaCQm4XmnzVEayoiSKOKf41snsi68V77ryjj/+w3f+5n+sJlNOeRSWJeRA3WJRzxddMewO7g3eefXF69c2r5y9c3QY+lnVxs1RvjiZJ6GmtaZNy9msM9AkRRFyX7Rx3jYtMox21wAwY29duzUqC5DJ9GAYsuVyPluw9Irrd44vl6NFAk3SWxuUvaxaVsez6oFz5/q94trB/rxWYD/rYuwU5vdcSUEpOZzXdUYAilnWN8tEIGmbuVAtF96lIgTVOhNanBx19aKf97sUJ0kqlKfPrF8pcq9J0YjBEcOKdmdGZrtFvg5QtWkptlCOah2COQJEJAMz/38bQKs7yTK37GJh1mlryDOzikW1ExJz0AsuNtoiEmBGvj1ZoTYQ1EASATnCQe6tg66OEdnYRW1UU+lZJGUFk1nuCCB2sRqsDVsRYJ1Ox/2Gz/lilA8++9f+wePf860/8a//xXs+8tTXvfvhuy/fGnjTXrj47u/63Imbuc0H/rs/KC88R8slGTRNyrEgs6jgnD9JuEwyDJgBWAckeQPaJi4hnnv1EEF6wAh4AsOdP/23Ln3vD1af/Jk3/96fzUWDo1ZRk4qpBQ7O1U282cHD3/O9T/65v1ANh631NxcWq6O9n/oX2kmWZa2pRfDBJyRXlr6gDmiucr+G3u7G0eTkvOdhwWU+EG1FNTVNagQVKGRkUAZXEql4VpdSLV0xabrMO8TQtCmsZ+2wWcQuIzQLbpBjHXRet86BZxNpMhGwWpvWkst9f1BAbZEcbmxkZy6NTyJt2Jf394cf+OaHduD6j/2fu2uDfr+IYsocI+TgIFDZs8mdm8O224f26P69S5fONMOzTbEW6uXw7FYxKqrPvdZHGLksIZkYe1J0GFJA3zRRHVASZiKn29PD/D/9xOv/7P/36Pd/z9ql971x43A6nV88fx73Ftkr9x4+++jr/c8v9u5ABsS0giyAYe58hBUEUmJt7RLM+7zwaBGsG08qk6yMcTmvF1UbE4gaRHA5s6ljasfKnB22S2K33e9tDwpetMXAo/FkObt+937dCXsmg5wZosWus5Kynity37YS1Voxz7So2n4IG2v901QRERoIwGmYdYXvQYRTGIQBrIYIbAWXOO27rfZrgAaosIIFA6heWtvYiPP79+/1s7C9dn59+8qt2e3bs8Nn8lF152aBmaFFlZWAGNs6TeYIWZkNKNZpMaPMUVgnSZO718P2ueFopz8cLAIW88NYVYd7r/dRHVNshMFypqKu4i//L25zCF0Vunyj6rpoCtBQBIeaKBkjURaCOQRPhUNo22QdgBKtDJACptE554KoeWBwKiGFESXVMlvvtyfxk/8WFh239fhe4zMe9OLy8/++9+x3tidHF97xrji5V7/yel4foamRIy4AOVobFYG9pYRqBIrsiJQcGJhzvkuG6AWNvFNQZCZDAnLBc1BMII0ASupaX+Tel1XgcnTW4QKX3FKYjR7L3LmNarz4xI+Wr73A5cCVaxVZc/ut1LzK/e2QPazTrjtaZILAJGJqssJfqag6x4imGqOxkSMPEFHNNLosJKkBFZN5sNWni4DbhPWNl+P49jCM7HiBJ88zVgqIQCK6srpCY6vakILA293bhGioIeO1w9vHv/lL6898y3Jyv2bkjQ3kXrG5eeb8IC3y+bT5uX/8j3YvXLkQ+NpCEMUHDkQBrRecIxgUWZMQkuEpvejUhGSmgNSsQNrkvcI6Q206ZwLAZNgagYFb9dIQaXVNr/gVbx8Pxvfk1TtHr2Z3I6ayhNh0a2fXiVVSHc2a2nK3fjKb3T9cCkZDdAohA4IOQZ13ggKOnJlnDQEht81BvpwlTKxKij6l9tFHh+fPZ72Txexuc1LbH/qTP/w9H/0efO35mz/3nxaH84c/8v76hc/i9eOH/LAcYnv8Yvq5Ly2PKtcs+YkPnn/qB49u/sZMGNZ2D2/eHjgb33pls2ozdlAreNu4eGZ+84vnKK6/8H807aK+ffPZ0U5rDhdKsY2d8iStB3f8y/+ht/lxuX3DmqpYQGAfYgtNlaeqKHBUVxFzSQlGuTnWpaw3s+Y//MTmA5+oa3WDXmOJOu8q600ahWn6jV/r7R9VL//Sukz8aDsM3hgcvDDbv9NEYmmGGjMrs8tP5w/tdk7OPQjapAiGeQjBG/arqgJJDOizwOSZK69aVXMFzrIihIwNfRYMUAy1a73LKMudI226NjXD3gCS+DxvU5e6yhKigsMMHC7iNGrqqjZFSY1EbdrWRsEBmnMUmNtOKdJyfEKATazIOXKYBW7bOQIScV4MMA9NPUcU5pCHnNknI8qLDLmtO7WICtrR0b299TX8/q9/INr47n1rOo6NkEFUi10XgnexDiCf/KVfaB+/MsjX1s5svPvhq6/eepOKbmdjp1vGhLw9usTA9w/HbrgWymBHedtIhJj3vVKMk64h3hmWeZ7tHU2baE6gzIs2Ljy7ZtFOT+YkBKRJbNLV24Al4N2Dm+cvbdTV0htdXsvO9cLB0UEdp5DRKPRMYGNtLesVh0cnR0fL07qNqQCAqSdSFWBacaZX8gI1M9MVQwgsMQJIMgMkBiIyQEBAQwIEUrH/svatTr24UmjyKX2G6O1QEp267sEIGQnUTFewZkIwVVNc5ZpMbcXMN9O31eZiamZIBAhmbwumTeF0IvrUFmbECIzIVy9ckrbGLAQu3foGz04g1SaruWACBIgIG5v04OM6XANVZYFhxMx7gpV8HcmBkS1jczaTyw8tUtcvt/CosmuvlhHYE68Pa5lpKGz34kLrJHUfzZrWBMFQYs2hVEUBRGIAB0CgCdhZEoQWgJQQHBmzeQdhtWnNNHZErBCNWxt4fuBM9JXs38l9L1WK2VrqvLSrmgNCagC9GWmbwBQgOQVQUGkMjYiUlIABClOPyahuVUWRAYByZwZKmsoMvLTXXwsHB+YX+MaX+JEn+M
zFt+6f9Hx/a3tX6iNskwHEGK3o+f5wuTjkx854De7GV/r7L20sH6tsI3XVsBzWh3fg9Rfs4oMOUXxoU6db5+P2Vpre6VcL9U4cN3dv1kuH/Z0wOchEW1wlR5HAPHHmHCKuiItiaEgAyOQIwLQzBANdXTJqSiu1DQADMbOYCRIFRRVECj4QZQq8KsMxO3g7Z911HRJYjGIAzMCiBiopmbUiC00xSb+FzRx8krY1ZtdaiiE0HDumIhQMDp1zhAArAT0QEnIuMSaMCblGUBLwSKzRoqAhOUIyxkajkBJZVHBIDpxDZKKkoAIrDl+KymBMmDMMo64ddvWhdSmzHiNDyQ4d1suq51149V529O9m8pOLLoPD6cZxrejATIG0UwHpl5w31eKNW92sCp6Z2aUYFzWkFMpC25QkDjZ6AXj68ktx0++d7D3yyJNxfNgf35cvHVn/odGjj6UbX5t85iuji1fx8Kbd2teDTkKUThJC6A21G8fYYcGyuyVrW+X0EJYL0IT5gIYjNUNXQhY6RPCBia01k2QA7EtLkcxrJ0iIW3mSiev1NFVoLaCoIWYBiCxGUQHwZs6SGCoQYgi6qjSLBO860tYge+Td6WTi8g2Z3seIFLIkhpJMDIq+FR5DH5ZjaA4B1VmSlFKRD9d3ytg1Xz4MPa8Dcpu2nFXV2QeHD1+Ue4dZ8G3VdicRdnpt32WPnLPp/Tp11jHU4NqGF7NiUg8lJXWuh4N1f9BJmndZ5lRMY4eZV+vUxFFQQQABPB1TSWBE1KkEx6qr0UNgATRbpS+jIaiQICEZKLytozEDZ6AIBtSKng5hGiSJ0EViMoCkKRkpZhK1S9a1bdu9zTBV63m3UeT1or0w2sasoNQZcacggKqY2DxCSrp6jF8arc9V6s6qJpHzAFi3Ujrfz3g2PtgarTedVm5Zxegcg6JHf+3o1sJUjcaz5Tqtnd8cncwWWz0/GGbjeVeleOfm3d1Bj5Uj4Kjff/XGYWLOhlnI9HBvfDKZA1NZ8MnxpGt1vmgyB1UUZCgRYtvMG8nzcDhdiErRy3ySLqZYaRgEFnVdLDVu9/ub65tfu3lve3t92oyrVnImbrsC8M7r185cfOjM9vDWF1+e3L+7f+utzyzar7v01PlzO/tvfbk68d6KStlvn33k2Q/svfVGb+CarOiY1s4/lO+90c1bNXCSSJKqoFlKgp7arg0CqW05eCDJvet06THTZEbEQHFRQZabJJRW57NCZruv3Wj/8c+gK3uzPM2dvucJ+vb33vXZIJSbcujKfjIUsBBgTefHP/b3+3e/17/zqeXFbXVZuTU4c/Vifedrd492548+2H3xubUoRejPEyzb1C+KM+i4V8Y7+8Vx9dIXXn35jZs7Fwd567DrDsYLB+Adl8ykkQkINQTn1PoUijzrmthztLi/NwiFtElUora1dAfVlFwZHBvHrAiPXdppZuPtUVGfdG1X37wdE0TKKe/Ta9fePJ5UveH6CF3uM6inwfntcuP1++PGIzufUiTnnKrnNKvn4LRNvTzvtx0qJOA06Rbeu42Nnel8Qhl6dq6xc0VZegZDMVZNntExG6ApgbFEcaajPBSifYcJoRJRh7EWMeBTXep/VSpCU0RMhuoQCmgs7rWdOFcGyFwAVTHwHvsEDn2m0DEAmSNQiVXTEJe5D0hBXLIklaQqNqDqMt/EmCImY5eTIzBI21ubRViLJydOOcHDTz7x4bu//HPnd2zgq5evvflH/+1/GI36d//iX75yxu/fPFxA2Hjns9WjG8M2neNiT5Na6oHPgMiIAGfQNFv9+tIVt7auX/tKN5sgQ8LQJQOwEhgAGJBdVplsPP7glY98873pfTi5P+jUGYiaGpB3WZZLp00Vu97GU3/gT2z+4O9cbPTaQup6kZ0ZPfqd33fr//pXHMABmqMuiiEah3uziIOh9EfnvvG3PfXd31GB7P/8L9a//EswmzazCh0Ex50mDp4xbxTaNmWlExJD6syiqPM+5PliFkdrPV+Ws83yNtS2XfYWy41IcV71hut+Y63rak0Jenrsys3f9qFBVt74zNfOeDg+meRl1ls/0wQHm7vzam/rqcsX33Vxv59NRxfXzwygNQBhylwoDAy001awVXz+hfv/69+4+Ad+ZEd93BuXC157+unuq6+M74zzo4X0CmyjdyCWMOfOIIEz7by0ecD7izq/UMw2+uXuhfbg7vKkemR9jX/9UyfVc8/s7uwV5X6C5z71Untn/8/8i3/1uU//POS3W0edQbtSniIxWycNM5P6WQOLLm32emUBObUH4yrLMgU7rhfsYCwtOpIOljUMRZigEXF5DszLZcUeez1XEINw0V9D4OOTk6ZakhEiOGZIxoCEnBpo0BxpbCU4YibHGDLnjWJ9GjRVNTMUUzBgwNVgwCmQCE6BkApmCLhamhAIVnjG1f4eVHFVSCLjtKhe/cJnSj8Pa+vF1tbh/GTeZJfOXaG9uyfHBxvi2bGSoYkDgaYahOwIsKsTpkQqaT4bG+P9+1JV/f6apTY1jSy7ONlTz+uXHkwHX3TMgIiaAAEXWjRLmC03gue4gE49hc4QU6fmMnIE4kkxRcz7i2JUnjvrWePR/TQ9ySxC1yJgABRV0+gwaLVsz+ziB77Rb6/DrI7HTXf7DaeSLEVgQIetlNbpxz4pv/aZIknRywid61g1wSAAtKIagjM10+S8Z+cttgaqnlMkz86MfSBOMTaRAqMDQlKNGEjQsBzUTcxKj3nUtvXsEMCS9JYTeeXXAiXFIe9e2Dw/qjmfvPRqduNawS66MoVszXH9ypfozVcwqT+/2y1iOZ9aFFUzQF4JpxWd42jJ0BFaQENVqytBNI/ec8dizGIemkgxsXrpCBTJWaFqRzMpM4uJDQEZCJRwNZtiQgaMJkjgEAlUTkEkmhKwSm++Ly/8dPvG58qdB9af+cZms3d73Lidq+uj56bLdH5teK9NDz198a+990d+4hc/9Suf/EJQU8S60SVJXjjvsy42GVrmSVQjkAJGMTMQM0Q61VWT5aDbGfdUEV2btAKaIcqq/quKjAQARAr/BejeH2yXXAq1fceMID3w5VpKS/BCSLuj3quv3jmaLJOAI2I2QDWGBMKEEJQZgaDIfafCffaZujzuZJSF/mTenr1Mjz56bnKQNghoaeeeuPwDf+PvDML69Nrr9Zef01vHmnPztVvHz3+snbTn3//hVvbh6F6YtTDrygHL/jGUT27txbf+yV8ePf4fHxqtF6GVV98cbFBqBgnR9diKtYHmsH+v31b9/jqcHSq7bLMPiQZtAk3INuvc1taTMrvhBpvF+YeqsfMuUNXEZpHHloYALBF9YsTcoFo2tw56ZZ6DhylSgvr+XgaB3EBq5c4XKU5/7hcKtB7nEgd+WMrJkZzcGjjOs2GrMYSi53vT2clkNBjlWxn6yZ3nJDrtHvSQFMwxIVMyaVNt2BpCyHNmBefZ5WYAEtEQBJm4n+ciKrH1Gg1kFk+Ayi4hiqQUGZlz19TLKLWB09RAqplhfDL2nPusZO+l1S6lLA8KyRSqelHmOTkm74nBQCNAW
3cQgZirVtAjRgmZU0QFyfO8a5dKkb1BolnqlKxc7/tZ73B6ePLm4tHNMp6cRC7vV7IUYofWRlNwmTtzflDNPW31XV76Au7MrkExWYpbttQ1sYF45tIlWKZ+T2M/fOGNl0J7vLWLD58bHk/infuNdnB4/cBvFs16OZ8tkHw5KNp5FTJgx9hB0wmC9XKX5XIuK3aywd69+4yuyHKQ1oFaOz4ZLxd1XFh7/uzlQPNuUcc2Kuh6z+W90d2XxwCQJNKqPU2E6AxQABhRzMROsXSGq/4coikgqhkjARISkAISnQqm2JkqrOpHZiu0PBIwndaOBE/dCHaqSDi1pwGuViBU0Lelm2CmpipmZoqQYJVOslOi0iovAqanPyQEVVwNFsFpGgrJMSCKrhfryC5K27YxY8YoIAas5Dy6FdAG085ZOXsZs9yScC8zBfTB2hYJMQQABlQpfehl2XISCGF5bCb8wEY7zaBusn7uESEbSMiKZeXj3E/mBEBZX6xDEyBFF8xQETUmYCJygA7V0ARMzMQwYMiiyxQxT53EmSPBTqybx74z3sBWs7bNFm1eDGM0khaaFqvGV3NOZlbAal1mQsxRGqyXmswIBUgksQes59wtsU1gnhKoqUJUYyJnoBpCVaz7wTDr59bLfJnVv/lxn2jtwgNLwnD2shze5f3btlwEv6pp5JigmBzaZ/99RpTu3dn2Eb/2lQKypABZ2977siy0/Ogf9+t9Cr7oldWk6cKFNPtiaBYyX+IDRYvT6eT1+cZBTtX53bX2UETAOV8wEWAIDhFkNepgBpDAjExNW0RDQjQ1UwYkgBVR0RNlzhE6Q0uIqkyMSC6gI3VIGRAKmIognuasU0w+sImSwerg7Yh8YFSViCXyUiVFWZiR5yIE65J5bhrtfIpNhaJMAVNgR6VRUAAkUiAkJo8mKzhRMusUGJOhri5ch4CWBEDNkllCMDbQFk3JmAEBLbCLloSIUVElS3Kude5O0y3B571GY+kpdsIcULUHzBbS7SqIpmaWkoRAnVdwjC2UuaeMp/M2V+NR5vMMDUyx7Je1RAHM1jZ9W7ezmQCzr/d/5dcuPLC+fXWzm73WHxTz176QmmMMeZg9nW7f6Vee7l/nr74Ki9562GgmrSUKLmOgBCTmavCwvqHV3B0ecZlD3gOfgSF1DRCbAa1tEBBLBFPQFjCAGZCjEBBBGnNFJsuO1Wm1RANY1NahyzYU0FCRHRJDBwxoGilDADKIQAzI1lSMKqrJ59ArQRqJKQzXIM8dKIITBgMHlCVlXuG6uiU3y/nhHhf93tZZuXtrwMXk7tifKUKWYlWn5vruO5+934zf8dAT2av359eO14rRnFqmniSMx/NmfiKM2ai/tT7oedjYxJsH89567keAE9pYX2sXy3bWWjV3QxDPjjJlw0SoaZWlJEWi04OqpIQAhhQV2ggZECEoqBCZKiMpqKoS0Wks0YyJSMVMxNCMDDGJYBIEBQZEayUahqjaNJIATVTSaakoL7LYyKJp67bJs4zIgIGKIBI9OIFESMROa6mrBgDP+6zc3F7GbtFGT5jIpcITgkN79tL2m7PqCzfvQ8YA5AH6GRHX0qmKKknfsURNasjczdoOpCReLKdl5k3s+GTuQtbbGiFhz7utczvQdePZIiYMwc0XtZhWVUxtagoM/V5uwMHNJZECe+8BPGQkRCIOCRA28qGHNovdB97x4OGiu3n3boZSjcdm7dCFnLPxncOdrZ21razqpuyGX/3115b7s7auXnj+1y/zZg970/vjvftLeODpuOayjfzsd7/j07/x69/9jR8Y3xlXzz1/9K//2eUzfaPMjLI8k86ka0wMUyITUigzljKT4CDvxazAgJExtil3WWqawCwApspcglexwCHjE3G+sOB5zc9E8q0NO4Puj/z+47/9T3e3tqmzbrE0UFAdznz8hRcWX7vu/+D30KXzsgXQ1M2Lr5z5oR8Mf/Uv3vuVfz/5//zUVtvkkFWKTStbw9Fe3UXIZjp64da9sbQX/HqR6aULw+ODiRqDmHUymy6JcGvQj6hNJ8u4VE1FPmDDZtlt7+SAMp+3i6ppcuPRer+XHR7OrNJC6ge3BxNdJG2MbBFTKzrq5aEXupD2phUNhsdNk5apbhePbA+7emqJNgLstdPNjb5ZvqjSzvrWbLYsQi9kfjqfFRiXdZvAOOdxbAbDtaWKL8sEhkIPrfV3mDICZC/ISZhMwZSAjCQZaECLxAQs2mdgR+tAHaRkrEZqEN8+F5yWiiSmqG1wxN4lSVVqyFHL4vMsCGIbEYCcy9mapl2adTlGNd9zWGor4FoHBoCCfuVn0GBoUT1RcD5JBLBBwQVAu2x87jwebhcwPsC/+Hf+5RnAm3Rj8vqnv3Jz8gP/7M+8trF1+2tfufn881ewHEDqDfmtT/3yE+/66PGbL3/6f/s/HtxbeggRzIE3kAbaN/r5w3/zz/bf/ZFtzO/9D7+vHh8Ho6hCyGCaIDFA8M4FAqL7x3vHX/pkL4tv/MefOqMBAZtOjLxC8gqk4gGb9fXNr/uubtgfn3Q4oLZLWM1mR3PfP5u6PY3SSoTgu2RCIZ556LHv/taNr3sie+djlQRdTB77hg99+mf/fcnAEJs6skGbknPsBjnFzgdn3i3mVSJj5zuQ5XySI5hmHvrsdmj7ydGFM42XTedvfOwTw/w4xcjSkEi31Kn0H/nrf1s+8M7lrNr67unhT/5o+MLnkCSND2KcLZrD85ef3n/tjaqrtz78vukbszjlM/1yOonsxGPnCLVOwK4cjNhMfvUzi09/OUgcnd0c78/O7QzaroIunozHa7trCSMEj7Olz0oue3XXoAgu6qOF2p/+k8Uf++FWoN4/mv0//8i5xeHyuDZ1W9vb9WL88MPnrly4yoft8fTk5//W3/uup5/9N59+edbEFs3nXkkFSQidZZJ4OUtdsmEv3+irxepkPksJHSKBba6vH00OXQYCaqLkfEIUkwTJIZImIusNe4CRyedFH803Vfv6WzdWpR1SY4QsC03dJbWkHE1TbL2amW4MBuOjJTLmRVhO6tONka30KqcZIl5hiE4FxGB2OnRgCg7JzBCREBy8TSsCIEQVQwRkmlfx9Tv773pi68z5K8VgrXXFhXMbRpy6OWcKy1ZcBo4VrK4bb0jA7CELMXQLjD0e9lOU5uhO185ahlh3eDBT69WTZW80RL9O7KVrcQVLEgUDbIwZMSXwkJyqduacKAiAqYQMg6e2wnpwPvyuPzDd2O4Vw2oyKcaH8faL8sbn/Ww/q+cBNMUIhBlb2jqHVz+4aI/CWtXvC+y9ETyJJUiRHYOiU7dGAIYcGFpF7syIHai0gEnQpyjkMkShNlpSK4px7t1Dz0D/XDse683buZ70N8BmC+dZm4bQtYOt+I53wNlLTT6i+/vx3i0c3yld3UynBRIra6WcTL1xwb6rqreeLy6/117/xBpEz345n5Sxa371J/PFLNR1jAJvTkoR4gZSiys0BgIRKygYGnPqzClkRIDS5r7aPdsO13l0trjy0MyRy9juvpJ96RO9g0VGLkZCYHQM
ojKvHQMgrJipRqCgp+MguMpwAsBKt82mgoZkpAmxkb7Hav/aWt+d3P3qYZ/S8Lz1tnygLEtPfeADj525vH31QqzxT/3+97z++p89mRxbtFC4DpRE1KQovCSJCkbcdpD07dgbrrKlkEwdQCSKhhl7x1R4DImqTtIqAcUEYMlsJXL6rUGYp9/9uBkZxJjSyZ1DKnhRVS54paAo+8dH9w/2lVxGlDkQU+cYCdg7MJOVu0ldMij6obfNw6EGFRbqkpS5XDyjH3hX/+grU7egN+f0Q3/8r/bXdnX/Vv38r5/Lw621zQc+/M3tIK0tLpazZnF8O68PC2rYL3t96Jaz4uga1ZTdvP2MxfYXf1U70AJGD1+YlYPDhwowPdvGUjq4fwOaCizBsoYsB6O2mnM2dDvrqTpxmvKcGVrKuEpkJNzD2B5K4fnMWeJWWSJEDn3yozbf7Kqae0dx9oabn8C04YiDbACuAM6iVkh96tKwnYrv+zPnnLYiJwgxLqbl1UfrWNb5un/oYuPhUDTbvTJp55vbG4P5ub16afMxOCHDzOfGjNh1XZeSAoAAUchdljnnY9v5IjeEpBK7tlfkTJSHrKlaYNvY2InmiyxTiSvhjVki1KTSNFVczFK7zMqQOYbAXRfzLEPyrXSQ6OBwMuoXnp0YBl9IMlVtmgYjOKSiV0wWDRIO+wODromiyUSMu0q1NSWPjB5Ha4Mx2XgxEbO9A2mn4P7/XP1nuK3ZddeJjjFmeNOKO55cdU7lIJVyMhYyzgHb2IBNxtiX0Abffvq24XKBTkTj7gs0NKn9GJoGE5wkY9kIWQ7KsSRVlSqfqpPPPjuu9KY55xjjfli7xMPdH/eH/WW/75pzjfH//36Zs7Zoo4y2i9AJqEym+dbIGRKxxw++44mi2gasWjBdcAGHD1x8z3Nf/CzjpvU+LCHWra2wP16Jpuzs5r07x688P8/zYj6LRWb9RnE8a3uWyzvnbl6/FWLnU/PIEzu96tVnDje3yt2d6fVrt8cbmS/oZLm/fW6jb2wyKEaK0grHg/mRyQZXLtx/67iWjg2Ypu9L6bYn2WBcvnE1OpXOi7C1jlVQVEkFQdbYOhRdS6nwNAl0ulY4XS2AQRQUBRYkRFRRNAbX0SEUIlIEAFJEUcHTMZAioeKalkdfbzTTGsAvikrrk4gUQVWUgVQBFYBFkZDWQKO1xARRhNfvtcRo1ipFJBQujA7LyaDcaGMHiBPjstwhAJCFte4xAYDAsNTH39RVYzVESKCCIGwM5iVwo4aSsCtzs7kVAcK1lzAjO5m2LcPuprvvPCz62M5l2boebOAsBOBlnHcAWQRCAwq9SJQg6gwDibcKiQBImMgBqAihtZiShijZhg7GYXFg6nvGdgK1lq67/0pfeprfs5yKyH0zkyjkOjJIpgXuQZWZCVGF1TowoikgogCB9wrCfeNQoF1hSBCZ8gHUNXgDGQIgi0Duusm4f+TJDk2c3fFHr7lhWRgLz/x28bX4wM6ZNBjo7F6MbGzGwOA8uCwKWjB4dBdixPkCMyQO2LeWcubk+9VsdmCefK9923tNaVfLY7s5yB97qPtaERYLVAgnJ0JFsg0oS2ozj2WRGfKENiOjBIZOjZWCmkJUZgQVTQQYuEc+DWoRksIa9wuKhEQW7RqKxeCQkMjk5Iw6ERIyogIcWE6/JDtDJOCME3JJJAGqqoIRRWsyQtW+T5GbwOBEJBAaAq8gMbR9CGi7rByIU3a2FXRkc2etAVFBRSRKEjoOTNqhGhQEcSYz1uA6AiIMBEAogGyIDBiQ9fuDiCmBIql1nNgwm1U3WPZDVclM3TVlriUYRmMyb4hCnwwqAlqkSMjO9iEqYQosSHnumi5aMjGBtVSW1ezuoXFZ1yyA+zzPYxehDSawQMgGuIMr+egvDe/PTFV0bcrmK5+1BmJ4+siox6AqAzxq2nuNtUO7sZHapt2bURtMZsDlrhj2J/OBqUxIMPZSDaLPjBGbkRplRBQxXS2LPSon4DLhBIpKGZFRRBx4tQjWCZJwzHcv8CKl2EFYsfVkDEsksuocKqgYRgQklLROMAIpZZki9YuZLysWIX8+kdHIZJGsVc4QSFjQOqUKMg99xkyVtbJsrTOpxAY6GVse2mwEEyxO7uy35ivn7r98697svscePbl5i5zmZRVqLcbD+d1eudOshI3cZ+Xq+qoPKRthpw3bjWpjY7l3TG0yChx747SkchZb9pWlipUVT6c2ysCgiorCCJBAQlKjGDURQAKRdRATiVDQnEb/1w3WpOqsEQUQYFAEUBBFEebIwsyRI9EpM94QRhbnT6s3ddtPy6ptO+uctUhk+xhZ1WcOOUGihNiEDoQTMEGGrJVSlZXbmWbOn6zaajToUVdtfPG1ow714taUS7x+sohtmsWOUMlgjlhmZjouKdo6hoiYV955c3hykmLIqhGiU2dt7kITurB87KkrR7Pl86/cjgKZd+NxUa/apo6u8EWZLWMzrPzqaA6dGAIC4qZzCsPMl2RDUrCZNwSLNBwW48nmy3fbG4cLI6oEBlJlnFVyZPx4w3hbd3Vc3pa8HozbvVvNnMPkvs095Pe+7x03nvsIl/jAN797aey9mycD3Hrg8tvNV58pPvObd//Fv3vn9oW0bNvQuGqo1qwxeyEkhzjISu7apl7ZYYlDL96Cy6IXtQRG+rqp8iIFJWNjH0SEBMMyGqvGGKWW+5X3zt9Z3f53/zL7kz90d8bT6cW+XTCCZq6N0SopcXtvVjx4Hobj2ko/yibndw8/8/nVlz5dn/veyQ/+ePfZF+CLVzOAwpg6pa5tnMJjb33L3qJ++vrdcmNw9fo9k9HmuJCiW7SxHOXtKnQheuuCCjqKkUWYU6oyRyK+Kq/OF9uDYnOnrMOSIuVoYt0UQHVPyRY3D1ehSwJhOBxnWdktu7zAje3s1skJDge29FlMVrhk45NUk41Vh0H7+7c3+j72QGDo2tGBpGDXeDmCujvJC6sc26bbLnII2CZx1nQpDZwZJEbVGJLPvaia9a2AFVDW4kxnDToU5SzPJAlLUgVLriisMdAHblP6r0ZFDqAgypQsG0C3mDXG0iCDUhQVu/XVFSRq8N72kjoVW5neYi+uNyNQ37SNQBjneeGqvKIQpZNWIoKoBcy9LXOrMQnRfBG6ePzwmfsHdfni85/O37Z96W10+OW7f+7v/qmjceYng6LoLmKGSpvDvG/CyT/+R9c/+X+XM33TnUZBOrBvwPrSXoXv/9n/dfbW93JTHr34wsmNu6MWClAGKAgiMFtbDEaaGQ59isGc1C/9D//vs4VcFKo7AjAIYi2Bas8hGtrZdJhuPfMPf+L+7/yDxYW3lfeftW7W7L/28gf/dVot1Vt0RDZnzMS7WVY++RP//dnffb4t9fVVXWmry+MXP/5bo8xI27dIXeLMmqzKk+BxzZhZb1RUqRgIs2QeAbnto0Sr1K4WBaflF/bwGYOFvtb1mzotnF+leP7ChTA/QummF87G0bnbi5kr3Hjn0uTt39pfP7D9YXvnliNIi1rv7Y0OT+xzry1/9RO5GZzfun8xO94
aDmOMsWmzDYcbQ+mUxUJkGzFbCiljezBhrutV3aSq8ENTOg7Sy3K2GhYErNonD8Yg9Uju0Ytn/sz/eDBVbK2x9z/43/1Py3/8325+ww/Ou3jj6a/c+fSXNpkdlu/9n/7p9b/9MzoYPv3MVb4txXnvMHaMKjaIztvk0fUB5n1/8cz2dASSmq5tClPVPTR1Kpw2yyBRM0dizDIGNqnFhGCyLBsMBtwsvbfDsijA14swvJD5qqhX83vzoyDKAGClTUlUkUCBTFG1zFZMYWl7Ml4ulk7IOqxX4ezOGGAfAITZrCmlRPhGzGJ901rLZkRPAaMqqgCWUBEEYc2XIFkDAtbpI2HVvcUKqococ8fL+fTcOcyGme1NRasExkHQBACOwTQR1HaQJM8Ql3tf+eT04qVRszM7aN3RnhAPi51677DeuzHZsLC91b/8fLj9nG1aS8QqaBHQWGIlFmtVo2ERBAVryMWILcfcGwPUozE7u9nb3hXPXSKjJG0+zs30QTp7Ti49INdfSV/+qA1zC5GEhIPf3FgR+9L5BrJ7N1fXX41NQAtulKEaZG1CR8zgDIOanGzpUCmEPrOsMRibdR07a1UVOkxuGJ94b/a+99NoU3rh5Qo2X8u7O2F+V/F2amrvq4S5ec/3hLe+C8qCAKqtg0BkXIDYZBcebm/v+5N7jgznuUBnMVTLOX38Qwdf+dyZ4+umC0JceSfdAua9xhBSENYUOCsMc48qhLiuXK39dgpC64QAkzK3lTPf+f30lndnUMECXGELkyjL7eVH8sKkj/16OuzJ+ciigIRCOXGKiKSIyOuU2ToZYBDX3mo8basAE6kSkpAoQpek73Y3zdG9F51Ly9deGT32ga3BXZy6tPXm/oEn8wceSABRGlH+3/7OX/6Jn/yrB8dq0QGktkuTwjkyCaUDXSl1qAy8Fiah6tohDYAJBICEwQlYVANgCUaOWoGUxCKB4rpoIyoOTz2AZ3bK+TI2zII02C3LKvcny2s3jvYO502KViWzWS+CCAnAGXLeWIcuN8pJGA0RGeMMjEo/ziGzcXc6lOgPGqo2h+qXy8PV+bwabTzxJ//8X7117apcv918+YuTG7evHRyF8QV48onFYsbD+y7u9vH667HLrvKGe+AJOF5sHF8b1ws5eh4XM2fERwPoocLj0pz8oT8Lu7uDusXf+CA88zuQjcE7qBUSQVdTYTNKIF3fH1Jawvlz/vX9dOeeuDTY2u6PZipttuNgugvawOJWQ4N89wm7umsPn+NyAruP0Nu/abm6XLz8Kbd/bDxIlhMZqVecald52tykwxkd7endOTPLsDguzplv/yNH95+1WaUKeTV2e89vi1I9l/6G745C12bZOSjB9KFrWlBE59EoxKUTcSZTGvSCqU+ShIzllCQFQipzb0RVJXTLLKMUUmTMnbVgETSkDjXGPsQkkqKkFgUcjWKLqV8KogJQRlS5KkONZpAV3hBAajsOQVQtEGeFc7ntlk3TzqLAOPPdamXWoBCBzPiQOknRWh+6iCg2Mz7PuoXm6odu6CbVvdXd2pMZmBS7rLLIxJjm0k43q8ff8hZj7i/smCRf3jn0PE0ny+f2vnK8WLGBBy5dyjQRdct6cXDz5vH+nVlRtK1KopBHMqYNKccIhiirjpOwsRcubE1z/Mwzt1qjeVYIuv3lXE3sWyUTrHN5lYWUGox3FvMtmm5PB6GeeZaj43ubk+07q7osB5Tje979+JsuTT76saffSJjCaX6ITGI1htZeRkAkJTmFpQLh6Whl3Udbf/ITWUIAWDunABUVgMy6ugaAiGSQCJBgHRxaN5pBCJGQFNcLdFICBSBAVkUwBlCVFRiUmVlPA0iMvMbkkajK+qMHFTjpGw43QCQQofXUCYxDAvPAg+9w5SZBLDIPs2M+3MuigFGDVpKCIhji8Ujvv6J5Zi0oi6QE1ikgkAW0oIhZCeUwSsabm+bJ+2KOZCnrU7dYRJ27kjQwWg8M2gdiJTNwwzLxAlQ0JTAA3gAjcacaCIcEEY1RzAAVUSm1aEpIDKHBMnLuAadWGZrXCTtuWoyKzcofXB/vXEyQ2vZIR1PnkaKYZWe4hy6YdqkE4DwUuRSGXAlJKLKgaIpWWZdz7GoynlWAEwijILkcTKZMgN5Vk6ZdSUn68BXcGKevfcXMDrCtC0NwdADcCqI0tQGJIUBRsB8ils5pd3SnqpwEF0MPy2VgARRJKTNpuGrkpU/To490YEKzYlfWIXCIWVRgDrNVckdYZvV+fRDjMqkhAmBrYc0VYmUDQIAiQgAsosrCCoYUUIHklEEHoAqEQKQqIiAGHdp12wwRFVUxAaKCVxVVFdHTKBKAdZklMgopJiQojAscvFpMiSHGFHPrOtZGXY9gmJwSCSZWIUyJO9szoFgRZwx6ccjSJ0OIhpMEYUFFUTRqUFEFDQmKSFdYB6qOzPq1WqubCYyzjtYZblVWXsvnXQrDEEcxSFcvUMyosgVb7bxCljuT+Y4TZ8YYY9m080aC2txmHgKBdFGJlh1vltlJHWNkWNXieDDyCnnXtYXP1GWKYEjBA1XOZNyuZuHFvW27u1o1HAJI8tMqpJWZsyFd3NobDe/H1dgUG35zuIxO0ZEDDrEaVSEsYRVHldF5DVFgOwt1bVxuRSR2NNkyYLFfQb8gJgAPBtERWK/gQVE5qiVVQ0DYBtu04FbQztxwQCyojM4BCigAMBpEJJCw9iaitesuqiCIMZhXVhniijOPlMvxEgWk6UxeoCug6wkgdQmEjPHkLYlJyyM4Oljdu5fvTpMnsDbOV2iHVTO4/bkbmTnvzl9aOr1++5lL50aD6e7+cuGnk1RhAe4kyGI+L8viwrlLL904mD526fwj993cX5YhP+vdnf74sGNFaptObBJrFX2QNhkrzKqikkRFkZTWg0QJzMxJAQWSNRYRQAAJ3niwRUFR1SKhISISQBYxCqcTTwFRZdQVI6tLgkbRqpKhdQopvVG9WWsvWdWKKCNrQkJOyQAQpiSRYxLhxOgKb4xtu94Y03aJFANwWeSgooJVURiAigDbsEg8LgqmpCxu7DOS1CRj7G41vX53PwGjwYtnNq9evxNY6sDLk9Vke5rvjIZFqX28dGabUG7dvkMGiLCJ8ebhUkLyuRNjTjrOfHlwuMw5kcLGeDgaFrODuU+6Zd2FzbKw0xg4z0bHi7kp/KwJt+dNwyY3IBLGVTXwxcHx7OhoJcTb3nzpK8/vbo0LqAkWvoItNwlBPvr0Fx542xMzAWzaOy9/4cG3vhmLbPnycXnthU/99b+2Kfl9btAsF946TxhXDVsUYZuCQyTRvp5TbrrBeI7VNNK4NP1qpWJsYb0ILqPD9ZhfMzTcBV9mHNRaCpGlCwQpRbSrtPELP3P4uf9rsvXQmTbr9u/g9g7mhRYG0Noom5fOLmUZ48oUw7qt7QJ8L0e/+B+r8WbYDUU+miu6nALEukts7bmHL9L26Nr+oZDsjsoQQuBw52AmSZuOySZFk+W2jwEYiRM6chkOfSkhntue9MtVFAOWB6VbLLs8H636aMWjGJNRE3UZwmA0RG
u7bWnjLlIJ8fy2nPFndfgpIrqOAghKoEYhwTKFjpde+UdeOZd3F2PYEJVlYOeWXdLq0ulfkIM4oxp6kYS87LW3R1oEahQTkTpcFiuf98fjds38TNfyl+5NuhCk2W7bZX3Bm2OBqBpxKk0X/tss3N90hsd+fXrz13fO5jQ6Mwb+8f9br417q+F5sjiwawGJJNTAoyNNIkTkjFGVVCVCAyiQVSFlfNbRHU1dEOIIqhKAo7wPlb2zS9jnfVuWTUANJ8c75wcHYf21MaWQbXOpLAUIFUCJBYRZhU0mVtUdQiNzWV7+96DFy/G2Bo3irOFCSId1xmc6XbnmvawgTX1g+HlsPWe2H25m2N9MuMm5sY5stZ65pasTUqYdzDPG9Bkhk0dC1PazCACCqP1KgqhNQKKpbn8eHa2mGYL964LJ+Xp/o2beTPBsADXBXQQBWSV/nRBEdC2rbv94f/r6M9+f2jhjdePx7dfGh3fmj71ya6PfjyEgx0ossnBYliOljHDzimNdf/eZ/Arn4aLj/GD3w/v+bZnb9+4cGFQP/P77vqLhbM0uHjn5t64W6TlAS1nRXcI4wfaM2/fV+umkz6/MXxrxhkdHrTVg2+xXT+UBuomPv1Z+9WTo8mu7/SLtNEETnZtOr5AH/qW69tXzw6ce9+lS+sXN84M8p3fPFrQmXe+79O75h0f+YjuvpKe/0qWQV7I/t3XH+zlISyyNjs+ava57GwU3TjnaTvujSiEYaYaqrFZkjlRk2DQBenAclm39RL4VdflT/xhvHqzuHF1YyiDCxuT6XJnOV3PivnxzGWYO7MaXbrMGptl1sWGc5d74xfTkyYtiSTrlNOqLmyWdfJTxndLN5vur5WZaFi0YT6pnGSkEIJhHmQ0Ce1yJtVakVXil0c6Xyw//eqXnnjPrStvf8eF8dvCFGm49bb3/YGnv/zM7aNnzq5xLhoiLpazOoEr8uk8NbHenu1trvXGGwNIScDMwnJZ12Xu+r0tAnM4my1ndSvkqejYjvDkeH5kj7ZVgoQYa9rod5LGU2f7r9/avXfUnN86m2PP+4Dtm4GpoioQ0IpGJyuOnSEVUQaSleoACMEAEKwqRoC40k3BfQgREiKtHAgrh5QgmdVupIKIIBAQgAInAVJjaPXZgaAJ7reYEEhVhXmFFkGAVfPizR6s3mfSEIgIIgGiqIiCQVwlwKagvNN7++Mfefuj7+xnQ1VuozrKpBxlZ640d38/dzk4kkWTbItWDQMF8LOJvvxieu2ljIPM28z1VQ0VXoZ5hJbaOU8VqHSXr6RF5KpSMMZ5Oj3QjWHsepOX4AtQw9ZjVqSY+US6YAclLypwjFHAgrAYa6SuoWlzVq4mUtVxBr67CeucQLU9YUjekekOlAQwCWhIrSH0RW4MiI/JcRPr1trMaL13rwNWpEVQVQYmaRvWYIyQodQGAyhJwTvqZqGNZLwwIDM3jXGESRWptSVdfpBPb0q9sPXcYYLZEc8qQya0rXHWjjqhqVlat9bH5SDue4gJUNHZuPKlCiiItInIWDKSIiMYARQSVFZERmQ2mIrde/H3fq3Xu9xODvypU+7jb8Pl8uiLvzWuJoYjA3ESa4kROqWPFJ3XiMLAbUqMpuh2il6HMzRZ7uskbWCAGIPzjpmFWwRk5hQZCFe9CkJCIlFGDoIAIrGtY2pTjED4n3MiAGBNVdtmGZmETdtagyIROdmYVpJYUVZrDEDmLXKLRJAMofL9XBOJJUqdQnA+VxDGyBQFEmtAQFWISTwaUABlMiQCnKJEVWSQuGIVoYACgxArgrVYBz9ZFqKd3DAkMuoIuWXAJu/m86g2qbTVqGuxBUfOAqxNZfFLnx9JDa5LSBbJeVsWtid893/9Z1eMuCc+lHwxeeXZa5/+hYfe+6G3vvc77kxEYwNV2714Lt68mpVZbKSCAt/z4eK7PxEHhYTAgfKCwEP9pWeLrTHUHkCD+OrhJ8wHP6bX7/Hvfs4cV84a8iYph5QscrYI5pmX4cYuUIS1dfELil/v9hLvb+Mb25DWALpsCYuCwEUJ1O2AtpzEmFxbhnqGKapWmnXQZYLARtE5N+5CYkgMSNASYAvWQK8LgM53uG1N6bluZV6bzIFtxaPt5GJRkVBk1ehVSoBIHqRlSbWmQKiErJCBLUGazFBazNAwgIIHFev6QwyTNjEiGVExNnedw6M9M+rlw/z2wR3fyTAIkXSKwpPpdPt5McCogMs4P0J3OmYbfPrja9/3liX/neVLr+9OpuX4/OjdH+0+8eCj58+9/8lv+2d//+9+4+rrSQwSWoDS2twat/qxmYHMikKEBhHQeUsAqkAAfF9ULIYsIigir075cN9vjGoNrjRzjYgk0iZWQbVO3Or9e0Fo67VRngK3yqd81jFu0rQcYuEcGAMUu7Z8ZXIsi6ZTFlWd1s6Prr5+vee7lGPG0DSRyGbOGkvHs4XJnLFWVIuiWENqk+aeOdYggkSsaohIFAlEwuWtrcL7G3vTZWWKgRPUfu5T3ThbnBn3ThaBOZUIa/18q1dc6neGaCgwiariURtsZk73uySMBCmqNS7GNgkLUBWiJWpErDWeYNAtmhgz9YhCFshaLzQuvDbtEunBSxu7t47suLhx9+sXNvvTNsznh2tZ57gODuilr13DB/HSB3v7rfn64Yvx4VN7t3f/yHd+R3zqme1v3CsNb57JlLGZVzGEclSqQmBoUS2gyR0ZTKoEYLxlSSpGBQUVDEVlJDVgXOmTJoyJwJxUUKZOezTrkAMjUUSIhJVZCCXFiiSoCEgSJUBUFXS2lcDM1hC3CZFoXBx1srPf+1+tf/jbstPjo5ufOpndu/zQ1v61gzxz3a4LlQwK97YrW6/e2en4wnpXlLkQFkVxPEvWeGTNM5NSEKlHwwGlEKMuJ3VpkBx551PVtMuFZTvKcysCDRYmQ7GLxaIRdpmJ9VIhyygbZTbPsnmzTN5zhDZKU6eW2VqUxAnhpE7GUuYdV42I1g3WANxGToI+KdHGoGcaKXOyLjtYzKatzJLkHVfHNM5o4JBYC9/fr+skrJQt2jjo+8CpdVA5WVZhd9b2CheqiJ7I2cqkinmZQnjzdnw/KvLeo1KMbZNqMJ5V1ZBxxlrq5cZpsKrWUdVQbMWRIBELg6rJCofiOh6ncOtzXzVrN7/8hd/6wP/jz6asfOLBt29/7lf7kypEamLIrD+exHZyUmAhz+zCa5OgYhoct0bJWNMtC5OHGEK0RDGxAyzIBLQv/NJ/eOTt715++Zkre21flAEmrPW4d+aH/8r2pcdODlpz2L78L/9N+vX/tOmst3bJQcFYpGj9G1X7lh//n+HbP5b1Tk/v3gtvvJAjkarkbkWkNmhFHDdkfUcp85Q3qRUhIURLVrRU4w6W1b3f1zTrdPpsnSAgEjmDYmXlZQWxIbEYj4VaG23CLFPisjCo/m0b53f+7ev4+V984ySW7DZnrXXZfFmhMxk5G6riwoae8ft+Gdtm/fLZ9dOP0a178y/9jo2ty4tOt2O5aqpdnG93gfK8x5ClKqbIJLZj16bPPX/6ww/EwRjFgQBZB0
QExvuMd2pDlnJVrGfY6vnRme/+wPDRx+Yv350//cbB888Pjo+LyAatsCC6yOSbCE172sFrf+O/P/Nvf2N33JmG5NceDDe2PQchpG4XEKDxECNlGaQGYgRrwVrgBEogBA0Dmg7K7X/294uTL1949D2LX3ra7dcMBRiX3Ysv/sU/2v/WD3/rhz9Wvu1jX97+vGvlJE7U+SaELonT1kLrUBxlA1MaEoV2LR9oYdsY2iaJkbVTp2xOt+/eSm0C1YbUo+tkruuNK7Pc+GreaMzImUU1jWghQkpsUevqpBwRO2pDbDWWZGx2fwGNDLrV+ArQ0Ep+BqDKoorIqqwifN8qrqJIKCqAFglElQjlP+NQFcDCrXs3B2UWm8la3hmsDeoY7SpHkpUGBAhVSRkwkHeZSRBYU9vNO299bxqdMcajz50piWN0HTsa9SO3v/3z9qSSVlhlmGXCsT3YWdfh5Gu/Xr+ysT/beeQP/zl1rmlrjIlfftZdve1YG6DERkMrjDGzSAZRsLTazVtbtLOKs2BgkGYngC2M14p3vtfpPn/t1z0H4WgiqrUq7I01oMyCKK5l27AYj9CI8QktKpFlUrGGtDpJphlunsJlXErb63bBO5+SxmSWrUkibbTOoBOyzFGNLUxuqKk5KoIqJMXVO2DIeCElgxyjEhpjEAFiQgUiC0QJQEJrVQE1IiYUa0hJmAkA1RiFVQsfk3iiTFnEQCIEUonBGMqV2y/8kn9jwMczPw/Iomw5iEEQBFZUMU7AOGu7jic7u2+8cfbR96nzoJkBO7uzOztp9Uxmil7VBD/IMUI2MJIgW9yd1RMi00q1HDvzF390//xb6cFp346nv/vLRqChSAIqTFpAC2mpxriyiu7a7TPdO1lnrZN3u+PRl7a3K/W6TJu5WdQhiRIZb8mwLhkFoVKOqizRIjkEZSYAIDKECsAsTBhElXkF17Vm1fUnZiQEeRNlikQck7eWDHGw1WIewb9+d99JOtfrkgaTZd3RaPP0RmSAxGjwYPsEOE33TkCarVMbjtNbrzzQTMVGHvYG07u7bXPg7r1cH9zuDIfV1z+Twm51a2fNGDSZy1PpW5gvkXIC74xFSZSixgYATOZJ1JH1mZNACcAWpIqAFjggWbAdyXrLjXcWH3nXnpubGroLi7MjqBLUDN6CIhgCAA7eFOvgTMaz9cnV+NqreTKXpqovf9XufKPvUpicAJbWZfv9C5t/6n9Ov/vTnbvX3UufcftTvPcyhLy9dzvj6+l97zx/ds1Pt93+M70zY6ibOeSX/9KP3H75+QKL2etfGuXN4VGNj32A1s/0Z3vpG78e6ts63Oi97z2d9YfkxS+m3/tN03hMJsGF/uX38Xq5WLSTpXTe84nRw1uL9f7l9JYiLtBfDq/dsl/8lHn+d06fPrMLne/8mR8/eO2lS4vx9dm9C0+89+Z099IPPJB3YHntePLp33ng3ZfPP/7kJKejL/9GNzuo7949Nyq73BajolnuQgiIDWqDmrK8n5QXZy+++3/8cbl4eXTztZN//Hd5Prlz/ZZ56G2dtQzaRFj1Oy60YVm1DM4Px+AosoJDa4t6OVXVzGeqkhddcpk3ro5L7Xm1g471YT5nCWKkWkLXa+C2kvD0/tVLF+ijTz5y9+b/wdV/Rlu7ZXd94AxrrSftcOKb3/veHOpWupVUkqoQSqWIJGTZbUEDLYxhIMCAjd1jdNM0bTdgPPCwaTDJbcAmCEmUJEoBqaSSUJUqqNKtuhVufnM4+eyzwxPWWnPO/rDfkk2Psb/tfT6cMZ7nWc+c8z9/v7037s0u1jv3DkCQ2z588bf33njx8C1vuf2Od72rbLh87tLmzndVX5ou730+PrinhifHnRpXyMIDaK4q5xs+S8sK4Oj4pKicSIqK+wdtcI0bjbangg4Cy40Hd4PDzZ0NBSyK0d7hgZGHio9Ph8P2mJJ79NJTmxuTV199xdqu677xRsQEyFnVUAWMkNbdIjM1VQIAMELwiAzm1rRpUzMVNERmRFk7DgiNkMzQBC0jEYEyAJow0fqd2AyJTBWYAY1EAQ0dMqChkakBkSIggpryN4KrhmYIGRXXA3MDBENANDAEIjQkA0VPO0888Y5v+YENP6W60VAhW+672hsDmUTr5uAB0KzE/uqVHi2/fm9Lo0dDiZgASMvNTSsaCQEvT7WpyBehG8qCU+wBOe/sro5nRV2FKxeHCxt5c0SFt6yE5IqJIGDhyUapT65faTIwU3HOETovQ4eOXEFuduxe7dxwhvUYmq2+m3MRbDpVLRyjtqtQWl4epi5ys8khqiRUVBtS7mPhF4ToC2eDnx3J/CwIWkoO0daJSefEVIz99hXozzB30Ebi5Aa0Zcd+RIEpZ0hJkiZf0rNvHXY2SFrqe5eSdkskQjVztRqQgqoRYz9k6yGerqjNHoM6z0VF/Zl1vYkgOyCL2dbXTgYNZTAzAsCEppoHhcLTkOnrr0BxXGkm6/27nj298ujGuW341Q/T3bsYI5IzRybYr9qibs661M7PmGgeuyEODDYuw6hC7o09LFbdMg0mapYQjYA0ZwPOag48u8owIHn2AZHMRHIvKYqkFAczcQS+KH5vF7nvh+DMxNgXRKwCIlEls2nKgug0WzZLmKMfnJA3z75JkivPwC5lQ5G1e0vEREXJENUko0QwUsEAAY3JANQ0pWwKjAgmJERqashsZmboiQwoMG1YaO/spZjAl8ucxAExnj9/4eDBoW2Pyrc+1x0vuq+/XMRcZvDOiUAwdM5HNWULjR96U0MbtPF8ebna/9lfGJ2hTjf7+7eeuvIdV977B5aFllNXTRp47Uu2d09XXSo4GQ+7Fza/+3vTtOE01FUTRQA5MN/6hX95rV3CRgMO5MIj5Y/+x/L0M/U7Z2nvzfLlOxKzdELTrXZ5MN0cU2A5MFaA8QScp9TD9c9DnTkDUAmuINY0O/JcsW9yn7ipjJjILEZKokOLPiB4yIjQWeyQp6hqGM2URiWQU2ZUjTmyAZpgGbJDIUcj57JHkXhyEjanYpGcU80PaWeDsnNiLXHJdS3dwCRIzoYOYo/MtlrxpDSv6gJvXYL9V9ecViiCn4xVIJ/Nadxs7jx1e5b96EIx9s45ApaYHXDTlCktgSSpOWikO0ztndHGKtvWwG7B416q+b4U/sKVb/mWr6balxvLFp54/m3/zf/4N/7in/vJ+/uLVYdoykyeiQkINKBJHkRBCQv27NjiitijscNg6848kaqpiRIyeTXzPnjHJqoqDtVALKYhx87SMHQD4zLFtvs93Y2knE4PT2PKflwVIp6Ri3CybK3Cxx85V1ejIR6PvdtydnJ2iKvpI6NJa66fzZumSoKbZdipClRUpTbhoB1zBauBDeqoG5PKO+QAauoQK19sjifH89Oi5N1xdWf/CMyRC5HsKzfvXxlPynF189ZRQW53XEwZLjblpY3KpTwpvBckNC5wmVE9Fd41jKp5sVzVvvKBuk6RoBWbD0k0G3Pb99HYs6tB58enJ0Mu69B3q5ODk8B0Nu/DtFkMqw99+/t/8Td+h4t6Z7qV9g8vbExfv
31E2ZwvHnl8/NRbfFWcHc3i7HB5ceca7Nb35re3rkxHq2z9anBmUckVGGkVs3PsnRviIGiFWekrGYacBiNm58TUsbOsAE5ScoCqmpTZAMmagPNXPrNsj6vDB77vsmDM4quwzmgAiqYskpADWMrDCgHBgELg1IHDumj6YdmPwrzZ3v3Ob9/4rh9akQ7Hy13wu+SOrx/LgF2vVWGMef/saEjQEMW+uzt0Dtx2VT04ngE+DPMdHx1XVVmWpQ5S+9Blq6ysfDg5O643SF3OqStgRIDIXpWlj1d2d2ZDaoIvXd7emSL4+WHrC6dFLr2b+upwf54LrFxhQ6wmDlysGjpbDmllzjQBSlJGIQNUHnrzAV2BAw07ZTW0nQtydctvdHKaOBLUTYl9XLTZoyPNmwhF6R7k3DlryVq1ZYq+G0aVWwGekg4m3EVKftENiWDd9f/3WkXrBfqU1cQbYgjIgRXRJBdqnGjVJSNcZWBCMsgpZ8tF4Tmp9ClQFUL42M/+4teb1eVL5cd+7s2/9Y/+7vTB6vOfeslHLQCMbBXydFTIPI9RAwaa5aIuBh8WO1txuhnbVbN3vdHsIItyBi8glUFhcv7mq93f/L82B6dN0MFs2fddpP2zxfL60cX3SJPbiQzdF39rUvnCFV2OVvo8DKAxS66uPeXe9743sYiH9ye1Zx8UlZnzwxQ3M3nAWqjMzW6rZbKld9llx8ENqxWDL52XiFhsh9F41Q+MqIJCUJZlSug9ORNK4pEyr323SdXyYAh5GMzUpsXoMp+DW73vOzLIaNmyZLOUKGBRu+JydTA/Pnrp4IPf9tbTL3zuzX/3+sFvf3obLUW1PJc3P+s4TkqMrTiaRFwqAZeVdUqEhfMbN/dWf/Nvx9O0W55XKiM6D05WEVDqzXHu87DqY1H4D71v+id+qB1f0WSpfHLy3Ifw5c+f/q2/XYmYCKlAMZaAQ5yHqmDMV26dnPzlv3T1R39s/vIrcLiHoRxWiX3ISQMRQILYgcP1FBRUISOQAzTICERgABmfRe7/ySfO2t8qAUPRDEzmqBT3pCwXv/aLb/zmx7/7//Wnvr63cX15Z1ozFe541ncp8ULKFJra65CoWzDauY2tcbmxN5sFc0DV5Nx4GnB+tBeHvq5LUmwHrH3YGhUbDUfTs9VqOWh2YZWHhORd4WMCY1GBlLY9H3eLEqlH8URl6f8PqSJ0azsyGCKsq2o1y6KqqrZGiOLvIY1sPUA2XMuQFWw9sEAABry/f3jt6lO1gs+tnHVOcjZYv+qTQzMzA9AE5jKvw1jJlFZydu7yMxEqrxoeJkLY+WrZI597D5778uLBl+vJxtAep9snw4O9Kml781YZeDhclRhPX3n53GPPptgNr7w4/+Wf3uySqqlkEzQEIDIR6CI1vty50HLVnn9SH3kaA4gD5VzXtRHOqdx+1w+kN77k9q8zZBARWa86CBo4AAVCH7KKiKiK5k5zsmzssEUlJhFJhmVTpC5XkzIPsWkqyNEzaRaoPV7c2Pdu613fo6NNmM/t9qvw6mdCAEJSIMsiZOQcKUgWLD14ABdSEgYDJQMko3UY2QjQkRkhaoYcLdYenFnAUsAPKSFjRuGigpQ1CxCKgYGBAq1hPrlszjIu981Q1SSKWOQ1HITEOxu61pxzVYB02nXT6i3PZQPVgR2WYLY4I/ZWsi/Bo0nXM4FD0vnJ6u4NINdHsUKGpx+fPvu+2EKa8PBd3zs8+Kz7yt2GNb/yq0VT8PwQBrCMORkg1Y76k7QtXVMWs6JA3wXPTT1mtrYfDs66ThywSFYgJDBao01UB4MMBAZoxgRoSoRqoAQCDy9aQiABRhtEHCEzwzew1kmEAMkHM81I5y5cjYeL1WqxU0FBVpVNJ0Tq5rMZAHVxntNgQ4tRaqXc6sXy0jO753eOF8sHJxdKGzOmNADXaWuraaguaRLni6/99qZy0YzC6PKxf+w03irhy9uVgoJmwJQpe0B1gWHotX2N8zJhz9Mdv3UxD0o5mwikHosStceY6/0vDp+5WYXAVPhVhMxgAdAgroAN1jSNMIVJgOUMdLX5hV+Dlz/Vt0fl7g4MOXYDPPIklKM8s7afteN3Hd9bjU7vOJzTwb1457gggNwXZdCP/AP36Q97Mwg6rsOgbq+duPNX7MYXdtNegif1rX/w5U//zKOV6W/9d9SM+uJc99QffGN1dPFic3iyrO7cfuTwLO0dpsjUbEZSzFEH4v5ksnhQfPyl4RcPzpV+XDR9e1ZceVs6oPmtl2FapaFoDrvDf/zXwvHN7vTgsenjh6ut7e/5Y5+f3X70/EXw6dpx8iev3X/x1y9++x8ZfeiPlX4333ttMv9K/p1fy4HvwZZduuhPrm+dnYxrRwF4vP3YD/2hbnT++m/9En3yX15Y7bnR5tMf/MH708eevjSRo2UrFyaXy3Z6PyxnKeau2snOyFN7esYq2Cq5wtfluXPn7926VZdN8GXB4y6jhlxN9AxudH2H3NTj4FS7lC9vbHb9amd7Q2w7wlFSWCy6uvAry7vbW4vFKsvwhS9+5eDwzccef27j3LuvPfre9269/cP/8JP9aRyPy7oGAG2m8Jb3XBvaYXZ4nGs76dqxL6AMrvboNKecl8Pu7ra5tDptN8ZFXYeZBw4YPB3une6GpiqrAfjBbMXjycXtyep4/8quHM/2RiO/ysPG1uhwrweAgp0aMlGC7NYKUDBZP73JkWZEZAKGdcTO1hkWsgSKgKwIgKwAogjgHnJpARCdIwDNgAxIpkBIYopAkNWECAmBTFR0zTgCAxA0XL/zgiqYma7f3NbP8zU2D/UbCA8zhIcfx7Z5fvdt7/r2TT9lw265okqcw7pwDWrZ93h65HwJRtLH/rHHV9/5w/OmwI//xvjLn+XjOWGQ6U7e9LaxqeisrrrCETBl5MFgb+7Q9eziC+91T1zDswenN18vykCuMADzZMgQCACgCF1C8TaymIceQw0ecjSSwY0aBSVyMJ/bWUsmuiIb1JHLXQugwozFGBL0fUu+YV/EDB6MFGzoU170ZaXnNmGrxn6Zb7+MR/e9eSMWI8wdUkY0YDZFDFXORKEUzBgFohIJs0KaW5cZJKUBQ2M+wPmrPZVBchm8oyKd7vWrhdvcptEu+cJyQswyDMRgy5Myzoe2Tb4w7xEMVTiqGLqqMAPCKPhQ9dFLz+TMAIgIlRxlYbcSbGdYSFGivHF2luLqbd+eXONCE5gIFRhzFs0ZneY4AKlY265kf3a6f3rUDbOQ++DIcZGQyPmccgKKqiTCAGiGRKAmSp4LYm+w5hmbAqaU09BlhSRqSI7YO/69YJGto7mWUBCUzUzSgOzXsWs2B4RsAMAiVgYyTU6tYK+W1UiAcxZwRqQEgERJh5gyqVXoRdCpQ6Q+JVZ1qpJBAFzwmkjYXOGMScxUFTxrjORYk4akYVTHqjgk1z92buvRq93Nu8tIcPHiYz/5R/ndL+Cie/2//1v5E1/QFqhwOafVqi8coHOdWZKozjmP2kfpbeSndnTSf/5TxbueeuSd73hw4+C0j2/s3bry2DV8Yz9/4rdGh6dl
UVdNGVdDp12hnoeUUm8YXBHybLn/z/7BZWaoCgCDK1fct/5+efRp49ptOnr721evPig1ao7BSZhU2YMQhfEYypEk4C4DEAjAynQh1JiWPY4BCgNViz1xAWZp6LgYQQbDiA6BnSKhd8qAfgTeWe6kW7FvJCcjM8JMiBsbUTJYZsfmAhDGxcKBsCe3PVVSzQk6U0lYjS2UwGyY0QyA0zCAru3gEb1jI1MALjTlDCva2I6jbVxepnoSZ2+EKDktAGR1shdKv1isoi2+9uWXNzafLsju7x/tbG21Q6od5NylvBxzRFsCaLV5RSxJfyR8Lg12/aaGE7cYyv7wRA32v/T1J9/3VnW7zea1/+q/+Et/7a/81zb0TKEuqXBAqDlHUkFEA3PAqkmEARE5sPeimVyhQOg8cgAEJix9BYxCxgRMqCkaSKacMUVIQ+yH3M/alOwbEwGADmF1siR0ndkbr9zoJCPgIIlLqKbYLs7uL/tm4ovCRU2Xrpy/f9L3vdQljYinatfOjzbqMvYpDnJho7q3ivOcgSgOSc1WXayq5mQZY5LS8UbgUQCJi3Ej58/RajFf9KuLu5dVrB+Ew+ioy8PBbFpWlxq3XdaXJo3FRIZKBRlKFldgK2khkRnHVeEDH8+Wzjc5WRN86ZmQbFghhyEP6kjBqUi76Bd9nwDPgHQ2e2J7a9w0p7PFud2NL929d3l0+aOf+awfF0VRHB4vyZX39meTSb1Mevdo/vnD+++69MLhLBcbo6ffunXz1btXntpdHrWws5E3Lj/z/NNnd+4/+NxXV/cfXNwZj5OtTlqHgOQVyTFnDehD2TQR/bJtPRGhA0xo4NgzUUqCQBrVlFVts/Zy5zWP2dB3ouhczIMDQ/Drqg2QAJ0qGbCpGICqIAgTmrRU2OnV6YUf/4n28vSnfuOX3/v00888du3Ztz759kcf/dq9m+UEm2loh7Ro56M6tEMa1U2KQp69D/M+RQFyNGrq1WpRliUQq9B8pqc5AaJgjrEvmzpGLIqiQkCAlHpRHBxtVFXt9c7xfFRVMXZR0DFh8Iuc54usGOYpBwnt6mzcNOzMQOMS5id5aDMV1aMXrs2G49UwHxW+LjfO2lbTMAxmkhcxURnLqlySMwO1vFm60bQ4PTiZcEmDIBGoFIgByJehM1pEAeQYk2dH4LucXpudTZqyZpY+g5kDzFn+/1tFKSZLguDq4ElScEgEIhKY25nEwaVceg9U6GQSNBMM2bJ676vCg1p/1PsR+4uc2mHcuu8Gf/9v/wM5HJ6/tVcVvhhsUHVAfe42S4pdFCsY4ajScz/xY5d/4D+YuUru3+v/3381vv4KmQL7JODBKRCh7dTBDg9l6PMyIuSStPCU+u70f/mf3J1PXX7muXz9uDw+rLDqTVzjuigWyDFkU3DZb1FRUzHaODidlUDEhh4NYBiSI++LRn2Tdi8U7/vAxXNX+k98LN16FZKA5aCAWXpZpSIMrJoxhHEZSAyGnAZdYShliAQMhBgqUrKUyTmkKBp9QQBKyDG3lhPnVHjIWYi84+BD4wGGtEyFXv7+9w3PXrg0X37+7/zM6JX54s3DHcbCQ2Y1TsjZMIEfcVEHqmNcMYNK8uyAQJM4buxQgzpx/RB7CCET+Mpr1iRm7MpiOl/IU+/8UL7wllmfgq5GFzchVT6/t/XM0UJdxDxIPEQqsXJi5oh8V+1+7HP5058fSaqgQPJQwyBqYInIswccIAMAgyWICmrA8jD3WTnoAMQPyZzVje/Rw8pS9j5Z9AU59a61R2woXvnCB9727q9+dWn7J0LAVcEBq2a0aI8tqneuLMvFHDJMF4OAL30KQlw0jS/96s09loQDZoLJ5tbuuJnUrEEO7x+fLCNREa0/y4MSgaEBDlGZvSvc4WweISEXdcmMFNM3TAfMHsnxmk4EWcSQ1ntnoqYPWzsAD5HWqAYCqIYKhvbwtR4MiQBUGW02X7RGrEn6lBSH1eARhRDYDJECgikjsGpUGIRQgSQRHM1vf2Xn0cfZGyByKNAbGlW+SQU1O09I+HpZe5Tj4d7phDxJRhkg+ik7bPv+C7+yhOPFnTeqw8PdPmbFCIgekgh5ArIc1RMl0klRtmf75p6FsgisGFfeoQEiItehLXabx74V999gG4zBQilRmEwlrRcfkgmQMDMaYkp+7aaQNfwbvTAMmfIhqxEXsUtxYsXFbWnqcPnR1hSeeXr63HszNRjbdnYCG81odjvff8O8ByUWZAUG4qBG0ELilNkVAAC9YCDwHswZoKGoaYEOOkmFT+/5AD39XHv7TfnSv9ucKyGyC4IZvcuayaMmJXLknIqYZihYshFRVvPmMkDWjARgioQAugYyePImA/RJh4HHbvvaW9qsXKB5UsDVnTfIBvJmGNkzMekwgPNOFu296wUhZMWynL7vQwwhWCtlIe7xne/90/e+9H/fAHZ3bms3FAopQw4gCokogRUKy4OFXqs/8Af/gze+9NpvfuJz3LWXN89tjCYPlmmRMiMlARYtPLHnkIWR1CDmLEBMkEwR1q09NQNdr/QjqD2EhyYzNkBJ/A1WURILAGenSxeYinprPLZo73vukRHqeOexanrFTGbHr3fx+OjoeHZ0uDVuavL92erRi1euPvqktC7MhrCYvbC1E/duw9nB4sHesjAeV2XtuwQl4Ii8gMr8ZHZwMkwubX/g+1jecvbaF8tZX1tHzrGrDJ2oOMjQ9+3QSuHTrGv8KpQO3TqxUSAz5h7UB1qEWyfjooDCwwDQZRgSZIU6wLgEX9oyYSzhwX04twlPPtV/4Yt+slE+/yQc7CtnrLZytOA6CEUc71743j+4f+fVs67Qs/DoH/2rw7HhF34p3L4DUYiCHZ0FUg+5le7uI++Z/OX/rtys5CP/s//yL6yWN3d/9C+W3/6fVFXV/vzfb/Keu+r9pbenc0nKfKFP5Z0305290Te9J154cr6SYufp2YOXi8M3ZXkP9o69uDI3FE2g9z7Iya3U+63dXSsSoW4Uw9lX3yiH44IMhrk7/B27uvGWb31Hx7p5uFp95d9udItL06b7J39ye8R5GSbVZeZFQXbCZf2H/zI+/oJfvLn4uX/u975qy/1oeOvXfurenc52geuNjR//zmE23Lh1JJ/96G6ZSq5R9aBh/8Lz9fjq7c/8euRDv7Mjm2M0O3lwnWNmN7Eh7t+/Pq6nQz/keGbM5ApMSxNkz0GLoihW3UCgG9MagSXky6Ptm7fPwuTye7757Xdfv3F6fJqD+cmwNaHZ4VDulke5bW9+5fRrr4zrj1S+AuvL0QY6j/X8yoXRZGvz7OAODvHcdIoTd3jU9zlm71c2KauNPj/Y2t066xZJMrRyFvsbbxyU1UbX5iD9I1fOrw7nmnKv0oucn2yb9NMxX9hp7t3dI8XRVr15aQJfPQKAkp0BipkzzCBrRu86zEOIiM5AHWEgBFVGNsP1KieBmgoQA4IqGJIpARghETBYIDNyFSArCACYKooBAjogNVzLzgCJWdbpVEYBJbD12UqEakQPRcu43ipdP7QRgXBdkgOoIQEX7tqj769ppzuZFZS
7vsXJeFQ1bpHP1ZK/+jm4+TrFCOQ4OF9yTZbQ1ecu1nWT9o5xGtpn3wmPXxiWswaEQDCK9YkQLbMjZEerYSE1t1VZ4W7R3KvLKvqQEFwoQAyScWDzAYqquHJBv/glB0hxUACTAfNafu1kMXeeXZiIeQRkSdCtXDvPAuZLyNFkEFQ33ox5gAyUDaAD76swsXaZX/6aq5gP38g371VdjblUdjZkZ3EtMjWCrOSwAufTMPDmtivHraAczxrp7OzYRMAxqJr3lZd8chbGdV2FODuy/lDPjrhT2CjElDjJ4izlDN585br9W3T7Bg8tg6VhgBK8CaKxkYhmNCMgdENWX3pvNix7YDYRIzADJc5kaGixw6w+eHr5ZZ8wTc9NipARjSjmxKGQpJYEQDPZvFsenx0+ODmer+aS2pw7CCFTya5ylTP1MlDORmLMQAyGlIFCMTU/ViwYXB6SUTIAjTENvSKBguPCB++c+71WURYlFI+mlkhT4QI7ToYm6g1NM2j2zptRFhoEKufA+pTVObU0mKijQGgMqIYCYErkHENCkYAeVNgQCUgt9R0RkXOWQNUJioqaY0FjApFclIEEmlGB82HeWbVzYbYx/u6/9t8WOzvti59943/4u0paPfW0bZ1zE/vW/+xPfPL6myidK3wegC23MTvCVsGyjmsYjz30Wbyrz5dehsVLX66ddqet3rm5XO6987t/dH79xuju63L3us5npDp0S3bm5weju/v82M68xIROY6r7+OCjPzuRwo1DajuiET3ydhiNTWSY982jbw9vO1y99kqd+/nZEQPUl6/GaSkBcdPHhD546jO2S6w9bY9gSFTVsSpVwYeASbAuAY14ROgNByzW41jVdu6mUyg2h2w2qDfPoQGqse0REgihKbEXTa4qkkS0TIzes6WE5IxAzbgcETnsB0iqaQWI651+FEUzYyd55Xww9NbNkQg04WC0WhA7jGqrnk0CGCazznPlNs9tHx/ux1XKi3nlwvJwuThdKmBmDA6Wq8GXoZluImQYjh2L8JaKka1GcmO485XNwsu5nWb8yEmk/ubr1XB1delgev6ir5vn3vc9f+yPvP7TP/Ph+Sopra9dwfUSpAoiZoeOCJVKIrQMxkwhp6jAAiFDyuydL4E0OFJMAMmkV23VZBhWUbo+5W4Y+igrMQORb6ze3Ll/JFalSMfd2YyhxVAFu7RTToJ4J12bLUtuoRwVsbfj0xV6N619jvHahc0KDEy9p9kiV6E8XXQiMgnlfN6WQD64elqSD7f2D/3Y+8CP7O440nun+1cvjLUfrt89Go037x8dJkNARqA8xM06PLE1ubZRBZECM3hWI1USAGVQsmVMyz5PiqYBS32qQ3WcQXJygJA1MO+WIVEGrs8iLiUPAiCYlX3tl108Pjm7MpmuRF8/PLh07dq7n3widfPtSaOajnPmAvvVQMjLtiOH063m9tnyp37ly4XDZRyeuzJU3POGHC/6+21Cdd39s7e+8L5HHnn8+ldeWdx5M56c1lsh91IGPlsMOerGxkaztXNyul+4oiJvOYkMlsUhIFOy/HAUw5ZBEyCqEERTFTBwDhgcYYoJRRhRVRFdhkyQkQtDzKLIwUGypKeLNl7bfd9f+xuvt4t2md+6dfnZR542n5Umf+a/+uv/7F/981/7lQ+7Egl9VY45FK6SzlKPmofsQ+VL73IAtMWqdcwq4thVRThqWzCHaqs+OROGUG2M5rM9RBzXVU4y9pX1oRMaXdgdhtVg0g+pG/J0Mi4mNajuVOcOj4+WwxIc7lwYpZgKFkTiSLhCSpSje3N1r6q1LsbLeZ4tVyKZmMsiVJ5mXTsTKC3LoC1zTsYc25hGVqDwzng89LEKgdD6mLSLm5Xf9H7HIja+9kTOjSVL1ez3ea6ZlR/bniRJy2W7tTWBO6f/e6uICdCxqjkmM4opswAQDoOmiFFMxMrKFSUhAagQWFkEibkHHAWH3m7dP7zw2BP/5V/9C1/5f/4TOz6++YtfvrO/PwbaAWSQHV8yUeyFPLvCpYFvIr3w//iL1ff+8Fm9dTq3gkp/cVO+riYGAQkpDckzEHMgXh6f+lAWZeh67HOiDDu+nDw4tX/xsUX5W8T+/M4V1HLZLtvcLQ08IimgL/De/fQrv3D12afPbs6PXnnNgAcEJTRgz47MZQXcqM997wv9e58Nm5PhTq03WjYlQ8ceQBaOwgef8+d2/Sx1v/M1kmRmzI7MIVfGiETIrFmcgib1PmTkUNYRMxoruIzZl6TRojP0Lq1EgRmKtFy6AG0v+6+0WzinV1+tPruHM9spdod4JklzjBQcuca7RhJZQoEBUWLWYjSKbQdmMmRiBizQhyElAQWvQBoCWycaO0gWQnUJhi/8lb/65PxP7Xznt7UuH+wfnD93qT29kbIphDZ23qPERIGSAoImUcehyKlsExQBJENOmgYKQdFJ17EpBQ8xQuwf7rcbghDkBBdG3Wbhbz9wC/KuEDXzTkGJCUAd+4SoIqUn77O8dPPp53/k5PYXz8mhlVJ5R+Pah0ldY86DudwLVyHEPjeTsarf3L345vHdEfPiZNafHteOq8Dj0cg1092tyf3j+6fH7TILVg4Vg3cBGEjT0JWeKnUqkKNs7+4eLg+yaY7GjJa/0TcldEi0ntauEz9qSVXUZK0fhodQl2+QIhBgzboGhwiEqKBgCGhghGQRIPqshFm6xSIPCRSdqhk4NRQkZiJcFwYG5JKbFmWZh/aVT4Z3fTDTJCN7hwCWYhdM8uL+ra9+crcsk4ilPqholGHVVYWTtEKDsuCxreRTvzlVJFElTriW9xiuEYBELgTJ0TsaTm5XzmCYLQ9uu8lmU/ouxl4qJw4KA46n+we7FHIeMpIgIWGSjOiIjYBSUnIuZ3BIZqICyA4dgyMysqEfNzua52VBEbF+5Hz72GV+y1O2eSVce5zne25jK9WVAfTDXKY1jJ+9/5tuu6xz1wMLIgI4UMtBu296N7zl3fLJz+Gbb4wCAS4FLEtbTqquXYbxxCsClTB1Shae/8F49VG5/MGdt3yz/btfwusvF9YrQAYSxUxMZZklMwC5AOhFEpmKZACLmo1xXbGpApkZuyzZGSMiU7YUidV3R2m1J7Bl7FyYalK3jE3leXcnD0POhj4EH9wQu699xq1WxmSeu6rYfup9CZzz1s73feGtW7rageXSUVLMAwBjBEiMmS33uTaoEI57ybl5x/Pve/EzLxWQT2ZHWfNWUy6HeVYJzCKasiEiAdpa1+1wENW14QxRTNe1rIExoqmZrdNxa0KBKfxeGxRy7k0hiaoWkrVGvLS1Mcawdf7c6OpTXWv9fNYUNUq7WU22ntwOStbxxtV3pCK43XO4TBW7w698du+rn/7gE0/df+W0n3dd4RgTxOhRCYzNgXOudAX1KHevf+nDR9Xjl89/84VHt2df+9gYOsgAAAiWIxXbj2WvQx0OVt2Od1s5Y9sWzVixEHPOBlCCYQDIID10ADyCMAEByEugjRfT5eq7/6waP37y5vDxvz8aP3r0YG/62IUVh7H4NDeoxmaZurnOFhw8b+
y2914P9fn6T/z0sounMZ3d+/QO+7BdwfEZZOW60dybGWxc2H7/j7XN48e33zhfv7298P5zTz4L+bo/vNHtPFK+/dl848vp8G78+sc2L//++5//zC6cclze7duBr+zuvG3EL9HxZ3asW+U+PPJcaFq+cd+Wq7BTgbTaFNFovF30i1NqZcilf/bxky9+6lJj4CpbdVvjEj71q4df/fnmybcvrh+cf8+3dXduhTbh5mZGKa9d0pVYl8QsOM83fjsdf+7kK5/eOl4UZZFSWUPeuXNn+9xXtt/9+xdP/EhbXzSbFcc/Nz2+vslpNZCrGrh27ZFn33pv73j5uy+aL0+9Li9un3vnkx5bVVkttbTal+7w6EbVjJuqNiHMaSO41A+AUhSFCjBLl7rJdLS7UV2QGkHdtJSSpO8ubvi3Pvrkl+5df/PolJhT9vlYmrrcqEqXl6OouppTMb4z65n75iLX06KPQ1GEYlTP5kOaLcHME4MIDO1ZXBFTUxbdataEcrQ7Ojw6nYzG08l4lYYm5D6t7h0dSi6SSDkuts5NvbWV8ddfuT+0mpFUpKkeJkwLIjUUVUbySGKiiEwGwCLrswwcApgwMwKqERIqGAIZIKgJmBGgEaFHcIoEKkwloK6NCYC0NpkBSzIAMxJFNUBS/IZ4E8zW+FUUBQNTBgQTWMt8DIgIANezjYcV/nqQQYDMhNzN53k+I0jFxqgZT+qy3Cy4+MonZl/5zfDG8fi4Yxe0H6wuSVN1+6vdYH6xROFiVOGG4zpECJg1xT6QETD7goCUTbOKZN/UDNlMSZILZW5XyMZFhUSi5l0whr5rc+nj0I5ycqFUA6VoThCcgqGldHYW6mCK2dDVY1kpiZgREljstV2iRsLctZFdMElgFPsYqgCKRZ/t330c4uAgNuNtIi99omCaI4Ayl6YQecxPP2qOIffcMweep5J3LxT1HbzzOkrUIeeBfXDElvYO8itfDSW48+c0xzSsLHd+a8e2mj53nDpkAYl5eSx7D/LNl/F47gUIsiOmzOAZJh7Udxj81Ufd9gUopu2t1+T+10YYy7LOCDl2KgMZ5iEigyCCSFbxyUJQu/6qK29b7CGrIatJHCI5BGIKlQCrppS7Ni76fil50JzEjAgLx6KZNXrIuPaHC6CZC+R9IC4NfRRlTCLi12pMyUyqOXsOa08HsWP/sFdkTIKYwCpGT4ygzoDJITjIwmZg4ICGLJmtizk4VkwGlI2TWSd9AFcRQbY1OLBE742SmIk4Cp5YZd0w7YHIwGKKrKyQwJmai5GMSQkSJAIIddku/Xu+6z9991/6kDy4PZ+6cvOySd586lEu9ehotfPoO+4Nq0hu/OR747kL1b3boMqkdVW0HeZsCRDJcsqr+WLS1OJw3g2pCs1ObbO9PrcVQTHb737zl5vTY33zK3rnbkBSAkULTnaObz34v/1I9cw1fsu7ivf9cFWeu/V3//IVnIATqComxHqkbkzZqsLPu7lduIDvf+Hs4I3tBY2h7s56KhqaTCx1OigquabIhrSzpQyG5EKhiIolOpW0lk2rOUIAkwSpM0YoGL2yH5sBLM/QNVQ0EM90WIJLVJaAAcCRZF3MEAw0EpdAaElVEZDBMzsHqQVNKtl8MEPLgogmpkkYSHOkghyzpkEEKba+rKF0psYFS+pQhUsfz2Z+XCBvoHYptikBYajrctKls5jG2+N7B3uj6agY43hSkHY+hbxQ3M1xcWpcgJ/64F1/+8W/8WfjPcL6A+/6zh976etv1tPC3ztwxTpZxllQl/ret3zzp6qPvjI7XZgoAps4NBMgQiaEwYIjQmpVa+8Dgc/elwWwV1QMTj0kTwQgmgZtvVcHHWCnmnpZtXFY9iJZ+yjRAMw0P7wLln3aOH9l//7dDvT8tYvnxmF+eP/CyE8KW3bduCrP7eycHN09O5orARfOsu7W43FtO02V2+F40S2GKAkSprKkPGhveVQWsR0QqHLoTEtyHvmZx86tZnN0aBjO7z7xYO9N4WlRjfrFWW6H7Ul9fqe8UG5cbCqOUsbExJqQgJynDJDFADHHXiVXBBsFbxbV0XzeAR3ntB2wITURpzbyRUuGxBQ4reaGhoDK2OXYyzCZNmdd78g9dnG7YpmvlqRGYKeL4c5iGBVe+4QUL13cnS/aUlKcw0uffSBmWPqXXzy5uOWmzf7jF7dH5bhf5Rv/9vPnm0tbFyaXv+mD0/e954v/9hfrg+NsSw/iCqgIHa2k052N8vj+KYKhU3QeEBWptQiinj3BQ06ZIphJKJjNqYKKASRAZCIkr2AJs0lSiQxmWVCBiIbVccraNE0eO37+mZcPDg/v3b164eI7ft8LyKPD5WxvuRhL+djFp5+4fOl0OLm7t+QqLM6WKRuRNGD1pGq7ZUJ0Rgi40dSrbgWofTcsl10250CZzDsCwbYdOjlhIkA0b5t1VXo/dr7xhY84dk5SBrVuiIwdelBJbWxr56VB5wvCzEl88kOnq1Vv0auI9wBmi6UIhSSWB61CSF0qShSJjXOFh9KBIrWrjojIGBR8qOuqSroKDuLQBdJgMG6CDAMiBdNRXQpiIry8NbpzZ/BAyFh6WrWpi5FdODru/v0FNIaqQssgkpNKKIJBnvdZIkMENKrGoWxYSJarQVsJjiXDEFVEorauqN//PX+gvHbxLW99T/PMv/6V334TGDefOr+5tbPbdtOjUzpa8ABeELIZ5L6aPvrn/kL4wR8Zpo1THUu/xambzVeCSD6roZkjl0U5uG5IVQiDxD6BY65DZQQ0pIaDJEAArAuvaegMoXSjhny0fpH62Fh5SXjvv/7w5tWdxf29ySxLUXYcFQioYMeYceji/ORo562X4ZmRMm6/57Gbv/EbXlAjKNrQyCN/6kPu2z5IvhofH3/pU59xgwfypSsp+6FHMc+uMIqUxMwxlCBQeCFSGAaCAM1Y2SE5V83b7jgUYwRfIAbErmvrZlzkfP8Xfn350VHRd94HV5TZei6IoXLmLZGu4oCpqC9lMWcCrL4mcOBCQMnOOSMuJ1PJgrnvJNP2lTD2J4s92qhHrqaTM5eMHD4y15t//Z8PH/m19/+9vzE8cnmREs8OKlfhIrngGZzDyEaIkonzIJX3vgbICg5BDaqKVkQCqhlUCBUEVI1KgJzBO1ire5FOts6P/vOfjL/8z+K//nRtLmbUQCjqCAgUNYsLEamP2RI9+Nz1Kz+2+Sf/oz//cz//F7bLsGqzJhe4sFALgBDkttcoCWLdbBLT8fJwtFk0kyLJsFisJpPpucm0Zr/y/o2j+4dHp855DbjStDVqgveiMeeERWFR1k+cZKGkhogrDiQZ1Bw/zFN4R2yIiCIqAAqQTc1MEVTh4QI/rs1nBgAGprZeRAMDQ3v4vaqCARmg5q4fxpub6BNHyygE5IEppeCZEyA5UxU29phElWxIOWDRPnh9/tJHt77phzoiMbU4pOVSlovVV18szqIqhVB0bY9DIqOqaFQTu5Bi17iS1zW2ITtQAoyZ0ZELSVRFmRwSqygJpbajatz47TNxxuVqWCxDGG+/1Y7vbh7dtAdvu
ntfC2HDfMrzjnMGNUcMqBbTuhubuui4yCrsHYCuJbdOlVLyVRjigptaK05bj9AL3zF5x3OCaIOlJuTiiiu8npwWVLnTZTnfWxzfpXEQvWByD0TJBaICynx65WLzw/9Zh5vlGyfFgzs4GYEfL4opPfEEXr6cm8Ic+X6ge3cAz4qnn+wcB57hJKTLzxdXLurHf73/7CfCcuYEQRDVKabgiZkkCypaZIzmPCkqGAOgMwMFE1HNTEFzVgMzQmJkBDKY37KDN9rNZwNORFu7fxMPH5SMWO+4aovSXIceQKvVg9kn/21YJhGxQOW156g6R0N2GEZ+lPXs+NMf21AkBUsoxgqKSGyaJRM6RnTOWGB12ltCQzeq69Xp8qAfYpZxXVzoy8PVKiIaYDQyVWegYIGxdOxQotoga+sSAqhfcwUQDXSdmFgvtYjKmnCyvguuv37j8sVdxGJ5dqYCGL0YbO5cCduP5npaFrlu3NRN9x807M7AQ8DSuZGvmij9ccwSh6HQV/r84o1bH9vP71utnqmLQIiZakfoCFJHZWmBGAZz9bjEZzd3D9qz9OCl/TTacZKYiW04XXqmqhnlnYuzEvawTdOCZv3mIGUoYYjoAjEZOUSD2AMqsIMQYL0CygFCtRS9+of+0rDz3KYO+LV/NJ02x3mn+MB3d8OKxpeWJw9g9enRzhi7JZzcgrQCyf7BEX/spzWoPvK+ZjzGND9fc/3e74FhP19/rbtxt8zmJXUHD4oh9r/69+ruxfNPPK/T7Tff90Pp2Yvu9U8cvb63+/xb9/0z4+VidP0r9Ov/AvsHlw+XlvbvrujxP/W3HiC0K9Ev/PPL/QKA+mWmYrMaF7AJQ3DR9VC4o+1RHO2Wbx5vBSax4fGnXnvsucm3fsfiw/+ADo58YPAe7h2PXjvLd1K9tdt/14/Mvu8Sli7lBMv5po5OvvDlcv/1jdjyyQP85EcrjTsBUof9cWYErnASh/SxXz/9zV/BAIoltP7R7UL6Zbta9q505y8M1544PD2dbl5612PPnZ7t33P15ju/c/ttF+D2q/dfebmelqpgpvVkjKTiFRAkLRIkNSPKUQxDCFIBIlA4Wy4JkLvh3MTOn5u+ef2uEs/u3Qyrs/OFP10mFOwGO416NjtrmFxNoLicLc49su1DUe6U9VazWMU+qQgpBHYMInU5lrw6mx3nOCh6FS6Fu/kqZzt/4YoInRydThrNq7bvenYAAJvoaVyczha7Ux7MlsPqsccu3D0+ZfKBH5YHgQkABUFMFVEUgFDRBE2BYZ0IAVuDghFJYR1eVSJSAEETVUUyIngobSZCQC7WSgQgA80AtpYj0PosQUECRFpnk9ahcQBTWN+rziybAgKBEZIDlTWzfg3eJgQEBgMDIwCQnGO6+ervXtjYuvTY46EOvqld8LQ4ppe+hF++vpFGYGGV1U1Kecvb6R3fBFsV759i6OFitoM5HpzUk9t+ejlrcBZZFUREBiGGukapyATQulv3efeCQ2fm83IRghdRVSUmYFBzxqEITUmRTKHtAJALXB8WAGY5OyKLMckSinGan4ElVMCyMDaQDCIOjQ1TH73DuFhAXDFo6lpGIghVds5tZelBfc7inAcEB8aaATJkoGaUH30kE1Lqxu7ckDKUG4kKvnudTw/t7JRDDTkzZgBG0dHiAO69ovP91SoXI4dYSagtpdQtHAfpFxyPw9nBcOeNDRugLiGpxL5oKi6ruJzp+avluz6oPcLmrr/6eF9Mi6dP+OM/Aw9eMkFfVJiW8ey0YLWYRASZU1YkjlFBEmpLfS9ixEWWAdEIjADTIHK6cv2globlWVwthm6JKi6QmKbUppSyeRB2COv1xZRE0UwhrFcfTVVEVckgMzIig0kWEiMyNXHBJ13HDNYJU0FkW7s7EJkdORIFAKuYyIABSMgB98SAmDUW2gNxUowqhAUYpSwheMckpjlHZ0pgzLUoAAiDGrF6b6qWRTWpqqhgAkxOkcQ7ITDObYeFK97y7u994Vt+sCwdbT/mnJMWCgzXP/2Z/bt7MN5Kxfa0GnPjMeXL7/jm+aduGmBSWXmKwY1GXCquZp21kn1A7xlpubcoNpvoerfmUpUFDB0c3I97exONfYA0pK7rRuNKE2LGrSHql9/kr9+Z/dSHh43z18bnMjMS9IvWjxQDKuKwf1LUlSWUc+P89mfL5Xfbv/kllr5sKiSvQ3CMlFxZjXSxcqEwzxoqogKWZzDbK+uxFd4AcA3V73skR+BAg+lgSKpqZe0SwcmMnPIWgwkYoxmQmSQZBkRUEKKQztqwWQO5lAdXlqAiSUATKhKgxER10II1ELlGVys2RmQShS4itqRqVDouIQKYYSiIXT8/9NOxXXok36+CnEC7NOhkNbC6IlA5Ha1Sf7x/vDhaMAQSm4xHXded3xpHXXSnXblYlDuXuz76IGLJpTAdX3zQn159+wuv3D29/srrrsghFOOdSbU5xsKjWV1XNsRLIczKkFM8E+1VnalmYwJHBACiAGgBTC0X6JyBI8OSDFwIRVnXHLwXGPqEknJaxWHBNPQpxnaRwDRbTpSTJrWHJcG6OK7D7cN7mNp3Xrv8wvuf/Z9/8xNx1d89Xm5vFInEDHvpNupigpV3NhoFbdMGBxiSX0RATsADUTVCjzjveg6+UApFiNm6nM9tNKVzdGjQZlm1ksSy7U63b5zcPzieX928MEn91rTZ3N7YLsLORklD9ACDGKJzFLJqcN45VDSQnBFXoqpYe1cQdLkbTA+71BZukxEB2ZEaDipRxJsFtO2xnwuuBM8Wc2iqth080P2lnt+abo/HKQ/PPbo5tHHvtM1AcUBgJsXgXbda+ZwJCACAPSOIoqouWzs5nT9ydfsn/tyPPrjp/85f+f+++qnXv//HPzidbroSf99P/KSf7b3+xS+//OJX8v7BGCWa8HxVJtWAmlTa1IzHXGRJ2XEtqVdU1RyCy2qWIfeJiZKtaxkgMx2iM8J1oWYqquD9+ukXkznHlhNkjGfd/Er97H/4Hfs726N2ldwkh7IXo4bOX2gm5Si9890vffGXj9488hUaWTaYD90osEdCwRSzkV+vnM+XHTs2TKFm63Q8Cs7R7GxGjovAiG45yEZVGuJqNTRF5ZKg9efObdzav71IQwbd2Ghyhw5YovbdAH4oXGhc2Rv0g2hSRMgEVmJRFTSIig1JUuEVbDwOl6cNDUlzSFkWfRpyRi4tooEisHdUerI8dDn3Lqrmmmg6GS3nM1MgsirwWrURKocu3Dpc9DE1IZCtasaro5IV9oGjZo/532sVCbhOAFQQoRiFVZc8igKCh5H3ltGXEDUCOUVWyynmIWVDTNmY9D3v/9Yf//E//vrXP/f3/vOfzJ9+bfzud3dveeGb/vCPni3S/V/+uc2zW/zTv15FYTIYsAekS5dG3/tjBxLCLGuWHY32G7+2fPkmgDdnOSU1KZwPhQMHmC2DOucN1BUhAuQ4+DiQs6zgqey7uDpboviovpleYu2sW5VFwB4mdSj7uPzi1yfjMC/LlQ+qFHMmVcoaGEsPkQlTKPxkAFsIL6Nu+uA9
apZhA3Y++NZ9zrA4vfHRj7EiuBqcF8lqpiaAEFPL3kWxqh55dCoppx7YgpHmZDnX9bQ9m6HvmsolGQwfgtbqzVEGDeTHQ+u7hXfEYKBD7nogwar2IaQhUR+xsMB1Od7puweecgbQIUOXVHPBLsZhWIoHju3CT0Y2dKu05OnUMsUUXU4xJiKcTEdbmY4fHN35hz/3yP/lx/s7n/7y/+e/fTdd5pITYTu0hOZcIDFMGkIlLKKZvT8NIW/WoVtOO4Ghp5IADYiyL/LVC9DeLh/sg2UQB4LgkcYX9iZPNz/2R/nXP1OfRI9ejYFcEnHMmBW1L6gQj8g4mlSnN1555/d930d0e5VW48mkmOweHc772LaSmBnLQlGrZrTANPKlDe2kqp3Z6Vk/3b7M4NXRjZO9JWOEWNTOVBfLzpzPlasdOZLeEoBzZCn1nl1TlCfdqZAT1UA4xMShfHgmGJiBqopBVk2iIqoAsEZRrhtBYGCGtt7BAsR1/Ahxnaxa593VHk56Sc/65W6xE7HgcdMjezMRIwIQ8d4DqBmIaGIzwqzQD11lbtNk+ZlfCNNHymfeFoeY+lUBOBwfxZtv1Bx80GwJ14UMpghKbNFkFHxOgz2U9DiDtc4ZTdXYGNkTxqFj5LIISuY36qywPHiVr22dxg4f3Coa2uDX5cWfHe68lJfWTC4MyRti1gFAyLPkXJRorCYDOS5VVNWUhDg4kNx7IkYwIpg4q7d75TyeTL7v/9yfe0ya4Emk67gowugiM7n+lJuGc3/4Wx/OZw9GthHGj3bzQyLJXU8VtQzp2z60GF9xJzFoy9MqzdPKRqM//GeWF3cEK9XEltuD+2M/xeW+NI+7Yruo63oyjdlWGzv2+3b50SeHX/vpcm/fQcYygGLOQ2JvQQoVVJeTKaMxWwLMCQhAGUGQvOR1EpsNq6QZkzgURwP/7j995NxF6E7SvMsnyynALHa2OtR20aWhHfIWLtovfHiUlloDKImge+TZYVggMrmiWNzytz6xcf3rHAWJui6tdUmg4sxywl6VmPuUg6fCV8CQaQXBdcOwGDQxJsrVqNwmfDBfqcAaV6QAYgZEwVMI1GfVKCKqBgxKgARA8H+IEq0dSrxWez8skvdPjyUOk/GoKivmIks+f/7yePvSaLozj+ILX7gQu9lKfLV9NaVclGFxFnkWp1uV1porNz89e+Rbv/lTq9XPv/K1K0/uPHVyuOOqFAdOTrMFjyYZelPpjTy5EufDNnGb4hB7pXTSdcV4xJuFqCbscbgfQ9ifn/UgaZa3V311YQNYMahRUjEQ5OyADIAgG5iAAIQSEEoTuvGb+tLPrr767yZde78rJ9//H99azC+tTvXu6xRPinqhuR8ObtT5BOIKXB0uXAOal/MT+dirQB62tuLuFXj+u770ldce+87/E34HzD/12+PTuW3sckxbp/vwqz8/k38MP/BHN3/sr7YeeXOrese3P7DLp/dOn3nqffrL/xt/+TOTxcH83u22yVc/+JMPzs5mE5aN6c57vu/s07/qbVw8siFdMjtsy27fwcHmpbNHnii+6f27Oxf9v/437vXfESiO3/bc2bd/53FcjB97dKvvQBlOTgG4KkNazUNezn7zF+mpt8P8+oaLeOMlPVruwFYI3gPzcgEpQzZYJCQsmxJithQdFmVdjbk2tSwUqyK1vZgjroOV3RwebTbufPV30/3FtdhulWO7/LbZ5W86nN2mPhO7jZ3N2XxVlQ6SZjEzMetX/WkWIWUVywIqCYCcr4YhIxYlV6vT5dm8vXn9wBh7yNnhuQuXTk6OytI2m2m8e3a6TBqBGr934HYn1WSHRpMJEfU5Hx/PzYW6Hq1OZ2RE5jqlmBGzoWdit7mxa23ntbpw9Uo0y2rLdlhq3qwnWLibb+yHEb/96Uce3D9MJR7O5i7hqAoUpg9mrVAw5pt3Z9+Yna1dnggADFQ4NoSstpaLmYkiEjKgEaAiIxKCkQnAWudMtg6YAuI69INI9I2bj8hAiQjWR8b6Q2uNj6rKmiNjTMRoatlkfaKgKpgxAhKbCT2kyZChGTwEFOE6oISI4LLk5fzszp2b25cfK2sSyXno0uEb49lx6BDZBktaYz8ux+/+luXkgquyXR3TZFuuPtrdvjU5ulmeHLi7NzpCYBUH6DwgGSuVDhVtUCjK+vy5XoasVu+cwzMHFlmyrM4seIm9jkbE6Pdu4Stfx3YBVgN5FDWJZoO5AiyTQxEzMoMIIKYDEGkaoM+WMimlYQkOtB8SrjwxOpau9a5iImKXZq055IqSJGUHSWzIrBk0Q+rRxFlqXdXWTZG6/uwkjDdkNKG+q9Js2HvgBDEhAIsIleIYh9uvh3xETz3ebOwCaerndtwXLBbPlnt3dTErZFH1CWezYlysjuZmSF4ld9qabV/W3//Di4tPFLdfpeUDHXbzdNfvbvnGD6lFKowIfAB2y+GESViATRm9IIOZpCSxo+AMeXgYWV6vvqMl6Y/Pjq+/dlLrPHdBclCLxjFDJsuiZsrOAbP5cmiXJgJJkAHMEReSAUBy7HOKhfMOfRVKMFln17KkbGZRiXnt8wCAjBo8AQEwOkfeOyanimjaEJmgZHHeeWYnOqCqRlGVPIAvENYyPmPnAFDNEJ1nUhsAxEARQSQ5YEdO0Js5gIEARHLKZgggMmjKGAHU10iaqRl94Fu+O1AYut4XYZj3tcNRbk8/8Rkum/LalS61fjICg4Ty2Pd/1ys/9dOhp6IeD3XIcYgihdm0dFFxfG5TrPNEo8abDnrQdkczOl2Nnn48iyVMocR+0ZpHQgrSZCTrBhzEsSu4IvBbwZzHDMssg1eAaFmjOzsm6dx0Uwmbq7t56Cl3VLA1VVq04tCBYhWgGQEwEGMWcA5DIcjUjCF31BoMZwoVj0YgAmjrMAWQA0NSMWZFFkMD8aUnppyWhIDOG6GZsvcmPRIwE4USE6ehhaxYlMpgxAaULKPzYExoat36iWE5EqmCoJqgMBoAiKH5QkRYTdoeVaysAntdpezEKup6YWTa3fLbdT7dy/cfLI5Ol2fLbkirXqrRRjVS6VcwxByH5fxo5/HtxVnnJCB11t03P86Fe/yP/8T0Cy9p8ejXPvX12dlpzYvNx58YFZRbOTw+2plspNnZJ3/uw7boG8PSeJ6tz1YyekciggrEFEWZOQNoUioY2AlgWYRiMilHI0IoPBYOMBsQtosuxRVAHyWnNPTZhkgxkhiSY1DVb9AbD4/nW1ubO+Px9qb/pd/4xNFxt1EUhkXhGsS2FzubtedD2KUSRbUXJw67YbMpEa0b0gh07Di4UHE5tzCARobTdvARM/FJl1ZpQQFL7wHJOX70ym47b288mF/myTvGmx57iXJxYyox5k4x89CrsWdyQ8aqLNbIfDQAyRFEjFwxqr0Dg67LnrlmyEgeSk/MkJaxLz0X4MksiThiIzvpY+tQUjo/3ZjWocRg5BF4EDyeDQfH8/snrWvK6cin1fz8ZjMpJw9WJ3XZTIrq+N5eNiQmQyxLV1YOnXvj9uHyTKcb5z7wwfc++bYndh693GafxAyluLT
z5MVnL37nD37uo79y/KUv59y6eWvdaYh9zVxW3Ke5QwLNiI7KIsXBRBEMGdGRrwpS0JQkCgB45804iXoUyylDNueTmWUBQK7KFBMGGG2NHhx0+9X48e1zkPpn3vp46qu2T23bQmxrcqv5/L/4m//DfP/2hY0RLuP8aOGbclwUiCaEnYj3IXAw0zQkM8tRR3XwBZUMooNG291qXMmzg1UGTKqnyx4cmWTvS/PIqc/bKOSaEVLXpz7VwJhMTV3wAw0DZJABgnd1OTvLo9IXNYkr01xQoClcyMajYjoaySqPzVdNmdaOrbl30jGF076DgMTYJWtFHNDcJFlu0Bgtzdp2mU+HQZJt137kfJthvLJR6DJa38mF7VFDvi79xc1q0dr83jyvknf477WKyJEihaIYJCtI9pjJlQ1jVp9zybzqIoj2gym5aemdEgAnk6oK7Z32mae/6fje6bXdMMPNeOmd7/hjf57e99z1e8er2w9eeNfTV14+vjkMXjwFyolKCx1Z16qE0QDgQKvV6Y2f/Se6OBMojNg5q53vkvi6yMm6oa08QwD2YBC1LF1d+DYhQe3LiIiKrUSDoSjGw/GDypxT1dQZ8oNl1xeME1dWoWtlDRWuyDF6MCzQG+Ek91//O3//3eV/efV9j3/mzb2YcbAkpr50luH4t1+CifNnWrx4i6HIogIZTJMkLLwqQ07EhHVtqNmZm0xcCyFgTjPNReaKoo2oAs2ORmzgrEfts2YkTBpCWU08pBglp2HZNqOqqorBjNk058mkSKJuuqFRNS0MU9boaMKuMp4LWSgdV9jlxAiuHnmqZLnC1EFsTAZCAUjKoJp1UAbblnT0T3/m5kd+tuj331mft2QRRCmwQ0C1bGhI2Uh7KiGCLv1m9af/cnp+NNy5t/gf/9F4/zXoByMwpq6u4du+J12tZ3/vb13Yn4EaIMOg4daNyfLe2TPXzn3XB+BffdyRywzCpAnVyKGhKQFaIAsAond+/cPPvf/tf/6P/5m/+/P/E1OxSvlsvli1q07yxsaYqOAa/HhiK0WyyeZ0a/dc62zVnZST8ensqG2XZ3mBfgKCtSNCzMStuNTmFiEDhqY6W8WRw4KxZPTOQs2LlkxBMReVZ3poQFuzQdUgysOls3V2CHEdGVp3gx5mi2BdZq//Ss0QDQEQzAAJ1cAQGaxdHDXllaB+ftYBeuIsBcSGigKGIRaOOTOJcxkiJmbnCsyDBQB/fHL4qz+9E8/KJ5/N5MrgdDhePXitVEtGgIKmDiSlztWNmoCpZCuAJPZIzhg0RmCH6JhQIZMnyLGuSjEXh8gFJetD6VfLV8LwXJEkHLyIcTa8/Dt0sB8yE3pLg8ZMziiQJUUBJJ9VtdxB9DrEkE4JFcGBgBEQcJ6lPD3v3/LO1eUNOn8VDw/VQK8+TfVIJbm6gkqAnYIzBGh60AX0C5EiwNbywTE04qHgQJJW6kLnNjae/PblUIa53PvS5x7dGFHh+1Yrz0XpnUYbkkpuHn2cdq8M929bs+VH2+ZAzVuKaoPVZXjbtzpV+fhH8d5dGQbPaOPdxRPv4lHhXv0kLwbKnamYqodAzokO5FiFGY1UwDjnjA6JyyQDuoo54b1b8OAmSPbAvjco3Qbb4nf/TTt7s/qm70uTK7ayg0/95rhbuboqKLSTHf/Ct+l0m5iWfdw8fengYx+pV5oAda2WQcxioISmTqxg7tbYL1XSPrUPpLC2mxlYKDmKLFIaORrX1Xw1ZMkRQNfKOIQsuspQenaOXRZBNEBaS5TADI0AidAYVAAQCVTNGB8eCS/fPKp9nI6Wz17Z3tneKKrgikJzUs11M2ZHedX+4i/8zO++8sYf+kN/+sqFi2xDk4cCAQN96Ytf3j89COOJ33y82n2sO3rtC4vTtycaFgtfVahWOE5D670VjhHQXJmHgvvO67LgwM2G+RJh/CB5t1FWRhXaEOerk9nIYgApZXnu8mVIS5ASJBKDkjNjAc8YwRIkAUJQhZgAkqvC8hO/jN5Gly+eNM/0T34fXXvm0r2vFl/5dTqTcnvHmnEfHumeeVtfDtyu6sM7vmtheQgLYV/DsoOjo9Ad3V7oxnPv2Lszu7Kxk+/fWO69Nnr+7fO0Mbn6mO19pVTHl56ng1UVF9qfzfcPxzuXwnR7SA82HnkivvbS7IsvDYvT8Pj27Cuf8Ytvnrz98uJk//6LL146TRKi57bIupJ89o4PLF5436PPPsPJFv2JvXEbX36lvzfLo60LT3/zyUE3qeMj3/89i9PldP8Y4gABZJ4lR2xg+vj41svHu9dfLOWAuz64BtyRpWi+1KiYBEqH0zIuOyILo9JcmZvtQVJTlThy/MZ9WAFXjj2hVyJcnB4MH/7fdthSx0XfL8yeeO7iSV3uKdGoPkt5dvAgB28WhiH1fY+wxpp4j6WisgPV2KUhJvGFL8qwGGatDMu0vL03M65WZvdPVqEcX0p4ZXt3MqY3bxw2HWsHqqwzHZZiRVlv+uX8ZHt7iqqAPlS86lZF40a+ODpbtKJnJ2fnJqMkuMp9McwkLcH5Li5EtO/SdGNrtHvu7u29xWIlrqqa5ou394KkkQ+jKp+7NDk5Scs+n9/e1aGbn86KdSptHUwzQUBHhESmxkSecS3BFCAAJQBAIMQMRA9hQWtbkxmxARKyAhmsOzu4vgEJUXJCNEQgsLUYgUDV1EAAHv4GAE0fHj+0dhXqurkLYFnVCAmAER2A/N4iGgIAKiObioCh46Ip79189YW3v6uegiSW26/c/9gvNPdnpQ+dZNTEkUvnNSr0Sxu6MN7q264t/fi7fhA+9itwtID4Ol247LaKoc8cUNALA6pKNFH025fKp56nILCYeVNYnsnZvquCujLtnssFKCRdnsFLb+jXv9JoAAZRJO9AMqqiB0S1mAhA+whMBuY85ZTZeet7SImANPXZJKN31bjLEsoxjDbNj9vFEqWvtiuRbBvNyqBwU7tzK/SR8gCewSEMvdx+tdh/G17ahW6lCjg4Prpf3H4lfeFz3PZoLg1Jq7IMQRZzKr01o2Wfq8GozXrvOt95NR4c9Y6EpGwK8lWcz1e9FlitYpessM1znoFOZjgkYaQrT5ofu9n+cOduuPKMen92cObuPGi6ZGQpIrocrl6D3W+Kxwf5tTeL+RmZGVtGNSQAVEVgU8kASAaiJmhhHPa72f6edbt16ld1Uw5Kqx6t8FA4RKE1XQ8kkaorZchskrOQSDJgSUOMlKOZJAQCjpYDq0OzlM0cmqYknH8PWwdMxIiOwDF574EZmZ1SSeTN2LnolAzBjM2colEJbEYIwMksgQZyydbnGxqxc6Uzitly7omIuUBBMwZTRiLnkiQEBtMokkSSaUY0VI/mASf1ha3xLpEKgAjYwOlstfjS76RbtzQNW4+dn2yUA6IiYKDimae0AgSa7kwXYw/3Z2G5qoOjcRga2huHSVHZMrqu9w7BwLNbnc7DvbtNXQgyxnY4XpTmclwhIASKkT1zJzZILJyhB/VDG9W5IownYQCQEzs8kte/zt98TTi7gof7i/LeQffq68WY06FPPQAaT4
pcFQTBuohlDWUFKmgWl20ZgvkKE3JJZoJm0PfoHCCZZda8jiGmQdAndgQ1moivRzkNQA6SwhANE0jkkmW1gG5lGVwzwlBGVUi9AHCo0YyYKQlIYgOJgZyTGFX9mh7gg4e41G6gDECVqerQO0bVSIPE+Ukx2hkWB+BLt3MJjo9Su6AyZOe8D4vD/apibem4i4w+BChd1Ww0pCeSu2G52H3qabCShlOdnfmNR5S2eny+8yvB0yd/+B13Dm5PVhQKX0+bjlLdFBlif/Sm3r1+PsWiwOlGdXNId+ay6HIHZAYCRil7Rs/kmYKrlEstSq6CH2/Uo3HpQ8FAaBKHANLGNqcYs0bVZSc6YBtZgC0bEZoqEvnwsDq+cmGrqvGR3elvfO41c8VYK1mZqa0cIpEgnR7Nr25ukiY0SxBRfUUhDZqHDkQe2WhOVqvGlSSpKZwim6dT5FnIJ6hvzlazITXezduYgIPHaABpeFuz8cy53W45cyUUTHEYUAzUgvNJNErqYy4JktTec0wpIKPDHLVXA8DCe0i5KEKWBCDbzJWl2EXHFIraQJwYAyfJqtimvLfqq6LMZ6utjfpcE1RAQCg4MX/9eHUyiy05HRKqjJnEYNavzNGDds6rhR95HYQQy7KwLIvFMN0eVU1587UH3/597/rRP/5Db756QxPXo0mKBqWBakUayvoDf+CHTl541uanjW8Ws8Xyxhvt3TtpvpA+Sp9cIM0ZJZInohIYZYiYEkTLqGRiCN5zlggeXF3GPvo6SMYYdd1061OUaC6gUqYiTd/52Lnv+0BNcufNGzE8384W5cWtZupHtNWdnfz8r/zq7OgVx0OSqnTONsaLnKOod5yTtt1Q+UAayePWVlVvVia6d/80q0tRAMx5ir0uliknA2bvMA8pDdk5N5+3483RU888FQe792BGFRpalySmBDGBVzdx0/F4Ph8GzXE4ci5MJ2NJ4gDHVSUg2Q+pl93JdN4lWWpNdbtgbHyXEpqMqJmGRgXG01FEQdRhSChSelzlFMklkWS26rs+9vM2Z4XDbGirq7vj8VhUwTu8duXCnb2zqnYPjk+Lwizald3i0A1NM4Ib+/97q6hVKZtiujE6Xi2XRzNQ5xBKIE+WJWcTQGOGgFzVtRyc7WxsTTjN5ifU5p1RefLVrxU3Xp8ffIFu733vD/xoub37+tHh1uLW5Tufm//8P3/j+skohzZTn9mZbkLMN7/04K//J1f+yJ+ur76lyYd7//2fbb/8yiY1LZiqsgIzSVH0XJLOQ+U4m6UsfSw3J93mVtzZCqcnPLTS9xxoGTFvXU1x33UpAGuUsinzIB351zar3e9///Hvvri9dywOEAETELiiblYpzgUxs3Mwvb/38T/yx6odb7I59SNiUBVDrju89Q9/CSUTVgFH5us4RCo8eDAP4A2FGCsHzpCmlzZlo+LJJbxzZzg+biq3WKbCB6fJUHOKiMgQJGZ2zpW1EWmUpKqxY0AE8+NCPDgPhQsKElMXF7naqOoGqKShS7yxq+1gNkiK7MlhGFJvDbWVEMm0KAtXytmwWiTOnNp+yJFLbw4DOewGAzYtJwF4GdEmaQAPkIYE5NiBY0dDzwQcvMQFRyGXsOv7oxupfn937Ro99fz4waugGQNh4GZxcPjql8OzP4JXnrX7n0VAUABf1Ed79S/8k+pP/yl97GlzHzdVyZIBHDEqmBiTMfPQRQVkT+HsYP/XfuHif/QHZitAXPGYTXrQ3AROw4DE5Ak0Fkas6gnGjtp2DjYs4tlZOu5kCLUrp4El1myz0wiAAaAAopwKh6q0WVa1RBAoHbXtylgbpmjqmVAsx4dY66wKiqKWTFVFAdd5IbOH+KF1CsPA1lbih/AiW0vv1/Eiw7UaFwAMHELs+3q6UeQO+2NjTo3beeGd/uLm7PjlC5XN37zVzJNrnXeFkXWWtEbhPMS8wb5OD4bP/6INbb/5uKO+u/nayLsC2ECwANRceCi0EEkEmrmAJGKOvM+qKOa5UAM0FbS+0oBAuSdfJAVCdJ5SyplhXLjhlU+4PhareUafETyVQIyMIomZNXWGGZuKY0zsl6Pd+kP/6aJbwvHd5gs/O1qdkTFnSqu5D1yE+mhyqXrsvQvXbW5fgZhHJGl5CmWpRqKWo4XGM2G3WDjnc/SprIdJUUVzY+fTisA0xXKjQYbzYdj7V3/dv/8/pOaZy0++HQ5f18wbl6/ZaZfcSX3pnEVQH4yqPB7xEw0U3siZAHpv1AcUjTEvZ9lN6iffEtuzoB6Wq2ML/Xv/CHQzfe3VDTdTJxoHTw4yUhUUcyKgiD4bcAJIPoC0KzPHHrJl6BWB1uXcwyBAJwQwBhh//avdy1/lLajGzzeRBbggGLpeLjQnza5i0chiZ37v4J/943EkU6emYmZqDtlAEmoScw4Kp10GV1SySjLo4sa9FDqOGR05cxKlb6MwTQvbaKpO26gGDAbg117YbAJqAA5AABPgw/pTFNYhZDVGQoKsBoYE8HszNHTVPNls3tO9Y2YkHWjzUlFV4JxIds4P0r164/qbDw4+/JGPbI433vXklaeuXnvt1u3fevF3f+eLLyX2nnECvq7qx57eGVvqNDbOqYjzxMFzrtQIFD0Rgqie5WFeeK+uMM6rs3ksp8XT37HcnM5u36y6Iy7NeRf3bxcWV92yfdcPxlXcWBrc+Sp2M0O0BMAbCkBEYAlKs9IjKrgAVTkpQDZ37uRKNt5W7u+H1/4avPHJ+vxlOHcZqvPzfjjdvbY4f/nqc2/FxeLer/6L8+lmgQ+IB9gaw1QA4oBu8wf+JDz6WDw6Gl760vTRd+pmhaNtTZfuPPWh03cttkzGL/02/7OfGB3fw7LK95Zy/sqlD71wtJzvP3hQPPHUMN4YRZjUxLvPfubnf+oZ/wFIq+1nvzXzjbj3Csz21NGb89Pn/sAforjy//Lvxk9+fFQ1ufc7ftdGO5ho+Nl/8WTqHR8qRzeMoZj2J3es7TnmMKryBfbf/PwT4S3ut5fyxqexIA1b1M0BlqbA3sAXM8rtlav27NtsbuXrNyqM1ZNPD2cPFrdf7r9+e9M35Wjz5Og4jCqWrGQOMMQOfHZ+mkwvjfjod/7Nudc+U+tqERcV6/HVK0cbkGJbhg2yoo+nCqLGHp3iIKaqvQPwZWFmq26BPi3n7cZOY2EyZPrs6/emmxMRvHnzYHkzaj/UhReDQAyeukEK5Ad3j/b3sG58zqvgmbwM3TJnkIKlXUrUAvDR7QuLdpXRb27upjQ455bLdrG0sqw2N+r79+5xCGOxJOyqauLK+dBXFI72Vrs7ZYk+UNod++XJgfOhDh7595ywSkBiwESABPxwPsBAhsKGCEQApuaYHZAY5bWCEcBMxcge5ntMDRQVifRho2etMwO1h1DW9exBAQVwPYxYi8zATG1936KZGiASEwKaAJICgAgiogmuzWkA8BCAhIgsIIaKJhXJ4SsvbnUXh+WS9950S1u5SVHliQ4hAovo0akdnlI5tZz72f7k/Lle+uM+XxpdpNPoAGx1QkOHgslKGm/BzgiKyky1mQ4XH5uBa9v2o
nPDjdvhxp18+zo1pU13oLggO7tgVMSB7p/iTJVFcWnsszpcb8zlJLFXHajwCF6HqKBYFgAsQ8IsREpiIFG54MtP+8vPrO7uWTXw+Qtu92K3d6s9PWw2L+TXX+duf9K31j9wA6bVin2lMZMOYBaC8O/8W8+fIIs08Vkg7e/hwT4cHpMjIEBPQFFAXXAL0/zo2xZXLnT1xG7c6j726W0Hrk8k6DzSlMI4yAIQnSTIPE5PXJm+51vKRRc/+otBItfcnh2mpseAzdNP2nTTtJ+6frU6SzH7AinOLQuef2H1zu/2TSU/+0/96y/mZYeERpCyMDk1UAUFBCbNSkSZsUdcsa1itzjTjNJr31mJVAKwASKQI0fkFbhAlDKknpmyGQG6mAX6fkgaQImIS0ZCZGQOaJhFVCzlrGKeDelhqkhFLAs7Lj0HphC8ARCTA0IzBA7MAGiqHhWZxSE5JXViAghozOwZyRISuaKqRU0kCbKSM1D6vSs6A5ryGnepZmqaLWVJYNGAA3XzvhYLOZztHW40o7JwpST52ivHn/zo/V/9iGvnBeP0/KV23mEdMCCK02Jz8zu/lV58OQpJJy7ZxubmcmhXzo1//Me3v/t7htMlfun1s3/9v1azByklDEENz67vo0hx+UI1Gfeu5KIY+uWy72Nc1t4zMjhWZClDEnFdpqIeBgkleHagBsszuv11eM/3rZRlKLI2cHIUUk+PnecBi1TS+XMaPIoQr/9PRAzACVMmUCgDbk6AC/AMkoF4LaAwQBDLfe8Yh9hRMWEGaZcgyYC0XyGjAVEIYgZojCjLOWY1jbEbyqIWBSMCx5jJEqsM4I0ETR26AOaM6oSnziMnQnQgydSAnYKgRiUhbwqUVJG9q7bNyCNrFDbNnDzk2fXXHXuLMtnaGI18059h5TSndt7F1l957NLh3r4f1eX2brTSmUPQYfGG65+3YpNcdfnRc69+4uPHrzcXtzcvFNCv0sHto7rattnp6rQ9+shHdrpYNrVNfbcdvnraeuvfzDpTBCUm9GYOEQGDd2UZ0IWyqYpJU4+r0rvCQV3w2vQjKeeh7buuTant87JTNNdGBITCA6GhQ3AO8eEI2YcApC++dk+LKieDJJWnbPn+yYGauKI4PxqPcp4WBajNMgZ2HlG7jiwT0v6iDeQgW1G4qixFjQpXsMf5wjSGLA5YTavaH5+tnr60e8H55vw51yXpFilG5sDrfBehZ0iaDNfYPCEKaATqmKjPOSOrc5AETSRLFtGkFHirquD/R9V/RnuaZeWd4N77mNf97XVxw0dkZEb6rDRVWVWUAaooBAWDcAIkKARCyCxJrZE0alprRr0GrW4xrRYyIGkamRaMACGBgKaogrIUlK/MSlvpIzJ8XH//9jXH7T0fbqS09O2uu+79ds57znn28/weYCMcUgJRhADCzJySIECGogMPcrPS08Vofd46ITiYtZGl2z+ceT9deGMoy/V82Q0H+YmVAUWxOuuaqGMqyjICutqVWQaSYgiSuJl0N7Zn/KdH0+25S6o6diKkXOqQ5WWR27ZrtcWuc5Xup9Wz/fMP6ny0glqnLnSNm87ayUFz43q7f7u5fbM5PMDZEuqmi0IUe8NMAzrvfBsJiaMLyesq5+iLvgXhBKnsFcFFn6Ipsmbpe3ePitMZmuG+Xoe1/JXPf3FyuNS1Mnq8WVZ2pT85PPzSlz75K/+/3wQLKGkxWzxw6tSt/b2DSUgMKMJHZUWIzoVxVZqMbt2eNDFqURQhK/K2aS2S997FaAxpBWWWtSwRBQAqMmGZwKtLW7enbaSEybApMqt0TFI7xzVQ1/XyXvRNTmwlcbfo6XJZ18aUCpOxRmHWBkpMKbItoap6oH0EYsToBCL0TO7q5am1cUpdVL7XqyaLqc5MFPDgOkxnzx3bvbG7v5iSEGE6VtpzA3tsZFNkTnI4OWDgZYunj28UuQm4PNbLBLO3wCtvSUXDlX650p/sz+tlZ5QhgEJp5RIIKjAhJRFArUdF6Zu4sTq868xmv8r6pF545vmBygfD9mD3sr99tWj9i7/56ye333jiJ//sm//p16svveIvHfZsSawAACyCB6k0Btr8xgvpH/zN7TlDPSuhG6sqgGiFyIySrjb1yb/6E5ubGy/+/C+MBLRHUEoV+SwKlKumf2as1/PldPrmq4vDpVs7eep7fnp/64XlH/zWSuAi7wNijHzt2PCeX/uNdPH4xlNfe/0HP9JjkpgM5ilC0wU+t2Ev3u0dbj3/SuVhtdyELgoS2SQCMSZJUlKJrlJkGDPWFCWpQiVE0YZUEnHixVIptYsCdqVfvuP81UsHp4djc9g2B/uWclRLF4LOckGUTKUQTJYBEIBVhKWOzMnYCiKXg75b7hsMXLeqyOOgpHd+08rmhWtfflbmu6XblgAod2lbQb3gtjOmQKKk0p6J7Tse8G6KN+a95J1vBAXTUgQz0xeOwhEUE+pmWpusEiGtFKjMsze2b7GIKvrUQpVF56uiJDCmAD9fUAhj7bd/9Rf69tL4He+ilREcW4G9fYgAPhBy/syXBxtlmk5QCBDBWBANEeHjz8aTTwd7rLDAjhRZRkgiVgNAiuK6OkXJohPXcmHl8m9/6pt++MePv+3Jy0/99mP33MXzpun2ggjFyDEAsEdY6R0vs8LkRZZX3C5cbNowa3hJRc6IbINSCZFtiAmUSFZz0x8UCLqeOhQySjOKT6JMgWwBOglOkbJF7po7UpGPDIwxpXRk9hcBkDvjYoJ0JAbh0XUc8C2xiAVEjuCjeBRLQzjyGQECzZvUP3G/2u3Kauh0Vq6vFvc+5tfG+u77WMHg9PXmqc+7N25Yj6SDHepufcRZESZLt7MYtktz4Pjmlcmtnelip5rtZoIBE1W2VTLsFWHaGVNRQJwfGvFRUJNmjlqYIwMpjRwhTlY3zLveXzazsplvHXYqMexdQ2BhAQ9JQGMEB0w2iYYi965TIirXyClxFAqLYIZv+6b2uaeN1cX9j6czdxWoTPtwNn8DXviC7VUeCnaJLEyY8yffHk+dUOyMLu3aZvfGc0W9D+MNrxEsilJJnFIGjSLKFt3SrW3c/b0//Ma/+kerMRhUNitD9CgQ586WNHrxG4cHbfq+v+NWTmaXXtSFXYx6gxMnskHFoFVhdZ5xFCAtWgNy6FqblwgESFQYJEhRT5dToUzZHgeGLOYbJ7LNzayL+V0n4msLRYSF1YCC+WLtBL733XD+bm6CPP0VefHLVG8jNtoKehLnCXRygYxOipGQYwTSRzopA2EIBUC+R2nrCsdgSoLGZSCHu1fMwVYwBW+9uP2b//xkC4sOhUAZgcgSRHFIGSzWSrW2Mr05U3W0WuZNJNC1YJr73e5gsvQhkKZEb1kROolWG2u0tJ6QQuKASIQRkBgERGuTmI/YWgiER5QVBAKMKSkgRUcRGRLB+dEuaHyS5DPz8s5se3v7bWdX3/2uD+X9ARpjC8vRB+/OnD391OWb08P9G9e2/fbO1fGV165cvby4pUqTRANLDKk9mNx+rcky8zYqj437fdUCCnYekVEQUsc2RY6Qj8X2
pcyxP6LQrurmSrs4bPfOve3+w3qHbxdFocm64cBnlO76zu9q1u7NbbH87O9m8y0drVIGUANEFAUsiVPCRIMhaUy6ZFM2TaNqfdeF0/HGl7qP/n5vbQB3X0xLRrWUerus1obn8uUgxJd+W934+rnBaZAztexaro0wUASCcrTZXvmKdK+prS17eaJoL873w63L+el7+IEf5VMXm1cvrX36xRz2nVtECcfuPdHOG3395WGWbT/wiP+Oj+jjm7df+Aw9/3vL3tUTf+OH+ij5zeeLqr7VXSvzhEF/w7nTf+mnb3/xc70XnikWId8vfAVRD3eClDERSrW3Zf0M1GxhdPbeR3cv7W20I1jsgWKolHXCv/V/LLfmvS7ocgy6SGEBCr0YlRIknrHiD/9A9v0/0maj/t6k/9Ffp9c+nS7X+2rQnHls+J5v2bq5C8+93KtUlYursfMKRysQdmKKrWtUYq5jFZr2+sFYktJYnRrS+aK1SnxXWpbkvU8JCMl2cZGwVrpvlQ6SiFASGiStiuHmeDGdEsDedKKL3rKm5WJBPZtYscqXKCH4FIM2OgbGCGS1RdUswmioe72sc6EimuFyVmOuLBAezm+tjAbBd03bTWYppPrMiYEXD5g1XZrcOKzy/OTa6NaNLeV8vyib7b1jG6t17U70epbp5uX93so4EdvMr22O9vcm82l355HMAkfZrpRYWKFilCQiwhIjIipEFCBCDcSMSpEhEgAWIGCfBBAZGY5a60WYBRAlHaGGGEAEKTEfyUFHgpECYOYjyDzKkbcIBJCFj1RVRYAoxIxIwgCkEicFR8BrIUUKAUQUAAKRggCxLE2W1HRr73pdawW2i8ceeYcajk1I7jOftDFiYggpTQ/UvQ+3UYwGvbsn114ftZpWT8P1beiSbmfoG2WMLRnqFhbWU2bKUTdKMcZQWIv9tLuN127i7lx3mWIy3Zzr58qq8t2Sm0bPWOsxi0dJRuvonNXCzqE2hAgEKXpNgCEojWFWAxMAoga0GBlILFI/O33ftLepH+pXZdO0IBkW631T1rPDbekWvRsTXddQlMJoeyvRO0yUfFKCEFqaO5IJSgcgOjgTmJ2HomAWADImw9ASdyFy6m/YlQu90RiXh+rNb5TRq6AhaEUkgcP+0u3WYkCVNhDx3Xe5By+04PLJns0FhNzVnfTmFXjwkez82frWjrt9VbfT9OrXcbHDnuu2MTkhBJWjXu0dLuqLFy/4S08FSSiKRJQxKQoIsiSWJKAQMHIUbQdn7grcQiZt57zFJiSPyWEkVFmeRfDMR195yAiStpJZiB6SAlCQOLFLSTxLXuQgYAAIgVAI0GrDgEEEMJJIfKsTVqwCq0ymi1xbIqNIKYMsiGRQi1BKPnIga0TRUfTNR6eUjpEDJEEtIAweTYZaCykgpRQqVACkJHB0DFGLIEBkDyCIAihEgMjASRKnKCTGKtOvBr28OlbY/Zdfzqd7r/7R7+8988JAPCwXScWocO6DyXUUpoCUvHO8+lN/k77+yeY/f6JHCIGzjY0DhSd/9EeLb/2OtLpm2th79wegfnPxu7/vlodp0eg8N5lWi7nsyGJ/h/L+VkebT7wn5sfIOf/lz5KbcvS2V4rOc4vcLpSGKs8lBN+11iiIKT73FTn5meKbPuBEcuvB7VoEoEyvVOIqWjtOKnJiAYV5DgLiHIojIFX1oGvBZCAiiyXmJWQ5KQ2IFCOkgAogdpYU+hkEpq4DW5HO2AdSyIlEI6AgAaSoSIHNIVmLEUVJGxCclowhQ0qirQAARyQNzgF0aFBZRSlCiBgZkpemY0yS56mrKbIqs5g4YZ6ritIkzSdkWCdc7u2WwzGYUvlpidDNrzR1J25Zopr4bqXf237x6rGNFTddFGboWFN/o1GjYbmu04GmzMAsyVz0ymEHmsubn3tltXeh3r99sD3pvblV3bzin47za7u9/eXK8eMm17cPp7Kks5FCZhrDdSdEaAk1cFVmJrNZlqmi6PUGOs91ZqpeVlqjQAiCEsfSum7u2qVw6Dpft6HxuouQhIxmDclaVEYLEb0FLNqZTIH0/rST3CaQtdUyt7Q1j96h67hPzKEZjHvNfJllelSapg4KVFHkU+9vOL8v+PhKL0cRgdpHJSDMMYVRoZFj2dc7swARU9fed2LzvmHPzj1aqBddiSZJlkBroyILgdJESkGIwQBCIoOkKREqAY4pRgQOQcWklQKQCOJiIAFJYI120WtCFraCKIJIgaPJDCL0GWi5WCnLra2ZKHO4aL2KeWUWM1HaxoWPnhfLFhDqmavWRvPYuMAWZZRrH0Nu9eaox4md9waTrXJCnC3ir/3G5578lg9WOo5Or4eGm0VjLTgXCU0KUBgbNYc2ajSkjAJCyjNjTX+lOnMXPfRE4hi7uW8WaeuWO9hZ3N6tdw6aphZOy9kCBQ63DjKMJ+69ACvri+nisA1ptqDZ/ip7MC42vuxXa/eeDX03euTcQeir0YnxA0/c+p1Pu/nyo7/7Wx/5iZ9o9rY6XvxP/+AfLg62Tx4fXz/cVxoKr4/l47Vz/YV7rQmRDLYhsQJUOMxzUxTPX790duWk84SayjJvQ6ss5WUGjiCkpqkTJmLGlIaVvevC6Xranh2MJ8tlF1yemWnwPrgKsMjyQArRYDKhCwGc0ZCpnJBcEmGosjwJKGVym3dtkExpDTqkkBZL74ZmaCk5ZadNhzG2bV0YFN8E3yaIi+ijc2IoKUkcNvv9t21uXgvh+s1514ElffdwUIpqXVpE2VxdjcvWN0to47ExTdpm0XVDRQOlbu4e/ndSEcbY7M+n21MXU5nRetULtTPWECkJydURBKNwBFeIWIKdyd7KmYtnTpx887U3J9du3P3ghWPf/uQv/93P//Uf/taXP/oSfeYrr/7+J07YakV6O1B4rwRSUoQqYsb7y3pU6hIAp3HeCgMmyByJAAMLA7Jw9qe+f/3v/eOwc/3Gz//TMuDQmIgUjcY8o8meXh50kz0fFTed0mi4bm9dk4gMWXSRfaOVipSlc/fbk6drgtee/jqBIkZFmIQVQlfY8ge+HT70rSsp7f/P/yy+/FpViDLKMbU+oNZUGGHVhGi0SsIJPYMoMiIkoCkJSDIZUQLFHElW7z6z/q771X3HDfKZx+/a+txz/OpuqH2Rw4KY8150mGJQplCUczg6BT2mxJ6JcgwSOVljUJw4Tyrj3gn3LX/6llq5dgv7r31qkFFqI9Q3JQUCAq1RIWnl0Zq3PXzP3/4fZltPz375v4St2XDd8P4CupYUCYFBSd5xqVPpbaGo6WIKyWAMHIQ9iRETSc1WM/P2e4Eyntb8ypvV3BGSIYtBnQzt8t/+xuJ3PrZ28UNy/5P7+7+zThYigFJDUOEP/9iEFhjvxLEEoEuJXHf32/Dt97Wf/vf4yiQ2QZeZcCeRFyNUY1BdP2y53HCpoVJoRoP56y9f3HzsEv/aYjr1otEWqWu0wijCBgEhRtE9M+gPll27M9sVHW2pKygw05I8GY8FzttGlRg6STFgTgFdaTGQD0xRCFlrUh7NSn9FtYclkYjTIOmtQXK
ISQRZRFCOpFQ6wv3KnZzAW01vb01wj37DkBAQgAjp6Govb812CVKIZV4CYNeymJLKwnNskVgPZtaOz62sk7t25bpJTlrhldGJb/7Lbb8KN15vvvypduuWoXxUpC7IYj5f7k+VwODChdGj74gqi/WOWey1k0maHJSmtsEjUIwtIYkIx8AopGQ+wON/7f+5NKe7Zz41X77e+76/zAT+y//FvfalMiRCw95Ht2+LsQeFvX5AShzskTMKiUWC5O7e99fv/YHkUG+/zP0B9vNmsexUsA8/4V95JkYDSAG0MApjtz8f3hVUfcgHt+PtGzLdqQ//gDdfy86chbMXZLiRjI1NBKR6OXGTm7M3nyrr5QqJrQoJ4pouhY64Rwmhjgapxwjrx7vuFDyb0bHV8Qc+6E+eTiLoO02AqFEl0AqZU0yoMEbPImhIEnsXCEz/zD1mMFQHB/7qK1mm9OGh/tQv11tfX+qmevAx+43X5PB2wpRWx/0f/KnurntCfywO7NqjTf/i7IUvDugG3LhkVESImBIZYkogiVmRNp4jAJrEAgAEhCCRQRqlkYMAgLVwtts9/Pc/VmgpEhQ1hBaSQoaUYkIEgxAjtPedqH7yb6Tjp+1Llw5+/f8Ybe/1C7X04JmOnzz54udezEwOMYmgBsiVIkbnEiswRLnRnESbo/iGUggxiFHEURQiI7BAElF3li2AyNGsqshsjFGOHqNHduuhyYhuLFspVN/m9zz0MGTlsm6NyjAk9i435h3vfOKVy9ePrY/2dpbNYkqrvWKtaKZJG0CfFABK1BZRooAyA3vY1pSn0lrpOoRY9AciilGLXZX+ecZq2s1cWl0bFK+89MZLcz979VfX6heGMZVSEa4C9s/e/WGJs3h76W88ffvwoNu9vDnYULvhuBGVUlwubS8DbSiv0srJGerMVDUVXvrj0nV/8tG9X/nPpp6P1oeQV6nJQjZSQkZnarYfP/WfCkU4vUWyJyG/1Wyu/uBfO7xx6djyRX7994kMsBu/9HW/mNlqiGbYmQz6Zd5b296b9qgfmrQCb3Y3n0KmN/JWfc9fknvf3T37pccr3157YfXRt8/XLu7dOCg9f+ZXf/nd/+PfVscGL/7rf3HCqia1erI7Pndmhy6MvvUj+yttnz/X8+B97xDU4N4Lm+/49r2vPZWuPKsNknK7N2+Xdw9n59+zt/lg/9yw+/gnaLljhwBaz3un2tXB4kzJk4Nx06bprixmCZHGIywFtGE3SicebW7u1lf+OL305eLS65q7Zjg69ef//o1jm51y6fY2lL9hbj6zvL6dD3tVVkGpw+0OEhuyKMyhwcAGUUhVERbzLh5OGgtlobo0j+SKQZaYmA0FlSAHITK5yhiOPRONgAABAABJREFUlElNLkQmW/XHidNapfO8YMfmZG/Pt/tLuTXxCQkZrEJrkLXCBIOhXVkbr4/GLrmD6QRRshymzeGpjbsX05m3cPHes0093z28aSCbLttjp4ez6dyy+LhQATdGw7ph1/q8VMdWj9cR775wdtLUi665cPrMtZ3ttdVjrUKVQUb2+u2b2oGS/5o/YwACFL5jF4WYJLAQsIIjLxAqICKFgpqUoOa3LEUIoJGAExOklBBRiNKdcBkCIksSYUE6YtAc4e1IgEQ4QhI4uswfmVeBhe4YigyCIAqRFlQCnATwCIiNcEQzYkgaiRCFEwISQApJ98Yrpx9c2VyjlZUz5091yYdUh8Uuf+WPy+nMRNbSdi9/RZ1c4dXNWEu+tw9XtpSq4qHTJNE50gaDgtpB5wEaaK21JdSL4tbV5fT68Jveawttb1wLr73GNZqsgMQyWfD1Q4WqkOAVwGjdkSUw5LzUNSlJ4pVKfjGxgwF7l1xtip5yAUlZ0kK5ALMLEokht70Rqty9/pT6wJ+droyBm7Rkxcr6VkHljh8z597nP/dH6ZVnrGAkYmDSCoJHpSCBLB1IQNLAAYgwBRAWdmwxeEaEVDdWexA0oor5YfrMv1XYxsNF7hEhdyGS0j44JBHgKCSIHEhOn1Dvfl+LC/PmpdlLL5ccTQiZDvL8p7LlzegnuSHlpb596Pd2TGhTYJ9AZVL0clfXPJ2r3f3ltdc5JLTWH0Gp5MhXRsCAgCLAwkoDZL3s2P2892rrJpGSLgppufN+GkJPATggAkR1x+ccEgkppRkVInJKoePALKgFgQSFRSEiMwOjgCKrlFhC70US2+xODFMAMdNCxAhCQgQKERRFBlQKERmIdB4I2RBhIhBFGoRJATJ6HzgCK8hMphCj96iMIqVIA5IkZmHhiMAJUBSGyClJYE7CiSWyhCgxCQWxoAbV2oliY/bpj8dXn7n0xS/6xYK7ZnLUQ6GQlTm4tYUx5tK52Zb4eVNzuXYMH3rn4rd+Z6Wo4rC8FHD0kR/v/eAPOspAEDS2hnsf+MD8a1/rsdNEXZTSKlIRqOsGw/Gf+sHB+z60TCqXcW/38ODFr6I71JlhjCG0KUVrMsxsgkAKmTBioQXMwjWf+N0ya7P1jf2nv2h0Ibbf7jXZcBWp51MkFGUNgoK2FkCkDAIq7iBoblsUwLJ39G3AthEJaBG9YyeYl+Aa1koRwHTCPtFaxmQYmZQWIiEQkRBaFSORjfUB6EoMkDGCiEKQOM62bH8gyoLNYrfQ1BNg0SjSSYgAwt6DIBqiKu8Wh2W5GhaNSBdSYKGUusQJ49ItD8rNk4mMUSLzJh5My8rUy9rn1oo+3J74edcm3BIxK7kqwDnh1I5X+/P9w9Wz90s7Te00X11NXU3GhxjKjWPmiUeOvVLvPHs77l7vG9jM7Pbvfnz1RD4erSJTEhus3rjrRNf5FLuHzw33sN4/8D4kTWi1yazNysLaLOv3i6rSmS2qPMuVVkCJgVPomsVy3ja1996HEEN0np2XCKQNaM0mQ9RHj8QjtCMAwNJ7F4No1a/yQVbddWr95nR/b2tPGIDB+dTF0JhMA6YAAAwJQggcfIMRbSZM2pjgGkIlQpnNfAwcAQFdlyZtmxs1TnjP2vpdGyNZtKHxppfnKhtVq5cmB13XZZRpY5JwPLKYCWoiTcZLSDFxCkhAqKLzHJIiFWLamS5bTqYwKaVm7lYzm5OI1ZqIjvp95ajTUAxpiiFFvl2HzuoYZHdvyVrWxkBZlhubLzqK4AVAIWi1PWuLTCfGXn80WUx8YM2ogEWiyeDciZM3d7YJAEp97m33LjwqT1aU1aoa9EABS7RZnmIAjQYtMy4WvsqBgUWAEBhY4MjIA5T3bT6g1RM5chW8RMeh88ELicb05is39GxPr6z7cnjfqc29aVeIPrj04uIzHz91rFSgVs6c+vyffPHB0+enPn9z3qi+nzz1Zd8eHL9rozh+phppFvfq69dnXbCD0WJaz/cWq6vjLqpPvfJ8gE5Aa6WaNuRl3nVdnmtj1KxdvO3e84d7rfehQONbzzEqYQ9HdDBlTWaV8kkiwHKeJpe2bOgeP35hSw7Rgo/Jasx1yaKWdbA6MwpmrcuUXTZJKS4qW+iMhV3HWlOuTYrSOR8SBxBtdS/Luq5JRI1zzocFLzLMlEGTQFCcd8
OVlVkz75pl5+rpzGX94uLpY6N+f+fG3nTSgpdM5NsuHH/y/MYb13euLrpbMdz2+xnIxkAPkvaYQOuuBQseCEfD6r+Tig63DxEMezYaibD2QSnVoEYkkgTWsIg2VhCYZb9tBlV16OTZVy9fvPee8drG9pf+8NMHBz/w8z/3mcNvvHlx94O++M7jw+ULr7vtqI0JKD2dN00dG+ffduH4T/6Vp37p3124dq3qYgLxIKwhSBDWWhFEvxjYx/7WX6mHwl3ePz6GNw6CJ0diekUilXsndWyXnUUVNGKh1WJ5+Du/Mlgd6hhNmRdW15OFMgqf/frzf+mv5ccsfO4Lx8Q6jhERjYmh8yZuPnZsb4zCdu09989uXWrnbWwgL3qkhHKlxr3FvMuEJAaLCjiFLlgaYIqSRNvMC0k5sJVKh0uDNDx3Jr9wrK3w4vsfnhyk6tsfu/bm12F7L8urLF/Tg5MHkxZiwzGElASVRky+JZFerxfrqFAnIJuVymSIuQW9dfV6trXsnzj2/rc/sPvSf4qsWZRfTknIFIWQZucz8Tmm+NRze//sZ2/XN0+OLx7/nr/4wv/5qxvXnipREYnvWkWKFOwESJuDcj5bESVeNbUXRFtk7bRlxGaAJ//CT8k7HzBUwrLDS9cmH/1s/dUvrzYNBcgM6QPMZx09OZ5955/V3/nO3Z/92Q1NkCIkMBjAKlAa7mxuAJP2xmS/5Qkerhcf+FD9+m/a3EiMKYq/7+LKP/vn9WrWm8zyj3/20i/+izMUezoHjL////3nP/3RTz/1+z9n6+5YVsQy7oaaEXqjvqTUtKEY96r+KM+KnWZ+ezHprxJFg60mJNAKMPkEXWpG/VyxXk6iKEgsFavKmjYZk1S9YBZShghVQdZJSqyTj2+1hENIjHIUNMMjjYjvtJsBIAIAASCSAAMgIoIIEiYRFCDBozseHclLAgCoSFKcHNSzM2tnbr/4jfXeys0rLxUnT5Qbm61oNBQyFU+c16tY35ZBAVmvxza3+crxe99lbHHjSx+r5wfdrVfPvuNbrm3TMjINxuWD76Sz91my6M5bRNjfU/N9fvlP5HALOFBCzwlRxGhEzdwVG6d4/8Ddejqf7/U374JsFHQ7eN8P1G0TXv4qYACNRih2rbbEfqnIZLmWrmUfMIhFi3llB2epdcPVwt2Gqn+SxejSuI7k2D2itAqYrNJnTlEvDicL/+Y3cL6TQRv2581+Uw6HddtEez2eXEmbK2vvfh/0Vtu9/TDb2/riHw8Xi/FilmKqFKjcmqoXowuhjSkVRQZVRq3qqxhvPBVu3crYZh7nz395tHoPr6ynkhIHApUYFKMAaZPxUWQwKcUc5nXVtfuvfVltvZEp4uVE6q5FEaLpFz6xfmF1sn43PfG+w+1Zv56rDMPKOI2Gqp9b1VJuuU/ZO5+w73g8hl169o/52a/R7g2oawDG1AKLVgggWiVmQYVHXdZ8VH4HACDIIADeAyTo+zslY12EpAAhcSJIoBWgIVeVa9/z55cnH685G9x9urv4tL/98SD5zHvM9ZeuvjZPOGtDhqr2XZcgI1IaRY5uvFAmQo3WKgUiLITKYwKEhByO5uZHyxWFgI5aI5RSlpAASmsjCKACqAHgO957z+nV8zPGzbsu5oN1Rqq7rhjkoFVoQoxpb2/nxpW9syury529jTzf2Fzd29852D/MSVwTCp2TiNZqJVfWyqZOXVr6zBzMlzVjgaYkREBldW40LjqjbqItaiFnT+d5dmkJuy09sFZsTG+UijJV+sWOS9HvvZCREm9MvnL8sUf3v+1PmdGwfeVq/Y0/rCYzYvChtb4T6hzfheMLNSRYPZuH0HzyF+Tq6+uDk6FcdRfWsyffHkzOhx0sWkSPZLEjcInOPQiv/zFqPYy+wGKhypubjzu7sdbtFjduW93m5Ri0CYv9FJVd9LFnjw36k1/5mdFqXHzlP57fOPfKuQ/1fvQv+96Z0sLqI0/c/u2fn+6/irfeKE50e6+8clLvfeBv/r3RY3/m0qvX44WLr9Tq+ID62y9Mb97iAXmlJIXhxY3l4nK8+/H87d+2W++9Po878bWHBjNr+wc2337XT/f+1Pf1KVv87s/mGcxOD+x7PrT4pndfuTldu3DfIlFvdJK//vnpf/iX43ULFsNCNUn3XAeKxniw+J1fLLLsVOzQ+zT3HBo9u+Z/5R/0lOzu3jxRbpJvCvIyHHUNy+5tSA4xcbDANWFMySXSdUogoEmlg5qXoTy+kZJgo4u8DMEZoeCBk2hjA6cUvTAUWU9A26IXE7etz/tlZrCyeX88pma5PlrfmR4+9+aNyhrdL7RAjB41VYMiVzjs5b3VjatXZ718BUbVXNzV/Vu93qnDkHWk9xvXQI3ejzeH3SxmsZzXaCnb3Z+uDscZVac3H5i7+XJ5qxqUSfjW4QEWwGTWzq83uuttVoeL7mDmjFUoCVGrAspMAywBILEw8FsOIwRIUSCIIAJx0kQc2B4dDhoIRJCZiAFZAI96yBCCiII7wWUleCenjIiSgBMBHYXVEICERRIKRBYAFFQRkEWUKBBQcufAIaLEiQgQKap0x8zKd+oVIpAQESkCIKUEmQCDJdg8fezt79vYqHxuO4vsaxRs5zdWRz21NdOaJIXe1nb70d8aXbyHi3FyhS5GKWJRdDBzMJ1TUYFR7WgtPflwYSh+/UW9u6sgQp71bu2Ej/9hTwl4pzhLOmslcXBlUUiKkiInJgGZTxnRlJXqutC2dPf9cP6uNtMF2mZ7lyqDa2uxLGEx7S6/ZnZvmXqCRgEQohLPXCfl5t3h1d53/kg8dzK1PnMDunbgX3pqduOr+vFvi0PKlI/GZVgSqxQTqSyxU5CEHUsC4OgbikEjIFEyRXfqeP7og+X6erqx5157Ldx8DWOilJDZNp6Cs0klBpaAqKP3cmcUJIZEOOqmC9evmBefXT+eGTwM44F54Inu81/Mlof57m67tYMSpMpTl4oaYvBIYvJcMRK2y8MlXNsbnd4fSzu/fk07FtKBga0SvLOEQBCAEIlThEQcWLtugGyQlYBrQxeg9dBFSM1yoKQsC2HxMRx5clCjynP0kb1jgcjsvWdirTQlJiRQViEo1ACSJAKikFGltUqbt8YGSBRiSloE2BitkBMEQoUmj0aDiFEFIEVJBtGAT6kDDQnBt10UagUTIyvwzgl0WV6KaJ9iSAyCShlmL2ASp4jIaBO4SCpACpF9TI4liiCBxKgUrY837z5z79O/9X/5a0+Xzbytg+TKE7i2K1hRCvHSC4df/L1C4cHlF4abx7Pz70Is5m6ld/p+6w+pp8uTD2188LsbyjElAiLS2Pmmxf7x9Zz3lRc+iECZWyt6H/nB1cfe42WkVlZkvlhu7bbPfNE2h0oESXnXKQKTW9aFaOO7RVFZYwuJCayFpMvUycc+xkqtVRX0j5MdwnAdjq8oibhoMHgUBg0AiCIIGpyHNIfcsiFSVgjQABJD5zA5UAaAgbRgxiaxRUSiUpGOAEDCLAk9K0BR5uh1QVEgBEiJqgwQ/GyhylwpggCkMlZFsMwYbFFCYATRS
GnRWGuIdCJGRSl0Kca8WoP9fZofgDaUr4s2kDzyEo3kVRli42NnrIa9ebixk5T2RkzPrqxsTPf2cjIbZf+N+YHhlG+sJKPWVjamy8MizxVqdF09vZqf2Ozq/ap0SgVU0as229TFPSvdRnPjpde6g8XJFaMbh2WbrY1xs5dGZXASuoCkUKSHuJrTQiORznKrM2vKoizycjiwNsvKXBsiSsQROATfdW03X7aTxh3WofU8W6YuKgGtCTIreU55BqQoMcEREOtIKgoRlVGiB/nAKHvp+s1yNR/m2cGsRhZhGORl47nUOnrhFCgCu3Z1UDSBTQxnskraOjInSIxaAgtzVprae6PtsXx0YTR4aLh+5dZ1ntS51nm/R5ra1t1aTj0KESGDRvIsDkQJH93ykDglBk2REVgEU0oQEiROkcx22251zjeUK9P3MjKWMiQSYtFIERJI0lolSb6NPSL28dlbEwJZH/bBKgFZurSY19WoGvWLYVbNrmxBhEXrG6uHg6pdtNd2D4NwYQwn6VybZ0YpuzM5tMZEwNXN/smxnFopfZCYiCIzEGrNyJTb2HHwgSghi+vaXmTUlJLIEf8PjlLZCkAAODGnFDillDiF1DYenIOuyVu3dfnG07/3scHa6Pt+6EdisPrEsRMP3v3Sq9XC2nvve/B6A3f/375rbXX91ks3Ftx9zxNPvviZz0y763l2/OYLlwo9Xzl9V7u7UJCUoRD9xVPHvQNXpWVQwBkBushFkRdlzjHUy7QEdilF30SOa+v96AJLGo2y0Lm6CVpTIiyyXqnyuV+k6Blhd3/x7ov32fHo2Ve+XtlMvAQQnUQpDCFqUBv9EdZ166JCi8CLBUQLEilTSitSOnehjUJJRGsVUeYYA2qDatkwcdar1prllrEkiK5zvg2i7MQ1XfRglKL+g+fu0YQ7u8ubOwfzrmWEldI+fOL4iPD+s6fKRQc7uzvLuhU8AA5CVw7aQb/HygqlvNQQ1H8nFZEoTGQN5Tn64FbKykfcD4wGjUAAICKJQUcYZ3lmizOD8qItru4d3Ozi5deub1Tmwlrxn//Jv3307/yj0Xf/wOTpT24dPLfWz259Y8v0e1y3QUKfUsNgv/3PXXrix0d///3N3/rmHnTgNSAJiRJAIcVIQmow4PFadCEf20B9VDNOoAu7dJ1iXtSRkaGgOWIbpBQprLFNB9tdltmUfBOij21eZRc43P69j49WGUU5Z1BZ4dS5qLXhJsLW/rH1LjP5Hgz2ZqmXZbYg75Ejg4tct2VeLWe1BQ7ea2VMnoPSwFGR6ReDNiwhGbdcWk4c6caNRW/1xF5z22a8tbX3+Pl3ju99v5t91mC7PNiKB7c8hKzXN9kweAdKU2UwMLIkY6goVFmJrrrDG2W3tKQI8b5edfBbv2K0qac3z46zriXEUhmFIKAUKatSSp23pT1eZrMb107ftxnKM1M8SWffSZeuorSQkhXQSWoXu2/73vM/8+H5V7565X/5lbsrNNpG502uyHviCCj5YLgIk0Vc6HKkHr+398hDt/9xXPnkFzJDICErTJ7J7PJLu/Rj7uS9xx++AM+9fCSMABCwgLEQInACotB5/cS7fXXKz2N69CEPv6YTGYuSojm9Nh+emddLNhG/4/1+90V446vZ5VqK/jhCt7f76Ps/8ubznzl9bPTAE49/4Y+/5PyB1UyQhr3B+ko/AvtQH872+sPS80yS87ENDFmOZZ4ziQpKALQxqAQg9vKiNMk5XtYhMGujMzJis+FwnMIykQZKzCl7C1/Hdwpk7jzzESUxIxEhiAgRggAczXrvQK7xDopCEBEZhJAQ6eifU2IGQW63b10ZndwsNzbTZH88HoTtK02/V5y6kKjPOlO6mLUwIEiApaLFjRv1ckbj1X5W4am74r61VTnbu5JZSVXZ2zxbnXqwYRA308Hb8WZ2/gGOnXQHUh+Ii6AkRRZkawwkYw3Gwd1uqfK1k6Mn3tOFtRY0C81pWD7xvWn7NeimHCLZPjkiSRQja4dGMHkBMFmZ27wN2BuO4PB6Pbk1uu+Rrr+2vPnacL3opmmZOu2lSBxSC7ZUqiLd9rpF99qBqnQGvWWdEF0moJvWhi2+vZVu3Wat0/buYLwxqB00ScQkpRUl37ouemZJ5QB0Jm2jlh5NYaYTfO6TmRrhifvxkUfUfOvr//bnHvwzf85sXuQiF0TKrACm4GKKBoG7BusJcS03Lr/07FMXT9zjt3Zc10FbU2F8b6DvfcAsF7yYl/uz5Wd+u+ROFTqqDMcbqMvYRF1oYzMYARSq62KAkRod6y7cZ155w3/5q8PlTRwZmc/AHWW/CTRHFiESQZHEAEpABOQO7wqAAfAIhEtJmBmSCCTRoqDloNXwOz6cHvnmVjIMKTFMq2NtLHOdh0zPWn7+9etNStpQ44LSyhIakCKjzvPSRxApjFaIVilFyrnAglZrQAyQBFLiJCKEeHTJUIioLTMKYWatC/6I3H60C371U88RvNlB3Bz/8be8+51rg0FV9MKgSk09nRwYoL3rO9dv7mgT1ywxO9VBz3QXhriic0lGJxCWYZEVHEYlbJrMND4ynDm1QsFPdvfLIhNjlKLCRPIgexNj1TjLZovd3XY58M1dg/zb7lnNuk7lme8kQ0LPiTvIcqugXuykuqHhmWXIDq5+bbBYjobDKIaX81jvYLfQ06vGMsuU3fMUa+dvTtDoosw2T8QTA+9aaDpDyvSGyXVSWGOHsZvcunTVpHIFlv0xTH73b/v3fkR/8Cc7L7L9WvuZ/9PG67A8gE4UhGw01gUDzf100muXuYs5jWp7cfX8+2n/hf0/+VcHr780ltVyvnzgu378NX2vbRePnqn+4Je/VDz2bfTVSze/9OunPvy99T3vbajc/+qn+Qs/88T7L6jYOLQf+8Sn3vvA+/Rd7/zS174+KmEe5NSHP7jpdieff/7l2378zj8beiee++d/5dTlz1XvfPfwO3/oyo2XenBO3KW4t73RK9pnrheX3lBIfravlZuvnjDjTXXzBVgioOqjgXYOEMAUrECr3B3syAtPrwzzlSqHxdayxdcl1fd/s147Nv76Zwf7V0l3wCLJAwZlUfdyl3lxUYvqib57fP/LLjYGC2sGGBiU8U1uC5Sy8U0QiFGU0m3nlCGboyY9zMt6Pi+q/mA0WnRe9weXdrd3dm+XpB49edwTHu7tD4cVMFRrK87XLrjk/PGNcZ6PWh2eu/HmXWdP3rh1re66IssG1XC2d0uxjCrlYldqzHv5bBnLohyWw3G1/o03Xlu0i40+7Xdtr29Pr/ZV8M5g7cJy30cWyZUFyBJO9hsX4qA0rO9MktMRn05YRJIAC0eGhMAglKIhtEhMojXRHVfQW0cCIQCmFO+klVMCZGEGIBI6AlWDiAZBSXIUVGZBAmZARGCKwEKgAEkIgUSYUCOKiChUGhXiHdY1A4sIADOzABJoQR2O7K0oIozCKblhr2eUCUFMpcT5FIG1LnplMshahRRTBMWqv7uAg6cioR+cMecedW5eT/Yy9rrMITIjxbc/fvjB95R1k93aVQd7IATC0rLq
FqCEeqU3Ktk8aIAgrm2JlyKMOkNttCJmUZKUcByt4AOPuvV1bxWgwrKXMpsG66qqwiabcw9niy336d/OZtvaaCkrlQ1ZsqhhYDfk9afHvTZOZniIeO3QLCbr1V2zKwu393E7ebkwFmypREUQtbmuGhUPttJkyhJIk2JArSFwF4kfeTL7rg80J08kW2Xzbp7/XrXzRj8m8UmiipyCECElSEIAHLXSPkiIYAhRISZGSJVf0B99Vg2YN0d8+j07xx5S9+74r36pmEfFgJi4icRcDIdtkwJDCF4ZJcHZTPkbV7v/8m/Q+qxpwejOg8pUwIgEEiUmJoLERzAbSsI6du2VZ7PUDAqd+vlOmwiQEFJ0ECWGvPMBAIKwNhkqo0GAJClwnGLkGBODSswaRWJg4MjRoEImkaNaPSyNCYxFXlh953mQksQQG6K5RgQuMwHGwpaISYSIlEYCIMMiklgSagohOfGRoo9ch9QEdIID3ZPYhqiYo1EoEuhooKGyJCGwD8kjYEqRUxJBZggCXYgsYLSiFM+euPCB7/zIWv/sa9WnW+hIpRYguAQW0dooQpFxZ+fGx3517ewqWhife6LOTBtd/9wx9cgDky/8oRmsHn/no7rI3HyZZUhZHhkyq/Z391Ld5AXZUY7OxWiiHVX3vh/6PVou/JUdmHeb2erWa5+Brvae0QAqaJdzbVbA5qp3CvsrrW4rk8e9CTtPWmswGG2qWRVVXCpYz/j0ydrqYnmo2FGRg48QE/uGlBI/Q3aQOnEezTDmJaqkgkcNkFSigsgKJ7ACfk7s8IiAogBKBJkJ9EgbEIAgGIATkxYOTC4pUwIlX7caLYSQuoQQsRgyRjFZSh2DwtCSMWw0UBZZFEdQLEiitIjXlCQ24DpIgK1nbFAiaANKQS9nCoq0JhKOGANGV6Ls3dqdWC3JVcqX2lN0QqCLge6X0S6T20XXSt3gaFCOVpPX5fh0DBEUm+S7Wzc015sXH17cymh6EG/cjlFHhm7py+MmKraFWkym7XI+mXQ3t9u2ixah1IRWFWWelMmywuaZtTbLrNGEyJJi4ODq1ns3W9S7k3qyaDqWzksUDQgGUWnOjdgMlVFyNDYTUObOLijynLOiQhmNtVXm5nUpwIy1wsKUVhVCp8qS575jJewTu36eV1kVFOe2tyJqXOYGhCOTwNS7iI5QhEzrA0i6b9DPu3S7vd0resYqBRJjijHO2m4SPWZFcrFGiAl0kTMxJ1YgPoR0JKMQLyK2ziegw7l3nBSkkGSnbqYo0WMpamxzFZzNMkTmlIJEIlJKKSIiShBH1qy3xWvTmoH3pgsTk86sa0QB1ZMlKWpcGPZzTjwwKi/s3sFEBFOQIGlj2IMYFBujdFHlyBwROaW9qweXvvbi02ufPHH2gtKVFsg3+zovk4Y2MKKm0rD3uiyswiQiKZFSR2P6I5Y/S5KjobygAoOISqlkiHKdBJY72zefef7UibUf/u4fClpKaUZ+brfqbmcLp/N3/fD33rwxhxzRVle3p5Pb+ytZ/vl//e87t/yuH/3wH3zqKzYs+9nxSlcbK2U9aeaxya0u8nz3YGuCbdTm5MoAoyyW3qfQzV2uNCB1XdAornFAqpk3KQYGHo4GlGtNlBLkxXg6Xyzcsk2+WTijdJHkw+947Lc/8dFD5zTZUT/rWuedTymlGB3wXOpekRsOne+K3IiwSDDWeh8cGiVBK21Jiw+gdMpw6ZwDidxJZHHQ+hmkZfQKwPTzPCPVdTOGhKPe6PiG2w+7e+3qimmFt9rWOR+TGNCn1/pZ6gblAKzLjLq9t++ADxq/F2Mjansyr5CzFRUl/Vdv3VtSkdLG5uhDaLxC7Jvs0IdekYmmlCQXFBYI3C5cV2AvE39jf9bgwf7BwnHW9czikLYPNhy9g7T9potnH+ld+tH/yD7NQFZPbaqDWZzuHz3AE4YkYI8d18dPtLtvGkyCdOThUAoNUESAM6uztVGtRSkYUl86jIAaVU+nrnNrw9GNw+lhb9RdPFWun5BXb9D2LQzLYVUKElizZwzneh68cry2Og6xiYQhI1SZTaJBI4oKuPNLf9g79cLhfJd3/YV+f9EsEZS1OQGXOS4Xvuz3kmdTEBrNTIRGJGZG+pWp/YKyAlSB1BQ9NZsvEy5GqydoELGfHxPz3P/+T/zTV+9fz/YO923W8/NFr6hK1e9qP87WIhmQmsFooxeugwvr6+97ux2O8etPw5efjXWDSKKhXHZGI6gRRk3GxNZRZAKvEDgICaMh30kzi9Mza4/8+F//2M/9/PrLL25mFwZV5mdJMwtEo1JRUhVckla/++7jf+X7m1/8DyUYZfO2dSY3qUmmw2sff2b1R95ZnulJrhPpxozv+XMfqT/x8b4ZiyaGpFK3eP4Zc3CLH7S9Jx+rP/98VZVACIoAIElSWgNHEDHWFEuyodiCrfTgE+X99/mvXqMYDXP73MtDR2qYL3deLs48eM/P/MP8U/+x+//8Qru3P8As7N7+vh/4if/t0ldJxenu4n3veH9y+7Wr97dvj0areZGrvF8vDw/afe6JjqmuGwmhBaWtaduQ95ULjZJBYrJaCGwhxNGhRlNI8oGUOM1UpQO6IXnnXDCZ0kBdiHfs1keg0TsWoiO+C4pAEqH/ViaOd3jWeEcEoLcqkY8qcI7+jvBIa1KG8PVnvnZs/KHCDhCzzcHZdu/2cvaJfPn45iPv1ouuH+pBdrzptglF2rB+/CINB9TLpH9yffPhFLvFrUt+/6VmcT25lmPDYEBlWZWVsRPuxOZ13Y3Wz3n6ilIBXUTQREIiKNwEzu979+Lcw1BVy7yIXbTJZUXVBpTsNPYr4yZaK77T/ZwxdzEwqCoxZZkVq7Bf+ckSJRa+swRpMIjjjYHN4vXnGxfXHnhYXXi4fe753GTdwdKMV8I8ocZiOIa+mk86e/qY1It2b57risVjZWERUZM1Yz/vQvCkNJLEJEVVGASITZgfFlk/ONBJY2TNisMido2+/x23p3zsvruV3H98eCHMDimbKaWgMMx8Z4puUXGk+f5rn/uVE8VqwNFd7/9p67r6+WfV4cRWBa2tH0QapVJ2t9L1nSwJd85WuY6k8tTdukz7N3H9OFgbgJQtRFmlY0WiigL7ZTz1SHX2IfjoPw19FbOAu0idIxYkUpIg3GFDC4FEQAAUOAp/oQJQwAICLACAQAIEoslE5ebH8vwD37VPY3EBREjjY9/z/U/tz7/++5/cHENb5bEBEAydz4wpDLGAUQAsDiExJKGcUARiBMeBSCVOnODos3306FUEgkioECBxsJlVKkspLHzUgALs3Z1dgJ02SlYH+bpCuf7inpfbbSQALYE5eReNLvvK5oUGkswYjoikjNVruuy6prAmhFAa6utc+XaAXFjQuVnkuCxV3VvXAO7m9HiZc+NtVRZl4etGl9myXSb050cV9m2qQyyKrrCpKO2SlCwFtE8Csc6Sii99rZ7dAqkeWu5VBTaLGu1Q6SJmQ5v3TJY3Ny7ZlHSZxd6w/JYfKX7o4uz6FTc7GK4NDQRh0aSRwRCm6FOY1bbXfOAnx48
+eOlLv3zuy7/cK8gtb/frfX71S/qNT/W2Xof9RbM9p2GOqVNlA5rbsldf/DZb9OrNcvnsN1ZVSZ/4BfnN63f1K62NTwLrp7o4Nk24/Mu/kNPiJ37oR576xKcPF9uTy1c//cWfO/0X/tYH/+xffuQvfuTLT/32i9+4cvv5v//4T//4kx/8sbpb0fH4tzx5vrv58u3rt0Z5HrxxZTV88KH18bH25c/pK180+XpzzR7+1m+ldmvr9z6RWdVZXjl3url+OJ82AzPYMVV9/v7xd/yk//oX7Kufzc065DYFhig+CnSNUXqOwt/2XXU52H79tY1M0v5s+tD9vW/+3s1Tj+ZuN7XXwld2lBFTKh+N7ZcpuHrZ6MwSKZsgN/rNpz73ne/50b1z52/t74d27g8ng17HdROEISZNQplVWSYhKIMaue2CNbpX9hkYUSxJfTjnwMPB+smz55plPa3dRn5s88TJg8Pps69f22uX68fWhHyemUU3ncZ2PatCqI2xlbWxdu3hzE2DtvZwKzQLF5gdoCjNpG/s7SyaRV7kJrdlppJ22sYs17duT50tNtb7XUpJaW01t8vZtBaXskx1s7BM7i2piEmAhQAwcBKRyBxFWARYIoEoIYWSEihFyEfpsQTC/JbmenQuHGHFEAAlSRQWBSgp8h12NQADgQiDMCdhRgX0VoECAgKgwjtDClCoFCJoQmZWkASJJaEyERiEBYlICyCgSikiGgOYEdV7NzUawyYsQ2EAGVPd+q2tMoa833c+xohGEEVrpTVGaA947w2tDRlB1YO6AwW4Uul7Ts7aST9yqSIZBNIsFJkIlXB0XWCTqKiw7PMyJAghtVkyhNGCkRQAEZyHGCmrQhLl5joqnRX5iCJIF5eUQARSZlq1gg+/K770nN69ijwjbmntbr5w78LoYu3E/KsvuS99fhW1n/piMPQh9TZC6fcs5OJcjJ0Ekc3Vbr2My07JADqHzQx8FB8NZSkkXN9IFx+OG2PoiRgfinz1T73X3PyT9JUr5IlDYpRIEjAqTZCQWDAkRUhKhGNKGgSzXimJFIPsu3a6rcbzJCz9XgoOWZlMBU0+w2gsZ8hFRUYx4f6tvT4pw4iBpZ0hJVGSEiqVeR+0wggiSVCAhVmRS2BIWaLYpOb6ARQmG1vnWlPmfZuhRSEVjQLB4AFQIjKgaEwEiMGlkIQhxZQioyYFhCxKaWAUFiJMwAAxJKeVxiS5qQhQ0Z3ngescJJlGEgGXeI1UYUyHYLWKQAQqAyCCDJkTBlZJIBFHSWLQueAhdTEkxkAx5uAQtLJAlGuFpFKMIiz41omYEoROMwJzEBAGPtKMXOwVxYWzj62sn8dqfPq973nhjY/qphNtSCkfI2mKgS2hacOtZ15N7dm7PvxhOHk+3Jwxg4ugDg67vUXdmcH2VK7f9nnFVoYnqjYlBvRXb8DhMvigKquAJFec4d4sKZ5unDrHe5cyAdh9M5/vLTxLotRCWVJW9SB5XO6QUqo/iGUeI9ty4GK0JzdclKw6blKAgQJVpFNjPjHyyWVBbOrAJQkBTUG6AI6YBKgHKmHtlZqiCFUDWCxBIqChrEpdS9GxijqwhA7JUjHs3DI3RlxAdlAgJEDUAKKBlTCkkGYzGmSYMMsNtB0EBTaTltGkeHAzU2OT5wgWJRdRHBkjGzFSz0k82xyKHPIi+taMx6roRR9YM8BRogrjYi7sscwJMDSNKcrq2Hq9v7042DPGzBaLZd0IQdfUbtEmnd545fKTP/YjDJNM/Gg8avZuliv3Nu2cYo6mL64jrSBThU0mLn0IFy68fetg+3BnorUSxpBouTerMNoi63SJPbt/dXaYdLSqyi20LZJCwirPbJ7lRU9rZTUqZE4e2HEKwbeTRb17uDhYeBdl6TgkYlGoQFmsCigKFsXaaGZE0CEIvrULmqbt2ma8skKJ9+YHgFTvN5s636zUqFdWrP1sboqMEBoP1makDEPmo/ORcqMVc2AhYzFRDxKgc6Gbt9E7MqjGSLkmEbaZNZmW4JNwVGZvMb/adqdNMTDm1mEzyquhEZMBInqOWqmFj9s+7nmYLn2Y11rbGFMAGWZa+SgACsSQtq07MewZBGYxphBOBIiQEDCyGKUEAHw8OSiGi4UtzP7CrRRmb9EarY1GQEnMkgCRRiN72DYBATUTUZ7rnili9ASCIMqoXp41jTvYnxmrVkYDyOwf/M7nz67tPfbEO1SansRTeVYUK1XQGqNLMUFiSQElEgojHnmJjoYfR82fRMyAkEAAUUgEES1qYTS+xSsvX7/0wjM/+dd+bOX0+ktfeial4rFvfscbu3uD3ulXvr5/4vzdtJa/+IXnd156XU8PnnjXPb3x2s5tePHTX8l32mY7HBtcHK2eH/VMO61RRdeGCJbyPC1rqzLXSeOcRRSSMisHVXX71i6D9CrbdjEAtimAD8aY+aTTBpUgRGHbFRYWcwcK8nHfBF+CyVXcXS6MLV3keeO0ILOSlLQmJj0Pvm9IG9RRlKgg4FIsemsJQwCoQ1jTKrhOA1JiI1lKrjRK21JUuXuwx6FWSrrGKwKOmGmscfGhb37voc0+/+I3ekul2zSD4sXLV6e+4cSUIOq4bBZKc902zsWexovrQ4A4b/3ceWfVyzfmO5NmR+daeH3Y+++kIqvMaDBo9g9rL4R4a6/plVWJea+fmSy7fWM/JQyOtdF1CqEOB7y8sbW/FH32yR/7yE/+D1//R3915eZLq6Zd/qu/f+bdF269/A13c3L11MaJn/075dseDp/87OV/9S/uN7ChYfePfmejV1Sx23r9zYqQQBFCOiI/psBCnnna1ffqA9w5uPkHv7/WNGWWL12XOqcxkMDO3iEdP7X+13+q/sC3rA5XBr/5O7v/7H/Lg4NoO6HlysbGz/zPnZ10n/vDxae/rHzLyEKWGQNHimwNQuSBLvB2O3nxK6qMqhx4tEoVh13nTeAYbIJCUr19S2udSRFDImWltC3PsgHsNwurKmt1WRWuk5hCr2+ml1/46l/9K8fPjC+/+Vo6aPt1UYR8e7drfcoN9QYrIuCcK2zlF3uYZQrAz7tsOBge28jf8+j01LHdg4O7L56ib7xhuw67GBdLEARIypjYScIuUUiYSMWYUJLm5LNeT2uU4PWLW5f/3//4scm+39ozxW1Xx9QlYMVRKKfc6PCHv8MPdL3v+9Z4/33zsqBpg5x83XiWPC9VVqQ65MfOzFXsfOLY9dX+wVOfq4qec8RMCKBUcSqPb/zzv3vi+9++fOrVUTEGTpACAAOpo24lRQSJQYXsi3/kf/UXsm/9zkmRbbzvW/yz/8aAgGe12IfXvuqffMSNNheXtsuXXzr+xRd4PxbHshHF25//2NlveiwblrZZ7u9ehzN5TqYab1bD9Wa6yLK8a+v9ya6TdtAbLuYTIAFQueqF2nUpRgmYIEAEApMjpRhFJDctxKaSYJhTWFlVSF1QDCkxpI6UMGD+VjfsnfYZEUCQ/0okAjiyGgkACAi+xSm6Yz06glmDCL4VPsO31CQWUQA7V16dHb6bs74qj23vXqs6tPO0+PwXLr
G9N4p22S2MyCaA0B3h4VidJRS2SUGDk0oQtJAoSRotVMru1rG0hEIQblHc1nam+BhxFTAwm6yBIVOo1CqnUNQNDiNZD3nVUEHAUQwFLkGDwIk4QYAuZGa8XAMVWD46dHq6fV4vD2C0/3/LR962nKimQyRwi9PN/emyXWFMvLXFbATphyAvZdgGbvua8c+8AHwvLS0vLq3vxwvQAw+ZzWRsMH48Z5nszKemGKpD64BsGhlqQghRg6PkiyB370/zw8fr+uQ/2N1xYvXMnf/f4uN4IcU2yX08OMTmwOVfBN2+aJjqHpG4TDenmvAt8KI2wcB+zJzsUzSQdvfBE3diNKeW6zy/xw2sZpnSoEm4fpzOxeTdY17b0OOxuK1mI2UChASiVWWc1tB5FVbCE4IIDgQZHUFer8Dk3z7F3yrd9f9obU+D4quPUW7ExAd6rpYtXx8bP08CMhtyoGDA2hCZSp84/XVy6l+28ROGDCuuN2Dlwit2hz9hDLKrM9YgTxaCn6BbADyhFQmjl0MZZjMhrJZUVuE9u4eRz4emX9ru/9M14vt9O6GGogb4oiRL3YiqrOItq733l894XPhV5/7+p/2ljNACpg7mpHiQGb6d46JRTDjMD4uiJrSCXluEl6KaVCKFoky7W2XNWla3Rp1LDoy2Iyn8yiw8PZLOkP6nLmKIUEXOtYS9DogmhrlAKdkjJEKmhlgohgDCJtgIaxCaH10bvgO+9DZGaOEQSEI/Od6s2Fi9d8BFGY5Lqatha1QhPR2FRNqlZ8zAlNrmPjoldC0niXpMoHKIPjwB3ILHZj5zv2lNgYJDLv+ja+cWn7sLK2cC05QxSZmItRb3Y4L0xqTF6sZs77knnh0WZYt10eDGEyn/n5zcV6Pnzizzzx2TeuLQ5uSAZqMNBFmiSqabpQx1Sn1pLLkpG2TVlPF64wKgYPiZ7UZYjMHK1RQUBlRgVuWzevG2PNsN/TEsUHFGSBybRuKw9WKQXakBYQhWliXdshqX4vjywH47nVVh2JO4XzPFFKN2WjkV555o27Nu8a3p2u9QFTUksWDNatM5bIs7GpMACDIPJRpIgUw9GWC1kiAB4Ri1hY7tCMJHauGc/t2bQOsjJYP3Hs1Gb9xvzG1nf9oVPmZJFrfm7r8idfvbLH873x1CqrlnJfhVWFvTQtm6Y/KqzQ9mSGqbk9GedZ0vnGEhR5kmcmy5Oq8kh6US3Kts2yZHd31rRtYAQWVBTAzrzaGFoXomXoAngFB4v58WK1cb5if1Dh1uFsda3vq7pvNbCddPW7RnevNNO9gxk17ljaP5kPnnnr8t7CSZEX/Xw9oW4xp37PJnKiGJQdevGTarFhhoXpXZ5c9bFum2q1n24MMiMkytaxaTpfY1jt98UTsrQuSqrMKI/ec+fX1vu7s0liTGENh6493Dc+dZ2rWsgTk2QqxG4+nw37g8PDQ03DopcaykEAWOqqa1zIbbbg9r8qDu6MitquJaeGwx6n6awLVdVpjc5HTWA1+RDLukuMabjZ3Zp+33c//pl/+8//h7/5Y8GnX3n68rE9CCkfIrzvx/76iMzFf/sLxdVxVzkicFoaw8NCla3YbJA2h8iRFIIYFghABsm9/tbs87/Hxz8W8zW3KBe/+ZurN/eMFyACBwRq23crf+evzv/At45PH1urePKZX8eGAXTdBVFkDKg6hGfeqBL0GHpJzlHEKEZGQFTaUo4B5jY//jd/eOlPf79iPf/Xvzr7P/4x2IP5+LVw4q5EQn9ppbc2yhItHJUySWaAUSU8ffPaOYe9TW12bHetTT21XUClidkg+Qx/m+R7/sHfPTWdHvy9f0N1yzWrfpoNR9F3HCQezLUJAwX7v/nU5GBr+PC95h0f1iujaao5Wuk6r8I80/zeD7WuN9gbu9sXTblIcAASrR6Cg9DUJB1CkIRCCCJIEkEwug4fPBH+xPtN+6H9/+0n4/ZWz4hqF4S85PepXChOFGpbJK3rOAoTaiQVSYFKQYOP4py2tgmBSIkXspbbVsBlhbV7bfuLn1CrI294ePNWH4eRg5DRRyebRKBjBCrrZti3KBgXnRB6iVqACDUH0DA5vZ7/659bQLTz2/LxzyCGY3r3jd/6Z/JDf6544K4QAJAls15B6522Cg21AeKUU22WfTL/jS89+nfeM1g+c1BO59ProZlIiIB60Msnbqqt1hSt0eNpmSCsrAxHS4O2rRqPmnRsGmsVkXXNzNiMQ9TMCiImaTWH4AKASnIcDlUbfBt5Npn/d7mho/qZ3PGgHYWHjmrLzHfwRYCABISoUBBB3ZkFHT2mk1Z0dEwMCCx8Z3svwAjaKEGIzDG6w/3b1w8G/XMb45oub934z7/82TONJJH7jNZjw7FbFkj625f3876+6/H7T9+/dvCF38kECEEYEAgEjm60dyJdACB8xFQVUYKoIJrI2E7b6aty9rvWP/IDi6cO9fXbaaajdLL7lju4JroMaZLbFdt75/jSxX651aumvhk3177QpizenoCxRM8OQAxpo0KAtrValm5dbn/zJ9a/62+5/vm4+mS8/vvsRCV9IqUCYtUAOaBEhYgoKRrvHCoSH+hICKcFSMB1CjgwA+pIjXBnjAphjolxk3GmN3sVZV/5fV5c+ODQ1vOEQpGrnbh9uWh8V9buYDeLk+CCuKDBqOhinCMewcuDazzX3I6O4ff9SH7fse4bX5h//ql+OZVYruTL1Td+vbz6nFs6xqAVkW0Ok9mM9m6k5aQP3rMJEJUBcmwiw61d/41PqW8575OV0vvRAk0N5eeeVl4kt2BYK+xcJCfogZ2LSnsix5DU8oX/7T/m+3oc2u3qzSx98+7NQTVmc0wucpkeW9lHee9Hn7h0ce+TT795cUySpqHuOh88gwuRBaKA4B2vngIgRAaIb5fRSFFAEAGjNGmtExNENHGiSRNoCNb4XkapQU0eEKwKfSXxaHQpSNYIy3ReSrgTqVNWAYDrTL52mjY3ISkMOj8vWxMoM1m/12CkVDdW83DF66rfX0mHK/zIOfPg6Y14Y/t3fvdYcXxW7pN3ufJExMY2bV15n8W2GPQiJtjbqCBlMj7fgBP31kUS8/QAWa3H6uKFZnWsl09dF9MnKMrKDOGBxzfT+mCxfdgPJneYRqePpA4BEJtgZLIoV/KldnzYLA5XHzmbfOxDh8fvFpMtqdi+9Ezcf3XrV/6lrqhsDuONuLS+BNSRX+Dkonr+N8fZwX3/6m9upUsHq8elas7B8a1/+Ddg7/YgWRd7ps3vyd5x386VS0maG336pae+ujY7KM6+t/eOb3JNbKfV4OSD1a/++/aVr2tlV3ysXrsgLfVs4xZdiLFMKUuL+nCnPb5sH3zQPv4RRzpgIPRNVbrKGSmaduLaWkB8XcV5o40hZTACzwNHioEaX6Oy/dUs+IxtPj6YF4O+F4plpTtPqJIkdV3HjimENNcNuLXVc9nweDNurI+jfMjzKtTTwewVs/Vq6kAfsv/Cb8YvgxqcnkvIv/n9cj/VivrtKKqQqRI7TrvIv/jzzc9Pz9bcT4xnPBzl9Ke+Lz/3GG7XJzG
kjz7avfZCVmSDC18/vPC11Nfn8gwz6wMniQDp6GyuUSXhfD/bofTsw/f/3L/4J9/2l/6WbxBZdYGtpqJYGnfJo489eG5J//rP/Koq+mJiv5/Vk65s3Km1cyfOnZ0eLvRy33QuydP15X5cLPoWKRu42djabHx7vpbDMB1UFXtJ9uY8mZa91UwyPxiMOoeenM7k3odPXn75ymOPvGPy8k3rOrdbXtypxFWhC8lgMN45yIaDtY2RZbgOMwAgEYWEAAYFATTCUT4ISSlC4agJkZAFjn5No0YEQCIEQSAQIgWIR+YDpY7WDCAMR6VmdUchbkCABVEEYiQWYhARZmaBlqMHYCFGQlDMoIxRgAYgUXQUaEIREIajIZSwHM2QhTCCNdozhRDJQnRdfXArdro93JsrA13bd6dsrtz+pGfJd4upm3eHk7PnHp5cutY0Owezrjiz3hWrlGbRoHn0IUNle+M2OsAIFLWJhjsZCqW3x/XeJDz/SlKFNGrqYhIlSLSKhIGUbvd2ZbbdDDK7tLIzvwazQ8JDPbQ02UknB10LCgkIOAAdhWMgQowSgBSSBCKqTd8Pz+Z9qyS0QspD3LmKsGjDZa2D6hgCUJ4nS1mEltE1i6nmgFGsRdeOlcm01r4wbUbx7ndU7W8rDxhBMWuNZnlUDk9nm8X6hr341EtFX3qr52aHc/9W2xZq6cPvacsr7sZeY2bFI0/g5Un7xk0rpNs4/rVPaA5UdkTasWhSqFAlitBxAIySat14DwiB2SBoBPZHr5FijKyVF0pRUeu7y53KoXHQxVg57iItWIcUGXBM4fRjx/e6STy8URi9f83vlO1rV24uAtukl2a2qhY+hsgBNRAjHsnzNHYgAVpmyguNzIBeQmwWZevaxndoqa+sxsyS8sEb1AQKUVlt2m6Bd7YFYERZMBCBA3kP0nowTZeQNz4GTBCaI59xCBQMdBQXgmVIKRr2CoNOrSLUITB1AsYFYXY+imIMoPGIzQ6MHBWwgIgoROM9zdsOMpOQXdo8aQepzY+du298+8JbiZUhFbNo28YsbNI//w63edfm4++AK29c/+wXu0nTKN9LtUZoti5Xt16m1XPT+lLTzWdRXM/ms13bPbay8Zh7+ZmlUdb5ilbuwfOPVV/7WrxxI5UQARmy4EYY0aX5klbb6erm/+Ov1hqN0rENpmo3l1YyG+fseyv51muvnzxxKly/HYJXHXdzL6mqy4kcXhs9eBYOblRPv2H4+qJTa3/+f+Kza7v/+/9nVDoCALJaARyO4a03DAQeX6Oz74X105474z1IAoIUInQdVCV0FSgFvoRIlGjILKhERGh92aTsTSCIdHYZDpijJxTgKIamaPomx7K2gwGRrXcPVL4WRNmH3hOeL211FbAFnlPZiV9E9JyvaGttsoQYpHXQVJBalQAkaTQF+hJ9g4wqSaCbYjP3beAkW4x6oz/8rQ+/672NWk7AKzcjV1tpILQqNfr4xij94I2Xf2F7fOXUt/7hyq1n+XtyLYeXL9119i5YTm+9eq1391nje1Sz7fW9U+AMhujqVtuei5zqQiVYl461CISNjZXSJT4kXiwkcbSiG09n1lffmE9nroyip4dTEo0m2iSgsjZmykaLoFmTVl5FQQWIXfARVPDcdc4773zrfeuiD+wiexahIwHX0e64aYskvWc0HK0WzXo3nlRN103aWivbAthe2pGcHhRZVry1v4iBWx86gYNmQXmS2cw5P2naeddpo9hD1zoolBO6fdBsrK40ESbQHubmkQcevPXm9YHRx4bZ7uE8C2poBXU8/+Dd24v45sW31lILISa93o19+Nmf+NXetRd+8if+4UOPPnL63Q8MHzn2my+8eL2ufKMQk8HaQJGJCG1d4SiVAL1hLyeY19VBU/et9U0XSIDUsFe0ndMctVXeEQfp6oZRjNFRofOeiYLCRClrFJJkVtdNZ7ViRd4LRhGWBkIA6Pc0MFZ10+sXIQqC+MjXrxzcemvn/nMnYobpxrB2HQaAKBqsb7wii6SJdAR+G/0K/7W5gUdYa2BEQEEEjD4orVBh9K5XkBgVleRrxx+976GHPpyd2Ei9i4NTKy8dHi5EWTtQ1LBgZBwWue4gtBIDujwriv71w5uHXXV+c0W06SAMB8ViujCpWUxrhcidyxn6WdFJXOulLrddF7abRog0q6ff2v7w6dFSQewbZr0/n99/ZqNzrt/PsjpUrQshJCodUpjNyiQxmNiXb1xdL5IHVlcXe7PT/WE/TQ/mDZkkT2xsWmWKw6Y7aKcDVZy7554Lb9zKNbxr89RALb2w9Ro39dCQzrLUQ6qisEyq+Xp/U5Q+WBz0k2hQiZKq7myq5lPPrRPw83ovV3h8OKgO5+TbU5vr43lLKWiMZeNCFFRqEgUbN6+ESRURBVVPqbaLIbCJsgFqZW0t0/Ty9Zv/bVTkvFijr+2UZdeRNbnRGGNTtpZB26T1zvlIEcuSv+9b/vgf+xM/8g9+9C+aN54f9QabwE9+5wO//cmX7vmub147/c04v46hGvYpC6ryvDMszv7tv9gON6ApcGfifuWfJpMd8CKECCCRAXCEces//5JRzfE/9KevffHLV/6Pn7zLmFRBZJQoDlzvo9+e/vDHSkpoPmuvv2KLtItsrCijnIggksHEGOGANtGF7hrvvUMLNs9ipERlAIB9O3jvt9xMcyfNsT/+KPxUq+b1YHv/NNY7Fy6Qw71n3rCURBDvo1YUOJjEbEo7/7H/cXTa3Hzx5tIs9WhcYLCUGMgUXV2Vb/vF//TU7Wfziy88EMvlgSUMgR0sdnLVWm1aGyBGsvr46Ji8Og2vfXH+X74QRC8PBj4xZJbKztPGg63P3aVXqyuvrOZDDSpMW+aW0n7bOp1S6EplRdsCwbIPJrU2TQHC1qSchZ7qFclD7wk7XzC93rqJi1e+khdRqwQpDQFiYM8MHLFZRNdoIvQBvOSFaVkLR6tFaYz+6HHWIBBFVIF0jDTzoKJuHTsfJQopk2iQABirRObrfeM36q2t1EVlFDBrpVAEBQAkglv64T97fbiOdcL5xgYmGQG0Xf/nfub4H/nOl77w79dT8A46gJpZKUQNAJFRROsInEVJghlFs16cZtjbrtxkNk0NCcOk8YO8Z6wty3lEiYJNlNb75aXlxTiZVZVJMw5tkipNCZNtXTBKuq4mnag0SdDoSjFz7VgL9FJjoiRGAczu3KOOCrMiIIJ4h1ctAHAnCId3Wmlvax01gRIAwSjio8jRMA0EgTiKFz4iQiIc9YVAkdKKggQiwOiuvvFmYdWpM8f6lXG3r5xYK/Th9dOpvr2zKEa6SpW69x53ZT8m3m9u1n69XHvZ7W1BCIYAWfhooKXhqDNHiMJCAIgURSSIECBSAjx/9pdhsd36JGm3VWrrxYS4TY0quuC+8IswGOSrx6U+KLYvx/2d2mrgoGKZbF0FMZE7Jf+y784AAQAASURBVBgic2SMEFAMJIpMiqC+8CvTG0/TyUfU3lskLKg5HqkgJQqDSljl0XfclQhRtJZmom2Kus9RIAYQQKlZhNIlH6OElsBBx6
GuNdlieQPT5HzP1L/7c1nYCidOGxjWt1OVI7tKDm9j5KSdOZklWosy0QN2nXQ1+E5IgXbpQFHtumLFn//unSJL3vcAdj382q8mrcNuknkIl3Zi6GJghYlxTaoVErGwF4kIghC7YEgMEgQOn/2t0Ym7w+n7u+eeG3/+qV4AIIHNvEtsTT6wx0PIa1YBwmzan98Mg97h/pXJJ3//dB2qAFliSh+MtVuHfuXEcrLqti81Vw7ixvtPpetLcnMx7y9Pxs3CeRciKYUgAhHuvAER7nQh78D+8KjizaINJVYnhqzSIQJHTjM76lkVOx9cbin4YDT6owMQJ0qRd+wieyEfxAfXhKARDN7JUywWIU911u8vJtPiYNLUNQyNgC0Gfd3PGCWoREHSBNTLa7ByWkxmRmbW7KSgzbqe+3rr1m0xTVLkMXSg9KyqvUp1NpTaIqWoe7p/YmklV2W9c1BNJs2xE6da25WLuLh2K0y7wanTVd6/+vzreenv6fXO3bV09/kzDCd6j+EQpdd08fWXeGuqdJpaGxhjak+954Pzxly68Mw733NPown1yrSUUc/tPPWse3WvT4TieeORk3/sw/uf+/U++e3//H9tvnr3a7/wbzC3j/ztf7x137cccPvlbzz9nrNn9m5c6+3t9OsaVZUNncTtw2rtzMf+VPn6y3sXr58+fw7qdDabNjfdtcnB+rGNYzrri08FjAeDRT3zj/y1H61/+//yL7+SDnu9XorU06Osd88apHO5/Mxi+6A2kp09qfOVrNBAfr6/l6SalPbOG00EENuWJUAEEO0dB6fAGkrS6BiIi2IAoRuYvpiAEKzNeoPiYLxlyInW88U2QKphtVoY8pwqHZuZdMtLa/fY2y8UnYAjzUZnGuJ8T2VrP/LXZ4ur4RsfXymSrvGH97/nYPzyye2DZNLmDeR6FCNI62Kqoi3g5sXlS7/bPv+c2FGcKz8r2lr6BEs9YrSLg1pp3YoL2hfLvd4ZbT12tS8V9DeWbr70+Wja96xlz+/KokOdUJHqfj85/eDd1+aTWxeur6yteVSSs3Q4G+/yibs2Nk9Mt/djbFfWi0E/qwM1EzFe7+B4v9u959wSAp1YSQqcJmlxeXscSY/6cXMt74w7c/expeWNp57fUk1zdkW9/vINE3tPPfuW0VmiaDZvJ13cGFnUdjxuz51cT4161+PvRGlefO4GAKSKNJICUMhKwGhSCMB81CcTIgBAFEWoBESAjn5+dLkKAxIzi9JEBuXITQAALKCYWVAAkBkQGEQFEWQQoRhCjDGKhMhepGOJhIxH8UEUEC1IiCIxsgdUIbBVBOAJSOlEJEZmJr7Dz2NEpQg4dg4gKhEMMOhnJBm3MjvoTt118q77z7avPT/fubWY7o6Gm/tF4ZZXOrOdLyk0UFcN9pdc66Gf+81jstyHcakjQYgSGAVjG3TAHiHPOgqRlLBAlIAKAwdCRS7Ovv48TK4lj5xMHzgDi3b/hdeKUGWXlssXr8oN36fUewZDEgMBiCBFRhSJIIwoyJCq932HPPFNknd7t2/ZY2eUi4t/94+KakKHe4N2u9F5WU29NtJD5E5hwCSBJoTI6KK1Nslt27azrslXlhUyMCgEFkAXMMLeWy+dK4r96N547eby8on1tdP1fOf4xtJsYODgtnvzlcn4cp6AWX9QpWthtLEIignbBjB6CN4ERQoFJCSBEmmlQw2KIQRQEJUGDyoIHt2xCUIXhCwKQgsMWlXMFpOubmLgINAonovMGRqFZLHxwQ052UyT9VNGr1y+vfXWhdf3arVw0Du+aU0a4u6iw65lQEBhiaIQkCR0HTBaDS74HACVikqJggDKMzCIQUJmjAhCCFERgHTio2MDiJ17m94YEQgUk0ILwimzq1y38D52oq1L1UR1vQHkRUxQIRt2TkLQNmpg1FrIIOMRELsTpgDgvXQ+A62ASCfKECIaJiPGAERgZg6Ru9Z3s7BUPCL99VZrQi6Ws8G62j2cb43TenB2+d2Pn338w2WyPJG4+tA5femlyY3tcTmFzjGT61ySNrvPf3XzgT8Q9t4ia1vPJuruy593ezVfep4Qo3jfRcdF9r4/crhzMDq4rp1gxH6zmP/0X17/9nNbW1y/efnuH/3J/UWZDUcQoKj2y9/853ngyfLZ4k/+haof4MWnxreuFQf7ca9xXiS3mNKiLPPxvnNz62ORGWljz5b7v/Vv+h/+rl6WJZ2DuuvqOlnvcxAatxAqbBzcfgE2lhwVRi/BfApWIHWgckiGoDNAFpWir9lV4gTXNujEJjTXzbXPZ3d/iFHh+grcfy8VBextQSwD1MOHH3bgjbC0C1CY5WkwzIJ08pzf3pOtm1juQTsFjGicAgZf+WpOmkBrnfaAMxBm50hliFqQiIzvOhYxxMFXMUbn4ry/XjXFs5/4rYGN73v4XBc0myLr5+MbcWn9HixOLnhj/R3fRNVzatSFa7dWNk5J0k9p2PViuP2Kzci7YNdOePSx2olhoIY2h5HJ0Xh0wjHGKnKMsXbNyfXlEBC60Fdxd2vLc9MfjZLgujYuLxU3r+6qxBowjfe+i9p71JlqXZKkiQ/aGFRaEaFJEQQVATKFqDlEYAJRWquQMABHRgEkiK45ugq6yOfObDx0ondr57CX4dkTGxJ4Ou8cw/V5eTB12ze2/+b3/cDBdOvpX/1cb5hmaeKapmSVWVOjn3ZlQ6yLxOiMKY0iFbthPmCwqcJ7Thc//3/+q4U6t5KqT//Uz3794x9/97tOv+fbvuX0sbu/8ruf3jhz5p0f/ejzV+txedBUN37lf/93XpVffuF3T/f6bldWzvzg6buewFrcydUf/rG7D69e/NQv/9I3nnpGD/Bgb76+lv/5v/znf/oXPh5P3vP5115vxLBBbbXzvLJWBMBp6dvWcRBF5LooUQKGIk+CF2P01u5+nud5kUkEjswAXe1RsCpd5KN1DgWQAboogFDVMUtMFHTe16VjBkqTfJhcu3HL9N7XLRaRIULY3FztapmOZ6OlIRKQJiBAIAGRI3r+2/GhI+ACAxASoCCBMiZAIE02t4ez/Xy0rMo5Mjz06Jlzo8wm8uKFqy9+6StX5jsqtNODOcYQu9gr8uWeue/k2RcvXWpaP56OF+1MFN61sXp2aemtrf2esj46Yem6OO/8cn+QpljXTeNi3frGxV4/zYytOMybEDEuMnuj8zZLlrLMOLo9q5dnrTgnzIUxGhGWkkVoj632R4mlVBYBrhzuF9lmTxmbF48/dN83Lr+hC7sxyJvaxeD2ymkDrovadcmNazOq4eRgsFhMXrr5xhS6SlwyMJnSPZMO02yyKBXOI8+bCBDD9qTuZzhYzhsTu0SmXUxQG+Watu3rIXtJrU7sIDH5ME/Kw71BYmatj4hCauw5KZLCO+9jvaimgK222hqdqFEqdwGjMqPN4adeePm/jYoWC0csjABaSwSxGKNEduvrS4IGDcG06aXmYGdyz3d/z0TFM8Nm75nfz2w+3t09OLv0xPd+sIqSXrn82id+YWNnkbdgKD1M1aN/468V3/fHJEs6r9qtdv7Fn1Pz3
X6KrUNgiAQemYUGvtn/pV+88KlPJeP2fm2CD55EKWUULSCO3vdEtbz8f1P130G2pdd1J7j3/swx16fPfN6VNyigquAdARCOBAh6URQpcnrUUoykHjW7J0KhaDMaqVsuumW7qRE1akokm04gARIg4R0LVUB5lHtVz5v0mTevO+4ze88f9xWjGZGRERmZkRlx855zvr32Wr+lq2bQXj3aHh/tlgs6987NifYuOG200zUzYm4bqcGgaEM2jZ0Fy4yTMUWgW8PX/9m/PPur/9XBymJ17kR63wPw6t7Bv/912Rj0R001LMgH9KAIPHgmBRAbhTEivnZ7cpm7lLKgdz5pJWIxpai1M0XY/OqX7vnAev+Db4evvuhev248Ut6CSQnhqAQPAKQTV7i8paSJBnRLCIjAVVwV2rhulvvDy9XmE8ci5raNpLwVqCpwRYMuoohu6W6LQyUiSqMHHRJjOznG0kwm2cVr7XvO6dWTolqz0SH6ijpWp1qcByXSBI6kSCUcQzWzAoqEFYriifKxk8XKJdGDKxV4YeV9g0QhcGKVTkmqGcZaGu/qBhRGL4igU5gkefFLPy8f+ZGlV2+O/z9/nadThVYihcCJIQQE5Dq1rdMPtpRCrJL77i+8ZDYHshu79ehf/MbSKzcZAYAQqWOUNAEbgkQa9AUGQEkTxZZGT/3Zo0uPv3hjqyOtXrd3ON0RllEx3Vhc1aR1p3U0qweDPkuYuWAno4Vud+JVGXyn18acx6NKBHxAhUoUeAM2F69Za1JAk1lJkVOLNtXtPAPYeTNadodPRH/e4AhyR9QGuMMymqeBkICACJCARYCB56YjAEQUEEZ2cU6unEOFAEkLAgsbbaJ30fNsVj178UpcXlw/swpdemlvfyPiIE9iVtYVDG/Vp++L53/0h5/+1h+/8ux3Lnz4k70H7nJ/essq7X0gRYQQRZAFSOb9kmp+y0UmiSgCgkGs5rhweOi+8nuGUBNymoICnZvIjQFIJKKvwuYNf/MaeU+R3LQmg1YIpwHR6xyFlNbkfFDWRgT2iDFYRaaXtG5dl53t3OpYTjBdYIBYzxjYoPGuIhTmAKiCBDIKQgPRI3hhAkBUHQldjghiFXtytbHoHSgWjt6jUWR9OctaEe1yDE6l5Xh/r31yMVlpScVQ3pRYhaYEa6nxJBFiqSEDYzh6RGFCUjpjsSppdF0PTP7A47Nv/++DqDKlXC1ZBAUgSqF4NBwRIvhIjCQAhFFAY+0ZoqQK8j1u/sU/V10rt1zLqyZElSF6san4nm1325PRoQ+iNSxUsP1P/7p628pLf/zS6g62G+HEbEKNS3glVN1My3m6fXlk9II/qMuXrhS9rVOtxaOdo3GtK44sAIBBGBGjyDxxhohzdgkRgtaKyBiVGA0YjVZEELxvp2lKJOATYU1B6RBiZOFpxRIBUaFWZcVNlKJm5z0JAQhq+r+8uUFHHafs6mlR3Dzc3M/ve0g/uN7tJ8BVrEAYBqvLCJ2kcOWsCZSyxHK0feHRB7//xJ9a6p068UivS2U6y1piFXmWdpbPPGZpry4PO21JW8loeHvz0pU8MQJ5c3Rj2rxW+iGpzkrS0ev5Z79x/Zbf01U0GLMe1ZPDV596Jsuof/bkHuNEqc4gr3cOXfBl8GSQJZ0dzcYZv+ef/rd6eVVeePJbv/Wbp+5/BOMWP/d8luXiq7Guy1ZA12bV4b09U2wXw9mJzvG1Rdr5k+926FhW3/zw8fNtzDcG2H705NVvPD+6fggqrnUWi2+9jIczHt1a0pm6mjTpSmvhvF47cebeh/PRtc1f/5XuK5dSbxllVJftY8du3PyeWlyU8/clMrv+2uvp4obqrTqXYXquGRU2Oaa5tDE7uH0whWiXepi2xocHpNi7GgNwEAIwynAtjJZ0EqoGXB2sg9pFmUkr39m5eeHYw0p7bW0MrpxNFBSKtQ9tbVaXl09SqWQ4wSlrN6V6P6tTf/hkPtwBV0FNortoWeri6MzK/t3rMGwGT/dmT3ylSnP4ub/f6r0d/uDX4JUtaEpIkpBIkrXShMiJ/+zXcl1gK4Whr/ZmTQyxa3Qirg5CgEFsGybSuFUdVs1973j4+sXrgw6dO3v/a88c3nrhu53V5FMfuHDzT19vSlPUsXDsmd/5/gdefPVye9DZfOX1p1+9eOb+syl6yrPprBqNq1aqKdf7e1uunFHQIdif+LGf/89f/w3E5OZeE2Z1q7uWJb3rV4aCmsCd3lggiKvHT9y4ub/jDx59/PRj5zYm119//oUrotuRpALXW7CzCvMUIEsODsv1Y6ff9853ZkpVs1uj0Z3xoGWURq1AjNIKiUDojteURJgFosh8PteAwoyASChADICgECVKjAGAogIWVHcqEd7MigYUDcAMIBIjSOQQgw8xirgoAuSYAygPCCRaG0VKARpUSghEGvaCQUScQIqUKO3ivJwNWHEjQZAFGElIYQRhAs8cGwdH4za5wzJcePj9q8dPtiikx1ZH+1cpWdbnH9pMzNIDF9LLr4etkqazTFGttMrz+uY1uz/O0rbPXHAuMGsEjgpFAxAHFmBUyBwFQWmA+V8HRBfzWvxLe82VvdkXngOGRTI481GPO46TqBWIozk6mRiAI+h5lBoJAIgBADjvhS6pFFpnVl2WNyV277orPHtb71ThyS/jxkkoCifCggoD+5Bo3QRXFwWriLGJwjHvZJ2Bu3518oX/s+MBGgBWHGNqoV9Mt/7d38seuaffIj2CV5560e3vZtL0u1lVlPvf/YNsoZVsLIV+N+T9+sWXhLn0EAOnqKIQKazBUxJDJumpNbW2wAlNtg7VtODRTNc874KPLirNoDEIxChotNLoAYBgREFrjCqUIumCTljLngeiaR1BoXFw8NKlpbfYosPp2dNraPNDdTQclhB2tjdHk1FVliJBhOcv2rx2AJEARSNaZQmNkG1YmAl0GkMhPqSJaRttQUcOUZoIASVVQJUrQNDH8OZxSKJwYJ8oUCoqiCawKiWNtuLgW1xmAh1KFDAzMHsOHpqInFijjE5somO0KBoURnQxUhRgqKPXoEis5pAiK9IQOYICMkKxZhZNTROPDkZLDtqpjWkybty0dBzi7u6s99APveMX/x9ZfzCq4QTUVod48nj7nW/de+0pCqqsgkGVgcKrh8Nf/734tW9QAQlpPRH37MXp9b22n5BQWUQ2nupSt9e65x7y3/hDgcSmbWHtvvl0uPK9fndDFpYg+MHqiQCxnnn43hftxeerYn/wk3/54Mw5gczel7azJ/zXv+DYNQEp0eKAJz6jiXUkvqjFmXPHfcO6qA+/9+SqDxgaIU7WemC1VMHvF3OBlQ631NVns1OPQH7SY6nbPUCDAmBjfTDV7S5GkrrhTl+dXD/i2FFtOz70Tz1rlx4NuS3Gt6aHe6vLqzzaV+LiwU7c2rSnj+lMucIZk7J3Jk1jU1HjLY7FVThzMJsCBIgNRtEEGhQoFdFHgxCC7rTJGO8C0AjChKAvxoRmwt5DarXI6Gj39aduvPHKE91TC2at/fLBpVMPPRbyqvJY1kleDZJ0MCtcPjg/
PXy5unVD6sUy+MY30l2Gnimrari33Q13234766ij21vtY9ZFh3mm87Q49EmWVoVPGtVtVK4NgkFmP2u0OAkyY6pnxGiHE5cubDz8lo2vvnStLJu6AVFp1VRkamPSGF2MDSqjtNEmUYFBRGslSougIeUBlBiWYJCByCQqirAPJrlzIkq0ORiPf+BmMTU7t0d2dxZc3bP5hWOr6y0U1yRLK0mNb/zgYr+fgwAhB1IadOmx9k0VImtFST6Z1HnbfeAjdz348KNf/exX7JTWGvuR+963VvW1xd3Xb7x//a5P/Y+/2jm32ju9sH1j+/GP/uSgdWr3lYmthg+eWBDver/4sdlkupawd7vX9l7fuPvMoGde+LNne8n9gtY2yWc+/jN/86/+nYPty1/8/J/W5d7Wiwcr4eR73/+ZB+59+dd+57eQ7XRW9gcDLSyeMVQ2USqzXgAKRwoa7/IkH4Vqf1IudHu1ZwgRIHoJvgoJGYmodBICL3TT6azyLIk2CqCsPEdhhF4vM1o1LkgQCTAui7SXTaKuN4ednnRapjmsOabigQUNAqBEYQAgujNy3aF7wLxLCOZaM4jMRzEElSRZVVWNq5Ytup3y9msHCUN7ceHz33v5u6/vTF1z7qEzu89fqQ9kpdsvVYMC7Zb95ktP9ntLVkkEmDlesL1j3TYXbm1x8NSrt1Nt2u0MUFVMHoUhjji40FCqyNjto1mMXpAW2nbs4sj761OupmF1IS2mlRbY3jm46+QgDwKzRuf2YDRtsem0smPLg4DCk2K1M5jOnDG80msfNbOLe1tRiZuVJGAUCsDbH3h0VS9s3t5atOY9jz786rWXXt8bOxS2bAgr9KULk9iU0izmGfusmNZpWzFBI+CrYIkqihPPE4SzywuzQ9eWXo4tcc4oZSkZj8fswsmFNigkH0elZ6JZlFE5vnelv5QnDILCPiJHYA5aSa+dKFTFqPwLATQW9J4jsEENgMH5jjUSgIKsHFsYFbWfTORg0u9YUc3ujReKrdunH7n32edf7N99/8qP/Fyrt/ri5/7jjd/7F91bh8u6HRo/RWh/5EP9T//8lSJCXbY6vXYLR2A4Wt8EBGYiRnHzfVn0C4J8OFGgAKXGCEphiAwI1kC00MRQNXtXbraXFvqnV+XyrTTTReEDs2nlqt9hEI4sgkQGI5EX8YEnR66suKoNJQvWhm987fC/vrb06U8fPL9rL85saOGIm+EOUtJSitK08bM0SyYxSKKjBx8jBzBaGW3QWFdHmxCxFy8ErJQ5ly7e+tf/iv/Y4NIps92k7RVflipJUhsn9WHIPBe8mC4mVoe6iA1TSsqkAiwsiSLwDghaKUgREzQs2tURs4TSgCYVZYg4EqHWEL24gL5G0ipRzWikKRxLpf7t3wyxkolKa8DEcGJiiORZIsdQkdEsEb0TCaQQ0froC+V77300nrsw5E73qGq+8aetw6uIAXWOOqmbkKUmYlAhNKMpRk5aiW9qTVhXRaZMmPrZux83H/nZUUY3nviT/mRkjAJS5EF5lCCKCDiaY2u+tdwcHHV6PXX6NGYAJMAhpnknPWVGgRIjHqQJCIoEObAgGwtJhlHoSDjv90cH4x29e+LcA9UUgSnVqk+Q6r5E70Kom5qdS2zWeK4LLybp2bwnTZ6gGG5cpMRMpqWGhBiq0BgrKJglOvhASretbZyf1YEAKXnTbz0PoN3BB88VHyDAefHN//Xz/JZG8ub3ACJDAAQQhQAwj5jJfOgnQBCY009RJEYBRRFwDjRys/HNV15N7zu3tLgQB/2wal+4evHMYrqhMByUOy+/eP6DH169977lTs89/aS6sc2samZCBRyNIhBkJgJFgBpAYiQEZKb5HjsiInpPOvqMNEtEsOwiWRtrIUWePDFD0IiGohcITJQZzXWtAMiQThMXHBOEEJQxIQRhJm1tkrqyssA2ySlQGDcI2kcPzCABlQIl2FQcHBoTCUzSdoER+6DmPaQMjBgqFIUo7McQWVwj2GJRwKhJy6yUSpQKmBpXFGKAjU9Xl6o6xCn7wlsujGUU5soZZPYFiENM5+RnCSUlPcjJNFvTz/2T1uMPSrvfPP/NQXtJZlMAEnCRVGQBUiBMSrMgCEH0ggDMBlA4UCQREgcQYgLg9p0VU0UWgFAKTRsOFXXAt5WaiG+AAbzH+ru7s6d3TyvgGlTbgPdFCT4h3W+PhvXFp/fyBk6lVbsHptt6/rnw+RtvXII0KGDAGGCu8QgLsxChsLAAECjExJosTa0mDYIQgEAkCsGgl1d1PZ1OE6NDTFKtqyCVd4lKIuq68XUQwOAd+8hzQ6+AzMPJIhLgzQAakUE1Go64pXVqoOke7UvSUp3MNpNpnNWTm8MEcbR11e/u9DU7Y9M0XPnaZ493UHF5+vQit6HMklDNitEoy7ok0TZNuXsw3T+YSWlz1LlVqcJWarVtpzmGEVUj0nE6nW3tzCRIx2SD5fTEQG+001jW5bjIWZUvvEStwZUx9ZXksROFWpmiVJ3pHzvavNzRk9GXQ1x6xKr03oc+tOS3xy++Ut7aaal2XMk3fvLd5q3vuf1G0d05MUhDwHSWLacfeieUl5a+/2z1j7513/H+qPFI6TTUBULVW4z9hVOnOtvjGS8ul7NtHcbRT9ztapadlLiaq+HC7v7kC7/d3anX73vPcGvP9rPx9Kj9rseGvZVy7XTyrt6Vq1ebe99pF8/kxVHv1mthC7qnH3nlhef6C6mbgOP2XuOW0nUINC2mmWk4UjOpc5tzCGJAGrI6VWAp1uJLnbQxwym4YUxt/8EKFo04NLlOOMy8FvCzGnHQsuvhELmYpjZpL/arvXE66JoG681dkykpm5jlIRo1Lhj7vfwcJGeaxbz9+E+Ea1um024VNt54iTe3pJkGYgZMVgdxPIQaJFILKTpRllhjdiKP4/2g2Wa5b2KMLIjD2pf3ri5+6LFya2/7+Ss8Hr9wffee5eOn7++fScd/9KUf7Kv2hmrfrkpmZMBL17b/l3/4q/eePdbvdDpSP3xiBTV6Ca08K4p66/bOQoon7+q4WYlA+XL7Ex/70YWcqv1t3U7I1p6qvJMT8tJgoaPg5Nne9u3rTd3sTcfLi4PZfjE9uhEO97vEn/rUz33lC9909XiwnJ0/t3Tx5VuDLFdoL9x/4cTZ+yeHR9dHB+969Nz5u8/8p9/7DgBkSmlQikBrIgRCmht7gFBEfOQ5M2y+hSYgAmJmQASRGENEEQwRmed0TgJEigwxhijcAJJCM1d/Q4weWMCHEDgKSBSQeQeMogBojFZIljQJI3OE6Jlj5ADAhIlCFgQWDRC9IwHm4JERQROxACJqhS6KC0GjOhoNPWUqlbJ0IdimGBsO2cCs3XPPLrTHxuRLg7JtEFhGM39rn3rHIE6To73q0us0KkJ3kNxzKuFqdvEVPJqYWtCreUggEAsiMcL8liUYmZWgQqWcmIqRRCEggmYrIhGBAQWjKAgQQEQCI2gOAiRKKXYBSZNmI+Ni/yZrZ1C31juEQVTwAegAhp/9cljrpHedoO46Kj0/0KMmyDSR4bpCNwm+QeZ
k7yY9+QQ9e0vNIDoUIAFpgCnE9Kln4aVnIwKy2mBVBlYGcTprEw20SFOqw2n0rmoieYIYCTQLewYkgExcx/Da4sYPPe7W1v3iiso6vWaa7F6/9ruf05emNnAiClAcMeXQamtEdXTYEIMhKTwnmdRWsoFZy4EdS8XR0iSA0tBqpzdnJQ/D/rXbgwcX2euljQvtnn716hevbe2MZmXk6H0grQTn8xaCAqVRGa2sytLc2hSVwfldPbHec5q1owVjCFCFO5smElGREUME7xSQfpPY5XwULUZpRiZS2qpQefQRhTSSj6xZLIp41pIKaAIy2qD2CGBJWcCMdIJoWBOpChwChgjEiCjEQSREZCbNokAUCIfAmghAtKG9rZvp1tVscIGUGU/M1tVJup/amKx22iaJOsO2n1y6fHlh7djiylr38Xe4X/814DI1OpYxjJru/uQ4XdsdH9VeUispoC+LREedoMlzAs/YQPSRYN/FVYLE2rqJBLHfadGssHomC+Ke/0I1eW9+4rwRNfnS5+3VfR2ag9cvZ4/7aTHmK09e+dX/eX3iVbeFNZIicpJ2csMSRMpzZ+r3fHD5J35aqmBG1dKkjJ/9dXr2OzSa+SoYlaEms9CJswZBqy6C26ZtB+VR1CsVtGnW2MYRlGixCT4xhlEFrSvKYGW1cBPRqVHob+7B6RXHKZuT0+2pvrJldm+iL+Ibz9CppVIBdNd0a8DahjSTMDu6/caAJ9BGmGmoEziawGwkRYV5CbMxDBbUYg9QiUYhB2ggbzEFhFRIx3rqmhFUFQaJMRhlVpZTWegVHq+/PumsLjScS1SD5Y6aaV8N8yWns0Tpu8uya7YmCyt9hMOjK69lx97OaoPaa0m3THN9eLiTtlpZbriahSMfMOjE+ijkIjJA7QapylpIus5z23O69l7rsDDobk/rjY1jBvV+XURNoZqFBps6smqYjMQmhNS5wmeZNonS1thUB6+IYiRjcyKtgVKldWqjBk6MDw6i9oGDYpQ7V0F0EoPsj1xsI5ukbnyGhlB2h4casZ7VmcNuKx9WBSgaF83Aak1agy6c4zSbRq/J+rJG07S1z69tnujd/bc/+kuPvetRX00C0Ovf+4Hk6VseetgVB91zpybdbG/mhyKnHnxLtRNOtNLWlctf+fX/8Imf+diH3v+eo4Najye+7RbXTsL+5Gh78sDD7xuz2ts/XFnOD3euFDgtx82P/eVf5jiNSfuHfrL17S/8gd+6fD6hQtEMMgUWAO8+u7Z161bpD4dlVUZjtEWKSiV7w7FCBImVi4a0rxqO0SpsdzNwcHhUolGpIdc4ZInO51YnRgsCEIbQLHa6h6NZp524EKPXZdlsnFw+eWalmTaDwaKKnhROi6myBhCiCACoOyYiQFDz5gdEFJH5LgYRBVgAeG45DcyAAs7Xw+P5+Svbzg0P773nRHTDotzpL6Wz6Xh3NC1c1euYVqKqKjYcbu77pd4ykRYf+902mHaoVdWIYchSnSbWBbBtMx3V4MKN0bBvbRV45pvCc6K0IeWDUwjOM3LMUj0VKbwrZ2Ka2GrnYtTOtNrZn5SeZ8632gkB3dg62pLR8eOLNrE5YaxdWciZc6diCovLrWRaBcR7z98TS4dG1Uf+ytHr/U5bmfiNl58eucnioO29K4oSUgXGFszTAEXZOIEUaNBOIIWavHescjslN1PBGw3BTYtDQ8ghOnGReBaiARO1Wl7oT0aHuU173bwMk2njQIg03ZiNTq6dSoNgwHY78cIhyqws++3MZrnz4S9IRdqiIoiimKVx7ANikHaipqVfqRrxPkLziQ+859UXXnrt6W8snb1wbvnMK998sQD7nl/5OwfnT2wqet/f/W+//3d+pVfLbNhY0lW7ffLn/vaWXTUwy6wqX3hq5/P/afn6ZeuUAqVYREEgU4VAiiyBdgFZQFlRkJAS0gieOOZAW1/84tkf+cgsTdSZbq0vUNyd/asbumgSUoyg0ECyFGOdKgjVhItCGLVKxAWPLjYehEBYAvfQzr56Y+8b/0pX0A6GrYlKNBJZBYgxRaG0WRwsvO2jR7f282p6tLOz3stdWTTjaRnrvG8QiBSCiwoNUOJmuOoJLgV1fVcRMlDWbteumqy14l/94bCxDH92ce+PnluMrElLjsFgdDFBBsOhjHl7IbCNjULooFGeI9dOKandmEzaTpSbjTimre7ABXZV3Ru0rCZg8DXEGE0FaRUlcohKE4FnRQIO2TsghHyRY0OxgKZEQBBkBVMTWp9+vP2zP2OSVvBmqTUQOjr6z1fbgBK9AkotWqXrogET0yQDbpppqUCLD6k2qBULJ7aTldN46Sn5zue6YsBHQBYw2mqZ2+nZjyu/NFiHvCrytlo5r1IPtYKm4Y+/o3r8DPzm1AgleeID16GJIsokUSQyaITGS+lCcJw+/PDR/trP/viP2+rg5Te+QaTIxX67NeMwq7wPHCM3VVO4EAVsgP5GvjboHUyHPsqwdlERWQUeQWlCghhjWaJJktQIh6Sl0jyvvfdFc7h58OeWIpqjg+HOh8A8iy8sdyyR893wHRUJkegO2Gi+RBWQN1GmGJnvxNUABSEiaD2PL3CUwByiAKFAlINrt3IEPru+n6rW6dNveeyhS7/7uRMhVk7ffGUH0yfW3nvfhHu2ltHVAxOohpAqUhQhMgEBiyIQxZFIA0WOhIjAhPPQL0cmUpZ1FAAkh2AsE6BuatZpAoojRwEALSQcGw/BCYpqaee90iSeSQhdRB0TEdLRx8Y3RKbXhAkZCsEnqQFW4BhYobE61aFwhrRKLVNkF2MtigkUilLMUQCMMhJq4UikpAmsM0xaMXiKzB5MatEmSLouioBGtdeVpaaZUDPtGBDl2Q7U7FB8JVrZrB9cQ6YF0aNMhAl0RqAlNkAN1NXClS/DG7/N2pAncAWAAYekMkEVmCpmRaiERQIxaCIOEVxAIRURfARtytqnSgOJqOhixJZGoXoqGRtkbwI1B2w1RU2zOopSTYyxFGhp6cKB8srAhV5ycbdpmrrvxQprQyY3eTf/6o2D1xp9VWeFCLDnyJFBEElRgEhEc9+YsqS0zrTOlCIJKF4jQvCA0aYGMZo4TizblvURZ5VzMdbOCekyRBfQ1RIFEYWjECkRjiIAoggR2VqdGTOpZgCwuNZhlfHCInI6a9LO0l3VUnt3f3P36hU1q9ugeFqCOOOmUs18J51x4Qpupx0sQ9RShH2Q4EZTg6qPlkJsZtuT3S0DndwqFqUsZquLkJqoMoyBjOZ6gsaGGN2kcNMGHBjJqsYlC8vXXtvlcgalV0um144mTs91HhhNt5LF5XQj3x/NdOPwcFfYu5LSK/Wk8HJ2bf3d7ylf+INupzNS6e3tyeLqiW5rdfuFS9/87tM/9bGP0bfD0TdeGJ/qpo9/5jv/9h8+tHm4BGkYKzeDbqdbAGz31N1/41duPP3MxT/8jaX3vtf+wt9AvL319W9066bdWl9fOt1OZpPvfp4ON/VYhXf90kENtpFCl6c/8ZGmvzQtIIrFOpxY36jbZXdpvXjq5eZrn0903779A+d6y9
qG4SuvdtdOn0p6vXFoNjft1c0stdw4cJT1B87PRAk7UOgxVl10WoGJNBk3sZmZUxsn7v6gq/bSVBVbB0mWtuxyYxPfUjkurTrBgxuKCt1dGW9f6Unhx5ODahp8nU+bnokhU9XqW/ThrBNn9ju/M7vyLZtm6ugwPTxEH/U3/0DP6tQnYBvI9O2i6pD00kRGRySJoGBmC+UOgqsCbyxq00A9KUwn66yYpg6qnR9729v95LZ78vuzWzG9r3/u0z++o9dWFrqvffErGdsHzpzZ6/L3915XIDbRsfblaHY0PGr2RllurIfpfhWL0pXN1IWN5XFLdDGj0azcRfe+d96Hy73r1y8tL7cU+/bawpGLNktHe5sg6vBgCqbUostqplDbmWsnynEYDWcjn/pvP6vFbyx1x8G99Oq2n0gVpr1O30OVqojarZ04uTPyo8tvzJ8FWWI1kNJ3HHcKCQAlxiiAwsBza5EAxzlXCATmneMCGAAigCAAidIEAIEFkAMLIweODShm1MwaQUQCgo/oggRGJ6K1scoqpY3SRCBKLDIhK2QAbGJwwbOAl8ioIujIAMQG5wINB4kRxFBg8SSREJHBkKmh9lF0qirxoT649NK3Hrj3/OLKwtU39oobt1dPvbW1tFbOJo3o3mMPVOULdNTs/d5vnR4e0mynuPhUK6VyweLbHq3ueaC1ttTevl5/8+vumeeyyABaABlBEIIAAYD3gMSBkUhAEBBRiQC/aV4URNGKQyRUQkwMEEUHQjAqehAlTIasAmAfp688m9x3nlZPwqSw+1vlKy/Q7s0oiJjzfgjj/Zr84P5pA6WQZ8VFVYECNKaf9qqdN+LOvrke9l+4DlfLpEbxQoQ+CpL4CIqBAGkiWiMze45CyAyICAqdi0oRl55ZNBMgRQAtyIZrcXY51+fWl9/9SDx2ssrb2MrTbr/0mCytmo0T5oVX3evP5aAhxFki9tFTemNxcnBEk1liq2JYQpDMqib67grobsvPalXBbOIK0RG1ZggO+oN8FjlptbP+slatKsoXv/SH165dnTXggzBHUhR9QEIyCCQemHRC1lArg6yNKg0RtIqoCIB0ahPIMHqtJIAge0TUZANqBM0ckDlKlDcxLTEwCoNGVEgUHTqVkaqlduIxegIH3qI1QhgFOCKCMsopsXkr05Qj5gwJoBEihkxRjFAiexGRoKBWEAwBSARBmEuyEJh14xsipWZy68k/adevjoqCRkfd3qpy2WiseTL0O/sDMMMXv5DGdvvcubqhJiQoSJ4DSqIxTc3RcKj5DellR+Us2CQoNGiBQlMpQW0QJYT61huJ38kGvViCD87m/YgWLTfTQOGoPtprdb/sbg7V4z/KV6+5Fy+2Sbkoxbe+0zt7ao1Gu7/zq/eapXF9oHINDDpIgpzlajaewrGT3b/zK+0TDznTtYqkpeAMJMtq/Ov77WcumoWlOJ7GYWmZVBNEJ9Bbhsk2zDbhxmG6cXeaGqgqGG6D29UWEmOhrKGq7ETibY1rx1iD7vf5aFJfvtJ67LHBYtdeuWlu7cjV50zOxnh55YnZ6DatLOQPPGTue29k632UGJM8hVtH4MdQjEEEVAKd1eCnqo6kNLgZHDaQOuwtATuoPCYpEWi2ET1KpcQjEWV5dTSFxC73srve985R60S9fiz4nUtXr3SWE5uvN6Vt2ZZMb0UYiKz1+m8fdMez0e084Zy6KSShDtnS2Wrvdl1N0sTS6Ki6ebt99jwprXtd01KkJc8sNtUkTBuSpTNLYLVRaVo341slgQlObGbHRXEwrTbrZmxV1h2U453SRRdr0laIrGlI6RgqZYy2mWqMtRmR0tb6WCMZpROttFIBtGJGEU2SuABRSGMzvwrOHe9323Zzr760M7FWZUrnfTMq6v2yzlGRyLnjS9L2W9N9ll43TboGKx+Wltu7k7pRRvdaLpSLi92f+MhHHhhsvHX9sdsXb9/zgY+271qxuRnXPgsOJqPD3S2+fmNze9a++97Vx+5pry8+/fXnh9cP33vh3PjQ33P60d1n9zcPNt1wyPbFCx9/L3gZjYrNSy8+8vG/EvvJklk0OD6xtnb7lRsxdi8dXhzcfSpZXThS6vhHP/bAR9/7sZe/9v/71V+/vFVsXZutLC6VO7c/+Ohjx8/1f/dzX70xrutZEUIgVKiwlSeTptYEAJJl6WhaWK2aGJtZo4zRiVpaSKuyAE2DlQyCnkwaRpRIIcDu4RiYIMa8nQGlUrtQTWfDnbW1U73VxfHBsEK/e3Rzdf0czWnCkUndMRCJRERFOA92KJEIiCIC83pfBkVorLLB6Nw8/dyLfMvvPn3tgXt6h6Nb4xm9/fTG7DvPrw/6T7x0pZkGVLA7m1otVVnpPC2qZnU5PbG+miSqibJbN6bT7fXbmztH6L2r6/EhtzudST1KwKy0u2TVjb39JAOrs8msLCC2mOaVowTSCFfceCfn2tmorpfagx/cPPRZWjRldPViazAtfVU1zpqd8bSoGmtQOV5bWDkq3etXridWnTq+qqS9oAcXD6/VEJHDIw+d3t/Zv3m0eWM2dQBHe7MYGqC6w+Z01pukeHFvp9NbKGIBqIKbrbRbVmTS1MK8t3+4uLo4DCFhnO7uD/I170rUkKa21c2v3R7lCR2VE4vUVLx/MBXUuRXxERB3ivjdK/vvOrWy0k2jb5IkUWRAdVjhqHJo/2IDWnBRo0KW1OqA/GaLB41q/+q1HUotq+7Dj7zz6nefufKVL73nR3t3ra5feeL7ZvH4rl+2i28fjbevua1pf6mNt7OeFhHTqYqXPp/hRf/C1/TVK/TKtfZembFRqJlQOMYQWbEm1SAAAAFrYWKKkcUQgJYgiqQjnL7w9K2/9pflHe+n+99qTp3aMRt2bRm29sqyUdYoUs1sSr4KHGJ00jhhS4kSQAGGVjty0AqDExVjy+uOY4UYFUd2AhAJNFmOcQRCb1vJrYbdN8z1G1L5HiVuiEnv1OGpBfuZdzwzvjJoZsPP/dE78hwbkIq4ZpJojA11yamqA7daViyNFhb1Yz86SvRCdvbgma3+0TTVidMZ5qRms2q0W4VKVFRiW7brG+4NOk0IChljjaE2Scra1lWtdZabVvARu5leVI2fYNAkuQZWlsSzVEGM1kZLdOCjryqgVFkNwlAXBIGkJPTGpOLFU+j++Mfbv/Tpoyq+cfFyb/1YbrPqxg1FCYeamwaUAmGwkC8m3NRlMRUfEaImBXEOlfM2hcUXnip/4VMtv9ORDkQLEGJgBhGLBEKxIQnl9u7wmSd6P/yOA44iRZYqCDEokEcvNLyXSXAexCuxepIqf2p56a67Jm/ctLduxZpR6VZivJPjH/xM9pX9pg63Lj6xt3sLu/mpjdXbe0NKqKrqoqxReOZCBcikJx6aCJ3EVKXeb5wyhlLtQdCzB2dNMm9pjaHSBBAhRFZa5Xk6q2tUb/aEgyAQityBkYqICCNEueMkmp9xQe4UpSHOhaF5uAyB8c+/FBDEOb35jmtPCCO+SbUMDIAmUcGzADYMO5t7nUSbhd5LanRjc/eBex8+vHGtNIeK1c1XN
nlFDx56Gy4NxjHkEYhIWIwgEFiImhgiIwELBEACYgEQQWQihSDCMQIQeDHgAUKMGTbEiSUbIxDpGCpBMiSaRTTWVrnVJd9tm7IpRiMOkyRKklkJPgQvOoKNrHRwYqFCIGtNYNSoooqoEEG0sMqCcMVpA1HS1PogRJaDj05pnUeJMXgkRpS5B4vnG3AEZUQbgxjBiNjGtlQwACYKcKpsPRyTrxypcOZeoFIVN4yKHMaANoooICTFkEUhlKCAgQGMhXoMTFQiQAVEEgTJkOHIziY6EDJwmM6YmQIGQQVKEbmKlTGcNMiNbYP3ARkEAVNxjg0QCM98xQTUADKmNimFZuImZVQKggEy4iEe9uG9P/32sI1bf/T6ZHv07odXDnvpb1zcfeKg2jnwBduK5kIlM3MUQEUoGKNwRCQkUlpDK7WZNRycEnYStSFjjEGtgSOI1nkdKgSoHJS1K+sYYgABL0EAOQJHmdvNlOAdyjqRtpQqaFmV2sQqfXt/BgBQlhzqpaTTyMRYGd184fKeoU7sRlBjtqnJskyQlVURcOimuqUDI0DDEFEbMIDEKhAIsgaTiKJk8dQpcOAZGSTtZunqhqtLbFxZzBqbSCMhAGi9uLrSHZjxxds0m3Ljti5PFrROW+iRjsZN0+BKlu7efr2Vi08sZYtKZ3Cwuzc6GCwNcp0sLLWa4o1O0fBLVfXGC3bc+ECDe0+c/4VPXD0s7EQeeOAt8fTZNtnb33hieanJuu1b7/nM5PDpE712ve9gIauJLo+Klf/b/33n3N2drs5+8LvD4S6ahSu2f+lUcy/Mjldp/aXPj688N7p6O1npZI+9X7313cnmN6vxaNs3Za/XeJMZPCuHxVe+7V56KgyHRX8paVTXpmTA/eApiIE0LbHHa9dS0X5WdhEXTcv5gD6iAGTbHoH7x3T3RFkZrGapQSLiw7icLC2EdnNjKPX3nUwclBm2LCxOJjxRa3ZprVc7euK33O0XaDmJ7Q0YxWp5bbu/Wjz+8+2Vu4vf/fv3jWtKN0if0fwq7O8sYLWwcy2WDYeACK5xOC5RtQ44KU0yO/tA/6f+lvzgO+b1b3lfWiEuCo7ycrZo/rv/FzZH1/7pP9ioXSfRFYbac9rDxBbDr/6Rin5Bt91q2C8yu/AYF/blb3+Lg04zderkWtLsAQEi+hij8OGkOqWSfNCpy9mtg72oTeowRGaSqOp8cYlITiy2Hji78tYH7jVgZ1DOJB4cVdQakPc7u7enwwOLKVpiT4LUaXV7C6ukYHywGUwWCmwpqFylc7M1HjHYmec0a91919obz190E9ff1uuLa5F5sjO+vbV/52CkjEKal5jNI8fCzEhRmFmCQBT2zgsHFPSRCRFICwELRhFUBKIJSIA8AAIxi48RQEJgBxAJQ5xLRTFEdkFqH1hQG6u0tSbP05wEgkSGQIpFmhiZAVz0NXuO7LxnVIGNaNKkIwExMMUgHIk8ikWFKMRgtJGolE1q7yNHVEpEjm6//Oo3f0evrkIxOnbXW5uolIRQjibTCSyuFOsrXRjn093yj34vaaIf7pX3LuHxY+n5Y0fdvFKt1tm3OezZopKXXja1F1EsXsgI6fm/DoFJa+YQGe6UQsxj2kJRAc5DZoiBBRwTKM1EXkhFFu89gM1sFAyMwu3b1933vqre/WNlUMrx8NIPlpsJGUZSHa05xnJ71Lz+Pch1aVPRmUbFjk01xcO96qmXxtf2sAFyoAS8SBRA5KhBWDSARFAgQMBemAEIBCQEEBGtBUQ4iHCYFwrMGYY1uzKH7vseXPihD7jlE7MsjZ65brJo49BFk9YozCqmXU8gTJWE+u7TC//lL81UGg8PezCNFy8ffusH/vYw9bHTgtWT58zGAwdPv3TraLNut1v3nM2R967eIEXBUp3FtbN3DdbPuCl848tfefX5l1gnQWlANPMFHjMQAEQkIK2R0GgNDByiCw1RQNCklFLWaG3a3Rg0swveIygCNkTzkw7wvMuVSd2ZCyiKIgmNp4SMRAWoELUCrdCLsEZjopJAATno4AISIlKaZUmSZAhJjGqOrBLRoLSiBnwkisgKDIgHCQAY34zvzzdtwCEKE5quyuv98pU/fV437vyZU9W2PrF+3wOPPJStrHluXnn281ef/tOlez9uNAo663cn03HfgzXoQigVHfvIByazYRjms66ZEpw/d8EGGL/4/Z53XGEdXJoamAV3/Wq31YqZ5iBJQnXluBQURkryLGleejnZ3ipfeLqNuNhKmmllrFk6GFb/5J+zLvtNKUmWpUldlG2TtRbbcLgLg2wqnH/0w/HkWaFEvI9BdJYHR7L4EH7yl+LSq/7EXZvPfPvkpRdhuAVNQFGwfQSeARgUw841wBrCDMYHEKagAZSAMgAEEVUT4GBEIcB+TmQ7AeGPn4+xSeqgTQtigDIwBz04lgTjrr4adl/mwxvpIx8s1YnGuTSOm4OdZFpIWaExsLDEWa8eeD0Zp+MdhAYwQmpiR6tmDDbTNoJEHg9BRIuPICIcfTCaBSo5mo6+/Tl68JHvPL11/+kz7Va3axcPD8atrBfY728VlJ9OWosq7w/lWutEa7S/I20rqeckZAurzbUqXc7caPn2t195+vtbH/jFu7mVcNsKRhTBiChRLdHaD3+s/56PBufVlWvV7a8nqUnIDhvonzs+PdpR0jRNce2g2ar8eFaGOeUmRhBxVCltvDHaWJM22iaNr41JDCSKPSibACoGQlKkFCIqUChKiwgoc+cqOJ7mG3naywBmzUw4ulCMggcsA/ngO4QPveX8rcNt0+kYaoUxpixppuppefzc6q0bN091Vj75vk9+6uPvf+CeC6999furee+ez5zGB9Zu7Q8T7CgBEzF22ysr57ZevyJ1U914/RbUZa9//4Xj/btWtl94XrX0g4/92GtPvxajW1hTUE3a4p21/vj5C/e9tWgtKSqam1dG21fL7dvdwfLG+kBvHw1yQ92BVsbolh7vnzj1yGd+ZAfa68fuee/B9uQf/cP/4U+++dX0WTy2cerMhc6T3/w+aDQtU4NmxMzmBiBwDAI20exCbrQjRFBNU7fTvCxFEJU21iYhsM60a0JVKALUicnzZDqrG++TzF65sVVO67Cs9o6KVt6KoTR5z0eeVU2OGVrkOXCBCAlhXuoLAgBESmQ+b82rz+YuI4VoXCM3Ng/D/uvZZP9uPrFxfL0silaAX/joe166fjtL7vr29c1XN3fybktjWDPaCxXOjQpPQN5JUzQxyLXb27e3okZS4qsSKl/fs9gnJSuLnUbCzq3dTrdlIRgN2ULeDmQYXeTCuaNJlWakcztu4p6LOePu/qQA8MGlCP1W53h7gRfM3nC4N2u2jooA8XRrIU7HrcRc2t/fqfjek2dnk+nlN65qvHa8t/joW+6aeL4+Hr529Xrh3H7VOKJ2Lot5Ki62jL25f8hGL9hBQkmCrqhKCFAcEUWwGo1IblLlOIwqCWil673ovNVut3YODlwdIKVSvBFVewiM4wikcW7jDM4L6BuzRm8fPX5sqatN8NI4FxEKL+WsLn38C1JROzNz
7l5ZBY2ESgeBIgizIESZNIut1ualZ3t1dd/xjYEL49f28CisLIar/+YfvFWr99x9prp1eWG0TbWrG8gyatXx6N/+2xRjj0kF6QhmSaooAgEHVEZziBjZEketI4jVgjEiaaOpioEiogZSqADJ6+b71/n6Uci+JmfPAnVPnrl7tjfRKSMLkSTiYlWKD0arqBNNafQcG6cNqhwCU4hBaXEqUqIwOhYJgZis1ood87hygzT7G78YPnT/9Pf/qPoPn201Bjkh4ejGzeGRtBZ0+JG3vnt1bHxy/5mdf/sfN7TGGJRSEL0QaY0RvPaCs2liwN7cK7743cFDG9nu4d1ZL98/5KoxVFrMjiLzfWft4/d4S5MvvqSu7EEZMSSxabCVUpRYNzrJda4ksEZE52oBXlvDt921deOyuzJcMS2qikQ8R1YZRI6x9uRjYlPUKaCyqdR1ME3gUANFROOLYMnWNu1/6FNbnU6j3eKpcwPbs1duTl5/xY4K1GIIFRIYbMoqaXWGZ84vfeS9w61p8Z3n22+8lkMV6kqTIiXAPvcM1AKASIDGKAvshRBUYIgBMl6z8No/+pWNrc/YD/6V9nSfF3Jw1c5iJ3v0gfF+2TWU5AaKZIjY+vmf6f/cT8fuwsn9nct/62/qN7ah8mQ1RoBrr6dV/dwz+zsvPwm5mbiimtI0NKJSa2la+rZJjKAEUpoU6sP92YmFPjOUs0b1M4NoLIqKZXQqaXE0IarAsWMVU3SlN5KIMOdZ3iaAfQCYjwXzChmEO3lZljnVWuYuI3yzEY0QaF6SIIKCKGAIAYkUAUcOLAI0dyoRKiJEQBBhYESkuV8cSM3TblRW4dqVrVXwsUV7B8NDX9+t0w6a4P1CZkcv3eiGRs6faVTAKBREJaAAgdGqCAQKwXtABCIBRFTAgnPYqiBpYwRkZkhfeJ85+Vgx3IaXPpv7CqIH5ijKkPIhkCMAVy+t55/5r/HMPeBnZrRVXb8Y9m7yy88lRQGsqt463vdw6C5QEdTwoLnxfVsXxKxIO/akkV3QnsA3vJRVJ86r1Y1weTsZTgzOAANQJCD2JVICpAQFNQRfo1YMIgI2VqQQdYu1aVY24PwFHCzEsoTJUXX1eisxZkkIoGpCmZ3MF/rCSG5L143SilUigIAgRAQMwsAgzmOiIVYgBKEBiKANAnlBL86mhNGlbEEbrzIvUaKQMBIKRkqwEh8kKA3tzMwKr7RKi4iV6Aa9c9Wpfu8DD9qOne0X1bWd8gc3J5iERM84ktYpyaQKQFS1Fp7f7TAufmv84gMPbfwmlr//0t5msBWoCMDsEZREZmHhO916IQohoUYCMJoyS0jeN0ViyKY2T7SgKGQAKZtYN74p69pDDIoICZXzgVBz5NrFOUPljrlNQBNkiTYGUmttonKtyEcmnSQJwCEA4LhKHAQotImUl83k0onkWDOJA+ylnSXBMunZqqnGO0OURlkzm5VJL3cQdEKCIbDXGFMbnfMA1LggCpPeUlG6SjCQxaxbjSH1XjfTBZtWUVCbLOs0YMZVTbq5556FFkI9K0IxVhJTRWXBRamKijaH4xRKMboq1Ux46vNy9xAZpqOqxbC/fZlUfv17z0onJ1e+ZeF4Kla01XplkBebTz1f6ezFF3+j1VTqqHLPPeu+9jvdR95y/p/9s+nW8PXP//7pD7z71mHdW/qI3P+2g0jtcqaGQ1dOzzd+efWeU+8/BpvPJH/8naNvfUsjQnthz9PZzrHN7/zJ8Y2puuvCqfNvg1P3LB4cLd54bevf/Vu7PV5oJy72FKakDShPKhh2WLskRUBAD9673CBGUOAIGCkSgG+G6EUxUu1Zr1PSFSTWHRM1TLatlLbgODywfhJSpdKOwA0Rp5YXuydWqitP4841LWQ7q2Uc7BYH5sPv6L3zkyePncESwqlj/ESVuq3i1m+quoiMEkECRAHS4KMoxdLMJj3xv/iLC+94mFRLUe/oX/7T3mSa9hcgJKSTA6Jzf/cf7T18t756Ne2aPBYpYVVJIAuxMo2y44BiZ+IbE3wcD//o9zptdYx3Z8t2NA0Xr17bvj31LjICE5pEPfLYPev3XKiGozQ1U3AbSytxfxojhQC3dw7Wj63uFuHqrcnDD77VTPnwaPuPvv3saFJ2F7tH05DomGTlhcfvf+21bQlu9/Y40doYcMW0CYUYv3rh4bXl7PD2laNiTMY01CweOxHqZjSddF399rc+kEL6xqXrXavvfeDEy5M9ldo7MUxjkBEI7xhhBBjIxRgiRx9jDD4EjswMUSREAEIvjEAaUJFWoAiBESLSvAFTAAJAiCEEaESIgIC8cGBxIc7zywHJ6FTbLEsSYwyBEGPDEtizxBC8Z18F73yMLBKZQYSFDDYmRmQAFo7RACj0IA0qowhiNCJGRQ2UoG7Kxgsjki/8E1/+k3DXPSt5//hdD1ZVNXzlGTcr1jfWPLarey60j1XVpSu0cxS0jnefVu98PBo93Ru3slVathFjfnyj/xM/Px7973T1tvVKAzYeIrCQBEAEVCzISCDzTjgQQQECEZYYgeaLlAjIrIA0EFKcqpic3GhMK4y9H4572oMXmjj69veqaOVdH7612Fv41I9Pf+/X8pRC5VwjxqKM4fCJ55dM1jl9vnTYbucyvDF7/sVrP7iFuxGFFCChkLpj9WIRBEYEEWAFwjD3hREBKJg/uCMBzL3EOB9VIAYhikDIue5/+PH2D3+yWFqOZH0MCFFBLlPPrkpX26xMcVQfXj/og0IhzPTKux+DwemUJM26qYouW2zPaDj8Lhe1uGhrfeXPnstW1jZ+6rFq/djg+Iqxjr7+5Z2L15Nu9q4fefRm6E4PzNHe7es3Ly2sr4xKJmOV0cDgvIPggAMLkKAARpbovUbgZhYJGxZxZLJMZe2oWoGMsu0Qo9IOmDnULrKQBlFWKYQQMcKb3U8xsJJICimyJoWMEmNKOijKiaKWbkdnWGpBEInAypAFEmMSpS2yRs++LoO0SGvSPoKQMDEojIw4PyIJIioC9DFoQhcDScjSxFpKlWolpyejyrtifGA73ROL972//9b35cu5Mqbdmi6V9x2NDof7+4HMtW99LSMkksrHKCE9c/L4L//1W3uXqZLTSUetrHdaVkM9/dJ/Pvi138uOpgkpixjGwf3x57MLd5nlVVuUniND8D4kVhWusjoHX2ejEWzNGIIXF0uM4hAwz5T3Lrv7bDHT9R6DJKkwEEBP7S3m6//Tr1fLJ0KMzXjWXT/m6oYI0AVVB7766tYf/pPT/89fW33ow3FWwc4ls7YMYsGmsF9C1gGtod2B3V2oCugaAITgIc8YlThRMYJRECpAgbqCpAtBYNpQYDQEHEFpNhqSrmDQcapTBVPX/NnT1eUrsd1aOHO8urKnDo5AArZTaEqg6E1HnVo0cQF32jDel9VF/vQvNKsr5qlvuq99vbUYoK04TdD7WFUYo28qgcASw6wClt2b4/sf0x/5kR+99YPnujka1SwcPzU5mkIEbTMvHq32lve3Xrln9SG90CvHXsWxG1V11qumEbD94m9//+arw2zpODVKLZl
IFJtgGMPIeVWf/isfXvjM33J6RRpIlx5uP/PG9YvPVGlrajtYh6SXrfbaX/nqc5fHfho8A/P8KElu3lasjPZeaWuxaUxijbHapMZZnaRKZ957q6xWRisDqLVWEQVJK1Ly5gZ5e2e4vyk9hR88uwAoV7emMU0uTSskUKk63NptkX/uuy/EsWQ5+rLGHD/8qc+019a+8eRXf+kzD/7Uj36qk/W//pVvvfSn3+4WuBVeXn1kZZHeEZMFdE2/3SmK0pWbN7cvdzbaK/c/PJ3x7n6x2huEmxdvvvzy/qXXl8/f7fa3Fi8k2bFzi2Zp/8tfeflzv2PPnTn2V34Z2isRDfoWlUW9ebksinI4qw5Ldk6ugNVpunGqhARNe3c/YTjzwDvekx47u3Af/H8/8sdf/sMv/vav/usbt3eTnfHjj9xbuGY88ZsHo1HTJAojCxBNitoac/bE8sFwZlIYT+uNjbbJLNkEGpgWbLAihYiKUEACM0UQpZULzEJl7Td3J4wJM1MTgPDocBacJlLAwvJmExICzTfub3ZQCzAKoOB8wQA458eSOCZSJ84d+/6rF2dx+L5P3H+qZ27vVSkoVfos0+1SHlxavXkwuZIcIPLSQmfr9hAVpkkGOtndPzAKV/s956fTMrBAJ9GtlglETLhzcDQp6uBcFSXPWpNKbKIhclXN8ixlLZlKfMm5JKiV0kQGyml19/EFNPbajW2r7UY7X8g6V24fnjl7Mk1ai3lrsr+/0u1rTN71jvOzvR1XlW955IFnnn1VGo6l/fB7376+yJ978ukj71utTn9jqdndX8ioCjHJzSDBh06fe/HmZn/QQdZlXR4Oy9imdqfriEZlIz5kpJa7dFSWswKiQ0s2aymT6WGop5NRQJpUnCg7aLck4NBNd0elZ8TgiSNHdg5aLRTEW+OirfVJm3TybGl5MBpPSGEZws6b/eB3pKKFnml303IWjliIqGgCA83GTZJbQlQkUPPBpZsLy0vDo+LrX37ynSgnejkeFPfYW7u/8nOzblXM/GK07aTXIDN69HGRKW0gxySqIEbXsWGKc7keIyOwpjk3WAJLALQms90lz8H4ypceSTyA96CYW1bDzMHBjtvcW25nTStXEAFImH1RR9UoEaOVMgq4MVYbxZIrFWLtfMRIVgMgiAoQQAIzUGZB0HvXSlJVxJBAfvreqwUu9lerUZW3ewotCGgtSRrP+P2b/+vf3lloVn/sRwanHxjh+nQ2SlhsotBxrCvNymggjRpJI50u1Ow//AH7IUTo6I5oEo6uONJ1pnzgnjdpf9RGtdYxl24kii3FyH48dSvdRBsFDAqZ8jRJs3pcNN4tPHqv+/GP9oxeTFb2vvLC9Dd+f8UdYGxAMISgjRUWVgzGC0QvHi1RNOQ4EgrM68Wi8rPdL/5uS96plV5vH+MnvnHlP/2Hhb0jDQgsiAiIdQB1/yPbZ/rH/u5/cxBCUyUbHxuP/uZfg4NNHQU1sjBBAA6ASoSYAZCBFGgBkBCiAYIAJPgQVeW/+PeH//LfDx2cXh1sV91jP/uPb6h3hnB9RqeSfOYcJKfWWz/8yZ3e+vXdG/fnbbt+TK5st5Wqg28t6IMXnxnIqUp1bx3tFsB7s0pFKZrYMrTYzlMEEshN2hzULobgS5Podqb0lAetVsXALnr2aWLyaGv2QioGgSizwqeK0fngqipCFNUfdN90Fc33n3MEv8wpwnOEy5y4NhciRO6M3JpQqfmGFIgUvOmjfFNNAlQEJIrebK1iYmGh+TgvIsAsCGAwoqYm+q1be52i7GjjU7UvMqzF1VBq6oyg2bm9cnnYtYQZlROaCkLLLB4f1Hu3YyHdFIwAs3BkARSrSVDim2TThEyqQrpYHXuXP/GecXaxc/0LNszKAowk3kdQQSdauxCqmhfW9bmH2VrCUKUDeOB97Raxn8EbF6czlLd+vPXRn3bRVjubenRb1/tm+w2CaNBwCIygSFlSZSjDu38q+Ym/MQmt9PZh9cX/w77+JaVLr4hRo68RagSOzIKCwBwBtRJBMJZIQHF9+gL9yC+GtTPMacdg8IfNaxf55muzl59ueegvdv3hi2kxRr9nICAx1zPSNSgvEggYorBABFJJS1DA16SNdFIUA7Mwc1be9YnieFvX+/mVy/L6pVSmKYlOk3rqNFoKjRY3WtHxXY+lJ+6CYaz2j+T8WWmZ+nd+C1/YsYT1AE78vf9mePr0rPYqDleyw8P/5p+G19zEyUyJaCaCfiuB0Hr5ZvWZh9/+8tbewz/xw3//dz830tCAkhB1BERSCAA8f+n4TvWPEAkiGpR2Yjpt67hChUqnmVXMAighsPMeERglzakpHThQAk0dfZhTsIJERiAQRgQloBGNJkWYIViUFFncvDMJhEIZ3mQVMSIrsMRUp9ap8ma75A4lcTYe+mvdpXZDLbFaAUmoWZp215rcqFZGliQGD8YmyNXE6BYkiXOuLoumHImLg94i91ai1zoEbDY7uSvrWdlImrUhImEiSFaBIY/EVtm0PzDGhLpp+5B7WGhwe3vfapN0jGm1RnW0zGXAy9tHxqjlPMmVzm2V5tLoMu+3DyeTEIteo3Z+63d03j6tWm99y4W4Obz9x5+zS52kI/s/+B6cP/Vy0x29gaNNenjlbTqtZ2rma+s4DvTqbNxenTa3fvnH0uU1lmFxsL2eDo6vndgumsvenvn5T48WTg9ffWbAvHj85KDT5RvXit/5rcOnv7+ue0x9FYgsG53E2ATnwItuWczbqCOIoJD4ApNECCNIZK8IIgfQxuRawxSKIqMp0YKTEMuIkMVmRJkhjZac+Eogy1KGejOp9xd2m+Eff7G3dBbe8o6hPRHvOR6OCnntUjtN2te/2vzpE3vf+vw5J7bm6FjXNQBUGjRgDAG0isCYaoIQvXdl7HSPx9bKwbDe/Ppvffj0Sf/yDdFZNSuTVp50Mz7cyreT5uXX9l4ddbsQ0JX95faHHy23L+UHw9g0pgHNCBtp2u3L3i3cGm06bxcH6UJecq2Vb+XKEdYsCHGgdS9Lbu7stkHOrqxm7Ww2qSSGJoThwW45OW4wnw5tm9a2Lt7o9vMzK4Pnq1kEOhzvUai7beSyXul1Nre2UrKkUStkduzZJt2tzeH+ztTGelpNl5Y6n/noo1/85kvdVve++9cTm9dbhcnx3D1nR+PxN77xDR9hYb1/Z0gWuCMlMMcYBYBZGu9DnHc8OhaJIfr5UkAAVWStUJEgCQOiECGiYkEgmN/mwjybNu+OiTGgAIif41oQkMhqq40xhAoFxZNSETjEWHN03oXoIofae+9ZgCRGYIEYJQixtjoAokIIEA0SsfFATKBJE5IERmQisJpCjN57BphGd2t29LYPf8z1B9EWh5deu/Ttr2bvenfr+Nns5DluqcF77x8/8dSla3tn3/0hv3Y8a4pm57YZ79NeKmnGOpl0lrNP/eT+v/k3HVckgADUMAiqiMKoKArEO/AJiQxCd0qQ50sUBkHBiIqVilCHAOdOmY9+oD51WreX1SsXyz/+PG9vJaLyzORuNvv6F6Tasm/7SFClYlf5Bg
ILomckUOmen/zhtzj/FiAcenBTgAooAPHc+gsgAvEOOFUTgIBHEAQB0BqYQQAEQRDmCKr5sYhQIs+NwqBh/hu4tbrRefTtR8q40cSQ8p67/U6r3RpffsPXpVeBdD/Uw6SaqCihYlxqt07etV9yRJ9bczCqgFu8fmbinmxFNJZujIrOR94RN84mp08F0MPGBZJ45p422IhU+oXFhTMWum+8dmM8FNXuZC1BQiHtoocYokD0EYDnzw+J0oQILjKB4wBKmbZFK0qlVrFGusMGCUjy5xF2YhAiIlQxeol3QgciKAIQJRILRcYYpQFEFVVi0qBCV8UcdYwxMjjmCGINYqJMaiBGX9TRhcZ5MhgFMCqFFokDgEKlSGvWKHyHwSXAiEBkDAaujeimadpalle601E1rfeW1k8un8qPsHHSaVuzfv4t0wNpL4M73Lv45J8dffU7VMWISBp1u5OdPDOTxKyfXz1+nIumLJ1vt3cmo2Of+Mv7378MP3g6jkaujJk25tqLptls2HsgFBNcEUQIVZKYaloiQwgx7a36yoWo0SgJ0dWFJ4SVbvbQe3z7wjiBsyeP7f7aP8vD9mRhdf1v/ffV4oY0Hqthqzdojo6AJRIhx2YyzrvtpaDhP/5js3qv6mT1qSWTtuPtfRVKWF8EAT+cGcWgFbQNGAAg0FZsCiaLWAGISgw0NTcVkQKpoRKoalQKvAInNdcqb6FNLXt/+7LJLDiyaSa3Dy3f5tcuqgIiICUZgQKt4rQwURuuCAhSBFyRU4+4s+9yWTv70D00eI/ceAonz1LjJVakPWaMjUQAbQgZY4C11e74cmNPLK8k2jbT28/dnAyHkPSDjuvnFo9qV053IsS1C48H21apSq34yVjEoLbrx85e/sM/40MhcA+++x5tSibLbGy7ZXWVAjcGFz/yl2K2KD4RsmZ1sXX8QilPsZG1JfrB00+snt44aHimpIjeS5wvYzkKqnlzMYQYPaD2HqnxjbKJJTLWpibJdOK0bQIam6TGJAJKa62UUgZRRCVqfhWM6hgkPQC/zJQr2ehlptvfr/aHrvGz8v71JSmqSY11o3s53H/PXT/74z/26Mc/WMX49vvPtcqD7Teufuu1i1tXtmUcT7XTC29/p++djH557fTZzae+fPvpb802jxYHVkHmW91i15sTp08sHVf1rDk6bGZTdOrglevjK7vDNX3yXcscd4fX9+NmU4RxuVkk5zVp5Z3qrJ+Uw4PDG9ezbmJSY/J8ePF6eO2W7mcxXd44tXIia518zwcmJp+6iGjY4bseeTd/8Mrnfv/3XOZvuk4DaceIEdAMYtV0WiKRVsr5sDkcd9K0k7YlDonjbFL0WsYHR0QQoyYzHpVNw4mxLOILt9N4V8cYMMlsxLC1u3n3PRcMqRj85PCoM+gpxT4wVTUyKEoFAA3OBy8EESREJcIyp7ySzEcxRFLWUB21yVc2llqOPTch2Go8WTu2ttBrj3f2lgbd7VHhp7MEBIm2dkaT0gm5JDU2EGnK03QO+mN2tYvtJG+1Mia1tz+ZNZUxMgE3CtxfXV5J7dH+vk5tq9ObNs1hMVnqL7Y6tuaARFz7XPBEZhIVX93f6/Y7ncTkqCrvN9ZWgRlFkhgeObZSjUOKKFW9NdlfOL5x+fLr5fioJe0f+8THr95++amXr+1NSydYzVyvZ9/1wJlXLm/fLkrxnLXSW6NRklpfxY2V41erN6wRIk60tNtm6+gwS/Kb1dSXZJVhgcV+vy6d0YQESNS4kOZ5l3ScFVD58bQi0amF4ENudXARSKkIWgBJovc3Dw8PENutblpUTVE6V5k0SdvJX5CKFvtZUfiWMXbRDiclEkQAYxMArAu/YOlv/KXPbD35B9mK/cEu/pf/7O+/8Ct/bUnLUmK2t+pjnXbcgYw6GQG42O7osmy0l5axQYJLMSKwZp1kII5DjM7PfWQxMqFKgFIS8BFDjGVtTIwclIpWa++j0nMfR5QIgKCRuKxU45EMew4+aquBHZCxNq2bUrQ06NGmpt+JrFj1xVehmVmM0niLmpRnRoVGSKWEhgMp1uN6+0tPdn75hwnWjp1+zBf7cwJjbIJHCUFWbK8zbJLf/l7DT65VxNEEDNnAONsCVQOhn9USAbWCSOylA1pg4AnAKGZCkoVuh129AK55Y99/9k9OP3pCUuM7PV3WrLm66+TaD31i+p//Y7txXAXgtgA49KaFSU/P1F6nvm1bZ1n71U+841g53fw3/6JNBjmQb0SjVVRLXZggNms7SXxEXwJ4VAkHBokknNUYP/sN/92nwbYKk7mdm2tlI6DARAQiIq9xfPbsyb/51yGMA1RKeV9Om0lFxRBrRyReIhKSJQYjLoJSSBCbmsURIAGSYgDFATUSl2g5XxWIEN1wtpx1it//1VPu1R/8wZ90D0sY5DZrwvho/7vfl/vfkp87N97ZbrXajSA3jFYv3Hvs1fqwvXH3iUfP/+Fv7amOImMd4mHTUGMzhf1uXk5LoLi0kh1Oy7bJmohb20eJSVvGA4YZe6MSEWUQIzKhREEX7aRG27GGGAVrJwZVKO9Uw84Z+3capwAYkN8UinAuFr1JWiNChayVQiJEEhACUDAvPpsbWkQAiEDdqVVGFhBAihQE5r9WESKjIrFaRcYQxDUy3Zqu9dRSmvWXWxV1JwAzIJA0aUK3TkWquopESXNYJ0my/uGPH+Vhur27++T3wrXDngFEUAqlCUorIh0FFVBiRPlZXjW0/Vx8+O1pkoOqq1FQRCwAKmFhQRZiY6haPFmLFl+3NAs6r7Jqht3+KvVu6Y1T8aH3hcGijZx3z4k/1unH2e//r7g7xBiQNHMEALBKTtzFb/0RyddaoPF0v/uTf63+X76J1RhFswTgYBAhBkTgSFol2nDkhgKh1iAcU+se+6C++21kElWFKBjzLH1sOb/77rHjeP1VGW/1Wld5bzPJ2hidICqOGBVIBHZAYb6TuNMFAwZ1ByAiBQAFdWHuvTv+6KeShYHlWTLeGf3Zl93F5/XLF6V0OmqDGLkenl8b/Ff/w2TxDGDWPhrDD/50fPKuyepS70fr6tn/LW2k1SKbLZnkJPMsVe0W1MOx92PI22ai1e7Ui6K1Xuvy0I9nxZ/82m984JPv/q0fPO1auS+9mTcJcAQUApTIKEhEcb5nJc6t6hrVy03h48QXAL6TJlqz94EFQ8MogkC1cyxAGiEIeXaegxekuYZ7J0+AczYgiVZgDSIQizQeYxRNGCkmRvkQomveVEsZk0SMjdFPjkYqNaPdYdluo1caQg+43N9LlhY3TiwVFSULeUllqz8I3glEEJumqSjiTKMiBh2JLGJKmofeHUxUEQGVlohxXDVHIWipOcbaN05naxptcKWCECBGZSNTGSWKjiJaowVePbEsYLhRkbOWUSf6J5+7cjtWLsbkVjEFz6v9fLWv2UFdye3D6VsfOhfKqZVZ6qGpK3X7yvYLbzgIg+NLzs+Gzz27/rZ3Ldz9bpm8krVl9N/9j8VM3Epr4cc+1D33lif/7t+7d19R2s4c+kubXcO5pOmx5f1xmA6WFt7zl
07+zE9d27tsj6WyUGxej90vfkO99py6dD3PepVNhEQlFLxPHINg3u/EEEJUEkJ0LEZHRNsdSHQQOBBipxO4QSBGDAzE3sZGR0flJAWKmHlxIRs4rKU4AqgiSPQeZoUqD7SrTS29WWWqK4W7sdhemD2znVv9QCud/p+/E6eTgdbrqKKHiEa8Tk2XeTZHr4G1gT2xgBMUBuYO1uN//XfHy+3OoPfJ97yvmlq9+pZKezO5wbvj1u548g/+32GZDHQfvffc1tUrstJvf/zT+IFHXLVdvvrKemv36LsXw5HOP/zx2pfmyT+TQ99upYVTohxoc/rkYuvIudqTjxLxqe9f+vQj92er+Y2XLy530r3hQUvp4BwSINYrWRGxvbq8cLB1e2Mxvf/s8re/u39ifZ1lWlWT3HRNaq9u3+gMlo+dXITQPqrrbl/tTo+OnVtvxZQYdw+LuoRuOqBSvv2tp9tiu0HTjL1FH3RZRhLtajM8nNXF7NjJ5flV4CID39kWCEgM0QvHGINIjNGLeAYf0UeSOOfZgaCympSAUiAEQgCEQmp+9XHkua00QgBEQWaJQhIxevGICjWqBFTCAC6IQIwMqglN8M5DqNg3zrF3HEEYCQlFIaKSKEEaCWIYUAwTaAQERAYFUSSyAAGBUyhaa1TIEQLHCDJ0zfXtCS2uQz+zWau/cUpbe3Q4Xn4wTweLlSvdQiv95KdWaqUWV+vKcTmzAq9+5wsX7rt/7R3vm0xHnFh/+tjGX/3p7c9+Vo0nUEWrTeMFGQElisyTtSwiLHNY6Z2n6Hwnw6yASFT0rjq+uvCX/pJ74CxAlDKY9Vb/gWMjdzg7ikUk9qaVkX7tFbp+BQyk0wmTDiBMGESIgUSZMsIImEEiaAQRCERiVEQEEYNAc7wgAkYGAkCIAIAQGBIEYuAIQgAIDHcQlwpAoUQAYCIETRSEg0EGKKpCq4RMmrCu94bF4cXmcOhCTFcGICj1GI6OciKt1RRNurYRe0ldKwHWnR4NaKFlrnUyiaFA79ZW07e+mxdXJ8xuUilK80G7aA+XHnqwmtRoOk4oJvmFd/zwPuRPv/hG3ZQozBxjqCMLiCgWAQTmyAIayZrIkYFAlChwhnXbcBClWWkFCKQVRxH2gkxai1aJMRobEhXR0B2lCOY/zCCIUcixnT+yI0TlEROb51RapkhUOgQCL6KtNooMETKgMg7MoRcOzYJBlFj56AMgtEhEg2hEZGGO87dEBI6EbLVVNk1Tx6FIeOn06uSV61qq3Z3Lty4/c/zCW4OBpqmuT2eqnQza2t24sVTuxX6v2NtDDq7k3GZLG2vRS2/1OGd9bWMnC8140mtlFTVLn/4Jh4V+5VXtXHAx5bq4eB1RQdY2/Y3Ue3ZNPXRaGR0ANYjEyo9iaBSLaWUHR5P07nV53zv6d52bfu8FvRDO/uKvBKzlU5+yx9ZXTj0aVjf84UyVW9XwmsGOWV4C4bKcZotLvkFoX3Bhxd+cNnsXuz/8/uxn/4ut8XgD49H/8fnBQh+Gh9R4AIGMxCCKhmAg0zwtMYlWhCWAQgBDlAMwRARrQIBDJGWAUYcayxJ8AAGTEhQIa2dw9Rhu3fSHU6O1TVuBCBxzVQAqlWbADYgBcbDYB7NYrK4aRJNEnyb6vY/gex+AW+8efvd7+f4bePNZjpW2MbgQYmAlACG6OH7hycmsfvhsVYZaHfnx0bRuHV87ubzbPYJOQgNj1IqSsqm5lWZVXZjWhm6n+aDne8nR7kHe7a8k2Fkc167KV074GvI09TSmPt/1kz/l2uehyrTNPKjZqH7j4hVMEidhuL+7vL58OA0v7R5sj0sXGO7cZ4T+XOoEmBv5nWOkEFmF4LWxMQbnGl3XpBOtrLY2MYqM1cogKZvkmU2B7jhMTa9buHjUwMv71bluMjsosyacHCSHl4eLrez0iXU0uUk673n44XsunPv5X/rpzomVBpmmxalzZzaWHm5GVTmiB04/fGxp49aN6/f+zE9PFNaCAWTt/FuS25v9MKqa5sTpM+7o4PDZV5L9pkyumdzuXL5iUK+evnfWNMt3ndq4sDEep2at1f3Eu5bPnIybu/TGdQJKVpdIYdM00Fu+8OlfSACuf/d7mfjFY0uTrVu2rvYufX9yI61mgdZW0re/ffmeU/WkjpQwm3c8eO/dy3/5S9/+xncP49MvXXzr+cHCUgsmyYxZQ+19RGKOWMya4Ph4K6HIvtb7u6Nz59YV1ESQKNVJs4PDUhgde/CxM8gPZ0ftrOudJ4KqaZ59+uX7T98NwRxNq/5qV4GaFU2SZa52TdkEXxuTkHJZlmutYhQ1Z2kyIiFKhDnUmhkAFBKILPQHjzx037Nf+26KJ8XkJumMS26aIpasstbW1b3Xrh3qHoEWR2TyzBJNyzHF0O9kzDJzcX9SkNakcFK6mEJsfK+dEYazp5e+9dqmFnt0OIbcMklR1IygEr3QXgze185VTSTipHQnlrvLbbs3rpe6Sy/u7zk2SdqxSFVVKJPfPjjstDIJsR6Xi+3WzWvX1k4cu35Uv3Z1b0O3f+Enfuaz3/6DqpqFwneXe5OjqjyaStVcdLeV1icWFxd6XX80HE2bOnDeMi/vXg7QtKxd7XZube6usty71D8aVZlNjeMqsM1spOlinwaD3tbeUeoRmI2IJvIoQcQQlVUdAhgNQErP2SQhxsCapdu1C/20mLrrxcxNCvQeRZLaGcK/IBU1HNvt1nB3OqkbFi0gMUYlyrCkqTmfHPuxT3zyX/7Zb1Jv5cwv/LVbq6dnZ1darxwWYSTAofEM3iY5WPJNrCtvtLJRmtqZ3EKOZdO4xAaOfXEpoFaK71S5qsAigVNCRQoiIQdXzEgrQSx8TJSJkVGJD45YRYHIwkKF8yF6rRNKlI/OJhrJhKho5Xj9wD36kUfQhfKFp/MUzOK9sjsMbzwHxQFFxxABRGniGBCDFgYPSX+wrPIrv/2lyfbXV3f8cTUo0tb4YF8lBq2AQ02aQsxFmUNnQMS2ygg2T1zjVD/ZnxxKrvK260KqnEYXJcSASJkmIkYXGShGk+ahDpSpdnuj3incn71Rmq5ZWDGn0mevPSfve+fRfefpC9JBskkOIXV1mWRx6EA/+LGVD7x78+lvd95x7I9f+9Lj73v3xSd+/1090gWLeJNqAESK+91M/xcfs2qx+L2vmRuXLHgmieyBIUoEUtok7ciweSQ0gQA6OrCAFEWiZ3YxjO4+d+af/08VSvn8Fry6p1fyNdUqv/35xDkB8sqEIAohCAsD4XxdBwgMmiJTZNCEcxIHx3nvy7xOWDACNlX78rOTf/zMfbkyzrjNkSLINdz+9/9yYZCd+uAPu2d/sPfECyto2LBXsn9jJz/fTjyPbuy0Om2HVZoo7zgzqQHt6hAbr7VhoNxYTqMhFUJEm2pLYTImSyGIUYpFQEJkJwJ92z8qBaKyxMbgdFpGThTEo+EdqYjlzvaRQRDuCEZvqkRwxy8kAAgEoOZoawH5c1QnyFwTDyG6gIRAgDBfoArE+ZIORATCm+XnlkQRGgSlIAYABh9gPItXoGhMdt+9Z8dy
a7x9SCkUrCZ1zCL72vVySVPPR81rX/3ysY//3GZ6ovfJD61RsfXv/2HXN8ysFYXIgBg6C7R8nP1Bu9jLNPo3njIPPtYspU3UEoxRmr0TRRoVNzWgRGN4cK6OCYamKQ4z50QBpB1sdQNKHCwmK6eHwzrr2UZiIyq565314m/S7igBEGTgyFECBDx5L6zcFSo2lkOi3fpJe9/94ZUh1UDCooxwAAEGLWBYQEnQAN45VFY4RoLszFtdtEoFk5pQNtoknvLDzGTv+4Uyfta+8kdJNVMmdQF1RIBAiDAvDGaGUIEiJEXAEDBILDvLOjdS77RnswBulhS02JsGr13YO/TLj3/cPvj+/c3/eVDtu+FMq2zatr1f/nvT1lntssRaaFF84j+1zafLtR+qep1CQRuT2f5s83O/n/3i3w8qG+7vN2/cqn33IBQYWGs8tjBYaYXFM8kNcoPeoFpZ/bF/81vXGu2iEsbAkQUUIhAgs4AgsiIwBqyixKpuon3jxlUdrbE9zV5NPXPpEmVRGRdjWbjaRe9ESM3RVAZJRJBgXt0tAESIIojIIoTIQJ4JAFBAKQDCyJwizsoaERDv7NA4Ikfw8547aChWS93FF5DuvvescrdsOVvudsmaLE1C0vc9zamKlow3MQgDSEsHx4gpk47BR0AfoyojTN3w1hu99ZOLq2dcHBXjkUa2uieZLad7edIqm8ZLba1m1GRyQutDVTfoWZSouizTwBoVoCCTD1Unx6a+IaqpJNRMrVSD8LByzjUqxMXFZK1rD8rh0XC3bRI/kZ43mxevzGZORMY/mC2sLayZfPLv/uP+8H9LJ7O1jY5jA7XI5ZtXn3kj3nNykJ8v+FKYTsCwFewur22X/vrexMSQJbPt3//vv/vCPz7+6AfaeUdvDHJpx50X/cFmasFBYEY0SixIptkYdKHRXpFYqlJFZSWgMxCpIGquVQzMSSSFpICIbC8G8NFHrjVMk3oSy4gqMdmAacYKUPvIje60sB6pSYNFYfIWhajrGKfDXppAOWrNCrIKJ2ZJiKXFjQRuOIRIQQQDsqBXLByElQJiYRSPkVEbY63Z8H51axcObsJwW4+y2jfiCpxUlrXS+cJw1AmRV1v+/0/Vf0Zpll3nmeDe+5xz3WfDm/RZmVVZJssDKAAF7w0BkIQEOjVJ0bRa6lmaJuVas7qlntGou0VRopqUutWgaEBQIAmQBAXCFWwVgPK+KiszK31EZvj4/HXH7D0/vgQ1/BG/IlbEWrHu/c45+7zv84xx7vjhTdfvPfmXq8vjtVEZ97dNFnVum4cLN0bPfhG5jVXNaGylN3qTKknNcrO/56rapYQC4gIqgWcee7a5ZB546x1P/eC10pGtXOlZNVJMk+2dotuyg+Hk5OHVboT/+2/95o7LBlxGKmgTi0clUdbtFFxxabnIR84mWWu226G6cs5ykDQyd99777WrN6zLm/Ndw0mdy844UFNl3dWZ+cXe5cuB+5Uex7PKSzl9C0pnJQgLWB9EOHAAAhbxzBzYheAY3DTghz9UHRDS9K5BgSgMAoBKIcq0gyWMQEoZEQ4QQggiXkREHClQWiFqUiLoPRoHQYAw+ACOxXpf1EVROx+sR9SEyig0ZEACMjhhJlU6pzSRKGEKTggCRF4JgkjtmYSZJSHUWitFkZZJZQUEtDj240HRbEQrJw+emWmFVkOSVMAAJV5iHceGpOK6CnW2OGda3dOJeu07n2fd6K4cGoEbiaqO36f+zrH8u1+bPP74rHdaIAAE0AAYhEFEEBkFQW5e9AOwAAEoAPRePNk4St7y5sHhE6NhPduKIfhopjU+erRoLYqPYX3TXr0UuVEkHndHQcAqEMOI5H0Qmv4VEAIJAACogQUFQJCDItSICgJyQFBx7KoqEyVVmNbJBUAR+ABRAA1gp6s+AQOIAmZgAUFgFMUSgkcNthj5Gxu8WOtmV8oyuIChLnevQaD26qHQSvPxyF26amoQRRxRKPPtM2fq1oNZq5mVASbjUO+Nr52zyk2Mp1Zy5P4HixC7/rjZSuIIRvuDslcnXgcsQ1VUvdDbs3O3zEOUgUpqdgElOKskgAQUnm7GmKdgDxRG9tNMHE/7c7Z0qUtssEoC+Mooo6dOWtQq0hWyU5REEXornummcehmqoiRArDRZKFmsibhULJXSSWcMikB9IJA6BCFI0M6UooQWFwIKJDXaidXXoAjTsgZMMLKEESC4D2BImGFGAhAUyBUsdFJYmKDyri6rsd74Qa2TKMc7DtvR3tbxfYWd1OMcbI/7r36/PEZ7D31UjkKpjOLTSVVBYUTrn0+LPZ7fnY1d+XCXCPiurr0PJdbyerxXeHlD3xi33V45zrCaDK4AhyStC1RECp97qAKCaCyLoq1ykxV19469FzbgokbM91++9DSW3+mqnYb+P1Lz3751N/+xw675s0fdM35snRx5bNQ4+7m2hPfPPKJU740HJyJu/le3R3h8AvfVRCB8g3FxSsvJcePu+TWndBqPLRon/2zqJmoNAFbhWZs5+dRUPb307CrGgoCQ5JQ5aWYiGVSEUgA8KBiEC/OsnOkptd/QWUZVCUQQKezMRm0dKyHw4i4LgsEFUjiTBM6ZgVAEALYEhB40KOPfrh131tr1sZoXxVMJlHdQfOB8OY7Eljv/8E/arpQ74yQrauDtaKJIViT6Yc/+u79735OOH/Le9711HNno6Ta718ft7JOoz3Ms3a8lKZdca6wQ88JUcMLF6XtLC2LcaffemJjcyvgJMka7ACsA182OlSutFrv+7lSryjWKoRMJsquUSam29mflHvWB4Tzu5Pz+3kJnEbgvTAACwBikGnrXkRuhkMB2flAgEYHp6w2WqnaRCaYyDptFZGO4ihWJvECgCqKbp6O2zPJZDRUhDfysYLy1LHFYpzriA6fONhJk7gRbdy48ZMf+9gdp9+edhut1TkhmQxHS81u1ExA0/54+MaPv//Siy+9ePXM4ZV0/dJzoTMTLxypWXZ3J7lvNFZXFu4+IfVw/StXo6Q9v7Q82L9cF9Hiu358n/TcXSdUNdDDjUZSjL/1Z/v5PjRWm+1DS8cPXnj6+/uPfXvp8PzADebvfyCeuc0nSR0l8+9892xHq0ntn4s3n3262B1u3+hHkQ7D/nA0GF55SQLEUXvttUtQ7c0fnHvzvW/+4D3vubK79sef//zatXORMhGrTCunxTkBRbULge3V9b12IykLG2dpWVXtNHIc2IZxVcSxnhRemLudLG1Gqc2AgyIaDSeHjx9wyj35xLMRN/fL8cc+9RGtBBHj2EAgACHSIjIZDgZ7u/MrC1rHwFpAhAgQpx0NZmERYAkiwQs7mkvnVpZXLm6Nzq+NgoFji0taQ7/vBuu7gyh0js10MlXlw9tuv//rjz7eK8ftpva1L6QSkMXZbp6mO6NCx6oWseAn48nBuU5izE5vstxteQeDfJxmGWoajy0AJJEWhnxU545LT+1mHAt5wKu90lvMJ6NMJ4bS+dljl9Zfi8h4D62lNnhVVlYrWl2cH/qyZ/3m1vZ8C99317HvPvHFrf4eKTJECdC
R5cW3v/8tV89e3x33t63vD8q65Lbi+XazrG0riX1dWeHE4XAnVyobFjA/kxn0GkJR1HEU58NKWkZr8nvD/qAIjMp4KG2VVxikqOqI4gOLLb07LkPY7peZignJO2cYGLhjIghBR5B6tN5FGbkgVliR+Wujol6vaCYmr/2wcrGmKI7YBc6DViprdP7nX/v1r37pj4WoVs3jDzw8qYvlN50aXjiDcya765a8P5Kxw16uPZKgjJ0xBCnUkSqaTV6YW3n/2+pjx/q7vfL3Pqd3t7X1GnTNbDQSqRCYvATFOjagMc0ajGTH3qC4EDSQc8HoSMfKlS64oGLtmClGFxwJSR00Upwoh1HrgQebv/Qz1dE5e31v8MJ3O7t92L6hdsd+OGYWFGNDiJqp1i3LSZCBLyakTFUXVWpO3nm4uxjtb173ZZXECjsNr9TQoLKoA9hxQYi6qUNpBW3cyEBrY3C/X4a33UcffejG2t7e5758KDhjJPhaBDRnwAggcSP2yg7sKJ5PYvajasikNWQT35r5wEeL2w5EWx9avvtEpoZ7Te/2ykY3mfhSJwGM72Z68tQjw+ceg3EJ+crHf+6jfcrvvvct9Wu/wxXGWcSkIx9cKrOf+pH6/e9p7Ef8vefChQkFESQVQLyjWHkIHkB8nWgIQs4HHZELDh2TIq2o32wd/tV/bGe6NNhvrKzg3GHfaiSTXDbOOTshj2AUGSREcFZrjQxUB0RRaHzNAqwi9NP5kIjSJMwgEsQDABJ6sBhR0wAH8RSmMjUFeHzCg3/2r+mzfyj7/QPDOtTORwRsbjyxMRuvFmp87INv7RVVo0ng2Be1Qh2TqosizqLp7WS+PzRaBedjILE2UslCq9m3k5aKAqFlW7q6Ft/VEVgHpU8x9oU1GhbnZjbHwVf8Q0v4dG8k8sPLz+ns52Y772bsaGqTQ1SklCj6K8Y1sAgieg5OxAawQaZ0IiRAhYFZkLxnlB9G3EUAOSYQAB94yhpmBk2qtFCM1e76xDd6dx45oOcaN167qlO69aHbxs7tnLlU29wIzJrG9nObMPzC4QdPD1bnolsOwqGZ/PyOQdCM6ELZztL3fdy84Z26dOtf+I8LO2daUlbf/c/dlYVyZ+xrEBO0NoE9IBsVkRR57UKoA0Zxa9Ymod6vtU1dsS2DgYxymJ0kSaNhMpWQC4WPeTeraXHJvfSKD14SLYiRVuRdaWZMNpMmihSCYwg1uAHkNfhAClgUIAJNk/4CLCKeFWJiPLAG4CJAjiqAZofORQpCOa6tNs2ObTdk/iBIaZwXJCZC1CgA4gEApjUHQPZMyiN78OgR/eKRIo7bhfHXX9QEtHk9vrxhFhOudtpaq/WeK4etQ7f4i2MVe8ekFg6Gudu16hCXEGGYTPKXx/bSZ1d2L2w8f/HIQpqvVyaG/e9/a/HHf7FajNyci41/ZTdf18pymNWq3cbdGf3k1mZ/rnV+CC8++UKhY0tIXlwQEFBIikgEiAAxGEVpGmUGFcAodz0XmEUpVD5AEdgHBYa9GpRixVkHYpEFAY0ECMyEaIFBAG+exYRhOqkEQhREIPSAQUTf5GahZVbMtQdDICB/9RoYpaoaGAISRKo9meRQRneeuFv1LqeEK2mjhQ7s3uj6WWw35g6eHpoUFSRJEhSaVgci7Qf79WA3iPJVRcGiLUc7Oyb384eWk8WZ0u3X+TBO57jC2nYLAuvzKBgkrRoZabR1DRIpMugmMZuO1hiciCVmHzigidNEg3LD4dbO5nBvMJm4IhYIdj4CyAulk45ptgtcOHxkHfjQ//Mno5VbdVle+5//hbveI0VFiYtZMyZl4rgcTOIobq0mJXNvv+5EsYK00Wnt3X165id+Er//zeLPvjhTyOGlzmso+TvfuPzAXcM//CJcvb5CnXAD/PXvZiHUSVYYHYKEzu2D/OoyWGPIKAQoQ103oiXKon1jXDaX9TezemCSTISNTmplECWK0DkW8YodeRuqmlWras/b+eUQa9zvZ1Wd5IMo5HrUgxAwWSRWDL4Jte8POQ/sydpaiUFmP6qBvbAIM7BVCqyzQSkWjjItIL6smAMSMQCSmhKmkXAaAg8oUjoKLEG2N0aHDy2CG6m6pIhJpCrqeFbrJnFZqsL1d0Y4G3WONOHC5uA3/vjIqfbCLa2dSzMvP/n6ydUoHpaTSR5352n2oLdjM8lBQ6yok0a1DVqhItQZpakqtnYbqnNpfTMR02i09nvbTISMCujs+c1Th5Kjtx449Y7bvvYXX73R59BErbgsayxUpjQqj8yO/KicdBOzutztDXPIQ03KVtXC4sLB1aOVpWhpdmMn19lMB1LD46XVpb1hcf61Z54L3NTxg2984MorQ1cXTzz92vQtGBWVCARmZkYCLwyAPjACMAvj9IzOLMggChVpwpthIhSFrJCAGAFBCU8DRoggQqIUCchNXTg4YCYgRUwUQIGHWpARYx9EAXi2NlTO1d5a58UFEICIJFFBG02oURR7D9oABqVRNAmJQwQkA6QBEDQ7X4tyzKI40sqQKE2ZosQRTLaoHCbzq9ZWblAsHT+iUDWaM850lABFxloHGHweNEfFxKokK5ZOrdz3iR988Qt33XNH0oji2ZXQDpAp/shHj7z9jZu//Zn0xhAdC6MIgBDgdPLAgsIEIKDgh94IYtSEIBgpyloeIqNBnOFx0buykffqzpvfZQ4cg90b/Sce2/jKtzpjGwuTBo8hIGB9U6KMCgMII6ABDOAYQoBIoU4wxDrElK2upAcOU7eN3QXKMjXuj597pbh02UxG4BGDiIAoYASFisN00AQsgDdhglNPBVAACED7k+KR78pCt0ijOhAmsQ2lQ+9by53bTxd5bkbjwVOvpiNxHlFTwhyefn6mGeX9jd7OZHB9OO738lEfvXgA3e0ki4dVmhRBlGc/zvO9wdbOxtLszLi3vXXtYnNubvmetxiFRiXRbItNCEFIi3iZPncCAECgkBmQUE8T0Zq0IhEBAkUYhBwalJBCDQFCcEpI6RjAAAYdRYYIqRRCIoK/ShUhKaU0GsUVggJRjp0nqcBVIYk9QWUF2TkW0Q7FEpIypCIgJRRKD05U8FHP46j2jdh048gQxgAImOgIfGCAIIEJWWtvCFMjcWKVQqUSwOB8vTf0zsfNdhT8pVfPpAtPzN7VxNlssrnprl3dvDIq9ka8cHTl4Y/0h9tu7bJJ0SjauXRl+Z5+opQocuxsrzd5+tHqylk6vNJwktvaxOXw6PEjf/Nv6GJt6zN/tP/8q0c7aTnqxYhO8d64aCakAkWSiiauXNxoigupjtgGuHax/INf7xw/BNmJI+/5YLXFOF905g5WI0m5MpO90Zd/n199tpsP+3/y76ODd0fK22p3tDe48tXHjkrUbcWkKdROrV0pPvc7S6dOR0cflOGaNAWUgWIiaRYeem/6kZ+G3NqXXqof+a1Y5cAMFUKYLucKvJe6wLQB3oGE4B2pOFgHESkTMzOlGvxk0yYLv/zP/MtP4N5VjIEFbHAIpIqJ0RGpJFS1oCJmd+hOeNMHqvbBZNwwUEGAqBGHsuDKS2Vx0J
aMhohqWbeF+Uk0fuPHb8/IaqYz0JgoDQQCQSVuwT3WCld8ZlmukkscF5lCDBW21IKQqqpYxIyBs5MhvCqBByEyOqpBmAJwjE7CelETHG2MSo3PrxRApnmatQXe2OXOU7uuPDOGmlMeEbhUtK66ps4KqFpblRNaxFVtaWRoMiohw9eudb3/Xoa8+cUBZEqZ1+zec2j62urs7MDIvyRr8YjcVSmOxNVlqLfjy8dO367PxcEaESaeRZ5WBQOVdWJbjFA1mrlYALWmEnyRoL6c7O4NKpHQNZqoMEv7dTJAqyRlMzu8pf2KxGDo2iqmSjMRFKDC22rGFPRvNw4gz+L1JRYmhUBEBUiDbVIeLSbCO30NsZcJoHsAWmaw99/6f+7T/90C/+8mjY+sanvrRaQ30B9u2Dvf/6MzQ7eepjnz88gfnEuGCV4sCRPSAaAtGV3GLQffNKXZ9TMZp2LjTtKo5IhFajQh2ZEBVRzBtbmaY776L5A+rS2fTKRZpMCBXHGD2Q0WlLuxuvanc5UHvOUyub8TvrZq6NVM1LsfPZ/zx3z7uMTgATkAKtVY0mEJAviRU7Pc6Xs7c86BYXxWl+6kv51iWTkAhGEEYmBGQUAm11RIrk0SgiFevKAKFx0QdWpCBZ3t3pfONP5cLLYc+3IqNWDgQQSJMi5QvXmu8U48qsLV9rLi399I9fSaor17oP33WvXBtu/JtfuUsPiX1zRg8HpS8niQajtIoswKQErJFpqolFghdNKECiQojKILPo1ALYclzbpN1dTte+++fj4Tv7Oo3veHcDQ/kv/3mbXQwxcs3MwCEwaMIFE9x//EX3zJ/MzmZXXvnWAZcmqg1Qi7LMQWJUyECem6QC11UJRxf1wmE+ddJM6ulMAloBEQBBHQARFAJHEAYi4QjMBAyBAIzyavnlk/2//hdxNrFXi07aJK18g2iyy4VhXfSv9YwfUM2kVUTCBNk5UNA6sHTnO99h3vOW13/9dxIkYdbGVJ4VQGrNWEQbI0hRxHnXzNpWGR8CEAijUYnEQCbRVgUOgYWABSE1SoEAojU2+NqFoAkUMhmsK0B7UzfFqT0IIIowsExtrkRIgAQIOB3pIktkCPENxMiUTgRwk1B0014EmkhYiAhQUFAQAaeuImJhQkEAERAgBEAkUqg4UhSFIIARQBEkShNHjkICEpkQhYVBkHA68RMKAYO6CdEQIu/jNNoQJd7MO0gkxGmiLQruFPz10ztPn9uZy6XTSQ8cPhIWFvfS5rg72CuACtGQZqnZ+vp3rrz29GAUFsfUT+GJv/69C488fOHpV7ef/k6609eaLwqsLXfgtc0Zq3Lw5cQZBkntoAm9fq/YW88XDkekiCQMEhyA0olazqpP/9//5w/MrGUL/sK4Fxdnxzt1XU2WZ4z3fjKB6YmmNCiyOIG4vDB39yMb1g5BVn70w25nc3T5+kzO28++mo5qbFinaFzXaBW5WGx2OVHrz3/hgXe9feQNJ1JHiiEKs2YPo43NU9+6vDfss1UL86xymmu+em595+SFlg+Zw8V2Pi7jboijWo0ZgRFFYgyMCBqn+p9CZAEQEGLQlGiVaUwVIEgIoapkMo4B0AdmxmlDNqGIgEYhFECMgAACKFNCeogSOAq/IdUwFnVUhEqTkDBRiJEAGbB0AUQJQ0KSkGKOIiIInjmCiCATmdQaoNwYdhEREmtABEFC5CjAyIikFVhBB8iREUHfjEzeTGHGCN6xpqmDkLJEq/KaLUrXu0A2rVITuiWAZuBhKx9XAYxJAMoYe7tD3QpJS3VyiNTPxuN4sucxVTO5XZi3/YKK4I0zRsUQas9YSWqpabEJQHXIm4nzUqAdF0EtLu3uDHb9eA6wSZJuXhNXXepPfOnIgJufyW85kmCzvnGlHBfXfXPt8Y8YWPFXuq99+XeTnc2FmdxXMtoZTKDp33zgi69dwGdOrNVp49JWrIc7A57s9l/9ysvzT7zvzqPvePznn5g9u/5nP/v/uf/Awl43bij76P/1y9++9OrghZf3pcno1VOzAImicVWH+5YGIV6rfOvBH127893Jxd2iBBRi5zXw8Nzz61/Ut/rJUuuQfOEEb27unThTlq6VtbxjRoEYSFGcNnuyoAhSNCw0tZ8pTaQIcXpBhQIiYowQAnCMUQKjkQgRGhY0mwTFOPJhMhgnmqjZiEXF/a3cdUdnX80ufrtEg2Bx2IdqsjQ7Ayiwfl17UK3OqPDa6ohQ1l5pSixTrDjDqKuqDGChu9U99pG/OVy9U63cUYdxc77hygpndGM+FVO3W61Br9jauWjufXDwysszOUUCJ6F7ad16FUqANNnx6oXLxbfGWh/o3P3A0d3r4wN3HU1fy3ZubBejrsTYaqWRoSz9qF8e2NcZbO2oPMsz3ejkadbIW529neu1o+ikneaJSSdVGI3qYlQPJ+O1xbm81bl4/vpsOx9OijS1m+s9SubKEJOAve0h1tXIlY0ka9s8NTCpu0Xpas8xOBi7q/bCqCwjiSbu741SnUxcfd+Dd29cOlMMi5vjAcuU4iUswKKVYlZKCaIHiRJijMwsIKI15cYkSJaUwelJNKOAEhBEBoocUelpbBkgAkxzo9MqHjXtOteEGhEQGJkkxOA0GIjeiARmiZF5+uQaZbSA0tYSKQUKAgAwREAvGASY2UMVYgRiRVSy8iIGBSQKSiqgSGkVYwRgiVGQAGHtljs7+471d0cnTp9bPHRvK3IsPGVZp2mVONNqHVj74MK3v/bqS099/4d+SmYPXNvdjplsbO1UEbdqXFg5dMvR2ybdXvQ8KGOFreW3PpF0++Nvvd6uI6I4Hy0oBYYmIROwxs5KjLViFYsXLnWfZhLIEDABAV9n4hJtHrjPvufdm81cDC29fW/0nWd2v/nUTIiWFCDWProIyhCRcOQIwCAiCm5G+NBrCIG1mdKWGSiimvbXCRN6QRaIVgGzoNjc1CGAiugkY5UHIc845tjbc3G3ipBqooioMTFU17XSgIJkqWTXaCSCHAWihtW7HnAPPgwmHZT42jOfq3b7yuTjC+dsgtxUY3DNBdPZv0pHV/OVQwfve3Ni27rZqRCY3fzqrYPtPez1Yj2cbA/667tpu9GZaVWxRGOMJCARQ7DaEjEbdhQrBBRgFgQkRYaIDBmltNJTBAsja6FY+RpKq1WirUWMMURkjlMmdgURkDiyWECMMdUaYgz+JtbaB0+itabKR0YQwogECAnp1BjSOoZYxxiMTEs5lWBkAAHnY82+BkmAW0krBgeIRmnWgl44ig9So1S1V0rX0WnSRLZ2ATQECZErwsAQfeTgow8Qg4zHNWi6sT3Wp87nczMtWOD1Gy5MYsqUtw9/z7tlcaUohOpgtUGV6kbTlzwqRpGjiFNFvfPqV1cWm2puVbXnvCtnNOLl86deePXwR36mpbKX//Nv1lfOd0ZVKly0mrN/7S8MxsXk0vVOEYbru3uu7OzbNx5Vy2m+2knVs8/3T34rffxNcOzB8vb3ZHfdPnn0CXt5t39lq6mqdotG55/f+/1fzN7+9uTeD0CS8u7FFlnaf/Rob9eef73
66h/y2VOJKmcmF6tnPnXosQ/vugmA2euO9zcTFUyjmujNizv/9VfV+ct5w3iw/Oh30yNvGVw+ZY4/ry6fS1wFVsAH4Gp5cLn+xM/H535VB8WXXsmWmtAyMmm60nMV2YsxyuRKA4gEtMb7WI9qsqSIhaMFMiAhulhTPaiLZru1cKcPkSQmhEQqTFh5TtxW8fLvZewBqRKMhiJEzSFpECiEGavunQ+N5e7INSX6ulKqMoR+MoagtZkfbBxXS2Z1f3ZiMBqHKi10qPjK1a2ZrL3/0G1bF84s5wkMyvGwaBCZNtmWZkOjcdkd+6zZrvpDQjRaawQQBgBEZJ52wqBWxKiDYBQlAhIhAHFAoyyCJ0Ifo4AOEpEwSVQUbLRbmGdWk9E6S7QgKZ1AiErrOoRQeMGbFQf3PfB4S7dHZ0/Vx46ybjmBsi79q9fLl8+VZzZIWwqxDFhHl8HQO9debqH0u4Od/N73fSedT+9/eGFUNdf7OCnABa3c9c/+xsJ4vffM+VndEIn1jetGj7TjyfFhhJjvW3Szy+nBO/HooW7Zh3PnzevruXeYAsYUHEsduFI1M7Wb7cz0nn8y3dwf66AHXcxSEWmktq5qlQBzFJC8nTMqCR7EkiPLtb/WlUaZicbNbS5rMzdf10HqOlvKC13o9kw/m7tSNpoL7ZUHH9r87H/Y1y5yThtYZejq4fC3fv1Xv+cv/9wff+pL5bifGJVmRoiYOQrEKNZIp6k5uplmcq27Z8gAwoWNLuLp++/Zd/bE5qeffe7yZhdZV53szPrk0MG5mrmoJ1kIECMNRqlPbJK6qkKUuVbrk5/42G133HH2ev/63taF468MdkYKh3//n/yfP/WjP6VN/fk/+MrpcydRVKNtnauyhjEmz4mGAwcxtjIzjDFwSLJk4kptTI66qEMdPShwIVhrogiQKmoXULTSKSUN1aplUtTVZm83TVseuJGqfbOzkWm7Kq4PCnF+KdfH9q2evrA+USIjp0jPd5qjyWRYlOs3dleWVg8fPvrsmZNzBxYxVFlVv/3Nj37+a09Kg2aMsZ3OYLeXV74eju84estd99726d/7o92d4cGVhQrqOMGiDGdOb9xxrHPowJLOe/fctnLxws787Fy7kRYTT82stuxq8RBW52d6w+qWpUUNdZ5hM6NWarrd0jEurzVOnzub2dn+AJPcLK/o9d2dfbPzDatKr5JERBg4Nhis4GorzVBapJe0pRhqz8xBKZPn/6tUtDOcaDDGYmLNaOQndZUbOnrPkX53N2lktjm8fO3CO5p3Bzd84uHHXlq//vrTF456W4P0L4/dr3/CAjzczqlSEqJKgDSqiFEiCLoKCZRFQceZMSbTPgaVp1LGxKSB2ddeCVmjueba6vSxR2be/Ta8816am08vXhj+5m+q11/VgVkIFXoWREmQ1c4OYV+xZiCllB9WYJDSdL+j+ulnUDW1Npy3go/oBkxMDOAktJcWP/Izw4cOBquozlutmeqPfycxIhEBCDWjq6UKwgooj2nOizq2ErdXJMMJjyqIEpm59FZYWUgmMRzfsmnKxIzIomIMmgMxCEFRDaEjatEVZ589/4fDAx/+seV7j47HW3b/zMM/95H+v/5neT8M97pZs+mhxikzPkZmDsEZ0gxoU42iY8kIKkyJuYqiBEC5Cf0LFd2+0njbrX0eBecpVbCQth69v2poLJQ1FAMjTRtnBRitExvQ/9l3dBJvyRPwBUQPIGCNCkBAIHEsofkD31vv39+7fGXlx38UZveps8/xf/8EnboKwwE4gRgBAYyCGCHEKV4FBMFH1ASkhQUTDSBQUTMo1/OWVHCVSckM3M5v/vvZv/XDftnKxkb9jRfy41dM4KKuORU7G5uPHH7x/Hpncf+n/+BbehI1sNLapIYFkMGmBFWYdgD7IJlpuhrIch08KGg006ryhQ8C2LT5uIiIMcsNCwVkJ6CV1UkSCCW6qhy2Uptqg5iU8SbKdJqhAwSWabgMEElNVZZpoTgAM7jAdZhiAkRQEG+GFRBBKUKYXkECC4HIFAZOxCKEwMKIQKQQpw3AxDEGRIPAIjDFGAkIwLQKTStQhBEIAT0AESEAT93n0yyDQmAUAUEAUFMQtnch3pwGQBhwCrsWnGpbBBQh1hF2hrE3dtc3X2u0svvufujRhx7xVffGta2t69dCNWlZ3dtxBlK0esc5fm1r0V9e23/fm3/+e/3Jza31rXc8PLtay/HP/2pd+EMP3+Hm3cnzF+aXGksri4Oz1+trVzrLR4iELIhDFAWoeudPfPyf/NS9ZvbEDX8khdvf8cCNi7ubjUayun8Xqb3UVKD8bm985cbM8gKMC90rt1+5eOkf/sPmHSuik+GEu6+s9wdSSgAZNIGT3FQgVpNiNoBJQ40lXr92JfvGZ5J9B0WZKnpXldWof/3qlWvnz4xGo21INlXcq0O/3sWy3243kSx2coP2ws7O+rgq/NQqQBwjgwQGeuOu0jTcLgII1mCWa4OiOJLAuIjekwuqjsjCIQpOWzkEImCMiACkEEGm6UQCsGa6GREQYEERICSFMFWNomcgEWFDoAQ868AoIsiCiEQARNMQCQMSIBMJYclsACIzIyCIQgTAqegjAJ5F0XRJgKKp1htJBPH/dRVNqSusFZHygWMQjSiiEsF6MMaxAk8eZJwntDA3ciTFKKlHAJQttlgRi+9fv5YqQDDakpY0rofKJCsRVwmHRqtMs5ioakBigsDBka5V4+JW+egHfwgPL+xRCLmdT/KF4d6Nb3/VX7g86Q9ddHYlb8wdSVcO2SO3t+96sCrLyXNfri6f+uD3//jJF7927snfh1NXl4KWWLHRcwfnF/cv7y3c8ujP/o3Trz6784UvxbMn9bBenjd1Lob1uf/0afzC17N2upXm58/vven2/f2tnapXI8DOK88//L63xr/8oTz6V//eLy48f4kGfbXWWfm+785GceGrXzv3+f988Uu/lpdqrdGpS4qRMXK6tbfxPz4zb2DkpeQNBOkYm+gEPRmFmgBZDHJwrLVSMQCD1IEBkYElCCgBDCyGQJMG9kgSJYBjCd6AEqLonMQ6a9px4TlT48w07r9nYWZ146mvNXvXOs22HZ7c+ec/sYS02OkMML/UWV26777sxNl44YTuebQAiwvQ7OCwdOUuC6hMoQKVovfBr63x/kPV7mauCzXeC+efSlSvry6aWSx1Satx6ZaD3XO7wZHh0LL1jee+cvsP/836/vt7L7+iTe66TDtbKvBgLw5zdcLh6chLh48NWJ17/qLV+W7SWb31wTqejUGr4cC7KICqYUoD2VxaD9zOrkfRXqDRaYnJn/z6y3EcW828MihC1qrdyZi02b/v0EZ3NCftaPTI823HHlnfXA8hMjgvdGO3DLEIMdrEOKQdGWbz+6IbJEqhxmYDl5vt8WSg28nK/rntzV3lMEvylJq7uxsqEWMsgAMAXztDigGFp5liIJweZpEinBa1BAbkqDQZwukorohAJHAgAmbPAFGISbwgamVUxiGQYODIIgIkoG+2HoAgYqITIvBRCWOITEgwTToL+ak6Ya
zSKjVWKaMRNQDG4ANTDNFzDAzMkTGyYgSOCpghIkchYmUARYJnFIoCQYCsrhXl2szMHdzauKZHZFSyuXV5YfW+xCblZKQqj4onofaq+b0f/blf++W/u9X7j6se24v7tMru+q5jm6ZtWlkRpXtpU6p6dml+ZqVVFtWeheUf+/4dPwnPn1MBooK6dAb0lNI3CdLQpBBClMiohBRRzYzMdUZh39LRD7x90lke6MBQIFpeWV76gQ/Vh5bPfeYLC4UzgVUtWiEzIjOhoAUHMFGRASACJBIyymcazBIDAQhXpVIArCC1ktiyX9YjJyQqJyaKAr4QjKIVGIwAYBQAAwg7BtbAmqNWpSWhOm2SNrpmrovaWKxD2V6eJeRsIZ3dl0rbbkERaLT6wK362OEkWcDQk52Te/1xETJanVt48M3tO+/lpGmbcwCaTUocneM4CePCm7ThyvqrX/k2i9z74H0a0iRBbVR0opFMmiCRFQWZ8hWUrqoEWYgFNJImLQIoABwEQClFMt0ZQHAOWIE4ZdMIEry/WcknimKMPEp1qkExBgMqchSyb3jriPkmmWsql2WaZlKaBp1DDKQwsHjECrjiOooEoMgojCIqRuVdSLxPM+O9CxIBFSqtNRGLCxyZQ6gQBQUCC6EKAAGFQBGhEAAG0IJaiY/BR0SlSF+9cDU3MNq/OF+VzRkb9wpj07qu5tKWVlo3DQU33Nk68/QzNrPbPa+AEqtnE8ubm4OTL82+74PDYpCVsVUPr37xc1nzYP7YW8c2ffOxO5//3/+G2b1RF8XMh95rf/jvrVkrm+d5HDszq0aqtrLjP/rM9je/2Mln61ii862zO9tP/l7r/jO80mltXFxZnCm9K3qDNEWIIXzrKzi4Nl4vcPmIM3vl5o1qcl2ZsLhXJTvrFkagBPtj9eLzu//lH9m3v5/uvn9OL/LWjqlG1dOfH37jc+bUpVRbB6LvOGze8S7Yd6s9fFe8/5386jd7X/9cY/e60QmWDOSSchtuXA6u1LMN2JqAVo6UXZv3ewXv1QrFTWrbVKikLOpEJ0ql5MEgotUgEdPc5JZ9HSGri6wNDaNrrUUbWxeliKjJKX7xv6nzXyPnxRAKG0CoPaYMaQTU0O1CFuda3eLsc8l9j8c8jXWw2yfg4tf48sXUp2sH33m2vtRuVE3vr1wfxdrSGEr0Mrtr0qKG5nbF5e6kU8axG3dNnE0SEkJfl6N6cXmp6m6Xk6CmMUsWppu9MTzdVAsAMpAOAD7KlJehlfYhEslUsg+ANWoAAaRGnuettmpkRiOxEGEIEGsh0N45RuDIDPyGkaJ16flTM+Puvr9ysG4BloPBM98IL51MTm7RUEBXyYzZOLJ0x1/58f7p0+blC/UrZ6C/l7MfvfDN5Z/+G+H2hfTeOf9bX3Ffe7Je302PzK2BhE/9aXOijCfbsSricL0LWWaaJgy362tn1J+9vj2o5289uHhgttlZqNkG8eXWbpakPBwnykgVJqNxujK3us8Uly8Mn3wuM9aVIw61JqUQBcMOxyS3iSMTIAqHGJUmBs8Mqg7VZKKN0blNZ5t7vTpaM9tqe1eYs1e6v/u7sx/9ST+7mlaTyearraL/1d/6+Fv+4k+09i2t3nfHvvbi+Y9/0tfl//Zj3/1r//V3Y50aYzhwYE4ULc92QlHUZVAsri47adLdGzUbHTOTXN6bjE5c9THs9EZWqyyH7taN4y89c9stPxH8TiOxCiITMvB4UkBR6BjbLXv9ymWAyWDvyrXTZ9c3e7s3dkNko5PzF6/9yec/E8e9K+vnG51GAF/XFQpnNlEMncTWZTk/02JXD0qI2jpXJoaUYJ6Y2vN4XDEEVJRZTJJEWwyhJqClvFnUfrfojYJrNuzM/BIg9YcDI9lWv5jUtc5U05IyGQRVBJpfmWtavrE1XGqnR5YWx+MxG7s5qg8dyjZ6mwN2MKnr0WQ5nVm/dnlSDWw2c3XjajNvtefm69LdeXDtA+99y2987JPVpG7kqUmoqu1wNJnNbd6AfM7uToY3Rv3XrhtD8paHbn/mlVf3JjVqHA3LccEqwWswSvLUu8moDo25bDSpiokvJ7RXh7yJVs3NLa+e721lGBtNMwMNn+QX1ocpdbr97rHl1oPtrI1EdcgNWkUxsjLgWQ9Y0FKiYojhf5GKtFEo2sWAlcsTZWzqQnj97FVEp+pqJsvijYuH3vn2WWhd/pPfb3K92o+LKs9iTRDnIGkDN2ICM6bXG3kfhYRIhbK21moi7z1qjSkzxaAEAH1VAUdBQsTEIjCjALXadOzW8MR3Je96R2g1x6NxXF1pvfc9k0vnzaBvMAIZEaAoyGwUKgRg71j5qJTWJBCGNWlpKMHcRKtQJ9ztS1mzL8jmCpzHzdjc6mbpVhHmvIVRP+sshzhh59gFdAERKE9R55G104vJg3dNji15D/jai/UzL3SUGKDogECF4IE0cozlBFOIxGQMRYhlqY3SQlJUWptyz91737uuvfS8rr/I+5Zab77/5O6FI+1O1V6by8X1a+JIAsAYY00AUZOyKShARvastECKQKCIwAMakpoBiREx1CwchqEczJqmTQDNzsBuXdz51MdbhYOgAjOxRBZIlKCIMAtiQJNnEEqY1JBacDcrthQiTLtR0ibe8yODpRmz8EqZLRST1B56sPVzq/D8a8Vv/Od8MgY7rTQkIAXa3KwIi4gMEBFIMNHAUZxDQE2aBDmSIiW1JKQWn3zBnXuBOpnaxaRS1WZhbEY2gI6wkMO7ngjHBp1H7z7+67+UsYuUeRZUpEhVZWkTlSitglRhSluISgGRSrQGAGSIIYTISumyGkNkQXBe8jSXEHNtlLJRKS8SokekEES8T5QCuekqmpo0YuQogggEAMKESk0bHBEA0MdYB/bT+XxqF0ERBoVAiGpaXSbTNjQhvHmULCJEiCAwPakGmUKOBCJNE0ciwkyKtCAJy01dSphFEwEysyApEUCJItMLTZ0gogj55g+BEAWm4GwQnmpdNP1mmv5SwCzACmkK2I6AUcQNi6e+8+RLr9Jtd9/+wB2PgMk3d9fr4EdjsqQndZUkNDlzqXX16relfteb32S31PFzx5fDre+446HXW1nRst9c33NRXcuSJW1vcX695q99/tM/cPju1r61ajjI82aS5r2LF/7F//Zjt5rGjSSzzdSf252H4WYxevBv//DuTPtaBaOsAx72z7Sr5741unZ1cO7SftOS3RgubA6Pb44LmJTgEygzdBoTYKcRhbP5hs6SItKJQe1Fj0BfGsIffuz3lUUiAWYSttpGlRRFKCP1oqoZt4dlQNIushtzjJvCdRXF+xgxMhgCmvaMEWlDBMiI09kGALSlLFVpQomB6OqyCizkA9ZBXOSIwnHqSBMSERCQ6Q2CKDjNFZppdTYLESKiVexEIgBOK/RYOAISRAEStgkJYPBemAREIyvEGDHKTelwSsWyBCCsQHmOiiAB8AGrCNOQTORgjdZTfUlRjFFuqkkCIoSIbyTzI0eZymSEGgEQYxSlEoUmMSoIsHCpk9cb87f+wEfmD625577W+8oft4wl5
RlU8KDQ1pWX4BA4SwlEsD+a73TuabZfG1Yq2qzVZhilqaq8dwFcUTVcmG2lN7bO84yhg6t5NrNokrjZv3q1W3RrCqLnF5PbjtKhu5uQla9958q3PlvtbM8CL2bJi//uV+eMugVVPpM02qmERjEpJjs9Y7vHltNXf/VvSz89qsCsLe+E7f6wdlU9O99aXTSNeuCvbE6cbZlm91K3t7t32z0rUJd7f/C7e3/2+xdm5/Z/8AMPfOijF3p/cEj74Hv9P/6yvrKbT8p7bIJeVQFDpVwdbZakRppazTufMCnCzE19hgwiKMDekZVEIzMAoESGIMCIBrUxDKC0FRESTwgQYwRAY6ZmEGAWQFGI2ihhIr/Tn8T3/fS3GvmdH3znjsq73/7K3OLpdBDBJQZlTQgAu3tQPfzu+R/6SO/KqercxmInndgCVg4Xi0f0zk6So1IaXTUZhbyjHVbN+5aa73rPJF9b//IXF47OpNtx9Prrmy+/trR/Du448szxzc7i/MJ9bxld+1yeB05DyjAa7F5+8nOjGLK5lh9RlqRbW7tKKW5nu0hnKt/D5GiW7l3bntT1zOrStVPlZBzTXO87fPDy6fMSHSKptj5439H5LCl3+fDM/MWzO+RkVuexNxjt9FGlIQhw6O0NOu1crC1K5yR2Ws2GSe3sQkbJaDQcjYcxBA8Mwsaq6AIJ55oaFocjd7R1eO7IwulrpyiHWE2Ort6y0+2NivHoxmBno3vw4MF6Um13NwwmWSNtL7eunh4DAPs6kgZSyEBEgIBEIlPRXgCmXg6c1h1oo5WxoPSU5wIikSMLB+EIoKxWSmmjgYEFAgZCIWatNAAwB8VKTZF2oAgJFToGBogRGCDESIQ6iCAZra2iVGlkQBQUCS4Qx+hCCJGApn8TDGFgZmatQAG5GMWLr73oSGr6uyhg5iguRBiPW66wZK6Pt+5+/N6Tl060NudX55eR1WQ4JA5JZrrrZ5fbje97/1/cu3DyxIkz/VfW10K48sILb/rBD6d3dIbi09kVH4rN8bBFLjdmPPFlqOYee9uzz517/P79u5e3cITl0BkCpUlZKmoXEcFARPJOICIicm77K4sHfvAH3G3HJoN+FRyyadpmJDVC8g88tHr0wPBr3xhsrHeCbzhxXWcrVKQmGKqVTji8MnA832rt7I2X7jqU7lv0wgqVaDXq7rWbxtVM7SYptVz7099+dTjuPfiuRyuFAtK/0S17Ew++qn2SUH9ru6GiEc0TJ8js62DS3SQ/+ODtSZpNyjom7Pd6i0sd1cj7RWmzrJXKuVfPx9c3uTnfXty/eMs9AkpljVsP0Kufv6CoXl5YOfaWN0G6EqnRyNqhjqKU1WCThDRPJoP5pcaNKxvnT50rI6Z5opQaDQfd4aionKs9Wh2FlAAKWKOtRgPBQARFU6y1FmGJkYlAlFIBBIADszBrUIQgmrzzYMhoI4hELJ5jrAMPFTaYrABHYZt0iP684oARSEQICUA0Ssa+rZKUSJNyzCGEwLHw3isUZCISICBiBo5Yly5TFDG6IKXzNUskSo3RQkoic6wkemJFkUSQUoXTECfATces0dZIErHyWgECRh+ijxjo0qkLo93uYmYOt9UtRxf8zmTn9NOdB95Ci4tgTCwrtX5944WzGcJEeQWkCU2s86EbPf1i2Vyztz2my/ELn/qdvXPr7/qVnxzMNEuNamE1fd/940/dUGM59ND9A9PxW5fTqlfhHM/OG5WG0Sj0j88cmBsceWT28Ycmz/7pzqlLvjvOnv9Oc1+Gpdvp+eyxJ6rNy3tnXupY1nWtX3u+98IZe/ttSz/y4ROtO+fe/dd6G+uDb34q23561uhqUGvbyAXhm1/QF75FKwupnYMbezIYpYNSDbvRBVLaZ8rccScs7BOiOlH68H3m8C2dO+/a+2//It/dzZUAEohAI9HtHEKAYZCEzOxMaKSAuqELGE7AIkiAFLLDB2FjOwsMSQbsINYAoexX2coyuiqdxSorpNilRpOZi1FFCjLehXN/XD77J0lVl0EpBcGHNCVjLTRsf6+bvvlRc+yuwXc+rV9+vnzmXGvz/fltj4Qbp1T39ZnL5wPOmMV7Aq1INdD+/EysWoShkgxNqtOq5wapbMPMYsN0u5tHU6UrkfmZ/Q/dN6idyRrzC+3JcKKshYKnW+MoAAyRWSNEZoUCU+ctoYD+87dzZEbEAMgx+igRpWYATcDQJGN1ypGqGIvKaxCNOk0SF2ofIhuMCKRvuoru/dBfev1/fi5cfPnCL/3qoUOHJ2cvNPq7cYyyJ62sY5zvTpx/5/teyWbaB46lF3eyynUandlOsv7qevLU89AJu3/67fypq3PSaKysEAH3epiCffzB0erCJKid16619cGl2blr166kD90zGa+7y/19R1Zk43J15eTYJOUAOkf2N2baVTEIaqKzVjrXTnjGla4uJ76qmnNtpaWu9rTW5KQq6p2jRw/+xj8vB3uX/9Vv7L92UaNVlNTVCFkIUSJiK5+5647Tx0/RaJxn2qbKBRawtx64c/uFE43q3wy2Rugibe4t7E+P/Nw/gFse/8CP/Z3jr56or7z2+IP3nvzy1x5505EHjux79doeCgpL8EGBGUyK3KrCxeXF2d3eyCZmebFdRFfVNUY9HIe0nWS5sQIqxpZVr730zPnLZ1ury29609tjf9jr9Y7eerTdVrWLRb+/ee3q1c2rRTl8+luv7fUndYgRgDQ2NK5kdM+iv1L0Bg1dOrfbn5gkXWw3Ekq29nqs0WkEQy5GEF8EXpjNuQ79UTnXShE5UUCoyarSuUwpCKplMxKGEEx0hNoQGQSOQYBvW5sZjVxR1f3J5ND8vK5cA60KmOiYtWe3J72M/a1rq3kref7U8dmVFWd11xfZbKKtSRs2t3nDZFe7vaWVpb3K5SrLyEjghXzuHY8//P/829+tRUjbqoy+VPVg1EAiiIGjDpChPpbNtIJOU3X16vpoHEIJ6EKD0CupA+wO6vk8SxEh8Ewj7+726iA189JcOpPjBOnK9qWWtWud9mh7YhI73JvQqDjYyI8uzKTECRHX0di0dgEEY0BRqJCbiZqwYJREvaERTb9kifaeESHVxmo9Kiub5P1xwVQf2zfTaunzF17/s9bxGjqXn3kxxcmaNQ3xFMHVMILCQ9MT4qQAqxKdlLEGie3MoiBEHxIZMkjEvGGYY3ScpAqDhMASiZi0ghAh5Hnyvve7t73bzc8licmzhp6d0QCtr3+teOU1FYooAFFpBNDkY2AQ4CiAoMnHqCFmmWLP2G6K83FQm5ZWSKAT4UiEyJXq3eh/8rdn8YN0yyNLy4v9s8ft+iXK0iTPQvTKOYVCwAReiXKDwnQXs8ceoJVWfs9aY3Vx8sdfzJ0QkEfywiiStDOQiCqEqjJQWauYlTKJMimXVdytedAt/cnlZl6dvTS5urF25NCtKweNS9K1B6szL+rKs3c2yRyTZoWxMkzMYqa6ucAkSlydxSyDfp10d5A9kJ0m5bSFtgm7n/ztlYUlGJyU65erl9Ynp8+3B4OMFSgVJRqMoCCGCMygVJgyblWt
NAAjs9OpYQZSBDECCCDMKip+/79mh+9MW2OzfChda49msv7i0Xh43/y9++Gn/j7UAkDgBAggAUAFzgEmYBBiERUIg2YDKB7QeZcpmrYrRSdpaoARrzjTUFIBRp8nSUzMuI6ROU3mtk+Mbnn4HtVwV85fYFn0jiHTHiCCMABqHavgYyST+Bgr5iRJgFAYFREHJjC5JQbIGqY/HMcoJArJai3MmhQhUqoalGn0kYR96dPU4E2GIzCLCEaWKDcLF4y+KfDQlGka2UX2DEFuij6aiKf5LhD9xrA9nb2nepDc9BvhVCFSRIIAiAHe6HQADqgAAUWQBBk0IgoSwJRyhEikFEucGlIQCZlveotAmAER5Y0DEEYAwenAAgggQtN4nDCDiCCAsODNFBRiFEAGIUUgvTE/88zx554+nhqdZplKLSq9vH9fpEITFai2diZOzX3m6atmMqrAPf/N1//kay/Nq6THOkS27Pds+8KQj++OUmycffb8pe4v/dN/9n9EV1zoDmfmlm8887nD6Tzb/Klh70Db3XrrwU9f3eoXg3P//n9++O//9YXFg9BauPSJT146cWJ48cZgZzy70Hi5P1RiuJWGUIuRmCImupXrRouWlnIm8AovD4qxUqeLcJFVMaJSsNa2DmI8N1qJtqQUjKJCY9w8lXUIwqbyqnSVi6GOxbhSKEjkA6MAyVT1A0SYJsUQRN3MaBFhtAo7TZ0mZAn7w9LVwmALJ5WPIYAAE4iaKnJTmrmggEznSwCYLpwwjRmKMIEiMIoA2PNUALxp70EgQACNShOyTJcQI2giQyQApUSJrEEMKeFIgAYxhuAEGsaoGAthz8AiSEQap0sCgSWiIQIAHyMQEREhvIGnAKVAiWL20XkCBFCgdAQIESMDWVBZth7hvp/9e8m+VSpdi0yjlcpE9ta3lbEqSyNwXUelTJboKjI7FiYa1nOT4uEQLOgrV85ZZXIDlKqYNgYhNGxEKKtTryTXzqvZBmmz3u0Nr2yupE3RaWg3V9/3fQu3Hzh98sX65CvZ5RNUuzllxyUVVVxuzSpU3ulx4SdYsYmqSTSbD7oVX1rXxajerSczrbKoKkVdG7GVbHI5O5umjjRhWgP7MKnl0CN39Pvb0gudxtx4o3hgdzT6l791YVDdcuxop51sX7ycACWYABnxxE6M0cGHRiOra0dGSfRWEYNEBMkwgBir2AX2QEYLclFXZHNtjIQaBZPEei9T9YGtFiQ3KVOlxE+9mgpZCFgJIilghWCCq7WWVkJDGbzzrW/ybssW3P/6x/P+jlY4Gk1ss8FOKY1Wczj7ZPHx0+bGxuwkjpzM/rVf3uq0G8Otyed+jfvjhdVsUmewOp++Zf/GxsV2lN3P/HEnlIeDokHa2xxBsB0N6ST6kNz75u+73r345Oc/e3TfvjufePfpz/5OnoaF5tLO9a1mmi8cmT/74uZwu/aurvP07GD4Onl36OAj9z9y6sXz1zd3yzKsX96dX2zPLC5tXO4uLLQXlmcGg2p+frZCHlbWkm2tHLxwajOfbx09tO/oLP3hxz9d7AzQ2qqMJlNpkrCgImzkZmdvqABiCAvNGU12/crVZsPaVro3GEaAJLUMKJFLhtT7R+66JdbbZVUtJNny7XcwyaULJ4s4ns9zYdVOW1BHNx4szOR5I7Npvrm5O30KXF0LRVRakRIgUqSUng7hrBURkZIgTETKkDJKW4NaA/NUD2YWH2JEYUJg1mAIVJAYAEQBgTKgEBUIRREIgghBoiKlldKgGCUge2BAQEUSwpSTrZVKtNKECuBmKi16ZpY4Zb0ACbJEQRQUTaCmrHoWjuBrr1StMoMgGgEJmSMRGm03Ny4ED9d6fZ6Px3eu9V7+1tuPPTLXWvEjp4I7d+rs8vz85fWN1duOrB2dP/KeN3/yf/xJ98z1yZWN1lc+8+b6YS4nc7NHxjubqY+t+XluprUbWCUXX79+1zueGC7A3d/zXZf+7NnumauhqlNBrmubIonuV45Im8SMq8rllNxz5/0/9WMbzOV4IjHGAMBBckCrXV2qxOiZhZlHH585vDjYvNyhUB6/rq72it1CVmZW3/3YYHkuT4wMx2slK3AVyqhb5HlKjTSy2q7L1vIMtRrFoA9J2nrLgx3DrtGoAmqL8/sPDyZVlsboPCids1NGYuG0qwKMedhNOnNz+aJkHSEq+8P26mxe+c3uXqOZK9sYbnaNyVceWFVgGFqttUNesFuNu6PhpWdOa7syf/fdnc4Kpvs47ZiZeRalrDI2tdoGDEFAKTXu9/Z2tq9dvzQRF0Vv7m6Oq+Jqd1B6VlpCVSlt8iwBRDDKZDqpyNQSmFEhKGAVtRZrQSlShIElctSKAElJFB9ENCmjNUkQTIgBSBlh1pj74FixiDeCisOU1AgALMCRY4gISgCNwlaiU0KtNQiBEEN0HCvhmiFCREU6klLGc2RhDjVwEoAAqAZywRPUAspAIlEEWBkqgIOERGurBDEqUiQEoIUjAgEDQTSJCpVHQGFgFlIwHISJH1XzeWdp9gabhXm18frzzn787h/+yTAzy3s3bjz5J4uzWbJv/oH3vPnCiYvD1y93L2+QQZX4zc99YvWDfna2dfXaRuX0ploAhxxjqtTdH/nwk1/6psC4zua64za+dHL/XNAHbxPbSQhyKrunXwvZytoP/80bmV19y/vHJ1+hL7zoPvex8uwlL1AdOJZ99B/s37d65f/4a/4rf9LR2hXcUOP44ne218+uffeP2nvunjvUrJZavd0+CCet1DQTYYCiUtsT6k58cc6wRtAQOIjElCIzHjiAb33fpFaNlrJz84E6gC165F0Lqwdu/Nq/KF95bi4FBI5WMWljNCQGq8pv99gl2k+k8BiiACDpetcnkwEECynKGHsuqToH2nc94GK6uXF+JlxuJuM4vLT37B8tvO/DTkEZEXfPJTeeoqe/ZDbHxNoLgwSbKfElp42ifdvMj3+UDzzmUzt79N17v/VP8+tX4rc/rc5/O1bjsLOdrixpZFeXtrWqr91YUq2tYkMJjIOetbnnMOoOF4+tXbhyfSV2luazTKuofFhdqxqLxma+8iefe7UYuaJkUDRN4iNCBIhEAGxQKQAkxTxlgbIW9AhegggF0sCsgAJKJdEBcuCUqHZhMp6IsZM6usAIUYtLUonsAvuomNTNagIAmLTz/T/wE7t/eHVur1e9fDrzmsy+0no7Q4vzi77eGewz9tE7fUvPLy/N9w+Pn3yFggavZq5Nxv/qk5n1C72ezuciKoqBe+NG2tyUENcW+fHHy/4IDxxuXL109ZOf4FZ78sB78E0/vHvydOKKvV//zblm0j60uELN9efPS8tgO62XO9gdAVShncd2YzyfDtozzQHj9XOaKLqQGEMtO3N0X2gbc/Dgm/7FL934Kz+Rco7NecdCWpsAGWhf15sn11e/5y9WdvvqN55qeX/41mWoYOxFq3Z9SZJRjuyysMTNhcU3f3CrdJtjt/TAfQfefMd/+Zm/w6MqqSc//ws/9dF/9B94FC0pi8IKBpMJc4pI1dZAEJsJt1LtxrXWyoHv5Nn
W7kgDkiY/qQ4d20+gbtzoVQ4+f/mz+xaXFKSnz3zje973eHPBfPuVk3vb1zmMhoPBZFBOxpXNElQ0N9fGMizntLlxyVeFjhEjzDSyGgjrEKTOFKiII4n90chY9AJtZQzASLgmO3JSO681NG0SfQyhbtn56KNNEgj1xJWdVtt5l0Ksa1e5oBBI59vDShEmeba5M0qCMIU7Dyw1m61nTp03iTqyuHJwed/l65tJ1iLSHv361laTbEdnunDNdrq7t3v7bUcv3OjVOh7Zt//yuXMrawff+463/NHnPlNF1kk6qf2orPZBvHV15dT69aKKUdOLxy/fc+zI8kyr3+sdvuPgd145axrt+++fO3H8QhkxqDgqJvv2LypFOuTEtHGtbxDzNBFbLM3joNoZlePVTmt1YXZzs+hPwsTHmXZ2R964dTZJg3OBMU3AgEdNVqPGPFVVjIiSGUWCdeQ3Zto3pKLJpE6NNYYisyhe3TeLWpd11lxM73rglryqXv7qK++7757lv/CR89e+8j13NS+cG18f1s223QO3fPedh3/gL17//B/NXj4fiqBRoSiDxC6mZGrirRabu+5yPZxcPrOKogApIjIaImZAkume2FqiTqd2Xko/7YBRTiCflYffJq7yr7+eeAdoFWmIoCIRE6AGpaxWMVCo6kQUKRWKkZppU+TIjgGIo9EEEZCSlFV2/jp87LfT9ichT2fO9Ixk7Dn0xwgsLkQhNMB1pZWeNTB8+hk/P5d98D31zKp+1wdau8Xwy1+b1RTIBEDgyKRASFAF5SsVk9ySJ/acECskAKRQhI2rrKjVmsnKYe8T/1MvzJR743w4SUNwRSUxRvGgrdgMqoDRg/OhQkYKSYbv+6D78HthxprN0eQ//Fp+/qyJzIjAxCwkeuHMhfKvftRyVM0s0w0QCmJ88CyMWkUBFsaIOHWdEImwsHiIhAIKmQhELPPNE0yjQGJ+4hV47UWYUdVzT5v3vr/9o39lhzTMr8AqSKuN9RAkAhEgQmBgATJQe9Dg21QcnVGlVxcGWQQDWgMykgg7H21mKhdSIAME01RLoisfR2VNmSED83ffeu5Pn3LXLh5/xeZ2tsKkrkUU1tF7sahp2j2vCWqOCgkBvXfG2hACIyWEiVGjomikRgEZsiCRQFVl3TDWKuAYCcEgkU4rAULRGseTSZJlNxUeEBZmkACgkAhRkbrJBBIQlhCiCxwYEKYUaSAAgqlHaHoFgDfwRQTT0BneZFAAIggBiogIMggAEIgQ1hAZQJMICMO0+xzkpvwvUYJSKIggzBEBpojI6T0FnMpJQiw8xSQjIANMsThTfOqfu0emn4Vo2lUHIjdlJBFxBFobBI2EnrkqKikrBNjY6qUJzrWzO44dOvbwna2GSco43NweVuMyeFFUu7jXnyiCgsIApJm0YooNo11PLp85+z//7S/cc8+dMwsd1Gtb159cvSU3SzN3PHgPpXjjerHYvP2bv/2lbZ/+6c/8a60hr2B/Bk3bGpdxbm3l4mgALW0IdnfrmdzOLiatxbnC0oDthe7w0pWqXwbh6AQ9lxXqYBSIjgy+KENgRZDWnplVokXbmseuqoxIlKAEEjIM7IQjSwQgFhIgJIkRFE11HSIwGhmAgIAjKm43qZnqRqrqMnSHoajQs64DOOeiALIoQmC+uRQQCOime2y6NAQQgW76jG7SjOWmlAhWTVeRaJrS04VBSDA6NkgJ4ZR+pEl5jpGBERFEISgSETX1rQFzFAwsjKAUIjCRCjdJSBgis4DShEjIoGlKtIj4xqeGm6FFJkSliV1gASIQJFfUpDCKBFe1k7Q88Wy4gOHkqfTGNbW115ybb3ZyrUVlVIy8thRDCFN+VhSTNqLg2lx7b3dn3Ose6+SFD4ZgMJz4SUhUIPZEZq6ZyN5e3Nk0iVXRWJsTQESsyvHlb/zh9af7iVBa54urC7GuNq7sqtS28hRjpVqLBx9+E1fl8Wef3EPFq52wcltTFuzJZ5JQUc5FWXsPvtVsf+AJueW2xXYzuXG9/5v/vRFqZXSjk8Hiwvq++fr8ZnPsci5m88S5mJkkPdgMsb9xedTomIhh1O9pyZSQtjo6J4RQMYYoogLHiDeHGaUJULiKBsQorZVhz2gSBorOa6UA2fmACpACI1QVizZJaqrAOjFAigUNIIfAUSyBghA85AqUjdbE9rOf2PvKf9EKtIID7ZkyYuVFCOqJg8iUUyNDcnvq1I422Gx3pG52T708rE7tXXplIWrQemerr1tZTPlSCK0f/MmXnrkw0+rPXPrO3s6gUdhBXNYz7dH22faNutd/5ZYfeku956yb0GDv+rUt8rJ9I6zdvdZEt319Z+JqjyamqkDVJbWehm67MXNk+fL6xvETp3yEqExENRrUZbEREPe641azuVf6hgbJ7NZosNhZuf/hRy+sv2hN8/pOfeWTX1jf3NWttKoDaADAJG/kzWw8GiBIMptkc6Yq4qX+3r6V/a2l2RwARTppc6vYsWCV1mVVaW3YlYtZur3RXZyfW2rnw3F/q5iMmZ1Nk7Wju5sXRemdslw9tjrs9Ya1M7XLG290P7FnYWEmmyhSUQARiQgRFaHWhBIsKatJW4PaiKI3fJ4SOU5LxiPdJFFrAohBOFbOgY9WaSAAIBZmAeQYkRVqZEFgmh58SxSOAlPa3ZSYrSNgEJhirKIPHDz7MngnzMxw07OISIoMEBEpgCBikQSBA4AHxgCEQIpjQFKkVB3iK5euJFlmm43L69fJpMcvnD9/8tx3PfqeR24/ltqODK/PHTrkNk73h9f3zedZ58h7fvJn1/qjl7/86dHWpU//zieZoWOTZOQynYHJxhiyGbOyb35v3W1ha+ad944X7jZ/4balhMoLl+Da+cEzzzWLsa1dW2vnZFjWRTtrvffB9uPffUkZ0lFCnTRsk1IyRpEWhZRYZRB8BG0Gw4me23ft2sZ1k977Pe/F0pmmHWTtrUloW02hePLLX3novgexYaqKKwaouCh9o9koHJU7RWJsXbvupJ7pNLpDx5Q2VFKPnHPRGstRA1pMZkauBANG8273hieayfYpTGMZR7EknY7GVI6qQLZtmuWwXDt0mIugIvvSpWkjuIqJkYbVYL3u7+mInVbTNmd3+nW6QLkYdi5JSGJdeVezH7lyZzja21lPZtR2NaBOJ6bJqc29/nBQVg4VJRpVYLIxohJjWYHNktwnlffBBSfimTWiQsi0IgBCrUgEARVIiMyBQaJSLrAoFGIVARRqmyrWISCwBpx2eyqeEgZuHmyICBBRZFHGpBYTrQAoiPYRAktgroNHCxyji+w5AiagKU5r+AI7B3VAIEJB8gHq4BXXVhkgSpKoPURKSBFQlMDCEDACImgtpMh4DMITTWJUdD4QghCygCb0QXpjt96tTIqtNE2oGn3zCze2XmrNHxyNu+vPvtZYXTvwfT9Q3npb57Y3h/lvud/7jDbUNmSG/b1P/wEttmZnTHLbsc7R1Z3eYHa1BWWF2TIdWK03u7Jx+vDa7mv/8z/NPvrI4l3vGxQ1kneXn8Gaq7oP496wfetud9RZvS
P/0G3V1/9bxqqq6knKrfkj3dCs3vJ96qUXq71eHNVoicjojb3+f/8N/63P7b/32O4zrzetzxvZpChNDW5Yaa3KkWTGYBVQIxAHhVAHEPFi7NJBmb1bSl1s7TKkdmEmMEpshcVHV37h93f+9d+Kl5/UdV8ZUOBBHABDVZos5UCu60Snm7uk77h9vHhk5cF3D599KT31VBWd+sCPt9//E8n8rGidOlSXr25+/P/L69/Mk/Het37f7D9S65QarbnyeHjuk+7cZQimiowaVCJJJm4Y61nT+MG/XOy7n7NFXwvNPiD7Horn19VaB+bvCQtLMjrr4055ZSdser10eN/8HGGK++df2tupJqHnKiZ1o6gGF7YTl+53VVt7Z1FW0rf+9E91J7y0iL7Y3bl6uqyrOhCBieynb1lGQFRImhkUCIOIBCSSGB1TVOiRBIhFgEEBuMgBpBKpffTEWntQdXSxKGNZOxavEGQw4ulm2mJnpqnkjaKP1swXvvyNA0V9H8aqimlzldlCtR4wCaEurEuPHaW9SToYdv/0y/2nT61SMuyNkiwRRW2bpzEdi69Bpc1ZSQsGWHf13NE7q7OXyrMn69Gu2X+sHsdQcZhsmWe+fu30C+Nicted70xve+zcxTNHV+86/cmvHJppBfDD+fl9P/czm3/25MXvPH/0A987/5a3VnMzWNX4+1+sP/GKCUxEgDEx4eqLT811P9zNZjavXEnQqCqw29UESoRZhuVEc12Mry88dPvOvvs3Z+f09pVLEWaxxS+dSIZFluS6nZf9jQTD6PyF8//2n4TlFXj8g/bYnaphbzl24MUnn7t83B2emf3Bd77505/7NkIqCCGyFxzWPsmzOrIwchm9ZmWTLLdQld3eKNYsEPN2Y3ZxbuLZKrWw1Ai+DljPL81WYze/0jx35oUrX9+51ht5V+9sbMzP5qSp3bYAoBVK5VxZ75F096qmNSsLcxe2egqkoTFP862dzU57xhq9My4dAihLxFzUqKCTJhOoSx8dEHPEurx//6GtrU2McRL8aOxzjQuzc91JUdc1koBCMjpLksI5jzw71x4Oh8utluvXqc1uuePIl596XiXaqrzTXt7uTna7k/b87GA0NiLjUTl39yxcuDYp49DVLMoqbEgoy+rK+fPzeeutDz/0G//992s/adhkUtaRJG+nbGV5ce5qd7esaqtNpgww+Lpu5fmF81fbzdbAhcFoPNOZk/HkSDtX0imjI4ZerxxM/Pxiw0W/1+0trORVdJSnUNOk73fUqKyk36t9Ylch2T+D4itBqUAVNTLHpoXUKM9efNSALGItGSQkxfS/SkUznQZa02zooGFttR0msfbQmM87y3PbmzszEvbciJazN//Nn/r6f3zm4s6JIhY1qF5dtt7/2J3/8B+r5XsOLc5c/6d/xwD5UYVEgKJYJij+Xe+69f/5Z3BoP3Qnu7/y85M/+pO2xFAFhCBIGk0IAgCGgXd21KXX82OHOLOZgdDvu94uWU3f/bbGBx7b+cX/27xy3Eamm0UJDAEQVS0A0UtArVSxV6vV5fz7PwgrS7LZnbx+wQ67sXddOS/MMaICg1HJ1UphCLinFActHIASy8GDBhYCBNIQkAk52XXjLz3f3H8Q3np/PZu2f/SHRHL/5a9irJUmEoCKFdhCcfZd39N459scs3/yRbp42W1voR9bIPEeYkCi2m0pk7SGhVvfpprT1Prak1VKo9Q1xzrEUiAoECYQAEKKiK2D+8yxpYk2ydKh9Od+pvtL/7R1o0cs4iEIWEVQQwYZKIAyCNUBORKDBSRyUTSzIk0EGpVnJlFcB1ZoEysSpA4IfvpyAYwcmUgBADgGq6FXp+Mt/thvwwsvLj764FDq7lMvzG/2ADQYBIPACkIEIVAK8tBPTP4LP5e///thOFDf/ub43/y7ZrfCAIwYNQVNlRObGh8ZPbBzKlOY6DTVu7u1r9FWeOGzLxz9wAfV3/qRf/yzf9uneaxJK9AAGokUFCEwEKASxBgZFHrvtNaaKc0SdkyKqrpsZEkrz/vDUQhCWiMoiKhS1ApZEIliiDWHZrNZ10U5rkQgUzefhsjCEUQYEIFIayUISpNSxCICEllYgBkUokIknLpFcLoru+njwakQM3UUQUQhBNJEwDeBMIJEwixIxAAMqAlCDIgYmBmACKMIAgCLj6CmrFsQEACaQm0EEW52awvcFIluWmAQbmbcaGouigLTlBIiTLNNCqYNayhwcwBiEWZGBZqQOQqgMUSIIUqO5OuwuVNt7Jz96gtnFpp0y+ri2lxrdd9iGqQ1PxfGxUIxWJ5d2ty4VGt7aPHg5o1rF25cdh2aW9v3x6cGf/LKN+6768Cl4Rfe99gdc4+vfuFrzzV9vOvOo89eHPb7W607bn3p6gBunVnePxulWn7s3svH11XeONnvHzi6b25xKU9T3tg8dfLEi+cu717crgrnAxrUWlsjZMgCkUkgMVpp1Mq0WnOTYX9QjoKPropIMBmVdRgTWsW+DCFK1BrTRDRBEQIzKQSZVpBN45TCAqgAtFWZ1Z6FWVKj0kzmm8TAg341LNAFqgPU3rNgjH+uJ0ZgCYCIJPKGS4hQWKY/AwGnTCpBEgFgCFG0AgBQhIhAMO2VgWkI0SKQACCisAZQiCTApCv2jIQiGsgAInJgBiCJEZUqWUgCA1hDilAi+AgBpksBw3TFTKUrmLKM/t9/iBI4iJGIrJQCUszBC5NGEo5RFMdZ8cM/+SSiWsxbzXajt3BwOwRMbE4ha7SBR2V3aMgWA681ErPzI6O0RMRUO5adwQCbDSNeGqom1qTSLOFxHYoKKpdo5UshDVohg4hIKqFRBT2uD68dLPNm2L+wY6w9tHvj5HrtfVp5KW7Iy8+MhP3i/nf+/V8o5qVx+AEW+7UffFtzZ7DQyFwlosWk6f0PvWfQIFaN6obfrIetSKnyRb83xuEdf+fHtkbvWNZ44w8+NXppB+qimUkxKClR0swGmeoLHPrBJ1SvHJ84pSz5Xm2QmINGijVwFFKCCAYBmesgNgUfIwEqxNQITDGxzokiMpqJvQIU0ACZoPg6+ECJBY5KAFgIhAgpVQGFRThT3gWKaJM2+2ixkyIKQVEAaEsEliDWrDMdYhWGEpBsnghxBVWzY8fPfnmWqjyfGY0K3VBJIytGk0Rt7H3hws6Jpw+89e2L3/+h+In1slsOC2m+//14953VZz9mt88pB93XnqwvXeKRG6gdLkDZY6vHGlvXtnvdrfEkxiKWBVSN5IzD80GSQ0dvu++BZC576qWnhWE0KDlGk9hsphOcz3OLHPvDQXt5YVyNZc/nDvu74/Ob4yNHD67curbX7T7/4pMINEWZJGgAsChLweCEQWGCVDkWreqy2Bvv7Ns3nwPs7Y4PHdhvb8Di4uqZq1c4MzPLs3ng4ciP94rE5N29vk/0qKoDuCyzZy6f1SwcIAINShWTGUtVb3vXvhE6qJ0TrUlhjAGISBuevnOJEIkUaVECZK1RNkGTgDIAJHGaOEQRRFIRJDBIQAwROJbORRYArjkioZqGtkkJIoJXQMzomQSBI
0cfBYD55ptjenBBICFGAahciL5m78X5EKIgBkFQyMipSSwIChAhEWkBZgbCKAKBmZTKtM00OIUS6tqZzJSIXmmZVGE09izVxDfm8uOXX7jjwcU6odn9y8dfePqupcbFl670lT9yvzTtfre29sD//g+K7ZM7p58cXLg+3pvoCKNCWJIsT6Ssuv1QKgp5mq/s02mGqSmUVg89Et9836EfeKL75c/sfeHFpoMgQonJ1vYdfu8H9vJFil7zmJqNzCSx9qCtgKAW5z17jZyOdquNp77SmZvLFlbm77hrb3ZGeVPVUcZlRBhcGl771svVK8MzG6eSI43Ve++jZFEZBuMxULkz1rmebc/VpW/ZtNy53l5cKjxSafe6O2ipX1dZOpvoZvRkIAUxsFuNj6935pZWkwMbOxf3Njc8h/lDh5YOHx1W9W6/u3v5smHq3RhH5r5Ns2a7Rib2PuXXL53udnczJWmar7Z4q+5mi0cgNVFc2tAQfJTI4C5ePv/NV16+ePnK6nwHwde1D1CijpNQTiSyEvbOBzQeKOIoFtRWlBEbTWmqVWEVVRhEkzakiSVG0lqYjVYRDChAbT2i0gqIIpK66UuOAjxNMLNQAIUxaFRIKkSv39gR+QASWJgB0CjMLSVWKa2FiJECxMqDB6p8qJjLKJ4VApAPAugiR6MDqQAKXfBFjbVIzTWHkXUNa8gIISlkQItKc5y64aD2HiK0MDWgWUJKkckHIiRFBIGxDoEFEKQcVb1dWtlPOxIaTSV7wysvnuLRifnZVkSipdYwb4YgSbt9+Im311fODV65SCFQJFN4MK59ZLY32cmGo1mb6aqEGKmR73/3W19/4bXRUy/J535u/sR6fxIX7nsWH3iiyrA8d8Zvlf1Bj//4Y3M/928m0iqLOk/TUliNUCPo0bgzqsbtbHXf3ZuDQsegO5kXIz4qpWxkPnNj89y11GpGHZzH4CZDzhudcX8EEIaT/uJ8BpkajxyLSbRRpZM4wfXz4clPJ627BGq7esj7Ei2BQR+CN+32d/0If34DJteh2ws68dZmqoa5Re6WVXfXaixVtvbT/0S94x3SSNib1i13bf72RXPr/pmf/FFuzUlwHAqVUevO5fyjf3fz331nIctmYv/yVz6d72/PznXKl/8Qb1zHKD5i7ZmMoDJRAHKdvfWtVecggzJUotV1VAtPvNe99KUYaHz0nZ23/FA13qknV8JzX5xLw+725dbafMWpnVs9uOA3uzAqqqhU7QhHk3Quq3I4uHJ06/nzh25du7Lenb0lbJ89+Z2vfnHc2x6PaiQVYhCkKcVKIRORIqURgKNCiuIJAoKanrsBUUAKAgTKx+iZXeQgUZhLFixrShIiU3MYVRVRjMw+YmDyITQyHRFbb3jrzt5YP/LeR9uzF+iV1+fnWqPr1jTbtrm2cPD2vWvb+fy+GGfxX38i6W+0tnZtmvokTlLBtu3Mz4x2Rrs3Bma5kS7NKE91sTVcmJl971/3515obKzn/XFvZ69VLhUjWTz64ODG1fqF7vyMefeHf+Qrv/xLVPG7/uP/db0ezZ7ZGp86Sdu77WZj/JUvp2zu+5H3qbfcf+b1Z+ZXlqEe3/ijXznMLUCo6qBASYy3ZHj6oz/eeOtd+9sHR/3SlUypTlJx3g+0jfvnlmZpZrvoXnzGHHv/rW9/rHnS1JfOq7WDgcGffHk03AjF2BA2G53xbrn3p6+Fu7Yf+O4fGkgcTdh4pXaLzdFoiM9+3wff9/Wnni4mZMFqYB2nG9aYJKauvfMQXMgzu9zKkJ1VWuoiIvXq2pHMIA2KUTUums08T7PLpy9ooknXFlXY2h1Tqt2410i0F2QyiUmV9/W4Aq9aSRKFddYYFl4GUjnNEo3IRMZ5o1VE3igHlUJUupxUragOLXWixho4lVJbo1vZta3+gOFsd2smNeQkAUpaVmKsfdSovFKN3FTeC8hMI7++O3QxrG/taTK+k4GOUfwrp86N2K2utddm94WhRDF5o1XU5dUbO2BwEuJ2dy+gVNHtW92X5E0AqkXVrl6anXn3ux/+vS99zoVJalOlkVScVA6JHfO17nYr0xVBI8tjb9TbHrztTXe+dv5ikmZbW8Og1XgUBntjQj8/ozup2dyeNFqt3dpHpUdBaRLIkqStQXg09oN+1REs0AdBnSoxeqc/mrttDcZjAxGVLoNEFB9riWiV0gjKpHWoI7DiqIHpjTPkm1IRWEhyQiXzHWupriPv7U0W5mLdrXobm9dryfNVm7eube8cuPMW9/rJMUCeZpMG3PvRv1UffWDi0LQaZYTUixZQgDF4zNPkwErzJz86WTsCaGJCre/7/smfPRludK0iH4AVROZpPgIVqVEBX/1S2uoMdu+Lh4/qREHaGUkcps3QktnH3+xeeiUxFIpakY7IxiIQSs3KahGRQGSzSXM5f/+HBkudsgrND7J65juTT/9WozsxSIAU60gIiKSEAIQBmUTYR3fT6Q0I3jkrwgoAIcG41u13f/N/zM00/W2HRq3EfvDt1bnX9ZlLhpRuZGCJC+ZGp/W9PzA+eiBEz3uT+bm5cPY0X7/s97qt2eZkMBZGCZGBE5saKUOiWCLoab1SLcFNybYhshMx1lgWAsiAJ68813jvXWF2oc+4cmCf3bfCe0Pr2UuUwCEiA5ABiQEwskJQFAWYEAklRvZsrcbp6GsUIShSIADegTAkCfgIAsE7MFolWmJE9wYlBQwEoADwwil4+XheTbAGSDUggYBERkEAgshQ1zAnrZ/9EfnAh4qwyGbZPrEMp467j33WgtZKjeqSU9uwKkbE1HgSYjWpvSggBtEq1sxAAcLGlYuvP/v0dcQ2oFIILiKjFjGgJLJEihw4RAYBlsRoFnbRawVKKQBRighgUtXOh9oxgogEJeADImCMYjUREPhgSdURRCEgVs7d9FMACExTXkQACiBRqBVOqxZ84DhtwyEiEATRqKLcfJAiCE3FApCbgBqSqTyDCoCmZZ3ICNP+BiJAYGZkjhyZAGpmUlqY5aYVCImQURhxWnmmEOLNLNn0MBoAhYURCJCnjhW5GTwDQgwcgVDrmzIT3/SH3MzB3cxWTY1I07ATIaMoRUSIAkRTwgGSUiw4TVUUE3nx7PbzsGXpgtaoDWVElgDgtCYSwGfUZaJwcHVRhbBXSM+05+eWX90pemHpY1/aLSfreWL4Su+prz8VBZUBlGgyS1lydbObzsx8/ap76B0f6rQ78cy5rWK8uVVdu3rxO6+8Wk3GyGBMstDMQYREEq2BQQBNao3GXm+83OncfexAM+1c2zLPne65urKJEgbDrC0Ke2YhrRSjsQoAvfdkFIabrh8ixKmRX6NSZBUJKI4kEhA5tZhpLIs4LKT0qoygtIquBhZhpDcihyA3z/ZFeJrpEpwqMYh4s19vavGaRiqnmr2PQgiRp8E31NPUGoghJGYBCBCs0USMgAqp5GiIPKIiLcyegYQZokJrFTlgTTpGahgd4xRxStPPFViigEKsQ0yUIpYp7BD1nwfzgUl0pl2smRlEYozCkUEZUuynjgRlAKnk5f23TJwbmObl
RWP3H243Env+1etn19HVVb+0KSpryYK4yIy6kWA7ic352YO3ZWztXDtlBwDJ3Lze3Nh57tu94SZ7n+WpQsCoXWBE0agMQmaN4ahNu7s9hLQaNuLTdOC+t/50Z99pefIPtZkY4GpvC2K02fgr//aX7/nBt+++evrai88fpJSSpgrcMNhsa1fsnPmVvyMSPGtdhtvzHJhSo4swSSu/9fkvqENv+86X/mi1Hi3ecfDQgw88//w3VVGvXyvo9gd3crzrvgft0fvhma8PXjjurCqNsiiJ1A1UZLO6JKVQKSJhShqzC0eHox2DCsa7HMvS1SCEZLUhQIwUqlRdTdVspee6w4RQ2E/7rtGAiAh7rTUxVJ66S0vlTKrzLO/vLfU3oWaQaBLNwKyESLvKAxIRmcQ6N0ILLKxTlSzY3thPBpPmKMpw0FlOstmExVU6eksgJipZaQD1YvfPvgFFb/TCiQiJ08mFzXr2bXdszB/Kdk+3VnQorkWeqLVkfvWILic3Nm7sFiVpM9yaVMEkWV6mdG7iLpHeTpr3HL1ltLt3/PlLO9dugLGI0MgT0UBaWxTnXUPrGjlbSg8fWDn/zMlQ1ONoZDEq6653e/Ww6MzOh7rvxrVNDYoYQ2MfyiKQUQrw8NrK1uZmZ6a5upoZEh38cFjs9AaReLHd2tvdXp7Jy4pHRZUpvTkYHrxt9dTJq8YYlSFzpaxyE28iporm1xrbvcnmRhcVzLdUliZLC62Lx68DAANHiSAxcmCPCiGgMEdFJFrj9C6LWEJLRFPVVaaRYEEgIMWRI6AICHNw3nOoQ6wjG0XKTrn5xCAeWAsKBwVkQEsUhhC8izEiEJERiXZad8hCMYBIDD6GELwD5siBmYVuXk0pEPCAZJSOMu3knP5FQVKICgnQGqutQTRVOVapBmOAyLSzelzUHIsyuDo6xXvV7ouvvrBy4N6Z1uKRN73p+omneuOx1e1qa/v+B2954do535zLTPvOt3/XYGX7hZ3t5bsP7lzeyEXmk6T/ysX1k6daqwtv+/731MuLrJmii+yKECeFE2/7o5m9DfCWtTWDsXOXL734B59MH3li9sHbsIgqEAfu7W6ZhYVspinReQ1A4vvF1770peGla/sPt37oI7e9cqNnmgvFbt9V3JlJOjnNJvDy8VeWZg5pBU1UK7Z95cxxxb3O/NrB5UPPPvv0iSuXllYWqxJ8FWwySR5+NFlcZVdYUMWkkNpZNnX0vnCpSWZbyxJMv3KTcydOnHshKBVrX4+q/mY3nrvUmk1ktAeoXF1vFb1b73soX7xl+b571y+dPXvh+LXJNW7SgCc3+sNxf7csDs7NHzrQWtSly+bmxVXnz5/aGW1ZzddOn93tFxqTjb0CVZyfaztAD3VwlUTPPrAIktbAsaqLqs4UpUkLjTJpaq113qdG1ZHxJmZhWraqSAgVgSZlTaKVADAzIoASBBaOMQYEE8UwWecEOTBBhEiA+MauJkw9y4CGSAunjKngTXSiuplcd7UUEkchlNPLA5MyAAo5qsiGI7oCCCWKIGmlMJAEGdelzhJSghAjYskszF4BBEZBzRaBo4hhjC7WtdcROLLRBggCKYnCMbLnYb/stVLIKStjhiFD0ZnuBYeixhvd+1nc9m7BcXl1ya3u33zxLIXYThROpNobS1u1JBu+8pq668HCV8Fz48ByduCYSIwXN2ac6QGaUV2cOusPPu6beOGzXz84pER09+tfPvCmP1659db+5Utn/+C3jrhgtG11Uhz1r/7//sHkjnsGr3/9/rlkstnj1IoLJIyoKMRmq8EShZm0jgrrpm8dWNq85KyLRussscOI+vCKX9qfuFb97LN20mtmNmyv8+d/y6drzbvvhrvvitJyVmttdNnP9jY2v/TvzMapzvxKb28X7rxr9if+brl3OVm/vPelj8+2GJxUabP9lu/p54jCuY07dnP42J3773xrUZNNHEQPwZWjMTYW4uzSgNrpcDuds4fvm/Ob5+KLp+nGjeC9oGLFiiDJNDCj1xA9NOa5qsLoQrI0SVUzVpvVqc8lbdyyjcW3fM9YNM8drmb228cXx9/8T9DfnnvkHh7bjfUzR9eWXrry/+fqv8Psyq7zTniFvU+4sXIhowE0Ojeb3YxNmlmJlhVHloOksSzbctA4jP3NfB5rHGbGI4dxmHHQZ0mWLFmygiXKViIpUgwim7kDOwNoZBQqpxvPOXvvtdb3xy3I9uAfPM8tFJ5bp+49d693ve/vvR1G6BAXc4cIWkW30I/d3tm3P+iKQl2+cGzl+suXJgeHe/u7CdQ7pynOzNlHzTCgBOqIidhM2cxiMHWozlAADHlmszQDJVBGjTGqaDIwEq4ln90jyUKMTYpGPiYLMTI7moimo3fBYorFwa5uHHAdw2FKI7ebudZbH2weuT+tdWxrZzXhwdffYDTvisFQ7h5fWPxjb6m29+KLVwrAfKEFc3m1v55VldAow4Xhy6/2NtbdcD+EeOKhc9NU9eZ489b2ZGf/kXc/dWfv5vPP/OR7fuRPb/72M+v/7mcP3cQ/8OD0/BMn727z3sB97cU6WFsvxFXshEG6PJKvfvE4LTUxeCJkTIhIPsV4wearj1+p860W90OOSYNVMoFCvv07Fv+7b6/3X3/9b33fm/YOzh0Ot772hfo3/313e2zLJ5jbqVZRFSXfLgfjkLmyyFvH3vzuueP3TcFH4E6/u9Qr8pVWXXLLpZWF7vVpLdGKwpU5MRuBm4ymXoARwKipm/Wd/X47Z+LVpXxj/xBzV4no4ahD3G35osCQmt2dw7zVch7a7Q6A7O/sa4xZnrmE49BEwgWPHYIT/dI7u3EwEMNOme/HUd5yGeaMGkI9qSMR+YyHIZmmHrt+RucXF6/u7BcFH19s74+maTxaabtJ1KhGnjseB2MFtVpTnE4VQM2SqEYzsK39A+8zboiB2lm+trl1ol0sz7WQ4MLxYxlY3B8szy9NQpqG6X49qEPM8qLdL29trvf7rT6Wj509dena5tcvr0fUfq/9jrc98luf/Oykruf7nRSsk7lQh167REteZDycMhNlvD0YQZOqNPnsc6/lDgqAY/3OAaSYkjMlg9Ggpuh8lk2acW/ORY8TwcE49OcoxnAwjMNR7BTtlYL3d8dlu59rioAHtd3cGdyXZaqGBLlzkwRgNhEJovN5BmCMqsEILHNc/r9cRf2FrNMtFspsNK0OtsZSp3PLbZTKTYNK2YALycilxx544Fd/9la6NGwnXyKvnrvYue/tKWuTqxaefpsdWwlXhmbsAF2W1wLl3AJdfHPkIiMr+y3/2JvCakfXd8EA2c16hcBRMDDRAjK5vMFvXCmXV6YbZnUVji36CxeKrFXkyW5dd5OGHFHpkgt1v4tANBZHmYrBDO5gtHTufJMkX/SYdbHoZ6fm4xu/L79/B7kEzhlRE6pGyklMBQE8o7KJOUg0K8BiB5CASFLCQKh1+8704Fd/b+5vfP+oldm54/0/+R0H/+gn2slZAvbOtbC9OJetrkBOLc/Fo6u+0zh3VjBMtrbDZEqGKooAFiGMxpip72b1cGo
BCZGYjHNEMOOQGs58UoMQPBmIDy9coc88k33/900dHAxpfyCnEmhMRCikRgwKSgKekxiQYcZMOUhkEATKixY0FXiW5d7UF4UKj4cQAilAaIABACEkxwwIEBpAmlW3pya4nAANjMEcVOBcG0qFmU2CCM0gAiABJ6C4s7i4/M7vhdu73Z3L+OTTOwnm/9yf5995Bm4PkKFX9uqU/NScoeaCKUBW9EufvI9N0tg4wDnKNzT/2HNX+Zs+VAJbgBiFEByRiSKxQuZ8B9IBms22Xhn5aBoFFH0dQuE5L32o6yRmYFnumiTATkFjCoROFesYYwqmYs6y3LvgQkzjUXXvYKQEbAbM6BC9w8zTjCmT4ChKgzCrx4J7rGKclZrBkTVLEQFmtb02M2vgDDQNSICAjKrKhGSzlhBSwThzm4CJAACBwRFXm8AQkokmy3mGq6QosxF+9pFJSDAjWqgpIiLSPSQNZMzsWM2SiioAzGSHe5oSABgQoqmCoZiimDAoovesqig2K3OfPSWAI4M6EBATqIhAEol4pEyCJkb0DjotP0/ZwkLrte3D6cEwDA4co1KmSUShrpTASoeMAsnYYZg0aVx57wZr+5uvrF37/IuYRAUmTdA0NZBu5ru+kJicIYIlBAOskxggu0zE6pSm0W6uHd65vRtSM62TsWdvMUViJCQEiCIiguSQoA4xz7jM/XRUg7GoMSISiKnLIMuZAB2RIDchlAV1yixHDckOhzqcohgqKnJEAKI/yJEZGOCMUA42+5XOXidwz88167Y7Mu/MdJmj1lc7ghGZEUKwGffKCFCBwCxHdACeydQIZlIiFGQeEQgEwCiLkBgdmbIlSMI461HmINagqakDcgZk6pFnRy2Y9Q+AJVV3L3SgjuNECCGjLGkyADFTNlB1jglMzdiAGrh7Y31y5mL53T9w4tyZlZXV4UvPfewnf2ah4jZiu+VLoqauhLmV6bGF3uCwnqR8/r7HQjWeazant7b1YFRPqkk3l/2RD1aCgzJvCKwKaNous0YbAbKU0FxOmSpogmoyQKnedHrpAcfuzNlX6kHYD0XhpEkn58r9cdNaW9//l79ASVcE5jAjzGoQJ5LGkoPvRDAjFA+a6zCqYqhTgb5Lfvt3n0N/9V1cp+lIbg1uvnbl6fe/5fX1Gx/8xg99+vrrP/Tff8/otZcu/51/vdLY2eXOuqXud33o2B9615V//wvw/EvdVLX7HWkimjqHUg2bzcsdMSLWOEmmBIZER81zlqZVrB589L6//Xfmrl6v/+E/guEAELJWJlmmgMToicCsacLuUlf+8p+BM4tOonvjxuY//yf9IbQ7HSE2lWQNO8hzH6oKSOtkyGk0abBVlNHiDg6z7onv/NDd9Us9HcH62nh4mExUuapDQgNNvQLa3i+JVZ+5ZruKLlCT2q9/6vVPD9//ne/jX38Rnd24skXlyvFHnzi8fGn84ufxsFEwagG3itEwbqX0wrDWhQ4dO7F8+uJhWd6+dHW8uZuVbu9gyhmRp6XF1fF4CLEBwu1Y3ff0m5fuO9Xs3+qWOPVpMJK7W7tveef5S5eurXKeH194/dIWMjZBHKISFEUek5R5gWi7B6MguL8/CV0/GlenznYXWq1M0ZA3tw5ClHa3XY+nvbzMgxwe1FVokKRTFBvT6SCOVIv5TrtVYDvznuDkSnd8c7eprWZBEbx3IooiAJgkKQN7EFFlEDMFSwbknCMkxYwzr+zIaVIzmJWYAhgRO5ZMzEBJBADMVKM6RBA0RfKZsQMTFiNTUpqJsmwRAEGURARJAJEYEGc9nJ5AkoDBLIs6Q9MDz3rKgUBZIWNGAAUjxwYKZMRoiKKCCnknz3Kv5inLKVmUKvdFVddc1RiiYzx1+tRpV6yc9DbdPHj1tfGt8ak3vbkqVh5+69sfe3ruM7/6qa88+0IFeOz8RRfG9eUX27R+6SvXrXNisDJ/+vFHgQAknTi/0npyScUNjveyeXd4Z3dwZ+dwd+2pt73lzMLKxleuTl9eh3Z7R7VWLJYL186uvXQ53t48fesxr83kYLq+dndyuDe/euxNT7+tf/L49ZtXLj/7gh9wHnS1u9CJ+Sd+8ePnP/QN3SLPimpnd29neGjQ7E0PvG9t7WwsFO15171+6cXtq5cGd28/eOFNr9767J3dyaixW3d2Leqco6c/8MBkY3ep1TsMm5j5uV6vm88Nt/eydme5bG2s3Xj2zu8HAZdj58z83Mr50KRep9hb3zrY3ZF8MpZJTPXCyrntWzc5uhZgjtPnv/qZrVtXtjc3pjCyrtvfHcQEvmi98sZuayN86fk3VpeP9U4df+3rL62vrS+szj38+P3lylLbdnViVUj9fuE4k2paVwnGgUSaGPPcOwYi0CQiOBpV1M4UMwVzZeaaJicAMQkxBJhZgxFV0QSMPTFgZuQ8GnGUyGigkESZWVNKECtDQCNkMVFJaGa13fPWCQFbSpnDAqFAZs7uUffQRGY7rWhciwYATSFHIjQxEjVVsmQWQrAYGwHgMs8c55Q8Yqbm43SEJhZFkIG8sk9V3QbMWQnAkhJCmlZNiCqAQAZoqHnhXUyGGAXqSX0wHM51lsBjIBC0suOqKpWMvLP32n/4mbnTK93eQr1zLk7M53N1s0/jlCQbH8Z87RDzyY1f+5m5b/jOuQ98MOWx6bj+mx5NSOvX9qdmvsiK0Gx87D+7poaVhWPDAzBWyHnj4O4/+NtL9/V3N7cWpy40VfRUT2Rhdd42bi499a61B97ZL1vpc7+OCN1WPjyM02njCg+e2bOBNoKHCyfP/uD3+kee6l/ffemvfd98arHKqHP25A/+nc6Zc7w/2bh5Ke7sQGAgn9aHVMbhwd28O1d86w8eunaocf7aV3b/zf9VvnKnXFqodkeFKrgpLc1LC0LvgfnxFff87053ghzr7XaWXQfyZkjDW/nxp8686Vu4HmxdvxYvfb3dn1+5+EhdTKrJWEa57zxdbd31OoFrz+vmddzfC5NKi9IIpK69J59nzqweTSHE5rXX4dQP+F4B4K2yrFkfX3lhUuGx/88/CO0ug/OZo+RaJ+6n0mS0Brde4rmHljuPP5S/8c4LB88e7sfkUMw5qgcD6LVrbLnFOd9fPfWu94/yuTNvfe+nPv57G7e2waCqhAARUVQJ0SHPlvt+Blo0TDEyMANWGpX8LKmIKSEaqQBYUnEASQEBUkp1CEz5zCaaRIZ1TLNkr4pMGtOM7uEbT2UL+1cGd75we7K95wPSiV7ve98zeaw1IOULy+7TX2m+/lp00yAcDeozyyd/5Ad6b14d/9xv6dZmqkpXtoBT5pAX+3b6LPExvnEV1m5Nx2PudBD9eDK2+UweXN5q0eDwoJ0Xy6ePw7LwIvqNaXs0gdPZ/Ie/sXjx0t5vfaxcr3K/eOsjVxbK+/vf+UeK+ZXtyzvjyUs9ZETkLIuhIaCUDM1nbpmKrqFPYZxn5E3i5uZyH6GcpnP3Pf1jHxn+7mvT3/i94uvPl1vdNCy1Yt9JUPq55WP71QGoZt6WTi0ODsbTfnHncEDzy3nJx07Mf320v3hxYSdI2m
7myvki3+90uUDbH1a5J8/KBbmWqxPVdXQ+PzwYeu0gkTrOCx9dlqo4nFTduX4VhcEPq7E6akAIeWVp7tbGuneIXKDCdDAWkgMJ2eL8sU6Bok1souM6QTWJFppumc+VHXY4CiEArPQ6BpinsQN333yvg/723q4k2N4YtOc6YBSnzZnVpZ3peDyto+dArGZF3q4jDGNisYKxEXGZm9Z1E5p2ixfKkn1ZjUYl0vnzx3ut1uVra0XmTGj5WLuT49XbdyqyqpF2u2Os73js4a997tnHHjhTdvyNu3fFGDlfXmwdX+596ksvHIzGeV5ORtMgCbTIkPJ2qbFhsaX5zv5oPBiOx1XwArVmYTB58/nVTomDcVNPpHR630p5OI510sEwRY011p1OcXwxrxKQho6jnm9vTxJDO7fQLtzQ1w+cPnZ7I9g41VFuH1QPn+yUzFUyQSDmqBYFIqZxiG1EB+yzLIQqKRDSfyMVFa0iJrt5d1AlKIqy26etg0nG5WRUay3ddibkh+tvlPnmihuQ1KmJMat77Xw0aqqtnbJN1c1rg2HTYmNnzqnGVKIf3r67vHNIJ4+LIU9DdXlNAjgCA0hmlowZjFEFAgGreDX51Cey5SaLF2EA9f58MdroDPcHX/qi/M4XOlxaSiHo5NHHpt/4jXvPfrX/3CtdbxaiJirKgjq5sUrWnjbNqG46WOBkmsYp5wIUARQ8GyMkizGqY3CmyMC5aQQNORnNOoo0mQBRqaC+qfPM7OZ1unrDnnh4vYorp87T6XN4+xarQjQjlno8vXHDtR6EajRdv9uvAp06R6uPTG4PW2EDYb8ovcuK8UGgHLJObJqBzxznhQWVqAaKyGDCDoXB0IlXz+RIezLZ/O1PLD/59qUHzqf1PWdlUtZgVDh1s8qrCKCKZkQGDhrxjlnVadAI4HJotQerC/kP/wV44JH6jWvhS1/sbq9lw/Xp9RvQTL2IJ4MUAHOY1WgbgJnLM0UhVFABNSCDJJDNupmMUIFn8oECkmS4/Ff+9zjs7fz9Pz0Pan/8z/A3/3ej3pm9Kt6/OA+xcZnr1A2IQFkAhZhE3CQ3jMOs2+2WS6vZSjPebfJWv1rpf+aVT5bOkiATZuSQoJIwrCdZngWNyESO0EzMTJNEaZdlqusQgoNSJYZ6akztdlkHc8BGZCpN0hirMm+pGgJ476rQqAGS95nL2AAOjkZ2AERgwpzRM5CBIUTVkExs5tVSNSMkMTU1RGBEAEtyFOaRGbAGjrARMzlplvYSU1R0RIjmmFJSEQMjMxQEVEEGAHBEfmYCUnWOZhklS8kzzngEyKhqaJBkZoECMyAmEZ1FighITJ0jRGtlWR1TYwn16JOPj5DbZjPvisER7wgAFGbFa8zmHcGsFktBZ2FwM0YgJsOEBI6ZZuVuAKbiHWUZGWCV9HNXb4slA/LeSbQqIFgwND+DKINFBVQBRAGCWZecqkNgStPBtmMCQueMPCQhQkOHYmCqBoSAzjsANSOfuRDiYDAeT6MZic6uSAYGKagjdMjJjAlmWaokOkM5h2TSREanqjRjeBF4ImYgdA6siZEybXeoYECww3EajK2OEA1AExKgKRMBzNImR/rP7JrOEmP3HERgM/HOjvxEM0rUzGQ0O+bMXF84Cx0iKIAozP5rIiBChxiieMcz9ol3PkfLkTKiOsU6WkJhYCE1E0Y4EnQIVXRW3waOMmQ0NVFQcUQigohGGHRGnbg3HnjmlkuTmhHYUAAYzExCMiBiQBQR1cx7xjpVW/LGs2vXPv6Rz/xeeXV8H/iojnJukhwONX/4kfziGdm4vHblalm0aW5leP1FuX69GtVE5NgvtAqoefMwSZaZQ2P0ZOzJkmAKpIocy8yhSgrRxJJxO2sVIMWNr2+88ddIZVnUHHvIHHEzkDKlgr0ZgwIyaw2NpLxkQmBEDAKiuWdVE0A0xJnxAsmSLbLv5Sw1gBY+c61otz/6mW6mdnPnfRav/uUf7br84X6/0UiTZJh3Try56Tx60DvX8pdmv9Ck0Dq9vHOwubhAZazr7QYjGAF2WiDInIUGSC3LfTlXuJU5GtzdePY/zRfjJkzjtAGtLYXCFexKi2qmvk1tP/ZrX7j1e1/NpVdvNQudJbAYkMQEiXxeSqwaiLFIdWF6+v7i4oO2mvV8qn/1U37cFJOtrZc/G0+dv54mvcP6WJEBxpZXSKl7ots9d9/o5pV6QusbVd7iYxfP7owP3nTigeu37g6vXY4vcxzG9sn7uisnB1PeulwPXh2PNsfZXKdydPvuVIvsaoARpfXFTuv0fb0z58Y1Xv/6lTgctfvtu2u7SczICWeTFJImCKHTKlKRLZw8Q9aQWdmGlbO929ZkJ1cOb6+vP3/lThXPrh7/o9/5bT/1S/8pL/PM8cLC0nC0A4DVtCYENMzK3BnMdbsHo4PhZOozPx7FQXNw/vTS2tZwGmKrV96/fGy6s3/zcJRS68yp4wxxcHt3vt954oFHvvrisxceOouaNnbqaQwZQxIp2l21fG/QHK0NTEGECckgNQkSmiNlNkJgB6ZEhIoCFEUNo6mZJMQj+54ZELKDZAaqkGJKCKaKSEzgdIYERkAmOiojTwlAG0mARDMWUTAQ8hk7QPDekSoBIDKAcuY8YxJJhCEkVVNTJkUEQ1IwNZ25P2DmgAWLkohmN+zk2U+aBpKxYjVqHGGzNyDiKdjTb3rvB46fi4e3Yrz2xcufeemlF7fHw1Z+nJ68uHDCPfCtH+7E98aiOGT2V29ufuWNVzauHtTZbb1+Yzp94F1Pn3v08ay/MK2JFs9lwJd2DjqjSjYPTmTzYf/uy7/2n73VezcPmv2Ibb8zmHRX53wmvQ4vry4pcT7aJqsml9fuP3n+UCkbVu71K5//zx893KlyzjPkcxdWJ2k8P99aOxh89D99/L5rV88u9YaTeuH0uUp1bWO3++B9i92FpfuPkyuqoGfuu89PhtX6IW1VSao8hTLL5hfaxxfLk/cvbO4Oh/sbE4k+y5tRfVDFtZubaso+DmR9/vS5+9/x3nx+sZxbKsqCspzN3N7esdj0eu2tG9dao8kLn3+hi0BZ68uvvnHClVtia9vbjo18HkMiYVTXTDmJq5oQJqO97a3Wxg2ppF+USwvLglnIaIqOoXYZK/mt0SRXGA9qMMsIOWcgcYhqMQZJAS1p3nYZmyFx7steG4PqdKqqqigqDQAzUuYNiZKSigKYR2NDS4qiZI0mm1UQOwc0O/aQGUqKjBjjUUFyEkAzB4wAHtGRi4kSgxNDU3QuJQkKAuiZYogeCBQkqpgZEjknhtNKzJKBImgCjwwtnznJxbiOUxWViAbahKkZFqoxiUMmjrFJjYYkMk3iADNgUO12sizj2vO0DkYUze/t1ydXJAfIe2Wlw5iSIVDJhdL2rS2DNHXDBNkD73j4VL91+3c/AYfjts80cBViy0J6/ZWhs/aDF/cXio7rxbU3lEkQAhEQ9LzJ7k79n34toqBhVMvb3gdJOzuj4V5KgJQFtDCpfJdxd4hRdi8/c+y9f2L98tfyrIV1PZg0fq7HkkBhuDvM53PXz
ur5+Qf/5k/d1onTU9nSQlxwujeNCt13PGHnnphy4VcrPrcYXr1WCNsoIoSWo2T16AsfH/X7/t1/gm5f3/zJH1vaGbp+Z1S57urydH1jdOtObxKqone33njoqafghd/CzO/u7Z8putAMitGdL/6Lv/Wuv/dLwyrlxKtnTlV52Sm4vn1lb+Pq0sNP58da5VsfrIcrOe1X11+R0QQsUlEmADRzzgla0yiy+tw14xiuXs4+VCfLMZreeCl95t/A3eHiB35whKebSouCICoMRnD9Y5uXf5/iaO3qlfbb3mUx1/GVDzxxfOvWwa0BTffrAj2oTifV4jvP+JEL2koSzVhzl5VkRFHTzK9tho4IEMTsXkrSGFERnXPOEos5NAUxIDQsCFUFCRVJgcEsiZlaCKl2wWcpc84XGabaGouaTE1ViDHEGMJRM/K1j368efHO8V7b7aNrLCLosYe2/EAO9tqH1dlL18OLl4khy7lAGh3s7f3OJ7afOayf+fLTxy9S3Z9MzQzGIRbz52rmdnI2PsjOHm+fOrF1e+Pm3cng0ceP/8C36dmVY4utjc+9ah/5yAN7wxu/8QurF99UvvNCj/TwYFT97K/G9bvLx1bd6vFJfuz86dZkZ3P08/8+muMXvlgwY9CkYB6QRENCTeAYzMJkAFRAChEAwFYWW+u/8AsXz3SqysKLL7kv3Ij1sGAQyrnDjaRwuJdlfUtZ26fxeBBrvdscZE+9g9/29s3Gd5QP7hysv3RpcXVpOq7qBOtrd4+trry+MWJH3kK/k6eokwQIDsn7gusYo8YyzyWpWHLmk1h1OClBjndKBlwbVDt1XZTYywslbvU6X79zc4I6n5cs4J0b7+1H0Sxr7U6hAFhu46hKm6Mxl36+XTYmQ51qsrNz/SxkTlGTZA6xSp55OoG9elyNK25lc8v9Is8Pm2h5fnM8UlQBrCtpF7jYKjYHgz1omPHc3NLG3m6WeWJ03nvvGaEaN8FCr8jPXjiLZfGl124uLfXKLMNB4zm7s7azNxo++MgjVdgaTQbzi9n+/nbpSzQtWrQ+GgxqOgzV/XOnb61tTFLsdLvMeDgZFsgpRQCilEA0VlXv5LEbt2532y30bjKp2r1iPIztbmd10V/buH19vz7dby2WMG7CNKgxtub7hZaVJmw0SgVs7Ohwd0KJ89znCAeD2nH39ZsDNunlGS37CuRu3Vxo5ZnESMlMPTtEL+ZFNJhDCE2oM0Sfuz+I5B9JRYcHI8ZsNIqirEGmB8EMBWKscSErWvNL3dMPlmeyT//iz184oXtfb7oOkJv1V59/YPdG78TTRvWN6zcrCxk1mtgBeGZz5sOw+tRHygt/aeCWI5Ctb8Nw7Ag0KTA6lsxhncRl2TQJeYq148PY/NuPu+yTpJD1241CNQ3euDBLBlRk09hki4vViXP+kSy9up6G2wRadPPYTDOtN698fQG7Lmu1iXyto9/9mL98yZsBE6BploAKMkfmQRSBLVhKEV1mBGLKQJAigUUwSQ2TQelR2N3Zq3/9s3P3PbDftlF/yT/4zurq1ZaK6xUSIjYTefbrvQfeUuNw4EoSkzubkxSai/cNDgu4vt9FMGVYXJUL9+nptHvnDt8Z93YnnlCIwHya1o5mxBQhQs5ZTAkiEyztD/b/3v8WTyzgwXjlADyBZiSzYnrnMCm5HDW6jAEcBdIqqDTo0AFLgDjX6//Q91dPv5lckfkL3ZOrkIZw/bW0taeTJnv9qm1ci5t3M84kJBNxRJDCDKExA5QgEHEGqhAayNyMewOigLPCl8aO92+73vHVkyfe/67Nj302jx10pQwPjq104cY+JAexhjbDSnfz1vaxd54/fMKXjz85/PHP2vXJZj0ODyyfe/dDl37p924u2p/95X/zU9/+QyeEwzhwlpuhYw5NikkKR2ImCWJSYATAqhY2ImCfMbpiMq46bSrarbpKUQjMRMFbljmI2gAgpugAGVEVRBWJi7KlBnTPbm0GDMhMmePMITMAYKMao4ZoYjZDESuamDo0RpwFudTMEQio6gwhNIuEAc6AMwh6r9EcQBFQDURlBrVARAMENMoAeCbbHDFu4KhyHZJRnPFP0QqPjMiqZsBqgDOexRF3yBBmIz8hxpnVFiIYkJmaEdGMnI2IR0QjMJ2FpADBYEbdnl2KJMo4Ox8oOwSjWZYBQGbqEooCgkf0fobKliTiiAGAHTH42ShuAEx/IMMZQSLAoymJkHDmhULQmZAiQJZMcAYYBGPiWak0FY6Zk5FTIMeEQM57nzVV3NkdiGgSNdAZqh0AZj30CMQoUQQBGGewcgTAJsqsmB6NFM1nmHliAEBKogKSZVjklmVKhPuHdVNDk0js6A8pHCk/CMhoBipAM00QZ6LPjBuFeIS1nmkScCQg4RHZ6ujho7azGZJc6aiO2wDQFAUIxFqERIiGSRVxtkwDQyycI9SUNCEGBEAqHSVJMwgv4ezCgiKomTdgxzOYCpFvQoois1fkH9idhuzRtPROa2Fz6D2QZqRsYKKgKmCU+2ixcNIZbGz/ys9z07wbc5PWdJqMGTICw2nQcxcf4pNzh4d3Uza3Nl1+7Ph7x1/8ifJwxBVZVjYmOelocuiISTA1AT1ljnwiTImZEpqZRRVyDAwWZhMM2Ug9Qi8aGEpK0bQJ4svCF9whXzXBjMmMUKOnPGPOIJoCIjuCIAIpAamhy4C8C7VKihAxIxwcHBIiFllDhuAsdTqZD7XlVvZCKU0cQnQ5DXZHrVbY+5mfnLvwxTPXXm4LeJ/PzS1vhbvXifG9H9h76fnTMio7GdbsQCIiIpkIGaNiqoK1s8ONG+4L//nsE8fdw99+5fNfXd8bLp9cPba02HzyeQo1oaiqR+wMYvrp33myi1UzQtc2SpKJcwZVaHXaGqSx7sGpdnz34/j4/dKaPzwEhj1YfxUnQ4olY1ZfPag3L9//wx+szi+sP7cTbl5b7sD8Sp6M7e64HbtTsv5j98VIVzduLJw9SY98qLr8i088/pbJtSt7h3vZfQ+nZHH/znA4rQ7riS/3kr99UG9GUsxv9bJQdh5479MVFVcv3R1tHfpYDcbV/mQUmwTGIoBEg8ODHA0CNBTPv+8d+VKbBqNqGnY3qykZdvKa7dkvv9GMYBptur599TPPZe1cVIPC5mDPI1RRAXlxqQ8IAOHUiWP72wcrqwuhqjcOJ0xeze+OUm2QmvjwmWNQjxfbrTvNPoXs+fWD7lI2pnbXyleuX1mY6x7uVqNqUlE8/9jJ3bWJ7cXRpPEEnfn2kbdOVQhVoqkYQIoC3nGeATpjZ8BJgAijqqhYSGwGKkwzy6kSAJogAiKpGrG/B/pN3iOAMTG4XE3EIs1ulISaZhY+bVSSiQCIWkRBZI88o9c5dMRABMYSY4gIGbsgJqZEwI4FCREEKBGklKJIslmCGTCBDMd9DDlarBoQjikaqpFzmY9iZrS7ea2am7P9g7qqH734cAW3t6p6bevWv7/1xtvf9bYLDz7w8AMXJRFabouLpx5761ev3OJeq6e6fX3n9e2PnTI+/Za3VWVX2p26SafyjGKzfHr+
1Y/+7utfeiHPhCn2e73lN53Nzq4+cLw3rZqlk4vT6bTXn9cpSB2/+tHfPn3uZHtx0Tq5n4arW7uWtzo99kCDUfj69a2R2fjGeJoS5M7d2eTR5sLZc/t7h8fPPDA9S2VBWXcu73emw2neaweUg7LjV493zx47t73xxHL3t37x98aU5h89gadWXeEybhXJMu+//tLVwe7+9t5IPUKm7/sT3/3wk29z7YUAUo2CcmZEUaRYWAwh7IrxfQ/Q1s79T1qfmq31w5P3neOzC7VMf++5j5/vZGxshO2icMkPhymmOkQpc5mbz80COMry0k3qzRt35k4vFgWWRTneT0F14jSYNZ7YLENjxRl1sJEUFdWskjQYT3KnWaedGCF3pYNGGhc4JoqaVMwjOANLIioRDIUigqIBKWYgLJENkJkoSNIiF6SjslRNyvYH5tcUhACyDAHAO0feJ4RkxIKzo0oQZe8hCIE5BFA0IRFMYurME4ijaKbJRBITH2pdsBXtrOzPFdDJ2+3d7c3p5ECTSQgOoYnqgISokmiqUVUAAZwBSjTvKFNpedcrW3VR7jdJ2fmWn6Ziaa43P9eNocLk6qFUQTLS4+dONK3+xQ9/Bx47OZ0rehcfXTnY2P3kl6GZqisPKiCC3nI23br0xkf+yRP/y/+8/+qL1S//8pJhO0cV3K8js7KQjpuyS2OLWSc7TKkBc46rqRp4M23lhEAugOwPc6vy5z452d1YGgwhLw8h73/g/bs3b7q1ayW63FMWxMGgncbNpec6j57bbfa7x1dXvvM7w8/9mtSw8s73V+0FII2pWrn44J5/NjXgfJaCVnuBHVCzVv3WL8RLV1fuXG3d2kaGIWf+u9+PZ1aan79i69Xdn/3xxT/7o1k7bQ22+toq+lmvVaRbm/3+jdu/+aNt1zTTxnq1QnTMveUlKPJmyx176qHNyy+2Nn7bXvudAsa5s9BUiJE8mxoki0k8kXecYU5TqJvGMerowK49Qw+8WxWLtc9LGtPp8/v5/b3eeYy1KlGs8MpXNn75n/u4j53cPfI4P/KW9oadjzTBL733zeGTr0xuDdQw817rFKM4XDz30ENPjXAuVM3+xp319duj4dgBqwAz5o4BUY/OTWZERiRgRkyZJwRLUVGDNkpGlLnZoVc1GWRAIshEiopiZhpSNKJkADRrLhEEYEbv0RP5e0Py+MrN8eWbrbNxqcf1ZLr32i347KX2t73ZdeYX6lzu/GwaNwzWxOidz/an9twlOKfFmx+5tYErB7VPLCEWRSvtNK2tAU7i4RDh/PFPvPGVvUl1e83+1F/+6+6sCFax1lPtbO9TH62y/srcafGdquhpmnRb5cIkRZdDHMVpNil95+L8znBnrnsxXL2bZ6UVmUNLUZERzZDNyKFnUYFYG8GM6RSqhgteBov/8TcWWqeHl15v5wvMZQzVuMSpmAq28lYdRXbH6hiiayc6mOyefc+7d86fbsNcp90KB+s5DKHnE7su8XzPrZxqwxUSxek4uSyv0jQgGUuNkipFp9Mk3mUI5IoMPWkzncvxm97y+K3bW1uH0wwROaOy2NsZdDsd8u3RwU5T2UE9apU5NsFMAciiBgnnHr1/vq52t6aIYArDOjjASW2D8fBkr9/KsqaKTSPVoO47V5TF5nisoL2FcmdUQ3RisanrSG5vWBWFL70vyg42CXNUhzFiKX4wrMy4CRZSUIROmWtIvV6r1y0fOH3s69fWb13ebmfFdDgJcvCWBx/IS7+xORnFJkz2wnh/zuE7Tj1+5/at7nJnwri7tptMuxl98MOPvXZta2uwlTBvk0ek+W47wxbn+cb+1migrDq/3NsfTCpk5VaZJWey2O2Ka6ypb66Nks9OX2jtb+9PRspI47ouOwUqEfgkadQYoS0t5EhqzF6YDfKS0sRik0ChUeAiBMVJ5j+/sZefOn1fJ8Pp1HueNk3b+6igxAomwTTFWakT/r8CaGjQhAhomceW4yZJcrlx4bLs+Ep58b4ze9O7n/m1f3brlbvY1dX+Um8w7Ilk4+qVf/wj7/yhv+a8uee/dv/b3i5rO4c3bmIVsTGBlGkz/Pl/vbQ46X/jX+QQJl/42Gh9FwETKnh2uSMwr0kkZsSCBl7ZEwcPlRECDaoc2ZkJqiIaqCB0Wzr4widdfbnD5+RwgAqzLQkhIOvxuHP49/5k78ETUWRyd6u4tV80gmaKwc6tNBfmAbP8xh7vVH4YLAFnbQem5COycoJpxWDmvXqvYiASQFjQs9kLXxv+k8H82x6HxQfi7R2gMlltU1GRwmzyqY8f7lVw8ZxbWuyunq2ufuHwpec7y/3+uQdGEuRgMJmG7uLZXV6cXnyweaqXDWP1S7+Cg01jc2XPDg6hGTNzxm46qZMiqqgZGVrS/nQ/beyCEbgspEBkEmPUwA5yn7m8HUWQVJva6kDeudyBKYED78KFc9XFh6Td4qYWGXK1Tc14r+XqCxdPHDs3WPvJual3Qye1CJgDBDVwBIxHA7dzBqgSiAFKD6aABikBOSCGpBBpJHn3/PsOJM196I8Xj31DeeHNubOX/+e/9NDaIXgHmRuNJ/6b3968+QmeW5hMxi/THp997PTi2uC1Lz75Nz5wx9of/+hHv+XH/ure5p1//OM/X9YZSFBEMdFGc2AAIOfrJnbnPWmDSArAjghcTs5EXJaFuspRPFBIkZkQFNQ8YGZkISQ0ZAQiVdUU2WeOHJBliHXdwL1SzJlFyBHkDh0hmAVNtZiIypEJxxyhGAAaAzicYYIJUNEM1GbtDDTzFBEgoZESIpjes+dAQgCHziGRMYChICKTucyQzSKoQFIjIhVLqjDbERuEBGKKxBmZI5gtsIFMhGISVIwCogaGeo+QY6oxJAYgAEZQ0Hu6BRiY6VF1GyPobGUN5Bhohlw1E0MJRy08RDNSt6kqEjnGjKhgYAJQUwAmnNXiGRiC0gzNPOupoKNcnMEMtoozwjIhELGZIRK6mQlLEYyYZpsiBCBkZqdgSoSOMyNAElXH7PKWKuadFmW7mHSGo0a0GRhKAUQVks7q/5xjT2xmIcwewWRAYM6homVEObGaCQASesetFmZkdSNVZZMaRAHYTBIaECKYIYIetZcdiXs2c5DhLLmJiLO2uxlR/L8mR8OROmc2cxAgAhMQoAGqGMJRO91sOASzZDghyE0doyMis6QqgEjonWOIiGgKsxzbVNURI6IYAFACNTUArFWAKMnMOzZjqB8Jmsx4T7mCNcwKH1coLmYchrWRi2qz3Z2Qzn5qMzOlpjFGWPR5hyFFX4FRxkbUiLZR57N09/d+0TIoyc11l4+dOT1648WWSjE3F6GJDFHTIAymQYqybSnkjlSsm2NGqUmxnjQud1jkAhgA1YC988ishoKswEhmRq60EH1ZthfazWg8ScpMk0mdAZKC8wURpJQcEwHEFB2hqrrcI2cpTUDVoWaOEQzJstwDU1WUm4WI81kN7Zicc54JyAfAoCIork1UxGyyV33+k50COSdtqr2124mQTz6y+D0/crj0O3u//tOnCVyBIE5VQcUxucJTkixz16R1/i/
+ra2SNvMxajt/+sNLWHcuvTZ55otcjbVhVxbkWFUYfZ4gDWqzzHVkuDegTKKHmGqtKquz6fLCwn//R9ZbZvm03t+8+qXn3/voRb72fCANQTR3JrQEgddfmlDJ735H573f8Ks//RPfd9/xw8v7cbTV6eDapJp/03vj7dcFp9Xw7tqzH7Fj+vKNl06p9edO1vvj3RdfFN9uDOvcHwqPGPYwHTg9tKSnTi3Nre5t7W3tDuYQHQxdbrv1eL7TGcJEIuaZi2gxSKfTQpXOXIe75XC0e/frX29XFhtQtbLvHn7w9AtXLktpHqCKldRgBqIiKbFZ1i06uU9RnCQEbJLcunE3dz5HoCQCca6bFxWYSqebT4bT4dahhdSeXzi5WtaFjY1isnMrS2k8aLeKtve9cnXn4DK2860bh53oWpLIpKnivtx7gxLKLDQmSQEUjRidARkwEKPLHFlKKSVNMc1CZCLeeQIiBM88WwnQ0b3PaFY7BUimjKBRmNUTITFDJqAzrO/sfZzAEoKAmGkTIyGjuoyQgL1jhyiqTJTlHDXFmByIEQGoIciRg9WaIHVMdQiiRkBk4hyF2hCso8RIhsBkyC6CgKJTbQlevfz1zfsuPn5idbgzPpRxh2irrua7/SdPnIS1Gy+++tL8u9+/+uCDc6fOT1aXuu98++m7r93d2S6beJ/rhaq+9rGP737tC3l/vnvsRFYWrW5+8+vPv7w1TsEXc22VZv7Y/FPf8m7pLW9PQ3exk42rmpSy9pQ4URpi88Qf/dZ8crAzogeeeOvGSy/B5ZcLjM3uyBsuB9jYHg0PqmkEQdYE73rfd7RLpf6xFhCODmAwGU5g3rXanSbFKs8LPzd/98ZaA1JX8dr65itba/sr3f3t6vLzrx67e6uTuU67UzfN/nR4cNDEwyjKIPDdf+q7L77zHQr5NIhzzrlZQ4Q0dUgSVKQRLXot3+3lJ5aWj3UWH/Lrk7Q9rren1bf98T87H4YvfvGZQZhYzq1OexJ2nREoZJ6AyHuXdQtrLMRJ7rKDO5uQkhBn3jnl1nw/TMYxqCYzgtg0BODZk+PZwicRNElIlJFsdo9HyxiVIQYxM1NIAhoTEpuJEqChKiiA85hEwIkvPDuWKMSoBsAcq4ohGVgMAemoBxAQwEDFHENGZJKUXK2aIyFhlaKYKcwWKOo8SrIYAZEATE3QKWVGKcYqSASx6DOc6KA0zLNW4bnd6cdJSAcHCtKAMTKB0awuzZCcMxEFS2oZUWZWsBUO+oVzWdmdX1kfT/cmo6xb5EWmYsFzZ74fRhXVilUzrbT1wKp/4rH65Jne4tJof9IYLbzrPZ3x8MZnXzFBM1o/DKvOYxR47sUr/8ffqjbr+rnbc0UZYsw6WUpp4A2DtssikVqyQVVVpTtMsW84N+dTozpOScAheaQ8K9Jo4kOMm8/VrbIxqp/+7uX/4R+sTLdHP/e/NV/8Ao0HDtCleNLJ2j/+C4vf8tTC49+Ej7+XXakAruDdW+utSpRii+P0ym3i0tow9kk0Zp45Sg+z/HAnPPc5G1cWaUiY3vLu270L7gsvnIsZ237/xqfgJ189JhHbS0XTLrydSJv7/+ibm2I7n0zOfOOfJ9+11ARMYRKcok6Hfq4XKS3QXnjuI377losxhAho4HwUIGQkijLNcsqZdDpByilSNlf66XTtl//xyfe/n1rl6NWvdD/4P1bZ8QWM+ql/1JatKt11neXs8i7s7WaruT/+4OhNH94J3bzX6WXtGy9e7YS9iwuu3qsmKXVavhrtlM5cu9jVOBoNmvH+G1/+1GR/09BSSqY4Wzo6ZjCTpEg2O1kyExKqggdtOYrBDHEs4hkKJgI0tComRnLMjGKAnmYe9YQwW4ZT7ljU0JDUMrV+yy0vd+AaAMBT3/s9n4i/c7vcc9u3ijw/d6J/+NGfbb4SumUn3ZqUYWrtDiKCRqltrlwEae0PJye+//sP92Xvp39+IdTsc2zNdzqL4e6OYTF/8cT//blPvto+wG5Znzz+93/m3/zrp/9CJ+4u5Ytf+Yf/+1NuFVw7tudYXdjcdNNageKkAWSaNtX6peyU3c5SXfbjxnBhNIxNsGCEgiYqbKrmPCGJiqQGCbNOKzVGQL4A9qZpcvW5K/0T8VjHiU4sxcNJdfD4Q5dP9B955NHtn/ulk9VoZkjPy7L0jsLk8Pc/mvWLxYfelaXmzjOfn15f67WLYqU/3RrmSToJm+l0uVXO94vBJIFEZiBgBVZTJuzkuRlMYiCNroKMdKE3/8bNrVfX1nMuer18Iirj8VLLtTLkFNFi5tGihSblZcFF0dQ1kJWOOMP9QTh5Zql9rH/l+uY0NSFSItfqdm7vH7aR8pxFLRJWJuNqvF1XRl4ysMwOxlMo2hISeO3mWSUSCYajcb9VDEaDxqDlXYGGkDpFjggxJVdmIUYwWJnr7o4mX710c1jVzAXlvhpNzi52uGk2x4fb08PzZ0+GSdPN85NL81u76wejseu46zc2zp88zhl2crp9d+/mxs7SwuJBHbrtfHt7O3POKKVGM3Le+2ZSzRXZ+WOdUdO9sn/oWFYoW1G6szMol+Zac+UuQV1Pz59dHKm/s3a308pikoPBCE2NpMxQY93PytqS872SpB6MU7KVbkcd3dkfI7nRpIKZz6LwH7uzNt/1D3X92Vaei6CoJKA8T8xRHSAag5g1Vf3fSEWjKjjjAlBVp5NpYtbSX3zi8fMnlur1y8dP+f1nNz/6Czf/6T/5hy989ov3nytv/Pivd9D122X10uaVH/0/fT1sG/WfejtceNP41iDLDsCSIarBUpDqX/3b8c/9Bk5Ca/9gCR1E9RmpZypKyDJvQz9tEEBEHVNGlo4GbTPGkIQzNsfkM2tiMsw9LVgaf22dZb8tLATivQribECfUPdgBy6tA0KbDJBs1nvWIfqub+b3vw/YeG0HPvmV+ssvucGYgERIIih5RJ71WyuoY06pYhFMTlU5Szyuyq+8GC+/kriXWStzqi6TJkFSyiKP6vD7n5Avu/byivZL3NxYPTyATa1ffr7I2hly2ztoNpfXbl/7D59f/uD39otuGg9wMPDtDFSVuUHnAIiyrCSF1EyUPIYYncscICRDDzHWTMguTyl5NodJqonEhFmHyLm8jMkis2chhdQ0Alo8dH998mSsY16UfnXx8OWX2RKtrna3RT/92/rMbyowiaVasrwNKIDJSAENBZkcJJOYCBFaHlRAExQ5hAqCAQAkBGWZTEobpOPda7E79/ATfOd69kv/98UvvogHCg6hjfG7vs1+7G9uj8fzrh7+uT/ywW99yrrnhtObnglKvvvZ5+6M4y/8xuc+Ml7uLFU99RFiAgO1lBKSeCQTQSOnKYloSuh49oIHxFarFWNlpJMw7rQXnCsm08oRlb5wLkNUUx+a4NnQ1DuuJSKhJW2moV34hX77XtHBDFEEhWdHSAiiFpOlZGp2hIswmE3gBuAJaCYGwAxgZLPPJLtXUs+O0CnOnikAGKqyCKqZGSVNzqGhUoboCcjAAxAxEUVAMYhoAAoglggQAMRAFTWaEhQOeIb3Qy
72ivvda99lpawfOPnpgLxEMF842oK0OzSAUM6obxBPrendwYu8PuwdMrv/4Pf+xO38/e984nzp37zz/1n/eu7zDI6ngSl7FkDK4E4h4TEQcfAK1PAh5z7kpPPTGymy0a533XSjkd1/UYEZOpZIkp+SI471POKafxyiTOZ45D5cZlTfuzfe/VeVQxIZs1y7bPPoSUlIvjGVpB6AF5iPoYiJmqJdGsKmb2pWIrABU1IFUTU73baoUIOtRYqQ31scctYjC0isFAdwQzNkCzIUv9hwYaODYcHeOK7gpYqsYERDQYW4YM9UAgKr0PiDkPnm4AsCG1hIBEx3qImeFxIOu4qH3oZb8bLEQwIzAm80QGRsSEgAyM2aOFwZ46ILCHRy2hiLIoEnpiATQ1x1RVoagq54NkK0JhokVRxRyJGIkce6QhMuAYyACY2FDBAId+uGNLjSHikMIDIzMjJBATyMNHPzw6QPTEmCUNU5qcB+DB8bD02IY1wKEZB4uOqBEgmCGhHlOiB9z0sFoegNZwfIHVzIzt+BwqmgEQIx1fJEQAAh3kPhjO9pBZs2M0kgJ6puHEDoMvA3BIgIAEpmBofHwZ0OzuXaKmx8FB02PD0KA9GZgSoZqq2oBXPzZI2ZfkvyFPaIgEagSgaMPcLYuKHt9rDsATOsScBZA8YsyCg9AJx3e3L9mBC+PJ+uZalwU93XffyekY1lcqUPWeyxBSzH3K8SgKal27MCoZlNUw9r4OLTZLyd2JzeLbv5mfeKz94vPz51/rjw66eTOdOhJZLJdsFEHNu/2mTwoJQaEd17B70HOEYsyt9uqCkSPpJiGwZVn0IkIQgJ2KOkYgy7FDZGQgB0yG3JtCvwAfXM6RyRtylgSmRqxihqYE5DDrkKwBCq6X7A2060LNkqzrMxCFghW01z54x96yJFBTdFjUyST2HaIW03qZpF8ugnNObHEw1xW39uC9289ciq9euXD/qeXBYb68/cD0bN5abm194Ow3/9U5nWyxLbwmyQXardvX11bhwYfP39rn/J5v+O0P/cJ7JxeRrL39hfsfoGfubE8uFHvLHckQY1vhdjcqbh+2keHqC1f63DW9deZFXa+mIiWKE60mJTi4ui8vd2kRuG8yjMq2ZI+8Ph0dzPo+m0gKlY8qGWwyHcFiUbO23ayc+BijAJpY28SAfjQJi8UySfYUljmemozOrlSnTlU7txcH83yw7FzlEioocIDa09iP/th3f1c7nnzuxedfe/nS2Y363Q89tPvqzWI0ObU+euyJ+77wyivn10qQdu9we2O9Opovc+pHRZ36w9sHNxcS62I0KXwnaVSWqe9izEfaIIKlfuTZDA73Zr/+W5978vHTDz1y7/lV1+jiDWfkxnx27aC1rOt1/eip9d0bV27u7WlK9YTf/vg7Fzfu3H/m/Kdf/MzWkY6drz3cvLXbZ1TjILi9vXXy3MlmEeDuJtl7JCDnUERj6s2xDnyau/5MNMtZM0gPmsEgmecCiImDGjKQqmk2Zkqo0aRPuY+pj9FMFSSLqglJDhNmH3woMmlsE6IxmYKNqhINXTYyjTkzqANTwm4pzIRgqU1Jo5lVlWcShezKyhxLyjFK7hWVCQKhy5qTWCtSgzMBETPQbCAIXRfr4LKKU3PEjOAAJScAHuoEsgkhGoAC5QzgybEhmoH2aimKZhHQfPxUAnKkdxn/ZggEBgoKCrBcJGIkJKRBLqfj15GBITAPf8yIxu4YY+d4AMYRswcTYnJYILJjZIQQAgAwe+eDKgCI44DATCSgObfOeSZGYMfewBESIA1v6MJ5AlDVmHoDZGZEVCwcBSZUA0BkX+TYFXW5ujYejTyaA7MMmmMPioSU1SBHh2ZmrMZA3lFR+6oEjV2Urmlju+whocTeRKxP2IkKExErYBdVlZg4sKo6JgfqUXxBRaAkwkDzRes5lCVq11nXpSZJxmR5cdTmipENPapKOSrLojDJuScCcuSRHAFlVTGSjJrN1JjJ1JCQPCCgiBABoppC6qXvJQEqAN8NIzuiwcslg+9jmOOIBKSM1OWEzpeBe2JiBkSPGHMPNNR3piwO1dDEpM0R+6U1rVjSwISmgZmOA+PmiAiAObSSA1OM0RupwbKL6sU7XjR9BotJuhSz9r10MeVqVEDuC5Yw2XQhcFGaQwq44mrPGAJTudl1sMyqEOoNTu2SDVPq5suD0aQQS01ztADdW/Rv/ON/fOelp26/clksv/T0Z154/tk3P/yOGE4088Xk7Im9a8/uXX/ttS9eu7g2bcq1H/y+7332Q39w5+mn+cpzW0s42u/I0ZIsSXp11jQNVidGi93D8TRAxOap1y89f23kdTKpmJmO5tkocXBlsTrhcTWNr17e+tQX7M6dzSazcxPgfDCfCN+e9938KJTjUMCdw53ywj3r73lbvnSpaKudj3wm4Hx5Y9F9/OVR8AtpZBxYkxmX4DEBbeXFretFIN8BU+FH3OTeM6NaFXzMGUkQdD12+bMf3N65Ovmy9za5WHz6Q3RnZ3JqHKrN4p4nJ+feMJu7nTvb081NOPnQQ9//o3c+98mt9qnu1s28/MKDj76t1rHfeNtLn/rEp7/w9J1bO2ff9vWHW3vaU3c4u3X59U994uNXrlz9yG8DZTtbk1tbXRw1VcUITgFTztnUBQRUzYYA7DmQywLHD4OspS8dkYjkptGOWQ2lZTDnHSiy82qKrJgo9i1kQigzF0ggYI5YTAgEjESzqoqKqbWaCCASdIcHZWBCQD/yvsDCs7UAoH2HllKKX1oRPfTIG77wu0+tXahyI/vSGzdrd66sfuDWrX///pzjZWt557CoVqEMKgjGey7eXIX7vvGbJg+/TbZnOx/6nXD9ku8jgGrOlpKvCq08KHZJYsz7o0n75364PrHy8q//9mRvZ6U5WFMrirEWQR2YoS21zxkVV7av4fVTuykQ1l2CLuXV5e7GrS9uf+I5utK40QQ5WdKD1N985I8t8+TCjZ20dblgM9Mkbp3925/+xMHXfmUd0ZVjoxACZGjm8ejGu79687u/KfDKwfs/tvdff3PDQzSR7PY2zm78jb+0d/3m4j/9TNUfuM3N8qHHn/rgBwtfT08W+y9e2XrDA2669ureVmt21OeRuFKga9uwOimkKmV+YW20vjL99Isv35rvM7syECg0XYMVbK7W+4t5ynp4OD9RB+lzY74zWRsVa0yGcRQqcDEUhOLKBDs3rjd9PEjLRUonNstqUi6lRRONsDFezbd2y5qQHJBGS32fRRyieOeAad71NVOfcoA8CuwyzZNAlpHHR07W958Yf+YL17ZasbIaj7hr07h05O3Va7d67cWTQ5+TgmOw/PLl66xmpA/fe25xe++ojVzn15tFw2rMt/aWOusNcHHlVgjFagjS57mlNnVqeZljGQokzULm8zLnYmX02Pkzr16+hYW+4Z6Vl1/bFYUiEIJYL8jh2s6eZVsfT1ZXqkt39haQAe3UidF+sydijH5tUp4ry9NcFJ5Hpd/bnSPA2oqrC793kJcpnVxfGVd+3i1znydm9frE1eHK9kzaJfU6CkV
Z0eG8DQArrnxwc3W+aCBKUbnTpzYW8/6PSEVF6TSTWhEjojrK1jYiAuAAAjK52CciQzYQA4QspqYdm2f82V/+8X/w939+cYoP6ImTX/stN//NP90YAYzx3A9/33P/4uefODne+fDLazWm4FNEVBMnpedEhqKFOUbnvL1anXjTD/zV4oT0ixYWOnrnE6O3PgYvfxGigAw0Q/Aq0idXFgAKzkObQBECATOgAzIgUKRUeMbCiUE0QD9qMyiBd5AUjiMvCHoMfwET1x62s/1y8yzeORiwsr4oVVVbceMAmEFQQQ1VA+csXHgGZ6KIBsSGqEnQlDQNCxuAwbtUiCiakkPpzbPPmlwI0PcOmQmiJnnwgeLbv358ckVUq8KlKljb8m98qv39z9a5zTGD8wU71OQAUlRylDQZqguOiEGVjBBI0RwzOEY3NJZSPloWhw3cuyEro6ZRaKKfSIMlLT23q2t+BItDQABfAg4BtAyikA1IQQXMQRQAAyRAgroAR3CwgKI6iXznF397pTD73OdWXr4ZRMKJyeYlgOyBaZWsa9NKoGXsBSyStnNN0fcPn9yNt3eRTq+vl6YvfOTZcqW+tL3jITl10HWj1ZW+lUzMSNupZwMuPRH33JPnplleOHfq2uefDaFWKhWJIFegp0ZUeJ3PlybgHPUibU6gwIDzeSu9dW0uOGeFPiboTXMG52POLtBqXSGHGD3dxQoXDp3hoAQBYVJUUzMzM1XQoSPdwIwGDLCYZYUMx9mnwbCig88MBxVE2QGwKRoDASn4QbcBA1JVOOYG4TGCeZCIBnUB7uapEACUiBhRVZEwD5KoaOUcs+tRPeEsWwYAIDMhPNZA6FhuMDzG1tig+WSDgfZnCI6IwbwD51HAMloELciA8qCU5Myd5KqgxNhmQSURJEIRRAY0UNWMWlYlEqphihGJfVkSgfceCBmJ2aFjMCRyAyEJhwZqU0K+y3X+v3G2B2GHHJgRA0BOBl3Tpb5lLFUSAiKS88c470Fxw7t4p7tCkA34biZUw+PQ1hANHAJpSHfP9/Eps2O9CNVAFQbO9nDVB4uPAggMZ1LA0BBVkQgIQQYX2DFImwgMVL0jBJPjfjUDs+O/RbwbNDw2Ew0YlKEETQzUbJAHB9eVGKgqImQZOOZ3CeBw3KqW1AjBGJGRRNBQRQDJ0ICJkDw7VCVANVQDIyAiz5RUsoHc3SQvui6gPfLQ/efuO60aRyt+9QSuT3g6rrhaJe0ZUy8x9a1qWValK7CsC83Rm3LuUcvSM5BaEnXEjz28/oYH6G1Xu8uXZ194qT/cmaQIXcYkoLgxXdk93DfWU2dXH33o7M3LLx7ezOWYJuvsFZoO2fs0jzYqzJRAWd3hYSzK0ntj51WzZs2ExNplKUoXRJA0FE5M2bElQWTJwIZZJATnAsUoKWVQIGUBc2apE/UOmJJBAktlULOMVjguM+VsFoXLQTU15lAySN9J37geSgt9PJLEljx6k8b6WdcmzbeO5h/53PpOc7D9Yds71NFyY7UKO6+U41FuE6au9Bl22/S5S+M0un2laVM6NXogrD04KeqNgHe2L71+pyknI1s/3zYOEs0PDW8frI4uzJpwmCfrXO9tbx8d9cieypBiZzn2DutT0+eb5dZOuqGTK8Dra+ON1Xp/b3HYS12Wd/Z7yZpTzLmbugpM5vMFaU+pu7m3V1RjS33lC8jWZhHFTLYGlVhRleQDGsK9J9dhsXj90sGNw1iQq2vfGc1bWRmVKyvFE489/FVf+a7f++RrW88/E+fzeyaVA+2Xu2u1vXTjUt+vHNx+9XTlA1U39o8WXe/K0BPUxeho0SXbd94XmUpAp/moazw7xywo0brJSrXCo52d+eZKXYQwOyzu7MPl97/w4Im1EdMbn7zwplX+6GdeKC241XLzkdPPPn97PDnRzrazLK/tXB754ur+9ZWN2gVcnZRXr9/ebmKn4cSYd3Z3NjdXYt+1s8Y7f7wwCijZPGJZBO4sqeWUUlZm54gJNSsbQA9yIEkIEanK2ZOx4bH58zjOy6oaVbNBMogpaxYRCo5FVS0VVa4JEIyYk/MCigQFB0cBQJ1I7lpT63J27Jplr8nMSJKGQKCoCsul5D4qQVX3pMhZYptyr6hQMNnA+zPw4HqBmDIBcbZspoix7UdlUbNHjiaCxMfTAnLJFAjAwDHDkNd0DgFFhTKQKQuIKYAis5hIFjSEu5q3ghoCIkkWM2MiIjLQ4dHLhHYMOVAgQgA1BRPmoXuTiXj4cUMWzfkwyOWmQuyYCBnZFUOhJvFgz/REPvhac09I7CtCVE1FKIMfD3QeIgjEx2FkQSbPKKIZwCOIJ2ZHIklEmV1OPTGHItSjsS/YoQezLibvC82GIKo5AyIM9m8wVQBYK8xZEmMgMzJmsphjjxqzGKAxEjMRmJjScNiazQCTSvAcpa+LmpCss+VR7BZi3pbSpmy5S2xsZjFp36soAoqv2ZeuV2NTFiwCIECSGEoejSvtutijClgWy6hkiMaOjExNnUdmBFE4DmqjiSri0PkHAAVzYQiQi8ILiMboCasQ2hSNTEmBU8cdTVbQu5jJsubU9Rjb2BToENe6ZqmSp6VbzlJaomRQMUfIZp4wON+DclEiE/tAYoVjUi0RBNUMF0m6KIVjBVQyNSOisnYUrctHs13sj+aQlpV2ENDzZnBE7IsqOGT0zIYlrYpZyl0Yjd2od/Wo6+ayC+XIV6FsF0fN3vVoYZcnCwmjc/eQg6aLvhjXG1XK82qFF7DDZ8sJnlnZ7166s7d90D79v/7Dt5/d/KZv/MpP/s5vKZLz1a4mQGwrpCAWwTfNFNG1hqTeqFTjJDfaw1SUNpnS2TMrpy9mE1nunh1tzJ69ynf2+ygJdN6nMcD+pZ3tL3ze3vlVGYs4X0zKwDE998Hft5koOiAJEu584EXX2bQqmq7XCc4rKtXlRT92tUWpK+8YRJQr6lIWpQgaLRr3S+zKqkraEVGxWppZ8/KV5vZtBRqXJa/4Frvl+gl39l3brx/WqydXV8beufmibdn1p95w/vzXzLfuFONVa5vtG9cvv/iZ11983lE524/hVLv2wMivVOsr9Yd+7fO3rl3Wpmuicw4qz475/IWz3hWLZdO2y27eKAqgIeRRWQ4ivAIV3pkKqvmSSIGA1DTnBNzHdoHxgC0zF0RgIZiZWfIchDUrYEo0cKrRRAQxqJhhb2qx7w1JEaMSRQtOizYW83kdguPChwn5wiEpKMUuQZKc9W730x2R+x88vb9/ebeu0g99uzugV//9f7yvT9NiDMkdbvXTaj0BZwNigqBXQB/4y3/pzLd+A65Ma0U4NVr+k787RVTExXJZUJH7LJRGLsyW3WSzPv/2x/jPfutilU9/z7f1z7381I//i8dvvERxSaJqWePA0vTOT9zLt5fXfnd97olREdbGRX7/U/P/+v71aoprp9WQ1DhIHNePf9vXRb2+/Lmfne5cB8tSUAc+LXLiotZTvkoxICKIZ3zg3jOPPtCuveXkybN7eTb5779leV9ZfeHq3qc+u3Fhgx598+z+0ebj75YXfn
X+6S16y2P45/7ifZ+479l/9k/KF05vTO6zerNXUMBl17NDLLgMfm2yZsK7+/Nlszi5uVJY7PrWu2JSF7v725pjzdN529bUCIpHmh0eVKOyomiKGvnOwcHptZX1gOsr4faim8/7pkkn109x4L2dGY95hYvKESTdaVvv3XK/cbJcL8uoqTdhHxSZPDE6p9FEHFPX9VktGOfYglDg2iCN2T1yz8ZI+2tXD5aNd2V5e29ZlzRiOj2p07JRHzZPj+8su8Oj5gJX5Zhd5Wfdcn1UWdfdub3bL3sfeHvn4FrbdhkC+lEIk2l18+jAWeE1NTnOm6YehaqsVNOyaauST65XN/YPdlogpUfOnUm5Lz2r2e7tIxCbLWJZ+Gnp7jl5+srujrrAzPvzZQ+SBSdZLq5Vp1bLpy/dLsqxJnXLHmJeddy1/cHR0hSXbT5MqpBXfTUtfbuczxsMzIh6cjpp+84Tjtcm/bSczdq9pmP0AtTnPBcoS2TEjXHR584yLhfyR6Qi0zyuq8Ve7uZJMsSlGhAhdzmqWuHJMh1vu9gGNq6qdR24mk7fP/7Jf/WPvvVXPzYrb5dnVp84Of303/tf5NJri1/7yM7J9518o/eTD+/85oc3ykjMEIquS6FwDtUH7A7aypeHam/8H//WtaVJe3hqgqR9OOF343Lj4XswR7k652UC6QGRh3qiYWdWlxAVTEAEHFpWgEyBCiuzMZCBU2iWAAjGABlIwWQIhwy5eyACB0Vc4M1Xsek85DxAZNCpCjvKEtGSMRmASTb0oCDHHVdiKmCmCjxs7VGNYNgjqgKIABBo1ixqKgkAVaKQKagagK1U1XvffnT+jIwKU8rjlUPwk7OjyRsO08eftbZhxpxj1giQQJXZyAGoE8vZTIc6ksBgpF3vkU3ABEEUWMZxvvMff+LUyp9e+DF3o4rL5etXVwtzd24e/rtfWJ1lMA+KQACDzVIQlAEJkoAOAQ6GJEAGCJAjDKpA3wWgi02ED3+23juChYcuyFbPPkAIoNb3kY3mKsuL506/995PfeBjtRR7gs8dbr/rL3/XqDn61Ac+89AbH//e737X/+/HfzpXdd3bmTK88bGLV16/c/bUmS9eueVDwQ5RoU9Nyqigki1Uo8ODuYhpFgPIIlVVOKam6RezRgyWXSw8k0MRrUJV1L5plsG5ybTaX85CHarCL5rOF76qyy5Z7mNVeBFT0Hp03IBGeLdyHUgUFEABxEy/hJ4e5AcwEzNDlSHthASgwwTC7uaazPAYcQ5EhkOdOsJwPnXQVBAJaRAtB8+LgQ0AURlCw3dFE0IEMDFVNQMIiApAjB7BMaKxA+1EegFBcEPfF+GwDh9W72CgSGogx7crCB6rIDjMgMmMlIiigSEEBkQTtdxqE7kXdozIUJnmdGyosbuSCiI5hvGoLEKJQCbWt7Gux8TggY6ZPIhEiIPXamjwGqQadmTH+TsDO7bqwBC8GyjdqBoRmdBmyz3nSxRMWRSABhOyiakeO3PwuDNusFPZsetnuApfEoWMENRgqMBWOOaC33VzHQtDg7sH2B3Dg9RIjAFVwUyZSU0GAc5MnR7XTRMhMWUYQm9YupBAxUDBTAQRCe1YpDM7TsoZKpgBih7b2eBYZzymmQ/KthIMlqnjwxsODYEQQBWHPrVBk0Jkw+PfEBJxtqHLRsE0A5JBMkVBQHRIbNndjSMAAPuwtjZZ26iB9dR9pzPJ6sSdPnlivH4mW23NTtL92B1CGYFU2KrpFJCZK1cGBp8SOUQikyShCDHakePioYf7Bx6UN767u365+fwn3e3b464petk6OoQonnBMbuvKweE+8biiqmyCtX3XiYB1o5XRzqLxjCRJk/gVKHyfE3YZ2CEVISpq4dOZe8Lqub0XPlooODIAlCyoCoZApgDsg5p0XQfgQBUzoMPxuOwXSSOtXXhoe/emylI0uXERwec+elHthMDYUAW48FxQt+iAiNQcsLaO109oidOH78+zw51nnz77hkfTnrQx1y7sX7qdtUnCdcHJY2yavc980od2MilKq1+7dPXx6vzW/tF8EcpiakfN5V/4qbevNsVnZtt39tut/aMumquDrOqBLlPtgOeXZ8JxsUjm3dWdw7Z3ytgL9UfZB5ierG4cNtfm8XPivvy7vvu0X6uvvHTn+vV5L12fNUqGyMQaeOPMibzs9mezbNq1M+l8FKGiykB1WaYkXkyTgOVJWbznvgdfv7lz5eA2mXOBC6KG6PZO61aqnMiXBbewsjF+4k2Pgyte3T689isfo3axMaZlns0Pu1yMntm/meZLXxU7B50XkOBE3H1nTvRrvsuxGk/nB/OF9KvTyaLtiqLMZoJaFuOuNeua1bXq4sMn9+4cjnwo3KozgN5OjgOWMOGyi3Lz4Oh2NxtP/Zvuvc9F/9KVm5974cZRq6tel4eLzQnbcnHAvd+oCy0l52s39w8bXVJx4czZkx75aPv05ukrt67dfeACALQpO3KhcI7Yq0qfJJkQiIlRAlIgELNW+ka1RQjBOSIQi6aKlAGCARGgmjCpcVRMClkABBEpq2YADj4RK4B1HZI6BDFFcA4ZxCSLphy7mABGoxJAEYGYk0kZHDrsmx4yQ6ScMAOmeWQESRmVGJwpqFmShEiEoKoiBmZGAEQgCmCErFEBMQMiWTYVUTYgADJIYkToCByyJDMVM1DLbCgxx2zJ1BPlAT4HJHcfncddkEiDzTIfrykUAYiHQYsCwZCmJUAbwtGOXPBIbohas3OD49J5P8SVzcwVhRkwe3LsfEAwJmbHQ2uS50CAyN7QCNhQCZl8YVwgQW57AkQaOr0cIRNxAFKnaiogxN47n3PPzgOiqaKi9jqdTE+dO98s0nz/SFQdO9GsAgpkRNmsBGA1ycZg3ayrx4WfsKjlKN0i5c60FxNAZpFMwwsJsc+iCIAsSs5TUTjnnLGaA5XekVMxQm+JD7uUYj6cd+SIswEhOBouBkTxJTsCzUlQk+cMDDEzu7Is2i56oCYlyIpKYKDDfAJNVMmRGaCqGoooAqEhIskfBtCQwTRLMvCOekumnFMUkKSZAqu3ovBae0RXFWW7XAiG3qCzPmMsPHKGIjiXjbJaD44AA5eMCH6IXHtCj+Sd71IqkBCok6zMphKTJCBTK1VTFHaeHYaCiULtIbZtVIsAR83h3rVmU3S0kcuVtaIuOGtd1GiYNBP4slwhKQ0FnKCrXbUsyg3pmq7vA7v5zk4Ziju3rlmvSrYEWTt1fjxdefraa2dOrW/W05uv3ygmxfrZe7/5nX/8n/zTn4pNXxXFR1+9+fyd3Yv3nv+q7/n2gzu3X/mDT1ldXPVr3/aXfvCh9TD/2Gc+/49+fEVjBWitldNRHJcHK6OVd36F3PvG9SffXE9WZ5ee3/kv/56/+NI9XW5i7gzUcRtTHbwz2PrUB048dObUo2/rD+ZxPlOVCxfPfuS1197xlvtw2d589YDNJUvJ7Hawh//yd64+eGb/Yy/MPvTxsWidzXIPiNlxbyaMSAiCC6/h3k04fZ5gsrh2ueqWkjVFQwu6SOPVqjk6Wj1VN+WSHnyyD
xdpkbp8ddFtSV9QmBYrVSmyvHrt1ec/f+b+96xNN/rFnZ1bN8ZrZ86eefCTn/zinds36iuXysn0xhef+vAH3g/ae4CmTZ4orI4FMSdt+xiFemFjx+SBkQiICzTJImCASINpEnHAdSVzmVyUGDUfpXhQMvcJ6hCAmZidckBLKWvM5DIISpYkhsgpa6akJgaoTGADhRLUYNlGZijYJqWvytozgGMqCsmSmzklRmaku0Uf73hf3P/Q3kufd9/81vB9f9HtdeXea93vfihIZ9H8lBfSRRHAwvoUIb/rL/wPa9/7fdegN+hOs7txcH0dIfWSibgIJqgmRsWyPrHyXf/P6Sp+/F/+43ceHeHGqedvbz96/xue+N4faf73vzNOIrnDwBA4S+8Jib22cRSVy5Xl0S46M8Uq1OPqxHKZWCWXBaSEdWVQzH7l1+Jrv3/qhStVPVpAbr0dzNqC0ihgcpzFIEPamL5+7/iNf/PPx9/6ta1//EPVBx87+33fvTN917zJ+1+8fO7kxvr5tddbXs7mCWb3P37yxmfkTd/5l57Lox23mu9fWxi8dHvnoYP9FYZuNg9lkUSXKXsmFR2PC0IrtHKhvrN9eHDY+LqqAE6vrETtE4Seebdb1AFX64DZ9vtmbW1lnV2zXMQqgEFRFUtto8aUImLJobq+vxUZrc9VXc6bNjZzD3lU+B6LpdrYY7tozVHsE5kLFKqi3oltn3siVgNBcAVPV0Kcayh9btJ9a8XE52u35svWbfWoKOAwA2SEE6P1iw+cfeqZ50a+HnsbT/zFCyd7ycuYxuvri2XrBPqlCIeq8tu7e0owCrRacQU0axsffCiG2TdQ8C6EUNAk1GfLemdvmZdpbVI3Ue8cyMtbhw9fnGxu1LtHR7iMZ8Yjy1Y4X2Qktcq5q/uLcRFOVCHN+zUsHjw37btFN2uqaqzer05CXHRzg1mnXdZFE1WVHC9VxlU5oeKo62POHMIkFJLyzf2uICtFp1W5GSabLp5flzanRdcfdjxrtddUBj8qeEy0WCYp7I9IRQS+6ywlAaOkKATOkWYxMR/IVCejyme/N+/QQE3I0BCNcbHMaX/vTQ+df8t9537qX/z+13zZW37uf/4Pj55+vGyujq/fuf+7vvloo6i//u3t0aXZ527UAduYHXtEJ6g45soVrpPlQbP/0rXVb5+Cat1t51/5uZv/509OrvewMYEve5z7A7h2C7ICEZADw9z2rvRgPQwbSiDIhuAAPMx76LKjKA7JE1QBAFEVIIMYiN7twh5iVgTgoDO/vQOGlnpVgECGYoGEjdhbyhITFQwOySGZWYqQMhpF4VAwEaWkzDhYTwD4OB1igKCCagbkWCwzkXY9knFRaBQuprB5P/MqBh9WpkuFVQjjNtmzz42OjnzuhiI26KMjNGUwHgqoKCG6Y/O2dBHBBfIWE9WeJ4XNl1g5Ilz5+GcXV79oRREXRhm95n4xY7PVVgARAMExEEASIARiYAJiQAAQQAJC8AxqgBQ7CSnBSgGdABFElS/eGBpUAZgnHgKkiN4XAnTUzsuLp+sf+5+/APFN3/S9N37xV155/yfGDzxyfX771q1b7p6Ne9/zti9evrrbpVmKZ8oSIj778jYpX262EGC57ApyUVSSpi6bp6MUz1y80B0chNFIs+auX0bFqiiR9/Ybdh4czlNKptpbztoEaEBzzrUDU20tBSoOjpaImCMCpwxkYIfzdkhXHhzmP5SKAJBITU1NhnC/DsoPGCAQ2tAVc4yOhoGPY2Z4DJ4ZeNV4HHJiQNahgkrNCMHIdEj72bEUNDhr8K7JDXDY2yIMOcXBIIOYwBwYGGSDDEoIQORBwMg5QoMia0/QGgoAmjEROEBFAVABYFAAgaGDHsCMCQmAB3IoAzIiQTIFZEXIZr2qIJJxKxgNJakjLRwTgGUVAR264AfwgeNegUJV1RPN0C8XtL7BzGrg2IEN7Gw+VjqQB7mM73agDTYZg8HrM2g8dBypAiVEVejaVro+UJE0w9AEF1yM+biDzo6jYDA4ce5Smr7EB8fjnJcxMaHx8AMJUY+B02p3lRmzY8ai0RC9M0JBDUhgmoVIkUCYABBUTI/b24CYDM0QPbGC+sH4BDhIWYMHyx3TtAGBhourCEO+EfD4dhAd9CICsCGWMehYxxo0DLcZHp8zIjBjPA7BJQM0EzU1EMNsZkAZjEQHH5NzTs2YUAFyVnRDyaF9aZfsDAPSdHX1zD33n7n/3AuXL2+cunf97H2IjG2rbkHgC6cIaJbKcmTSG6gPK4BFVmZ0AEyUfe3BMBSewFhVjcN999DFB+Tik/LKs8sXn1re2QoHh0BsvS2vHNTGoShN02y3DZXLvUhWV2Gbkna9K9yo5HRio8+5390foUsxNksoKgIHmuPpk2cPl4c5JSpCyskHp0Si6tDQmWUT0aLgnHIftfZlzEaqEJOKdpgO2OaOywX4bOR6togZIOp45I1Mk2Sysqqag6amKoMAGfqqPnVqb2tmKyeK8UM728/O3Oilzz/zvq/4qt3PtcXOUR2ibdLGW9609eJVPn/GV6P9F27ef199ePVynVfGzl/6zG+6/ua86xprl7Ht7lzDzh9t7zUHqWkjeBfbyLeuJQJXjqLofC/3/QEAiWKfoyFrtj4RhBFOqi80B7fSeOdg8jV/6luvX72zffulWXtwuLe7uTq97+TZRZ9Xp6OrN29fv3G7posXzp5GMOHxxoOPPPfMF017cCFFiSkXQCdG475UzeoJP/X8M33XGvNkvXj7m+7/9Befzlop23zehqKqRqOV9ZW19dGNSy+8fn2XpFqf1KWT1uX9nf0yFNnnmVrN6Lt+NOYu297ebG/R3HPmHGHev30DyiLFDGgHy0zgFjEhkUMyVSJ7+OKpvo9uRuvFyvVbe2VZTif10e72xdOneeze/o7HPvrxl06trvYePvvs1SbCWx568G3vedMcq9/74KcWXXdife3ihZNHfZ6n/v6z91y6dm3e9U3wvQczaHK7QMYU9rb3KWUXaPPs5pWX9wEAkRSwjdmRmEgbRTIgKRM20qtj57TP2uYYAYRJEXtGRVBgYe/NdFDJDVQhi0rMqcsmAEh9ljI4rh1NavUhIzhPMeU2J1VhI7U+Cfd9Tp2AIjF1lAnkblU8qOS2SQU5AL9oct9rVmBA53Cw6WhWNDIgUSXHwzuMABzRccp1wPYbtH3PVeVL78QcYmp7NhQxRUiqiORUhmx/FrGURzUDoEY0A0VOZmKas4BCHqK+PNR7DjL+AN8HFSNGx6Dp+F2JRPClQYaYuQE8NMzayHvHTGbAYEjIzjFCtuRcGBQuYkZiImJmz0FFkJSd9+xFIjsWTegcqjGVRIwmGtDQVCX3KZRlcKyi5EoyTdI5dGaURZkdASGyibOsDDgu65MnTt2Bg7bp+2wGBqbk2DuPDlUzA1EyQnPkc6dLaXLXNNI3i6jZqWjKwMgysNMIkkRzEMoiqrJzkrVgZsDesChHNK6YNXdLsiRqfZ/6NpEHq33lAidrY+66nE2DIwnEowAkhOaQUtSsal1GywzkyYnXrC4N
wy01BUU1YMgIqMaMOJCiDEQEANXsS66ic2dPxuXCxM9ji9A7BiFUj/OckRmCS6RcVlQWTnyKYCLIuuzb7CrjuuAqgJYFrE1H/e6inXcScx1c8AxZHLvSB7JcKmgU9kFNO5E+JxzUTTIFNUQBQEdiysB9H9W0Ynjwwqnnb2/tLDvXYVmRXLsxbdrNc2eXHL2spdCvrW4wCBoAOUZ0jlESZuMqWJh2YeEd9t2yW+xp18a2AeTSh1LtYOtqu8W5y83+7suAJ06dzuXGzd3DL3YvfuPf/Fuf/OBvXH7mk2949+MXzjy2tTP7lc+9yHD0lm99X2Hw7W/4Mq5OnyrC2a/9lqNXLm/9xi+PmclsL/nZxtpb/97/zGfuEx1HcJBy6SqE7OOCqDjoZUFYl+xK7dq8fnK0vPVq+MTv+MmpqGF8ZvVgdoCbZ97xrd9x/TO/f9Fj0/YxAnmII33zD/zZtW/8k1UxLlafrEdy+PQX+ltHtS+lR3KOg5Vk/ayL6/UbfvC/1ze+t7zwqFfQ7Wef/7mf7Z9+yqHEpZRJ2mXTJTs6yBe/81s23v6VLx+99NFf+28ro7VG85u+7BvOXVjbvXp1fvvSYv762oV7au937zwHaevkPeX2nn70uY9Nz7jR6Oblp3/nqU8/f+35F6RvU9ImS1UGR5RjkjZRjOgLEdIojhwQ4HG2lAgcoxpZzklzZkNVKYJzFDK4lGLXNSntelQAIsBjWROIAJicoEPQnCKyJxeG/hEBGxonCVnMCKFyhKaAEIGXYkXbj9uln+37elSUFZe+l9LXY7OEbW93paLVd79z9urtyWt/sP7AWdndiqhnv//ryp3r/WdfXHZmle9yrEarfSvZQb05uf7qx6Y3v+7M2lir8Xon9c2jsmUTo0AKRkUJfePZloXQ255sT9DaM1+x8/RrG6fvPe/c7ic+s/yD31+jCEXgMI4WiZBU2qMlmCu9t65nWzgyhTwaj/dmuStYVVesd0ly03Y9bpw50/3q72z4FttiNp/PnODXvnP9G75l64Of6n/jF1YI/DgsVfefePK+H/2RK/uL6dXFg+sX9YXZ7X/8S4enf29lWS+ee50v1Nev3+4fG5177M/svf7i7qUr59fKqx/9jfG3fX9818W3ffXf+alv+Kv0yNeNz9zb9g1I8MHZrG1y9KJnN1cP5/1C4saZlbBS5OXRaFSgCahSzmPkerp2s9srCpKEh8u+dNyhXDvcXnPThy88fOtg+zDOm6MZYtIQTOnE5qm11ZXDwzvjUZld2mtm51dWqcggbuy0b/sFuNjziY2zB/O5qSZTlQx5UZQlRw/IGdEIpfCtp3It3Llz4+LpMyW221stcLEkWaJ4wozqjRPYKwfX1lbdyfVJXiY77E5urhKpR3dru712azZa5cLrpFq7tTuLtnSOS+IROyc8j/2iyStrq2FS3tneAjFi7tXmu8vyzMnl3NCP9md9wSX66Eewn9sbR/zAyUpmXTUaSe7OjELti0DuaDabOHdmpe5ydiVPFCZMqW/2523TZ1GqWM9ONo80ac7lan2r7TPZmH0dvE9pUpXjsjqEFNBz4fcXC40auDy1Pkb2WaDrsyMaI64QTVRPjevtppub7bVxFvPmyRWCWFj8o66ibC64UV0dLhbSJ2Wnjlg1aXbk69KtjQrAle39G2TmBgwNIgBmCteux9pu/Hd/4o3/4Efe+btv+br//Rv/JN7+/1P139GWbVd5L9rDGGOGFXauXCdHSUcRiWhAJCf8wBiME/a9GNv42uZh44cD4Hsx5jq9i7ONsbGfwTYOFyeiAAllJB3p6Egn6ORTVafSrp1WnGGM0Xt/f8xdus1/VbW1296rrbXmGnOMr3/f79t/8QM3V9euPbKZ3p/Onbtw4YEf+P6rf+gH7+ms4NysUqtQjX0vIJ161Z1RceULnx8rNByjtB/86Gff+tZvGF1o8Nkn4SNfWI62+0fud3euws3jzZoggGNWYlKAgoQcGHJKaALBARBAAo3QqUGFTOC9aYQsSAzOnboGdPCCIBBBFsw9FAUEDhQyYZYIiGhqKSMAMhoYMZuoZRmU8BjKUTnujg/YkWdgZwaYRcEjO4YsoALMpoY0ODRYh7EaKOVELpAwdNau2ujzpB6T+en6mD/x6cUHPrLRKgImEU0akBmRPCC5LAkRuChyL4akqr6qIBpyAa7uITmJVCKwQQ/FUmi1BD8HAegNTEPJwAnIn0pmYKACRGAC7EAU9G6FOAHkdGr2YHOMwAQxAjLkBMVQKm4mSc1SFu9CFlk3aXCe9USTi+dOnv7EHTvGC7x9f6rq1cEXXn/j1snmg/et7xzGxeLifbvnAp7Dan2nM6bXXr0aykBF8N5lzYRoCL50AlBjrTGvZnMwMZPBPpJBhI1Kh+SX6wVhBoKulxR7YKEkZnAwX3vnCu9ibNk7MysQY59ilroMauJCyGruLoJITzUCMMPhsK1fpBANg9DT3NbAc4LBtaFgjEADjhpg0IoGgswXoTgKSEgIcurhO00qDT32ioPDhRCJbOirNxj22HjKn1Y17FImIwMCRCSjUxSEmRmhlYQdQatAjAGRCUWA0RRRDLKpwYB2pqEBenhSNGQEOs1MAQ3RJYAsJlHRk5glEwFSMTXr1QjAO0SAbCCmiARAfUrMQQHJcc7StH3XxWk9JTQiRANRceiRBkz1Xd0NiQZ4Dwx4Ij79EAAA0IhPrUGGmvVkdsJUEgxxPQEgx05YxxujejpatS2eKlcIKkP4zgBo+DwGNefuuPtuFg9B1YaOn7tUaaIvakqK5MBAB7QQG5qgokVMiuDMgzHY4M8iRBBwRIAmZmKDQw8Y1Tl2Q1LjFPkBJoYD6HzozkMwGHxnMMTTDMGGNwoxEKmelridXqenCtTpI2bGAxncDMyyWjYDMM8kqp2hkhEhM4GZczzocGIAaMyYRLKqwhep1iDaPfbI4+/5ykenZ8/O2+bRy+cub58hcn0yzdF7NnaTzbFK50Pwxdj7sWYuXUFcARdqMUk2MHTOgJJACCNMUggTVUk8XnisPP9Ae/khufZi88xn4/7Nfn9ZFz7l6K0vKTNZn7NljVEzg6L5IqxBXl+nB3/nd9Wunf3Xfx6U+ijqA3nq+15zPvzk+0LpiwDC5Cq/7lsXPFYk7GLqimmFwov1Ep1WmxNbq6iFUUixzdaWGwh4p5qWOWulZe5XAFa54EduBUm0qzaDOe5HWE93pZPF8iiJah/98f6MuUnHL3zk59VVC4b0lgeeoe7OFN7x5e9aXn8x0jpsnH3u+pMPvvv30dnxFz79U9t0BqtRXFmeFocvfyb3a8qU44qCKmqcLTvlLAyFVzPnLJ3cSUAGEmPum5S6FKYeybeWegJfFzdX6XbXzPt1LkJ5/tKFex564fVnm5M7+28cQhkKx2owWy/3zu7t3zlyTt/+5nso8urgYIrkyo3ZGyeXN89mFD+dztbrW9evXTy7uzfdeu7K1VVMkrLz5EYBgUaj+hNfeD4qRVHHOjEcO1gdHkZNt67m2MaK3Nb5qRM5ma2OUy8CbSdts+iYOvZj716+cat
CcuQu7Iz69njdteT9dDwC5pev36icN1HvkIMjg9hmEguat0bV3mTn1WvXC/DT0Ygr/dr3Pnrjyvz6/smzT72w7d2N430N4eGLezeOZ1ff/6nf+cSjD973yCOFt0nhK1+7YGI1+xdf+MKtk3kbtSiLc3UlneTV+uWTxTsff/PRwZ1QTPqutbtQfGLKYqiGRrlPqY8mEBV7Ri5cB8YKSa3PCgBBIQzRLMYhSCVmTODJkqhkSzFZUhBBQ0CsCheKoM4ItMJsfS8hCJihI+AB+pO6zjJaMgJgxtxJipmZgwPvUEFNQQZHiAmYIqAqqDEjwYAvRjBkA1IZ0lFGRJqFYGC7mWdSlWGdpaH4nHiIrBqC6kDfVyRvZiIJhjlDGuQOK5gUzDP2KXY5EjpCBARDIyTFU3LcqalzGKUYMCMA4GCtvttHycSgMuDgmJzzwTmPOHg0MYSCiADNWWAkQ2B2yOx8IIShvhOZFHSg7oExkTckcoEMFSirkGZy2vSr2fr4+u037jn3lvsvPQQ5xyTOccqA7FQUzNB5UwI4Nf6aaBlCHUoDSEnEQHIyEQZyRM5gqE4DsJSlS5ZSJg/K1vVtyoIYENB7AAQTGzobCNAckSNWMJGAjFF8wR7JuWDKYtAs+9RnQgwEXLqo2ZeBQlE7L/N534olIufqkgiid1yVntRQYbVeU5ICHZErCyeSSscWk6AJGDKKiCkakgzO1sForHcdq6fTJACA6cYmbUxZ0/HiRCUragviAjIJBhYnwfNamhoqz4gi5CE5qA07RQSD1BReJoG4iZQ0d8LszBQVPZN3xKjOzJn1KRLjICt65xU0OBSTmA3VRIwBAhOYMAKkpAhbI/81b33oY8+8dtBk1eyIcr+a3X5jNTtebk7r0Ti127V3DDAaTxBMsgVy5WgMlg3Aez+IiY6pbRY99Qqs2YKnCTNrPpnNZ8fLTnHVrqerOCrLfDR78ZUX5I2bb7pwZmfv7Fvf9rWLm6/8ws//G2qOX3z9tbw8fv3ib7z66m0qbONtX/Mt3/09ly5PP/wP/vGjFzZ33nJ+44E3LTuZRkDOq1k3cnjunsu3pts8KhaqbtvrKq4lE1A1cmfPTe59aOv288++ePwfpl/9Xtxy6Lg/nOW1NDfaWROjkoIBm+3Vu1/2VXnr8p3F0YHIY3/0O/p7N9ynPt9+4c64BGY8ma3OXt6cPnovvOnx/KZ34D0PHkHRr5vpzoNbX/fbT9a3q+PldjGd3zzEAunCdnjTO042N3/93/8DOjfdu3dnDJtFOrd54fHpmR3Ubn57huPG3Ppg/6mDW9frcrRs9dVXjq+89MZ0s3RypZl3h/vHseljElRjIM0awbDT2vNq1RdFNA4D0dyMRAzN0EwsD6ZwEdUkRKiAzI6cA2DICrGFGL1HZ1YyI2SVyDhghaIm1azIzOwdg3O+7xpEZCZUGNrrA6M3VEmDCC7RIkm7WpcOi0VZF4H8lNmzKwAdOu/kdIR89eqNcRiP643jzz5fftPhUZm20/zqK1fPJVqtU98ZoaeiKApEi4Hc9OrzN370u2lv75En3nvn+cPtp16s0ffe1jGx5xxTSDYqZCT7i//wf2w8+sjlIsJnfvX413+GV/PtLk0WfVmVKUXIzCXn2CmaeHJAQJmcnKQZ1N2U+KBru+/5y6+c2z+Lcfmzv3hu3aAgVjUFKLvODGhUknRNn+L5y5tv/Yq8pP6Xfm7ca2xTdGVx5tHOj+D67fXTL+9iyJhrZtxvheH847tmfUgcrr8SPv2bey9fsU/eiof4yPjBjz1/OOfbTx+80k32Lr7piQ9/9DcYsYtZojqVyqH2WoUqxhyR5zmXElOMsemK4DqVVdPUrtTVypla1pKDLwC1mQTM0Xt0L9y8c3O+zwFHjCZWB5cEysCrdMJBAcyAnJO3vfnRX//IL4wmOwZ5WuPtxWGKW5sRSCyAJkdtt5yOtkDZiSFTr01d+jb2BZSKsru9UZewXmdfT27enB1nVU+l92wBohTOrZoUo13YGu2fzHbKcrpRZQLweGs1ayRpIh/K1+7ciSkRYFmX2qaUJVTEwU0YZ/PDkOvScdIoos2sD87t3zqcbmxt7W6X9bgo7MaVa3m9puCPD5dF7FwOOaWc7OzGBMgmpTMNrnDaxzsCd1bdY9OqSkI+FD6I4Rb5s+PSd10AbPtWHVZA7NAn8Wbnaj9fL+axJzAADRm9o9IFEws512UVNHsGNGCgwjnneNn1e1UYq7LYPObZrPFm07r8n6Uiwa5TyGqILsA4+Atnd7tZM1sxO7AuGqe989vFyEufKA+OYlURIoreP3Nt/8y5q1/ybe/+wEc+8fGzv3NjY9KcvGv79Rv/+Yf+8mM/+6vrQuCRdz385d/Yf+DXJcXJ7s58GUm1ENe2q4pdGPmtz/3Gyd/+vnPf8O5XFssnfvTvHIc4ObmRf+c3hwYPf/tX3/9jfw0W1+789L89/Fe/sKXKZuRczua2S+fHcnBESBAMYN0WapFqj1yU0Iu0ynkIcjCIAiIwAwCIQCAggpwBC1AHgqCoao44d50bVcpIgswABqmP6IQRkR1kVDfhsw8sju5My43VfF2NnamAqS/KNKB/grcEFCo00xgdksYIDOA9FxWs16B9bm+lzz3L7/xD/e4EE2xhzx/+tcXP/dft3rKowLAbY+dZpJGgsfAoviLMfXSjCmIWMfYsKZsoITjvcrsq6lJXDSFRAYAOlu1pcy0AoAAjYAZHwAFEQBA8gzF0BsCQBByBCgCAG3qxFGTg8SIYQdODZ0hJA/K0Bo1OvZyYS+YCdmJZtQWi+aL5+Be24tbzd15Zwej3/fnf/99++RPiJ5H10UffdO+587/1wQ9wlrc9eDEdWY8JvG1v7Y5G03P3nHnhteuruCiYs1LXZxQtGMdod9q+CC6UQUbWzlZAcHC8GpWV9Muu79hZF7MYEXDb5lUrBmhJiDRQOx1V7FlNvPOW8igUXdchghSFDYjqU6kIUE/NZzrYUADADM0AkAaTPZjdTTsBGCMwGDswNDYQMBwIxwoDbBkRB6c9DMggMCJAAgVQRbOhIfQu+/iuG8nAkE43wYaGTGLGAKKKRIYkpk4xmyKTmXrmylsrGghB0QEwDtqmOAAlDEZm0JtmwIEhMeCWEIAIjCCrkWIydciigIiEDsQUzDFqVgNLAoBIZB7JMaiaJlMYWlW0XXajUQkgk82Nbr1ezE8293bI2dB2b2YEjEMyDoFOLVynJOYvEgTBhlG3EtBQxQNoorBo1pLUOw8qCENlGBGa9zwe1UVZLFTpFL4NQAQ68DIBYThADJ4xBjMbWrEQkckAxAztFPatQ/McDJa7ITqHCmYIHsF7yo2weVEQAYNcO/IEDMCiwVHpUQ0zWDRDAEdABIMgXaNmhCxDZozBdIiVnWYDFYjQMQ0RNGJkwMHPhAjMpHCXuX23mQjMGFBh8ETBcJ5UNQFVRBN1CGiYwNQgGGYDb5jVFMBs4NWDmBpAMlVVO41Qwvn7Ro+949zlB/cOOnTe7V287KvpqicONGbo++MsfRm8mREH5iAqZVUyKjtFyD
Ens2gUEnbOlVU5BnWuGllmxELUEIg4FPfc7y+dzfdc1NWhNMt4cjh7/dWwPFkcHHPMtM6EUE2LXqgRyKtYBDZXjV3I6xdmCyHGonBqMF/GwKyeQTMJQ3Aq2qSkyIAAziA2hJgsJ8lhVBwtV2E6albHwM55bLNW25P1elGkNZoaW58y25AY1j7UR5cebUeloFjfudjmRTc+f+7FGh56z9uoXd++dujG41tH+NiXvGOyMX76Y79RBn3okYdf/40PerM6t3p01D798fscnFm9ARfuu2e3Xr5xvBFt1cbsQfp1EyO4qoPsLBOjmrUGCcExYdaQlfoOu9j0kpXAh2TQSZy3nZblkUkrcKfwJ6UrN6f3PvDAG7cPj+/c3PAKoA+/8/E3rt9ZHh7fPtzv21y9cj3G7KsgV2VUVBWjxFRNO1WtSmpiXh6uynG1u719slrfODhsU+qieOeXfRxPq93NSRewj0yFm7ryOB1PCw7Bur5frlpfBPa+xFxxuW6OU8xZsQg+IGOWgonIgLBPOZS+l34xbyssmy6qh9Xxoq6qS7s7gdztg7kC5S4Z4ngc9spQlMVyvl40N8ajScEjMepPFnETD45bX0zM01d+ydueevKpl669fvHyhUcef+tLL966dufk2q3ffNODF1a9NCDXblzfPrPbzNZ904xLp46aZSotx3VjIDtbk+P+6LhZd2Cjyt+8fnv4FrR9YmAy62NKXW/ZNGtUXCtl1cI7RBUwMTBAZ4ZZ2LEYqpqpObSs0msG58Q0pRhT6nvNaiGAD1xVmExz34AShYDKkhOIoHLsNUkSMVUjQ0aSKKKiGcgrBRxPiiamvstqkmOSYbsHBEBJFNBQFRGReLBJKpKJZrUhJ02EqlI6hwSIrgwu5+zYiUHKuSrZsqQkngeXomZRNSDEpBrV+l4cAhGSKapkQ0JXeMyqBCiqgzcFEUV1MD86RlWjYeJx2opCqIaENjiP8BS7RsxEjpxDYs/uFPbsHJEHVeLT1szgSiMjA8d+6AJg5BBqEXEefFEgBDGNScl5U2EERTs63j9enaxic3I826j249nzXpjUpRhNzTkvmIjYuVIERDMRAjrn/HhjYzxd0K07fdubqCZhQMtmlAdXp3ceAmaRvssx55yB+NS5i0DqBDyKiZppUkR0RBSYPUMGyTn3xkhQAwMFAlKMKTdtH/ssQj6UJBB7cUWoxjVEdYUH7khJTUWQXMGOXQjS9zn1hIQEBC7GPudMZqRCDFnVAOV0kEUAQICWlZBg+GhgoDbiF6Wi4IP3AUzOjTdEZL6YQ26UurIw9NiqiGhZVM4BmBalcxnUoPZF7tQQm77dqpmcmaT1vEdEM2tVS/al84GZLZfOERCYZhVDGOyymg0APCJmFQHJBgyMKtmKwoGiGt8+PHnswXPf8Xu+5mff92Tf9mgiEROts8SUVu3CH77xoieaVJO9y/dtX7gAjIjouVDzknM1KrPknHtxNRdTX0361Fu3RFYAiX0zGpc5NtJ1dHKwOrgzPnfPved3DpeH58+ce/65V579/Ic+/4sf2duoLhe1lpM7t+7E2M8yTsb14uRg+dKLC9zc+sZv+12Xxk//p7+/cZlwL+wfHUU3Pnv/o+PtmlJCqs8+/JVvfPgjWygF85ScNzlp80Lw5Bjuo53u1gvr/c9ff/3FB770re/59m/m8/X7n/qgLpqRY1fREbXb7zj/6Nd96Z3jw837VqGgC/c95M+ML/2eR+Ctz77yMz9z/NTz1EYr3DLwsWl368bn/t5fu/zbfscT7/3OzfP3vPbUJ1cf/1i1WFcg5WS8mKxuzk9G5x+D+x8Lk/N7j6duVE19U/TBbZypt0fLfnb9teef+fwnty9tHR29fv9jO9u7G8fr7vNfeOnK61f7tl2cHJegzaLru94D5SzBESAwApjFlEFSQdTHNlsHyMCeyIuhsnlkAu3VFCWljKo5ycg70YhSZhTNhlnJRCUpsUIERRUCGArLoqaIkM0MtGUqChfIFyjKTIIQuHDOlUQOVJJ1MTKCAfWZjk56RiI+dExhLOzGzODYO+dJTllF093x6JF7r/6CzK4dv/PSg2n5QvGpV+6NsEkuWs6oseulmUNZK4Kpm/Sb5Ws+XIXDj/2aJCz6JqeeghXiObgMEEpf7Ey73G4frdMnX+8drK9cgaJyokLOFQ5CWajmWe+zz90KS/LsHFMXT14Z0cP/4Iebo5OTv/OPdnbD9rt2/ePnte3gky/Of/19G/VE/EYuagqLuFpg7qVZb4X89L/9qZ377t2+uYDJntfkESrMN//H/689+dDGfHV+Y6M5kJ4VRUNRNCdHNjIuHIKdXS/0n/083ToKMyAqn/pn/3TvO//IN/y+h//BT/+ojcaPvf2ej/zcLxthjuYrTmZk+vCli5ulv3W4nvX5/PnNxaI9Wq5KX5QMqLY52jKlLibPiL0ptJ5c069Hk/E48HLRcEHjcRGTKGI5nh6d9Du+3iknV26+zgRdVqi8qHv6tWemO5dWba4mvJ6tzm7udofMmgqnbZKCq2pUUxZWZcau7dnhBpfToiqYmnVbOT5ZnlDmZHq0XlMxKkxqxYyhkUYEBN3VwztWTe7cnl+6vHf+3NbB0VyV7ixm2awTudG19djbMnWClKBPCSFt1hNwEGM7qaomabfuEQ0LGk9CwaFdC4dyvLfn+65igzPb2rbKAgrz4+WDZ84vm/mkrGPWPqtZCszBhU2j/WU/c3wjwmZvVYqxy4Xz0zL4lIrgcquQy1UDLeukKAuGkvD28XKRMjPXBQDEYuwmdeHaNK3Ht5dtn+CerQl5UqKUzCwDsSOHfVtXrpqGgwWcIMxma+71f5KKZsseESry42pDuqWs9EZ3EAgKRgRVlBCsHCEWaOIsRzBzDkVQ1NAIC/roky+8+S1Hf/zv/plv+4s/+qEPvO+Jt1Wzf/NT9zz73+c/8/MPfdc7mw8/1z358lae6jm54+rVSTSHvOy2Nja8AxjZudzDf/jp9LP/aDypN37gB4+f+B1HFTUCu6Xd/6f/wh1/8XCyd+FHHu9/7cN6Z8aqkBIYaQZidVUFkqCk62fP3HzP28czLT78Ww/mEyDjcDdjhQHIgUbICRCACSQDCDACG5QMYCBK2QC1CCK5BzV0BQKKKjsPoGaKaFa6vqir+x8ot+r5G69uXb48PzgYjTA1DSKTdBo7V46isQzeAEDLBlFcaVJtRe+L9RIwO7T00V+p763hoYfsjcOjD31448Xnd9QjB6SGPFlKaFlTpjEc3Ve7b/xarnZOPvF0+foVp6jHRl2ELMwOxGTdIRdUbeS61iYG7WBgkHsEVvAMBqDZBDHwqdDBDHkg/SJ4A8xQIqCAKOTBSiPgDPoIhiAGzoHHU9uWGmQDZnZUFAA5LY906+u/5KkPfBoA7r1Q75yrt2n6Kx+8ujqzffCZW1dmqZpCsXHGWnrlxVeuHB+Mp3uvvnJnfrQ6OFxsbk8zUuv8nOt7H3m0GtOvvO99W+O9LKKqwNB168AuhIAmblzdOD7RnJGCWC4KToJdil3KnoNnbttU1sHAHHOflYiaq
NIm57HvGkJwZYxJ1GwW+0nt6lH4olQ0EIXFIJuJgaqpnprPBt1hOE8P0Gke4mg8kHVggEmLmWYYoFoDEmAw7Ay74ayn0cS74Gozuvsn71KrB11vEFIGLcXMxCAPrmwwBR2MIELmAImGQSs5lqDATE7NVMGADICg8IiGaMBivVkEGFrbCFEHg4kZwfCn0FQRrQjgzbJimywOFWxgScGUTC2p8lCJZqaAgzGobxMCOh9C4XP0TbtMMZVF6ZhUBkONIqCjU4g1Ag/z4IF0oTBUtCkOe7pTbI+CQdK86k5cERAwaa+mhI6cI82gVpDz5AaNzczMhgn24DA6jV0MTCUbInBqcFo3P2TACMHuakQ0uAqIwBGKqZoakqGeorCNQIWAhzNPNvCIDsk7YzNHRkyiaEkQwfFg4AJiZPKi0oMO+3WzQZaHUyvT6cQdAIyIHA+hOpC72bDhclIDBjQbInGoAGQoMLySIeaIoiYGjkgNPJOaOQA0FUFRAEUj8wSeEBDlFLiLQwcQQAaAy2++7/53vru1GsuwtbGTzTcxOXYO+l73BWKotnJ0liMCCXh0lQJFAe8gWxIECi5rLMKGgUcYoxLRCAhSH6uicKjSp7Y3Q2dbF/PWmeSolUTvWZJrDp/8rB3cthevlF0XYnJRUXw0CF6Q7MP/6f87KdLFe7yuwABS25dTz0bLvufAOfeiuSyK1tSCz86h49Q147qG4Jftmp2b3v9oEzhNOmt0tlxWwQGhuWo+79pujTjqY1eRoLemldvWb/2//upirHF5rX/9hfWNV776O3/vy899dqOEw/WxHPbrLj76wMXFG083H/mNw5s3dgtOffzYh5/eDdUzn/iE05QTH3zhqCzd9Y/9sj2tYaUNjeZtAoQYrdWs7Lo+C7qsSAkkQQIEQi9mCblXWfWhIPC2ijkSHXPKjg5acuVUzp2tppu46qcWTw6bT794dVIU9UZZ726Ynn3l9tHN/TZ3oKH0QYUse+qSoWGOsQ/eObd/+8QzBhBwUIxGLsq1W7frUZg1rREnAUXJJlbQ7aat+1zUI0WaL5vEXgH3lx0hclEhUVatXPHA+ft/69ZNhwSE29MRxJxz2+cUPHc5ZUxFUbddm1BRQQvXSlKAoDpC37f99mao6xIyJNTtzXp2sJx3FiZhMY/3bpw7liM3Cg9feOzKzVdaVzSSm3lLL1zf2nz4kXrzZLacz/qt8WixWLlQPn31oM1w8eK0nEyc93Xtey3KMSB55HXfS495WjGCOXJt3xeV25pW/bI5dRURIlCKCYmUSUmROQMYUhJNKESKjDoEcskSKWEGJEAUzYN1gh2DQtbc9qnpYp8VyMUIShlQgZQdsGMAkCyeKKMwQdJExkm076UIFQCJZSMAspQFsl904nxBDBJz1syBEcSSIhgagujQGaAqBpSGeJcqATGRqXoi75yq4tDQntUxx6YzNUWrqqAgYkrIQ7C3j+IcDzC9wIQGYkAIpAopkg/gXPBoOaeBsYcDZk2JiZlNdEgIqxoxGZiKcqCicGUoUpZm3YnoUFEUnGd2RG64X7J3NpSNMZkN7din0GtgZHbsClNjdI5QzJidSK9RHQE659gRDwkVlLQ+OLo5a5dGWFXl/uG1++5/ZOqnNtxQLXc5BR8QiMmBmoEy+Rw7DiULiGGzalIfCdETOaCsGEWiaRUYTUVNEJSUPYoKOiVDRMrJwNQYQ80GkDvDZKLCA806myYFY8/UrPpQ+RrBBRezhXLcrJty5J3zFnMjRIhkQKq5SZiNkAiRvKeydMF1SREYLDs05xAkG0BOqmKI6JiHSUrMOtR0goCoqRkQGsKwzTjd5ZyeDkAFkKkMlRGqIUSX17bMvRUIzkw0IQvmxKkTGoeyYMyrNVEANAFuclSw3GbsK2dWMOkw/CHOgAJaOTa1rKoAq7ZLCN5R5Ykx9H3EbBVgBOhxIDYSMbKhmXojy7Z/Z1Zu7P6ur/3yz37mKUCgQD64+bwRog4N+yZ37VEZmr5Z5X60sVsVdeEdeed9KSI+jMyYCl/W09SN+9xD6vu4aFdLKsL8eH/drVQtUqirajY/Xq9mphmq8QNPnN9et22THdNjD7/p+deffODc5Wo8vfLK/jhwfY5HO+d2Nnde/PzVS9O37lfhK778TbL5nluvx7Iax7ZlN44xz5LuNwk2povbB8GzOsyxr9E6h4tVc+v67Z2zW5VxeTw/+i+//MknP6iRu2sLUdSoOApXL2898Rf/ytL0l//pT3zbuW1fXRifubRsw9UTt3H2mx75c+/8wJ/4fefHum7am6+cjB+/5+v/l/8tPvnZJ77yy575pf9rqx7DYvb291zwb/+KV3/rY681V3Q3vyEn2xe2btx8dW/a7z3yxFvuf/hikZ/7zJPh7Lkb1692TdvMVxk3yuqhYrz3uVdPDvZfeeGFV66/eqNiLgGtjY619IzqU1YuvSEaoIg4JjNLGTKqmQIwsklMDh37wBB6VVNtDZLmPmePMB5gY8AEmHPftAlyz0gevCdH6BksW46dcB85R5DsUIEITcpQODOKrWIyk0BWBSg8D2XBRJYGTJh3ORuqzmfrPjYIOormpkZDHyK5lE4F0/mNqyfXDrfvv88vZu2/+7nd5StH/+XXpgs36/NBTscXt8+/6c1Xn/tcWN7ZLjcgVwTmEjhbg3RALLWu2gRg5oNY9uyp8HFjtx+hYrbbre+1CC7HdSb2xDlGNNRkfnOkJs4FtRwXPSisx3Tpr/z4tce+Yn3z1nb90+Ojo/nf+6vVu3b3rx1vfGE1Gu/2Ua1p8PA1gkzepEPMrq7Lr3Kj2z/y4+xxWo56kYTkDM+ANR9/uSqojx53RsvVgWLvcwTq0yILoSYjoABaBhctYRid2R41n/7QT/3LH7vu6sk73/LUpz/10rVrodwglyMRFMUo2rnR5JUbr9YF3rc5LsxiBleNui7GmDa53KhG+/ND9t5EN8ejCLlNa+fr5RoJ86KZTR1s1uPj1JqhZd6stu7b3Gj7k+lG0TarsvDH7er8dMN1GHu2hEd3tKaNrWrj0BY5Nrubk4P5+qBLAOjNYt/XIZzd3mu1HQWftOcM81aidRfOjtfrtFz3u9PpskkMIF1SIDVdt33wboHx+vwkFzyZ1u2qJ0Bfh8bSdBq6JEVZhapcrlbjagIA5agOhBG07Xsw62PqFYBkWtXLptvcrLZ3J/PjWFZVu4qjsq48tuWk3tq8vX9UVX7v7Pa8ax2VTK5PaaEyXzf3Tqereb+9Ub3j/N4nZ7PFKgKhuHz/+Y3bh0swyCZ9l/fGIwd41DTrHJNzSawBkKrwRCMfcreqHa9O+sZjt46WVm1WX9m0xAlWRVWAJ+1BxJxj7yozKx1ujUmytj7exdbdlYrQgqPSZTy+s87JRFXJekKzxKhndstlzMXJbFIXh31blC4YxF4CcWdKamAcnT79+YPwc7/6fX/2e//KD//0n/+mb/vdf+b7fu13f3zxd38Bf+3jVz/3ufeAO4L04Pd/Fy+6y+/7+I0Xr5coR6tmfHEyTQ14BlVb4jmOt370/+je86FHfuT/s/yB
Pz37pV/aPXufwMJthVnhzj+821295YODwpt6E7F1i46hiYvL91/6xz85fuLN0y7mX/yPy7/1Y5M+Q5sgpbut2KeBDVAFGs7iqt7ROAABeG+NYUcZsvPoiLRRS70iqCIQE4ilpCY0CtUk3HrjSrHB5YWNudLGV7yrXRxV0z3ou+bFZ0Mscqfe+2zIwKYdE9HmNK4PRWKeXgjhANIaPFWH8+4f/2vw4nvcgopoCMsLqJgFREI1NW0unT3zD38iPfQYN0zf+Aa88rn+yefjG0fhE8+ExaG1zTCBA7KIzBvnc5fC8U0TwWkN2kNUYANmiEMLFEKKkAw8QxRIBi5BiTmL8wYZwAEway+EABmggFP/g2QwAFYonBfrj/ui8mANoN0+MfnyRyc//hN7//k/f+Gf/Iv9a7PfeXzjky9/7vb85vmHLhwc3e4z5VWanjt7fm/rt178XH1hql07moxfvnFyIlm6flqUs9nxvF1tbxZvufS2Nz/2jlv7R1haVXCX9Hi+RGRSzbEfT+pLO3tt2xy3aT5fbU1GWc2xr5lNDUzHVVl5Tn0UiQ6w8pUPvo26jtHYULRtMhIKIzEUHCmdGk2zGJupDcwFkLvl5QZDbgCHCP+gQtgpaWgovBkO/cZ3ecNDaAgBTRAcgJmo4F06NpABG9ppVewXbR5INLBybAizDVs4syjoEJHIIYBB0oFVhBnMD+AkAx1UFjAwIKaYRAAzIAMwABMgQBgsSShJYCifMEIZesJMASiTZdQCrXJQGc1W2CcWACNxOKTk4LQkz06hQ2SIhOxdVZZm7IPvUpxsjKVb527pN0bMBEQIBCqOw5AEQ0DC09a4gcg0GK9Ah/JcsIG9k1ERT+aHpOYdiRqzS5aYmIi8C53FajweT7cAXzvlEg0dZojDUwyBs8FEo6BESARZDIb5v4GjQa9DBpThPGSmAIKn8CggQCBkiip3Yd6KiAbYGaphAHOMDACOB+kvOJfNIhAPmCEzT0CE5ggFzCCdVkkDMyKe0pbuWtWGGNpAtsaB5a9gYKiidJpTw7v480GBBCLMp9AkxCHheAoqMlAgBAVTZEFTMAJitWyqgApGCPmuoQ0Aftfv/c6tex5vVtcxmwP1zpbL/YJNrGn6fVdwjM6MR5Nd004NskUmZB8sZ09FUUxEzPsJ2RYIWkbQPGT4giscYjDtxUhd1KRY5Ux9D0Y+5mKxbOyer4CLTd54dRWXI13Nr93ERWurVe5ijskH14l1K+VWdsdltVF3OUKSXHBR+9QJEJmX6P0rs27b6m2AejLKjpAhjIJk6VfHReFXx7ONeirOdX2Oq4zMqKiRDMSRB+9TXhcVYtYrv/iTN7UdtyfV/u1JP7965YV43GjWvGg3RlWR8q1PvbyJ4jm4JuXCRQUDXvWSEBNCVkcdeMhODDuRBBmzoosikakzyKRZoBNTV6iC9uoRnUmEqNGKEPpQ9ohJ4Uh1vpa+HuPWVrh4/sZi6QSWr187OWrCRqUdPPbg5SceufjqlSuvv/LqbB3bdRxXJU/HB6t1oLJpumI0KmMejyv2YWt7o6gmImGzDLldNylubW1Bm0fTqRvjrZOT5XLty7A1HhWTzbP33s9utFFNX/3cs2bttfnLTZuDQ1U0QgD25NTycb/+2HMf2BjV0vLtpp3HbDE1lsejenNULterxKNFlwB4GuqH9i48+cqL6AMoLNuU2s40V2MPKW0U1eHJYn/VTorQKKT1+uzG+Mrt11FhXF+6cXx8ZXb10Qfvma3ag6P22VevOSkvX6ofe8ujL732/BNPPPjU8y9ubUyOru17B/VmUPEvXr0+cgSI9+yd/fjL12eLZuyKd77tLa+8/vq6k8ujrbo4RsaT2Xq6MQZoAICV0bBwvs/ZALPqgNZQRAUcDu3OUIbFZWCOoQGKGVg2BnPgsmjK1sR+0SdRy2pmuU/mMuUIPgAFRW/BsVpy7ArCKIKB+l6iWUaEnLjgsg6qktqejYyxzyaoOaskQEVGMwJXcm5zFssCqhgYDSyD2N2aRwWjoTGeABA8D+qPIJqpShYABAIFZOcx9lmG0khziDjkiBE8Dt5IC74AJ1Nfr1OOKSdRtbvwvsEISqBmzEqOQM0xmsCQb7pb+zkoWwOqH4DJmEWVmYmYEByzITJ5Isfs8zCxM0UiHG6EoEwG5AgcEwNkAxDFa2+8RhAeeuCtrgiSkpqhYc5x3cyBIJQFEx4e7N+5c726+EgWATM1NRPnRpoHty8wh5zXamBAMVnbpa5tOKAO5ZQgZISGCtCrRrUsYoxQYOEYM7gQNBsSYc7QKjoiZFBURAFznkWySkQFMla1nA2INARzZdv02iUyqMoQgt/Znca2723dIWcyJhQzZDbBBFCyE+JMVARuVxFjTn00JiLXpkSIBorIxEZmAOAZYYiNG6gqIw/FoWp22qNw9y4EAA7JmzomNfTsuJws1kuCAN5h0HFAUOhTzGmVWRLKNFgIo5iy8yEBEmomcYSggEPsGrBkJISsOrTRZYEmpmQmhokxizkGVAsMruCAsI65Q0p0t30WqQgeyAA5Rdm/dWP7nH7529/69MtX122r5gKFpElyxK6vC685LW/fWszXo629vTNnJ1ubRVXVo5rIZ8iEHtgh0Wg8KUy1jyE1VRXbbjG2sQu7Zj07KSqMJyez5Wy5XC10sbs3mVyquzvLrO4T1z7Zs3ad3ntmxzZOjiGP6rMf/cDTD375Z/buuW/Wnzz+HX/2yWtXt2f7b37n7zYIzL4TmWxWKTZoMxi7jYenGxcvffqzV4uTNCp8lVU0PvuFNx5+/MytO3fSSr3ya0/eiQm3t6tiZ7QZ6Mqxftsf/4EceTQ5//Dl+wuoty89HsM4C144t7ley6c/8vHbh30ESIA5Yr7y6of+xd/OF942OvPg8eEsLp/fPbOTw5QeeNPq8LgK6eVPf/b+b/4D9/3uP/2oFWnR33j9uY//+r8viXeq8zUfXXrs8dvHR+lkPN088/nnXnlj/xOv3jiG2SoYVBnqAiHmUeFKD8RQOYpZo1gCS1GQ2QhyBkVuRBNwzhocAREjcFLKmYCSmABkEQE0gw54FAoBB5BSzqYClp1jJmQkBCNEELOsOhioGUQUKReBEI29p7KEqCDgmbxjIFDCnCCpdSoRIYk4x5lckyT3cjhfZAwjMIAChJjQFad1N9jB8RrrnM7HdvHPfjJr2ibkER4w7Pyx7/jy7/+zsRrBjevv+8t/Ory6b6syGmjQ4JlYo+bYRXOhKHzsM2ZlVqPi1d49/pf+vBvD6qPPHv+nny+6znKbyeeOOGW3hXNdJleAqznnwjOPi7KglrIdpXMnI30VcJZpyZPFun3laC/4ikoVNgDpzFuvueOqpAB9Cn0C7XQMFY9dAutQQZRNmTnPJVOfQnytKu7/3/7U3qOPFWny2b//D6s7B3C82Ls8jct2cXueC3BjeW19dN+3/w5cj/uXry19/Y6v+/qXP//LAQNkoSydZOTwzifeXpa5D7Dq0qXNSVp1ZUGiERyWXJtSQp6Mtn3lOk0H87UfOd48c+f4RC1XSJPdMylB6tv
YdYWbto3ce2Z3d1y8drjvxuX8YL413d2gYtuN215z7M6Mp1Xgg5PlyWLdiCZDWDVRBFB6ES7KmCEBBczb02Ja8HIe110Uyd6zQ7/qmr5DLEKWRMyqAJYR0FdlykmDPzhszm3snT13lvKqT3LtxmE2KYkUsG+y2npzMlWFpu0plMxF7LuN6WbTrhXS9uYImtZDcf+Zi4eHt6+vDjYqvzk5R0UBgkmgrDanW7ZM0OZ2ZlSCnyhu1rvzeHIyb1exJ/IXt3dWs/beM5tfMnVfiAezLhnzfpuNeaXGWc+4cUHILm4KlKPyOPONdZxbMoPKIKS2YK59tU7x9nxO5OpQPXDpkshR0/cOEQ2YcBgFIbMqiWjsJLdxMqry7rTr/2dWkYLrM8Q2JSR1TkUMqEvC3gHK7KQvCocdQJcRoSw9GoCoRGUzOT3qBg756fd9Ql6/+d7v/FMvLL5w9B8/8l0//td/9f/8q+trL08gT8a7NtkCNy5e+/Ttqy/d+3f+5Oc/8SLNRjqm6Qc+lA8SCxA4WNN5tp3f+mD3XR/Zvm9j+9xu9zP//Py3/44JeH9zdeMjz+xlD4UzU3ZAqiAJUOC+vfwd39re/7B2lBuHD7756uW3nb16fW91HQoAyIAOJIMqEIL3oAbk+nrbvu6b1l/6lkXw586M9faSnrqy/OQHto7f8BKp8ABOUwbHzGQxKZmrCi3r5c55+vY/2O1uuqNXT155ceOxx65/5De2tza7Gy+fKXrsly5FBxXE7DemUHhZdxA41CFKS2OW20LZKCZQLMXKjKAeQIFFQcmxEUrfF+SYOY/C+Ld/U7P3IHRGq+Orz3yoOlrvbVwsdu7vnnoGXCZnUAL0Yt2yxDbd6ZA81CV2EXhQJxSygRAYwVD+5DwYgGdIQ1YKVliNf+ivXnnj+llYLp951U/qrXddPvjk09vXDml/idNwK2D1+G58+fropdkoCBQFRQH1EGwx1nP/6l/9h5cO3wVb973r6z/Y/qPL77n84//2//5Mjpvn6o88+eRO5c6dO+vVNUfzbM1LN14+6NdnyZ9cPz5fTCabctJHq7gO2C9P2gZfh6c2io1UV6Kpi9E5qwCbYK7m3Y2tmJVNJ2UxO2kppdR3VekK7/uur4vSO1e4QJq7hsrarfrctWl93EaCmNUXDACuYGUQs1BS4bsvbo10sL4YqA20h2EiBzRYbwahZ7DS2BdL1e8qRTwc3hUBQAYVAgc9xojRlGhwuiCIqQAI2ml4aJAjTjWCUwj76YbbABGIRAwJMygiOGaEgViMJTMiD8YwBXBM5cCkAGUCOXW+qKjxaf0YOoShpRgVB60mmxHgcPENWOXKgxs6AxFVYMAODDIMoQ0vHwGYyRFLVlTznjcmI005eKe9Ft4bTZrYTiWFUCE6BM6SAdEQCXiIngEOrxvR8JQnRAQAp5XMYKaWctc0q9pNHJtIOlWBwIhITcj5PmvsIupd5ek0WIdg6ngopEcAIz3tC3PIw+dDg54ESISqKqd9cYAIjhFMgZCYkikhkFFWsGynzw7GSCI6VGkpYCb0hp4JwUDMsvWiBhQYGZSRDBRNCUwACE+rBVWUCR3TXarowBwa3mUQAWAb4qOMaERD+nGY0ttQRk3oAZhIYZgQA6sxnW761YwBkImRwCDb6UgZEUAG3DegAQHq3fFB5WB2+wpx4mqMHFZtQ1zGvBJsySECaFZEEEUCr5JsSI+jIw6IlWlVcqj9riSXMBoqIgVf5pTMJIq1fZfa3ogdBu9ZW0xi0qei60dmx3dmq/bIjLpyc42F3eOxbYKl9mhOlk+OGx/H++tOCvOZ0jq1y7hdVjUCrFvHVo0gn3QAII7W0mwiNFm9RxNDx00jcQmuAMew7JY5ZofA7HPOuc9V5RxA18WuhxwhOatKs2ufnSI2XZb5khV6WYyYIMWowDlDBrLkCwJL5LDL0ooBoSt42acVABfOsVrK1mtVBQKMIoaioAZsCr1oBkyAMQkCegQTyZqQFAMf5jh34UarPY+PwmTv0qVqu24sf/pTnyIwvRank8oT7u1sPHTxwdTE93/gwzdvnyC5IWf0wAPnQnDvPv+EZJOcJKYNx96VYTJSkflsjRja5UnKfV1QOr6dOrm0M0XvR76wi5Z63dna5nJy50QW6zvP7H/atW3fzPp+XXqkoQdSBk1AWNUUnbCadG0sPWeTMPIbBZjlk9UCkroqZBUG7frVy/tX6lHZZAE2z6FkLslXddjZ3nz1yu1VKxhckzS4vF0XpYmrXV3VVYkv37oG0R1fPWa0sk1lKDpsr91a3Di6seGKT3z8mYR25+ZJjYiWX3zx+oWdXV3GlfdR5fiZN5h5GgoCfub515A0eH7t5SuOsFds27y9VQ3fghGXTsgxQMCVNlNQEJNsznsDULShJiwO4j8zMmW03tQxIJkIRE2gaMZdTkmzmSGoGZiaAuYEfYoVOF22OC7r8SSLpmguBC4ROAKqaUIEdKSMxuzQUxJC05i7LmlWFHNgSRIjCbEaxJgNyaEDVWTkQdoWHXRgAiO8u9re9RV69sMSw8wppi4lZ4jEWaWogsUEZlmzAiNxn/KIiABSjIYYgu9iCo4EoGsTINGwnCIwoSJ4xyln50kFk4gNsVsAFU2qORqoSRIDBAdooFkHIcl7jzRw204Ph8xESMgeAAb3FhGfsvxO76RoJs65uqpjmxyaJCFmEFORGDsg41CW5ZQrmM8Pbu6/dvHsZcWxSlJRIiUiJI9ASJA1e18YgolYTqltYtOiCiGIiiEjg+WESMOaiCISUQV6Vg6E5MhL1KRk4ADRcswm2Pc6RKxNTQ0L5webEhmQg+moZqLgHWs9a2ejgifjUHiOHZSVXzctE4fKo2MkIRv6ITLErIQZQXJuF50pmmVgRbPAiIZ62p4KA5JveHa4y6dTPa0DNRsehqEoCgCYCJBE8qlnWpNFCa4QlaooEqYSFVWbmBd6HMJGi0UoKssrQUFClkxR2XG7FAaPpqpCjjALqSqQOMpmogBEKaekBGyLLgfEjSJQztXIGcG6NzEgJJCMaiKAqilJ34Cn9QJubZ+7/9zO+as3riUT1d6iULZxOSkYsjR53Vt/spjN8+Gt2eZmGNWT7b2intSjCYcyFME5FjMGHo23kLeamHi1QtpEOs7dST3VGBfGTotQ+B0qwvEa5n1MWikAeS9tXC765597ni27wjcgf+HP/vFfeP9Pffl3/sXPf+4Tj1zcWad+fuWlk2Jv4nZrLk/mNybVGvJ+c/vp6sxONR5/Nvu3/u2/ET/35Af/6c88VAk2Xdvy0y8cqJFFGQWisSfDFpMhhyrcN10995M/NMsno83zx8u4eOgrp+e+pi27YmNUmbQ3nv+1n/zL05ykchiYzvIf+/P/66f+x39/+hP/9cI3/a6v+DN/v3nhF1/73MeDe/Dg8J6zX/c7wnj6VV+/pCTrWPcS28XJxa2N5Us5Ea1YfU6379y5dXv/k+//rSuvvny4nMeuI2M2wWzBNICUFVceyJGBsSP2CEm8qTky0WSWifqkJpizIIHZaY7VOfVsaM7EhmZpNT
NFYiTAPiWRvo3ZEg7M1gGnmnUgfwFlQzUgMkMkLItAOBylUA1ZrURwjJ4dOTIk713qG8lJh+EloLKLYhItz1d9zmekL4pp8BNHinhqrtvqbldTWdrCsY2Zh6z+/nLefNWXPvR933NQOO0bt1W9/fv+2DN/4YdGyQW3yaSxX2VJnkMgjMlMjIGqugDDVdTHv/UPHO+ejwTF11xcfeBjcOsGZyk2R6kDyHhl1V36/j/h3/k2W8db//6/zD711DmmiLJD3Y2/8xfW+fvPkj9bTbpibBnrbGBBCIgVUVXM0aSsfRtnVrjMHXjPBaBRNqeW0Q8lLmTe1VxB3+xT8fD/9e/n9407ql78wkvv/rG//pkf+eFL80U0m3URipI2i1dnt0d/7g++8eB9dz55feeRN73p/ou/8eFfuP3GF1xwCCaaHNOI3e5o+uEvfLSTNK6CpgSZFs1K1k0Rimk1ti73sT+YNxyJEDe3t9ZxNZstodeNqsa+3w2j43QSKGAoVJzzfnMy+tz1l2EEq/2TLL7v+8locvvw5IGzl9MqUdMcnrS9si+8L33h3Na4nB0cemZXFKt1G7wzwPmyC+i8Yc65rIugednH1w4WXStFXXcp16UnUBPTpJOqjgxZsgiNxqPzZ7Yi9OvlYpHlC1dvoneiOhrXzf5+WYwMaLluRTR165WuTWUdc+V55IOs2hJp/2D+xq0jyRHAjkBmrX3Jl52DLpOhuRLzuDwj1xeHTrWkUDIt46rpurZrgfnWYumdf8uZPQf2wNa4T+laWBlAG2Pseyn0/u0xz3Pf54poXLqRB8oc1fVrbUC7NrdI5Hh/uTro4nEXkWDp+9XN9UNnJhMOqe+iQuFIHZLzTE7ZE5uKjgx60JpMnP1PUlHX9ASyPGlU2QgNpJ6UYN5SAoW+FyYsmXioyXLgmEXQe2edYbIs2USMKHt44dVrq3/z97/9u7+X3vGmn7/+zAflYMtWf+g73vT0R79wZmtTfvW/3PjYa/sIlx556MLld9dXbtTTrf1f+sWtULORkQc1yETJxrcztCvIJ/Shv7X69/8k7+42144fWFeRihzFVdgtuqoqwBmYvLr5wIPf+yPLvq0ShR66Vz/44I/+JVpY/NO/PzQNgIIIIA+GANCBF8yF97C1ofc+Nrr3Qjny9mbGd6g8dG/86Z/wGvMyU/BG3voIkhAE1TAUKWXamu584zc2264MX36feVgcPfANX7Y6XvrPfPrkxz+yLZmzaN8H9NC1wOAqXJelpONxjnB8DUUAQLJyWYD1p44n5+2Uv+uRPXPPCtBnKTTc/0AqKo4pT0bT3/Ntev3W+of/d3rmc/WdE3AjyABrAUREhr73t9Z+Zxccg2RoBKwHIMgKTKdCSEpQeBgV0LRQFBAJ5u342752+c1/+OzmpIon1ZwgEEzL6XclPrnTHl9/8cVn97706+PlzZ1rzy3//I+2zz6rq1RC0E7Wh/30B/7X2dm3PrgTd/u1f/SdD37tV/2XFz+7uP/c9uX7X33hmS95y5vyojm7s3XztVuj6VTyejU7mtTlKMBstnrTE0989Mnn0JeHx+tCVluTer3WZasQZ6tV44ki4u5GebYOz756MJlOvVG3zm3TlQXvTKv5SmJOgDZfd+S4KIkA14t1WTr2zIgbdVX7qumOfHAWQHIqApw5Vx6rkOrjb9roFvFg/1Q3VbVh36QKoiZ6l/8JMMyTT4/ngKqmBnaXXY0DaoEQlMCMmGhIswmaomRABBqSUIamjDo4VU6BR3j6+6cOlNNH4JTrY3qqdyAiM4LhUNZFCJ6R0AQw5xyI2ZFCFoKsYGBElBR6xd4U0IJjBEsiiqeDYhksTqcxNwDlDMCgtccyOMkWk6jRwLsc1Ae1U0uVAZ4KYgYGWm1OJ2e32HHwoSgDAJRFPe/W3XIxqkfKCAYMHhC+KNAMubPTdxR0IDkBKKI3MEZQRRU9OjyseYyI2YTZqQqiESMiSlJkr9qLLAEVh9TIYCxSAwQZ3CyDljfQXIfp+in7mhnumpiG/fNgIQIlICBUNERzZo4QRck4qQy6mZgB6kDEVuImaeVYWpkELAvnyAy1HQ6GYsExIygM1XIyHG8cQFYTJDTjQR86HepCEhvoTUwIpxsjYgQEI6JTWxOgGKCdYpcQgICyyjAKhtMiv1MW1XCBgpkHJAATUzyFeoMYmQ064vAtWM2Ptzb2zp7bW0aQ1HnicrSZezo6vj6qQ+FHSkgYA3lQCR6y+eA3HY0JR1W9a1ZgP7d+FbMgMlEAQwNTTaIpNU3ObU4QytB2UaPmLlLECdDRG7eODm4ujm4sVnfKjTFtTNa4rgvCke+NU8FgIjsbzhRi6nvplbplOzsq7/QYj1ZFJCcyjjkoBAPnNY8gJIAMwayoISdZK2AJwUNZhPUyTjfKHLOB69adq/lolUsPUGFCysH3RM0yltiHqt6/tXTe1UVhoVocHwVHCXDemebhQI4paVISw5OsLhAkiQQNoRESEjNTcAsBx9znXHiWJBoFDHpV8j6bGJJayqKqxGRKpg7bUbhTji5+yTuuHjbf9GXv+fX3/9LNT97yhNynlHM5DhHEIz523wPTevQLn3zy5HBtQnVBDuHsZLq6djRl6k8SmZ45s7OOa0366u3F4XKZUq9KyJBSl3Kq67GAqZkPLht1qeeSROxlea1FJVfcd/n8XsVcFlfXOQ3Qm6wISAaMoCJgQIQpW1FgNtgsi87hKqcCMPYpZSnQjRx1jQYEMtvaqbNgc3zStD2WOnaVGs7Wcf/gFiiUjoMLxjpbLQNWWXJVFFtb4ZXrL680jTanLAlSnhTejyUoHZ7kbhVF+xBGnROeVpbVxK16k/2jBy9snizTnaZ1IfCoOtjvcs4FQRjxYdM+cGYXYzs7akRhebIevgVnJzsBPauigflNQoSsDKCEaigEjjBF7XI2gmS5k9xbAounwVE1QDQyk6Q5o5wS7YfWLjNTocTMjqeT2tUFV2MTYBQgUpEyEKQ+QQ+qgTwCqWYk9MGpgqWEQhKFVMEPqynGmCQLE2cxMUEmB2SSzRBBHfFQwoA2LP5gooMrFNQIYFR670IHxkhdl8hoAO147ywrEyZRB1Yy8+kajm0fe1NGEgURYSJAzpJUAQmICQmSCTjrcirYO88GaAIAaAYmqiYDBhDUQCxFQ3LsA5MjHE6OQAhETOwJkAkMhDkQEiIyMSEzOQAgYkM0ERF1rkCHyBxGE7Sc+wxAKjnFFZIHqEajUV1cO7p55ej8fRu7DwGz8yHqOufOYWDygmY5OeeSRCafVbJlcoSEhESEgQmJYsSBSsDeW8qpt75DZoPaQiAFAc3BBQOMMeeoaI4HrrcoGZ1Wf6IZKXssx04hg/pQ1c16hjza3N2eTss+piKAQ6v86dmb3GAHY8hqvUFOhQsmgOCYfderiBpBVRIziiqIDUOmKCJZhnsukTGQqCAPoKKhy8EQ8Yu2Imb2A7vA1DlIkIuS26QmjnhSe8yxFzWSdgTJ2YoxKZGyOnSEsDku48lxjxV2bnNazVdrFiy8l2xEqIjLnJxRCNQDkHHO4pEFo
BMtkwQAMAjsJ0FiVpYB82cxqwu+65XUikCLkxkVN85vX0R39slXrrlVq50WKuW03i6rRLCOvRGu29hJXs+XwOTH10fbe/XW9sbu3nhrYwRjV5TEHhAMfSiLYrJRb52Z3zk8uXUT00xELjx08WC9mh3OpuNCgecn6/X6JKf51oTHBUewHDOYEjqF+PrxS27+3Od+9v/9e//gn4TJhf7i+dX192vzqZPbzWv7J8cHt3em20Z+66G3v+ud777+6//5/qgXL7xz+sjXXv6KP/gbP/CdF6Z51NHVg67twINpRZ30wLhZ8fzmul8U080yza0KOyilI37ht37xuc98IG9NH/2qr+8PDq59/sOPPLaVl8Wjb794cw6Tx996/NYve8jR8z/9c1vntsHyfLl2W5uXH/8qv94N0wsZgcut1HRxvtD1nd2qKcCmO9vH6+6Tn33KCG7ur5a3DyuGdd9DAq8DqFLRoy+8qbgAoWBAylkcc0BypwMtzkmDYUTMMZfDZkTNVM3QyDqxKNkBKCANKxWoAyyJCRQUuz6KmoiZAwD2xIRM5MyUzZhRUlQBAwJgNT9GFwB7sN4xCOecTcyRFuSMMKfGiQQAEFPMIQQUQ/arpJKk79eYdG8brbAcNfWnrKLlR37ZVorHxwm8OjJXKud+Y/PSH//edrIJmkPBRVFtvftrJt/75579pz9XmIB1DBkaqRizSDEtwdfCBNqjSuVl8eKHp9/6xB32RuO9t7ytf/bJqvLrNoOFOC6nX/tl29/6rW0ZSPXRex76zR/8If+5Z6edaT2p6s3NeqNiWLVN32TvHXlPBWvfp6hU+eCIbL1K63hm3J/bSVo0r+9vpaW20bnCFaUHV3jrVqLmMHYOkSH1t25uPHaxrezer7xky6OH3vVmuPpKOj6559LZw8NGp9vywIPuPe8a++nevZv/5p//yte+97d/8OUPz1X7jJNJiaIbCI+c3Wni/olIRLp5tHxiPKkLWh3NPPocpU/OY+77tvYw8mWXZL4/44phlseecd13fdyXI/QOI7dZ0OdH7724zge9a6blZDZfRmfH3TwDWrLDwxsxtluTEQsK2miEtw57Ajk+ajZK30sqiLbGxEbLlBXDuk3TEELwbc5NyloVb9xuR8w9xGbVFKHc2RgfHx9nBfAUQnBltThZbYf6/nPj5brxG/WNawfldNy0bdfFPsVzZzYnW1tXrtwuXegopdRnsWpUzVcxTEvEXFfl/GiWU0I0CL6oivliNb91+8zLL73l0ceaKJLBhGpfb/pSNU3KogaOfV9U5bgpFppawWtHyy0IMXAt8sDOtJvF3vswBL+8QzBgWre6sqJP7eYEHOmFYG0nB+tu4oI6H2rObexy70ahIGjalCS/cDuW4PfK0T2bxcWNQvrOWc5qopk9lY6orGZNS1EWR8v/SSqqnItRS3bgnYG5soDAlq1Zd2DgyGexnNWyJNE+IzOMpl4iCkrKEQhVVRWRi47k2s3Df/wTP/HH/saPPfDI9Jt+6n//yb/0N2986llW/Bs/9DcPXrzm3lKIO/nsjdHG1c/Qb/x0/cAjG/dtz15qS8jgwaXh0OQ0A88V0Afnw+uNXnmDgKAiRCEwyMkzgApk6fv27Df94Rv7/RTXFRh8/sOv/ud/t/WDf2RW4711HVYNZAMVI0QOoBmGQyQptEfwwf+Io644/8c6R1Z4N6nj+QdHO+dgOWMEo5H2S23Xfdd7hygKq0Uoy/jGZ+MXPkS/7Rta53qFfjbv42rv/AX/2MNxVNFsiQjoHYBp19Du7vHly9M/8Vde/fl/cu9Hf6Ncr8Ekx+hKnygxmOYeyQEoOg+mlloARk0wzAMprPevp7jsqHSRCXjDQnr9pfJgBsDQZwhj0AxdBO8AGQoHXQsggAa9gGTwDN6Bp9OGdmLwHpIAACwaqCuYuLg4Ilcdz5Y7pSs8ofPQT6yV1mse1Y+9/cKdZ54PL796/L7/Fj/1dOlDQayNeq9SwvFjb3761djuX33HE2ev/tq/+9ef/xQ9sjc6M7YY/5ff9XVhUr/47NUmQ6Mw2d587trr2xsb0zPF6vYio71684ZzIWdEhUlV5ihFWTYpzg9XfSc4quap/5KLF5rrB2PHabEMpc+i9aggb3tbI3dCy16bto9ZTSEu1iF4yeoMtydBYk4xjcblZDxexBbNXMAz2+6r3r37gU+8FkUfuW/62acW7KovSkWmqGLZ9FQTAUBEVc2ACKhgPDiLcDiog6qhgCGaKgMPqScgUDRTyKhgZAqWwYaRrpzqAXhXpPh/3Epw+g+dlqOfPmJqyBCQPGJgFlE1IzVmRlXHRICemYyMzJjUYDgfEIJT6BWSoQIkU0KTwbNjJsPsBcwxoAMzEzVE8ggBADPmDKd1QEMtlxozMqLeNe3IoLgCKsDW+Z2Nc9voAqAvy1GOHQCVoV6t1+MuFpMxIYnYQCcaMgc2NBEiwd3qleE/A0ADVQFwvj7RHEs/UtCUDIkNlMgxEjAheBFtT45Xs+NBjFEDtbvYH6AvIpwRcMjoARiTek+ah5I7NBto22iqNMCggDLAKVObjdEISUSzgprcYlMAAQAASURBVBGDnSKmRQ3ACAf2O7ZJPQNlQMbgMQAHULFT7EMWcUR8Sma6GwhDQiRRATBCYh48W6BqWS0QOjODgdZoOMReEOTUVqVDd9oXi9XADJEYQcyGvImZDVBJPa1fs6FMD5nttDANmIAUDEHvBtDqjbNhfGa1dtnxxtZmyrnt+yi2uXGZcZFtPR7tkBVIlFPnHFXFVh9LwLErzoFWKsnUem3VeUzmCZNI2zQqmQyEPflCNWZ0CWTdi62E75wc3Xl9dnB1vTo5mR+V0zGxsYslooiIZEMRB2qqTM4zg6esuYvV1hj3qv0Tu+VHebWqokJKKPHM1iQ2PczyhQ0/mtTAfOPW8b1nxzpaVx7ySTsNFU1Gi5KTExdVJ6QAroBg2Dctjko0SqvEu5s5W+5FLu4mibc77bpub2cCeUizckpGntVhB1mIPGH0WjmQmMqqmKcMCEmgKIJ3lFMmBVeGZJFK7rvIjlulLucuCZZspuAgel9VO6+uVicZZli9551f9l9/9dc46qc/8TEPWG6MEanPSoDdOkOH737bE32Tfua//4IPzhWezJVlqDz7IuzVVTAxBOHyudduXL990It0QBlUULemVcp9ZPV11ZimlNlRYOv7Xk2oSaNQFM6fPX/u27/l2576+C9duHTxP/3Cr3dZyCEhCpof/EtmmsF5FsIeAASm47p2fGs118rXZa1NDM5JykfHjZEvAmzWdWo1q5wdVVse68kIeutS2t7bXMh63fQBOXW9r3g6qos6FFh06/TqG0fHTexYR64G6k/WzfHqZJLH43JkjL4sN0Nx+84qk1w8c4FTf3z7ZH+ZSgRNOir93kZ11MTXrl6/tL0RynD92lGjHAk3Nrduvnzg0QA5tadh5GmoSypKIjXUHBkNhRxz0myISRXMzBERITs1iaZJ00mzWqa2gabJPSAmyERYFE7IskCfMgLQABczYSdlUY2qcmMy7pthNcAYBRUd
BAdahFJFyAxzMlFTUUQx0ywmimDIIABqwGrknGbUmAmRGRFMVXnQnw14AF0DMAIaFs4N+WhybIhZsssAGuvgl000wjbLdl3GnAAwMAd2fer6pIkx+JC62MXcGZjIqAjOOaeqOZspE6oiIcWUfYmjSVDILoH2pzA4NcWhLdTMTkO1+EU7qGM3SKuqwsCIg+mRCGnYHTEzETF5sQhEp4viQNsZRg2Smd2lh94cqloYISkgpiShGhkFhrGTgrTc2rmwXJ5cufP6o2cuBpzkpvPeDfdicmzZiNhEHLEaErERhoLTGkTUewwes4lj7aIQsAJkTaIelcw0NiaSyBmQE0RTlAS5V0A1Ru+AVM0EgdRMzdRgXITgXYGc2tQuege+qKroXE8IoJO6OLi1dAIImGOWLKpCbOhIVbsmFbV64slklNctrBMCq6ImyQjOeyTJfdThLoGIRCp6Wrehg5V3oPwhEN41FwEAeGQ0MiADi32KOcccvXdVVYKEDH5rdPbEbs7bfWJxLCm2Yqaai1CiSS1Z+3A8A1jFaV2ww4JQVAAQDDqxgQDKRNkUiAqPOYmKIehKrPZcOygLN2I0iZ1kx5RFFI2FCueMVEAYaHF0TBjObZ0NGESXRmyOvcNRQc0aVknbnBkxp4xIOUq7Xi1mi2SvTHe2pufObu+dO3/+8niyzb5ABUSUThyUW9XW4fLlF68/18tivbgX6+3NyT0o0mq/d+ESwmuU2XGOYAldMr9RMoCK2guvvDbaOcfN7Jf/9d8UUCHcGPumSWE8nZ7Z3rw4Pn/ukRPdPZg+cLxz7wO/7ct/9m/9jQe+4fd3j793vo35voe7G1diu5zuhUpIcyLGIiOBVTshbO/c/7ZHn/3k0w9cOuOrTa03Hnz0XCryrddfK+PBnWdPnviS30bxsXt/z7cfHO+XO4XtZ6ouvPzC7TOTS7/7h//6Se5otRg/9hWb1VfdWo82di6sO2Eucp/B+UDuxpUvfP6lp24dLN+4vbh9eHg4X1VeNorgupa898mGoWkSE0MDykZMaExdljK4oiokZwMkYh8cmGbmvu8BaFQUTFaRIFjO1gp0piKKDGIRyCt7yZlNPKKwtAIUJWVDJM+n0TMx8M6DC5qzYgbLyINDmh0FU9ScjduUNFlC5wjAsyc/SSJswuSYO++pj4BMSYE4CNICoUuimisfy/WqNlBj50+9/vnla7BIRUnrvkMqjEBIl9anK9fuu2fbheDGUxE9mYntPprqCbTUt7k0xyl3630KnPuMGMGNNkaFYZpe2n754x8efeC+S7/9m9P81vX3/9etlNC7wLhcp6UvHvrmb943WhyuS+bp9vgt3//dh9/3A6PMAXIOQJUauthhkmiWigKb2EBpvpAy5G7RNVbFr/7q7T/1J+6c2ai8xn/y0+tf/ZWp8wEUY9ImqgsOkiGUEwakBzfCiz/0J2lH58Vo76vfzHsPrD71DLTKwS/WoX/gwure3Xjp4snN19/4Fz/X7Ls/9Ee/87NXPnHn5uujrW1XsQCM6moL5cyk+swLLy0tUyhGm5u+KCnGoqiSw2kVoNc6BPbikA/XzXFzfO/m5XVu773vwu07twqHTcZllDP1dL2WcV3X6LFdrJcHY+dO7pwEQHAlWsjs6ombN+udnc3puDo6WBIRFjzZHFmm+Xw2rYvZcubrTRW3NZ3Omn1Aa1rLVTkeuXmznnVNWY5HwTkDMSPnRKR0xaarT9rF5niyaNZVVTrjvc0NYGhmq3IyfX0535mMxp7W83Y8qTfKMvYJjOZNX2yE3EYD6Jq+cG40cuNAx0erUVFVoVznfGe5gpSInWb47Oeff+DSA6EqFeLGuIhRtmTUdS0D5wzOl6awt70hs8WxJCjd7WZd0cbJ9cWl85tTcVdbmzMUwYFqv4pFhnvPnHnh8LhnfydGsnRps75ILo9CJ7RcRuiyEGFZxFW0rM65LLAWbExnTXelTWcW7nxdnJ+Wo8AEoJFFsgc/9DDv7k7hjRv/j1RUT0aVIo00C44n1aqLEHi1as1RjkqEMclyFYeITlkVaikULiJU4KNoAdQ2UUzBVAx75/tV9y9/9Ef/8Hd8y8U/+C3f8zf/xvt/+l+++ou/+be+9y/+vZ//1dtP3vYP7/mtxQNF9dQP/+Tsg8898J535PIY2kPnFAllrVmTL0NjibxZTF6QCIAFRk5BsQMg7xxCTOCAzYnUm9M6pRmkZnH0qcN6dulStQOQXQttBkbwQwd1hsFVMPSg9QjXF/ziG1iO+3GlPKZFDItVIS00LRpKXIC0YB0RaihD6SBG8Dlcv3rnn//dS/eezw+9rXXgHrhkevnwtZc2D/c9dKQZiFSVqoqiAMDk3W+/c25r9KVfhh/8bzCaQhJWBUwqaiyusKw9sBGr5UxJyQIPLocCii6t/tO/ne45ePwJufX0nV/5he5TV8+/PAOqTzNQnMHhUHMLfQRHwABtBDHwHkI1xJoADAoEVfABtjZTyl4FDpeABszzj31+40OfvfiV74Q6zJrlJr7+1C/9j9qFx37/D8L7/sfRT/7Y/P23/QbQEnaLsm8tFEgVdV0/uw8ufv3vvfeV/OA7v/Wv/dHHP/fcyw+/61x974VL910IsFEQzLp+4/xZZlayo9QVyxSbPh5rLyHQ5uGNZeodAUy5PjvaXXfLCGiAqBkr1zuYhtGZsPNaOpmU4+mkeOPGPpAbFbbKcdGaFn65Wk02qnpUtlGWTaNJTLlLvaGNqpIqk4JGXLnEbxzMqlFxrZm9uqx7o0AWe+0TTDe3AFqAU9SCImQ9TewPuyYFQwQFJURCEzU11cGmctqXNcTIxGCoBgZmAm8GpqfAI2REvXuwt0Ew4WEbf2rnsNO97gDRAgAUUgQjR0jm0BgB0IjMEZIZgxHzQNwiBE+WDYzJAAkEDEXUDNQsGyQFGi6DAeGjqgaARmDEpxRtAwAkT0RIWa3X09dDAKKGRKe/QgO2wkTEhhYbs8m4nownkB2RR3bIzoDqqortom0bN6rZIRMioGpm9nf9WKfIb7v70gGcqSCoiLQ5Nd2q8CWiimQiGohDw0AZiMArmFWjsqwmS13Z3fZlIjbT08awwUasZgACYIAZKQAE5iHvN8T9DL4IEge0YdCOiMaMTAyIIIPrCO30QgEbWuTAsokDSkgKKBkFdGpYEAWCPgsMeJDBfwQmoA4Rhn66rECkgKbAZKdqDSIgMBgSEgEaiVkeZCs67YpDIlNgRENTAzXNegrPOmV4AAzFbYSDrDlcekRgTANb0hRIVAiBiIbDGEAEgL0LDyat6sodz05Wy1XXzCBITt6RxtwZdyoLUG9cCtagveRNsnNjv+v9RpRuuTwg66piUvjNXqIgOY59uwYRR7ZcrVw1BcDVcp1yTL2k+ezaR385x1m5SbThpltb4DkTtN18VHtkzqIKqAocuAgeCU0kFMSI0jaikYrC7YzStH756rxw46L0rxytN3053Zi8dLw4vJ6bvnXFmA+h5FGN/YafnK8vfdM3vbd2sVkrNRGdknPSdioZuy6atss1ZJMYm65pFgtX8WLdtEVexv5
2xctVkpIdsy9p3fUe2JD6DBVoQVgTi4pLsM7EiDiUO6uSDaXgoBgglDQpknIbU+rXPMUc2AdZsv/8QWxXMSY2QWvX7//VX63JJ7TgSkGLWeuSi1EQBa7Hb3/3l1nTfPTJTxWBFTAlGY/LM/ecPTqevz6b3enbacEv3TxG8AFZLEHJQtamnMxi14CqoWFKCigiloFjpkHLFlOJCHF7efj+9/1cXLdPPv9yToPcCn0WNsxD6DadJiEFoFPrUg5kFZlEEXJH7ToYaFSHPC6LJFbXbnNzK0ZddqvgcFQFYFcURM0aUzbsLp6brDrNbdv2PXtaNrQSLIFH5Dcnk50LOy9cu7pmaJququsE4bDLTR8nblJtbJdtPwlVEWPsFg/dv91eWza97Pc2tfyWCztMa+BJ27Yn3frc2TobzrI2XffIww997qUXCXk6rvZvrQCg8t4pIzGqeu8ZUUnbKEkMUbNJVumTeSYfPBM55tLVIz9qs5zw4ni1ME1JMjtMkokETQ1RzTIaeSiLwKUPhNqmHjvJlJCamMGMMyXAnAcDI6WoljIxqERVyIYmyYwAMJs6dL4MzJw6EVBfhJwyIkqSYbX1TJ4ZTQFMDNizBwOJxQC3NlVEAsxJEwg5VlB0pI4iE6O3nEU1ahqiJWLUKCnwImdlUlFK6tE0q5kN1akIoKoU2HusgmPvFqu+7VSyKOrdddbstLqRTn8FTSQxEJIzJOeYeNCVYLhnoRkiOQ54Gj8LCMjskDwZALNaYsQsCoq+mBoSIvngTRvJUSF3cT0JNCo3QwjT8c7mdOPO4et7x/edP/OEowIsmphiQkRiYHaOWUQh9ZZVYsopx5SRvQvBe1guV6s2IRGRZs1GqGRqSkCaBABUKOesoIyYMyEERVMWLpCMNCIoMlHOisRJzCdbrhoJgJ4LhSR9DDDacl5iv05ElppIBalTHDkvnoAga1bQiG1rftOXJe7tVjn33Vq1R0FCjwqa0MCxJbHTmcwwHhkGYagyqERIcArMo9OfgveOyYmhZQGRuM7dOuGUUAT6SAUH1s2t883Ezxe32nbtA4j20Vrgwju3Pmxqc80SgvlFzqPCs2AXzTGoofdMAKaqhDGaGrAhAvemYKrkxAiBm5SHuyITIGImzgSE4BwoQsoJgBwWs9u3CeBt9557+rmDg9bOjEoBgZzYDAgygHdQKqYsAKbEfd+D6vL27eXx0cnmjfbocGv73Ob23mS8wUCGGNUd7S+OTk6Wy7aJuY83pxt9fenB0WhDZAl9d2a60bT9bHG7BYMwckiArCDe86Jt1km9kWaWbKNxvd9IPTnTI+Wwc/Zt7x2P3721c6Zj2j84Xpxsoq//4Q/+8a/47h95z7f9kch1J8XOfSHX1jRGjltgPO7GJJOLZ+Etbx9//Vc/8KVfPcpHh9cOb6eT681tT+UT3/LVxSLHVI0fe+/G9Pil128j7E6wvuexM/P91bguwHODG2G8jRzUSIuRZVo3WY1C6TWbJGq78Fu/+Zuf/NjHWilXvbV9LEvnS8xtmhTsREQtMJIrOoNOJRr1GYIDRlcwOXZIhC4oGAXPZgAIllUJ1TLEAgUcI+BqHRGNEb0KG2TLjhySYXAOmBmUqRfxQNHAEAqiwACWyBfmOaIhGpmoRM/AhKLGCBWLmkbBqJAsG5oP3pEzM2KUpJIsKWY19ZQM0Myh66I1gMlwIcrrZNDtgAeENp8Kpul4uT2qW5RyVGCkrm2pT2cc3fmX/+zaR8697Q98D2zv7Bf+8Pbh6Hh1ZrIJB/vUibJ1W3v2rm/cfOu981tvLD7/Sn2wdrEfb0/na5q05dX/8/9+6HNXT1547sxymdifrLpQEfmA5Yg3HjHvig3ncm5zO378oXKD8ThT4Hpjul6uyJW8MfLScSeQ0sG0dl/3Hnnl5uT6LZ7U+Hu+5YE/9d1HLCWZRNU3XhmdrAKwNrEclX5zcqdLklMNWXoTApqXF7Bevj7fcEq3P7OPL053zti5s3Jy8sbzV/fe+977vvltb3zo/Xsz6Fbuia//njTZ+/iHfwmQ+pgT6mKx2iuLB+49V216DaJzcRq3J5Np6Y9ms1Vu1znuTkbzk3y07N1IRJJzfH5j99Lm7q3FvqQ5cDxZpuloZ9FBH32X+MLZi9uF2z9+AxBQhCWXwdd+I8fUo+TSFtHm88VOm7CnPslsrcteOFC9OV5qDpOtxToJyuzksCydUyp9qEIAszIUY/Rni3pRp+W6W3WGCcHhncVsRDIqXPBVkrUJVuXowYfuXedVqKo3bs7MoMv9osntOtZbm5tndz/55BeaZAl0d3uEnnidXMbtSf3aSfN69/+n6k/jdduysk5wNHPO1bzNbk9/77n9jf4GAQR9BQoijShYIqhpWqiYiVUpaZaWJWqWYlMiaYKaJGBbiiCkKIpAKiBNEA0BRMSN9vb33Hv6Zrdvt9aazRijPqx9ItPz5ezfb5+z373fPddacz7jef7P4GJ55vK5xWq9f24nAx6vVmoQaq9RXr32+jve9ZyknAtkpabdAqXcpdiDYxDAknArVKKKUE6L3e83T16aC8nVna3Dm0e31stHLs+RqCnYVNxOqqs2vduv7yaNBrCIkzacCyEWniiQwWmUra0ZVcOwGSSVsy0NQdE8SL7R8e3VZnLotys+31bnd+YmpUZ2IawX63XK/4WraFilyaRRsJzzeiUxF1+YcwwoHBhU69pHBET0gKnopGZXua4fyIfgmZQ0uKEUzapi7DwEsNXm53/836T++Hd+zdd947f9/g+u3OS15//i737uT3/791a7T15++tI//XN/8ynASbMNu9PpXrv6jcNlzJu+zAk8Ux4SNT47QoeWwBUIwdYs8V2PNuswfPbBLgyAAqauxvv/5keufOvX3HLaaTX9xj/31q//47hZnf6D75m/egjZQetsdICPh8Hx1IcGzBAcv/ySfPJj9AVfjlaaT3+6/Psfhns3QVA60ZSVlRxLAUIqRZ0hLLrg9ZHXPnnzW987/eZvmX3Dt5TLz02refrV96/+tx++sBoADLzXpFQSCII3WtydtDDMtMRNZSsgMU8qgkSAaESOK+MgWgg9kgA8jMioQoK920fwl78XEkHsH6sYogdtQRGYwASyABRAAiBQhSxADjxCGNvKHto4hgxZAQo4sZNT3p5CXcGyg66H1p+r9fQv/pHjt+9c/Bv/oJ9/vs2+eO+Lntn/9R+D7/kz8IGfaT9z+Paq3RynyaQC8r2WfkgTRgpgAjfe/4Fp2vp7f+eb76/e+Pzf88zeY0/dOZWm3YLiT1YLbqbAuZlNYLF+9Y1XPv8Lnzn85DKuG/J1d7zaJN3fnsV1ubo9fWr//Mu3h1yGk+XGQKuaiPTJnTYe3b9770CnWy7ipKmymmguKn2mVcxCvE5aYtlvqyk394+WPlTmeNlFUakc1ZU5xCb4C9vzVewqau9cEzJXBTk5iCV54upzriIwKPqQVTSasR+ypc+AoIAGJgpKnxtjWhmtsKjsGAjBgIAR1UjLOK07K7oi0XGKRyPQBsfc9pl7aQSv4+eEC0YCBCJzBGAiSk
sGVrsoydETFoQmjbrCBrOSC0qQudCqJzFhHIkoAYRFSrMeWm2NoaZc6fe2BrOZum4wrgCAD+9U/92/EDZ3/v7vMTU7aFeXJ9zXULm6Bo82VV3/nZn3nY/Mm10/0XfvRfn/zaM2WXhxTDCMWltpn3Tq1VxWD88Y/3P/jkjV/87PHzL4dZYzyOXB5rKPPcx7ZRasF7oq0iC9FXoSNnUqNZaSbLSWyIaS0frtlihE2aXH+xhdwoOaaZ0fnXPvmRv/F/zjbW57/+6Sre6+ZcVQs5U7qnPzz6hph14GjwwPuHX/wvP775Dd+5cfr0rf/3D58Dunp8W2J4+X/7idOzey4z1db681emh7v/4YFH3n7+PU9/4bnP/Luf+g+D3BL1UF3dNWwMBiVCEBz3x33bny+W+9MqRmEhZF4suo3x8MGLp5pmngTL/lqUJtPu0UuXP/vSc2mo6xsbx8u0vnPqxvXJuR3yUhdrroPwyKUzG5hX81mvX9jBYH+6NDmApmWkl24dPXHm3HBcVNPlo1ubE8v7XVwb5HvN9HjhJzEUJRb9MbncspvPfc86YTOpPIeEUS6f3nnl1h1FE0U6jWS5CimGNif2IY562bQ+IaPrRbEzHu5OFuO1Egszn7cWhR21lV+2XdYkx3RvsljWVUBsJbUqejLpObtWuv1ZlQI2oZq2i53t0/Npszc7sn0YktMoVaq8T1yuNf6o6jxxdGCXPt6tjh8+TR941zkse//11ZvcIjQLL6AqZIHWevnO9n976Ut3T47zfk6a6pPlIHdLoxQYBKzQdtlb28xymzFnL1zbO7qx37fZmX6Z51m/zLoqdLWfLOLspK5D14oQQVe3nGecWzTsHdHQmoWdTDunnCQgkgEqXV7XdVHksSGDcPbsmgGzmDT/3ahIAQi57WRU5m3t9xe1RWcILWkrMWFnMxNFuyBD64QUE4iPlFRl1WZhQVj6gCqSAiPnuY0NxjZ1Xo3Tvf3DUZkl0EHPgUrnmzLvWSBEEumIiaypmzZJ8o0GkF4OeQ5f/swXzpw/fTjna/3Pv/8DH3XvXNu9dvPq/tFXPvVjGKo//a6379/cfezyNi9bGEVoGijDWV+d/N4v+FnfVGaegnSm/Man4TxOf/Vz5dwu7y4Kl1NemIShC8agUSRC8R0bB1EAPYOFE192sb1zzSgZsVGEHVtH3HVhV2O8lcvWAy+9cvx/+ZOnjgQ9ApQnTz2w/sGnjz77qfHLr9n+OvQ2Ji++ZTYv7nNvfYfknY+ahx/k3/38nGXrobM+xM93Rxf+5v9VoZe9+Mzw5349P2mhlfG5rb0f+PvnX/3N7rc/NfrKLkwVRgWogm9hVIBvwHYRbLuoOUf32Dl67DzcmUDPwY0DqJegClFnT70d29no1hvwiW+4cP49z//ifx5W/MCtr8ARwMHB9do88t5v6p/b0ud/DSZNZfAAw66Jzfa5J7/vh/7r3ef/xU/+y3KtXBx1aU4yQmCzWCiC2F7PIU1nMEmtUDcU7JeFZG7mvXM2UiwyTjGE4C7tnH3p9lvGgDgUxKBhsmha1231TKYAQVLws6ODZdtknDti72MTwyou60y+rLvpss7zvJZ63Hd37x1rbqzLdvdbammznxd5f3/evLA42Gvib716M8UIEjMk4JwVRJVBGNu8r9QCJyYG6EIEnCxqEODA5/KwXmYU1WWUFfkxz+6v0AhWYpS4clgh0lfTQ6Ar+ici3Rdd0X2ika56VQSYVmQdBAFdRVLovhNrRSFWiCoESJyEUZAYmMVZJKvEAAIQFEQF7zuLGSkRrgYDAkmZBdEoUFIUBZD7UNyo4JVEcUU1koSoFkiDECqoOEIAQboPNlaBqCkwAsJUVENAjJQkdW1GmXHoo3iJ1hkfAJJGiagKCQDBe8ksOseDvO/IYPAq0s5moW0SghAFEO+bKzevXLv5qlm+7z1vfweiCai4InWDApACCQiQmMiXzj586/aNXn+E1iDG1ZAlSiJgwFWdPImsYF2r2REiUlQVInI5mraaHlfL+WTv8N7eFEE7UIlCRKIqSEH0q1p60JW+HtSgStKVWVVEYVUaA02KoBF0NUUBBJX7mSG9n+gB0qSa1DKmJAEIEeX/r/0BgGmV95FVp21lRBNAVUIHJBGRyHdpOHICSl6CAhNKSACrAZSiqsL9n70S2Sugqoquho8AuAq0iyIqgqxcyASOoceYM0271KzY6US5NZKSZRIBn2LSlSAOQQXvQ7MBifGrWOuEqpjqrjV9l8CTV+dorbeV/Pjam+bll3ZvXnmprhIDMoBlay1TG7YGg61RtjXI5k06ODmGGHJrWhUAIkWNwoaJ3LJqr968jVI+fPliv5f5gJwXagwnTb6r3ZQwtV0dkyZEY/KkxrB2UhPB2loZ29Fr1+85SkTK1pZlLqKGMEZZ6xcpxhhTpzDxqYVuwCYz4H1gxS5FQ9CpLObeZDmR2a8aYygiJd+GKi5md/LejDRTpCzLrcJsOvOVt8A5oQERBmYETGwQwfogiZyCBo2cwv3PCFXroKdMFtUpO40JPENSFR8ZwNlsVoVZk1ISthxD6LqYOc4t+5Ayci4fbJ99BJZHi8WhSKprCUGty4MyaGq9H1KWoYsibYiRMEQxDEkigxpDqBgFvFdCrKoqszkqhQQhQtkr5ydTY5iN8Ulioi7q6s/LhxBF89K0dSJCAkQBiKoRhEEg+caTYSIExf4g21nrXb+1Z9A0tbeDwhjQNnYh5MQGEYNWIUTtMKEkEbq/iW+79txOv20DK6nCtPInS29JVKpCo5kve8M8ztplHYcFXzo1xiDH1+vN/tp7Lz71i5/5zHrPqUG/CDHU/fWyyNxkunj72QeGJ0ejjfXK6ZdffPkPPf01F/t2cIbvVLNDriJrkuAM7IwG+biobh9hwl6/FzV2iwZAGEkdnj+1HdpwuKiyngG5z9YlQu9DFxMbTKK2cIRsrDNEMSjGmFSOJrO1UR+jb5suqXoNvouplShoLDISKomyD9FHMRmppLZtVVUASBWFUtLgFUTUpDznZtF00avPBmU57vUytssU2hisdRAiGM7yLHcWPBJobk3jhUSDpIyNFdCUQBRU0DISgmoSUEIRSQkgSjChWkytGTqmhFDmWeZFojAjBJEkAiiqIaaVuzNGUQVLWHWpCZWqGqakykTIFDSansUEeY5JNTPs6+hDilEVgA1aZFJgAOeKTgNlxWg0LDhHhMxybjhpcs6hZnVUJfGxCaHiVFKMWdkDRAI2TMRArIYNihAbUhFgRpNiEhFjCnbOki3IAkASzXPTK8YXT11s6um9Ozc6jm3srt56yzf+nU+9j6MhpRgDKqQYiXLKCqIImnbOPnJ6dmv/+NrerZsGerd2Xz6//ZGwiIVu9tzW0Z0bJTQlZK+9fNIuA4aYm9VnvHYhqmhZWrY6GmdotZ61nVeyYgvDMSlTiDowxnDqlc44M2RJ6qPAcHPNGrffxsWsJRAnENpU3Tuej477lKhJFpiIDK3awybvZctJyICs46qGlEQT+jbGiAoEIvWCXK8mkaTZ2mhgqJcn4gKqaglEZe5
iiEqIhtmZEDtA9T4mJE0QIxAh0VcVBwhAmFAJ8b7pL4mK9vJsWBSDQYGFRSQGkpgkCWVElihjX3cs2oWuEu+8FpnJ3DC2/mhySMyGEAhFNcscIqCIAhoyMQYFtUxJJUlSAZdx23TIRgWTgGUaDLKN7YFxPSyKCOVgayfsLXpFr11OiCFpqqomWWbUBBJUjMIwJ0IY5GX0MSIyZ5JJS4E3B/l43fTXyJl2OW2OTtrZzM8abIHVrKjbUZFXhwgIUdAhZ2hICTWtCuZRBFStgYyRkxpCIpDQVdNFt6gNEiElScas7rAiKoUrkkSBRAhEiGQJEYgMM8CKZUkiKiREltggMRAbl7NhUGZjATGJEHPwUcTnORLRbL5YNs3WTr+ggZBTIhKi3KWOkJyPLTIW7ez6Z37l5ObzDz48gB4cQlIAEDM/aWBtlA/X7szrwzkcHNRzqPYnlZZZUPzAO95bHS8XYQKaVlh0QBCFpACiMUUCFEmGGVWtGgYxqoUFRWiidDGm5E1Zik+iGIWaGAdkGDXGqKKIxIgW1SiQKiF3KEGEwCFyDN4SaEwAkhvXVJVQy65weQnAwYfUVc3ysGuXAErWFHmhbBQ4ISMiQwKLolo4NyggYFImQMyYLVMCEFWVUDrXz7K1gnojzrNgNnL9ahn5ytWrj2fw8W/+mhdfvfLL+81PPX/9nevZu09tblNQ8emZ33z+jd9oY9iktUL7ybjGxFvd7uk/8HR5+Z1ptLmxcW60s+7XTHn6VPOFF8aWiEQoQQpgMlRFQ4Wlblmf1Abf8d69D79z41J/+eVXbn76c+fe/9Ajb3/v/LMvyf5dAuyMv3nBjT7+x4a/9qzfu/NWBo/95b/aro/b115+9V/98AU0WNLFnfH13/j17bNPXr352vooM/fuvfnT3997/AH7tneFM1vD9z99+2c/OV7rr6335SCdPnfxjXl73Y7zM+cef+iRyxftD/3vP/zila+UAI6NK6zJ+GQyR8NOJMXYL1yGzgbV1rcpjdf7J3vLM1trw15/c9izFA+ny0i6iK0zEhGPm32Xp82drRbsnckdAzZ3lFsk445P5lh358tBXB5t9FwGMUVcd1mXwT1eNj7WSY+n1dn14YNbw9vTEywcbAyOpnNm7CEIqrRxsn/Sc0VGlPVKBWlAl8FLknHW62Jc+mAz5xDYknSpFSXHzBw733N2c9zfPZmvrZWLbkqGe/lgfzYNIQSfFGDv7qxtkchkGS3qyf7xHAyTQ5tzTOK9P4nVyFpNMLZFn8zuwUEM0WaoKbnclciTyRTVnsymk7YCBd9FbkJeZKGDxdSb5EpyH3v86Y0hTvfuLRcn5x84c+fafrWYHVN1+nQmph8UYtVhTIrupJ2LyGzZFf3h9sbOZHb85u503nVBtCiyS2tra8wqaVE3XLrOh+NlO209MnQxZci9LCvyHlislosCMm1DpoliY00OzlpmTZpi6Bd2Y5SVUF84N57Mq8zm+/LfY62ZsqYTC9jOU/Qpd9kygc1UIaCLg9wyqyYYOIcgDtEhtwksZixmYziaSxsQ66brl/mwYEFtIxhybKBtVo9qKSPGjNhmzBzJ2Lzn6+Wkqh25jIF6Wd02aIGQ6tanGKuoTrHIds+eG37+Z37kbDH5Q9/17X6Pz7/34+HShz75i//ll7740h//rj+TL77m9v/6HwYnt8YlwmbZ3tqNF91Df/e7q+/7e3ScpGpu780f+u4/Py77zW/+zphKueOj82CIRFLomISLkoGACZR12eBaDzofvWFjk4hBiyElT5lBnsdub4knGXRrIN3w2bt2uA1ntg5OJqNv//ZbG2sbDz4Z/vp3WO1g9+q2T9Mv/lb+Ld+2jALrF3i56OCw9+2fuDumbtrQqdNy6nzqivK9/fbTr+S3rwIgYH7q7jL975/MYQFBYB3A1AlyTgGmAGWqf/Jf5t/0V6drY4M82LsF8wM46eBWAxvx7gcvD88+fvLi4c4P//PFRnztE9/y+PRVsO2jz32lfNsH6zvr5aX11zb0gV/+qdf/4l9+5Je+AgjFw6M328MXNf3h/9dfp6e+44d+5hf+/c/84I7UqsNUB2OgiTocZBs9t3d01CZYLzfLteEbe7sbg972cH0xn1edNJ2oxjaEAqGV1IXmjYOrgeK5zY3GkcvcnaMlA7oGYSpzP2dnTUZWcVQM3rq+ezEfbJ/dqG7iBtKSZLi+LgUfTibYNxlkXYLT25u3jo+ds2e21mYn0/V+dnpnA6C5cnBkBcg4tT1r2RKcHM/Z8mg0MDY0UD16xi12m/mcmkiDDI2mZZQ6JMZyZGXdGh+8hk5iyii/n6cQJQG/8o3DqiVGyECEZIFZRJGAJCoCEoLSCjS8ytKCikYV1VU25PcLSQqAqoIJI6AqQyJBKhiYxebqclBKAJCSolpSXaEBETAgKAqCGMvKKIgMlMWEoqCQEqaAGigJgiQFTSqEiVf2NBQ05FczL5ACBTQKYUgghIIYU4pd4KgODCU1oqRZnuVtgC4GAySiShZWEjNUAEhJAiCSqMTx9lmb5RCWvq5mBzf84oh0cO9oF4u+QS4wX+di78Yr853N3voZcAYAVjZwVFDSJIKgKamYbH083N2/fuHCQ0lRUEGAlVc5b1WC+0MeJEaRSJAQQLpOUAyyoDbzydHe3mI+k6QpaZeEiDQgCJIllGQUBHTlsyAmVRWRKIq/XzEDXQGDRPF+modWaKSv9r4QUGBFNBcFH5MhUo0qCcjifbP2fe8ZftU7LwqrJv99eZloVEHgEGHZxuEosxbJiBKhABHJaqIECQBEV8BVTSoAgKi4ajKufogqohABJEFCSkkRcoAh88iyF2kSLsOq/oFGxYIwog/iCI2CjxKRHJskgoS6Ot1+1feRQiqcDAYcmfJBL4q5e7393Ov11TeOkrcYgcj2iaNGQ4CQUgybp9c++HUffuKJB4dF9vqLrz//5S92u91inpQQAAxhZk0CZMM+yd3DTkTXBtOtzYIZY6UrMCgx+qahFTBMIgBEIUngfXDWIqTeeknFJvye+HnMFC2iEVkBntgYFMnYWWcOF1WRgYLMmhZj7FsWhRh12sQsN8CUou/nBmLYyHNguzUul9Nqd+/uM7/6yx//xu/CLMSONCTDLFFAVYXYWLJGCCWps6b1nVUtUJMmEPXeI5hGkqAqeGAxBifNbMClxGhEyzyfLnyvyNpA7eFCGRNA13SgAjEKcZdiVrjT5y5snr10697+8Z0rsa0TgqAgJkzilJHKRfItgobWMpJCDOpTKJ2pG28NgnMGUZJYJAKqm2Z9faNehKoTX4UdMpYtgXY+NiEacKqQGRtIMiSLVDe+cC4l6LrQxC7Li0FmQFVSWN8cHx8vVIWJc9Z2uWyTWKJezlZlufRlz+2Mx810SYjWZkeLWg1lSEIghI4phZi6WGmrUTAoslER6wwSLCtxZbZx4dLN1++1VcqG2XrRG3O/rdv5Mtrx4Iuv/t5w6KoQHjzz4N61/Q7aQdkH1Nev3qsb93Xvevj5l7+sm0Vrs5995kvn8uxrHn7oVNr+7Zdf0GV3OufeYI
AU58d1GXQ4XgvCaPje3n5hS99FSEDCKZ0MrJRl731f864f/eefBIC29RIwRPERyYgzXFpnjM2tY1ZN2nWp6ZL1aZTl2i18CMs2hiphQiDXVNG5jJga3yQvlqmaLq0z5ByiUgRfx9BK8pIATMFYII5ZknYxUEcizbAsx8MyLpcSkwCU1mYuKzI2KFz0UtshajAxhGisiUlBRUIyYFEhiAYEh0isKipJAIiJGh9lusgZi6JEpqLMii5I6jREVWVjgqSkGkWEMIEqYVJQSYbIa7KGo0FECCoxRdO32jMs5GJC1LHLDv0iJYpRAJRIg/eGTQQgTZQZ2yuy3qBv8+BbIgDCPM9RPGAWqg5UCbDp2tY3USM7a7NcNBnnFJNAFF1pi5ENgaau66zBlBAhGYQiNz1brrY2nQbBrpcPNtfXz569MPfTjb3hc898affmG/V098zpM73euD8+U7heSgrWoVFVgATOrp/bfOLOW9du36rWL2y1dnHcTT/05Puq2dHOzhO9NDnc7d64cSBG2o4Y1EsUUXZsLRPCaM26nEMX5pMOhJg5czQodRlTl7DMberaRT1xbr10+cG9O2tnds6du1i3vsh6k6NlKa45aTSKU5aI9f709MWdiZpFnUDbIm+LYYZGE6UorcZaxBtDxhkkrUjIEERl5qjaNHWuKXZ19AltmTlXjDc84XTvODSe2BIiGxIVAlgldTsvKUZCUMbfp7SQRUqAyqhAjDmg8WCMHZbZeGPgikwyYiYLsFi2wQdyfYw+1pGQIaGvvINsp795e3lr3C+btiGgPMucy6OAUzUEKQUE9KCqwGgEhFd/XcYGhY6ZDXkPmqgo7PmdEZoQlLsQDLsiQjvdzYU2ds4vrKubqXQ+JVhESaxFpmzIkCLEkNo6HjGZKkZ2/dgzNN7Ot9co67NyXM66/ZN2f9rWUT2zWoskJsWYiLjIbR0CU+EVcyFJnePVISIRoSHqfFQRi2QRMcGKKb+olt7XITRZkTni1QAIgCxZRGRjGJiYAIgAjbGIiEjMqKBMzGwQiYkMWyLDxljjrMsIeXUkIWDHjiwvF8tmWc0nk9deevnzz33xQ1/3/q//8B/N2JEqMYOCKbjzMevl0zu7v/cLP/fm5z7bzvYL3MiTGZA2XmIXT5e4ZqpL5x5aLPvPfGlv2aZJXTNxW3UHx8fXbe8jH/joT/3qz5MFCZHZxhA6BMycIRMIYvA5ACMAIDnbs9aCdN5HBIUYYoIkRIRoUgyS1LIBJQUAghg8MxvRkWGCRERIZAUALJssIQiSpGCJV13RldUEAAlIUicpLafHTVcJSOasc44MoeGIzIKIbNiAQFKJPuUmG5SiqClGSQmZFDASK8LWoNwe9efN3KntbWzdvXtHMa6ughb48y/f3rh5ePbUzgNPPNIulzakX/ncW49QenCY9X2BHZNwZSkbCFPWpp2L3/FXHv2eb6VeoU0d7hxOPvnD3d0r3UvVRdvTUE+bJmRYsGliIGKXm64NRdRmMes9vPPQ9/wx30tn/vjHTz33ifw04M5YHz7/1g/8GM7TqW/+5vH/9B2RbK/Vl//ji09+/z+/uTPme/eOf/BHLlanxgUt9Diqh9996db+D7/tT35sNNq4c/vV89/2p+CBxz1u0N3q5f/vz53fX5x+9PLpJx56bv+FO2RvndsZffybnvq6Dx7vXv/Bn/jnz774qi3zrJd1IaG2ZlSM+r07k/ncd+PcZMgjm3nvszJDjc28HRjGttsc9o2GW3cmvX7ZsswPZgOHhmnRLgvj+jZnSWdHfQhVv8ejUXllfzcRbvb7WWhDXXG/TF7Y2WVdAWfDvGjahgxdv7O/PaQSjx5/6Ozvfvm2tVQUeZvk/Jn8yt2jNnFVEdS+P7K2lx0v5+wceSDFR85tvXbzrgJo0ISaopDoqJerCpJYixZhsVgWzl08t3XjyqunTp26cu2OV8j61jjXtbE9Tj1TAoAx4H1cXyuAqfHRsamD96E7e/YUS3JeKNH8sBoOe8cn08fOnumqGQge1/MIZm3UnywP+mQRcFiOZnVlnV0bF87Y7Y2to93jo2qfhy7HVFf10c1rk+O5cWV3HPJR38dW8ywf5Kyx0g4xsNW8R0b9Szev3T6ZRY/bZX5hbbAzLgeMi7re21tkNq9jOOnqhUZxlGUsHamyR0ze99FkuY1eQG3SfNyjpq6JiA0HTZyzHZjhxsjHxXQRjVK9qGIr/92oiBgLa1igi/Hs1ub+0aHJszIzta8kJeYMgX3bDsusrmtnNGPnU+gUNcFo4IqYQhPaSFj5UZk1bWOcHW4MZNFETSIiINO6po7Go1FuXJTkYnTGAlACXnYQRNg533YAwIYTkJDGIMdz/9zz15564okrn//c8dGVP/Dtf+bo6Nata3ee/ujjb7w8+6nPP/uu/nrve/7GY7m/9pnP5vfequs9XnD1lS/1zpTLxeLucfvQw08fv9h95frxu//+369/92765z86dtz4ZTXKewNLkZuiMENbXjrjn33TWQKpEuJSxapRIonKSYnEOEs5DQRgPoE2AiabE1RTCGl8evuthedzG3d/+sc+WHWwfwIba6Ycpisv9+03nciQXz6kF65g2x+Vl5IpZ9Vrp689f+HqF/Z+6/Nr585X927A44/DK1chenDKbQQnMKAZNbM/+z+Uf/jr4Rf/zeZ/+RJEUzpqv/Kl/BvfYw9uXP1bf+HBia9C1fsrf+uzd28/8C/+ll69dvRN33zx6jMLOnOjPT7fHTdvhsES4fkmvNi15eyRj33tG5/+FFnTjLOjeXfr9b3sr/+5p7/xPftj8w/+wd999c3P/I9//cP/+Z99hhrloEEpZRyEjmbtYNAfWlM4u3+8PyjCiBoH7axZzJdtb1gWOZPFjiQZxqiA2svKpo11xOW89sGThrGzGZnhYO1oWZ05vUFRJ7uThx8+M0iQl6bnaDqpN7eL02eyq/cmTRCO+PYnLkyvzaLg5mA4a/3h0ZwFppM5JWwXse+RS94829s9bOs2RkDDEHzomkXq0iL6uxH9LM7qEJnzwg1GJSy7jV6fyXLS5Xy53i/H4+Fy6Xvcu58qMgiRV4EQRkRdxS9IUVzJWU6i0DVKyBDvk4sJ788Fkq6wxQKA9wnBioJKsHIukAKiEiQ0rMQ6KJWdcgZxNRUBUmRFowASkyFMiGl1kEiQUkIEC4AhIFASDRFFMaaoIggIKREishq8f0ljFABh5AAYVm0qQoOgoAlRRFCB6hBRgmjsWTIWuPQCRZ77OA0hGEZUJUBmSqKAgIgJkImRctPbDL5Fak+O9pfzWd3VbHl/93CweXG7HG3116teX0P14vU337tzjgmRdJWTSfcVXspobt17zWJ+8dLFt15/bTI/HA2HGmTlXyGApCCSvrqDT6CIQDF5VUwpAFvnrGY2pdQ2vm18ltm6SrpKg6GudGJEZBhF1WtMK1UroCIQ3p+5wCo/BICI6b7wHlRkBVUiRAVVuj8NXMWiYgTjkIiIIa6I5vfHRPdDU6iyCuuAggoQ4f15kQKgikJMEFJip8YhtnC/EYZgmFQEAGKSV
ZLIEDCtFry46rNpUsLVgQqJQAAcqbNYMGvSSZAuaVIMCISYJJlEUWDpgzFmZItmWSvh/ZoJaAzigBT5q1Rr6PU3Ebibmet3dX/q37qxL6FwaYA4tiaI1oZAEEkUDTJCQe5DH/3gk0+9//Sps6lLm1v48KVjp+1kebJ6DxiRETLLwadenlkeNsswn5vY3Lt4+UFjXZdSFOhnBSDF1BlrncWQAhHFkECR0GjSLkafIM+tzdGIgkrXemMtqLadlHnGxrRdHA0GmTTL2UIdelI1UPlYz1OH1EUZMfREx6TFMGPCGvCt3epk2ULOR5/+3Ue+9o89tjGyznRddM7110Z21K9CV4zGTWy4yG3RJ4SmnkhoUFRClJByY70XQ6QMiMgZBm16uZWYQpSyzGKHWWE4z9qYDDOQxqZTEQJ1SMGLyfJzZ849fOnMa2++cPfWXVDtDXPfAoIT4hh98nVIQmolATI5Z9Fq1YXCGCSwhhhBYojIMQoIS4r9fl7Vc1SxjpzlzntJKSTh3DpwlAhFNCZWsQ5mixqANUjXpaSG2Rg2k/mCLZ/aGmTWLBdo2ETQlOKpc+sxM7PjhgJwTJKSSxB9jEpdK04jaerldmCzg8kSIxTONIpFnhukZdMZm8Wo/cxxbroQ0NkQ0q3rJ2B4uJGPhqPzlx8YbOSvfeX1Mh+gkIbi8vmzd/cOpgfHBWqWl9W0ykopB3YBi0l38NR7L79y744z4biDG81y8coLg7y4vHXm4UuP7i8PLz745J17N7744rMUQ1wkDZ0pzanhOmuqQp3lZnJ4E8WfOrUxHJ25eXX/9/MUiGJQowApxi74BOwcGiKIeekiBIjknKUY07KW1jtFRVGhmCITkcXE0MYVzya5wjESJvVtmyKqECo6i0parlszzDBj8YmTCkLVBZCqsFnPOYqJE2SDIgkGH4TJMiFCiklFvQ8JDceQqbJl6WJK6oFTQmW2CqiJYRVqRC80X3rW+cY6loOcsmwsQTu/CFEEibGLUUSVKImE+zFOjCIJlR11KVECRgAGzPX0AyNbuoNrJ9WsddYdLZqmDpKICK0xSaMoqWpuMxUpimIwHDnipJJYIkUAMMQxRdUkKYim3JaGbNMFQGesY2KLKNGzpRQjW6NIAklEANSAlrnzUQzbrDcwTIBIqqLRGAbOJGlQ6fUGvV4WJocbPHzH+75OEaz1t2+9tn9yUyUQm8F4A6jouTXmgXW94frZ85fedefOzauzm25r69deeWF06tKTOz3X3f7THx4uJvFHd9+4x2feuHk8HGQEne/qIisHo9wZRcfRQ/AcPLvMZIWNvplP7XQGCTAzYgFyu5Ykm9WJKMeEi2kFRHdP7kYNSpqipkApAmOUo8mc2v3jGSJq6+VkvlMUfQsm8chsLsFnZVbBPMa2iZ0yJp8MkrFocsPOdt4DU+UXxsU0Go4GGz313cmsrperLAsKGNRkODQ+eUkRU1BhMl890gBAFCRAl1uJKbYeAVgoM64ociDTKSlmsfGhqtuqsjkXmJJv2wiuKHwX23pmMcZu2c9tiJ1jKoaDftlr6siMPaLoG1YFgTYJIK6aj8wWgdqkYjgQndvoccLYhUGGwzyJYmxCK9KkCB4TYchHbnsnV+1iapdHqOBJPVJIpjRokrc2A01JO8oVMlMbgPEw2x7CoIeiOp9VeyfLvUVYJgnGEKWQAIEVMQkhgQKrCcIxaRXjwHJInWGDhmIMhAya8twGBhMDCXgfrbFN0+7vHT7wmIgIGlZEZgJVZhZQUGFjmA0RiQgwrw4LQIYNo7FkHLOzWcbGWpezsQhM7IgYEK0xxli2GTugPPOhCV11MLt+88pL733/+5hZARERREBAohJo8rE6uPPGs5+9/uabaLm7outmQM5JXXUxBKXLD27funUbNs+dPnsK1ourt2/H1lMX14pi72B/6fmp937988/9ttEUBEWVQloEHRH2shxTcEmh64x1zlpRYUOAEGLquhCAwNK06kpJKGBUWUEBo2hQNNYaNDmkgSNC7JJAigUZNnlQQJGYkiMmVMMUYkqWM3ICGIPvQtdGiKFeqRuRHBursCrjY4oaQxRVUWAyvdwESQzSto1XTBgMkCGyvZxsvlmWnYZEEAAPJtOIanr3V8hvnSy317d3535vdptu3tscmPe994m/+gP/05Vf+czh829iBoUjJ9RWsWnqYWYf+MDHHv1Tf/HleV0d3nv3RjaY7d35wsvp4JhnZn9paXtt/K3fMnjw0evPvmCXd9ea5uTlV4cx5Tnjuj+885UdW1eT6fVf/ZW3/rd/d/7B02/7jj/+6jNX1v7Etz34oa/d+/JvH/3ED/qrr8XXDk2nvWpR/MavNc9++t0lmtNnD24c1Za8gaR+/ulfunLtt7IzF+mBM+e+528cl15OYnlv9+ypC2tDf2MqL/zO83um7j369oe/6VvWHn3y2c/+6k//5L9MXV2ujRDZoLZVNxr1DNO9k5Oy6E2j75X2/Ob2y2/eiIQdptObo1S3iDhYX1eCo/lcAE3O1HlnDCDajIy1Zdmfz9vJohKQedXmo7WDpb9VBUzp3ZsjaaoisynFmBJW6oidsy6FFXm/HBT3jqtmng4Og3S40XPF2trU+7mP+Th3Qrsnvl9SBO9MkRNjgj4ZIr6xf3JvUqFxGJMjrOtuUGYgMSZJ1pZlrtK0TWfRvXbzihGYniyqFmrhXslFb9BWlXNrB7MFmdYaOxxkk8USwJDKYlFHlaLgellliEVWCtLNe0cXs3I0HlaLmQVZds1hMx3ma3vVCWbZQ+fPS13HZBqJSZMBKRHq+QlqszO0IO5Egt3YunOyt/DQz+z585t1O+sXebB2Vi3a1BUQh/kaSrszKmazFlv/yObaY+e3LeCya6/fPFosqkE/y60xBaxn+eywThHbiE2TLLEh7lJq22kvX8tdljmXQAXUOtN21LZdkpjn3BtkauHgaDJydHN6nBsabK73x/a/GxVhwpikC8llxkMshoWSyVjZZh4yiNwGDB3OWsnzzDidTuvQKZvM5sg2ZQw7vUG9bLqYJjGNR4N5FTrUopcZq4eHc1BtOnVZvmw0JYXosaDc5kDs2woIQpcYybks+iBJQTWz7Cw3Sbqj+tVXbnDnJ8vJ4t73x2VvfOr86O0PPXb5wWtvZL/zyk08gP5HPnj2E49ce+HVwWPffvmhx6f/8cdg4k7mCuBef+65y/bc659//mP/+Af0K188MsDOwdvOlv/znwv5RtZt8qnzTfTCbJuu/bX/dutf/+h22wx6g5CCr33BlnMAJNEIZR8SwskCshLyEkDkeEkFzQ7fsstk8PTZ3QoagmBgkbSFTZR098r26XP58TGEJdQp+7H/mF575tRyyXfmMIunGobBvJ8B7CicL2B/CpMlDMYwWAM3H/0v/0+99LEud8W3fQt8+TW4PYVpu/zBv7az/qe7z7926aUJLOveJ54EuDF+8WfdD/vhG2++J/CX/9D3Pvi9f2D96XP9f/aP9HPNtb/0j8/+7OeGHWI6mf2bn89+5tfzC1sTw29G/vjf+9vZ3/3uv/IX/+gv
/earSuMHNvrP/errbWfbJogyJIwdVW1rVIfbg9D5Az+1hr7msQeryaGDMBpnXRIfY/AyHPfy0s4m9XrW29lcn9SzadNglqWYKvG2zBBc1/L+oZ8Ldoc1dxKINobDdZst9k7GQ94ejmqIaVGnZetaQNXDt/awwSaJVdo0RYCwsdYHCXcW3Qff9u5nnvu9g+nEm0bFLRaNqIKkWnzbVpfWR9INpkufoqvRoOEDn6ppPSY3YJsZW5S92PjJtM16JQlKur9DExBD6ixai4QqCUNAVaLcFAM3GJigJEeNT2lFpVZQC6AIETAqCmgCWslmCUERETDpqvBzf35EqL0Cs0LKQomlI1QyEDkmTAIiIFE1URISElEgIiVQYFp1z0VjlJjIJ0ggIqtO1arrBKSIKgZJkYCU4b5WPQG2hMzIoqKaFOLqhi1KBF0n7CAaGynf6p0u2M6Xk8IpsybBLqkIoKEUEyIaAg1dvnlusH2mS4EoVs2sCw2StqHVauaBi+HW+tbW1umd3d1rgcVrzMnBynrPhASqCsjVYnq8d/3Rx5+O4E5devD1N1946oknQa1ABGRZjU9ESFFUQEVERBWMIVCIBNZ00bPNe8MhZfvFOBuf6u/ePZjNFhITACiBiAaJaMiS7SDoikJEK1lqWoWoV3MeWOWQERVgtWuNoqv6HjGBrrDaSsikmARWZOguKkSBVcoHQXUFsV7hpkB0tTJUZgQVBEyr1htik7Tp4iAnBE2qEhGTGkZiIEOIyLyK+ZDC6lwPILAaXSqBrLbg8T5Sghk7ISUQgTZCAATQqGpEjEoXOQG2iUhgBh0gWsTVBA6A0KxeKPn90sHV67B7TPeuHknq5+jW88t5b3R4cC/oMoXALMaYJGoLtrkpmS6cOvfk29517swF5IIQt09fKPMPXrt5I8kxGzKWNSogp0TiI4CJPvR6xVs3Fpcu7ly/eby9tm4KBkh1M0tCCpJnGRkOIWRlbg1HH2PXpRiqJs7n1cAWU52mKFluU4wgqMqE0Piu8q3hmLusx5ANslnwTYITHyFJJ5hA8oSqaTzIBhYho7tHzW4NrRdBkCAHr9y6+tYrj57ZEaRsDNJ3AAEAAElEQVR+2ZMkPsZcou/126MpeUtaZFlfJGbGgc1810oUAmYkg8FH6TvbiW+T7w1cUPVdalHnYZmbMitckpRbTlGRyBAokjE2kKq6y4+/45EHLz3/5d+aHu1blaqNMXaYsCzWCutmfrJinzE7QE1eAmCbYogKSSCBMSaJZIaXVYfARUYgev7sxuRkFhP4FC2Y4KMqJjKW82HhlsuqsLbpuqwwXWizIu+8SlIB7hQs0KzpysJxxoA6GOTHB4AAEuFk5i942dkeVJO5dVluOc9NUeT39mdeUNmwQcgokRzUc+7Tuc3R8cnUQur1cyLIen1mN5vVvutc8gTJObdR5IDtfH64Pc53zm2NTxdX3rqRWF2Jk8XJcDwIZnHp4S2X24tnNvaPpq/euFZNKmSc1NVbN+4+9Y6z3WG1uHPXjTaWKVKeYdl16ejkxrzx4aXbVxj9+UdPrXcb6+Px4cHd44NFSDDbP1YVZWO2z77rwx9c7h8++/nPg351h+ao9SGKAqMCtREiRIUEsYYuGUP9gdGo22sDkJiiglIC8SklRAWxObohseOi57RNIMLkpRNJ6EOCZEU0xNQr2A54sDEQZwQBNSprF1P0rYgVSKO81MKEDoih7cSH5BRAU0xdCtEnDD4mQiPqEAxRMgqIESAphJASKKWQGUPIIQQhA2jmVWDbYemIoCxzWk8wx1nlvUhA9KokKYnGBEioAECa5RkxKSZAbHxgNoSKohCBkSlzwafUJWuzIFEZyfJqkx+SGkFVWOGRHVvfeWLTc/0Yu+B9W89IFTRZZo0RYkCBdjELZc8MNzWllCIKGWPACBkrqil51WCJosCgHFpXJDYiEnXF8AbDhEoC4c7tK4OiPL09Xls/+9R7vvniE48LY1tNBuuXuxDm07vT46uhm+/fe52Bi3ytbvjMqfNbWxdns7br5r2B3j48/tFf/KlPvPfd3/w1D778m79w6hR+0x+5+P/56RsPnBnNEnQR8hzLXlbmHGLo6lSdRBUryTCTUSLI5scpdC4kBae2VzCQo6xLXcZOY9lWjFbbzlNm20WzqDqJRpMqSldVs0nnerYVaEAyQ21XD0MZk86Xy6pLRXHaEVd+d1JFRLAZZdZYi6Kp6xrp2giJi6G2vg7zRTsbZFmR2yVoBEENqYvIJoCisQlCjAJKElSTGnN/b2DYoAARK0JWlBS9NWStyzKDoNGHOsjyZCmxS7GzAnajcMYAU8TgQ5c4uMIYJvaAym1K/fEgXxvmQTlIEcJ00tVeQtIUxasaZjI2CnnEBNoDefD8+O2ne5OTWYJ8OV9glxDo5GSWrIlk2gBd3eBYGjc8dW57UBbzkLxfRh8SQAwpBcDcVHUa9AuvWTJ9GY54PMDMoQMDjS5m3d7CTzoOYIDbBIrkUTSGAhGVNGqSFFMSUp9ABUxSR6YTiGzQGI0+sxYJOGBhzUJSSAAiIcQkUQGsZSWKEq0aZpMkZi4TQUOGTQYAzErMiKIqxjrDxhhLnBlrABnJGZOztYatNbzaQRq2TBbREEOoA4EZDYobb732J//Ed33s6z+RUQ6IhlhEgFcnHFNNj7/4y/+5Pd7tF1Al3wVGl1lnRUDYLK373VcO1tcGR7PDsigffPL8Yrq/kLS5VpwsfMKicmnnws5HTn3is7/xKQcJkRHJp4QGmQQx5lYDqE+d75IaXrQKXQAEY8gH9SklIgyxyFhaH0HIMhrqfFLiHuPQ5bmFJIEVTaI2UlQARJHkDCloG1PBRoTahJgSMibxXUxRJLdAmIkp1BXqMiWnipI0RvAx+RQIgACNobWyr2xaH05mtcuYDHdtW/QKNUwgdR2W3henSigLa/3iZLG6CnbG64SUF25ZNUbo3n798//t2d/68it//lv/9Ac+8P5XPvXZZ9+6dsoRBTlzahgX/s6zn4GfH+Qf+EOSDe4881/3f/oX9M0TEV86njUN4fDt3/Sxam10+Wvf1R9FxBR/4j/t/sQv9+dNNLyc1xfqQfb5z5lPfeliO8LnFrdf/o87Fnl89eiZL8R7t9eb1mS2miUB9/z3fxLvvpjn/gDw9CPnbx7tpZ7Y9b6PKd/mdq+bX30Nnnn54PNvXPiGD9955ZX93/ziWjksH7l4D0RPX3zwIx85/4F31aw/8e9+5Hd+9ifz3PR7g2rulcOwV5ze2RyUResTEtchCtko5srdPWGuJNicO8JEumy6vXt3t9cH2xuj5fGJaUIeMAdAtrGNmxe37zV7QE5iR1aMy85e2Hjp1p1E/qGdU/3ax9qr0bzIHfGyapy1EJG91671iR5YX5e6PZ6m3YOTjeHoocsbHVlTlHfv3D2Z1Jpj1nMtpWmbYrPYGBaTRdOpbvSzybIGY5F5lNt+YUoaSpKi16/ablq1Arg/X166+LbjazdHoGv9/p3dmRdcRt0cjJqq7bpwPG2tyajA/Wa2v6wgqjERLQx6+db6ILZd0et
N9iYTX6mhWFJ0HMnHEIRNVrpHxqfvHFWRzMzL7OAgUyxdtrlzamM03rTdrVevaa82iK7gw8kul4P9yfzm4XTQ78/b5cFeWB/a0WiU26wgt2ziRn8dU1s3i9fvNtPFcjgcK/Ir1+9N5xVCttnvnToz2tkoTUyzZW1Z33V5eyk6XXZHs7prQ914ZuyNhhGVEOY+XOgVXeenJ/PxsCeEReGcw/4wkxiQiI0JnVaB670lQvrvRkUAyoy5ofHAotQZgY9dFbrCDRik6UIgJiYQiT4RyVo+mLddhllhJHQRDDjHfUN11S6jFAj9zAUfDRtjYHNQVHXbtF0ntaSoZV5aFk2V7wjBhw5BhmVR1XXnPQDEmKwh0ajALuPooZG28Qu5de/84MGHzu7cu37tlz71ix//rr/wxNmtNVO98MZLn/zpFyWHv/b3f+TN3/nC577y8sWSFovZ1jvO7b/81tN/9y9c/5f/9slorn7f95pfe7MXND62npecPfLUtZ/7nUuXHmiKHEbmKLB1o/VPfMvljfXFv/nBdNQKtdOzVnKA3eM+M26OYKvV2REOy+grAx6IyAkk2hrZ5U9//7mqip/9eUgCmQFmwkgHu/rLn7JNhL1bYBmEad7Qf31h9RAJ1INeDmQQYrc39bODgfqmx8U/+jsv/Ivvf+eHv2b+0AP3pN6/9+W3bY2W12dn1raBms23ZvV3/YtwTLn2YbwOv30PPvXG25dx/qX/BACwtvXes6b6j6/v6O70lb82/73ZE+fPwse+Dl54qZpB6+LtdjnfefCj3/t9Y29+5t4rf+fy5dLAE5c2z73j0fTWwbW3ZjbrRZCujRkzOUMipHjSetXklDPr7h3NUyOZ6fJhr2y0CR2iNrPl5CD28qLCbv/kRDjYjNVQaCB4QJ9yCtYoc0cFL+uuVJvluSy6Djy2fufc1ny67CtZNBfOrq1vdCfHi7E13qc8pPF6HpMeLLrC+7ZuHWYvPv8KB9zKx/VJM+2qhCLMRDjqlWTAIGTqq2UzyA0LZGTrtgugkLu27sTqlLCPtigKQUrqi979NmbW54w5K7AYMalU8zSbgQRjDLvMFiXniO1EgogkJSRGoFXFCDElSiungyBpIgXVBIjOMFiipKJoCXs59UsEm9SIEAAwgIkphoiaBPWr/WgUQiBAFVilYCVJSqJJURFB7nedQL5K21YCJFnNOYQYdAU5hGSTsKAXkq8a0XFFIAEmVVToQiyECTSQCa5vLfV6gxQqhWSMYU5ekiCwZYxKKfZ7Zm1tgzOH0s6mx83yGELTy6xGTSnULUpMxmGe5WsPP3bYNF0bLAsR30/UoIIwQXjryitnTj3uOAfR3JWXzly6cuWVhx57KjSgvPLdIxOBKshKcM2y4lqnZMhglpPjJI3rj3v9MR2dpKRPPPLoa6+9NF94EVCFBKqgQURkZUYDoJVFbWWruw/tWZGoiUhBZQWpXr2rKvBVSwsqIN//ZgROKWVODWsMQsAAAAgEtPpnQmDAmBRAEYGB0kqfRyBp9QRGIQAAWBZD2Kyab6AkYi0xMyEElSQogCGkVe9s9UVERAiivNIbEQKgj+LTiph+36ZHKgTAAKrSKXhFDYkIMkOEYBC7IEk0JhUAVWC6/3jwmd85oG6QWfPIxdPHkwOXR+cqH6cJge5rlSDv55ljAi2QH3vg8fXhGQuuWSZkowDzJjZJKLdkDCoQUQrKVq0hYxQpUeqmE33u8LolGQ+H737X+cGGU2Y0igytePKkCu1sriElUdXkCLNEkxsH1dFSo4KiDykm6UINyJYoY0OIgGJE2FBEFdCqW7EYkrXGKsTaJ0NHrewvU6Nt3YogB00A4JxxIvtXXyw//g3ssjzPEaA3NJPJLG2MYgwh+HQkFZnKS+GMA7esPZJiAp8iAeSGMmeni7oG8I0Qow8SkfPMGaVFtSiyEtLK7qPWEFpuO0Wylx986MKZ01dferY6OQbGlGS1Xo8pLdu5IwbV0tgoUUEcG8tZHZoYY2YopoRCGbFXRaDNce/szvbsZJE7DjG0bVBRDinFZjwoFLTxMXZdDJ4RAJJC8m0qC5flWYpN10ZidhYZBEA3Bv3Zcm57xf7BrOlikZFzlMjuThans97WuHfnTt0aLvNstmyBUBEUldjkrvQ+FEXR+nB3UjkEa7Gpl1mWDwemqufjLdNFGm/n441859w2oO2CbBSDwXAY1YCD97/38aODvf07h+NsePGJB6SNR3eOy2Hhx3Tmsbdd/sR7f/6H/sPN10+Y118/mp8bbZ/dPr1776AZkETderB3bnvtrZdvF+sjrMgxVaSL0zDeOHdyNIFsuH3xzJYZH926M6mPGzMYnXn4jZvXZ9dvESSm+wcjHyM5gxqRwbCCQpZZ1OjIGGKLGiVZxxR9ylgNeUCfFCwys0PIe1gOsaqbrMgiCRMXZTE7qtqqBWs0gSQhS7Z0w7UcGTFp8lF9BDIpioRUNTG1CKKZs45ZLAVKIUokSCIAyEzYtSbGoIqotsxAVCEgGugS0wowg9YYYiJCMkaSapKgULehDNEZM8hcb43J2CSLadMSkzUkkigpSCBkBGVelWiVEOJquI7YL3JtUtXUqgBWDTEhsVLwEUQkRUYA1eRjI01RZOI7CN4QuLIffAjRZ85QBqhZXS/YUG4dSkrR+9TO68mgGXF/lDODGmJmMqoq0ujqxoaEoNaY3OVonUGKGkREVQxR9BGBEMy4XLvz2u+99YV7Fx984uLjHwKI1dJnWdZL4DiMzz9x7tQDi+XkzKnFfHl9OTmZn+wf3J7NFh4C5Eqwd3LKaiXtrzzz27fv3fyz3/CHP/tTPy5t7zs/8qFPPfeV3ePFcNwv2RWOsoyq46puMIIF0kHhejkoRjYISQwxChjgbhmIkrQ1Gtg82+v3TIiJnLEhJ/GOV1pwQhJF6W3kvXHOa71buyeFQs5xOTvOLVg3cgjCvdh2VT31PiYRk7kMYNjvxRA8cpblIEzqAyJK8nXDURumalppgKQRCYkZVJwxsXCmSdZASqKCzKC/X0YWcWQVVvsv9EG7qKxQp9T4uu18AO6qDlEkdkXfEoYU60QMEaWLMaSBKTLDbV33imI4Ho52NrNeBo3HukJMPnSiGEQEgBiZEYyJXjnhhcI9ecpZ02ZVYB8ny1bIIHGKKScXgX0by9y0sbp7Y3bcWwvnz62vbYzPpMmdKynNVZVJVbTykq2NUm8z9cY6GqbSOBsoLORk1vpFM6mxBaPZYDCemaWQVCEhqTEmpcgKBiCqgCZCtowAKARKCATLEDsBSVKSWAIAFAVNAoCaBDH1+jkxAQGCOraEBEDGZknVsCW2CkhEhgkIGA2gEhljMyIiImscGcvWmSxn4lX1jJmZDDITcoiCqMw2M3jl3p1v/MN/5j3v/6ijTFFx9ZvcF+RC2/ibr780u3c7VcuBxVQ1w4Y4TceDQZi2RoQyc1xFLrFLVZGZxe7+KO8dHSxrCF7wgYdODYaD337m00+/72u+4V3v/PKLzzUhJotV2zVGgcRJan2riEmAVQDIoiHLIaTkgwo2XYcAuaEuCiGtZC
UhpUgkgtYaC2iYATV0dca5Jlx2HtkYVAJJqoZZRIgJlZouIANbNNZaQyo4qWOGSM4CKECKXZIoyfsoHTEYNgkEmcvRetk/ZTjLd480zJtm2e+7vOQmhuODSaC88tgXXDdIIP3h/TyFSGw6vwRhJAw66ve7rgtT/5M/9dNf/953fOQPvp9G+d3nXzcGQutHVbyQnbz+r/7BpXsvPfBtf67z3e3JkRoBg4HVjGW0w5VOr0W5dm/v7W97wvXzzb/4nYf701N+/oXP/8b73n/u4N//+MFP/bibx41iRGVx8eyZe7evLrtjq07DsuiVwsV+sXzyj/2pUw+9/wv/5HdaI8dlkT88SAfj+u4+LdLB8XJjMBhslm6tXB5O+sdH+tKzx6+92T+7cQj5m/WC3/30k1//kWx97UuvfPnnfvyHT06Ox6Ny0cF82fqkhi1aJkeL2WRzY7y5NroxWZTWtt4PXc9kRYzqHN25szdwLooIw361nLXtMOqlzbNrhvem86OuXi/yejYzTJEASabT9uxmeTI9PjqpFMyw7bKY6spDgYZjbg0oe4Fu2ahPA2Jr3NlB8dLtux54njBU87YKWd/tWLt2bmdnvXzm1RucO1+nDMk6Nqo5kwHYWe8d3ap642K9zJeTxXLZTbqoRFxFleC99Iwbro9JeH9v7+HtB5uqlpgy63pMO+M8oxQh2kwJAwI4gaS8vlbM65Nxf2TVtPOu8Wk6n0FKy9A+9PBZpaqNfmujqPcbAqqbOs8shkDIRsKa6WlHF9c3e+trV964KQ4t2I31UTHIv/Lm7Y2t9bMPXbr2xc8N1nqjvqvnDTXt+c316eE0WXMqW59HFK/T6bwoySCy4apdHE1al+frG+sxQNN2s9DtLqaFxPm0G+Vlr2eWKZb9/NLO2uRk2eYGAJBpfbS2e3zCWRniKkGHIaXC2lFhy0EWug4iKcJ80YCAosYQ7H0B2ldHRTGF0tLWsKcqzJx8VASm3FkiSaAYk1Qh5tYwMQaqFkKetemSU1O4STNnN+iZLElTeZqehLIPPkkMoZdzbGPmMu1JF9PSd030KXdDRGJtQhARIFIVa613AVYA/yghYfC+7NsWMCxmm3P+7u/8I2++8dYLx294Iw+8+/LvfvlzWcJv/bZvvLQ92H397hffuPO3//x3v3Nn9B1/6TvtR4c//49+/Vu+72/GP/s98JW35r/1FZuVd3/5TTupnvrOP5H92Q9+7sd/svnu/7n9nbeepx+hJx9/+ts/dum7/szsrth7U/y92+utBefS2TPZD/yjo9H2WphNb+8VGyNHPr55Nb3wxvEnn9nZv5OhoBqICA2sXb83/dEf3nIWfAs2QEzQE3jwjF3baZ55pljbgZMFuBwEAC2QAgCMGIKHkwWsg7/4kTx/L/zKTxSn8zvz4zM/8g/333wZy4fO7M8u1LPer38Bzz8Fz78BaznMupJ7kGsC4Qzh7duTV9uQYL23CeWoW3Z2t+mkOVVshc92p3msdxfLwy8dNfUS6psDuvBX/u67vvlPDS+Vf+Ibv+6Ltw/GZRmcOTiMm7dP2uPFoO8++EefuHp977lXdps6Flm5mKdIaZwXp08P9+8eR4TDe7MiZzRRogKpT22hGcQEXciyUpPO24SsdmxdPw9z32NrkTWEbEDbD2zM6/mljfOTK4cFylBg0CtvXj9q40ls63e87bHJ8WR7czD0dPncyLftlLrydNZ03caoXx65ybx74IGHdg9m1aJ69KFLN47uZcXGKzduSeZsz02ntWNjLC3aFHxXFoYkZKoUU7nCNNjoegUmadoOWSlwmM16zgHfvwpG57PC2YIhEVhIliMCNEtk59gxuwSiRuN9R70KEDgL1rCwcUmrkDygoiRBArAIhtAwgTXWYW6ZmUTj/fRuSIaIEBKIREwRZRVjQQGmlXuNAEWUFaIoJhABRlCEiBBIkyiDJgC7guMoIKCgCSA5rBIxAKiGUKKusr4rBzIREiIjigIiOkZSxRRsRmgt2DgY9KeT2rEF0NZ7AOqCMABS3BwNBv3BmbMP5mSJxLdtt5yxim/FAMR6fnj75cVwcH68A6BiMLKAxCEAIzJh0iQICPDWzdd7nG9ubCMDMijQaG27Wk7v3L558fS5xidiEkAi0hQJVIEQgdlE32lIGAUDsLPCsn3+4rSaXbnyRuya/tapU5vb0+oWqUGAqNoBqqJFsURJIiMmxRVsHFZ4IFVcpc1VATF9lTGEq0IZUlJEBmSCFb9IARC7pLmoMYQUVQwRiiSBFWFo9cICr/p+IqICoAnuw0FBUVXbqG0EJWASAEyAMSoCBgUKEQBFNK2kAUoASrSiPAGBgohhsrzqNwIAoAohId+X8imAQ2QCQqSkyyAMDEQC4uP9BypJogBJQFdi3RS/mjC1iJA722HrOZbjURcYjKaIbA0gqkrp3Klhfz6b7Gyun7p4iUxJbPMcjYXpQffqWzdu7Z2osIk4yGxeZF1IVecNQ+6IVJb1wpZF55MQH50sl9NmZ9NEDD6Q+hADxgSM1CaNvgNjxae8KE72929fuxWasFJxhaQCKkQalTCBSsbcqVTLymX5om0WSUXBrBDxSVMSm2cCqmSmSTpha8gQCoiqEoIk37VVvzdQlxMaQhUxw6Kg0cA3zWRQxthWM5uKcnEy74Jvu4aUjMna2DlrnM2qumGTocbOCxN1rS+KMscs+NaxMUxJMcaYYkTKg0I+6j/68DvWd84e7V+dH++Swfm0UVFkYwwQQEg+coGkQhpCcmRH5XhWzRERFVhABEJMxByjghUkPZkdb/WHvtPJvIqinHEGmCFnZEJIBOisrbyPKTqDSIpETOTbRqOIogrkzEyw9N180RjO7+0vM8cuswrJRwkWl8tqFPI8Z+soK/KmCykFBe0XLgJXy7ohHYx6QJh8dEwhpl7flYNy4aUZytrZ7JH3XnTnNjpIKrGaLzrfSuY8awOztk0ns4oaWpwsXMEMuPvy62xM8jJow72mY6opdGcubh5c24fYpTbsXj/62MefqhYHz98+Gpfl/r2FcP7Qex556drd3maJluresDH40usvtbMmJzsqx3fjYrhRrr/j0fWdS8e36iuvP6/LmXSVT/dTRWVuVLBX5gkAVJPoeNR3jD0ml6GEFLpQkN0Y9e8sZkgIBLnLFZUN9hiaNi5OlgyYJJFzorBcChFnuVVFAFNLEIL78/+6IyASiFGAogh0XlMVapFukcZrfdvLBNQQUmZjlNX0M6lISJggihJT28XSuTIrF/Mli/EhcW7YWpGgBjJnrSjGlNB2IlFgflyN+wUiInCZlRsDYdRO48KHTkAAjGNEIgVRjSGioZQUBVEJOsUuLhdVvj4wOaLQoD84OZiHOgqqCkBSUQhJDHNoGwcCmct6uc2tNXmSBJzssG/LPC857NWhaQSBCNjlaChCShra5cL2cktsDBlrCZGAozTWlIqEIGwckdEoAoF0lQyNzDkAJAHKs1F2dnM8+NRP/bvrL71EYh/40B+QXk4kbZcUlG1mTD7kkp3dCA/qw/jw47O9Ky+//uqzu/f2TNG/8er+e
Gc9s3zr9uJXXj668+xbDoZlf3g6O/76D1+af+lV03eTw6UKz5uEBosRU6QUUpZxSD50nWEr1seOOgUElyQQAQDn1taScptEIwmTX6YYu0WDAGxYFbHAsxfXcsPrO5vkYX44Pd5bDtaHgbLWd3leIvNsuXdSnZS9vg3qJVrk5LIOpHMO87LIMA8dJDYWFDqEuKxjAhaB0AQl7juXEsQUrcGiQEGJikmYAUDu3wtiFMRkjAGiuvWKCa36FOZNygRRCUlzizGgy9zqDZDVMSOmEDvChGQm9VKdDchc5LYspotqDW2GcPdwUlU+ia1DSipgOesVte8sywce3BrIYpiFae2DZByd0ZgQFrVHtL0sa7s4YGfd2tFyzx+dhKPqsGrdxQfOXzy/vtG//tZrTbO0BTMDDoZy4VLIx9naaRBv46Q7nMnyqNk/5oQxqkEk9nbNZDkXqgpURY8KCphAUDoQ6rFtBEiiRWZAkYiCLIkRqpQkpL5bOXKRCEgUCAVovLWd2PgkjGwNEVGUqABlMdDkkYCNXfk5FISYVx3/r7bm2TADEiMwkbUOBIiYgYiYyCBSjpBAO1/fO767vfPAqTOXLWeKgAiqwExJYogJRfdv3vriJ38ZYzMY945u7ZNAezTbWCs2BoMAWkJQcG/tz8H0hzsbljKnOVE4Xt7jbMilXbv4YJ/Qofn0b336z3/Dh7ZHH/iVz32ZnKWEbfQSY9e2zpgmBeucZXSWQNSQQUHHYlU6xKqLOVpQ5SQmJ2UGiYjIbAwRm9UhlBz3u5jqCB2QA7QIGVEdoyFGBQgxoQcmBHLOqmppTRQZ9VztMTGvVDQICST5FBIICCKCCrDjRWgmy93R+mh8eTTO16a7+77tPDWpaVOHWJqUBARNWFJaZIPR6ipY1jWi6WIqcitdy6SQoiETA3zuS69cf+3KRz/wjvd843t/+3dfvnHl+CE2mLUb2+eP3nzlQjN98+aVubU0tm0XBwbW8uzqa9fW7xwPPnD5iX6R777mpge3XrsZ9g7gm//gN/y9v3Tt9nG6dbe/neUbZZvyqk5v3r5JO1uX/87/GHbOvvb/+NvzOycn4/Sun/v+eZMvXrp5a6xP/vnvfvI7v/369b33f+PHn/t7/6S9szvOTWfhpK6X02bcc3MK9+7uu0cfPAj59WLja7/jY8PLDxwfHX7q5/7tV37nc1mos7JXVW1SSQoaBQvYW0ze9uDpJJoZbS0wQs70wKn1HrlXr++ZDG0Xt/ulDwEyu7Y1WDRNbLT1evv4sGfN2WG/1+JmUSj4IjOm7O2fnKyX9tKpzU9/5dUk+XueuNBfHrPjRNCzReeDcaZFmIZojBHmNqTNYe/Uxsad8t5e06EFMfra1RvvfvKxMnPG0IWttWXQK3u74631164eGkHxqa27wXpvb/9k0TYpdCd7R/1eAYpsnBcJTUsG1tZKoXT29PZ0ergzGjnKOhuAqAvCROtYYLuYHcwEUtfEtksamqEtIMw3Mntu1Nvdb+8dz8GZIFBkeGptCE2MsxmNabmIDHg0WeaFYTeKoZXUXhj2Hjk9vnMwi/ODyWT3FGbGc6VuUjeddIVh75svPv/Cxsb20XyGEXKBxbw9MEenxgNOsJG5vqHD4+OdwWiw1ru0HQfW+BDqrp4t6jbRyVEThNAgGqNAO+eG4rWOsSWeLzu7bFXEWrJs5osOtcvEObClzann+2nEonXbgk/LozYEmc1D9OJy03aty3DUy1bL1v9jVJSiWnSOXd1UQTUFIZOLYudBlaJEZyyoWiZSWPrQxkBonUOBKILDfEiCazarkRgUDUeESbOEYCQiIQzywYrgGX1Ihie11E0cDvrOGgbNcrdsO5GUGeO9F4WQks0MMC3bKIKB8fX9xV5z9n1f9+7nvvDJyfSwcpUbn1sv1l5788bGoHz6D37wsY/Ic7/6uRuvfP5v/eO/+cGP/pGH/86/f+mlL/Te9VT72uH2+ugr+yeS9y6fP+th87/8ix/7hj/+R2/+bvvwd3xn/ch460Pv+LXv+TMo3SdfeePdb/vo+Wnfvroc56N8MCpG79j7yosb2z0fzvr0QO0X6ckPwAcQP3q7/ut/3B1OkQgyBkrk2x4ouC3QGroG2q568nT7jofdrvinP+aaiuevQuuBGBwDE0iCHndWM4dzkvAX/2pql+PP/CRIt3vjxH30fesn0/Nvvj75v/9VOAgYjN6NOBrB3QN1KD6ooUNNNLajb3773Q8/dTZPt3/tv11+55PNL3wp0549rn3j1Ua+4F6N8+kFfEk3/8R3/s1Hn/5ad+mxX/3FH/snf/kfVvN44czG9rC3t6i/6Wsf87PmzUV9a+lfu7oL3myNx3YHYhtHzqxtlSWZqq6Pdmeu6CU2wKYFmc79dn9QFnHkyrbxHfim8b0SxqNh7TsrEKaddJESokXKzWgjn3bLwrpqNslN6hmsp8vDo8U8xOOjiWX+witXGp/Kg6PJYT3oF2Tw3qwmR5obuHnCUTI23f6UVTe3hnf29zlzRTn4mnc9dXAy2z0+tC4/nM9tYYlhvFMe7Lfeg0fsumiR0Zijum1SGhiz0aPhWhlZj6ZTHQ2Go3J1FdiSrBMiiAm8qjcIGVogNqoUY/KxVe9FhVcaNABFICbMmHLCQWbaEJchxgSg4BBJNEfMrS2stYYb0FowaQAASxZVk0hCTQoiSgqISe53nSiJolFLZFUlCiqhggVFBA+KKy0WAAOCgAFkQk0QAAg5xOgMMJPcJ++AJlTllcLGkBKApPuqEYtGBcWDyQgBRZMCW84RRDQ4Y0BSrzQGJSe3vjUerZ8bjMdMAiTGFlujU/vNnRRaNjZoMzl4/WppB/k7t4fbLDiwztlcE0RVwAioRHiwPFh2i0fOPwxMSkDEokpkTp2+dPPWWyeL47JcE1Fd4XPud/w1iYAoM5Ok+eFxJkWe9/JhGdt26AY2mRjtwd6x7yKDUVFRAURGAkQAIQDLzAgxaRLR1eANVqGh1TdhVMH76Oj7lbIVS1oFFRTpPqAcDSbkAMlaIAsSRIAAgBF4xTjB1eBOURVp9R/dL0esaEii2LTSdpJlhm1QlhXjQ5PGtOInrZDbKxkVrmBITMAIFoEAM0a7el1U2eAKgy2qMYgkYVXHyICQNAgoAKM6o6IYk0ZERVQkERVVBFKA9FVABbFRlfVx4dvaIGZZub9/pOIAVBOqSu64ABLvLXNvOCqGazFhDEoYfbWcn9y7c/taG6Rw5vR4sFb2QoQ7J7NOWVVRCCX5JL7zxrKxFqPcuHXveAr9DffA5QdT1GGR5QXPF0ttIgP1i8Ey6C9/8ovTtk5BLRIkWZlb2JKuEGMYQ4woKqSZM1G1jupVDSMlVVGk1fN1FDTL2jMTg/ou8gpWrqpVIIyPPfg4oYtgkpK1BFHIZeQydoUrc50wEitZKgpNjbM9CTVRars2cdaG0M9d3QSNyUt0VqxwaKLvtCgcawqtRyJrMLdZ6DTPew88ePnS+fVXXn72zt3bbdsKYz4Y5kUZVOrlTCWCEAAk0EjIzknC6bL2KDuntk6OJt77vLAkEAWMc8BY
eVFJ9XKafHTOCaIqKXNE0iwfD3N/fNx5kaT9fhl91+u5EELd1ACIZAprGAlUQhdyRgYlQB8UmZzFrgmRYTAolrVeuXEyZiLDbeeXPmyOskXd+hAAUs4IquiliVETklrlLAJoRpXtzrz73NsfP+2X3aTq9idLD4mwq0LwAWIbfNWpQSHEDq1l5zh6SWTLzI23Cuq0pljY5anSbJ7ZuPNG7mvqlu5qPf+T5848ki699MbVtkUYuMNrk7u3D8N6TgXGFKbTWV6YoqeZ1VFvkI66qLKs7eSFt67XL8KyGecmu9A7uNmUha0PTwCgcNahQRUwmDkrxIgmQzQxgJeMHRtiohBjUzWMZjwcNTFFn1LQRR06j4AGOI56Dq1rg4iKJJ+5PFZeFEixa0PKqWo6sujYEpCkGLrkfUpNDJ2SJIxCrOOCjXMxARNGiBqDhBiCoslIAgVVgigKhJCEkZNCbg0oxhh7jleWTmDoZY6z/KRpOWlsUyttJdDrlT3rNC8y0DZ1pTVHy7ZG1URRIaE4wyqSUlJFyzYlsZa7GJ0IkB+sZaGzJ10bSb0KWDZAFrgNXVRMKTFg2/peSkxk2KEKE/aKfDAYqCvIonXHqZ75EHNTZJYdUD/LJAXClCSiSFFkqxwFAliTMa2EvcyGCSnGuPqgZAJjs9WHrSFGYDCkbuvrvuVbn/3NX3r5+WcuPvW1DSBpzFzu8nx14VtbghJLzi6zg+23v//ycHO9/tVfnMyOilyP9o9Dq72sGK2tP3fjeDgu79w9fvyB6lIg63rHJy0WxbJp1Rnbs2VPtQoYtKrmZZYXpiSCoeHZtKGgltrCssnsRn84mTdNB9NGM4amWkTfplZiA0VREPDWqK/ctVVj8nL32uH+zYkPScHWVcLpogMZDZIrSsh5CGsxxozVoEDw9SIKMg+yRFDYfqxbFEXWvCyWTbfwHWNJhizZ+2VHpKBic+4bBwGEabFM6KPj+5tkARTFpvZEqAARkhImpqBMgn3rMqshJQ2JmdrQiooEj+xKVxCItXmPeOJnRZ4xMTLNF0tji9hKfbSsl3XjO2MoShTVnE1MullkWy6NbZt8PJxJdHkTeNkmIIgptsFnvbxDJaDSWceucwOGfRN8d3I4y93aaHN4/qEd6yYHd2NcrG+MaGMtFaMqosYpd8sCF7E9rA4n3Ty0Cw1qytJwod2iLtWYxHnUGCGpRJEoyYBBFRG6fzaCxEgiYo0dZEZ8XTI4w5KSAqSUVicV4+jU6e3BaOByR4jGcEqJeYVkNjEFhKSi1hQETISI7KxbyU2RjTUOiUSACXVVfVc1bJhZ9auFeRVEJojT+Z17u2+9+51fbyhTUpJVSFpiSElRNNs/mrzxxj3r+eSkXkybqJgYNMsh7+0vq87h0uu8qky/iJsjeOzhbHNjeXx4dOve9pkdO86H/bFfzH7vyvU700UV/c//1jNPv+PJr/3oH37mKy/PZoeOMPg2JRFUJCNAgOqIVmsHIi0sm5BARAmrkJAwU1jNfBwTRgFWNuhDVDW1D0oOlEKMFimDyCpRNCZRSKTAgFEiIgFoimwNpuDzLEsAgnbu1bERSSBRkg8SvSQmlIQMZBz7UDuWgjIqi97FjXLAV1+80tUNhGid6aIQYNN0p89s7d68d3DncHUV5BajaJazzTjFVLU+JXGCRZYtq3Clbd74tS8PR+7Rdzz89Affd/Xzvzs/mJ+p9XI3/o1/+r0YzMWL27fuzI9r6Qz5BFk/f/Vn/tUH5I8183uHz7558Pr+yeHcBvmt11/mzz/e++gnhlStXz5z8vrhvWh3PvzuojB7s+UpHXWv39u5dGrrwdOTsi++GzvzT//p//T3f+AX3uifujYZXZu1LmKzsAe7i3Kr3+V4AjAnO7PaNnL+4gU6fXn7wcdHF87vL2/89Pf/L68/9zz5QGyTcV3rF/OGMlMUbnN97e50jtbMZi0HbY9PTJb7LuQma3ya1/MuiROWzNVN62MqsjzWUZvQtaHptCx7MfozpzbCxBw31XB9nYMcHc9AwBXuy1duYl5cWFtnaZZNW2RZv58pQkCcx66KscyLnImc3Y2ebG93ujhzemd5Z7/VhI4nko7r5tRoXFo7b8K57S3V7t6s5jyvwmo9iz7EbK2kpZeUSldEMMfT5c545Bi7phoN+haxzy5MF34WxsNi4pvK+8iUKFsfD6wxPnHdgMvzqp5Ztsxg2FqTE7oX3pr6NnlNeekkagRU5K6JvsM8z23B8+U87+dB0/7iBA1tj9cl6vX9JqqxCtCl9XGmKncOFpMZXOptRMBbd29snzpTzecc0uFhBYKGs6N5Ox7mpeGrR3eO520/y42GW6/c9V66eb21vd4fWrbGmOzUToEE1Hd7R9M2pYUPFqJxrln6GEQyo2irTlLbGua9yXyQZUUPq4OJc+705nZW9o/mx8cnB5Ojia4sTYyIWLgsM3pqc8OV5uXrt/6PUdFw0Hd5QcRZXja1jxJiUmVou44FjCUylANZIgTyAVWRDCcJndcMxfW466KIt2BAYlQb67RRrEmmbbNEB8fLRZnlNoq1UQl9Ih+TzVYcFG07EoA8s3nWa1s/nc8JoQmiqtZQnpkY0vG8+d5/9I/f9ujjf/t/+EtPvrO5ff21l67fNr0m5f7LL8zfvHnj/DsfG3/9u//It3/jL/zCL/3Df/a/nj11+a/9qb/y8b/xQyef+cwrR//p8mPnqjYWQZ79mV/OU/XW534+zU5eA3nq//bds5+fXl6848qv45/94f+04Hz76Vv/+VPf/61/7/uWP/pDN9/30KlxIdmy/9735u/4P73w6f/0yHf9Lf3AN+2cPbcQ61vnWoNOU8EiIdoS3vP+9Fs/z6dyeGjn2bz/oQ/8wTf+8T988Fv+dlfV5s3nnSs0aVIyMcJO+bzMnvrZH4eb1299/ouy2WzeuPr/o+o/w3XbsvsucIQ55wpv2vHkcHOVKqoUSsFllyW53WDJmMYStqERNm2gTWMD7adNcEPTjd1AG/zYJpkHcNtghJBtJIRkZVmhSqFy3SrdfM6998Sd9xvXWjOMMfrDe8pP++v+svd+955rzTnm///7pZPzKx/42HM3ru9NDp/82P+6+n//z+M1l0TWCF4dSRxyTfXeiDebDeP6atgv5b3/14806zA92NuFDbz+s/R0gOs3/K3dtx49OcmQR2H/j//gzX/8B7/tg59Kp+//2b/8F/7r/+UXDs7KS1fau1euNM1seTl88807Ez/78c+/erlRpGqpHTJXijcOrz6av3uw0xzsNV9586SHajbbm3f5uEt1Fz3zuo97Wk1H1dAXRWoPd0Oo4sV8kwsixqFkMU0ogrnIZLfO5B8+Xc8ODmTbeB+PHp4tzy7jqGmKpSVpPF/WlYPKa0NLkpRljQBFORUtpSEYNX6Rhv3RdJ3S0eUw2RkpdRX2d/b2Xrx65f1Hj66ExjEuYjo/2cxG454HNknLoSjaULLBJpcF+9O+nBfZG433A61SpqF/9kqoGUkVbSjFFITAWgJnZkKgEmOKkLNtj9cC5tFUoWRDk9r7mnFGfsejEGZ
7Zk5jhYqIiQBU1Ippj1ohG6FqUcSh6HanC0TbGHbOqmYAVjFVTiWJM9ICHpAMwIwNSAGQCAAVHIDftocIFKAQqToC216vFKAMos9YN+Yd1gHRtkFpZGIjo8CgYil7MMcOnTeEJDYYdAB1qNqmChyms4kx084NP73mUL3jvems1/784kTSICxRtQzz19/8zS6uPvLBb3/52o22ahCNQQmzGJjSZlg8fPTgxdsvjupWEIkIDRnRyChUd+/c/fo7X33p1ofraqLPQOECBIjPdkgGMFg+PX86GkaHt583kqoZgd+/uNTF0C1WsXVEW5UmACI6MAQFMDUIvB2WGaEZo6IhMJoBbaX2uBWwEYIyIMD28IVA2y8yAyFkAAQshpsMex6qAEW+QTH6BsvaTI2YGYEB9BlCWwzNwHALlsKYIYurGaoWqLMiCkZg+A9Z22pKiABGaEQEYEzkCByhGShAEUVUQqStkR7x2W8igOwdgTPsVBZSMhBvmesGjKSASawoeKJApt8otm1XwaaXyaQSU1QcjaqqroahgKEjrILLOZFjFY1qWDftzh5BVdU1elqezzeLiy987XfeePceeqqr4I3qUGWJXHkzQ4fFyEocT1piyIVIzXvfFV6c5nQS37l3z2Wtx2FckUC+emXn6v7e73zx/hv3j+bLLhuiwyTqijaes5QKfCBEUFRAZHK0TmK5ZM2pqBHmrC0TO85IxGBIXRYWUwd9StuUflUxOwoBywDf/InvIPbExEBb2QqAiUC/SWhIhNNZc352oqqXFyvIKIkW69XOzriY7k1rADi6jEjYepeyjuvRcpOYuXjsYg5kCALqBIXr5uWPfeLa1Wv33nx1OT+pAm4ioveT2a7zk1ntz32IyzmmnIsUhT4mT9VeXVWeGzdh8pNpvepx2WVDMFAUUMXKszEv+8gKlrOa5T662o+n7WXXr0qaTluftO9Xysx1sxm6yrmmcUUQDWLKAhoceXSTtl2uNh5LzdxUbpM6ZGJCyVABEbp1zuNxZWJt465dG48WDlz9+OlpQPbO74TJ0/UCwTbrGCahj1mWcXpjMp7tKs3iMD9fzMsQgXR9fBqLhhkbAKdcjStqA2cdN60H5DHVo8mwjnSZWsjJ0viwPbp38d6Z9DjaKHSal1p+4R/8/Pd98kO3rj33aJOs9SdPLsc7oQF46ebseBgo+sXiwgq4FEt6GDKlixRl5ERzFNKShqFfddXYJ3g2ML0ymxIAq6ph8JxyQcIyFFBBVdNoCggj9BCqYBHJOZSu11wG1MImRJ6BDMiHuk1BqSBxAyYFFjkWVa2agM4VQV9zL2Kas+Y0JE0q2YqQQ15FicuB283YM6IJbvtfpEAKWgAMINuz2IKoxpSzalEi2qLX0NAzmErJhpUnxnLQ+oqoXyVSKBFXOda1QrGGvTcLlW9cWMU0lDIfUi8Qi4FtBepWTA0howG75qCtR26zilBAh8wuhEBSSuVZU/GOjC2JViEwQ0qSU+qHTRWqzdCNxuMquIhayIWdG2N2cT0vCLEkIkeuLmoxJSRiX5uClQKe66ouJQGAAhA5VTUyxe0VAnnvEIicByN2vogVKVHN71w7eO5Dn/vlX/3Fn/i5T37/9wVfUAldWwQYSMWYuG6aXHLwVACvP/+JP/onX/rxv/lXargIZ/2ql6PlInbJ1bXfrZ67NqNWf/ut47TO1SiwIoWgYquLfnEhomU8boicD3WNoVtvSoa2DtWemzV+cd53g17EZRZxI06b3leMjEquFFhsOg7h+s3xld3qydEgSOSq9++flIi52GSnqUg1deNpKzlLbUqBG45xnboFSK5D1Xdl1k5mzZWuDJaEtBpVVe3FGra2RFjGwTAKaqmEt+DAUPksgxKExqujiQ8mikUANgDQdxGcEaIabvo8DMUqXxGaIrKv2xFSrlCQMFqp6pqYkigZUNEaqeIKJFkuRszeD91aVqvxbNcpuskE+zWpbmIpgM77HEtDeNBUVx0OQ79Kibg9XkemYkbttA1YefYLwzrLxDUxl+VwpibsHOTSd/358bEbPbh7666781LY3W0qD1yKdFD6tF5vTuZNkNT0uZytVmIdqTlB6otgNk7slOJ6cAaVUQFSEAMy9GZF1IQwGzBTYGRyGVQlj5m0aMWcSgHErKII6EiRJru77LwW8FVQtaxSYROqgAAuUEyxctXWvwGIRIzkaWvuQAIkRCJiZsfsDQxU2QETGOCWbE0EpgqonuoPPv+dNY8FjJG3+t0t13E5v/zJH/8JGroXr75SZm1XE028QrhYldlsh5q6SIJpWC/ioDD2/uDG1c7V1Oxcro+fnM1rX925fqX1o4vV8DtvP9gMvSA+OFk29x7f/uAHvunjH3j79S4fH+8E10MBQCYWA1Hsi3gkLUUVEYERSi7gWbZYb3YGoOSKmWjxgRICt9WQIbtmSAKialQxjTyYWs6C5KOKIwBmdq6ogmpMiQhJUYRVxBnM6gaMUi5Zcz9shLgYajY2BQdFpJ21W/hA27pqCtnvXX4h+8ImFFPuTDJhD7pYb+KQNvNnA1MgVlEQWK8Hx+SZfIXsoKmBvE9JlO3dp8v7T77wuRF+6IN3/w//4h+kJ4+73/ja6tFpG+je2489497e1NQt17Emxgern/kP/r87rHkjVI0gcd+lEaT8zircGZrhwTuff9fb+JP/3p+rP3mN1K5USMt+fe+rn/nFn/7d3/yx3/j13/n9Nye7L33zv/CH/+hs/8pHru6c6/J5r7/8//kPZotLu7n3+vlFDmWVYO3rD//u7/r23/N7/P5k3q++/uZbP/4f/qf5YjHpbFJXytU6yrL0jODrqgA47zfrjTdrJ6Ohy4fjtlsMgasrk1EVfEN+M/SuCetSlvNOimWwYbVx0rDpuAoHV/eCgGZ5erE+XfbM/uRUG84nF4sPf+SVL73/xoWVvcnOlbHfbFbYlWQ+iDEKgmUldujNzIQFapSq8n46qWfjbz4YvfnwNMU+R33/4fEIaO/wCgoxWeuavZH4nfrx08t+0BduHY6q8OVXX79z4+a1/emDJyfVdPTSzdmwLiZ6dXKoDoc+jqZ17V0/X1bEGvOwjIVIPe3vNOOAD5d5Y7hadLVz0zAi0XW0k0ybbtlWpISjur5yZf+19x57pf1xm6J24iRnyVZVvnhabnIBmMyqjcnZWjSXq7OAoJVBifHh2TK0VTMdPz5fieMXXn7x/HTdzfvZqG0bvykEVg6utqvcJ8eP4mawUiBplh5tU0ide+3xhT6hUe0DFoUCTIkgVPV0t7l265qtu9bVe1e176OAGFO/jn2POWNV6WTWvvji4fHj89VyePRkXhi7uHAed6/snJ+fZ6MQAjJ5F/Z2Jss0NJT+kVTRVuZyOU+xpMCMaGqK5Gkr/C4KVshASiEiB7ANVShaShFR5xep8tyi7dQBC/So1/d343rea9+OqcuRuQrOZ09MqkZQBJFSTt6YPAzDUDUTAAak2e5ob3f03oOjJEmLpqJSkkOqvK9I7731+v/xz/6Z524d/ol/+p9+4eWPvPn53zpZXY5H+6+98ZUvvv
qlPJ0+d+3Kp7/7ez9++Ke71fFf+x/+dT78CzSe6Ke+++n63Y++vPv25177nv/o3/zCn/8bslgIZILul//qXxk3Vw7HN+/cfOU//6e///qN8eaXf/bmx+8Of+z7fvmdL33oyUfp859px97phP/YD9Hf+PPv/nt/5OvzcvDKD33L45MAY6sq8n6zWmgjcvoe7I4WVw72vu027ET/la9e/JXPfnixvvd3/1LzXf/MyEo4cDYwrwyyQt9c+31/9N2d3Te+/oXhbv27DvPs579YXb8D984Of+S/K//Ff3DtydJXuwLcjOA8rs/udC/+0Pd/9T/9O59I7ApGRy//i/+nkz//11/s9/3+VXi4gipA3U4//NxnTl+3j93d/Z5/4vmPfveLf+D7TnH48r2H//qf/QP333pNon3btevXnm9XT1dA1fHlMmZ+952Tn/jqfRQioBv7Ixeqr775KFT1++f9bg0By3vdJkoA733Im5NVABI1ZBojLc8ugtRiSMCrLJd5vu+d1zxtXL+JbdtqMY9OWUd1SEV9HUYj16iL55vjk/PT+cVkvJtyPI+byaQCBjS4ONlc3R/Xk1CMbo3Gi0XXmhFgHeD69R1Ff/bo7ODKhDkIKjC+dPvmu2+9P5tN95rx7d29yWh6fHbeeznaLN6f91EUt/wWhw6fTQY2qXRPzx/oxbTxO5P6Wj9sV8FyEb3nxptnLCronAKYlpLVZYu9DhsDISIwMEQjwm0sQ8WKWSouMLSBfU2yjSGz16ikIGZbPqKwMoOBiGhUKSpIjlRVkIjV0MSgoGQjBuesYVBQRjQwBkBQQFAkAzUiRkI1Fqs8kANEZMWsiIqejFAQjQCIibdeNoSa0ROQMyYu2ZKBZwqEhIS+cugBUAxjjlFkAOJQO6Xa1wEcW4DQZNOuv9yp67oMol3duJ3ZZJ3jYthojoZWipy8f7+789EUu6ixGdXArIqhqk5OT54c33vpzkujagxIW62Y2paqQcbeudGLtz/46PG7d2++wr5CM2QyBTUTVQWMRYY+rucrS6s8FFcDhzoESFrSVtScxVTNxAwcMaqYacWsW4AUmHOsCiIqANuNGiKW7Y+CyARgsJ3XIJACEhiaObDGkxHhYCJCBug4Z3AETEWUtyMmMUWGillV0YiIismW6A0KgiaqgcCQRLFP0BqhszCG3KsWJDIGMwUwILTtc3ZLLgcAAjAFATBUUDRUT2wASQQBgkNFKAbOEZoxoIl2WQYF57aUbTQABSPAgOAdQlEEIFUkBITtxJQRzWBntjNfLL3js5OFFSQKUqKZBiYCGY2CSNKum5DtNY1n6pbLQLIu3Vv3X1MrlSdFmHdp3Z8myYnZmbIwkc3Gk2ISgl+nFFOa7lXj6RjYjk/XfYbca8jxEmKfy7uPFn13f7HZ5CLOg4oGdIIKWzccM5iiIYgS0lAkmvbFihRERLAAWDPttDUjHK9iNkWAKjAZRJWtaM95zkkgEJO7fut6xd5y4tAioarlUoYupm4ATVXtoMTYr7v15eX8aEjrSVM3NU8nk5jFWXbM3bDZnVXzfqgCN2Fro2cDKzk6R8xYBTd0iZSu3bi1v3/t/NGDzfw8pm7ZZQN25Nk5X9fDsClZrhzsLRbnfZ+sT7v1yFRbb7Nps9r0q8Vq2XfkvVNyYNcPJyUNl+tYUhFiT+gdkiEDKAIjphSHmEpfwt5kb2eCTruYo8reznTousl47Li+uLhER5shIVejOhQpzFSHkPq+j93OtaY7X2mh3CcVVdDp2M9XG8NA6O89mO/Oxg252ajNqRTUy7hw3lLKoFiHCoiCh6v7Ta0yP+oXj+NwsTp67enqskBSIWdajCgEGqqS0yUC1NUcHYoIM0/rJi66esQF9T3HsS8xeTQWU2Nq6/Zivrq2v3f72pW3X33n7HJtsWyy4mJ45/I+X92jg9H+6Eof14opDh03VO/5spa8zl2KFhHZWcBNikDPCmjBzCGTYRbTUlCEmDwyMThvKRZCnIxaKxocp/WQ+iH1vRQrCR2xmRFoE7glK0MXfKWAZZCu69MgxOQcg9uCZIQcq8KQohmCKCgiADOQmQGg4rDqQ3C+9SUbGDl24INoZgITayuHKsOQCkkUE9Xas4mJFGG/7IYxU9V4T0gI3rGZFlUIPPQ59z0pug5b7yYjX4da++Sda8gpWsWb8z7NkwjiNnsOoAAYi2CDoFB6zYusQyH0vgpKsIrDYOodjSZtZbJcJ2IoMYuXVHrvZ2pSeWbRYbUe7R9GwHbMuaTN6kIQsOahpNPz4zpUB4cyHl31FbInIHTOK6D3rZowAhIXzSJCzOSc974KgcgZkAkYAHtkIgsesX3pW373k+PlvVe/PDmcV+BuvfzN11789giopqCAxKaqCr5ym01C8rGrPvSt31/vhJv7tz77Y3//H3z+599fXKob9yvzq+VZzqQ0duwBzk4uR+NGsvbzLKqhcsH7ljxHKxiLahcTAMZOunlO66IKzjlzMK2dk0yaxodjZI1rqXZ33CjMboXL8/nl8mJy/bqFTDXHlMGjdzIaVV3sKBc0kMuFCvnApIOlhctejRvzk6g7Ma8vzjYE052d3dleS/08bUig9Z5STmhNHVSLikpOzBh8iKUQbutRVo+qCsM3zsgoZSvxIDRTBACVnH3gqnLZMkOpnRtcKbEEw1LMNzVikKIAiJ4uNmuloMCeOa43KpaIpodXq/agAovOL59eIOiIDLXMiMu6W2RYptyjA6+ZfY04GTWELKZiGJraK2BxORcinlT+cDrpZJENmHQ1fzqkTse7kiYpb+YPn6TlPJ4dYV6xn9/54AE25EZl/QZwz74ih5pLtoQbMYDMhlmUmVMZDLmgRUiEFrwvCh4dEAAgmqYcwTs1ZbQYo2P8xp7BApMjakMgwJKLqLEjBwEIixiBNNyAr70LTAxkBMTsxJTJsaNtk121EATn3bbzLiWpc0SEBIYKWwoiAQDu7RwSV4pChgTP9KvM2C+Xv/mTf+fV//5HRgjva3np+et1Xs1mo6GHvZ3xpi9uNwy9Hq3n55S+7Tu+yea8zKPDvRvsghRoqhog1O3e2dn862++s1x3vtbakSWYr8+ux4PD/dHjuFGHpkYOQa1o8Z7AMBZD9kwIqlLUoQVCA/FE7KgULYDFrBQJzpUSNQT0TSopoWZQTzAirMhqVgEVUI+azRiJEUCVRb1zqMUpS9Q+qxF7FwKZmhTLzFgHj6rMTrJahqryCColhsq9+NIsYj/CTc+8Px0vz3NRQwdklvPQ1rvnx9164Sk+ozfu7dfoq36VlqvBRJuKq9prgWGTi9oQEwG0CuPal6i/9eX7X7z3Xzw3bj5R7V19bqzrXCdB1VGNA3AZjx89OCFwN6dtsvL0rKucG+007Zhm++50ePTG3/kvwpPH1w4PS07lLKubPTo+P43d3Vl1Npr9nn/138Z7x9c+WD7/Iz+1XPzYzt60XfxnXYU/+5lfna/mu+O9RVWPr90IL7+8/+KdTz33AuxOl3Hzhbff/MyP/ur52+8FLS/s7B4LNrujLqX1usfajcfVZt0bgHO85Rms+
hhX1ir6nfba1f3lcj1qQ4WI6zhCWmtWi7uzdkiw7NaHk1FadyZ67eCwXy9n4xFW/HS5VMcu1IiaUn9rd7S8uLi8GJpq9MHrN4+e3mOAndGI2RFkNemLtru3Li8em0NNxRM1GZ8cXz5/5Qb03aidvHKLYr85uuzm8+VJ5dtm5LSiAaQHzXJnv96lg3ePFs7pcrm6e/XOpGqGy8Wt3Z3LbgNZrZc+iWMwh32X2mu7m02qfFPUPELJJdWNY9gbhX7dHx2vLi47Moyga0kpRmVfjWqIxTlfVzxu/eXZaUN2db/JJd17fNkLZLHYpaHkpRR1vF4PQ85ZxSgc7Ox7LBeP3v/ujz6nxOdLt8oyPz7zdbUm3YGqRtzZ28spF1RhqKsKJZ1tokWca5yOQgX55s2Dt75+dtbFunHTOhhAHfCVG1colkfni0fLfi39ZjNcHG+8AiDtHuyY2GRUAUJDfOPF61kNYulivvfw+NGTpwd7e7P90TsPjxBNFfZ3rlnqu6RDnxXgfLU+X69BSvONiOk3CmgiAjGLFi2BXZLiAtfoUCgWE0RUzpIUoa0degCjKClU6AtscsrEy+VQE1yb1a6qQMRs8A0uFpvZeK/y4Xw+9NiD2TS0IrjS7aajTA92FquNCprkZuTXm77vra3H8IwnYrjlxDGwZBXz3lGw95+e/oW/+l996MW7P/C7vvXRycWvf+FLOzO3d/XGg9cefumXv/DF3/itl1966RPf88/JR+nf/9s/9X//M//KBz/xf3n1Nz47XGm+fLT+cDu+eOH26Rvvj8dhf3r4/pOj73zu7muvvXP7q9XNh+/r+3Kj2T99Y/EzP/GLz//hP/7SJX7+Z37u7vTG4194P1/+P0cH37RrT27e9t/+f/7B1/7U3/loIN6I9MDcuiCb9eXJa7918Id+6J3ukftbP/tdv+/7Fl/8HNC4tXXvfF2HBD5Ma1hlgACv3P6F/+kv/VN/5QePP/qR4eFB++69zX/z49Xetf7hSbNo6R6TH8VRzAXyHpz/M7/38p/6g2+98PLt/ZfO/9RfvI7NbiqLn/yFK7kGGGCX4Nqtk3d+W0t4O7Xf/T//ncsbz42agzcfPP7eP/kvPH7n9b6niQ9UT1vP/Wl5f90NvXaUm1F1sVgu1hkVgSwBPj1b0KSWUTtPAOu8N5mGUH3l4VFvblTH8+WaKtcibpUCJRU/ctWs6QcZ+erhozk6nst6gsAaHGPKqe/6HNU3btPF2f6eLJZTgrTpPdF8UYJO90eT1Xq1LO7KbG+eYolpXIebV0d3Xrg+nC+FiHZmB7PJo8fzLsZhPWxiRwR9l2NKA9qgeXjnwSRU80U/DMO6pYK6u1/dGtejOWPXr4Z+FdPZOjbMitR1uSJqPWctGNyij6tN9+j9s+0qGBYFPPkgRgVRDMGKOWMz0wQpYolQTJGBzZAJ2QBZGU2pKKAaIZJRLuA8oUHJCgZFimPvvOs1EkI2YzU1RTM0sQyQFdQJginkBCoICqSkIIEpG0KRgPgNpBICgkNWRHJmZmQWAgGAQ2SlXIwVkFjUclEE8w4YEZjYY6jQkTmHCMBopfAgOHbj0DQ8mjoWETElR76LqZTivTORkl1ThfUmjZpR0zYldqLdyfKi5FUzGoGZk6Qxog7eM5CltHn48K1b+/V+cwWTmq9OF+vz+ftFuttXX2j9eKuZJ2Yzo2eaeTMwdmE03b+pcnT+3pX9G5Ubo6qqgQITqwk7llxYaX52FJwDTyBlefaYdE0lJrWiwo6QwIESCm6bYczeqIA5JGA22NrlWKQggBkBqIIJqABsvfFEpAqIUDncjhhEiyqAgUcUwCzcxTIdOXZFUgF15FhVniWkzLSoMYBBFsUthMAAv5EeB7Ohy7DbeKamxg0lQGRCREtFCbd6azDb7k4BAA0NgWIRQzKEypHbitcMiLiYDklFDVAqpmxQDNL2e6oqKDMbIiGqmol6ZuTtaIz1WZ0RAKAJOK01UF+w20QOGFoXUjLkQkgApgjzLMGkcqFpq5Q3i5Pj3Md6ZL/yaz9/dPwEQbrIOcJIy6QOuYhn5KpyFmLXUe09IhbzgK52o4o95ovlAFq4dr6qL8/nu7NmvUoMEKUoGniLKqoq2ZjJO0cEjpAQ0aAKPmbpTVISMdNizOiYa4a9UevIMfGmxVXOJoqIpZQtrktUyQSIBXG1jtd2RqNRm5JQUB9ISkEBjfLk6ePNors8O99cLtMmVRTu3H7x6mGO2br1Rb+ZezIFTpBDHRaXG0UCtbZxi3VPTAJYVLzzatT3vfPu+bsv3HrlE2mTzx8fL+bry3WvTKGpHGK/XA05T3b2xqXt0hB8ULGSkkki4JIGlCbmzbKP4NgzTMfuyrjmUuq2HaNbaDnZJGYiwpiyczRGf3B4Y7G6zL6ouNN5H2Pem7TOUSfA7JpAQ1+WqxMEI4VpXQHCxWoxbevQhPmqY0Z22HqH4yr1cNknRjTUtg0NuuVG6pq73hbLuCj9eBSo4dPVxltBQyQkxLSOPpRm4g5bujbhDsPKzTgt7AJDV5WipQCTAwPnGA2dUXAUqiqX3HhXhwALs6640hrmvmgza0VyjtJOw2y3uTLbvTw+fvONe3vj3bjqQLwVy1Er787ny3xvMdqtP/TdHxiclp3JSocYezA3c1V/vs5n64oCh+piud6pxzMHr7/5GABIjAHR0BUDQDZEoKyCZIRAuO2vYt+V5dFci8w3m4IA7MlTAa0qNpBYCsdUtyNSEJDVZpG6AgjYOI9GBF0/FEmKIzNTQUCCQpoUjcgMFESBinWdNjNANXROijlGJq7YXPBcad+X3A+eQoqpcl5BKwRD8w7NYRIbTGpAR65oWXcl+LYf+l6tILTssJQAhKLamyeXi6EIMhHaGJ1WmGXoRXMqjKyERERs4wlPHa9XCQbrN5mYzITFiBEAq8aLinc8ndaaEgCZg/V8nseVoQflGHlivhJ2OaFgPd5Ni+piWJ3PLyrFtpoySikZiJFrcCGMJtuTjCM2k1ySgSAYk/O+ouCRPXENYIyohACiOSMjEiYAaUaf+id/sLJ4/vj01S/89g/+idmV2x8mbtg74y3Tz1yFyqWdhdjnJHB45+XHp4/7+0ff+wf/yT/8J374C1/5rX/3v/zLqWu6mLsuj2fNbHfU9X0zrpgxrdSXYBkBeX4c9w+bAlpM6sqFpkIHfSeX8zUrVY45hGxSYmRXJUEFx3X7dP62bw+u3x1JuhiPeH9nsh6GlNL01m636fI6h2k4my+Cx/l67VyYtjOQJMVqIvYHJhA32QDON+vL7i3vefdw35EOsgFJlsSL7vjqXBMTm0LlfImiCq4Jlsyr92CeNGphLcz0D88FDreNgaLkFJ2YY/B11frgEQqqxSGmmLTEmirJuRqNyIjUqmrsxaUkYOTIIYBjkhL71WoYTxNX48Mbo/2DTO+X83OfElmhRMCUhEQD+LBMMBqFaRO8uVndnq5Xh+2Md8fzxWLa7Fi5BEdNxdcOrj7ddKyqZsPi8slrX7j+sU/uElyen1++/npZDVYicZ4eVGpwMcwL6qBhQgyGghol
sXlR2uTBIwZgB4TMWZSdL0VqAmSr2RFiQds6B5yvBgd9ElWpHGXRpCa4JQkIeQFTRqrYIyAAqeUipWlqzJyGngm2LlhGR7wV7ppDduwKIBrhNvKq6r3f4uS2VgvbXjqpmvNqRkDO19tEsSHloow4xMik681ycXzygRfvrh8/lWE4efDuMMTZbu2GPPETH6r92e2vrd9f+Jk2TZo9t065vn73+eefA1j85v2vjF0+uHX7YlifxvXF5WI68jF3aZPGo2a+Wr32+msffP7WwWh02g/LTQreMQKpNYgEVszKs9ocFFFCDGYFoCgkUQDrizESK7TOQSkODEpCUOcoZ0QrFQlqEgEBUhREckyM7NFtIZ5oJmp9loo5ivq6EgPIWVVAcsUWah6iDYTFeZ6FpqmLpXpE+7MQV8ejndHJ/UspzunQd91o1Kz6wTPuhzEs0lDBo/dWu83udhXEVILVAeDm7qSuq7PLOSUQ1cFEMyFKTimQL4aTJgSrumU6i/FvPf7KtRt7H9rZedFgd8Ouxg7j8aBlVFdN9WQYKiv8/F4M1fx005RCscT9289de65HEfZO0i/+xb/4Ay//h7OD0fjwFlwc37zx8gc+/V2v/zd//VG/uTwfLnphXL32W7+RRcH5vdt3b37Xp2PVHnzw5eXl/I1H7/3SL/zk8eP36zBqM7z8/A3Uad+vnvaLzUaXaUWeejXrU61QktYN7Yzra4ezo/MFNx6ZjaDkvOgiCqYCD+br3VH9rc9fv7FYP7g8XSQx9rOd8WadK8bJqL0yHa06qMbTN++9u0zD9WvXDg4m/Vl/dW831uWXXn1tr53tOTg+fn+56qbj8SrL1Lu1ZFJWCxcpD0gaS02kahXBpt+I5NGk7ZYrpNGVq1Pwi3ORktPR46fXn3sujHcWpZufx/MnayQbgYSCqw4vY3m6unjpYNx18WzZ+7oaVxVxPjo9v3147crhPiXcLBNx7X0uZNoG26l3p9Np5VDL6UXfTCZdt2o9E2hbMwSeDxtBMCRy7ny+IiSnBoxvPj6C0EqWunXBw+l8c3B48PZ7p92QJ4FmDV99YXL8YO7d+IMvPrcpeu/oLNQN9P31mzvnm6Hrh6PT5DbFQo9q3/rB55526fV3nkgngwBr2BnPbLNMYA8fnvcpjScNop3G4oOvCQ1zQQR2VahGlVMtkmJWQu9Plkvp8+UpV0zEnHIaJO827dHRoh63k3qyWm4udEUkwC6LXZ6sfJZG5GA6FUgBKXGVB4sx/iOjInLEPsimC1VFSLWvkpZ1vzS1IoZC6LFxjEw1+VLyZbdCB7uT1gZYLNfFcx3IUNapa93Iop6v1pOAk7rt+yRK5PgyDm1bV4rsAVISgFT6yu9I14O4rs/a9YiQc17hpk/R1EBVEQmQmQHBTEUEEergVPXr7947vjj94X/uh28N67fffOveyau7V69Nrs++fv/JV+8//B9+6h945+7e3flz/+af/kM/+MPf8j2///OLkyv/yp/65cv56Xe8fOf7vj0/eeetL3/t+rcevPQdd5ruaXr6zscnzauP5zfv7h4M8a2/9hf7j378Z3/q7W/94AcP/6WPP/3Z+2/8yk/98F//j375L/0bH/6rf/21uzc/8/u+vfqt+y9vhAF4FPoWDm/c/p0vvVHPnu9fuGs/+sv6hSft1T0Io8cnj1/5ge/88o/b7QGbyrWWZ1jATr9n50b/n/2ND92ZvfP3fmL+tIRHF10fTnbh6rftNd988N6Xfu25H/pd9//mb6TbB8/9uX/nF99+9NFwozzSOwDgGZ8/mH37t63feHVcw5ePvx6+/dbZ7/30p//An5mUq+u90d/4e3/zv/uRv3H6cLHngEPtySs7RTpbdVXjJDllcOPJZNYeH80dqlOogl9uEtd0MGuy5afDuiaU1fDO/LIryQVuyWV26AKiWZZgGgJNRo6hjD0OcdBipuVgt04lFnWmAKgakwmaupXo024Izi3OFx4oAA+LOB2Ndpu636z3RqO43liWkaPb+7O9/YPzTs/nctr1jAwPNiLCAFwjeLx289p77x6RGAdk41VKvqo1WCrW1H4R5XKxvrY7qkRu15XUQVlWks1kPh96BnZ+GctGDdmA0TmHbND1ANCvQQ0TgzmoaucqJs0kKr1pwhItFQQHtI32mBhxYUB2IEbf0IsVATPMigwYEWrvRlXIpfRaCgo78EggJlFNWZOlgs7ICoCKIW3frAQkSYxo3W9Va8YBgFJTO3DoDY24gCpJMXPEZiDGpiaKIZAzFUMG5qIAhFtuL5ICMiGaMfIz2DLopK7H9Ziq0I5GlUMaEqM6dnlThiKDKz1AJzoUbZqdBtjhWHgS+8ViddkPF7p0cbOMRTZxUAQlBTMehtPFUeFPLVd49PjV03gcAQ7G+x/+wEfHzchUQYyQTAERgbZ8iS3xWUkrnh4Om/Vi/nh/csNcpWAqtt1Oian3nErOQ5/myxB2cpbzJ0dDvyEwAxAjNNj+ORB0yw9CUAEgBAKFDKZoWzaUEpoZCCGIChLTtpemAoTBUwiuMgMCItICfRJQ2lbADDiZJRXnUYoVhGJGQKZQAEtWQmRUQrCtOwxARdkRmDKxCWoGzYhotcMq0FDAzBCV0WCbpELaErIRyMBUAEHNUAzQkajmIluAEnkWgSSoBrWnwG6T8jKX3rYJcEQjMdhSmACg8mymROyYRMXU9BvWGwSb1vX8bJG6YTyejac7K151aVU3aFZKkWyQuq5RObixB4qnT560db+3O/3x/+3HPvOFL0cF59BK8YFnOyNSAw5V4I2aquzNxswKSI6QgHJJq3U/xibn1LRh2W/6TZKix+cDGgFa1pQlO4cK4JgQIBl4ACL0TKCmxQxhnVKnIqaBCRmawKhQEaNY8Lw3GZ+XWBIiUEqSUybvtjl+URO14Im9E1DfVD44MTWJVRWy6IP3Xv+5X/pfvvUTnwbCajzZb8J4Ojo6OV2tVuu+v1jMU9yglk3pmf1qHY2DIsRoXor3/ubhQTQ9Pb0AgFChRJ1Oxi/cvdPNz44ePeyGs0GTmk1qN5k0fSeDlBJj6bqRd1FdO92fu1UpJmLFlEwenJ0oqiOYjKssIFJiynujZmccQhvKsqMum5iopiyCPBnVWpIj7vve+SqLAbs+W8olgltveovZuX/4n2mOuI+R2UeAUoprXRUoeO5X3WoTJZNHBjFyYX5RLGkxcN60SCnmCIvYMORJaHdG7ZPTI48VEavgclHE6QvcfO31x/VzLRzUVbV7+Uuvjd0ESJCsqhgRCcrBbNoNK4fWNCiFyNHQ9/W4mu6O5xcbrsh5jV3nzZq2IUTdRD/StcJlGXbvHoKzCtk7KAqIoIUh2/A4vvqTX6sm1c3v+na/B0OTL6VbOio7dXQ7pSCJguNSCodneQpJgkYqKibeOSbcQsSSaBFFxeKpZ+w3w2adSirAzjlStdqhACCqmAJg7NNoPFERFfUCiIE9GWGOUQqoMkIoSZrGm4EYqYJkYzAtwuwwECMYgxHmlMFAAbNBHcI0NEDICgR53heRhKbBjJgAdDJtB4jqEAEdUSI
w0AAAAqBlyGWDgK6qqkqH2DhXYtaYCyozgRiggsOq9hNlcHy22iRkMYhFzADJRkC6KfOjVSm+rhpDICurVeeISi6mPBlPVkPKJgoUi7Vg56sVP9Gd3Z1QTerpCMl3QyypBytKvQRcLvrlRg6a5srB7vVrL+7s7zfTaahHbdt67z27rUJTlYyZHDpgIgqhBkJyFQFtCXHbIzS7oKBiUMxUMAhcPbh+BunP/ht/+sH58Ve/9uXbL3xgOjtEppLFzIoWy8Kk3fysXywdeVqLd/jrn/v5TdX/ztvH1ewwptK2lWefUl6sel/x3k5LhIunGyveipW+VCMtNihAFXhnFJYbUYGhy56ZiLyvBHKfom18MrhyuKuLPK7Di1eu+End+A6NY8F+Zc3YXbl5/WyxMsgxxuOTiFlXGnf2Zu3OblRRZy5UAKhsiDlJyiY+bBmC4F2/Mx6t18sCVLbddxHNErOIQjL0ao1vdMhJCwRfUsw4NHUQka7fPDsXwPbjVucYTAOS57qqRozAEksZyIc+FlUN7GLMNAwNiGUl9k1wl8crK4RG47ZmkLp1826ek/Wreqep12utJ5Px9OD06IJK2R+FcRNU7GIdq6ZCZyN2I+btcHFzudhWr6jINLQ2FG9gYDlK49pAbj2kqnLecTp6EsNXLuer08ePutWG2LGjbGKuSgpFcL7S9SqF3NSMgRGAwcyLFVViV1TVspht33aesGEHWYhKG0KXU2AmRyaaASoOWnJgApSYxJCQDAGKZERLfUIzUFQVMMNShtWQletKqwDo0BGLAjkEIDTdts6dc1toJYiUkh07RkB8FqwGNM+eHAEYM5upwvZ2hMEAEYpmRRxief/+Q8Z86+7Vs9Rtlvnk6BQJuvmiFK1FkP16cZqSXcj0H/tD/+z7996eXqlGk718fPT6F3+jyTTeb268cOVpad54PB/vTss6wmpQFSuFkRbz9Ze/ft8VSFEQnANoHW0hjWYm9o08kaEjaBDFqAPcmPYi6NijZoAqBCZmMymyWfVNaJrQWOwDaBvUCjCgZg3sE0DFgQG0iCcHAClLJASFTMmYUDUwRM3eewJrGlcHjrkMAFHUVVXqo8cyaXHSShlW50fxvffOFIKVpm5H664zsGLqHUM3nDyM/cr4G4fk0/MOTSUVrphoOa3r/enuYn2aka7uXnnv6P2qropAb5z6nHNqg0eiwxvX4jh8bYhfz/Ef++ZPfPTO5MGvfm2wAk3dixy2YTMvjSnlYXrgTt45vvmhT/6xf/svW2r+63/vj8d+PvG1cPfFv/affPhTnzx87qOf+dWffu2tr5yczzfLoT7crW7vPD3elLb+9Ke+NZT86pe+2rfjn/jbPxGhDKkrfV83fn8yuT1uR1Xzyjc99wuf/c3Hj87RuYxQCoxDGI/bLi0rzwClbtyVnTEiPD45W/apqnw7aefHi+OzjUdtq9rMDYYnvfzqm+8HkrtXD9rIX3nw7v547Mj2ZrOi9NqjE0ODi8X+zqgeKrnsujJvg1Om156csh/d2TucYHn35DRUTU9SwE7WXSgyZh8QjxdPZ9ywmQNqgueQN6vNyZN3d27fnLZhyIoQpuPdNNrk1J8sFjv9MJ3t+qqZNEG9F7Kr7Wx9ubl969rFvXersbt7984Xv/jFu1dvwGT61ptvZODdqzezlPefPrmxs4t1+87T84MdvhLqq7O9buhv3W5snRf9ULI1rWt9dXM66dc674Z1EYXSVHzr6v7J2bkjHO80J6fLzZAU3UYyOis5baKUGLv1pnHMTbU3aTarVTxf+5JB8VKdRtioXwxybXfn2nS3X5+EQVM2BfKCe7PZ60/OFzE303p3t73/4CJucheHD905vP/ag6oded+shnz9yviFq7Ojx6c7iPPLVVONXnnhJj88eXQ8N7Yq8GRcr1PpU19XHovkHEd1s1oPq1IeP5oDUDCpnbx48+Ybb9/z3ne5uFDVzbRgyjGerZednH/8lRe+9u4ZO2hH/vTy/29UdHWvHaIFqLpcEHRcheNNBiBRdVQBUEFoaxeHbrVKApRKNKJH8/kEwu39K6eLy4Px9P3TS6xG2RKV0s4m7M2yFpNstkoRPCU0dUqgUoHiFtUpYiJGfdSYclUTAZWcEYAIHfuSn+HiBKwJXs2GmFERwID4eL35C//lX/vHf+/v+v5/6Z/96R/7yXvvPpg2lQthsxlSzirD2+903ujHfvRv/e3/4X/8+Ld+8p//Z//gzsdeuvvKx9YP3pKXpz/w7/yrf/Hf+g/hzd958RV388YLP/eTX3/pA7cwdtcODvO7x/yLb92x5uz10+6//epVmXxn2befub/zpv/1H/436j/5Q3/kP/2vH/xrf2H9s7+yHypUJ0vULr0A7YP/6q/Vf/Zfm3z7p/Pr8+rmNB49Pphz+Qe/cM0fYoc7e1dsebY4v5itqynvhh978/z+veeLZsDdnec6hmZcDQ8v331wMrv1IrxTXRvd3A345Y9+6uaN9hP/1/94/FgVKhgZXEmvfe4nmj/6qXt7Bx/6U//uq/cudl44+PGv//Zf/Y//jCrH+/Mr4xm0GrOVbCKWVZgtE0GCbJKxHJ1dpm4zbkO3yf1QmD0YFaX5UFSlpDTabfdvTt57sNEq7EzCXqiGFAeUG4d7j49ONMOVyaia+uPz5W41uVgMRqYIs3FNpuPx5NHxSWirph7FZRm14wiDRJtNg+Y4kM2Xw+WiP9i70m/iC8/deuvoNATc9IsPfuDlmwcH7zx+slmn2EsOvuTSDeXZkzpKb/lo0TeADTvyvma47PuLfhE3pfFe1kPjdUT83v2z64fjvfHo4nLD4DXlqvbPHbZsdrZYH16ddF1MYusksSvDszImWCFRloxGQSJwAHYJFSCaZC4CSYs6JFICdEDMXADUSnDoPXtDD2RIW1MnEbkQnPeKZmAxJiFAIgItYKCUB4wDFEUB06LbWhIQqCqBeselWA9IjtgpeGvqIBVsjR2AyETOMQuUgmY+FQVRZxgq8hxU1MxScWhAqqbA5FTIgKyoCKpmM/OoU+8Dt+zHwI4qH9eCBkmgmBXTlEUULOcupX2Dqe6q6njv+RFmbKZVPFtdHK1Xy03J22mZiqIk31Lun7z29m9RClDmh7euPvf8S4Ga2rlSoudgWKlu6+oIQNtpEW5RP0jONTtXbl5ePFls5s14B9kDqJasInnTWyyeadVfPn3//o3djw5DP93baUfNYrk2ITXLYowWHBCSmQKqGBUAAmBGRqe9IJEU9UjAZCai6ogMqag6g9p7RGLGUWA1Ww6DAZpxMWFHYkpEDqEYxKRti+I0RSH0qihixFCKBo8+eFFhRgDDbQcQUJLRNiOUoRt00gYtWo98N5gYIBJ7lKKApEbbcCU8o1sCGm4raWSghoOpI2LEHIsZFMMtVUHJosggBoRkgAaGCAi4FbuZqVlREFHM6ggM7B8KknfGYzEXQReD1JVFKdGSsSkoAIoZGThTVEsDINLeDrPT197+rd/4wm/0yRfFHJWR2NmWT1TXDZNpH9UMjUwNwZyrVqmzokMqN2c75LML7vzkVA
qlYlnK1b39blgjmqqZARM9i/ER1rUvqVjKdRXYw1rLSnMSA7AC1jDXzEbgna/rxju3GZIVBZXKsQiyr7Z/RAyEimKAhFVFUMrmYrM7M9MiQphyzDkKv/D8d9y68/J6M3dn7cnTs9Inds4xEIlido2LXexi9IxELABMru8SBFXQzWbtvTvcnRTJl+vh+u2Xv+lj3xHSMCxPNpcXj07PEgJWPiIGKVxRbaSs3oaS1FCzeaBmPKFu06kmCDzEyOQqYsNqsJzFsCueZLWY101d+brysRgWVfOUkXoQHdal2Gg8NjCh3JeiRKPZOK1SStFyBkNmLEVEVAuG2m9RhWLQtv7qzvj48hyc6wfwAJPxJPYDoUtDkSToaLNKo3Fw7NFgtewRgcQWNtTNtHLVerFBtqzufKVvPZh/6Luvry0+vX+0OT3dfWG6flgyQxYlRO+xaaujbkEAleeL+cp5cOD8yGMw81gmVcJ05crB2cU8dzmpuQSrTT/E84OD6WffeP0P/b7xxz7+3Ge/cORrXnUJTE0RCjA6Ej+c5Hd+9suzl67tfcud63evbrpVW1mYeUiYJWfZNKNwfTT6+t97GwByKmaopkggUkQQyIlpVmUCICi1o/1mvriMSRzViIoE2UpJYihEOERBT01d9UiCOl8kGJynKg+xWE6DlgzOEzD6ujLIwFoGYUBCRgBwRjUE72om5mcpQ2PIRQSIzSZVnYo6xqptvXG/WI4rhyWlmDj4NcXpyCs5UKiCy2Z9KcE7j5yjsfMN+ayyikPjHIe6Ck3uBjFrK6dC65jSIBpo6IZi1jjPzhcztQxAYHhxloda2cbBtZO6ucyXQ59MMBcrxWK2quRsMgw5xVLVQQD6OCxzVRdUK3401ho7GqBJ/dCdXJ48XS1Pu75hDmox1GHvVjUdceWZAiCpqLLh1tuI6DiwcwiGAIDsiEzMSBWQzHgLf9lWghRbHIW+f+Nzv/Zbv/Cz6+VGNscvfeRb/bU74+sHMmQCR4RglGPOZTg7PT568NaTe/eLmiQxtq+89t6j1WUn6ho3lORDG5No0a7EpvFsdHK6VOFc1Nhcq/vX60I2authMxydrlbrUhQImQjFdBVXFrS51hqia3yYtNxU4KGZuVs3a4nDg/fyk6MyrJBJzxer9fllXm1cdppYxdBVeVlgGn2toa76TQ8GnqTZYfT+6bJrp3XT2pWDOi+6vr+IMfbq1BlZyatUhDKCMpjDtmomzY4DulitujwAYV03PkAuxeiZJpyQUspm4BFzFjBDEzJ1aN1mCWp91GE1hIrBIQKVbJrVyKydqNSrxXEpxsGJikGWEmMpQHx0fLaJsn/7pqV6Z7J/T22HeGevzV1n7PysMSJ2viJfA+Yh9WJX9vbn87lpaipswmSpMhmNY+oZoA7VwXS8istQ1VxXq03/4K0342qDgACqUozITUPYbwuWFHO/HEqqo2bMDN4RMQAgByoEwOywFAHmGAczqHwI7Jj8OqdsgmSg2REFBUVkBPAsW6NoyWpgAIKIput13yeNqQBHLoZQVOXhW28PtHf75WvI1JgBIgEhoJmxY4NC3AbiXCLatq9ecozBBXS+5Ow9eu8QcKs+RTBTdd4piIEBQs4ZVFan8/fffO/tX/uNePTek4dvfOhjHx0dfPgzv/755flRn4bYbXYs1ePZ/ffuu9HB/uHzHVa7L74826mDjmV19t5r70bK/dnwys5efzI8fO8hbxa7Ywqt71fZBT4ctfdPLwdzqCgFW8eGlkxbBsdbdiIUEXpWnLciikDj0GyGPhnkZFXtsmlBTVCyaiyKQF4yihJFx4aWiQyRvfGQISoFBM/oGAjtGf8R0Axjzg7EOwqEEWggH0aNbyof2Gn2qHWKBubR56LLVYcy1JV/9+kaZRQaFIduFNJ6kwQGB03gksvQm1HI33DCMjIAjndGfc45xrn2F8ODF27uO4PL/un+bh1jGQzWSZwpWx6jOajJQ1Fdb8qq17/96udo8m2//wf+ic/+wq+cXi4uLs+kHo8oXB6vw271id/9of/dP/8HJ+HaX/73f/h01U322g3Zvbj54It7X3ty/tb/+JMn538zVdoz9YZS10/O+7GyHNaj2finf+nX6pFvd/a/9vX3UIVK2qvq6cFeZT4lrcx//etvff61r0chqh0iOueGrh9Ndzebzgc63N1drZceaWdUHV+ueoEhghVJZbM3bZy5OzcO7z94HLy98NzBk+NlMlpFWj+6rF34xHN3Auij49VyXhYCxq7rhqbCGnkT415VHew26vEzb7+fAUZUz9fdhcaS8NZsp7R2drTs+7I/m1ben/XrPuLuOPC2ECqySbnP9juPn06r+srBlSSmWXJE4jZKZ2hHR0+q/d0wmcCm6iQeHrTvHy2jxVAuX7o5vVx275+8+4FXPnR+enn//v3BXN+LQQ9luLW/E0ajp6u5jopUIbPrF5vdptqppgG5X5wM8+LNjMIl4GqjQn53b5c5nc8XT5Zz1TSu63U/2Ig3lvpsg+DzV3bbtl2vV+Sqy3V/0XWe+clgUGQm8tyVVnu3TtLFErNlzkrV6fycJb84moXKnUkaBff4eJ2Q1DsOsDzrqPZptRkGezzvJweH1uts2kxG2WnJZ8trrkpRTucxVOXk7Gld03TcnueurSoDh5DAynqTGflgWu0e7rz15DgVA2cA1ve9eXj76EE7HSNz6WI2ef3Jg2/7wK0Vx8uNehkvLlZsidiF+h8toG2W63HbqMdlb+t+WHQxg47ryjX1prdOCpAkRAwUy6YetbVCLIXMR4NB497utMRybTJxgGq2Mw5RuxJNNTfeRSlVcFXlE+lF6nZmTaj8kFIVqsfnSzUzVGYzM0HMpTijKri+FC2FidBMigBYRNq6ipwHMZSobe0D2q/9ym8+vlj+U3/kj3z1M5/77c99TmEgBhVT70VQGE9XC+3zL/3Mj3/2V37qpY997M//S3/m7q2PnB2//6V75y/+gX/8IM/fe+Ot95aLs/H4VqB41q9Ol7PD8fWdnTFXbj1AjuHu7rg7w/GTb/l9n9z/xV9/8P/4S83yYftoxVL1KwMWwiya2vHerY2mr1/s3f5w97kfAT2s9sPharX+W3/3euWiyPztJzuMDJjOLrwO+uDs1sFOjmvDyrfSiKR3j6eXM682vvucfu71XSlwGV6Bg5c3/fG/9qc79gc/cP3tZnz1X/i37u5/AK7uN5vlxbl9/jd/+W//3340blaTpr17+Fz1sr3x8CGyy3FAQvKISIbgm+AAoBgILS6SDbC3u2MujkakWho2Z7q4WJPjvZ36yo6rR1C8Hl4ZyyptegW0bOlodcZOSdE57OPgQnCtSytLguxwvYltRRfL5Xrd77mw17QDzK1sXOkqL6PJOKc4HtdA1HrvAWr2aZPyZphWkw/cuVl5enpyvF72gDyUlITYaLHuEHTcVJs0bJLs+ZbRzs4udnd3EkjtXXAVud5Eluuhk75v/LVpA6KTUTM6qFf9qlglgH1f+thHMO0TZmlBb1xp+mhDoteOLgCgqhxmlaRkYKJZUQEQgAvHKIIIS
KBiYECIBloKIoMVIGQmBDUENUUwJqocNQEdWlQbSikIQObYkEStACAJQlFUBFA2QwIAY0bnIDASmAkCUwbLAt4jOgSnVDOYKRMQmxGICpiAQkCPVQUUHBIi5pxFiHlLuTFU2YKBFExMtGQpQDCqaG/kBs2lKJr1MZIjVEZ2SM6R9qkImCj1qJvlogxDXY/G0z3WbsKH1nlFd355Bn7lMgqotwIElQeiYb05n7hrn/r27/XODJBdpWV7w5M262WownjUEnncjjFs+9GRmAKax3rc7L769mdfeP4jVdVKBtUszFxXqmV6/eBysRAqEbD31dVv+sjz99/52pe/lKMQb8cZZqLgENDAI4GBmEN2nlWxrnxWM8RQwbQdXa6XMJghiWGFjGBXZlXOJQ3ZKQyaA1uUUhCD92qylc0DoCqakqiRA3YCyqYG29EUMSE4epbjUQAzIUQEEpAsQsRm1vd5XIDBQkB2AgkQkMwATdTI8FnaZ0vDxm1UylSNRACMvMtgxWwLlQBDRuyydDlHM0Cs8FmSyAzUEIm2CHUVVSMFYAQzY0JyzwpoMeeYRIsAN9yMMohCcgEBgBm9IqGNiOKQpqPpjWvXruyP333w7n//E3+3U1ZAQkOEirgFsq6XqjJgA2mIGbnxIYMMkuMwmKoUWQ/Da+89qls/iPadChgiTqqqT11KCVQno+ARPXEVXO4jeI+I4LDyXqQQU8nRwHjLJDN0hCJSOzdpXEVouYgJlhzQGMA5VwicQ0NerbumCt4xOrR+GB1ebadTAeDKMWAphT298k0vXr99a7PaoOB4NEt7OvRd8M2onXQlTSajoV+rg3HlVEwEDNSkODBiQoPFerMzm/arNSGyVAcHL1mm86dH85NH56cnouZaz63fGTUT12RhJXe5PCrIbdMgI3C9idFQas/dRvtcclHw1GuOcakOR6NQBjnebK42Ve3dxWqJpjkqIu6Pqj4XjXmVeoc8GdfFStHiDClJWSlHrdEwOEJdd0Pdhsm4AnLMvFkN125fWayWrPLue8exWKiw5kZS6TexZFXpkcgz18FlTbeem5FUy3m3XvUliVjRDTDjwGJZ0ib5ljnR/c8dhWVkc0/fPsvFKsfDunDtUSENNgy2WvdEntH6obCjqnbTvcmdu1eIw6JLY4bdcfXq59/e9KWt/OHO3tnxaUCug2sdWOSz+5fffOvul379qWuY2JxzUkwGUdUhR1dsWtnefHP845+5fvuqxM1pHAj8cp0VyuTGuBk3K/eNnPX2yc+0RctvH51o5smBh4SlOOpNLhar5aobNZStOOCsUlRFZftwQMuSQDUC8sj5CEaGZlr6ZAqMDsRM1UriGqraCSiBoaiKIIGb+Kb2rWPIUkoiYjQig6EfEKoEmYlBLZWInqu6tr44ImSsPCtATqZOGUE0A6EjzRAl05Ckcp41i5QupbYdd33nmzapSslVRd4FGeKqjylTikly4TrY9v1mUFRQaUg03q9JyZmt0jzFTc2VoeWUa0fe4TBEYeLKk4iJpL4c3NxRxIvl+oXbB5f9gGyxlk7Pjk8fnJ8+cr52tRjoeri8d/+rm8XZlSvX9/ev3j68Hvgqe48YHDtBQEKwsg3KbiG1UpTcFtyn2yKtQLFs5Hjmq9Xp5f/2t//y/tX9P/Fv/9k0dL/+0z/2k3/n7//eP/aBM7+pvYxmYwcoqT+6/9Unj++fHZ9BkTuvvIDAq/l5r/0H+YXLL73hsKiTtm7TkD0BBS9ZSpfPF9lRlawg2Wjkpvtcj9xqnReL3K1LjsrswZTRqZaU+lA7bvHK/iitclkP3sQ5p956wPPOzh+ul8cS19IPwMFmzp0vl2VZgqOAoSComRa5eHp07eruuK1lo0NKoWJEbALe3Bk5UMywOuu0lD6mrEXr0WR6mJYrAKwqxy6fXV420Bp7kbI7vSKFCS8KZIIcEAjI+2cFNGQOTTX0sZgSWYrJzMipJ0eSVWy52jhDZcspgVo9qtgEnK/qajjdbIahmIynVSylIdLBbCBFiSk6wzKpXRUfPzrXNDiHNTpf+155UPCIkypYUclJkdYmaXlGIlYKbfRgd7pcduN2h9REerTN/rh+cL4yg5wLsQ2pFxBmxoII4AL6FlyTY1kth5UmrwAKVBRYFUwBja14QBMTNULMpXggx1gTipQo1on2ogQQGhdLjqaEHqSQc6A69AMRkKGoGqJnHEqMJQ2pVC0CaUm9dJfnR+987s34B/Z+SEpfVQErZFcRoJkF5wmMtgwiDoymqiJGVIAQCAnNIKeUDGrnAvNWm4rbl7tYURWTTDLAxZPXfulXH37pC2N3eePKWJdPTwf9wAvPf/no3RHo1f1ac2pYQ1UBmKRzmz9auVAfeifpzTe/hlX2Tuv26v71O7q8f3GxPHAA4BwwABWQZVxd228uOllFMwURyGajhvbHrqilaKzg2G13mFQQTJMUKMNO5QbVgiamSWzVx630Zd5nR8xMfVcAdItbQi3Oo0gR4WLsAHPOjpGYBbRIFgMg8ugc+5xAIa1Lgdr7nTE1kyRRNAkqmpoZO7pY2MPjxa2r7fMvzR4/eXSwc5C5HlLZbysGrB0Wyoc707Oz3uwZxv/ZKkCKMfXD0LTBEQFQU/n1JqqhFsNAOzvjNO9iLCoFHa6zVqPp08unQcwBX51UfbG/98Uv/9yXPv/Hv+fT3zr56PLhu7Myvnx48bt/6Hvbj3wwN7Pf/KXf+Pznfvb89AKq+t7xckhWVwBlrd72WpebkXoiwLKJvg6zqu6krPok6fLadLSzG0pDlcreXj0e7z96cBJqt7joHpytRxV1ppvOHEMVnORiKtf3ZpLTtK2Wq269WTmkGzduPnz6uEulqurGOdA8IrrRuiK4ubwIjrWkA6Y18eWyr8ZVXeHp4+WYaObdc1cPFl3uFssehGut2vD09PzabFJjGbfw9qNjUJmOApXSd5uq8Qf748v1Yr0qoBRG1TJGV1IX47QdeQD2biNJ+iigBqYA5+vFwf5+TayGICW4Kuzudf1qGLrNcrW7f22znj49e0yWWw6T3b2cuzo4LuCsWfTp8cWZ5FRSqauq8ZJZPvaJ5379C19bZnAtQQUAeRRwtjNtGNfdernsRnVbAO/cvrJZr+Jg6BjTRqRUCnmIo+B9QIt5NMLpzA3e7j9ZTcPBct3FriDzfLXyjEnydKeqR5Ojy3ngEJerZMhE4KSqXLfq929ehQ175Kx5nYfpbI+a6IGjSOqkaDm8Po5pY0KPzy73nb8+nvbDuh2FANV83gmwMbxw9+aTozMM4DwP8zUk7SXloF1OUrIjJiwfeOH2Ww9Pz5arCoMHcURkaEU3y4R1GI+qWpIHM8273h50KYR65jzVtjetYiqHB+0/MiqqvS9Jl/PNJgIaFbOmcqTgHNQtCMVrV2ZPL1YOUDko0N393fcenxNBgTTv85DrMeHYsUFugm8qtxhkOaRuyNwgJGmB1AzNGh+CD1aymdaeNhcJUgA0z5gFyACIQs3X9ncen54CO82Gz+7RDczINDTMyKvV4Igqxp12fH65fPtrr//MRv/kv/4v
zw6v/fLP/4xxGlPoYxqG4jxbAlBh4C7DF3/7Sz/8lX/5o6+88p0f/vinP/W/T/7Dv3P53u/9A9/fr08mn3jnu17YvfjNV++9fV8bfW++OH3w5O7BQelQfueeGyf7/K9988e+vb917XDgs7//2Y9853c9/moejXb94WT59L327k73ZCU+7Y1XD3/ux/emqT9/P3Bl12ZOLFzb6U97WAzSuvE33TnfdG5H6pn0PKSWcpcpa7/ON19+AVqfvnh/+I2vnxydCc3Ho9t2fa++88HdW9+8973fBR/88Muz/XlMDx99+X/9q//Ra189qlcp7EyvXLbLYhM3zsu40F7ZqHLWuygaABgUCFDNFDQJeUdqahSCr2rXl5WB3dmfXSyyFSzF1Mrezu7Dp+dOCi5TiGWdI3rw5JvQOsx9Kecx1iPnnAkWITGyyXjUxW60M+VRzcvgwKWuIyyscNi2694encRg2I78KnXgwAeYHEznsW+bsLc7JrX1ZtN32RyvTI67ITOSGDlFooTqgnfIznFO3c7B1BPlIToXGjcuEDcm4H0xW/Ti/VCNwuPTFXoc+m483XOVP0kXqfSFOVBglHHjZ1XNli3JdhW0LUvUghCHwogILMVLISxFZQvTUSMz/4weowCqViFWBmQqyEMpggBmI+8qZw5ztLhC14MIqiFu1RQCVNByUVVTUEJzgbyD4Nk79FseslkpGM2kqCPMojoYiLIJoRoZVx5ARSGLMqEn3zoX1NBEFbOUlLewL0OgLCZFKSqTU4NURMw8GlginpfYmcR6MiJ2FOphc17SUDusXUOc57FHAwQqVob1YBlZpKkD80QYSinjnYNMq76fp1LUhMGkpL6ne48efPzFqzEOORdsAyU0tTisluvzi/XjD97+CLej7XzCALaiRkAkBSAAGp8cL+dPZk8zXn/uCmoqsJLAQqqVnxzcJXl3WK1cVulMZPzScx9793e+XmQwAAQQQDF0iM4ZojgC58ATZijJETnXZ8vOuZGLXljByJWC2Uo1oabNOOpl4/za55LrGhqCk430BWvwqFZKYUZPqEApEbPWtcVgqc9g7IjEhBnJQdKy/fwRUKRs3SWIAGiMWMxSKpKdq8gHcxWkrCzPevlJCoAZ4jOvGpjZFlJknsEhqqGZiZmaMiITVg7FaJ6UgdS2UAMABEMUMwNTLduqPxgAg4kVQEbkLc0bAACGWBy5nLLzlWPnyEAHZslZIaOIouPLxer29YPv+57vPJzNHt5/8D/95E8PaqmAU1UTZvYOdkYOiZLBat0Twqhq66olDp40geRcQu3Ph36RykaFchKFypNlHY99cC6JeM+q2ng38wxZAiGOWiPui+i2bCiSUiwiJMrEhBi8r5lEtXIcCM2kqsNyHbshRwMRvL4z6+LQDX1o3JW9ETu/nvcO6OpsTEK+nbiqgi0YQhWB2FXNCBVUrbjNsJyvRlVVmumc5o6DZz8IBORBsQiKgoiyg+k4KOmoaUJTCUGo6rzSV178tudvftPF0bvH95+kvMqS6toVg6rI1MrN2fSth2cRHGE9FGxdW7sRsru9q4uzU6wJh9LnnLSQIZqVlEtGJTI1D4gpL6wcrVbo6oqgqVhThj62kyZWwTGuumUd3P541DZ17IfFuktJDYiZFFwzmTaj9s6NqwVSzuXWrXB0Pt/MOxQtCaVg0a1xC8FUUjYDQGOiEksYVYTVk6dnm3WvWbdcZjMjwlISIfqAnilna1x79G5PAiVWbAZq07FHFvW1Z7fqohHs7o9Fk5hMDqbP392r6oajLuYbdfTk7cf350kGiyJU61l/7sg5xH459KbzZF+YH3364x/+3k98+Osnrw1WT3Ynj4/OIpQazdcsre3emVTMu7mllWwuY9VUdd325dKE+6M+0SDwDGttpiLJlJiIGVQMSIlcEhWwAQ2UN9GSQkY+X/YuENO2D1gccwjOMwoYELNjU1A01RJTjDmX7emFEBDRUz2tKEAycQ3mUiSY84FGDitfoghsD7aoxLaFxgAq6rIM+zs7zoCHMmE/JNEcUsTKuSEXRmx82JgyQ1ZxdTur8ej8rMGpMW5MyAoAhaodFCrPp91aciIAWS2v7u/t7086y2ebbIoCDFEUTERLAQMwEmCn3tVVVQYrsWiihBpj0qLkCAjZuWvXdk/mi/UqV+jHk1ER5+tqVE3mm3V9/WDVLU+O57FclM2iYawp5bLwrmZQyfro4WUpizIsdyqajcd12EEAROfYkBDU5RzVBBDFjImeZS0RiFQMUF1DVZH0Sz//I6fvP/70p3/g9rd8stmdpLzYfesr+0+H4eGjTQhrWszP3YP33nToKqruvvit3/TRkYlMru4vF/NYlhufL1+/19+/V/rSD6lRYCJkG1fjYdNv+gjiiL1JqSs/Cgii77137l0jQiDEzITgqyDb0i35esLXb+zNTzZlUYrR5rQfm6MKg4aLJ/n8EXDypcvjaZjt1pJKw60EKwKqGodS2IAcpJA3enRyJuCCd2mTYxpq77yaA11vNhqsGTVKaIRWSlwsci7eEFGLynRUIWPqBqCwkWNUmtbVar2SkkLlhhiLPDsXhCZ4DABpKCWJZkuKrDlHKRYzZCUAh9JWbVdEcgFwsc/NaJJRj4/eH0pmT7mkquKGuYuqEYyxcVXp08XTk51pD916THY4HudB41DEUah8MKmKmOoabSC8IBih7k/atFHxVSc51G4oCyFJQz+pmhEbe0pIbajI15t53Gj2BmBaBWqnwc+w8mmxXlcCwyo7rEVtyCIRx5WzlAWLI4+ODGEz9MDsCCUnDAEI+yEXRgB0gIbgmMRA2GWClJVMXHDOjEoJRMVQimw2fc45paigKKJFT95/ev7waCzN2eOHLsx4vjzc5RaZQIBYpTCxiSEaIgQXiqpzIVSuiCAyk5NckHC9Xtd1XVdNVVWqpRQFgH69cUy59OvLJ29+4ctnb7/x3I3Jk9NHZnU377rujXEYX6XNarAqtKGtun7lQiEo++Ds3bdGN5+raHdYzOfHT5zzDx5dfsvv+fD5Wf+1z3yxjv1yUwyqvdkoDAlJRfPh/tVld1mxZFOHIADttH3hlVvvPznPF0vtcnBkBEwMgYfBgKzXMq0DmxtylGJCbMx9EUnFGYrBolfv0TMqASHWwYsBmnrjATCJEpkCRMmmnE3JhWyYDUiq2CkRRlUHPXSo3jwrS0eeqgBHZ93xRX+xiovLuDvZ99UVRw8q5+eLtajbCGUoCDpxvlusW4A+Rwaq2/rZIdnZzs29CqgfUuxjykWjplKcZ0K+XG4aBVOgbBV5KdoDHJ0dM5SGGETBjFUQQofVX/+5396r8rXr+3Vw1Tg+ee1XL1777PWbLz1/cHj3U89975/85M//1GfsfIPD8OLLH3jrtd9RZxe1JaGM7nI5bACqKJv1qlT0u77rQ1fa9qOvvPDG11796Z//zDS0zjT2cVQ380V/frnucukFm5ZNjB3d3N05u7jscrlcbQ72pkPUknWVNzeneyeL5VnMkjnlXlJuCK7U00++8OLr94+OVpvVOgWPpyeX08r7nea425z
0Qo0/X62v3bruHDYONaZMgCGAQN7E6dX9wvyrbz5oFGvmItKvhsbXo1AdDcPFchNCRSqYyvXxSMHU8zoODaINBUHHdXu2nLfeE9OTJ+e7YXznypVs0ElGUwA/aceXl6cn9x5eo2rcTsZtOwypafy08U/mQ+5t4rkWnQ8XbYOrJNPZRAn6y4sPvHD79OmT+XK9HPxoN/TcT68f2mU6OGgIoqtteqU5DOAqsripLe5fHxVR0dInBUAHrpp4V7OcKhtB4YbklcPdHY9DTEYkEoOjUDH2JXfRSo9AR+vLCmjncNrncjDaffPhMVC9mm+6vFn13Qu3r12ZXj++XF29Oo1DN540x8cdlLDHbnpw5Y0nJ209duCi4qqXKMUg3r5zGDddv47e5xz0LCZMEriZhWKEojar6iUAOj6ctIbSTtxsPbaCDYtkIwAgyoKbPqssR94PfXIhXBytd0a7qzhs4nB1VudVcQz33zn6R0ZF5Kgk63MxDoQ0LOK1aQsW1+sIPlTmynpokMUSI/dxcMijUd3FDGjMMBRxdTXv4rimXUAB6HNKWcFXxWg6qtUUHI1IS9fFbpiym0sJCtlTjkaOvWMsqlEE0VrOGnd22/Umr3I0QFVw3iGa5ITF0Nt0UoMIRUOQ2tc+8Gtv3fub/+3/9Af/mT/KAD/1sz/ddwvvfQiOmWIqhgSAWAyMYslfePONtx+9/9/+Lz867/vRrKYEH/rER3bv7l87b3w1uvZP/J7f/b2//46v9uNT6/oHr7/riJcn9120H/3sa7mCg/F+PL346i9/9sY1XqfoV2dtNT976+LOC7P3TxfvH/3cnb/87/zqV9/6yK1r7avvPvn7/2B+9Pij33RIV/bfe/XrH3h+7/6Tr9Gtna6U/WrHDxqsfvPRe3svHBztDo+rp3e+47vX3/HxL3SPf/8f/8/f/+q7h7/v+4G4kH90cvFo+eBH/5M/9+7Z+++/ef9mzQ1XCk17dccHQielz6dnlzenVVE8X3QFjT03yI4gRwFRcIyE4NAFUrVUyvnl6XRWZ4GUYZUGQ6fEBXC806yz1qPxNag2Xbx2cND3nbCM0SRLjMIUln3EgLOA69VQh6rrU85l3FYhMHjWYN5bXOemCiPvQI18/f7FpmU6w65fd1dv3Kgn7VrKoksH+wfrHrtV3N2frWB5Pp8nBEBzgExaIRtCtxmYnCNenJ0f7LTBOTEzZkU8X5/ltJHgx7NmfT4oUkd6KXES6knrK489yGYzOOZrs2nxtCry/M3JZtmvNrJYDuv+WdCUCoCBCBCwqnBREyi5oG2F4ttxJeZnWCEgJCYGwKiqBM4EabsMSaQMyWKiXvPgM6N5loSQVRXQtmIsFUbzziqPVc0Ot8g+BIVthkRJt/sAKWaKOQN6Vzrwjsgj9AUZ2LNn9uQZwXKJaqKSxVDFgCxnLYBGRUyyBGGhLKBFVAGJMBZa9paGorYcTc897yu6InmIvRG1vq0q8wybfmMqhsAeQUpaLxjrIcXVplvGzhrHVuG5OWM1JpSSStG5m7ZPjt7+ipTnn3vu7tW7lZ/Ol5eINtu7fXj1zpgDAKoqE6uBgRIQmCGhIc8vzn795/7umOqjYfHlL/+D2c7tyc4V2Nk9vHs1WJ9XtlnFxw8fVof3Lobw8J334tG7zlch5lQKAiCzGeaCzkNVg3fKSLkU18D0Bq+WA+cgJll1IbEaQ7Rsre5eI5imUauTGZ68nnWhe2FWNIoAqDEgmShYYCI0VQUlBJ+LVhKDhxgN9JnOmk09OlGToohbRhQDIJgB2rPJN0BJ1vc48oScwwhjhyK09a7h1kwAvK2MbZFDjLANOtCWXgDGZgDmGU3Fk5OiWQshA9g2MURICIQosAVlf8OpBqaE8GzoKQKM37hDMzVl59DQAQybFWoBFRUpSOwCERDzJz/8wZBivnj8K7/9uXeOL7vCANvk2pb+hwk0bWIGTEA7o9GobgFcEev7HhGc4eV6tYg9VpRNpQARNs7VjtvACJZjbh1x8A5NS5mEsK3hOWT2uC5qZqqqRRvvAmHJoAZOxTEwKmpJEREomy1TKgAIiAZ9TI65bXxT07ittcBHPn6nZgkyRGu0yDYBRkg+eNHifEi5mBK7SsuARRnIVB2QZw9iIKYCjExEBtJUfhR4f9ZEif0QiwME89598lPf8/xL3/K1V7929vD9uB7qWTOetKtczCxFmadhXE52rlbLnmpp2U88VipKpWctY9S69iWGTUyIQGBApEWCc1osl1I7TsmWyQqFwO7m7uzo5KkPY/JhE0W9Z/JYMTVVEVnFHIeSBIEcINR1453fxGFYDW+89rZR3t2dXXvhhXv3Hw6bQojMzKAmSu4ZAZiZVJ/p+QDtxVfueG/dsivJEFCsjNq6pGJmyDQeN2VIqtrW/vDqwdCvFhcbI/B1sKwvPH/lrbfvZaiSiA8EKDasx5N2Nt55cLz86tEjENUhcQjmQIec87MBaOzyuKqmTdsNse/j/WVfnB8C/S7Kf/iPfpf+zOXJ147nyz5l1QzD/4+q/4q1bsvy+7Axxkwr7HTSl7+bq6tvVXWlZnd1Jps0qVZbJkSTMCRasg3DtmQ9GAYcZMOw/CjApgwDEmT4gQ8iaNKCaLgJkh3EVnWzqzpVrrq3qm6+97tfOnmnlWYYY/jh3KLA143zsHH2mmutOeb///vloqDzW/Mh6xA71ZTHGGpbJF/32zFlEJgtZy7YYRhuVkFBRQOgMqVsEAU0BFtyHqasRCPJ/J7XhrS1SdUZo4RTYiRCYxUwFyECMarW5gLAmnKCIFLYVSSFOBVyEIJFK76yQOJA1VkVIgcWkSwZZisCBQsrWDAGVYoHBNSx78VXdZrm1gdAYvDG9WxSzoXMIEwkkBI4a0IIvhpQUykrfyjFsklTSY5cESmok5QyZimMpaiUmbG7NB749tZ8vo27iKhUbop4KTHKJ3PmxaIK5PqpjPsBhpSjRGZDBiyq6H43HJ8sTEktgtTBMoKCM7S6ffTow6e35jN3cVpA4uYCgxr2q9krd+79DElc90/Pnr3hHR0cnxzdf7FtjqKzuziGdlEJc47OBWZ21rOYkpnwxoCCzMVgAQQQtOhJ4PE7b/zR1//BZ770c7/xm/9z51bRhO2uV4tf+Y2/0r//4Te/+k+nb4af/sJrr33m81/8yl+dNUeaRZAsMBBPhYHrMsKffet7v/f1P+/HmPdd7fy945OnF0+cDWOeiiZwChadR2uqknmMikkRWiILoPOm2o07JcqaYlFL1s/CbBlk0m6bZCBB2Vz1vqaKfHvgt+9f3b516+LppfNIjaLNuU9GxFgrhQGYjIYK28qqZjbZBOJJyOiNmLgoTkPJsYANzhFgZVBUU56mrk+sdoY1OFwtF0O3Y8aS8tXZpkFb1d5VjhIkgXXJcRL6SfVGnIsMYgCN5qn44CfVUiQzQ2ZPBFDunswHjs6oADqyaHw7W677/vLy2kjlrE376H3DMcUpFlZh8RZQxajmNAWnBmE/TnPjC6sLZjGrYRoqArIhchpKYuBYIDG1pvKAOTIDd9PkFFtH8+AzzO
eh30YpU5rVVQ+sjkQwVB6MtHNnqzQMOywEPaRrlUSgzIVBdEppjlA1VpRjEQYtiF0pFWGjGMcEBAKCzuQsIhoTWGYktx7jJMSsAdQQFlEQsYgCUBSEpeTMKeZxQksgNDs8mGJXuuHpo/fqB59vsBljklhAsvU1OY+O0BJaYxQULZEQkSqKsECxzhMYADDGCHApk4ICUhyTNegQYup///d+9/0ff3/WG5C83VwdNH7Ko53P2rmrcwmNmUCGwpsxWp6Ovf3SZ37mzUcXp2//UNaXU9qYpr772vGzN9f3X3zl6MXX44SnP/6xNbmPZTNwl6bKUgtUV/Z6u+XMmsWCOgBnCL0roSp1VWRnDakIChACggSCoJRYU4xN8E1wKqoKUypQYEpcEXkCBSZnDYKIGm+9h1IYvBuLsWgyAjlLIDlnIrLWTYpFABTjGLMyS3HBxXGEcdf3u3bmHcrVdh8TP7vqt11UY4cpDXG4Wm8Wq9Xq9lE+u9xshl0uVd2ABXQ0xhhc1VSSCv/LVJEITLuBDRHCraNZO5tv12tlzMyKpfI2p5SZ24oa57LolLQSefGVl58+fjKpTLlIVk9ijGGGVB987e1tAjxu8He+9W1wflG/QdNUreyte7cpQgWlCuGP33zLJVWU4SIDkVIamXeoLx8fvHS0ev1nXlu1TWT8s3ef/ME33l2PDhJ2F3s19MqDI8dciK1hATJoQ+1WwRUStgRKE0fr8eZ+/vLBsQP3/tm5CdXicLG5OHdoMuPHV/HZ135YN6HydLgIi3l7cXV1cND226H2drvP9QIPF8sPLi5feXBy6/ZyM06bKTFqJdLOV+M69gaGjG1wP/+5uz98+1k2EAzuhnFSzWRjLFZ1NXPLk+bJ6Q6NMwYcmjROnFOoZlgALJYpq5qnF1fEEuo2hMCVub7cestJceh29e5ieTS7vZglP267blPSkJilnLSrQhJZ9lg6YmTuN8O9Zv7lL3z+7//2b6OtQoWoksZITvqK9SBcXO2t0MGtdivJCGlyzhlNiKq1r8bIqgAEU+FuKAw09smRrQqExr1/eh6cc85drbvZspk3zbIKV9c7QgozV81t6crlvgO11qST5WyO1bNdd3zgX75zZ4zl48cXV/u0dbaGfDw/CJZygd06oSUpmi3sOB14u3QuRu5ZPrrcBcR55bfDtDqcXV3us/X1wtvtXlRSKqxoyAbjDxfLs+f9+jLObSigDfkIUxeHUIU2NImLCovA3duHI2sG9JZwKJut7jxW4LpxbIz/V0ZFuz51mwmNHRMbI0dH9bI1XbZKdLmO88pVqdxuQrOsLq/H0172mYtCAlquFsNunUSvyogGlu1CDTzqN0PKUynGByUzxSJjOjlaEeVUuCv5wf1lpj5UPmcdcwEFfwOZtRDmVXu7RpSF8b4CQ6VkHDOkDGx0Pp/VddjtekWdYmnR/dpXfuE7H77fQ+xw/Ma333LLr//Sr37+L5T03X/x1d26C94W0ZvNm4ACCCEUAVAtJGx0edBYwmGK7731w+47IzRQORqK/c/+zn+ybOpf+Pwrv/rX/tbLr3zl7suf/ZSfFotV+x/UsD0rev7db34t3F/ev3dfErrurOZdOHvz+ZN4cOe15V/815727rN/5a+7eX203X3rBX35C58dbz34wY+efer/8r9fPZw//qf//PYv/RuL5uAw7P6jv/Hf+e99+dfv/pt/9eHf/lu8Hq6irF+7tztd9/vH/wQ3300//vDvfffNN79hTbYX3cLQwlRhtvipuw8dwdiVnHg/DRRjwlxIBnF9lCEXyeIDsUERTTkbh6JoG2OBMAoRyRBBFOrAIrO2HYuGqt2dDVZRnDm+exi7bWKddqlyvs9i6lkIsl7vbObG10M2BiBYcg7SULiwKvZRXI0Hyt1+d+fwcKH++snl0XKW0yQGm3k9Gx2Tng6lH/WFh4fWmXFKUvx21IvtvoZqvOxPt7vaWxS0wAgwxUSOrA1EZKzzzg+pTxmRZOAc0KiaQSIBSdYpKRhThLskkmgL6ZY1t9olTPvFrMJSKrRX171HrHJ10cd9TlmA5ZNNsowFBaFIyWIQRJhv7H8qrKqKCKiKIMgIIFBbRBAwVAiFIAsDCBJZNAQqzMBYGEAEbGZgsYZVGSExFxIb1FYQgvOIiMYAkYIIiWhmiMqJWeSGW4gIIETABDeUUwMUUCiFCoJTDyCFECAmiXwjnlbmwqVgJhIRABHkG+o0gIpkNKJYsu3PeYZq7RS6S1fXUB1g1YgzozBAOajrF+qjoVTPtptgaJtiNK61UFCz8D4O18PQj1PXRc4IGRFAiSl4gxwCGxuN9102l6fbcbxkKPOD+Woxs5aMsgrgDY5Rb9xcpADCKlr2m50ATvTxNPUZp8tn72yfLjDMcfOlZnkr7vbdfrrevPn42Vvsmt31kLp9sGiM1M5MmQHAkgUUQ+grrCtEdDkjzNSv/FEVDgptx9JvU3DWGouUQg3uEOuThsfYqTQnENdm2CXNGoVKUQSNysYAGFJFZBYFMr5wKgVsAOc1qaqiCjDf/G5QbtA8BpVIlJ2xICiK8InlEaeolZB1WFW4RwYxDECkQGLRFFaEG+PHTUURALWoEqKzKKJGEckiARhyhlD1TuX7VCZWMMigReXGnoKIclOBAUUEUTCEN35UBeCfsIrkJvMguly1zjZnV+shFUkFEEUVBBLR51//zOuf++Kcrx99/P6b778v1hgQBEqZCUBAp8JP1p1HG4JrZ/WqXUxDCd5VdRAooGXKsh36WFjJKIJxxjoHjqxgTqoiuYAPNnEiIkYMoU4l51hAi0G1hRGxrv0IwKporRgtIlm1KDhjBJABADSmcj3lDACIUXibpwZs7QwUnLkGUe/a8KlXT+4/nL31aMycrbVJsrUGAYXFGvTB4mCUCJ3zdXCVNR7rRdiKN1VlYqsIecw9yyAgwgch+MrlzMEZaut2Vj288/rrX/71/fpCy2WW3UV/3YSKDMEgxpoxy/mmIO5ferjcleT8/Kg6ilPnjInDgMRVZfZDt+n2YFRUd1MMPoC3ag0orppZhVhyLlOqKp+krPO4nB/kgrtxAINRmZGsNRBC6sdpiJzZeZ9y9Ab3Y496U1PEzExt9VO/8EsfvPFdgYJWrTU5F1cZBONcNaXpBvOsXARwiNA23vtF7K9LYePDNCYKvpBBgyo8paQDWAFCzFzOzs9KKsRovdt1UYho1rz4qVsfPNvOqqbro4r1zk59urq4SlFBgBSg6OK4XZ3Mn1yec47MTIRk/TZhSVMqeRenwgw1bq8uP3i2e32sf+rhy3/2w6uLruQMhoz1ergIqzvHw7YXwCHk1x+8Nnbd2x88y6DV3FfOX8ehAYv+JzVMYc4ioohGFKIIiXAWYUAkqSpAozEaZAwqiQlJDWYRYxAABcDVxs2Dn9UsNz6OOD8M1cwLg2eRVIxCycUjlbHYxpCxzGoUnSESiX0hRFUcmQsX21rRnMsk7EgdottPAuvBts4yCrMWPVrMDWopbMFnlL5AXRE7wwbJ+EU4Gsp+M2wZxHlPaPI4ZuUpFwGqnB2Lk
DUbhmkzEvO9o8NPEVwO8bSbumHMLACgBgSAQfCmTAuShtGLKYVB9Oa24p1FkKt1t+kGVc1FydCm39079NM03L53wmN/vXleOFvS2ezB3ZOfXrUPwLWexnZ5S8pG9hfzugpt20ncX60Zm+XyJKXiHAWHSE5KKWkAUesqsoaFFUGKBtuQyLPHH/3om38etxf/9r/7v24P7/lgbhyUiyaASacfnrUtfennH7z+K//G/P4LYb7KYiQCVkiAhiTLNOxTHNw3/pvvfuOH35jPsO+RwJTIz6/OtcAwRWudgvoqzL2b+okAOBcFKoKpUIPGBNimXh2iAQF1AJxKQ81+Hfd5j9oYi4jSD0PRagKo57U5MNs8DO2+OWgXR/W43o37MiY1RrDW2ttbdbPte4ljVbvV0fLs7Nq0Hi0iJOssoMtkCso0KCVRzaGxfZFYoKbaWysAw9j3JetUpokNg7FmSmk2C6UfC8M+QqfsNdt/uU1uQh3CvD56/vysXJ8XRVYFLZoLlWLmVTWvcoXjIIUzGjelfNgemTC7eOdxHNQiI5fFvMKiBspQUrxJY2txJFZ87KaYUrBkHexjbkNwloIjFOMUCkhlbB+jzWLEQCm+FgeGwBHimKJDmM0cx3hnsTpo+z5uLUhAXVWuqfz1dQze10fBzh1R9IaGommtkpoYNWNmKVkwCjKocbnxtqhuYlGiEQTULL1zRJHVgAU0CTMoDow1IQP0nFUtqAKiiFpjrJFUGBBu4qUpRVEWUDAGyFftrdd++tP5jQ8vP/5w8eKtg7bKQIrSdZvaN81ifuLuqhRvrCEPhMYYEHaGRDJPnLIgkPPBWQcAcSguWFehNaKg27Pn7/7we9//nd/2pNXBinVf9vtuvD5e2urWbHd2Kd1+bpGdplQUtKlnOfLzxz+WPM3tPOeFu1y7uwPYgai88PqXPvMrP/v4x48r0t0w5QKciAdZLP2Br+69ePjj956nLIWVVAeQpvW5FDXazJodgQAoiyUcYqytrQ0mUEYcikymLCvX1nVkSWMU0AKQREDEOwwWHLFz5gbDpCqKxMzGAXpLDkHYgBWmIkkBWFjRClFiVQBOExbOk8ldpBCmGDdjzIVvqmqoIrYc3Q++FXB09869y6eX3X6wxoe6RmfUGAANtjEQgTOXT1ZBv4sTcQguiVxc7wGvkIWQjMPjW7ODuu33Q+ONMrtg19spJ2ka8/j04wkgKWrWYCixAOcStVx1gTiotK51q1kUrSrXCVQuXDzfdVP51L3DB0dHw8QlFkGIJbaVv//Srdlhu7h1cuL9cUO7bry+vvhHf/i9J48vHFAwTgwyi0rZbrZf+PSrjx4/N6BKkIoEYyCXKH0R8MEZxXFMWDTY9vbxvR+887YI8JhH3hChwerOcvHo4oKtDUN8Zd6OXX+xEyY83e5yNovlPCF0/XRNkYHeP991Q7l9sqj3/fWmv7VoJ8tqzbOzS19VZ7t0cTnOfBNa+KXP/9Qfvfn+ps+58FHrHh4uGsLH5+sxSU3uZF77WO4fHR4eHGwn9qE62+4OT5rTfTzb9ynL3UN+cHsJgkiQCtfLar0bTx89Kd3BbNU+u957qqYCpq0zkSwX7334qI/5wb1V8k3syq279379137pjUdvDgWwsBJKoZMHh1G4OZmVA50ihclc7bq6sjWFpprFbqhdtb6+XO+HIUkhs88TAikKAodgxFIEvBrG7HDuhITrKpSsQz8q5triweG8L2KwWk/rpvLTwLvNfrVa7nfTmEI7hsUyvPP0jGfVPNgpl9BU7z/fpVharC+mzjn74PbyuZSSgIU/9+qLP3j3o2UdHvc9km0PV97yzJvX7x2+e7bWpJOSiiCYnPPycH68aAyDiPXqDJmJykG7mmwvqkrO2ypK16ciAqFkQbh3//bFs6t54bZpkM2saW1wYBx8fP3fjoqGKamazIqoTW1PloE8X11fq6lC7RRwHHlOpVnQzBiPWdWoparyGWJlwTFEUWscljKkFKy95i6DJjZRKKi3ASOWadfBpDNrpyEF73LJWll/5MYojKZ0eXZYhaWvGt1d7g2Y2mrdlFvHx0+f9VlgyMMXPvug3+H1xaYoAhnw9O6zD37mCw9/9w+/acguGv3h1/7FZx7efWV1NPvST//5t984u+jBGSBQkZvEhwAgoQGMkZu6IYP7/eCsBzDLsPzKz31+GLbbq00zw+vL3cUHj/5v/9H/+TomE4Jg/tQXP/fTD7748Pbr84d3D2Yv/vLn/uK+jGFW//3/8rf+3j/4u//53/vPH7nzr/3jf3b76X/1//07/7HJNtw7/PjH5//u/+4/vDz+lc+9+sLv/tf/6f/p//Uff/qV47tm9fy/+YNH6+eQ4ur4xffONtXwPv0n//fnl/uQ87TeQ+JsuDHkVYwLn64xoKfVgYLhQsxQRDJrMaABE1goAs6Zyr5wcshdBHJHJ0f7qVsu6mGKCZy1WHmDBpR50bT7fck6rWZVKTr1zIiqYo0sZmTVQMBxuy5jZBDHLFlL7U93u+PDpm1CYGzbxbvPLp2RVCSSIoJFgFzmTVUrHIQQ192sNdJhTNon5pS1NtPQv/ry3R999EHTNPPliRq52k2g7Mk9v15X8+XFVZf3qW28pGnqk0Xrgg0IJcWYcu0DiaSxt0aa2hhLFhwJJBmWdSii5MI0JZAys+iCr8Fvd/15N+66cdkGn8mTd1YWC4PGnvXxqhsiqq9cjZ/MTXMukE3OqoKsTBYVkQwpKCmwKN70LuAmI6zEYgBAkR2JMpJRZ6yCgopoGZlAiAABC7CzqiSqWiSLIAo6e0PCRmBNLKUI8yeVIgBl+MRRpgp0o3wHZWZCNECaVRmcJ6MisYixIpQzTxGmIohkUJHgxvEEqiLwCQzREKgigEMpDKlgYUykYRqauUdhN1uJGL99GrmvAiFw46rlrPaI+34fp2m/j8ux3/dX693T9eVH2+unaXsZxy3GbMAlFgo4pVLVQcYuh6vlYnnY3o27dT8m0QLcocDiYG6dBbL6L2E8gDcVP0RCgJO7L5689sr15Qf9eAaVWGOtDjKdnr3ziNXturjrE4t2+6wqQI4KD4Urh0wgQFIAVAxKbUGUCwKY3Jy09QvL2Ry6R7uuTzAxCZB1qnJ4u2YcQhAQJmOIdHmM085eD5kTGnJSEiGyigApM3mLlrAoABMrR7AOvIOcWQoZNKTCzABoVYlASlYARVS96U7dtNJARG+8z2rUWwmBSgFFRYM39llU1KIKYAx+8jMKEiEisoIiOIMEqIgpl1FkEczK2f2k24yxSLkxAKCSwk1kTQVuvog1hCo3nyOR4iebZG+AFdFRbQm1KAggGmeRsIwMCnniF49ux83Fx9Ppb/3Rn++LvemACKszRKrOkqo674Fs7WyjimU6mDVobCqTARCkp9eXUxEErLxhZksw83aK8WC5zJxLQQZk0doaAyqgQ+w90NIb721MidSMKRcUVXXOuuD33ZCFE1LrrArHLIiQchlEEt+UABVAUIoqNb6ZWzsXfPHk1s+/dOczX3w4uf6Dp1vOSQEMGVVVUCIqpRhjfBVu
AsboAqAzPqgx1jnng3F2GgriDXZcFZCFUyxlmmYH7axuMJlf+fJfvDq72Dx5L9A0DNsoEocyxLEhSyyLQBmh25fHH51D1ZiQu+HSKhSWllg4KcgY+zFmtSicV7M6Zi2stUXV4oQEIEs2yOOYfW1F8XpMpYjz5mZ0GUjHcbxmVgFLZGt/I9FTECQVICjorFFLdx7cVemePX4qySIgFsECZMCAOJCszMx8gxZjaapAgDLx5ellzuJIvHdFJU/RKhBAZWzl3H4/WmMlCxUQ0YoIhOvgBs3rrjs5rOTsVJILQNt97+bLtjX9OIAyGmMAlgdNzP3F1bhaLX/uS689ev/9Z6dPyUIsHB3tpimrKoJyqSr94Qcf/Ub5tQfzu/FqI8U7AlFA1ZpcPcW+nyYWG5pupx88vYwMDMhZUi6Lw+V+vTXuJ6MiAPCuH6eUCpAqGVEOwY59cYrWWBAs+6RRbGWsQQFo5iExg4o3CAq+sX4RbG3Q2pwTlcqCguEs4AwULgaoFBjH4oFMRaCCCoaVWFDUslhEo5YTGLLIN5xGB8ZqAi3ibZ2TDlRuLQ9kGoG0cZR56ofYACRFsoQise+tLwzVeZ94ikqgDMyfPIxQNAACl1IYCouzijBk3k9pHPvD0MxcDdg9Y76OiZEQ0Ttsgq1rgjRBkto6USBDQIKkiFBSBsDCIlAEEYjqhupgIWXtJ1uHHJMWms+PrDUv3/383aNXSiwpp9WqDq7t54v17plOvcmDOHex39lpWmJuH7wcgk2cQmgKQi5IhAWAbigtary6frN+63vfPL/88Bd/7a+8+MqnwZrCWpTIelAATnm3v3z3+Te/8da949t/+f4drpupsKoikiqkKTKzItsqzA8NVX2YszWJpl3eTixmTegdVW1VVZ6V0zgxg2YB0sWqSpFLBGvEWO7TWFALiyV7OGuGbnKVNcIZDLXzYSxx1x2frIpRTdlLyOe97jNBfOnuatzEfDVM+1IvV9Dw1a4jKatVU0R4S3kSEXjnvYuDg6Wy9CkNcaigIgBjiY0AKRJ1+7FkcsFaM2urqqr8ttu62ubM4xSnnj0Zj2g9SaDbJ0c/fvdjQispM0Id3M0KSGTZhhdefP3w3gvr89/NA3MqwXvnKISqmVdu5pp52KVBslSNM7V3oe133cWz0wKIqKUUKOzF9vs9CDtrBMmQsUZzypb8OORQuf04kfG35rWjYvMYgFhU8lS5ambsoBrAQc7zOhTJ41BWt+7kbphkrKpF6tJM4ci6M4S6CZByQLTW+MOmDsEsq+J1mjKJGTuTJy2CYAQ1OXeD36KiElNGESFwBIJiVSuDqEoAkktBiDEjaOMdAsVSBLU1RjIDQu3tVNgoGoOYFRQMYi4SY8rTyCmq88Z6G/D+S586Pd1szwbtE8aSpG8Paixydf7s9CJWzi9WK0QoUggtKXLO3W6X8zSbtyG0xgZUMhiMIQqgVqdxtJKYp+//yR9960++vj17frxcbbbvv3R40FN13pVuPf3wezuv4Lpp1gQnVAMW8pCJcn706P1Pf/mVq8m+/ku/8cajd4Wng7qio6UuqzKNVZWOVqbrMAkba8aiMZbNfjqcz05m8369JgJryAFURpZQ8vlFXEdIInIDZxCDhAiNtzcgRY6qLJmZRSrvhilNzBbUAgZrEBSErQFryCCUXBQwZzVkVdWAqICqGjQIaICIABBGTgJGtIAIAJMqgJZYpmkaVbucuYgzxgJ5S5L1sHIVVUfHd7ptHqP2UdGwrnf37h+Rc9vNcBH3XRJVTD/x3TSzpq6rNEWOWVGdxQwKpUjSstnNUlMBSdHIeNmPYsB7GOP4wsHhs8vdoq3VUEzcTdPdRTU/bt7+8KIKXliu172KhmCvNoOxDsF0MU2qn3v9S0sauPL1wVxUYpfndTDGiLPnFxO1fPFs9/Wvf//DZ8961uVy1u1Sl6Zq5oPHUqDbDe+8+4zIc0pZtHA+OgwVsPHh5Rdvfeftd+vgPQJY6314fH667ycfnEppyExIhdNlt06mqHXjmM+uu5MmnF5u2uU8mND349nlxpHervzZfu9Ds+dc5XQcajugs3bTlymWqQwCUEqeL8JHp5uS1Cv84L3T1vvbWf/ST79Whs4KPz/ftWRqwjvL5vZyhklXy3ks+ODOaneJcZx6KUyFbblO++uPziQ/CG27j9Ns0UjOs6UvLKPko8XR9Wa2Xq+vp9HOQxymPcuqqoiFphgyNgRf+dxrcbx+50fvEIOvkYIlgEVjvYme4OzqotKZKsjEq7blkR89fR5TTprjbjcmEYd7TVjryXK52/XW0MmtxfV+mpjBUxK5/8rJ+oPrxaLqYwQRRKrr+ul5n4zhi2HWOFGovF/WodvtjdqTpT84Xpx3a1tXXTeKCAv1ALaCOrj78zlccmhDqHSWNRVNQ3xycV5XNkp57d7ho/Pd5a5viezMr0J4qa1PTzfQBOuq1GeHRorsu3G3GerK1a3fdz1ZsxnWUAoBFMkxZWfABtvW9X4/CsLZ5Wa1Cm3fO+fHcQrLOTh3fr39V1JF7awdVCVNtw7D3eN67KaulLatJzHkDEcJZE/Xw0FtZ4KvtE0W2U5pAtqOqglC0Zmzq1lVO7PvlMjWVWvIFWuvu5g176dYmVhXtlFMpVxv+9u3F4e1fnx+0Vb2uHVWYcBsXF5hWplZVTVnu4mIRFw3TrMZSiGffd5sD9t5W0ERo0BTyolHY2zVmO31YKwt4/hP//Hv/Obf/M354d3P/yJ952vfPVv3ztqi5YYNqyIIIKolJba4WM5TyuMYS1Tn3NV298qD2x8+enbdp9qFiw3eWt576aDtrvcCqb7cf3z+tW8/+6/nt6qxH//ufxbu1u1rt042F/oVv/wH/4v/w+W5XK1PP/xjev3ki/2Y6/nBF/+1X37x4MXdB+fffXY125SX9MH4frryHQsfcbC+iesyuuvNk9Oq8vPWB4M1iG8dkrfWllQYSZOAUlYBpCSKRpmAsyYWAigxr2ZVy2a/L+uzy5rtyd0771xfgVVTUWNdW9GDo1m/Tq6y6/Mu7idkeXD3MJUybOLJatF1fRpTDP2to+aFk/nzq/7quqs93L6/PM3XwYaP1yMLXPMUMB8tZyUyKBpRFLje9pWf5Rs/EpMV2k951vrGUATcllKhuoBTzsM0mU1AMrVxJ/O5x3HCeHR/+eGjs+thD2na9llUwWmNgIRKgJaCb2QajTHOUinFWmCWdd/Xvr59+9AIPnl66m0QpikWb90wDtW8NgwZxAbfZSmoC6Fls9qs11klxdLzcD1FW4fIPOQC5ZO4NTOhoIiCIKLJqViLSHgTAkHUG5AOEDKChxv6EygjWxVUe5POQALUImwrQzd6s6QgqgjsWYQNFkFbmEpCKTCOwKygoKyIAKQCfLOZB0SDqKiIwKBqFB0gKwIpALBKAlEtQJE1JSiZVEHAICgaIVIgNYgISMpgqCgrECIaLAhGGSODCCYVQlFEQqqd802b6lXpBlMmdMG6OSoerW6Z0I7Y5OF8uoSzy4+uLj6QuDZ5kt02WM+ECOAAs0pidazAZb9fPz9
7W2l15/jwzsIYY1lxyvvdfqpmLRlVDwiIiDc8UkQAYIuo5OrD2yDstc1DBzwBRhg0D9R3bl+wWGIGZSaj1mQAMcgEhhDBqIBYS+jVrSw1OdfAVo5fWGhdiwqoSVHKBCYDiAoWDQyOpfCwyWDRK1gieyCL+7Q7z2lXkNQYUrCgkFFQ1SAoqcVSECKTZw1WoxcFhawKBkEQodyE7cFOnI0hASFjbi4cBECFUiQWqDw5p1WtXa8KCCqESAZIRdUIAN4oP34SwULAUhhA0SICqMGEyMaoSj3myppUmFHVIKFR0U/es1ThRrtiiIhAheDGEyWA/3KTzJ5MG/zxctYNe0kZAZRIjSmqjWod/IGB3frsGz/84S6ZIWG68fcxmJt3w6JZdF6beeVfOT5wYKYYQ6DEWUsWW513223JKmRBchKrxjsrfZo7e9LMTrcXoOQBSLUhY1H3RTIgcVks6jENtxfzgem98+t9Uc3sgDbTIAhjEbBoVb2IISqgGSSWAqScpXJWjFXAKeX1fmpmVezSS194/S/+5i9LLaVKD64RLTKL8SRyE/0iax3nRIDWGuUCUoRZiigriVrQKoSJjLNkEt+ImNa7cTmfn5wc9PtOcWro4PzDpxYhoFqRbuhs43ZTNMEsQxj7ToR95YcJtlO5Pasx7etQH1a+pGkR7KPL7r3z6wQUFS2Z1TKkXAhhXrW1Can0fT+QNUSEzmDhUNfqnKnLvs+1D62xlsswjMjMpTAYrKw1bozjyeGtytj9rjfBlDFxLsXK53/u5Y8ef2iMQXRkwVoih4aklNwPAwCBEBgUQBfsLHgA+/CVF37w9g+aeTsOiUWdc6EK09gBWWd9zNLO2rGf0KD3jqVMY7KZqEIBevvRs5ce/tRL95aXT8fgV4uWZr7xgWLeoTHjlCpnsyoZ7LbTepcvz65fffjSX/r5nz1fb3704WmB3CzqKcaUk6qAge8/evb202efe+3Ba68+XH90NmYsgIDUT3DXr5yZgqUQQr9+ZkHZOKro3p1Dn6O1iFwRwtnN4VnKgBKLMKlxWFTJUVcKtYZswLapV/PufD1bLqvlLBhbACCY3b73VqwpeUjeG7WYxjHMWgHKSjll5UgJhInHLOrSmEFBalGijGKArEeYBBVklCK2iGQu6HE2C1LUIslNQ5eEy5By6YiXWFlXQKAryc/q/TRKEQZRUhcoJzYsMQ29iLWIIqWIERJUROOIUh6hcGEApDxmcFZV+winV/3dgwCltCV/6uTg8W67i9OQuGqcDU7JpFg4KxIMcfQuoCKQUWZltsaIgIhaQywihQV5ezVaS1zSMCTybVOHk6MXvVv0JbazOqCzNge1t49eSNenYAyX2LQ0rJ8+jY+XFE+WCy5SL46QEheuQ1CHIhNpsGYmsXzw429/8P63f/Wv/o2/cv+vK+kk2VkfKgfkQI1yAXRhtjp48fPt0Tf20/aNb37z5Z/55eJrREQMaFBVvXMx4vl28y+++0dvP/72Tq62z8/SNgIHZSwIxnu0tvJUclZVEW5qLyjO2pwTOBLBbZ5C4ySXClAFOI+EBYkisyqKI2No1Sy67dYa++zq6qWjZcnUTzpr2oDV9a7vh8JZi5fFnTm0Dqd4edFV1msOIDT2xVnPHIbuUlXINpmNwUKWjm5Xu71q1jKhA0+sKoI2W+sPTg6zMAE9HaJwVqF+im5WpbGkBsjU09ATqhYt5if0xnlNVcjz6qVbJ83Xab9Ls8oZbwMZTOLJuMKSckCq24a9odqsjg6evfUOjgkEcskIkmJMUbBAheQNlVIWVUsonJMBUznvyY4xsTXgqLKEKt6FSXNT12DsyOMCLRWtXCGaTIFKaK4QHUnhknvS7MXfX9Qf7AgMBueQXSFtV1XtK7Zmn8a86xVBh3q8WXEqSOQQCKCwJqSMFCwZUijqEGuBY+c8UhYFZ7qYC1mvAiSqbAmCd65kRyYWUAUCcM5ZUUPFgWEuACjMY9fFZuarFhV9qMKtey9+5jPPd99bP31qX3/ZO+t8qMPC2tnZ5fOLiwtXVdY5a2zJooWeP3r23js/2m4uv/Krv/rCizMQLbHEQZq2IoVuu21mbRz6N7/zzY/ef2d7eX333qEXfune4QL18UdnEockgnZ23fWLXBQ55TIUUd+ezJdl260HiFdxMasfvf17vj30jd0/Of343R/80i/81HB6WtPmwa3Zej9OkkZRazBHWcf45MOdAQOag6ssAjGmMU9Wr8/WOapmJqLEoii1pVLYGHFEVdGFR0ZDAIkLGVq0HvqCBkjREFkCg2pBHSAoEFgGzFoEKataBc7s0ACKM1aUEqhRccyxJKfEJTtDzkAUNaTECCoG1DijClmYmGOM8/rg6nwEPrz70qe/+vvfAsHMccgxH82MCuc4TMW1tmQM1ScewNXhyim+f3aphry31hmwygpEpNZ0E2eVw3m1HUbjXQiUurgIc+9q5wYuJYQwjPHOPPw7v/mXfuerf+ScooCzrp8mADWB6spFwMt1p4Ao5fd+93f/Z3/zX+88r3c9KdbBW2v3+54qd3RQry92f/gH3zm93FtTsaZ1N4KqX9RDyiICRplMX8a2rYpCZdE7fHAy3w1jlPLk7MnhrAaji7bNGV1d9+N4cLQaprE1rq3bmeMYx1TGl27NZ61//+MdAiWFdj4vqA9OluMwXg+T9eGFu6tyuYPGJ9WLMZqz9dIZAZSil12qKuOrsNt0SpiSGEecob/eff6F5cHSOI3nm1FzQbGVcYeH7e3Z3ADXy2oSJbGboXw49Fc5kpTtfhxicd4aX//5x6eAYNCE5+7+0er+7dl66J/tt9XOiinOmsPl/N3TSwduDzE4V1PgLjk1v/r5V3f91T7bNsyiDmBBhBfLxhV1Hs6vLl85eKlf97Gb+t0AUcs49V2fEAqIb4ypTJfL2CWj5vLJBhQKYW780I2boW+8n1X+4w+vHdCQhzz2x6ujUNePn16BberaMo3X28F523prTbaErz9cfvR8//bzbTsLOWeI053VIjp3uem8kjo9224Wy4aC63b7oU+tIxC62nT37x7laTzddAtv7h3Mnz7ZjENeVtPJyRJEH11tBQEMNZVNmScWBhsRr3bbxnlWTTHOgwFUFaoNbYfeW3ejmxlKeuvx+aduLWurm2HYTIL7GPuoUv6VUZEUSGNuK7tqKFBJyKJUV3WJ2TocElyNk7f24+uYp3QwX3z2xft//sa3YHYQCzStu2UCsXijTLLOud/3HuDOQX1nsWht6nJ8+cWjsduknuOYgnOFgSOytbO2jmNsHPT9/vhgEZyDia6e0W7S/Qg58Sz4NEGw/tmmV2e2bPqrDYAgQHCmbt1iER4/+jiNKSXGzL6y17tnTx+/+4tf+enLb16+8tLJNE3bPqMhEQAANESEKkqAyrLfjcql8kadvXPvuJr5Dz5+mmKyKOpRgIh06Pbbzd5ao2l85cUXTl4JL79y+8dv/KD188+89MLVxXWJ5XB2SNIfH9EXvvQL3/vOj8ZYxow1uwcvP1wE7Pbn82axWDW2aSq36LYX6KAKLmuBHMnKfH6DcG
MWNO4TkYJkdt6KMrOIKIERUGfo5li3rSrEHGNy5AzQzPrY8aB6jfLxu+8Zr5yVJS+aqrssb7z1JLOxDoPBdhFCoKmP1odgsd8Mlqa7B6ZtLFF5fnVV1LVNQ5hjTLNFbSHMs6SSVJikzI29vt7oGIE0eFsYDCkRWO8EsUuFdumFO0dTP4I2zdwVLO2i3m16BPvx5aZq28ZWaTfZJvCo51f549NuNxXhnQBU3lYOp26qvC8Kfc513TCCckbQaYrOuLYJFOzVuk8g86p+eP9ujGOX9qnorFkYap01rDwMvRIdHs36zW693U19unX7sCiMaTdo3iVovZumwkUdfrIKQFVVpLAC6Q1QhgUBDRKB3oBmCIBVDKFRNmBJMRfNAmrVVCqokWTyWlQqITOBiQKqyMypsKi3lkhVigpCIRG6KUjSjfhKmQA+kZYhsv6EKgNSBTQzRKNGkEcBxSLIrEOCnPWGelyKEBIhI7Jz6iwWhcxKqGRAQBVoykpEBAbIqFFgUdBPBFvGNHVD0LTzozy76DfX47i2gFqSD40AHbSHGvPm+Q/L1ff2/W7oOo5ZDRkIUVGUSAHRGAUikwuXmFDKj9/6k0fd9QvhN07C4uGtkx++8f1vvfVPHj9+/9/+q3/zr/3q3yysahUQAW+4RaiAnEEn1nKdrRBV1ojCFGGSSsQz1zlnk7eeMxgEIixFVIHQpMICCMjtjNgwB0xzV5rJrujk3v2grWYdN9uu71NhX9fKmmMZY46ZmgOjHhKzqWzHpfJaNbl6CFjh1ePCSljAqCKiMCZmZ4AQmdWCTWCnzLXjKkBKhdUrKOsNbAulKKlYQBAxNxMuVGFVACTkAtNUfI0E4GqjhknpZjR0g7hEo6h4g6VGhJtdoCEAICRVBFEozMZjG0LXd6COmVUQ0TDcjKQ+oR0RKhlEQgIVuAl4oyLwJ+IgAIApw+pwSTnFaRzHUTI7Z3ZR0NKitS3oauHefvzObrt/tu20qopGvDlQVRVWFjFEB7W737azuk1RN1OyFssUlcVZe7q5eLw+V4XKEzF6b5VRkawjQ/B8v+0Lj4krtG0VNCeRQogZEA1exsETraf4fMpXUoZcvGrMecgMN5eO6H5MgdAbgJRFRBAMoiB6Y7sxgrXkbZfKeoq3bx883e3OO/Q78Uu8f/ee5KTApQAiGGsk5Ru0mBRREet8CJV1NVJtbW20s+AMFEQnZFOZCKlIiYYeb7fLAQ5mdWK+dbBqZnOO20GGtz58MiTOFoHM3Yf3GpWsWkQ5BO+dtwYytwwVZEQ9vdx9a9vtSxEx5BC9JgDLUHuHSHEqA7NqcZXzIez6qZ5Xtw6OVWVzfpWYFTTl1DjDuazmbRynmAuqWBYSnYfQ7S42uRhyWBxkTXF6+NoRXz969IN3pHgyNuWYRJwPVfAxJjBqDFRi5KbDyKI5l3HPuSMXhi6pIuqNIlK9sYVLPwyLts1JDWBMua4tEYElZ60AeONY8fr5+si5PffF4GJR991W7CwWqC2E4JxFBF4uWlSdkrDg99780cVx87Nf+MLq4OQHb72/myI6h8wlcyYAF/7gO+9++tXXXr778FtvPbZEIQQ0AF4fnz0TLqt5m5VPr/fFeCaar+qTQ3/x0UZInSD+xP1EBm+MUYUUiHNiMjSrra18hdR49eNw0PjU9+CMCyACBaWtQ8mp9AzZK5D1lhoPjgiVsEAqFfrg7PU4IblpKKxCgCVnAWu8sUSkLCOnia0aLjDFAkZAsQKBzElycI2xxlUkY8JAyeh17Crjg1LMJZaUgQwhIzEZBQOV79JkDQlKsiQiKaeGPCmAilFrRQAIrSnMAMiqotBzcYK7PNyazY6pnnJ5ebmaJD++WqsAF46Zl00w3my2HZJlAUdoCJkhg2YurKoEZJQQlEtGZIGr9eBdQueXqxZzAkLvqylOTe1inoI1yIWyM9CywCRjd7E1Gb0Nu2E8u7p8eK8JGoVslpJiCiYA07Jq03b6//yD/8dXvvKlf+vf+/fIHGgpSBSoNsYBEP9kOSuRINx59Wf+J//h//Fr/+gf/uN/9F/97cMH9z79RYHEXAw57zyInl9e/V///v/7o8u32yII47jLqM54Q4ySi0rSqMMYCwMzoIN5FTgXAvKOhhJZyNeuqqwHKmPSQinDVCTUMp/XYoQdDn1SVlDdd6NfBBUFS6ato7FXQxzVEiJYPTlo7t5qHr2/GXbRZQHmkjMAzxYhGRjSYLzL49D4uaimtAdAzLKoTUTuR8mlpCnNFp6c6abIicAa58xsNStDNoLCwlIIlMd0dHBw1feGoIDqTyr5iNjWzfnp5UGbv/hLr337a+8MOzMhFOtadyMm5smUZRvAmIvCy9VJLPT4o48JbsRTCgaZOQ9AkefWj2OZWQqSDFGXE3psLBjSQlgQYubWiie0FlpFIrRE2drdFA15snqVt7N66Y1LU54vjsbNuozTvA6Ew7IKlqDPeRH8wazZ5XGxarxviqf95V6T+qpxGGonhFYtSS6VN6iYM1fOWoQ45UVFB01AwZJFWZKy8a7PhYJzYlri2mDJhQhLygyogMONyASEWUnhBuWlAlyUFVhhyrllIeNzQbL++N4L8+Wjs7Pz9fXm6PglW7WW6qVZiLoseYgToR4cHiDL6fPzr/7R15+dfmCYf/aXf9kYQoI47QHABTd1yVqddpfXT9752j/5LVJ//ay/45o7q1CbkqfuU59/8PgD2Vz321QqB3eO2zFOpYOKcHlcaUn1olrculcGnfqtTo4f3j++95n9B79/GPxPVeXND37wnR99//zZ9cnBfD9s9/0kIEQGlJ5sx0+9/iI8PgdWVvGWADBGOB9GAoNKMRdjsSghoDUoqgiozG1wTCYVjTE5YwxCcBYUUpJPAASiWFhSAmNVaR95EmJEY4yAAtKkGgySChk0NxlvlgYpAVpr+SYCDcqSCZFUf2KgxZuLWQAePLz34Z+++cqrn1ufXysWRHGgVTB5GkoJhqDyxge7i9E39c0qWLS2deHxMytEqipZcmEgaOdhHNOSzML7Vw8a5/HR+eSynDh/ezG7WndAttungngwr4+XzZ/96EePrrdN26SOFRgRbfCx8ChqEJZNmKacE1wO05df/sKfPfoTJ36z66WdXV/tqrbafHjeX/fX3fbkhYPzfTcMsbBYNU3rq0UTIu83O0HMQE83/U/dOdJJ+xgbb1JkUTw+OHry8QWAHraVTtzt4lFlvcP1Ns0aP6WYNQOU6936l/7Cz55tr99/dJkZun6Y1ysh3XJ88/HjW031wvywj3mdEgV3vRt9Y0DhfNvdfuHuEnHXJbZaLHLmo8MZEhpLCTiVXBnizJrK5abUFJrl3LlwVAejpGLGqSiiCe74cPV+N56V8cuvv/L0rbeCq4vmEWVe+VzYeooZ9pEXAnXdfvDkacbqvY8vFm2Dxopka9WzokJj6eGLDz56fNo086cXewChO8sUsPHVKLzf9beqgxgnxmpRz2FCLTLmBE4jD4XjfBayoXo1f/zRhynrmLmqHBoTvOu78aD166u1VXNSNV/47Et/8r0fhbYxg
dqZw9AM0zSM+XjWfHi5TuIrT6xQRJxyVVU85fcfX5/vp9NS7iBoLvO6iUUVAQpsc25mJkqedt3RalGTx6Gz81lCKFrGMW/7fBETenu23TqLS1PFNFxdb+4cLUj52dUA1o5TTqUAAXprjauDV8IRGSt/tu8rRx6x5LKoK+9dKTyklEmZcDOklrBTbY/m1Xy235/OfrIKfmJAawwVc3tVmRqV4qbvTWgUZOKsXbGEi4U3xmwzZwqbOO0+fv/Fl+5/2PVbKb3SsTWVpetpOge5TGXKiAAX1/tFN8ytZWsX1aydpjqAW4axz9uu+FUNwMvD46P5fHN+3s5XPGGK9uq0T4kzq0UCVUSOAtspeV/tYtr35QD5eNWcdmJDddDaEqd7925/9221lqQAgSGnf/b1P3nt069+5nO/+K2rq9def/j2W8/6zJyBbviyAMYY4wxZUzg3jQdRJfNgsdycXz9/+mQ2a642Y1v7X/ji69/6wXfEwm7YNrOGXLUul8DUPz2FCoYib3341ACNmt9/9mEIxCJsSlf6vuit+w9f++yn21t31/1wfr5mVzbjWB22edwWL2RVpDCpkuSSjNUbDg4zWufRGmVBVOesCjCCtRhcmEYmY/fD5F1lKZAyIqHBOE0NUkmowffAQ5KT1jUOG088pv01TFPIipiY5v741u3Ti2ddP7WV6ca0qODBoT05rljt99957lbt4aLmNDkHu+tNbRddjDGKq9z2KjZh9uLRT737/BudQoWqRNb7PqbMVBiyKhO+sFyqET+vYedU2RsbYzZKaM1l13/6wYt1ZVpnSgn7IT4+f7IdsgleNTNzTLzd9hbhet81dZ2LTtIRZkICMlVVK1CSIhnqedPtR2Lsu251UPvacUoZJ+spVL4UkEKJYJuL9c0+csfE+1TGMY4TWG1nc2ONd2Uquan9T1JFKkUElEWEkBXQoFUkBFQ2CkCIqgaRECqDyFzUsGC6+WtWVRFfnAdnCUjEOXbAiUGMB3BqmBU5iypJAXACqnrj3BFGARQltQTeGEZUQVZVBDJia9M0jlBBtIiMIlkwMymr8k3MRG6kV9ZyCOq9iKoWYqUs4AhVNSkpGFGUm3ITABowCKxQFMaUMhRjwc/adnGvrp9ISX1R7PZRJDhXtAQtU7fdTgPfPFcNKYUCckOoMcpooBgiUEYENOq12O5s9/0n3/6gAgzNTDvuy2a2Cuvt85Im4yoRBAOqojd8qKI4uvXT52n7uHKSlREho2HnNWQNbCaYsWyyQPaSJYM6NJGLAgpSkULWJscYTDY0jtLOlweHi7q+pZf9uN2NXadaQitIo4pNk5HoORneZxMAg3FN8gekKiMAGYAT03qbLnF8nmGgktGQUUABts6AMgKCYCylFggWwEmX1RBxSWQtIBXNAQgAbhqMYkBBAUBBCZAUOIsKsohrjHqCCKAAZMjiJy5cRJWbHiKyKDMyA4JaY1RUhK3FwyqsmvZZTjnKgLBoQk24i2nIBYGQrAogKSEQqQIIASBlvkEUfQIqAoBbBysWPTmc56k/PnAH7XK76w/mNRFiii/eXg2pXHbd5T5N6LfbnJM4bxVUkAWUrHHOv3h0z2Jab/tJkcQsZt4Za0mT4cvhEhTo5uWNDBgjCFG0aVzwrhunUTQR1MTOIRofx+wIjTOsNBG4ym9iuhrHCRktVqbqYxQEYxAKAN90HDATxIkNQla9+V8CoG/CmAuBqHWbLO9d7Sc6/5Pvf/Tf//W/cHn5aL8flsMeAIwxiKKiioiEzMXcgMuExylOWci6um6Guh1TosTk6jTGWABQRXRiVUjiVEj7Ls/aV9v54W466zbrYcxDLIAE1o4Fa1fff/hCMr5uVhWEtL9aQK40n6533/rw9LKPjEatAWWj5kZ+plJW7ZwoxDxkKKUIAaWU1FCXy/560xhMIsaYSiXnvN5ERwYV+iFbY1G0REm5d94UYeaihoiIRdXbOy/e3u9PrWJGW3K6SdkLTHW9OLh96/nFhSfAonWogSSPSUDu3FsN3bA8qMma3VVnjTFEMUdQAUJrnTULU8uAvVVIRVkYiUDVAjjEAnC6G147oKOD5vmW2za0Jmxj79owxgKmsKHaGbBiDDgDLOpDe7bV3/v6d7/y5c/84ld+/o+/+d3IgzN2Giblosb+8J3HH3y0/tmf+dIffe+7ZwMrUdFyOfKRtaR0vt1FZ9xyMUP/wsFihPTkvVMzino7TVO1+uTF6M7J8RQLZyko9bKylfcON9sYmuZo3vbX+xrMxW4Y+hxJx1oZFQ1ZMdxD3mmepJ6bMHPWSE4ToeS096UeBu059pFFRUTJOkAGfxMBFRFFg4WEERNLKZxBmpm1Mw8OVICUOBcVkaLGG19VhE6BFJ1t3TpFcezQOrRUShGJpQz5uvWtWoNFiAhQG2+9sZLymHpFD6QKZIzDG/c2EFmMKFcx6zCpoReOD/Plroz9neV8eft2n4azKfZDvGLoOrbMwCCaE4tTA6DWUgZlVSQyziAzkHHOskoqACogaRw2vp716/ONc217eDX2deOj+qmfZJKpj+iKtTMqoykDcbgan8V7D7r1FoqN+11UlKymvest/Okf/fPvfPtrf/t/+D996fXPZIGkYJ0HRSKjgAB0c+tVoJKyICQWv7z7xb/x70wr+Bd/+vv/+vEL7XyRMxcnmUucyvfee2tI2xdfvHP9ow+KZCIPgoCopHVrXOUqW+2vB0Q3TalqcBxSHYKS3n/p5fT48bAbnLBj2sfCCYSNIaqCNYVLn5EYauOy5oFVzMG86YZ9f3ZZz1snOQltnl9btnlULaB9Xn94ljdx2EQBTCWWWGpvRISEJE4FSts2pfCURlQka7ohceF9FmAiFGyNO6rY2lI451xSntLeqKEqlCGpSIyFS0ZgAeuDi8oIbrZsAQYAADVIFFqrYWKKd46dHNXXaM7XsYtyEIJRUIBuZDHFtO0LL7/88dvvdONUsmERAPUhBGIqpVGsLd46msccyRgla8nsUyJndqlkox5k2PcvHa4EJ4Hig09FlbPxVa1m4d3Ui7riW4BcAFLbHJiphTQpR09m4XBluGfIbJJG1mws2drZ5bya4rjrUKw3lHw0QDd6rkBklG0gSwhkmKEy3hkLCKhoLOVcipRsAMC01jcYg7fe0Sg6MlzGwswkUAE4AkOYY7GWUlZDVFSSSF+YxinsersIxuOUDLuDVz/72d3uTy5PTx++cF/YkPG2dberKuUxx2HiPFY07scP3n778vRxHPrD1dKFWjIr63i1ySVVokmcQH77je89evNNQh263TTwQbOoTLJB0n5yXu7eCj/z8smb711ql2vi0DoDxRuTp6HP2DG5SDXpGKeju/V4eNLef9AeL+f04Ntf/cO3zvlHZ/t14kVdjha+L3kqaIi4SHXnRfvw82bxDnQJVK01eFPYVhQVstYCGiIjUhSTQus8GfCIZE1h9cEVFQcMAK01AWAPUADGIvNAZAAIE0JJJaqZEFUoWARga62gIQRmScwswsKEBGg0swKBSi4ioMYiIRVkQZqKEGFBFEtuDmE1t+3RbPXq
G1//Z0WiUjG5kLEH8+p021ury2VrqkrRVFX45AS5TKUIISZAhwAWjXMIaABPavvSvMUJfcTAeuDwhZOFRs5TASBrlRyBNU1tz07XHz67rt2SCLCipFJVGNpmt9lxTraq1ruRC1TBqzF/5+/9F70MxbmTg9W0/zgVrYLllANikvHxx7vN2AWLpGStCyGUbhzTVFc2KzRNU6ex5FQFqcjVbfv8auwzXwwdZ1nV4eTo6PGTs8zjojl4fNUNpRDbBH7fJ+D46Rdf3W927z29KBlr55YLe7ysNuN0ecWFMFsBLV3kKLnPed3nBdHMmma2eLze18Y4h8tZOJ/GGFPwM1/76+s9G7h7PK8LypAWxpIxdWjms7lAbr0dBymA1nvvbUEgX71x9lEqmlL0LsTisAy11YXx1cLeursYEd5862LfXe32/u7dg/U47YdJBIwxP/PZT8e3Pnj33cdOXECz66YpyWrJdx6sHn98dvbo42kcVkcLVl651fHtxeOnb//0nZ/eXXQTdk+G530e796+df7k0gpwP0UxabtmEQBkvNGowD5K4QJQk5QkIJv01juPZ01jnDtd715fHETBfsirplbi1aq+f/vek6ePV42fROu2udpNPrndZnALv6hM6y0b2O2HyhgDag22zezlFx/88O2PAHFVeLmql51XlWppF2GBBdXRMGDcT8x2Zmy7qPaP+6txXO/Sp1+8jbZ+vt2RsVPiaZrmrfVolweHz68uEhfv0TlXW6yIgrNkzXK2uNx0rOw8plLAyu17h8+fX/uGFHV5uMg5/yujohT56HipPF1tdgY02EAGg7fTGI1BIgBlJ6AlWSBv7JjyYn5UDWkuxRU4aANC2We+3I9Z1DhEIEWICPPKAvPHHz5qhA/q+qW7h1eX8c7tsFy1+7I1YsPQHvijiPnps4tuoCFmY5yxYLgQwuFqsUtjPxRjoHZYOThoqsM6PPngSlQ2V9v2uD57drqq6xHZETIrGcNF/+B3/uR/9D/+X/7sp778p9/76q2Fv9yXrmRj8WaX0jSOletglM3tW/PK26HP+8225JgSXVyP9Xw+gfnzt9/MQphKqNu6bZerWePr/X5M4uvKIFqpwtgPLGqsC7PZGPXJJomb3bv34IUXXq4WzVgUc2Zjn5x2tpr55UGUkYoHFGJhkbqt6UbvjWCstZbQ2hRL2wQyBtBySbFw44MoNHUTuYTKIxCXTKjO2+CtpFLPXQiQtVQOK+tqUmdp0+0rrWJfkMhatYjeaLfvHHljbRczI5OX119/+fvvnj653E/U2OKaAgWVCFxd3Tk5+e5bTzZCfpiENFTwg0c/FGSPWDnnrGMtse+LEIMBEgFt503PXT1vKaLVQmxFsLIW0Z3UdkFYYh6Bn59ebXc7JS0pVXXNHI2wgE5RZk1FhtAg56yMwdG8nffbLlhrCKZuxFCh4UXlGofjyMoAZAkKs6ZpbHwYxwnRgOiw74KpakNZebPbWgSwpqn92I9cUo1knO/3n3SSRRSQlAQAbxAuqliKWFAEvCnoIMJNOkMRlJBZiiIoqCoX0CJsNGTUSrUh9lZYFFEzAyuoCigSIBhjSYwiqkEAFgRUVCAwnojEErJAYskCjDCrjffGW6vAijcTFSj5pk0GAICiAOpIvce2AVuDCgyj5gSpIBLqJwMCUdXCSARMaAGsqiLcrJqhH1Oc5kRNVdcnD7qrd8f9GWdep5ElDWSKsLJASU7UkRVhb00StqoGyFhaNB7QbGRg1YgGSAuLR6qJm7hBFRmuPTUnAbdTvHMwi92OKiHfqggQIDkWBWxQ/en+j2N4Sj6brDwpqAEAMEhUGLOZaXWcc6ECllSBlZk5i9wAelHQU33gMINDPG7D8awGypNJhgqyatHa0hCZEHNkTSQKhcWgATLZ2nogOWRYqnEKwDSHamYnA+mpmD2RiCXkAkpqDEnO1rrCmCeoW1AHhhjUEBGKggFjrAgQgiKQsEEDKgjm5qQLkTgXSYYQrGNfg0bAG7KagkEoIAg3VHUSAFFQVVEhAuGbLTQR6WFrhzQQECom5t7ocaD5wq87uI4FrRUlAC2ZLQAjqHwyr0IEQP2XsaJpys7i09OrWU2zZZCcX3rhyHu73vUx04dX/XbI6PxeSQFElAxwKaA3a4QUcRR9d3vloXiyTdUuqzCrvQ0oIu88ehQjW2MtgkUUVk6ZCGvrlHnbTYm1K1xXXopKZkK0xjHITbwusQiLqlaWDKKyHtUhxZhRUdVbJMTMklkMgKKSNUY0FVbRLKWZNUNMN8WRehai4TefPDv9rf/fs48/+PWf/8zs1oEixDLVzgEZ5sKFFRQNkSMwmkpWUFGy1gGBciRkBakq73sTvM1cEFFZSta+iHIp7I6OHqTNZd93/XZjnPjKZuCDpn39/t2WzNSPqDA3DHGTeSLQJxdXH15vr6fMBhRFUQGFsxJojcTM/TTWNTRLP8VcouANbacIZGUR9MYQzupmHEcuqmTQmG4/WjKWKJdcWAQl1MGS8S6AAqCZcrEzuxk7ve4VDQtba9ASRgaV/W6b5+1qNS9jZOUCWSYOzo451Qfz49t1/N5UBq6MEZYUY2gCMyNSW1U5damUtg5FEAGKAWtsN2YAgpjBaWGYL5ssoJvU7/p2AYcz//7pvglNKugJQVEYVFmhTFkALQIqwJ99+3u/+vPVL7z22g9+9KMrzpUzkxRPFMdh38WHtw5+7Uuf+4e//62CqF6ZJTZaVWjJqK1O2lvxepOGbR9T46r53Waz2dx66ahZurffOgOAz7x0x9kq7vN2P3QxTXsumIPYuBmfbvqZD5ylqmq1XUrZgfMmAcq89dsuFlYLpEmm/egJFESRDxfzlGCKMaYcixCqIxJmVHGAHiiVrICIPlRNiTl20ThoF251GBQUQSKrQwdKJRcpag3YVOZNk4ZpT33OoW1DYS8TIxpLoqBA4H2D5pMziRwnU9CitYpM1hpvwHIpICpaWDVldg5BQUHUmqshJlUhur+czzwMw1iFsJgvtvvzbeQhjaWQASgxOSIkRDBFclV5AiljBpaCYhUqb0rWKWfyYB3kMUVnxLsUu25zFfuUCh7dXs2rE1R01hG6JGXdpZWzOnSkgPODnquLTf/Oh4+OmtDvNndfeNUE+uOvfn0a1//Bv//vL269VDKoIUTLAsJgDAAwEYKqAKFBIIOI47bb9QNnvXfnyx9+57e++o/+yy//2n93sVpkThly8eoOaXEUdqcf2hz7PgdnQ2OBOXFpKs8chzGrQhFuW5tjlIQJMCyrUZL3JJ6Ct1qUo1oTppJLymBIBKVwEZ7WyQEhAyFEngQkTsm3vglEY8oABg069MuwG/ZPPzp32KpA3bgsmRyxKhbWzMaJbwhyrILnrN5YIMMIYz+5EMBJs6g4UCJxnmbzuu/H6bozuQQEaw1XviQouez6wSm1h02wbkqxqoLyJ8+C27eW0eCYex7N4zdOr8/6T33a/pt/6y//9lffeuPNNxSIU66aWR4nJbp1fDJeXjx9731AsBZKEUfkFGzig0V1fFwLQUq8cM7aehRxBqJkNpCZmdE
ZnPopT9E3pMyGfBkHW1lyVWtryKO3FKrKGEKBcViftAtmJTTempzZJV4hnisMQ7JOALRMPF+21Bwe3m4hwfb5M5PywlhjDCTlrMyiCJV3RpQLB8KARiITWWc8kbLhSTmDOlSJ0+HRLEMqbLopdiKioMJkTC4CrPiT+DgqGJBgyRqM4+jIXpVLC2Z5eIAgFuDWajGr7eXzx2n8NOpJzmIDWWNYyzDpuNuMXXf57OzRO+8ZLJ7k+HC+XLZqKGe2rd19/OT9y8ca5kf3T/70939vuNyj5f11fOXVV+uQ9henZZOPGjdcnLvg16fnL66qLdqhi1EUqOpSca4Gch8/va4EDipMU54pr+7durju+8kehdVXf/+tt7YSm2bTSTDTcdtcdcMUAYQbx7/yC6/81te+WkpxiAaglEKqI0BlSEBF2BJawArNJFKAhqIVwrz2qkJN2MYSkFhZGeoqFKKBcykKBMLCBhUNCybFgVXQVJYsgqqET7QuWIoyFwWQXEQJkAxiZr7pFZbCNrgkctMjMdZEkQLIJb3ywmK7vqjnR0ndu48+7gsbkNYbMHS1H1UBDIFCmqbG2/gTVtHF5bYx9XxW1wjBUTdlMmABqaSFdQfGXXP+8HJ3rgqK3vk+DVfDsMnFNsYYTCk/OZ2cYN2GUnSz6dAgGRusK0NsvEcPMRdEtB7GmCow7zw/axunhoPsF22VoABq1ZgY83sfXbD3be0r73SSzDruY7Dkga2BFx6++N6zp8GQGvHOfen1F7/x/Q+Mxabx62E4nvt5FR49ehqCbw/rs+v1yw/u98OTmNQHaqw5JPzii0e//c0fKTjjKVT27u3lxfW2SxyCjzFlkduH7ViGi3XPFp1BYPAGm1Dt9jsK5tZhW9Y9ARTnoTD38V4dlPkhwN3Gt0TLWa3ghElZ1KhKrNuG2XHhfpystZfXa07D7SpcPvr4bjs/rObJ17ePKmOnkjkPyZH50u35oja5G5uDZvHC7IMne4lc1+Hi0cevPzy+OL0YJhgsfHy93Y/Zp7TZj09Pr8S7WahSPx0s28R4fdHVft5H6GJeD0+fbzcOw5Nn275PJ42HLGOXp8xTSof1rAooJQ5pOj6a1UeLPPKDF08+fHpFYC4vO2vp3v2ZnbVxnQns7YO6qarHz89caM7Pr4LxiIiqaZvKrvu5L/yFjOYPvvftk6PF1A+hskZg2A2FjCM8mbXnl1d9zlLKxoMhsY6GiVWJh+nIhqEbD72dDFTGqOYu73/zL//yH37njbP95tmmO5kvzi8uk5TKUjA1gY7jYMDfOVwdppRLMrNw2LrddlIAMvZ0vdvuhlDbzXpYzqrg7HrbHQZDedpfX/3cF39mM44/fOuj/3ZU1ARnQKvGXQxT492irjwhgdbLsO15H+Osqm63s4ZiTEkERq3eenKKBe6rqYDi1UZJqBuafiITUFWYlaBxwQN0Q8m5ROc2++yht0gucNPowcFDoJqHSWZuM16S4YGLBARUMphjAZSrOOYidV13YwYr4O06lcYnVd3vp3ZpZ3faN37wyLcra+3dxcGzi4t24TXRuH/+W//07/9b/4O/vo1X6z/92mrZWJPBICqWnAiNZLVqV4fzxvmSxkXt99tNs2gf3L91enYlXPZDXli32e6WyxYUpyk5gjiO+357q7l3ezU7vdxFKKpsiRS8Gu8ddhlXy9mDhw9XL9ztrtf96drVbYxxLOujw8ZaAkZHNqURQB2QNwZYLZhpGpyzNgRnjKstGUIiZa68VSwGBbGQcZaAlVQViAL5HDOAvWG2WiSdkpALloK1XMDb6uTkeBz357seUL2j4zvz3aYHctOUSsnLxvyln//i7/zhn41azaqarWTm7WbDhSHKwardxP3BYa3Zrp9f3j6aHR4vHz26EjKaC8SsljiKBWOdZQBSrJ2O+644jOj6YeecywnaRd3txwKpCeFW60DtpuTtGLtpEgPMJU4xTRFRYkpSV85ZY2yXU4pT8CEJ9ZiI3G7ojg9WFbRkzDRFYVmPUx2CKuYkaYKYy+Gy3e5HVBglrebzugizzirKDEVtEVG1QMs6wLa7rq21YPxPUKbMCqLKigCiAmRACUQFgBCAlFAMAaAQoiIoMFpiZhWjQqCAQmAw7UQHNkN0xDUawQKgZEgJCmEBQ4Ke1DhEBSg3tGEUUGsIDABaVi0MRUAE0AAAoSVWsSoImlmAUZUQkEWJ1BowVtsKfYDgdSwyMSb2woqf5D3YWjRw43JDVEQBIDCGDGBilqJTl3IsHKggN1V9dHh89jiDSAG86kZLFggCcuNABMVQsLWAkCqXbIw7aEPjHaLRwsqixCOLM4oKKYsKEIExEMtY2C0ePqzvfPZiPdw9aUmlEJM3JWcokMf07Pn7p8MPOzrHPDErsQlMWIwQFQAC1lraQ+mHMXNICUCkIDKaG+STGHJZkwIFsaZgkDJOxvim5jxMu37UBBFhSoCpcLFcBLGgoipIBklBnlvZp+Yh2FUCx8ygIPUhetD4mKE3kkGJRNQEgygITExD1OClstB7LhMok3M30UgFBLrxuwkHb5JhAVC9mTFKyTxO2FbWmGJrnLZsBW+i+zeNWYAbWDqIMAsA3IzvURSVFUlCMLHAEKdFEzDYq5guYt5kftj4u6vFCeBlN17HgmDEYmZOoqhCBKQKqIZQfpIqMgYJtBAomZiKqxyQbse83kxJ7WbPUxL1ZUgFtVAuZAkJFCCngogGkAm6sTuog7DOgjs8mA37/VRKz2lKwkwOUYqgQ4tgDS5qR4BAOAzirBXOuUguGCM7RwjaBh+IUmFRg+oLl0DWogoLZBBWLSgKxhtWAcTam0mEkBRQQFDVGDJEy3a+HvZEaBA5lQk0i57th3/4O38Ein/tb/5lZ2maRlVtqgoBQAsSlBJFS46x5FjiFIfdOHVX6904ZRWVzJLZW2cJWVCYCdV6C2ALant48MWf+0VZPzco4zBmLmMRBvnyg7svrdpYYBVcQE3XV8rZVvjocvvo/HqXkQIRa2KpgiMHOWfJwCyEuN/n/VjmtT1ctK0BZ8Pl1R4QVMEiWQBOeRp6FWkrnzJr4Sr4IjrlhADGmspXyirM6Iy1dugTkL76+r1bh/L2W7s4QCpSB0ME1ipnUNDU9U07W7bz3WZTSinMdRsWq9m8wc3pU6NcW0POrLedcYYQ0diUyxAnh04Kc2ZiGfNEBsTZyhlQDKEZofQ9b7bisJ4ZzgxThpfuNMftMI2FjHcqMuVsrap6a2qlbWYkrZzPbP/ou9/6jZ/9/K/8hc/+8x/8KOVRQdq6ln783a99dUa//PmXf/qPj956tosRoTImDpEaP7OuIhO77S7tIeesBpfV2bg1C7KHPsybm1WwO92STMS0rMLt1Wqz3yfk7ZSf7a7UqPcNFNhudmPXG2tKV7CWqnY5TiBZBKUoGyyABsDXwUqMMQ9TyQpZtCgbRGNUk4bgPBnK4p01wXFiVQ013Wpnq6UvoEM/5C4S2pKUgUVKTsUbOw6DaTHRkDL34w7qWb1AA6JImTOXOGlpZm1BAywWxTmXUqyDabFKUy6xgLosig
rCIgCgZAwgQEzJGwMkSWG3TzFvNcmrt45Z+qu+nzV492DlS35nvYnTKGQNGTHkCBEhhAAIklMAo0QpFiH0AgrqGlcMDUnaUBdFtNTlPWQ02nE2oRs27QgpjuPVFIchT7Z2XCB3Mje6XN19dflw9+wtGrqDu/euHz//0bf+FH5of/Hnf/2zn/kCzOzIjOQM2ptHNSIDKgICKiDd3D6NgZSjokI7v+r56Mu/evjhs2/8k39mj1/48s99oZnZbj+99+GT3/vqV02KtC/VvFrev7O7vIxT5BydMZWhJJBQjDcG0TiqZjNvTZzSLMwz5we3bj0eH2+20RpL4KaYGASDVjNfStEMXKywsdYqSIkRFGxdmTBrV4tt30E/+AD73T66PL/1cEjg9nPeFWutAZulqDJnMWhyLpmgqmeOFCCbYCQxJ2ADxpg0DQeL6vaRLcaEOnT7Mu72JWXKY0XgATYDCwOAAqGgSYUb5lnjo2vAuUVdA5wDQOQU2kU/9cM69nu+uIb0ftLf/vPX7x++/hc/z2J++L3H10MRlMp7lOnqyRNNAOK5MKB6jzOPx2hOlnXwTirnp9T6edMeX+7Xu/357VvLi3VXyJLASd1k4V0Xj1erNszcWAfUvttpA8vZnSgSuQBb1AA5VZRmskFbDbFz1kmGpW9Wofc5AxkgrJwxPvj5KsxXEERjGk7PATWzWCQBYx0WIiZMzoQCxiqAorGkoApFBBGKUpYCqsHAsrZSJgEcIncRIrNFdYQBFS0BwFiEAK2lpKqCRVQYEZBZ9t2ubqytyXBDzvn54qVP/9S7b71xefr0lVde9o11DkjRFWxm4bLbbs+2F88uhrF3lenWCW2loByjN1RXcHG5/uEbb/3Jk0f/q//t/+aXf+7X/p//6d9dLmsQN1/wbAF0LQFtt9mQg7Ze5bKJ/T7HKSWRjOsxjSz3jiTFzWHL3hpD6muXie6+ehubo6T4/NFHu6jraEYqjFy3zUFT3YntJk7E+cHdps497vs0KoiCMhjDrGhdVAFrJpYgN547JCQVKHxz4gLBGmPCaEyRYkDwRn+BOPd2wmgALCIglSyKMOXM6glURE2wVeXg5n2X1QEK4lhY0KQihrgwI8D/n6r/7LV1y/L7sBHmnE9aYed9wj3nxsrdXc1udhBFipIISpZlCQ5vDAGGAAfAb/zSn8eAIYEwYMCyJFKiKIlspmZXd1VXV7pVdcM555648wpPmmGM4Rf7Fg3uL7A29lpz7WeM+f//fkyUVMAwJUFHnknQiEiSAdicy3pxON2UT5783rPLl1f9FTc+OHKNS5odt8ZhmuZ+mpq23m530/h1ngLIu7oJwz4AeCBu61DXedif13WXcOznGWxwNCq23l7d3jm0hMW8YYUkuLsbm+BztmTFORAwMPNqqFgH3mx37F1T1w7sctPXVV0Htz5uakdM9MH751eXNwdHyza0P//Fr273U7tclqzR8MC1yaXb6e7B+erktLnblZpg5dJHx23o6l99dgcGP/vsot/P1IT9mGp23376+PWrt4z1olm+e/mKHNVyuyiMmK2Il/gH3/vgH//lL7gOSx8slw9P27Gf46AKIChi1Ed78WrXLNo6JDFYeBqneRZHJrGIWqQNAPPpepH2U6iJzB4uq4rcYVWXu31Xt5XjpNBUFTsWCArFqBKwUXJy4Gt/E/uJ8kbmv/Wdbx/Os2BlI7UkVbMa5mjoLy83B8GXND08P9v3o26n2iSLni6WHerBql53vB3nu33P5J88ODs8rb5481YZD9eHX11cnR2vlnU1FOzHdHJ6XlP3br5KSuu6Zq1ub/q+F0wKKjuKi7btXGjQzWqdVQ3UClaSzFPcD9MuRjQnLvjav7gbVeZO/bLhUcZNmpZHK822202IeHhykK/2IPa/+/f/1sWb7Rdv3hyEhqJmtc0YAzlXkyIC2OOD9c8++xLUVuvWebfro6urcbMvbJ7pvcPVsrTTbX9Qu0FsGvWA9NXzX7eUXVN/ddvvx/zxkwcvbm93YwzOex9yzF3lmfLKVf1e+jFfKySlMicbJUs2QgRcdPUwyYzGFYzz5Lo6LNeXl9d32/HfSBX54FXtxctra5qz04d3m+0uZcc+CtyMeSxhNNyWnoiYg2lJiiy4akKzqLfvblfHy9rD4aoDAlWOMSETMFW1Y++ubvdjzIIhcNsGU8GDk+7s/BC7VZIqGb/86mW3cB88fLTJt2nIEkshMBHPOA6TJ5cNDMFVrm593qSry+0CKZJMopvN9N6DR2Mpw5g2411dYxWckM19/uKXP/uH/+P6b/7Wty5fvfrx8y/XnWvqUIrlwojEi9o5Xq9Wp6fHX/z0rwqBr8N7Hz+8udhPaboeZyFwbVjBuo8J6+rjT95//vnn33z/wfnRYUC6vd2R0c3V3XrdtXUtCZ2psnvy9MnB4bFvu+3tXdqPVeU3u22c+6PzQxFh4hITGwTvpmkKITBzUSEXJM4AzgB85XMuYkxAuUQEqF1d+zDFhHwfmjA1884jkGkqRVSLRntwdLB5fV2KiJkMeYr5+KCtndcijinmohVNfel3xaCoxGXAf/f73/mTf/qzHOu2bZZtJXkgr8HhbR8RbDeAGBhgZ7I8P6w8fXW9+aovq2WNOieMU8S+nxcHSzVIkEHleFG5pE59k91QtGsDJGLiDDgZpBTR0tRPcyl14JMPn3761bN24WOM5MnEQh3Y8TDNgOiq0LWVFWWUOI8M/OD4pJ+Gpq4cUU7AFRN5Jrq53jahC+wiFgOum1A7DvNUe3e5nV2otnOaptxUjQsY47ztr6nkOnTrRXt1fZPL1xBH0fuoBand30ICqIIiIAgAA8q9BRnA1AxJwZKaIH0tmzZAMxAwIyoOZzA1rU19ogqFScjAOTPIAN7f44VUgUoxrwBIBcCKGZgIiNzjZYwcmUN0BGaSVbLeY7gZQdVqpy5AW6OvyNDUdEgwRDdHK4oOjAFE7rVdSoyAcP8MrQZS7rlFRkiClHO6vro5kgGrAOho8aBaHl31F5OaGhlk52DF6JkJENCNBo7Q0NjjKmDNqqYFANkxFAbzXKmUhFYo3ZsH7yk7Y4wl3f3w4q++efD9/dv6hKlaevbq60aH7bPPf/iXb/5JObmZ2oijKIIPhtlIDQoYgmAoMIsH6QCLUkSYDYox+JKNkUxtngxjrtfUHXCzIoBZxry9yBevc+q5FA+G85whlUDsgmnSr4XhoA4dJKMtJZOyp/qhIw9AxdXqTg0M4yuDHSIQgpFm8qxZFbAYzxHrRjpnWzQAVkUAE7HA97/+vToPiFi/DvYYAoGBFtRCRuo9R7rvqtlvqhP3WyGA+yEIlBkdgIgx0f0q5HS5GOd5PwGx1k6XFBJTP6Y3Q7md0nsny0frVTfs78bcg6gZ3vf9BJjJsTGR/aaDVkpRUGLCEHaDAOnl3byf1QzQ0ahqDClODtAhkGNBkCJ2v9NiBDBmIuO2qk+a+mjhSu7JqyK8utoIkXOGqmCQRCviVVOfLat5juQdQrOf4joEAUCkXLBtvGR1wExUMYrZZp5zUVU1kEXTkLGgOU9M6
JhBQERIwAOraSlazBBJzIBMtGhRJmKHaLjfRyQKjSut/5Nnz/79kxMrhSSWPk15IseqIMWmOSJQcGHVtLyywNU//tN/OKa5IJBr6poQQ0End9emZmoAxmqzlG7Z/fXf/l4ZL1wZyKhddbrbaOO7ZXu8Pkq7WcHQFNg0T7v98Pa2f3U3ToDmIXhmAkariQA5xaJqzOgci5oZxkEvhz049NV9B1ERSES8D8H7OWZVdQ5NMCd1DrOIIjSVAyTvOeWSsrCBZsAiq5YegMwvdnmgwLXVyIillPsMV6AACMO2j342BWL2zMNcAIpp+dWnX0x98fUyqdRdbYDzXAwVGIccO+9dVRVTrtyHH37n7vrNPEXHRIAEpSl5XdNwNwbHi8rfRLtT/KSuHyzq5/uRGVUKcpGcWe/zANEbCaloYUVy/Gc//fXf+cM/+tYH3/7s2acR56iFPH1+d/eq7//wo/PvnD386uIzqLyCmMHTk5PzxernX7zOhuHAhYY7XxVQrTws6CbGu9v5/hRsr6eWyAVH3jRNTRXOFku32bX1cjf3XdsYafBYt56Z4zxWvmJyoLo6WftadzdD3XC9dtRSTgUNAaCfJs0MBExcLVzlUaNKLtmQ1Rw7AmEP6nBdNbkoULZsBoaOzBCJVFDAkIkDog+lgtGLd9hRa2ib2DtP4KFrggp5csaqWfo0uvvlpfJB062rdivDbjcKsqFVzqEWVTQDA1BTYgeMJUsWLaA3WeIQpywfHB+cV4thji7wWdfuS85jRKdo6J3TXIqYJ6t8qH0z9XlOakYp6eCk7UK7rvbzrCVDUbZ6t7tbHy5NGW0e+95BN03Xq64JvqSinkPqrQgetednzarD+fWn/9LScH7+4NlnP3/x2a/f/+jDs+OTB8dHV5urrj123CkgmFoxIkRAUSCwe5VJUcpRtpvt5vZiJv/s3d2vPnu5ffN5+uo5Ib27fvXs4qiJ9a+eX1z1m9u3VxUUh/Tok4+ffvMb//C//f8ksKatNZXdNIkIIy/a+1FH2LFpbhfVLKmiuuoadiHFxAFNRXNqFhWygJbK0b4fY0EiX4pCBilKxHkscc4JZJjiPOZ5jOujcPrk6OOPn/6X/69/eqCtFGUHqRRBQO9QxXsmIqhwSBqDLheh5rp/ewcFwWBZ++DbOeXLr3rwXFVjzNmYxcxSGhNuZtVECOyJ6xAkKQVQzeenB2UYYpabTf91wjQN2ueKcbVwKhmAr6/K51/0b19di83t4WFh/d7vfXJ7ffnVF8+rHN687n3wywVeX2+q4AJhBdp5Wi/azd0u7efgW+rWr3exXZwdBJiHm6OaYczF5MGCr0bYj4UTVAuf9lFg7kLYF7E0iWgxqLGa5+SYEk5ShoY4qqKCqhLbsibdxRCaKY+t13rR+K4D9uZpeXIUwElKFbiSChOvQr0XnRgmwyxSARcQNasNcpq7qqrIAUBWXnuPRZChCCvqbLZXrQmPK08qwdOYJKqKWv4ai2PIPMTSD9EbWUEm223JEFcHR95aQnf64OFXzz//4rNfffM7v31YtaQ4bEdNY9Zx++7NNEyr47CZw7C5zTGOfek3fbNyqDmO17q7raFcPnv5j/7r/zZcb1ar7uj44M3L1+X6yh8fdLUkS1w1c5rj7X5NNGwHi3K48PNkl31RCpuxrBr45OOjq20cUj46f3r0W79XLc7Gq9vaxV/+7NdXY7dRhGImgCJLLGc1vQy+FHn84IOf/eKr/nrngEXEEwXnu5oCGhDtciLHjkhU0KFDdmqgJqo5aU1+HEZDZwC1r5hYQavAUMSkJLMxl8pzTPfwa8eAFREjec9EGDznLOn+gsgsFylqYiYliaoZ3AOwsxqRL3NGxFVXZymSNcdcV7S9yKtvPF517X/5p/8PbM15Q2dNG9oQVEFEnHMmZbvfp1LsN9dnhrzrcxvqLGlOBSufstTgG3VVW7/b79VxVdfb200kqkOYhknY6kAHwe36vHJcMQ9ZyFTAIYfmHiYMNM/TelEDYbds52k4P156z2Uu4z5CE9ZdePXyHbrw5u12M13NY9kmOXBOdIpq73bbxrtvfus9pnjRj935Ian+8uVFAGrUhDT3oMvynW++96Mvv1gdrivgl69exdmU6e7dJYYQJd/s9n/rO9/9lz//q9Pzw9OD+ufvdiV0nzw93w/b3e18++6Wfc3Bx2H68OnxzWbe9fOo+f33FuM47MYkSgY4lLKqOpIp1A7Q0pCOO364WBw0TeXQI6oBK1RHR0a+14RMzgfjkHNirBnX2/1FNFufnE8yzo4aWRVftSffrTfPi8zNYt0Ff3W7LYA5RWG67od2Xd+luFz4pglagtbuyzeXJ2fLu1cXm83skQ3KH/32N159+Vrqalk3+zG6phXENtB75+uf3V0ScVe74W572++OPnz/9eZlmLOUwuBQhRt5+ujg9dV204/Hzer13faoXbGvUowO+MlqWal9eLC42s0xq4syqh4dV996/OgXX7wUMhG+2uxXbdjMYyDnh56cHbeLv/zli2fXN2cnB+uF0zjlbXl6eLYf9+vDw5vNXVXjwaLdDXN7eFAF30+TxvnR6ni1DFcixWQ/xbYOWdM0wuykO1wcHR6HVc3j3JlV5+t5HAvp+Xpt+SZnHWQisV2/N7E64M02JuQUMxCsg2vZTVOhEAqoiAWkNoRF5YD0OmufkgH539whf70qirlstpNhBbmu8LhzeSi72fBuztE76tp9TAkRVMexXzXtYlmlWK72Y+04LBdXE27fjct1SFJqouNVM/SzsDvxYb/ta4KnZyd88OjF1fjVm8/Pjw6PFm779kLcBG7pvH94cpZneXv1GmbRXGrvDE2dW67bcYxNU/e9rpfN8Xm3dHkjcnq0trt5KPHh4VHt6qkkQ6yc1IGAfFOTiHWdv3wbf/pXf3XS8O/9W3/41fbu9u7OrHLeheBK0TaEm37yOa8pfPiNj0Tg9et3n3/6oqmr4+P2YjMUxsu77cnRcpcm0vzs2YtHD85U4cnZ4xfPN08ef/D6zVtPHZNTlRIVKluvV8uTx+ZrIPCM4HCzv3n17KLfXLf174W6BqhCWFjZiGEIAVAVNUMW1aquEBFVc5qNyFWezaVY0O5nseSYTQCVA3tglFwQnBEksNWyHe/uHh6d/vKrt/WiSwSj5athBs/l5Q2BOTKusPXBATVNmIbxvK7/xh//1n//z38wFTpsFvNkxvNOo1gxxbPjxZCyRswZby6H2vH3f+/R2A/O8cuLGcQwULPoWDBjHARiEawpx7xen1QoQM4Uc8yVkRpNs4n3dzl96/3jMc+pJFN7+KB9+OjB5XZ3e3fl0BQxg0lW0cKeYso1kneoas7AMQvakCIgxVgmmZ13Sa1rK8jq2VdVZZzSXIZpstojkWN/cbunKmSFwK5dVjFZWzeic5pSADC1Tb8PdaVFfzMk3w/oAEh2P6Sb/QbwCEr3j6AGiIaQAVEpCYgh3LNfGAEBgUGADAgInRkqECnafXsLVZHMGzMZeUMmYTEGFVBBU5KiRFAMC5hzVACaml3FalpMCXAWTGoF74Vo5gPU
weoajcAAp0xDxHlCLHpPliQEZhTDpLYIBA5zMQA0NQXMQFlFkdgxAG3urvLmjR2eoz9dHj8Ki4fI16RFvo6/UFRzprUnFQDCWUvleVE13jl2WMgNSeZchpIzKymBJu+trVsBP4sKTojWMsi+f/HiH2+3nzEcP00nIZEPq8PjB7F/+fz60029u7q6pRYrLMFRBrGgTsEU0EJSl4WEFZaIAsa5raxraBxk3BAXRCJyVNfWLqytJY/Zpny389OF297eR3RcKQICCIwkQuo8ELMZxtnwazcZlQ1bNAHh8+RWAAXFGR6BE8yiMLEUywZtAA6WMgC6IdvCURt0nCVnLMoE970/VAAjwMqTpzIlQA8EgARAhJASNEnZQ/BITqEAICCRiKmZKqsAMRKaY/SBnGIGU0QzACKgcDf0YwSx3LrcVv60qbBIH8tdweFqPKjSB8edb4ps+izJE2UBA8jlXqcG3jOAAAB7CMyHhwsMod9sh1xSkmFWBtApFSmgomaO2TEnLcFViCoqRGSoJkKKde2PDtarxk9xqjyHmn/+6uqmz2xAYJV3HLgYJIW2rsdpNgUh2g37tq0lSVTIolB5Ch7RFGlIQkzZdAeFK8QIqJjN5hwVQNEIcbqPODF2TTdrEZEpZoeoBAo0icw5htqLADunoOxQDBANmF+8vf57f+/v/+6jp+1q+fDJgw8++RAVq6oNVdVyiGMs+/HFr351ff0mlXS27HJ9cHVxe7fbjVMOwSOgcz5HIe85+IhsmR+cPvn44VNLA0IFwrnoMEY0OGiXB6ujdPeq8mSaX7y83t3ttinfxTipzWaT6pyoQgTiMYpjo3sbw73mXIyRyPmSS4yieaw9YUAtUPlQDJ1DUF9VDggkK6MTBN9WD86PK8d3t9vt7R6RTA0MBKxq+PHp8hirHz3fjqNThwLgKkI2Z5xN1JQUCSzFhOBIBNiIHDkOTRgjCgbT0s8RiR0zOPYuABgoAJiIFARIOkwC1bJpFqtutbm5DgTewWHn0jCszg6HMedsU5I3V3p6fPzyohdiM6u8yxlBpYBKseCcEASHJSYQi0V++NPPv/+Hv98F+JMf/agUY6Q5lf/5X/6rj87+zv/qP/27P33z9iIrB2yWXgr88NcvRoCq5m5du3XYjNkxfPP9o/2UN+/63evN/fd91y0lwTCn3ZyLFUdI/pbIG7uOHYwJPRy3rUkxovZwOeS56dpcivdutYCz867zuE8FCSJBjKXxDg4WyQnVrleRexMiqYbCS+c6nyQ2RGbkEBSNHORk+30CoymDiVTOccUOGSTLPHKoppy1lAY9ppwRQ+X6MVUVusP1sq23+x4gs+YayYdGYnlwcHhYuWkoc8yEBAiIHLgeZBhzMkZFdAjEKGCMwIhmlsUuZt1fbieDbz5Y+7YZ96OU8sHJ6nDpfvn6Ok46mzAxMhojeiQKuc9FzYgQKSE4zZ+sjuhg9fKrd/sxOoerRVd5nHbbm2n06PZTARrLSUuYMMZQU5VxsTz43W/+4e7qOs27Ifm2OXhzs61Xx//r//P/7WD14LO//LN/8A//qwLD3/67//HTj/5IyAQUkdUU0cBAEPOcsyByGIf4+tX1y1dvPvvVFy++/Pzuy5cWb0NAMP38x39x8+7N4298/O5uw+vqWx9/PPTbUkoD/hc/+SEKQfLgUFWMnCoykZqAKpAVs6PjtQBo5Y3wbnvXHqyaERbBZc3E960kTqM1bd3U7FRyKZoSV361bATMG8T9frx1qS8YAq95cdB+cHL0l3/yVw/9Msbi11VdV7t+NDUk4ArJE1gGYI3z0dE6pXm731fkfO26g/r2Zpt6zJEQcNZU1+Yc1QsniikzFJ4TWFYES4CG4OeyN2gXueVmHfxtyle/uUluQhvnVKTfId9c7jWGsGySgqsW1+/GTofKm2/t7OHiyYd/+OZi+9WLzWFXjyWVZctAyxpgTuD8diuWLbCPEiJVxnGSfUe572+XVacSq6rO6W69VGwXcc6W5/Xab9L++ODBeLtNpcfgEFtXLeZ4y8Ez+aiTU0J24NmUDOLx0uqL6CWLFslatQtfV8ShQZyS5CFaVPJoiEBACAvCFkCQ1QqzTYhRcgJqmyYBoaEIVVQLGqASm3q63Q3XYoUIGUWl884csioJ5ZiJcEoCiIigIsN+XPrK8rRYdfOUHPVd1zG7uQho04WDV8++/Msf/Oyv/7sH6xWleb69vLq4eH7z5o1v3DEfyJQdwMHJejv1BhEw6Tjdvnz1ox/84PX17elB/T/9F//P99fHD9979HpzcdO/+1/8/nc03/gKvPHH7//Nl1/89G683cxj0zSLDgQzM8Cd7GeSlBuni8fnzfee3uTlR7//N8bmhFfn8fVVxrib5XIo5l2KpfUQY4qTPT1f/vj6TomffPSt//Gf/RMHDPfxex8oVI7IysxIHXsxYxMDA/ZJxJB8oGBa1IhrtdyXRI5IISAigAAy6LL1U1E0jMQVcomlGNm9k8ORIBBSYS/CyKpZSxYFS5IFCAkFKIkUsIgwGjjRNjAaVnVViwPUMSd/uL6M/viTv/ar5z8d+01beygY2B2uD8o8o4JnN8bUj3lMM6Dm+Wv3U06lrXxTV5CNDBx7VDsJi5qrL+42PepRE1pXjpYOzY6OFs+nWYE0G+7LaXf4brrWgliQgQpIGwIzpJRdcOcPDuZ+isX6fpSszhFmLRF2Oc2DrENz9OTgly8uN33ph6l2dLA6eO/g/Ms3XxKiQomI1/srdmEmd305Tfu+bqp5KrvNOEohBgjd8+uLB2erjz96+tnPPnfUas2Xu71zGJNmxQHkL371i5OjZefp+e386i6h4u31Zjft68ofrCrfND95/prJ7fYixfpUXPB/+fNnf/T9b3764qu7WXxwTe2jQhWqKebWV48Xi4dd6MgzkgNAYk+ECIYojg1rJF+sIiRlKnM16NTHcU6IYb5N2zs3Qc6N+R/86T/9D771DW+CKndTqg+XtAcZ99UKpAoDyH43WRO8g6N2beZeXF/8/Nebx6dH4XA9jfHb77+nOa+7ahV8LFoUXm/fnb7XAc8bidtx6oJ//8mjXz37Zdet3ly8E7V51oCuXYazddXPE/S230wVV/2Ua9eKOcrSAfzxh0/quUiMv3VUtx/4291YkF7e7SjgeHG3DP700cnPPn/V47/ITgABAABJREFUeF8KtF1NQpoRJ3HO/+rN1WzSv9vGOHeVnoTDUiyBXNzeasyV737x8mImd7pePFw3n/66f/r45HY7pGxmpmKv3lz89vc+eBhPri+3U7R6gddTXLaLHHG4msqhi1HeXGzXy/bJo4e32+Grflge1NthDr6+24xI4Bpyjvphzpk2MT5cLg8Pzj598yyJlWx3u+Hx4elumiLxzWaIm+F8sfg3U0UNl9mBhvX6+PXF672MyeM8FeQai/T7WLGlVIyoqusMdjtHnVIwaLv69c3uzS6BY3cXzexoEe7u0mUfgZtHBc4qbYrc3ex0VCP3h3/47evL129fXz46fpiGcje8psDrDl5dXH/24sVENZq1zjchHB4d9jG5hir
2Oeiicl4t761YPSRomzqNeH0zXmMET+Dp4cHKAcc57W9HctDWuOycDeOf//m/gvHbv/udb//Zn/1oGmfyvq2DFLsZh6zl+vWz/urNe6cnIXRH3WLI07Af5rlUQGyQo/SbkbKsg4/93B5jSXh5MxytFpeXFyS5JeOcmViRFnV9dnDcLk6vtiMGVZZh3L5+9fkvP/tsvt2dHp5++FvfNQF0Lt8jL8xMldhq560AsF+0DWBGJEQuSXIpnimEAFQKaKgayM5KzpK9r6RYAfVtnZIiwTSmqov34uxkUtfu5KghKLOkpgnBeN3Wp4eHL16+GjWeH62++8GjP/nxz8aMVdei5zzMlMB5TCUbVTIqikqGJGXhg8/5+tcX63bhtnaYoZqSa/PHx6ev3uwYhbSkqZALGHiMsQ5EJDdXV7Wn2rnJ8mYY9maAUqZ5cbIqzt2+2BCky1ev/4O/+e1ffll9+cWbq7sRybVdNWfLKgA2DrOINJ4XXeWMiJkICBwATPOMTKVo2e3JsHJhnEdScWRN5WeJHzx48OtfvumTLavqoFtaKrGUztNut41iDXsV86FilqJi+etU0X2JwxDNjAwMzRGSoYE4IkOl++GHEADsni6sAAqEQIhMgAiM+jXlCIQdEtPX2SBBsFJM2SOAoqohIoPD+7gIqBEoEON9AsJVGBysG0ekhMUVK6KiGIsVNArICJ6hYmWGrEWEc4L9bCUhCPK9+AqNEEzNFFBRCviAXYUAKIo5a0JISgqIiAW5n+P1u5cffPgtJXLdulmdK/1KoTCYQyQDMhCCZCJZRDCbWcHRODsZCYRKyiqijq02ELNkLMUok2cnkokYgY0UIKedXNvLB493r+9+un0laXZz9jHPzsEMkJPVFS+X2i1dfYC4zjOZOWBTvUc+AZBDC4CITNaici8CQBMroT+w+gDQFUlGQ331At5ceMoAilRSqAxNDMw5zlmtmCE5wqLmGEXMrAQCy+pmTBc2R24fUGgBIYJXd6ZSKL4DGtkUU9K60lDRlE0yDUnXAUOwJKroApOIAphzrKasVnmUgFGMgYqYqKoAJpwzhqK+M/KgxUDvZXgmdt8xJANgQjLFezIQA6qZqa9CH+NQioGfYskJhrm4ppBhyxAISpLblA3KuvVN5VIspajcK9YQ1ECKym8OATOtuioQ78Z5t+kHQWaoCCwXInYec0YGdIQEVLvKDKRkdmxoAIhGaUrHVdVIzlPyTSgIP3/x6rIvJuA9MmBwLGpSzBN7h13T3G2maZidDzGLlZLVFLBgGWNsyZUsyOAc9kNEIsdskAFBVWOMcm8nJChZseBJu2QDElUzT6iIqQiTdW3VDz0jEEKcowuEBsFjnhN6a9vg4p6glFE++8Wzz3715bJrp/10fHYUKr+926c8m8GyqbabWcaYSwSJkEaUGZUap8vGa87IfLOfH5yeHi+bbujl+vXi9LQ9efz65m2cJc+Srdg44HQRYNrdbPt+fHs9zIqX++gqZm8uW+MpqThXNb4a02ygRCACROAdmwggpBTBjAm8c8TUNc24G9NUzIHzwUidI0WYx3m16tarrqjqPF/3U5oTiLhAVLkikMwCctt1Rfxtr6M5B4gO2qbZD7FkATO9F8sA1CEwciy5ZAWnN7c90lP2LjQACLWSiVWOc8kggmAIpqpqwkQtY3/9NqtFs/12HxBTkrYj8lW9IKqrxYKv9htmtxnz4fnS1y4XoYrAcUnzXoc1L2ryu6KKoMAixTtcdGGYhjfPXr3/4NH7i/PrvNnOcxa7nKc//eln/9Hf+uN/+xu/8w9+8ldKBMjbaQAP9ar64L3DYdj1047Qnx2s7GK6/uJ27GOevj4Gu2FgJVP1wTMgIrLzANQPg5p5H6rOy5Qa9aqFprIAr5t53TYQjYmKKCbtgDyReD8pMVJ3VGBpRISeFCCZboY5NKwmHLhIuCcxi5R5iAhMAHVwTLhw7dq3pwfLh+89XLQnOhcivbq8vr272ebpq9fvrJg4Y4DWV0Q4jgLJutDGnBkhTXNOsuT6pF4Hlpv+UorEIqMBs5cyIUAIAdAUzRP280SACgCIlu67ODRN6fO3NyLlg8PDzldzipx1TfWTg8OXab83Dcx15UgKq6GKJ9J7tBqCxFSMvnh+UdTiVNC7bG5IsLZq2O9383y4XidNUNLLFxtM6dHJ8tD77338B7/+/KuXr971u5u6wu9+60PX8Ouff/WHH3+jXncJyjf+4Hfftw8//fE/B/BFxIzYeRW9Zz/eCyOTmBoTGjlcnx3/8suXadg/bOhy86aqSQRdcJuL3d1NHGM8e3JWZlkEXp6eHR6fvLv4yk/GSRrCeTcAgaG0VSAzUmmaSkpmF+55KsFziknRuuP6UVfZGPfbHRaSgtubGdBP/cCMvsJl5bnyUaxt63GYpGDZJD2MJJrUVsvu6qK/ebXNvRpx3VWO7/mIMM3Jal9XXp2lMaFSaMPu7c4ByWDQ6Hrt18tFUy2ffXEdBStXlB15N47zMEwZoGsawZLKVAcnSimbxNyKtgbkfC7iyTmmqv0a6HtxcVs1NfswFpnMhCxLiXe3H5123/7WmaEdro8v3lypbn7n+x9Hpd/5nbO4maSjcCWceeGwaX3HpGUSycfHR9KdzJx3764br1Hvjg6X4xiPj1sRZEfTPC5dZ1Jk1w/oqvZwL8AUJBfvG19DslS5SlXQSlGMU8HQKTE4n9AO2nCOUGKymg/OHzTdAXMoqXh2EqPLBmaSS1V5IqAiAR0L5wLIzWCJQBF5Rud9haJFDA1FimdUsJLVMGUDAWRGZzBn4SKZ7P5DXgdOCpnRkMQkxmmzu+6CLZvOrMrikkAWCMjkGJGePH3/xZef/bN/+k9gsfj+737rxQ9//PMf/2CS4ej49KNHH+8nYVfnficlp93l1Wc/ePDdb13+8qv3Ojlp+R9/+rx+/+HhYlGBYpoY4WwdnhwuqlRUp6Zqb+++6Grdb0vwjkjF0jDsG+rOliEXWdRWuylQ5k5d9SCVKprL6nY3t3e3kbqFjxRAmICKdNUCvWsb/3gZrgr8y5/+dHM3giKpASiKWkFftS7UU57NzKMxAQKVnLzz94dFVR3zbhqMqEGMWUHNcaUmJsUxgaqQni6Pp3kYhomIQQEA1ZEAQC7Z1KtgIVJEuXfrOUdmaoBQrGSRZJKZiEmspEKtD4zYdS1AmdvmNuXHDz5Q0j/7i3+yahxE9FU4O+3u3z4lHOY4DnGzGzICkjj82obZ1jUZxpwYkQwqhGCwMhpUbhCEXWMKWVqz42U9jhMHP8wzMxXMb8erHPSgXnBI8zBLBhNdLttkZZiy0VSyMjuQUkSrUItpBOGKM8Lzq5uLabjZT7X3CyYDG6fhLbyqAgV2Hvmq337y4WmRUSM++3JbVdiuHGWJsXiik3U97IaurhrjX/3sRSONCm6HzdnBwjxf7icoNo15KHB6EN69Gd6k3By0B121ubism/DgeBV30/XN7WFTMfJmd3u0XlUjkXfNarXbDOuqi2lQxIAUx/mors/a5hD5tKrWHIhBBbUogBkREmYrxbQJlRSwkt
SEA1Utvbu9HTUerQ7Npl0cRlYEmO/2ssXdbf/J+weaxikpkS9lx7kcBHdYt++GPQCUOW3NmKfdPCDI+VGr81wHev+DD9fUbi+urKCAu9lcEhvm8t7R2c275yerdr1w68VSQK/329Lx0M9dqT589Mnnz77oWia2cU53d9cPFmtXOSwWA0rWNfMZ1yexBCSoW0rgUU9CDQSHpweXm/HlXe+0vHt1uXIOC22G/cffOMl7NcXjxytT3L2L2AYupXJc+8pCeH53o1DIhc7B8dGyqHqPq453d3ubik/U+Xocd45QUbPa5qY/WXWYRYYZc7nZ73a3+4dtfXbYXuuuXdSUcZZS5dj6ADru+iiiQHp6vLS5z6bzXHyCDBkZ+jFt0usp3fcCOIrsprgfxx1Q5TyzuMr/G6uig3W3u53iri9ZXW19Lw9O1+GoyfNUL1yac9dQyXC522vwMQowUlUxOY0ZUSCIb1iioEOzlMG60+p2n/usv/voKKBKMUFemovXQ+ptXTWz0pz6uUyzwbOLi1nNN21S8khTkmksKSsye6JBZmA9XJ1Y4WdfvetjQosPjo8cuJjLmOJclGt/GOpVHRYLqmvnXGVp5Fqryu37+JPPvvit3/rmN77x8Y9/9evluqmaJl71atK21TTJPMebzW654LpmcvXJYf38i7dt5RQRiUqGJ48fjLc9L5pdn4n8mFMjBSxWnud5RmQt6F3HlU8gNlxV5FV52o9vPnvx7NkvyNRT1V/vcp9yzLkUJEKgUpCIgYiRYkkOhFCziIGFylsRU/w6sCKQwWrvYylAxsoOPXg0sda5PA2YPKJnlpPD5kLLsg6FKCwWTcNvrwZvphmSwt2861b8yfmDg8P1P/vhL/cTd3UnCbbT5B2aAoK2yA0wGwyDTOpI48PFoqrc66txEwRTfP+gPV7XfezHITaNf9o+uJySZ0yExK5qOyqJONze7U4ctk2Yx5RynsUI7MjxUeteX26XJ3XelmEv8WBaV83vfefjy9vtl1+9c3XV73eeAFCoqYdZLWNFylrqxu9TqUI4WK0MUQlQS5QiyRBcW7tpPwLzNBbXVP2YfR2WgTTlbew7x2mcQqC2Zk+8qhb77W4/xa5x45Tyb6ZkBaOvidYACIhoAIrGRORQUBjJkFXvgyKgRcmADRwBe3SEBvKvsTd4rzkHIYOcBIGVjBm8KDIioQGYCQOwAyBA0owsxgWsruFkjce1cclFaZgkm8uK6b4RgsqEAdXu5WUZLWEsmGcwZVRlUEBQUCYwAEYUQ1Eaky6YAhmTVY7RU1KYoMyKUS0TI8DrF68/fP/1gyfvsbludVLXjeYxxmLs2CGBFrScEioTMaqUJHdzykoA1NakrI0zcB7AkDArWgkFiGFPdUYxhNqyARp6SLfwut8uBdzCZ4A566wtDiqjenGInK4QG0cnEE7G+iRTXZQLYEG4rwSqBc7OARX10BwYHWvZZEZnS/XHUojHbT29wPkNuSiEBqaMSGqEYIZeWZVI0AjHaT5o2plSNhQ0teIDEFGaSARnIVinsCJlVFC3UDiClNEKKqFjoKBmZZ/dNmGVoGtpmwVIFNR5QjBkZCDJ4gszqRES8H24iwxNNWcKRs4sMCQFIwJTBSQ0REUiM0paPKGpIYL3LiVlhKZpZ0kGyGRgiICxyLSPlQdHUAGhQ1Qcso6bsXZIHgmwAsxF0aCoEUCBr7N1J4cr7/x2s9/2KUXIxTAQGxS5ZzyAZyaklAtYQWJTcC4UzYiAzMbcVc2yqVGKZ7+sql9dvLvazWDek4CAAZsagRFaUdnc7dwimGiKOXoKtddSDKwAjCU7oq6tIZeq9t4RmYKqipIpM3vvriUpGCiokfOkAnOJFXJKuYApWO3Zk8tFcixg5MGUsDBU3hOXrGZgTDhst5/+5Bf/p//8Pzs4+nZ3dDzMwzjurt5e311eGGXf1ODAk437mKMSkJZp39+lPITWMZft3TAMM3HdNKGul9/78LFud6YG3q9OTr56dX1wtHKBOLiAMKfp+ZfPaRpjKe8uN7mptpl2YjSLlVJV3hkCIKHNko1AxZzzQEIODo/aeYy7vrjK53lmxCo4RRpTqitadnXoVu89fRhz+urlyzTHVdeS0X4zzDHHKSE7LWDmJKMhiFrdhuBcQ+HHP/9KCzXBzSWh4RxjQA9UFA0QnXMpZiJCQlY088h0eny83WyGXZ+sQkdkyIwekB2NOaVUCIEcARg5Pj9ZXd/u0BGI5ji54JkIHX31bntyUN++2TxYLdcNxqQvLm8er5uDg8V4PYFRLuYpmORF223SOEgkclrgsF3upqGPfdUun20vCsrJqulvb3cltwSB+J/84CcfPH78H/3Hf+fT1y+2kKD1F/u7hPJgfbCbJinxaIlPzg7ffb7/yV+8NutcCE1Hd/sJAJ5dXtdUEdrRYsUkhMigRTEVZcdGyBy8wjj0U8q1c4BIhGVOZiCIoXEC4gg1ixGxkGRhZe/QogZkyWXp8ahpkGzKJgnMBQVMqRhAMPSeUAs3TSBcLMP7h6cStdmncvM2JUXQJqdvrY+jyFNXFy2X281mmGYTUQ7s5jHJnMjRFAVMocDx4UkVOtGbwGSIUykR2UPOqg0Hz1yHajNtRMwhmqEjLEWZqBQFEGAekn7xbtdv0yfnB6tlzR7V8OGqGnuJY09aUoKFd1Is50SMApLUmLmuKlPZ7pIYgaEHGIbs2G14p5mC1Z4dU3bAm30eBqxr/PjswYtfv4jb8dXtuFo2T7//O3/5y5//sz//s//r//H/Xi3PsCZAIV9V+fj7f/SfhLoWRCQS0Xtf+X3ROhc0Ir0vhzd+2mN2cXf3Yvf6hWsspTmNBngvkkvDcD2OAbtuv5tWq8VumjLVdX0I8TkUJUBRQAYjJKKmqgJ7dC4aFC1pjh4CM98vul0T+lm4NRlsmpQ55IyaVJwJWls5JlexSZbD5Wof46wEjhaH7fa61yFzopLBL2qqfExJwCxOqoUCUeNDxZ5dilyhR+M4TK5dOGegWoq7udhjU/uWFWWex3qxDL6LajGmTFZxYdCTowYYh1jiJAKs3onnFxc33/nmUwXZXY/zby7Phu02x/zovdMy3ORS1EIpYGjLx8tvfHi0udt8+7sf/Mk/+kV90K4f8M12+Ph7ZxbL4/PmJz94fvf5/kHXDtfztJ/ckrirUkVDGUzhuG5i7Mc8dcuOmIFizV6RK98548Z5VCcCcRasEiFPqV/UDYIrWg7CwU1/y4276/uT5ijN4oEKulRK8O5wsb7LbnF2cvDgSTh6lAysqCNN/SXbFLDJIMwOtLTelSJZcjGt2AXJbe27EDYqDrIjbSunBUsmY56BphJ3Qy5ZnNrhslohFGIA2udSeUKiwKCp0D0Vy4icY+emcfQI+XZeHJ2zhGEcfVVX6H0Tjh6dfvDJez/885/8D//f//dPf3B80oRC9OTDb627g0dPPw7t8tOffRbfvl54Xkj88l/+4+HHP+EJ6Aw3V8//L//53/kv/v6f/8HHD9988RrG8XpM/4e/+7er+eVwe+mDF3MaN/vbt5X5eR+LzKF1T
x6ff/n5O9C6Yg2NjSCffflusYPv/jt/fUqjp266fffmi58cvHfufA2Q6Otgng5j8kfLsi1H5ndQWVnGWdtARIBEwRGoZshoyMwqZqRI5NgBaOPIA2DOiSmjy0WYzKkRkIrlUhBsnqKvggPQbvFH/97/8mc//PPrL34SjNSACKOoJwKFNGd1wQE2RIpIDF4EEe+r14xkaIyIgFktSinOR5E5lqPFAldH1sZXFxeffPt3tpv91dU1GNTsPnx61la8H6eqDuNY4n4UUfgaHGrymyeiyoc0yzRnx+Q9zfP0dHlSOfrZzZUR12BjlpKEEStwF1d3E2BXBxWRSaKW4ANjGdMAzOwCkEtGk8rh+rikOc65aisDIeY5RSslGxUxUJ0A93E4PGwOan+w6F68207F5phRZLLEaKcPjrZzvL25CbJ66B0gpU3aT1MdQhzLJvffff9UBMf9XEaApnl98zb50nG38lWQSQyOT5ZeNbT+4er42fNLMvjyZl8Z2QQY4Wo/7DR/8vjDtLvmUh6va1J6cbMxotdvN3UdLGuo6pa8h/ww+JXDB6tlGUvKCQW9C23TJFWm4EgRLKDV4gzv9c7MXf3F62cTc71wWee313d9Vb71/pMXv3jjXSU4D9fv9CHmVMhqLLSuFpjmnObGuTPQfZ4CtCXq9XDXy5xZPJLMxollOyROLtDj9z94+/Yq+IqiLprl2Oenjz4ZttRyeHKyCjYZCGA46ham6c32K6n3rvOvb65E3UG3iKJxNx0ulvspxn0+b/13P3xUAXgkFypmJyVZsTglT3Bc13BM57X71dRbMYjypFkdY3g1zyaZK9mP8/c/PruwfHnZ18m+8/S93TDd9pKLoliRsl6tf/3Fy/fWq/fW3Ze3vUMAwMqFIUZwzeHxYjP3F1f7w5W1wZ8pPjzunj2/GhEGrGrWP3r4YJzly69uMtPtPnvvj0M1zKkQxbFow4SuH/pFaHlFdeVFQ1tX/fbyaFnfTZGYzMxXgMF7hSyFEd9d3f0bq6JfffaaDep1dbRsmN3Dk0eU55zKuvKImF1dSi6SFrXvp3zU1M65eehjms+OFn0ZfTIs6Sg4VPlwfVBUPt/0K18vHD55ePzpz3/ZdgdHR4evLq73VymjzvN4MWUgAFLX+HLLDjmZICEhkKesOhVzAEWldniwbMs8vLvYGhlYXi0W3vm7YW8IB4fNMGdFuNruhn569OjA+3pRLaadDf2+CtS11W6af/HrL773jW+eLU6vLy/szMADMPZjVmNyfDfmQXZLccpQmyMmQrvZ9Ot1u1i2J4fd4tFpu+hevbiclCLizd3+0dHROjTzFEVNoy669uBw2c9TilfeN27ZMQmwmdOuDZPAXGZ1ietgBqDKDMF7VVFTMUkyI6FYBgYkc47vNfMGFrzpfQhJEhGaomMvqojkEAxcDTTebkNRQjhY1e/u7mpe3KW59hDMLXzYXI7rrvUoZPjo8UOH7k9/8NKsOlxX6zZcXvZZYUqGTh+eNmkGyXmKVkmYJ0HPTq3CHFSvNsPByk1qoqWtmtVycf3Vi/Pj810uE2Mqgp41TlkitnTZj4frBWYxKchEAIua1l0z7qbb601ZLt693YXCLy5/Olv6nW+9//T8wapatuvu7rZ/++b6+Zu3YoU4CKCAEEFKyZCz6ZBSmnPKc7OoFl17V2Kfi3fOh9A2zTTnZd1dX+z7MRFDYGYP3bIxQsCMBFpSH3sBIaRJZSxaNeE3oSK4zwt97Z9CAFVAJCRGuFe8mAEZmqgaEAATMoIjREIkQGMxAwJUwHtyUTZUJGA1UEBkBAUjQ0YiQlADVDQjBQeAxlaCp3UlB4FrF6ZoKYEZ56KlqIoxAOM97A+ykiqkQmSmhmjGdk9XAjMFAEAUu5e2ASiAkWYDhhAcsmPECrDz1Jcyg9vOQgbzOF6+fX14Pob2sFQ+dNWw16pyYIgeRUSBna9KFv816duSyKwISLGYkpXa1QDoqWRjdJAtZTOyakGJxJJWBGgkCXIBGWlwGhqzmoebNO+tEvAiHkvtyRGZ8bjnVKgtVThS6BQZwCArsAITFqAZCSqtanOrwgcAVow1K4hBmXm6Q43CZiBCDIwoxYjvUVEFlTxiTuJ9lZKKoKoZwKwaiAKDU8MC5Vb6gcJEfknsY/Dq12aJyhaLUE7SeBCCEUWM7iY9cbgIOEYVomXlc4qqgIAEiIjtkmMRTYhwr5lFR6xFtYBldA4jGQEVRTBgJmaSbKpiYOwIAUy1iKgZkXYeXm76Isj35HVAZMpm2TBmITAkYyTJxdiiEKIBA5ixAwIUsa/Xo18fA0ypTFNKSarARNB4x6JA1HXNdT9GAzVgZiLMuTjvCEiBDSEVQ4XgkcSqqnN1/aeffnEXowIHh5DNAQFAKUJIqah3POcSsyulNLWLBOQ9lMIAhKQAk1lfUuccKYxzSlmNIWepFACxn6IU8ETehSQli6KhFJ000/1dvIEYShFmVgMgOFp2u34OFd9nudQ0zcWAqa6vTf/ef//f/c0/pu/Wv4fsQrM8eeBWy+b68u04ThQ8Esc8C7qpwG4uUvkpO3aYk3C7WK9QlSXro/MjtPTm8tXxwQnVzZTK0mOfypwmMTGRGPHFze6QtD2sQl4MUaeYPSOB1m0ggr6P99vBqLnuqpP1ch/nw+MVOdrc7ZquWi/bYTs5cQ4hTZE5xJgODtfvnT5ql93NzeXkSiopz5ZyASxFVVQZ2bNPJRMiEavKunZHbRj2/c27q5glFSxSmBAcxphASghe1UxNiwBYzlELkGNgKtlSVBSSiM6hZD1crDfDdpaUsyQ19h4RKh/mKI71ansjas450lIztBXFUeYZxkmKxs00nS6OWdlDEcKLd7eLrjbZqNaTlEXlnfNZizGwd3NKxriPkkxKEdGsMK4X9p329PnNy5QzGu5kqrruH//iF3/4/d/57fff/x9+9OdcLQ6edoujdj/EhvlocVbR4k//u0/7rRAsQl3Z/df6fapIyt1sYPBy967x3HrXNPWyaxjRe65D1feTmnqHpowIBJZSYu8JSbJlKaYqjOw5STIwzx4VAmHwDQIYey2aYyLHq9AAURGJKVfgRIvPyMKatV2GGj0kvLqZoQBrbLqqiJgIGtzexSLFsdVVXR+fHbVRWa/3w4QSAVIuFXgGW6/bZX2yDFW/36rMUWQ/jCF4Va0dm7CaRSkpCSKllLx3UlRU72WfamoAJRdAnsQuZEKPp7k9XXfskAG++8HD7vLq7e1+M85c+XrZcsXsXZ8iF/QEoIoK41QMidTyXE5PGyxJMhshCuy2+9ODxikMU9zNerePn718dxDCMJeuOxpj9ad/9bO//OVPvvPRX3tw8q00DjtUYG8H3gEvF52CiqmVDEC+qowUxGCOb64uF0ePyTlLtp/zXsCfH11sd/NuLEVISUFikcoTBBji9OnnLx5++Lj1FYhoifXBglfYHqzH201dOy3Kge7dEIqmWjBAHUKCzB7IceW9pDTGJGrkcbHudn0cNjkbEIALbAAyl8mSVVi1wfm6H/ZJJXTBh2YUwEAlZyAgpkXXbWOc
RNfrLhYBsbbhZUNpisrSVnVMBaQEV0kRKNoummmSbtl23J5+dPLZs2cfv/94c72NeSxTZEWu3aLuxnkvYItF0x5gPeR+EpttUlt1laBAmcisqfj+FMQxtqEerzahKebJTOsFnn94mou8vdynMX3+yy8KZ+9tmkfX6Ha8ffTBh+1RffDgTt/uFiSFbV+0H2Poqs57GOP64CAO4+52Vy8XJYNDQsd3m83D42PfuKy2y4WCX1TV3O+brIGaVVgxupyToE5lLJJA1ISzQ2KQsa+batJSB9d60NCcnJ6SD2TZgMxsut5un7/y0YIHMMxFiHE0IVIBZWbPFKhLVkSkMm0Jm8BdU01zLA63c95GSWhbQwf82NkR6Enj50r6bEhYDKMqOWwDm7lYVBTEB0ee1OI8uqy5aaqmmlKuUyFiixlRj548LX/6Fz4OMjWH7z/86Lt/7eE3v90Cs3PbKZn7yrE7WHjazPuXl68vX6wOz3/6LF72sTndfnK6zNPmydlaqtPNzZeV7mXeBE8InJV8Wy8eHuNk1Z5Kwl2fX3++3Y10O+RYyCc4Ojnb3Fjw8uLTny3PP1p19Pazz/7yX/zP/8F/+rcv+1nQpSLgIItuh8kTHiy7RU9pD8/v3lXBG+Qs6oHJqKjsp9R6D4QJkMAhO0MshKF2B1VoUb94+w6r2jyB43mKCiBmORuDgrETKIzUuB+9/PHLt595rnNWQDSgUrRAzgzeOzVVkwKmJoSMoB6xQlQwVe28i0WyGBIbKqERGAPUwaEAqRwfnqzq8A//5L9OaufL1YNFs2ocoZnJbkq7zZxKUcCiqmhoRr9JFXW1k2LMbT/Ma2+PVgdPTo7/h1/+bKxDXbEDAwBJ0rYVM3VVqB3HnJGDIdf1KmUZkxK4bDDPmZxlid/+6Pzd67uSrXaUcqw9e8BhHLzzi8b3c0KHxJyjUNKIuVphZaC5OGdzzF3tUCEOw+GyWS4OpUfHMJTsyJlgEgjeo6aOw8u7u8WiUXbX2023WrQsJnZze0tRa7SDQJjx5dVmfbhYBixDrkBW6/qb5+dfvbnsC4sPVTja621EfH03MvGwH7tFlzw9WFTHNTriFfrVyYMKEMjG2RwyIAOiDz6bIpMhZhPHLnBtyHMaPVdm1E9zL+oaH0JIUa84JuYffvqF3Obz89OP/9aHzZyvpu35wRGqL2IRCxEq4T5mEQ5h1dWH+/nOBcDsXO1PTk93smm86zybYgbZxz6liFaWTTg/P3398suTow/HaQSS1cHii+fPmeuuXjK43fYOXb1cPGDXGCV1eDFvF0issL3rg69CRY+7uiqqSJNq6wFUELDyxK5JMYGWgC4gfOP46NnrSyY+aurtRXx7Nf7R9x4xyPOXN6cuDLt9RfD73/vkbrN/c3mlYurYkY372FJ3uj5YrOjZi6t+SOb5xe3dxw/Onp6f3ex6p9Y2HgCb1msxcLTvS9cGBHy2HTXq27v9uuKHD9ZSMGpW5mXAuYS3N7uCydfh/PjE34XSF8m67lb7nPc29SIlFnPUS6lEF+2iq3MjerMdV1UIhP/GqihH4bppPZ+u1rfb7Zu7dxW7/WZybFzBbhoQnQLup4SAdUpQpCrikSYH292IknOSUtUV0DBIgXnhsQpsc/rpL149Ovlg7Mvl1c1ujtQ1dePTPNeHK7dYn52dQSkHq6t+ezm7u22ahuvesgA4o5CzJKfvPT4yy9VBdfn59mC9dsoZy6vb6ymLb3jBYKiEOPSxOQi7fVzWq2xFFY4OlmC5a/z41Xx7efuT+dff/+43v3hun93ckBkbMJsPvq7YKZjm7e0Qcz48XYVV45SSIQdQgBdv9nUjVT3EcRbzaZpyild3V3sHgpgjoenSeDcMSVm8m9NAk0Lqm7PK3bbbu00bFocPV9upj+NcsjKgKZIhIMc4V66q/MJA1YiYDGAcJnbOGENwBmIAjrzE0jRdQpWM8yzBOTCRIq521+/GU19fXm2P1t3RMLdtuBjHMZU4K7N/8Hi5rh3YfHq8vno3fPbFjYJfHi62Y8/OlMFXfhSpa7dcLm9yIqNgoXbBWLLJdnv3/sdHJyeLv/jsdg+2z7gZ7KBqA64fP3qPkeLtTnIhRYnqVGsmIdeFalGq02VzvR1LxdOcv/nk0WK9lGmfc7q8vuHG/f63v/8Xb16/u7z45dvN0WQfPX3v9uq67dwf//Enj18e/eCnzwpKEbvdluOuWR6vppyylSlNPmBbNao2DXMXfNuEfj+3RCYoSfJUspIQ+S4MU6pVBQUDJeDgmxrzbhhWy85UC2AhavzXpwAJHRLd819M7ynHZggKUAwZkAytsCEiMAEiOm/EDhUUMd/XolRQzROCQVEGNRUUNUIgx2iggMAAosQkJkpgDhnFyBgNERHFOZ4S5QhmPonkoikLGJoKGSKCgBaDZCRRy9fqL71PQX1dh7svtX3NklYzICAzmrN6hw6IEAGAEarggiMAWjlwQqWUt69eL55cHdXtgGbe+cqhQjGMdd2EA8o+D72UXrXUlWPnA9I0T6UIGCHwWEBSDjUjQe2qouo9iLWWp6rzkTjPkxi5wlioFHJhcl4hCidzgmTFeWAnrpaqgkQs7AW59DVK1RT1bYW0oftWILAaGQv7EKqCrkh9r1N1NhRRYDVvOrHx18hyFAMAFMR710kCK6pEdv/jiFQVAI0RvAFC5UyVcyEzK9cAAmHlgZJjadeUgPodDpnbAG3AcbJZSA3iBKvOzTlPBZfZCFwWEAMBnLN0B5wHpQyAiASIhIgAJuqA0NcMzkxRsyGRgBADOaAiCPcSaMp2v9IW77kiVDFmFjUxUWJUAwBVMMAERmY1m2NUNCQVMwNQM0B0aA7pX0/IAHBztbX75LAaB1+yaCmt5/eOjl/u7taLtoDbzIOaFjFgymYEAmBMBAbOMxE2y9VXm/5d/3qYowIhQlHzyM4xINw7BZlJADZzPmiqqqsUgZOWrFZgjtJ0HhE2YzIhX/lF12z281wUkeeYqyoY0BQjMJkAAZlAUW29C97PcxZRI1x0nRRxHhBd0RJLutgPjfPOhe3QAxoj+copmYLtZv3Fj392iAcHfvHt3/12KljXXeqcmrZdG+fkADQCUHBdePOz6yjFOb6+3lfsaj5w7eLJkyeH7fKrV19dXP46TREOSgS9u9t64anv97tNjMlQJyozhNE0DzIOOWenhRAMQlgsGjJMosjeB4YpQdZpGuvGtctmKtIermXOJcZFy4uDg2GbqCEppXJtLPKzX/8aucRYVo9PHj1+7+3z11OKORoyiRoCJiiqVlSqwKGqKoBcpkXn+yEWgOXKR4GplFAHS6LEhphiZHbeo/Oubaqhn13wSaCu6XDdro+6AlCHykpORQ1wmCKjI+fEDIpk0iY0x+vFME9zmuZ5ZsRlVZ2tD25lMys6wJwyAJSSFsv67fWtMV/vd7///vnzi5ucDZRiLMSwk3k7J6CKHbMjBfCu5ooFLQ39u9u3dHfru+CTS5P1uZDjL19f/cM/+Vd/8Md/45/++Ed9tNrqEm3R8FHTPv+Lm5t3bySz49rYMor3X8s
MAGA7Ro++GBhANt4MyUdZxhzEFk11fsYukJj027EULck8QtNUjhEJmankXAVvYCjslYnMQQBUyxkdIDlTdUChDWbAyEyohg0Th5BLFMk5ZgGDIqoqwnOMDpwnHLd7NQneeSIBiDE5Aph1nhM77NpqphKHfcvATeuZpliOlidPjt7f9XeQIFuz6zdRQL0DK6oAZqlkBWQjlew4lFIULCuyQS6FHIoaGiICgEWQV33cio2iB13jILdQnh4vDrrmR1+9KyJDKp5QihLc/2MyUxExRygAAtIGhwRd41MqgOg91k01DHnu03Y/dk3dQJa8efDRN5+7zfF7D4vxP/8X//PTx+/91m//0d3tzfrpOgTyPgTncsx9P4rY6rghQnKsJmkSNty8vvlH/+Dv/+2/+789ffIQGNV0uxs+++mX/d02DSOZBUMVI4+LZbM+Wu3ScPXmerVcvvfJIw7VbbaCstvNwdUZnXE2AMfEYIAAohHBCi5aqpgzzgeN2+z6OaboOOPkqYJJzftmXdGUUtImNPM0sXoiSkmKzEQZwbpFlSmlaRYqLNOD985//YvXBO3tfopWtJT+Zj/2UyAXvDfCGAsWZAczltXxkgYcbnuHsNlM3PHibD3EuH2T2NaVW5W8nWcJREnUOxhiH0tkrDbbmESq4E4OumEbc5bo+N1m2m834xwtf30KqlB3i+U0DaF1/Zgqh6fHy+WyevLk7Pkvfv6NT77R1fUHHy8++mS5eftFs6IHZ2epuLf7sDh/j57IYtfvb/pQVYPqtEtEV4ftop3T7eYWmNpmlfPGNM1TrIKra2irdjMbGU8G+yE12Aas5jmSw15nQK08DmNfQMi5FMfEsXEoeapX3V3cVOXg6fn7n932bVuHrhqnGdiNu0ipyCBN20jWJoSK3CSlBHSOY0YDU1OnOhYg1KYCF0y0TPOEZD6gF+zHWVzIqk3gR0tPmmedk1rl3AljUtoWndSI2RGp41RstTxarA54HJC15Hh38cp5CDVKIqnXHrzm3HWr73zrky9++QX14+/87l87+OC7XK+7gGnKXdst68P9zW012mmjv/W9b//p5hfPN3c94Ul1+PkPfjHK9bf/4Dsvvtj91bsvDw78x2fduJtDh5vN5HwwG6fdqKOClJs+Xe7strhhJkGqOjeV8uZWEOjdL94+sNUnq3Pe3w67izzLvE3H60cvp+tshYAL816tiBBA5533aRoKs8eY2DCJ9pA90NqFAmiq7DwhETomVTCs6gnBh+r84dM3Q8wpgYH6qsSiRkUUFapQD0UrQufc25s3gSjlgubQMSBGVQYDQ4+OkAEgipQsIXgFJkOFMoswiSAG5KYKu1QCO/KEhIqWJRfF2832+ORkvnizf3vx6PTwZLlYOmy6ehj2N/vh5m4IrhKAYY6pCHoixH+9MJ3nucxZ2WkyVq6c/9Gr5zmQipERkmYxUQsVjWlGsoCwPlo3TXd5fTcb1G1dcjlomzynq7uJGETEYqwCoae2rV5f31FY5RzRewVyhISaMqhp6PjwpHl7dbtaP3mo7tmXL5zzq0Utsbz/8GyXhn4r+ymzazZxJLKqsCOXjY7q6snqZL8ZW/b9Pt9O06R40NZpmMdxWNbu4Kha17BufY746vMbT/g3vnm2uZmevdutvP/s+TtjHqMFtXdvvzArQ0mgAZN98/33etHbzf706MBNU5ymBnnpCAEQSYlKkTlFCk4K+/vnaBDnyCE6RDFt3CK7Ohu8ub7ux8ohTlkmxYnd9e0QJFrBMvTXv/6irar/8N/6vflqv6pR0pynaRxGClXl+R6VvZ3uXIWVwiLwpLDbbJum+eD8lNCrC5+//Gx7Pe728xRFW7/r7x49PHU+9Jf9R08ft2vfP0vEXueipgy+xq5pw8s375ir2yEum6Pf/fgbP/n0x947X4cmyIdnK5jFdwHBKZiqiZTGMzNW5JGAG5pi9Pv5gfcFSJKsjxa4i2/GfpfnC406zCBcId++229lnks6eXB8V4oM8o2zow65YQrqri/HTDZMqeaw3W2fvH8+5JymuWQUg/1UiKvX+4tV7R8v2g7SNmkG6iNtN9PNLF3lqnXtHC9FZT81lk5PFsXo4vpu2dZS0GMuw3az3ZWqarqWnW1TWlW+FX7+9kbVrVtn3eHwbvftjx68/eHd/39V1LXtmGya7MXbi7d3mz5BFehg0cU0WYoVIxgMAtL4JGWX5kPnT7paUnlzu0liaFh70lLM+8tpEy36uhm322XdbSBgH+OcMlAOddu2jvzHn3x4/tHDi5vNr3/+809//eU87bu2XqzX6zoUJvKUE9QeCLEi9V7TlDeb7eGiQ8l1IC2pDiRIdeXnKZqaiRytmtrjsO+pYOIKLZ0frvdTyak0S1dUpri7vr364NF77273ziPkiR2wo67y2+3eMTrL5nC7G7zjddU9PFiqlknLPAw55ZtboSLLumsQF3UwAw6BybPYnHQzjHSp2lZVB5AxUGUFQ7U+O33SuGbZLMFjkRFyrr0TAVHRHB1zYE9IYiU4dkbTONehds6JWtLMpKXkKjAHViHLCmbsuAqsZjHNvm587atFAwkY+HjZNTe3t/udc8AuHLfd3d3OBbVSukW12e/eXo/mg2+qIU+rpVsswpzzNIzBuwcHi5u7W4d4vO4w0vX1TMgrX6eYP3+7Xy6aB0fN66sJFNVot+8/jy8eH/pFUzGYmjjnMqivfHB+ynBWd+8fn6JMaVIxq7yeH3jE+Pr2dj8XZDpcNr/9weO/fPXMWbm+7a+u+pa6bzw517L3Ds4fd+tnfr+dnCNLOoxad76uq37b9zItFs2iqQ2orut+nCTlQFB5nuLoKx7mPpYsjMMYKyIZ40Z0m+LB8fHdPtaAvuoAMRcRBUaI09dSTCZ0CI5IxKSYqhqA3Qc/VJmQCYgI1Jjw3leORIaAiElAzL4GZBsUUzDKqiaKRAJGX7OyUVQNITjQeyCNsRQFQwO416IzYJxBARzinO5tT5qLClIWIkNDyAKiIGB8v/0wuzdlKQAY3L8QAKAZIIL9JpFvCoo5aUnFAeg9osc5QgCQJTP7EDPvtptXL7+g7txzKwkDc8ozcvCLR8fvfb9sbfPsX4n2appKFkRXudq7gjlnBVUU1BmkIIKBU1DwNTVGBn4/J6wYhVIqvgBkChhKRp1tmjyoa715NUfkkM2EPTgvhYFYkcwEZVS0zK4wgVMQVSwa0GpRL5zABIwQ41ysgEbIF8lSTaCOrZiZ2f3MzEQiggYqBgAEIJY9BAYkBUDgQGIlGrAyoyVNBIgAcaNaLHSeQRyVuqU4cyrcF1uyVQ7mooQ85VwBdQ0MUYoRqhExC6pCitpFaAiiCaC7d0HBvbVaAQp4JqAEQp5JTOy+E2lwHwYCNSRzxAqoYI7dNCQr6BiJ1BAJTAEc4b0XhImIgJgU9OtPCZjdF5zMRMFMCRm/ng7AIQIzmHqkMmdn7MgB0eV+gwiBoGacIg5Fk5hDYMKS1ROhGIosKibSd5ubu3nuYyYz70xEQck5t2y6fuqLSc4Ficm5JCredZVPoqsKTRSCdwCmKg
QhkFhOalMCNPEOldB55uBMaBbJamBgUBgUiRQhqznHoNq1dYxJCJ4+PB2SffbynfMOwVBU00gIzhEAFBECjHNad/W//eHTd//yX/xkc9mWv3P48GPs2v0+awSGYFT6cSCmyvnXbzdxnon5oHKro/pwvWx9a7kcnZ/kWb+SolHOzw/qQE/OHpioQxr7rWA2NhCpq/Z//7/5z751ePbf/Df/1avxi2KiqwoczAonZ4+cOnWXl/utqhkagIkoIw7RxNdl4c32ziSmkTKwC6Su39wgoQ91d9idPz5eNSHN8ObdVZ5mBGNn5JCVtahpMTAOTJ6a1j8+Xr66eP7k6SfO/C+/eOUYY8qtc2lOagZogly1Tcnl9Gh9dnqSS3r14mIck4genS7OV13KiZ0bxjkE3vV7JG0rh0DkKKbCnkCzmN3cZkZsnfdV5RiT6NUwiJpIEZXguXX+4vb2eNUyM9bVnEzMDrrmzUVfVYFLAUDPvvYoBgGoJspJlACj1oEwJb3dymoJzim5YtlX3nns2P/0q8/++G/8/n/yH/7xP3v2i9XBwmp68+WbT2+v+1uz7JDRBc1FxNAHbpr6/hTUjUNgyUmzqQE6jKrjkILKDNUytjKVKZVNP6akMRqjNtXsHYfKO3LrZTtmyal4z4iQx4SQHBNqyVjIOVAgB5WvEEBKYWVANjbUEoJ3WGOld7t9nzKCsUfX1Ew+piSKxN4I56wxZjZLpo6AHIxjTEnMNCDlkhFzXfkQqsO2G+bbcR6D5+2wR8I6hMmUybRkRHJo9wW6rFw5pwpjymqGAARoREWF7tVOBkxccrnNJaX0IJWnJ8t5nKnxR233u0/ev+v3l/1+NDDv0XnUPPXJBZ+mhMim6gAZcBpj7Xzl6WDdqZU6+E3f5wwrc48r/9sfnE1Df/n2NnAYr99c3O0Pjlbf/v53p2n49VdffbT6OFNaNDXOWYGxcVRXJc8hBMmiCFKw38d/8c9/+NXnNz87+cmH+/3Jew+GlC+eP3/zkx9Bv2/rKk2JGZe+9hXXTPMw1Z4WGb/84S9Wxk8/+sDiXFt1+eqSh7iuwjznAqIluaoKoWbvhpzars25TOPAjreWEJnMzPKDo8Pd1W4eiwNcLJtpnNCslFx7AgVVQ4RShNg8kxQhtgoCchOVNv2+Dg6n+7+5lqwScwUODALXUAShcsSaS/CsWSv2TARiUIxmWSn6dnV9NwQH8zyFKmyHsak55igMuAyrcHDxakuKjgkB43awWCw4qfA2zn0/lSwVhX89F/T7YbFcHh/X3QJXbXhw3iL1b171zXrhu7o7WqMOrh7Qp5v+7um3zn/6oy+Xyw+OlquNYTQ9OF293WwSYkxi1/vqKAy723GatcbrYbOqMarbzuUw+KauqiosGEtBUphSKuAM65xFSyJz2bQjJ4Dc1OrJeAJAIzJiA2+KMcaDxTpcD3l/C2bQLRDBlVKJzf3YOq+a2HPJBTQW8czBTJOZIyhF5wJdcJXHli3llLIAsRlI0aO6jgKMCIB9Ki1DTT4g3MckQYlzRMCURUXhPmqdUxEN7Eoc0xRD7e7evRIZKzcb9NSsuq6uPf87//4fokvj9S7F5E2G7V1Y1qi4qMM6lCVilWYi9FKagilOblmtGf/6B0//+c9v+uvoPTbLRsrYv3rZ3+6MII5yd7WZ4yZ0h8vVwX7sn9/mL+8UA3qDwKZgY4btbnJViHMZ390eTWMTp5//+Q9OmuWLn30GNiOVymsqWgX2gaacp3GErE+OT69TjNNNE8zKpCDBeQR2oUbJgFARmShoNtY6uOluj03FanW3wCEi2TBOdV05TwqQ1ZBoiEYED06bJ99aP3vz8vf+zjf+7O//sp+gRK0qxwCOWMyKGpIhSiCH3iuKolVMhmQF2roZSxmm6BgCoXnOqq3zZUyVQhWqqvXE/NWrN+hc5+uG6PRweTWPP/n81W7MgcARCiIgMBMQgor/zSPRMI2ALsW5ZXfQNNWqffX6Hbf+yBGS5WL9fvKB31zvHq7q/TgXZEoypE2ZMjKy4xTzalkfL6vHJx0zd219u91Mxbpl++b6RhSmcSQtjIQEqeR7yh8QapohNodVd3d1M42CuShxMn386PDtzW47RsdYVTyPo/d4cnS4udoDAYMi2u0w6Jzqugpt5bOYg3GcZJwXNd8b3w5Wh69f34i6Rw/PhjK9fLc9rJpvvX/y+naXTItpqM1TOThcvn03PDhY1c3i9avbk5NFyXPF+Ozt1RnR2apuOUgp98X0KcZiCmxG5tmTKULx3jE6RFegqOXAjXM4jqmtmiGa95Z53knaz30wq1kPVt1uGHZ73aD/85999btHRyyT5AnIyIVZHZpmKduxZ+drqAJ4MfCe3r27wtWZGT1/+fbZ5cVqyR9+8Ml+O8Yp+4o0zs1iLVHaENrKX15d7qdxcbTcb3Ynh2c7Ukcw9FumXGI+dO5BU3/1/LMKnCPWKB2H/V1c11UpwABacvA+cCBCAysqRFg7FxhsikDkFk0sZWCtOV9fb6zmw9ViWZMlWFfVel2VZJMtg3NNytvd9R//W9+/293mnL96e3k3znXnqaLlqt3u9qf79fl6/fzLfjZSj2VzV3v3Ox8cv3x96V2bpjztN0C1Bz5cLq/2fV+slBzn/M318nzdIJdpltspe0biUtV4tF5u+36xCECsqF3XgQdVnC/3508er1fVT1++rWry6O6udv9Gquj67pZd3ZydbnebbZxn4AxscU5zPGzcRw/Of3VxvQMYTeaUDc1lPW7a6+1NEozK2WFNwMzJjB0E9gerbq87Zikm+/08inv/9/6gOVgvW/qt7300vbr4n/7RP/rpp1/EeTJygVsVmIcYKnd00HDNm9upIja1dt0mwDkVqkPCXHI+XHcr5+v18tPn164hj3LStlxQxZ2uVrebbR8Hnafas+kVMdzezYs2INE0ly+evfzOB0+/9/RMUN97eM5+9fZ6QEg1XuziPopm1SIw345uiefvHRVzB4Fevrkplg8OFpoLmlM1MVq0nSFkFb63cxFRtRCP6LBMcexv0zSG1h0vH+tUjk5O99t5uuoXYTFMY1eTASgZe2bQLCXaROYKOnYEDOgcmRKgI3LsLerryzePzj9ANibMpZh3eZZF1xUwtXIPWk7DpLNydmMeD9sGVHjsP1zRck2PzlY//vXmi9c7KF7J7eKsmo6PF4e124qx91XV6ABdCJlgKhJjEaI8JMk5ZxvQX03j+w+PT44p3+2a/x9V/9Vj3ZalZ2JjjOmW2SZ22M8en3kqTdkslmF1N9lUi60mGxJbAlpoAfoH+gOCfoguBDQg3bQapNhSk03QieWTKlalqbQnj/9sfOG2XW6aMYYu4mQTRFzETQCxF/aee6055vs+jzFpwm7Xv8xkLUylkCEBrIKb0tT6wDnXytawCNWhCcNw1tbfOF58sbv79Hofk1HRCPqLjz++u7oqMauYpg2/eP2sPZk/XTWbm3XKwYKZB+cCTpimia/v9rPKVVWoGjcxlz5aADBk0ZQkqtrFuDxeguS+n9ombNI0dQWCqx26OlABCxXo2
MfRexesGbrRV8EZ+p8TFaY2CI4LE2FRvReWKwgTIqh1BAYVwVhSVkAUQEbDRQiARVUBUEGRkaQAKDDgVwMiQiFgAjZsvGEL2ahBREZSBQGjRKAgCohZFQhVNEpmpYKggpGxCOJXSmOUX3aGGFRUEe7F6gBKIOLx3mqFAICACgoIwkKAKhgLYIKZBWesMR7QIIJk4QI+YOu9tQg3z3JtLTaNseusznj27fnDdxa+8QsrYMacWCUzkhQH6EUMIVqJWbMAAiKTsvBUELBkLRNVMwNNaxMiiVMvRUpWK6iZklrOJKrB5iOvwDBGGtiy5NOlmIozcfIqliKUXHBmHDKICBA6S04UOwUhKTSlYoidwZg09SBbxlgsErDgL/lTqpAyo6KAABIRqIBBinpfyyJSra0VoSGzIhbJzqGqKhksWHq1Rik4Sdlxbh2tC+0SzxpczkyfI4tnsWPkpqWEMnGRrLPagCgrxKzTqLW3kdL9J0wVmNmAlQK5SLCIFlSU7uVeACwIrI4sCJJSHJPxFqEY5OBCKqUw1AacsQwCKAUVAJyje2I5kQICAKACw/2nDJDgHuEOcB9p+moVVN6A2vHQt97VMxfmM7W22w91HXxkFjkMvUoJDkFAUVMurbWL4GJia8gilcTdlBMIkTGEzhApWOuFoS8pizCDM6awAqqy7IeptiSZm1DN6ibG0SKxaBeT4QLOdik7g4CkqrmwQVLQolkQkEzjLQKSgUlkKgLKXskZm1Jp2uCsMShvbm5BVURAVEhZWUWNsV8pDRVq50pJ/78f//Up+aubz//7//az3/zOb/3u3/k9zwsQn6zy0FsDF4/P/uTjv/ji088fPjh5+vV3pldXw/X2wempEhytGrWGycqDX/ur/+f3zyZ5q2nW+8NqXrfzKue0O4wioIKtrVanJ+3jd/5P/5f/8z/7R/+P//Gf/atQ+UnBklt3XUqdD7Q09cQ8SvGOTh6cVvNg5tVqtbi7686fvLe5W09jTDFLJl9Vs0enKR12d3sWXe8Pd1dTnMrUR+sDGGQoXAQRBZVVwaJrbD0LmfnTu+uHFw92Yz8dJoKcIKBFXzU+pKGLQjQM2Vuqqmq9HcfhpQKFYKeJ6+CgyLDr44GdcbGAEkmJ5GxmsZZQwToiwJRyLKWqnGgBkhaqlV9+sbmOOWsqsyp4K5nLbDmXLKeLOdqmB7mN5fJ6s3LNnd+BAYdWgWahSalHi8HY2ppd4TEpWVCWo8o5g/1u19T1u6fHfFyGFGdV2N3sv+in/++/+e7f/Rvf/v4nH999dnMjfLjtx4SF0VmylVESQ2SaSr3r+nS/CpgAhBG1aR0CGjJRBCycnJ+C6CFIYD2MU7Zy4DJZ9saMwjRkGrMyz/poCDwRmowGDAKxeudmFlnUqDdWnPNKv/x+tjZrLiISM2SwiBYQrDrjnK1Uy8TJEQ5SCpTK1UQVQBLGoiUxx2HyCgTijS4WbRpUIiYuKU7NfNGl7EiUZZzGbd8lyQCiXFSZFGfBJaaUiwh4MpojAATvMBcVUJCci+p9JRtUFURJVYn6Is+3h5LysgrdyA6npqm+8eT4ZIcHLV/c7qeRQTTUnosaZ4TBGWxmlbdGON+nXqXgNJXDvmMyVvS9k9XFwqVYukk0paaxr29fzBdntLfp5s2Xz350uRse/+ytxWLx4YffPD46rU319L2H+/W6djRvm6ZdmKY6bPtPfvrJ57/4SdptfvTnf/Tvvvtvf+sP/qbM5uvu+nguGx4KaxZofWgqUzXBkNjGU00YYEZHnz+/sdWifTKPee1sziaHOgixU4uWUNEFLwYWswocJSAbjsskQy5g81Qmi5T6iBkMIxhyXv2cJJIkjmMSKXXbtlXYThGAyNqUYi4TtBVZKgkJTG2rCKo5xyl7Cij2fqIfNwlBkAFBS2QzDzLlLsaUkxFy1pQBXn1+d3ImYzeYk6U29vZqyEa6aWSXfVUF76Fq2hPUbkAEY9EZdMaqMadHy826u88jG/vVJvnB4+MvP73lOsybM1+Zb3/7fXApOz599PjBe+9CBnJ888VHFejl5a2a1N2uW4dUhjJC0UJsStY45WRNF3NSmNnBG6uqY5JF64jMOOWsNsaCCCVNPOGyXlgk9NhN6RBHBWwCppIEcdbOuqFrjM+qiOScSyXWvp2S5mKttcd14+Iuv1GziNXZSg25RN1m9MYyYjOrIpP3JsZiyDklA+oJG1+NkZk5ZRnGbES8sVm0qhyINKpNZQ9jPvX+euKxsGawaACkcGG9DyTjlFkNOm9zZrTIeeI0FkbNOByyFDXObjaHpq7SYcrtzjw4b1bnNsx/93f+4Eff/fNPv//zs+OnzckJIKQp3T5//fznP6og8zC9fNMNm2E2b99xdMDy/PUvwmh/59ff63ZTszp6df3q7//2N3fXX6YxHQ5MKnVdOW76joep2x4O6PzxAvdJgGUflSepFNGFqq2HPMBiFn2VyTrOy1ANuwEIT2YtHwaFgqollkAwn9mC0GPBtD31VaVuH5NQDkCV9aglBM+QDamCIVAyhGBmTTMK7A7RT6IKWcg3jSgolnwPQwAQYapl/sCwJmftLz76+Pis2b5MSH5ITIiOQBELZ2ehCY4TV1VIOftZZRVkKo1RIXVkqwoNOQqmz5MpBWKpKgsgBvJq3s5Pjq5erp8+PTmd2ZN5Pe67v/7o5c0+A6IFjDEjESIqIAIYRw8eHn12vQWAIrBczMxm55352rd/5bs/+Um2YYgcUq4b4w01wQ1cVAGtP5Qp1IupFO9DhSaXUoStM+OU1pJm3p6fHj+7Xk+lgPGHfrTWSFYuCkQoYA3N6nrDA2ppavr1D55+8aOrWd1MU0mpnK5mQ+Yywd06Hbp0b5V58uDi1ZvnBimXsWqgpPK1d955+eJqcd6MwW7WB1dKFAUowciHH54c9uM06TDyzz57M2/bOGmyZTPkzDA6nTfu9Gh+u7vNRSbmWeU2Ix+dL2nIHkoRfvnmNkmB4L+86RfHMyo4xbhqGyDTxyHnYpGQDBnyRubUErAzzoFz1ArGjEoI6/7QTwxCFQPk8snlZfLBTjoNA9Xu6fLiSq4Hlu1efvrXH/9H/8XfbgxG6DaHrqCJIJwmI1K5JjNyBtacOXHWWRWWR0cd1rcFyLlNd2j2r8fc195crOZ33e0H7//6T372rAqVq5s3r28E7Ho35CmyCBoWihOkUcq+H44Wyyj5btM1dZMYdYjtsp7VQbKUpMaAqoAWDc6CAbxXJEZhRYXGOrY8JRmjvhz2ZIwziGoqb9MYH8zn33jy4OcvX1YnqwXBdt1BzH/wK9/4+NkzG8R6F0VwbqfI1tKw7aqon3/+5u33Hj54tHqx6TD4/Th1h/huipWBy7vtzDdzu8xxqA3MDMwfHC8W/m4/3OiQtPzo8urdhw+9ivC4T2UahjrYV9vN2eqoxImAmez20KFlsu78/Gz9puPIZKpslTw8fbj8xfNX/35UZLy3lu4OGx7TrCLL+aKu5QCFXRvx6vnVEOPIqAQVg0VqlONuR0OZVV5b9yJ16HzObBw9OJlZmZCnxvDRoj05
uXDUfuOdr50/frzf7k9W7ff/9Hv/4l/+IcEBvB7N2nGXHRpWkAIQMIPmVEJd9YdiDFky2/1wOp+p6Ml8ab2tF1V31725HGrfuhLfe/eYI2/XCciNObPTpvXFY5ySAUp937YLg+iD84hVqF7f3lTz6s3dFqvVxdnp8uKpjvuL46/vpsOnX3z/4+ef+dqb2u3iuB2SJdKks6YeY+aUQqjTBIdDj+hERRH2/biqKtTy+MGFN8b5KsfxuLXrXU+Fg7ptNxamOGQrWIoOZYopW5c9orOVAoGqs5aVCSHF7JxTwFIyKFgycRJh2Xeb5+vtvImzljJy1lw1VRyzAhokReyHDkazmtVX6/XRvNnsdZjAsLoaF/W8S/DHP9i+2BRn66N5fXt7CB7B+UL0Yr3PDN6GGHU5p7quXu72xbpsbUJRI8EbIkOuGRJc3vSL1s9r228nNB4d7FOujTXOekNQFFnr4CELJz5/fDYxgWt2ssuiT5pa+u7q+g5MYANHtf7qu08/ffXCWqrJI7hmVvUp/fTFy9P5u6ZYZ+zjh2fx6maYIiIBiSLtuhEsXRwdbbY7dMZX5ma3WVWztqpzKQSYC3NMzJylGNbKWARThTplcUTr9TqxVN4Ii4Il56raA8HmrvuqhgkoBMbbUgrfn6EiFAAyaAyBQSAUBhVUgcxiyKhRp4hfJT5UWeRe1CsI96o0uu+UgYC6iigYJoigiiqAlSWQe9ODKAsDCJLQ/YhHFZRBp3xvyAZCBGGDoAIWUUQZkRDhnjijWlhVgYjusceI9yo2AFXFe0T6fUUNGIgFfbCIaBWUwYJFa1LJAmqNG/fXrz7ZMmsXkxQh57hI9/Ino/+o3x6m/oaVuZAginBRJG8MEaEApwJEqjklVECDLJAyolo8qJkIrUXrrDWOQIAJRMQdGMeE5GVRyYLUFuyyu5swJdqmEkapV0YAwVtBsEjKYBClFLAWgVSNIvJAaU9x6wEsVahmT1ApKiDcR6uUFb5KXgGwAhoVQUIUAAFj7sXzAKTHtY9TZCYUjFwIwXtzHy8jRY2QO9AMlQ/O6KySxDglnCQHg1UwXa9kKGcxE7cBYlLxGEt2YIiosJkSt4GMAWFGItT7eRCgEAKqFCIooIBovdGS78c7ACAsWUDIFkFLEKxRhS5nb4MwF1UBJQRUEBAkCAYV4D54c//2EwIhyH28SL/S7yGA/HJWNHVpXpnV8dIZfPvJ+b4bo0FAPfS9JvSegreMOomSJvLGG2pVKykE7L1lYUIAb4TcrpscoIggg0oxxg1TQjQhOALRlCXnYBFRrcHKhynzkEYRboIjtGPKhMSiBxaB+PTitJumEkvwdhhSAVBERwSZvXV9KaJAlshQiWyJyJkQQhXc5WY7TtEZYwkq74M1+34kbxSBENu6GqesUrLh9dRMlG6+fLVahn/3F/96e/txjvOH7/za6q13xsPu4aOzn/78B3/xF//6t7/9W7/xW9989fz6aFE//va7u/3YdbFezZLoPPjPLt8YV1nhuWnLZifhaJgKGGIBY8hZgGn49Gc/aJqj22v6L//Bf/Pg9Oi//R/+6ZS0qBy6wzB187YGH6QO5289PbtYjC9vJWra77Y/fg4IW3xRjKmOlqGu72K/2+2rtj49Po6MtB90mqZuRCRxWFUh2MW62wxjhiJA6Lw9P18WVCTulFmwm7RxKEoXT06fX3WLo6Nvvf/+X/7lXxs0oOLIAIJBnrd11/WJYbmYVTNTzQPnmEXefefRq9tf+GDJYj2rMguoDX6ZC/fcG4Pk1VUuA7FMHnJ/GK0zrjYxs3q/PDq+ubyabHIWuz66YB77+vnddrWst3H7n33nmy/+5MVhGtp6KVH6rtciBakQpcxsxNfGW8NJCiuPPKsCaA6ii0W96xRxNCd+N+KnVy//j4/+9nfe+/Y/+jd/uGE4pOLrpgo4q3yfSyoFRI1wUE+/bOZnLc45F1yZijMuldK07vjiyNezGOP2bgulFKPVMnR7UZIkYkGDpZRYCbZxrCsrzoiUmMQTGiFNaWfQgmVOzmhVeecDEeTCiIUQ1ZCrPGcJ3hoAVg4usLAadY4ERvRiACctqgVBXesFrKqbbGGAgEhEg6LzNaG1pRjCpp0pIGdwiEOOKEwG28pBxKlgECQFAlClcSrOlNrm49nydZdAKWc2CMYgsJAxyr+8uQCgqhbOIq9jPFThwckskEDiZuKVl6+fH3/rrSefP1t/8ubVduJuUm8oWKgCqhP2lTPVdt+T0cISuGBJj8/PHj5auZytwWGYDPsuS7OkxdHZejeeNG2/3beOHs7r28+eXQE9+/hzhXCyWj5+cnG73o/dsJgdf/v3/tbFWw/++J//089//IPjthLrkhbO+3/3R//kkNW3/jiktg67w/5osZBcqqYmZ2JREBzWebE8f3N1fbjpPtPPf/Xkg88++kUwtmnMbF4VnmJW670qTcwGbeV8z4Odz3f9pKVMh8P5o4WhKqcUtyNmOnS7urYZsSwX3XZnhTAYAUgeVDNZdIqBDEAYVbVAyjwBoYaUp8wSUwZE62zJ2RnMolXjWDkPCQDRojVEZMCrBNAs6H2a0uYwZoQwc7bBhCKsJYr1vvLBV84gxN2hdoGdAy7L5WzmbXfocsbLl29iLKAAlli/Avpu1mtLksv4ox//ohvNjz55/ehJ+8G3nzTz0+Xq4XBzWfrbs1Xoh8NUKoTm6koJ/BC72XElXl7dXL91cuG97/qkBsGY/TS+//jt7eUrWlWzWchd6g+lMIZFAGRvrUhgtVVY+LDPu0NWBuNWizql0VgbExq1GNWUeE/9RKOpZGNEvE/GdHG8mM+33ZDz5mRV5c3Nyersk92NEi5Ojrvp0I25IJraESAoAwABDimJiDMAKqg8pDyb15WnicUYoyBaAAGLQkS8Y6gV4hSXlbM+RFEWKaDe2bEUZ9x9eFtS2t/tqJ1bNL6e9ak/3OyqWVjMgpuVzB0v2l3anp2dv/fBh3G7/+LLq8xAjMRwfb25/Ojju08+mpVDY+XZFF9mnjfuvK4eCCYX2vPTpLgdp852MPVm/WJyMQuCNYW0WjV6kPPV0hrHL7Nm3u4GKxIsIMg+KVo/lcKHYbFojh+fg7evP/18v12/8/jJjz/dv16ndjkzu0FEhQEsbFOu5iYdDu++++5tX999ed3td2fzJivWtmIBQJy4kKFMqorBGDQWkMjVHGPCbMmIUBQWoJOm2e22WdkQeENFzV7GZ1f7Y0yjs7OT6u3ZyZ/95C8fHD++7+UrQmUNitQ2AKgxeBj7WWhn5+fDrnc65VyYEwsyeO9mlnzi2wKlxGjReOcV4OJ4lchdnB0/OV+GMpKk7/7gJ5cDg3UeFVEfrhZvDl1RIESLXJFenM2/2h2D3e57zNNbJ6dv7t68Hnbztll61HE6DUELDJjHVFaLWnM+nc3ZEE3lrPa+9pe3N+zNEJMxJhcI8+rZ5e1BpJ23zDKlaYhFihy1c28glyRSYhwNp4erxSrYactjDjHJ7Mgmjg8fLD+6uj2bzbrNWEREFZBndcuKrLg5TCRwdnzS7ToMtC4
5JmZj45gm4V9978J7uD1Mr25iMEZFrcVmOVczJuWnTy5W9eyz55evX/WzdjhbNN75V+u7yofdZsSTxild3e5FcSi5qTxYBAPOEaLOZpUoE6gzJNa0rnLWm2Bb5zSDt0FVhTGbjCBkQwTsSxSFZe3i2O2nAxSsFtU49RS0GH61vx3HKSMedt2TdnFi8HZ92/H4s8+v5+0RGohTt2yWAA4RKtuOac95Ojprt9v+5e31t999NHzJ18NgEPpX65l1H7z/aL/fPzy+eP1mv9ls33v0ZPvmZr8bZvP5m30/m5+MUaKUzXhgNsG688V8sQwFlYLDYNKQQTEq3g7TwlrOKXMJ1pKxsWRS8eRJ5T70z6JQxIGNLGMp6yTz1h27mlH33bRoZt66L66uBHC3OexTJm/Pj1Z+dvL6xedfe+fEWsHaE4LEaBRQYHVy/Mmbm3mfH1wcP7u6doglCyM/e7NdeFu7urL+4ZI0UynlphvHMY85r2q7PFpwTotHZ9lI3w2GJHg9OZl3fXa+vtlP+xjnlTOsoNz3sbLVFMvXzx5GyljsKH3rYX0Y/oNUUUo5FR1YGiIhSkmzykVzjFycA++Mc/lhnBZzK5kBRUse9sPbj5Y56zVTP2uyCqRYG39Efj/slrMHH/7q7xu/LJzO3jo1xj97+dlbD87/6E/+8Pvf/z6rLuaQEiuksydHDYWbdaee3n1v9cmXl/s+efLbAieL2jfGaE0M683uZLlMrJLdYTLDoMt5aJsqTppiqds6hNk0dmKm8/NHX362BW+TDWa+fOu9dw5vXqv2Y+o4634YfOOMCX/5o58/eZA+/NXfVASTxfnF06ffKWHx8cuPAybTmg7Sk9OFlpzZt7BgYfIVWHteLQ3SIa6HaUTV/X6sXF0fPZjN/W6fy6CTyeMQM0MowELeHpVSL+fudjpYslU9RzjEPFhEiy7m5Jz3xgeyAuB8VYooFLLWBpdyZmbXzB/Xx8vlonDKJZk63K53FbrbXdcuKlBdLFfjeFiPcthNYSaF8/HSn86qeWMvO/3JyyGOwuhba+KU6rlZzKur3U7UbffFYcgIVUv10r/eXw6GcuH1oTttmuOTJpfA23GIUy5yl6aJ85MH7Wqen12PqChMA0NDWFJWQK/ibZWHsevz8Tw3s0bBC5hZW339ybHq9OJ603Gd1Zbijudn393/nFlr5xlpiDkWHg7rTh+89/7Dq5vy1qy5OuQxbwCK8xwaPwzajYmvtser+dXmblYaBDPm6Ly3PuTEOiSyVqhwzAFdKgyMXU5CYB0RqkVsF01MeXsYtcAYC3P+pfMMyKKCFiAFAyCqep/IIAJrDRIioABORYWBlQjQEVkyoAoioKJ6b5ZWUUBzj30GQrWGMBhbkbNqEGNh/koCBYIgqEIgCkXpnowEKgbVWEpZimJhJAUUIVBLaAzcy7FAgQgNqCVTVCLqoYCAApqvsNb3HnEAQBCEew8XomLJrnANpnbeGqOKaN2kGlNUECTZD8O4x5RxUnUqhRMA76/2kVVFMJecQe8xzaqlcEI2agSQABm14H2YBQVYUdXYiqQWxahUbAHyBp1XtM4ijAJSIItaJhICEAKsRF2mqK7PZkqIxeisAAO2ju5bgtaRc4WQAQCRAKZOIQftkSeThEPljLeSk6KIkIqq3sfBCFStWhVFMqqIoiwgRKJCBslRscDOlqKKBAQZRKVYIBF1ZIg8pwJiI9jCkyNprY1JOgAiqBvqJ0XAWIyM0jgwpGA1JbBkHAIz9ZN6r3Vtdj0DEKoaMgiUUkYhNGAsJtIKCVDJYAYgh4RIqCySGVVJM9cVAuF+ihXVzDoKKECwVkCAVEFBGBEZlJDQoIiwfvX4pYiEKioIBP8+VAS1d4Fw2dYiMIxq0cQpm8RL8myhtiZ7tsotQgJbkFiKQ2HVfkpIOA/OOfvs7kBtc7RsQWHoYlW5WdOkPKQps0BRsUQKZDyQMbtJjoq2RsmqEk1JDLAIJ+EsAkB9Fja66/vaV6WM3vuccj9EJWBUYDlpfdacC99j5sGaSfTRxYM+Trf7w2E3sqIRqWpPSikV72wszAzWUipZWILjD7/2HgEZ0pvt9V2fpzf5T37+sw/OLr52mX6/nfkq/Hf/8B//+ff+5P/wv/uvGz+/26xrD5jcpudiHTjd7yJbN3b9xz/7Ravw6HjlrW0qR8R160xr/MzJlosgAP3sp5+fPvrm6fHFWmbv/8rvXzz8wfXz181sxpRFK1wct8vmaNbs++nw6iqtN8M6G7m/VmFNbKDfH5y3y/P58aNZP0w3L2+Oj89P3nlrd7vZffa8HMbaYjf2CacqBC5CwSIhE4gUgNLlogjOut1+71dzmjdbCqWGhV/9+IeflbEIg5I6R8IgUYY4kKGmtrv9ZC3FbaoCZiyvXt9Y51OGfkiAhsjM2vbJ8fHUxx7qTToME1gVMrBcNDyOs5UHj5yEVZloH4d33z5a56mdza+6hEQ8ZYuwS3kf5c3gTo7Py6Yng2gBWVeLep3TLueiXCG0FgZJ6C25upR05J3qhNbaCk7mR29u+3Fi8uHF5fU/+R//5MP3PljYP5uMltoL0ZRZc6zamU5EImSMTFG1fLUKGp+KqIqrPQuknJwSWD30B19VEQBAjLMFYLaobBYV6Q5xuqd6MJAxTW20dnFIiQWNFWBBTXrPRlFXBIcJRmIFUrKkRksVrNPaOEqxgCpzsil671XluJn1sS+iWZUxjTw654NtCU3lWmqQUCtjOBUkYwiQue8nAvVhLoXHw96UCIq1N1yEDFYWKmuNohQ8xFwUjCFCWoTZ+6fnh/H1xHJ/ZytowFApSgiGQFVFVKOCIRFJCgcsZnNY1A7UXt/mtrLT67uULr9+/uAPvvZ7X1z3/+rHHzcos4Df+faTzy9vP3q2A7RtIHJw2vhH8xOjFgk9qIjEJKvFwhymbtoZI1Oiu92O4LC7iou69cbMEIxDDxmxuP3+5sdfgq2s0MvXry9ffmZRMMcVOlOmcTjsbse2bZbzmSMz83NPdt/OXAhFABwyai6iBaZDeXG7myp68PR4tx9i7r98/ixGIM3N0o9xWCyaIRrnPBjXDV1T195rZeazpuEhZWtPVwuY4vJ09fLyyopIpqN2gV62U0kOlk/mZZvygKiO0GlRJ4JFUsoKgoY4Ms2d9WaYxmmcDFjbmAMP9cL7IN0YxaM7nU1dqlahCdW4nVgxFzWiaFFJpxxFxDsfc2na1sycEXbFT+NoTfBoW6qwsImy3RyqOrTz2TTloT8QiKNgAcK8GmMiYz946/zZ8x8AgMRh6vcPH5588ouXscMtlPmyjENy+fD62S+87B5fjLe3mxdXQ8yeB7nlcXnkJh3qltHkrIfV/OmJrzfbBESHVI7aupt6G+xy1tTGlJziAOJozFwYsSZh6WWaiikag7Oeqz6ySnMfoDh0NtA5KhoZOd35JjBpFBuqedl3HrCM27a1bwZ5czi8/ByX3rQ+7w5rtCIaAbWpXcdSWWuFILM6uxkmkjRvKxmzEam9Q/RjUbiPjhM2jU9CKZVNnCI5RRyFCpKynaPt8ogAaG
xOGe9PK4swcyqZAGbzmSGj5Mj4IY6HTZ+5fOsbb6/mDRT1datQrTcoeJTyR1Pc6lRd3faf/+gX06tPzldOb4bK+vNV0zncDHl93T1qiMQezZZl7E8uFj9/vf2DD57+5tP69vX1ch7O3z/Xo+Xm0D342lspTv2hf/j0OL64O279goz3dLebZrM2hCpttpaonVtPZeE5xZ3j9O4H85/cVFfr9KtPHj8iXD97CeRYKI86TRls2MbhdnwVlnUFTVWD9BiC66YIaGLJeH8Aa6xFjDlXvk6CJrgqUF21V8+vR5ZJtbLq63lJOzDYDdGHqpC52aVEUj1e/ezLK/929Z3f+drnH11aV9u68s6kWOZthVW17w/HTTMHqBr76OvvZzCXH39i+12dXOr6kmHKk7OlcnYoCMG6yvXjdHJ+9uRr33xzNzbHec4Ha/G7P/nFth8cBWtVYvHBMZT7IL4AKOFs5tv2qxomJxRO7509+pu/8Rv/3Z//caisAZGiwpJzMcb200DGDIVv0nSxXN3sts6Z59d3TajUWxe8yckH78nGorZtqS9taC9vr2OWksUiBm+Mo2kfjTG7w/hgMa8VG6Gbm2F5tOr6AwgIa7fpGqC3H5//cP8peeMMzYz58vI5q5IBZCMZ9/tClT5975RTeTnuTaB5437jvUfru/6TV93YpdDW/ZQA+XhRf3m4WhgfsLVM11drVD0+qtFAVEtAv/3t958/W7eenAub9f7oaBFhmLpMVt9/9+EnX75oG+IiINjlCESOsHV+6UJTNVET5WzBGCQkYlWGwgwD4PW0M2RmlctpmM8JXR1yvx72FuT0wUVKacp5UkngVkfH/+vf/1uJy/dvN9/77M2TkwuZ4skyzJYn20M2WABg5DSOSQG7KBkxSXn3Yfvl52XWnL14ecsFRi2HfQ+q33j3w1e7g8Hw+PzBy+sXo0AuRdKElU9FE9i7fjibn1oFMrA75Lu+86a5fLM7XjaPHs9L1oNqICiIhjAD5zJZEyiLBhyFldkaa61xKko4TqMxAhZdoM2QmmA9+dV8xsD7IfWltJUlVmuh8vUXr+4Q1AndXO+a2tUN3B76J7PT/e6QkLUJv7i6kXn95L23P//0S+vq42o27svbp4+4xJJZJOdc0BuooSS+6eLrV7v3Luarxj9drb7/8vWmy45QyYxj1lIYsZk5dbBw5J3rux6cbQhnQUxVLl9cLU/av/Nf/YM//LM/l/34H4yK0CJ50gJT5pwV0cUszQKBZWB50/XM+LWzCjmNU6xmfkxlOfeCOaD95uMHpzXsh/Vpc7S564+qoyfn75vmGMiN/QACMpZXr589eNi8efmjH//437kmhICtlVLSctUuj6qx48hFi/3i8m5eN5IUFOeNz7l03XTenqXIKhZBN9vduMUhA1kfvGMdI2QbzBhTHHacIjq4vtwu62Z/2PYRTk9OabIl4tXlenY0O3+8QtZxGpvKepzJ4c1P/upPHj169+jp42mYdq/eGKzeefDetx4f/exnP3BAl1fr2pnTo4uhn7znLHsCVQBrq4UzZFzd1HHUt5+8G2qXBUgNiN2XYRRmoy82rxerJ8cLd/PmTc5IVZi1R90gCGqtJWBniQURwRovIhOLFK68JyVAUFFjTVU5NdaxKRpjjMahojTBBjGmnalHBDh/sPrJyz1lKMF2MTUenpz401X4+Rc3Lzpl1VAjKZSSvTOqEsS2qmVKxthImJ1ULUw6hsqNOTuEk2Ccl6MTd7x8/Oznz7vrbbAYmJxRGbKxhix2I5vKJxaThVCNwcYbVK6Cawoqq1GdiibR5SI45/Zdz+wDmrrGucU//cFfjElEcJqStw4RCMQ7/+bNzalxmqxgWbbtYewMm6lMZcqLymjKFnLJ01FdK6hztrLuZrOfLxY5lfN53U+DtXQyn223KQRbhPvYz5ZNHcxu36tAtz8wKCc24HxVX77eB//LVXBPh+Zyn+ohQlG2hIbgPm+YVXPUzFgKK6ITULofDSEIZGZRAQJEICPi0Bg1KMGQIbUVOW9JRUQsIIgiEiHofXpIFBkNgyiwilrKCEqAIihIYFGFEByqs+AJQe87bwT3ZB9DFoBQonJkLKoe4d7Xdp8tEgFUBEAQ8aROpSSO0RqwrjWVt8Z55awgSTNLKQgZMRMUIUuGOVujXEouaOCrLhcgKCret6cQ7L3RTYBFRaFCDKjWKCgWYK/qQQkQWB2SKBpEFiJAE1mTGDC10SDGEggQEXiTRYgUIOu4za6Qslrj/MIaj+SCVUuQsxZAYCnZCvhCTktPlFFYcxdVFAlYFQDuMYfwy26e3DeyAJDBCimKAbUgM2eBOU2csxhriwqAsKCoqIgBrcgZ1RhZiRQtqUiW+v53RgtSBYixkBKrGSdFZGfRG2AtlhwxIEDMcI8pAxZSQwDMwnp/rqkIqIJMiMJIaFQQ1ToAuR/+ALCACqpJuQhaBBCU+6RQLIqIyoKIhGgNWEOqCqyIYPC+R3L/t2CRAOB/DlMAQExlbnzsp1nVeDahnuW0sZDb1hWnKiWnsiBo29AhXHZTSswGGuNCsJFziTIDOWlDRkwWBNDP6zxlBS65gLAo5KJC9yJBUC6geLPtpLHWgHNOEKeUppgys0UQgioQqOz7/oOLxzm/QQQRtgbUmVzUe+vIemOyCKCWmAza5XzRD9Om2w3jhKx1MARUMoNKTKkKnggzCxNYh6QQAtlSyJjXz+6y1cvNLqA5qf3TU/vOE92tv/xX3/3rj19evv/0a8swW6+vhqE/WSxr35K6OA7CYAwhl4erZoZdmYbbA7Abj7bb5cnD2E0ckcSgFlJ1Hov2P/yzf3Nx+vSw7t46Dv/p7/3Hn97+43V3KMSucrPZzArwzV2+vhumzjtbV5VmqOt6tzvIVEoUIiwmbfqoX17Pl8259f3nz9+8vjVVePvdBynG2MXLy8vaeFfA1t4bQ6DGmjROAoxGj45nBCgWFWTXj+5J+3f/87/NP33xZ599gvelRAVglSxiqIASGwAlUWHBnEuRcDJbLtpXd3u5L+kiPVytKhbf7z54dDo/ecLV4k9++FdF+27YmsLzyi6sHUu2jhyQJ+Ju+PrTB19ex+3d9sPzFQA1x20dqLzaEbiPPvrMSlnWFYHuhunh0Wwbh1wEyWLhypvW2cj5ULCfCoE93AytAd6OcDkpoZKpCeqKFOWHn//iN3/7tz44f+v29SfBVWZRDbshOGudm7pBCxtjfeVK+WpkKqjGmb5LjIKijoyMHHfZVXa92xuPOSlPCUSNhVCYrMXalsxkzbAfCBXBSJRpGFENl0IIhuwkLESKcp9rdM6I4hQTZbZQsgLHg20q9LRoF2gsqCIklHKz6cZyIHIGqQq+oKa8D6YQuDQcGNBYw2yQXGE0ZBTQBgdFd8OBCmoWZ/xiUcW7yQFIKRqT846QvfXBUGJGgOCrWT37+MWbnMrc8NffevzFzf6zm72gIUQRKfc4fAUiULz/TgUE6KcirKhatcGD474E61+8vLq8uT5bHv+9X3vvqKr76bC7WR8n/p2LY1K8eHBig50OY+tcijxNO
bHJOZFxuy4NY/S+iZM5qttnfGONMehZIcfkEAwwMasIOvDOF05tXZUpIyqC2MaLqK9o5UMJJrTVyclKFaRwt997pKqtd7uOgDCLMRRCSMxtQCjytYfv/OX6e965OE5VBcFWEXRZNwRGlRNi4axkp6J5onkzv7zcNU015rHywbnw8ubAbmZsPdxtQlU1M7Mv47yClvI29odunM3mi6aOffGAQxeNqcZpMg6xV9eQtZLSZK3JYoDIthBDTOP+6PTItVUaRuvJWIrjYAm1YIw5BO8rn2IRKFIErXG1q448p8mOxTN4IJk4p3IYUhqTJWeVxt3ERWwFUfLDh6uSiPfje48udkO33+43V+uvNsm5xGEY9ntvwFnizJzh6vV6Ej57wrvt9c2rm6bJt9f7tr0wNpAQgPWBNnddYXTN/Oa2W9VNpYepmKZxnlA4SqD5ss7DYeiysEFHFnTYZYMEFDgmEIfinKuzctGxG03UFHxVmJtZfTj0Dy5Ob6822UiGWBrqdDevGw8kVE6eXPzw5ZsxL8ZtKQtLt2sMfhlo3x9OVrOYofRjZVBQYkwpY1H0zgx5urd/3PSpCmYZDAiSmpjUBBtZomIBbUhqModSBsFcStJCAKRaBBwZZhZQ/goQCSJijXVNC+hsNkUEIqehPP/yrqzKt9tHyxCWNQ7d5usfPvj8C3n1xScPYiwxp+4K42GcNp4TgWlrM2vNyGW/kwmwOZpf3eywT4vVcrPe/lff+SDuvzh7tHKKVWP20+SNLbkfun4aYtfLboQvtrItoJBLQeOSt1PRfHbUlJKpS3A3vPjpc1Z6ffWmQNlqfnXofuXJo08/f+kWwaBZNbOCxtQFujeh26H1Yn2fJ7Hu7tDNqiqnFKwVUmspeM+ZExdOcSqpnlWK2nMMrZt6tqJgipb4d//O3/ijP/srT25I03LlTB36Xdntd5DtzzbX3/mtX/v8xy+0kqQ8U2eFjAvswtGioTIgmrZ2xhWuKzpqSDdh6XuT4z4RSBNwGIEMSMqGHKJhW41sdjE9fHpyYt2LL794c71D5wOQRUCC1hPHCaRUwUOW2UzPTwLhV8cGcexOl8tffffDP/rhj/ZT8csWpGjOMaa7/cYbd9o0l130SpUJ1plhTMzknRXhzCWXQoIBQEqKasJymcf95e1tShmELBAppFSkaAYQwsV8PgNqEB+t5p3CJ3ebk9qDweao3o8TRrh5teGkmgp6M+U4W7Zt45PA6fHpcMiJc4o5b0dNvKzx4ePVuI6Xl/vbdRcTGGcnyQk4WAARmbiaV9MUp2k4dFmIWuAFhX4cX9/FEuOqndd16GKk1RytGERhSZlfvbieF2rIxa7fcS6OCvO8Cq2zOfcxAgZb22DIF4QYJ7LGWBh1utwPG5ZVPZuHMA6iFqWoNegyGKXcTfPadzGdPVh8+Wz9H339OxdPF//X/9d/f8dAyd1cHt5f1jLubbOovE1lioIpqzXGWhqGsRu6ty7euvrZC93E5aqNs/pmdxBmFJeZZ6G+u/tyvpyth/1uGvcppyE5oKnrMwMZms2WhISk/eFQuerE1SdHs+qidVqCg23JgG7gTKjVPfqehTSDNUWL9Q4tiaqoEhlJ2SOeNNVjB21tP9+sa3fkAr252y+WgQuDKmjxCJAT5/jyZt+gZaGr1y+bo5NK6EE4yoMa5+92mwfnq/VN12227zxYfePJk59++ebt0/NJ8812jZW7WMyb2q7Xu+2UYi5gwRqqjttOYXOznbSc1xVPwsbEIsO2dwKNkfPj43XJZczTVGoyiFoHb5huNc8ujvZj/y/+5f9E4HPf/wejIiJwCFF0kNLHEmxt2+Aaeu/p0V988WpjCzj8LKqJbMmk7VTyeNrWXik7TamLhnQs17vRhZO6fXtx/Na2TBMoGK2NfvSzHw3jdurd9//6+4bcKOKN3Wd1VSCG3aZLIxDrMGWVIg6MN13feU9vPXpwc315tbmJIz19epp0YgeZdTar5+1st16HmRyfnPUHztOQh6E2SMXOl7NpFDJ+WZ1dPPrg+ubzLg7GuaZqZovWWhf25s2bK2tptmh263T96jnw/njRtufh1edXt4c17m7eungyxHTbd5d3RYcpOLZQ3ExU8/Xu2lTLumpy5cf9fjr031x82xkxlZGYgm+7AbEkobLvDkfHaimP3SEiLgxVvsm26Ye7WWWsRVY15AidtyHFLJymNHlrHCGDsqiCiiFEqrB4h1xMNwzPX6xPZqePVqvQ1AOV8dDXLHPMXdFo8lvvHD2emQL5xc12SmVeB9vQ3XoQ5Idny8vNdLcfHVnvnEVb1ZCEw8w8OAt910uWZXCOjHCC7H7287tZk1wCm6mxgSkdxjJVMqvMW4+WQ5ze9LEhF0du6wqLNsZ561LMWVVAr9aby3G64btvvfthswg//Xydi4rkwtK3DAAny+V6iEOMtaWLxfLNYVMSf/yzLz5YHtezozzFxfHynZX//MW1vR9sEU65ZE1K5qhtWMpu6MVXaCADh9blXCQrCu7HDBY32/XD5XmWVLLuOBZQQpj6aEMVU3r48OE+HdSbLPzV9kAZFLWIJjVqUMEgWQPGoCHUIsqQCzPTfRijAGqSYr6aPAiIkNxXndQCOFQiIQJUawwqACuQEQVk9mgE7n9UQBBsYYGM94JiFQULQAqIKqoi920hY1AAGBWAGAnkl6wVUUAgpMaoqiZFViUA+grNo4iEqKhq6V7QjpOaqwRHEq2jwmIUknW7rF1i5VxZC0aHgcfEQDALZBCK0AGwL4KAxhOoiqpBMERKdM8XziyqZBQQVQ2qIafs7mdUSEX1HqyLYJhBDaIlASkgmUELSpJ7stLAWRHu9X/GWDJowHAGyVQihbZGNA7VUGWVs+bCBT0OOFGADKTspJCSIqE6FcMIyLaQV3HZeAADRCoIKCAdYjQc0RkKaJihZCVFMnRf10IkBUCiJCoKBvPZotpO6SpHK7a9TwQhRCZhIasumCTMDAbNmMQQEAE5KVksMCiCQkrSEAVv08CkAArEyoVBDBFmVVbE+/9qQIqKama2KE2wyDSOyVtHaPuYmLCpLObMWQSwqNr7MR4ooCkiBgAMiCoiAAncR+YAvuKyiyp85TcCgKKaRX0VAGF9ONSgfZkYUlbvg6VCFt00sUw89mlKMjK0IUTUaLSPSgqZ5GEdqqp+tdsCGRXn62qKRUGcIQcIZKeUQEATm+DB2u2U540NlqaU9uM9Er3MajekzKUAkCIMsdweNqLCWUXAOKcWpeTa2m7sR5GiaAmJxJFzBqfYSUwWSUBQlUs5XjZEtO+kCjbFaCrMwqwcqlAUtusdKfRdXwjmlYMp/ycfvvUEB+Ht/+1/+MfP3uSjZn50dv6j7/3b/Wb7+NHFs8vrWTs/Oe32Y0mJm3m7WjZvnr189ckXMJUOsveUJjEgtbXni+XMucqVzOAckkLJ/fWrV90h6wfnv/Ht92ZkD15E7axuxrv1zJDEKH0i8EqYJAGzUe9bVyAboWAMIaYxA+p4N4h0zpLm3F9H3Tc++LPl0dPvfHM7KULZbQ43V+uzNiSZmtaJ
YC6sYwEViZpSrmaLs4vHuJP+5tY4icUkkVLE+/t+rcxmwVfVeju4VVVZH4cknFLWrhuRNU3R2BAqb4SPKhyHA4i9e7NZnZ/+3d98/+bu5uPPd51oFhPB9dPYpyzgmpz/znfeq6fD0eJouJkCtvuNtq2vvNU45ijRWTK460fCdH5e/8bXz/7FX3wcszHBAmNJ0sfh4qQtu6lDB+RSNt6aknPOGUWBS1hUZw9nW8CP726++73v/YP//d+/+kf/9y+2fb8fIXFG5xlzSqRIzI2vyIf7VVC1VUrFBZuzWCIGE5pAhMt5SJ2UKRtrilJOnGJGQAcUjIXCLBwqR2AtmcKqagwaFQZjDFAgYNQmOCKKzGBMLqLKiIhoi6ogchYE6U20hlTYxAIq1pGSY5EI1HW9ghKYMm6tdSgFOGtdFTCipqrbYWLEihM0ri25K1OeWWtBxn6cOVcZs+n2s6rOykPKFpWMegsKBArbYTTOtK7+4Lz53Q8/8J+9+vxqR8GIsuq9WRHvWX73NWdWvfd79n0elUCLAeNRWIHFAIXdOKUyjpO3ZNW3q/mpsrIURsxjAtGhj6UUXzuygOCiaE5pFJ6UCOX69uq4dpJktmw4Z0NUV65wnrpxKhlDBZpjzn0cGI1lLblUletzGbvpbHUavNjK7Xd3gWwaGckEa+LQV844QwimlOJDmfapAXhwdv780+dV8YduSAc4f7TilJgwT1xQxbpDHGsf2qopSccxl2lTUgYKUnLKstuPVLlt7LIW9NxzRylY0qDimAL5GtWI7bsDFerH1FYVignWAiGjH5N0u3ExryMNo04sgGoMmNrPWAxGYLVD4oWhMuVhP1qzrNnAFAszEbjWHiCXQFnLUR4rV2HhkpmZhQHJjF1UoWKklASo1Yzm8/n5bHV2tpRIZ+9Uh6vbfbcPyt3VV5V86yFUgQzdnzsFT01tAc327gA65nHbtFPbOGJtPZ6ez8qY66bcXQ+v3ggkEyOOjR7P/cVR6Cd1vuJymK8ej8OutaYXUSJVWFg7NzT1XFestjjjHOVDmoaSGLFu7BjZWA9NM0iz2w0PV8v1Zk3kmY3zbY+zSpIDV1s3lduc7QdHZ36dnu/7atG89+7FszevD31ezE988N22lyooSWEby+SM9wRcUlGehuJdtRep0UAyINkrNnWzHoZRtOOSshxZ9EXBGDQYQQYRUkJh4eyMcYCg6JASC6FR1ly4ImwWs4BzJCqbRIC7uyFtc7AvY49H33zbGdPUIZDdX1+e12G4fPlkjn3P3d1YWWdYTRlXMquPZnY3ONX1duw60xr6cvPmrQfzh21RswCiUnQ3SXO8bCUOu+tZkODtj37+8pM7enHQUcERokIDMEYOHqYC5O3Dt5/qiJ/87PXIzG08P744fdnfXj+/yWf/q//st//pn3/3g/Pz41l5+72LH/zRX56fPP3Www//1Q8+MU3QMnnCxpmizCCzOkQuKCSRVW09a5Rj43wRTGmU/mAEfWVBZd9P88p/9uzybLXcr/dIfHwcgvdX19E5P016iHDY8//2H/wv/uWf/vliUc0gGBewntfnD2aIt88/Mg6lyPbFs9P333336Rme2Z/+9Iu6WYAtngykgjn1qkx4GMqilQftDNbdh/Mnmxcv+3j98uq6CQEsWERCbGf10bymUrp08Ib2sfimqhf1bP6V4uBk2fyt3/6dU1+t92thuLvZAvHFrKlDo1iMd2PSytt3Th9eX72auWY2X23GBM713a5yJiUtUmDKVV0fr46vNuvSD3kqaB2XYgwuV+3yqLm8uk1Fg7XWyIOTWRrKbkyvtzsQmLdNAVh3AyCEYOvK3APjYj+tlnVbe1/smHkYp3GMVVMjZxJwwUtXfJKb7dR3kwM7q0wRDpZawsYRT/mt+YqnHAxe7+KQHBrNKS2app7XycHLTddnORrM+28/fH3XjSnXZB4fzzdDFJbzquru9merozrYbUre2kywKfHY1sZS5pImnTkSUFUUcrvcH6YppnIW2gWoicNh23VSisvvzZddVd6sN2+2h8Ito7n99PkDs/ib7z76p3/8h1jZKtGyMks1v/nB29frz3eFx5ILizHNoqnLmBizMb6/VnNkYs/X66HKqRuGBPnD84erWeva+R//+AfDwL/69ffJlqvDFhwZdM6YGEeO02K13PQxY3LBgo1nq2Mf/N1ufVQ1/ZbX06iiJ40FThgEiAChkKhKzQTARGqMBaRcMgtYZ63I3NiqGw/T7uHq6Kjyz9fji5vtb599GLshS7neDUX56w8fT5j3ONbVQnxt23lSyIc4p2rQfLPrpA5NJ6bA9qZLoTFsHq+Odpvbm23/tQ/f+dn69nU//Mrp0SjTchlOQnXIst9MBADG4GL+6d3+b3zrrWFMN11etBVM+rW3z2uEZ+s77krKcCjRWiCDFmHo437IttIIsF1PXnor8h+MiiTLFLP3trWV4LSqvCB8dLj7eD9lgVljtJQta65nZMOh21tXwqzq9n0zWx7PFtWq3XbdrD1dnLxbzc82Q5cMNMtFM6/+7f/7H766fnV0XH3jW+9a3wJqMGRqe3vXN3UVWY3BLMzGoDAX6FL35GLVoEHjcupyLrmIswFtcgiPHixvt5lZhn5tbaot7u9utgOsp/zW6mG3vyWU9dTLKLEoSLx8/cnVm88N6qPzk+PFovGLMaVZMxd9vR0OWQsY8/bj04kPI/MWeJvuSPIQ6fYwlKxSTGXxru/nMz1tG9tyANcs3tlLDAEiD8vz0zP7e1V7fNcNNqOBRfCLIAPuexI+Xa3qGtra9ONWBZlSwfqkcmWyrElTNoaEi3cmxlFAjxZ11ojKKZe6WpCx95u7ksQAZNCkknMyxUIUROinzsyCq5wt8vTt081uZGfrxtwN436cdiNbO4/9OF+66qhijIAdQjl2vrY2CXm0IdD6sG58lePAOSHqad2Mk2yHzNlW1MYk1vq2qVLMWYQR1j3vh+nx2ep8WY9pb4FTMMrZgUFhSWXqx9a6FIf9es++cqSVh8M+Dn1Z1lWf4WrXJ0wlwfnSHbWVs0oKh35HClVwahefXq2/c3GyXCxeXH8xXzSk7AyQt1PKEFyofCnl0E3ztkFI3nnNU99tD6xvn57VdXWYBidBUN99/Pa+OwQX7voBrUWQtjIVuTHzxdkcddpeXROaqql+magQVERVQQQVJCRUJAQFZtWiOSszKijcj3EUCisjCygaIFQkJYOAQgQCimgATVa2SoikSqQAoM7ZXJKqFmZB1IKlqArxPVRG9R60nBNkRmBw98oqAAG4752BikFiRHuPJBYx1iiARTDK9w52VbmPFeEve2iISoAGEFijSkSxRXLagyNj3QA0RUmleKfkCEFNLl44oCEBRDJAAUQJxRkFBUVmvX+OVBUuwgAMhEgIJRAK6CAYFA0oIlgVA8CiIooAzMrGZNSJhQEr1BoU7i8ZFZEEFUkN0lfmekBk4BFiJ36BrjYAhgBJmRQ1m2nQMqEUFDAGEA2JKBKrZdcoepWgbgFQg6mADIAFJmAG7nTalnyLyBZBa7HdRpWVEETEWcN
SVEFECBAUCuuY82rmxv2UFcYEzqK1qIoKtkgGEms0KQOgBRTGGMF5UmRBIWegYGHlwsaae4QSqxohIJOLOBWLaACJVVCV7tPHRu/BIojeICNaVS4akzKpAanJZoyMaACA+R6SrgAKWESNkiJY+0sXHioB3Cv+ALDwv6+gWWvGFHciR02tSlKSQ0O20lzQCYKmGLWIDdU277MxxpkCOMTMgFGpInuITJpOyJzOKjHm5hBjKmMGFLGgogKGUNUYYAFnDJCbzetFY8bDPhaZJnWNV7LW1lRUSi4sZGwR3na9AwIitfYwTgYtAXIqhlAIwVhGMGRmzfFm3AzlPmaklSVlDUgOUFiOZ02og6MjNfji8jYrMyiSfbPtS8qKyAl8ht989/Hc0I+/vJo2m62KWejFQ7OYrZ9dvYpg8tWr01mdMn3+5S/UNoDVyLzZ716+/LhTWD04qm3lErRHy6EDE1PjlsF4hoGN2SVg5eOlDzkaXb94vfnoy5+FJsysWS1WocE54M2Xr6eUVsfzbhhAGUph1GE4MIMKGFADUDlrEbgURGNUp5hJTe1b7rXvptvXz9vTqhi3fHT04MMH7bsncnPYbm/FSzpMBPeneVRyDkTvv336W3/z923P//oPv1vITLmgIR/s/TmSDWbM2bX+7/zH3/nuz366nZKIsRTWQ46xa3zdGclkJoEX68PJW/OTx02SwVeVln5z/dl600NiBTOCKVyMc0TmsI5PZ7Oq6GZMUT0zWOeBAgT35nZvjatqYhfuhttqFc6qBWr1//nuR8Y0y4aEIBVBNBOay+0wq4IzlqzrNANLTMkSggiREdHdLhWAg7r/6Xs/+f3f/e0/+OBrd3/8x9a21rpIdBgGQEPWnp0cxRRT+ipuLZybNlRVfb05TFk4q3O2OV5s+l1RTEWmmIwxCIogxoUiBtEkpijFWnKoU4y5iCXnwDowjijlkoUVNd+j9FVKKTFlZRU0ScDc3yOYOOUu9SZY49Sj5hhhcACADo03mTnmQhwNMmBxBlDLer9zlrhM89Y745bNk8Y2kNK4P0jJ1ts61HEqhzFvpsk56xwpa+0CIC5rWiBwLgZNN2ZAQ0QvNsP00Ud50tN52EwypqJEhlAEGIQF7k+YEVFBDaF3ti/lZtSU88mimjsyBpXvO3dGwdlQF4aeRacYvEkCIkpELAUIDl2Phpq62g69AtSzRTubeWtj7matTzrKMJKjKSUmQVG0tq0CAvUpMhIiRYCpj2PkKu5WbbU4WkZFQuNt3YSZUVwtZ5vuZkx7W/kkOhaxhMVQP0YFjZyfb6+fvvWWi9Zura+ssVSkFOBdHE0IiuRd8NZbshQ4soKBZub2sZsvfWNdjtEKrWwIdb3t+iSTip1Tu74+rLMIK1jfsxogC2qIWMETWqJJuBQ1TE5JIpBIKMmRd2hNBAFNYyITkA0KSOQyZqvese3HfU5JPaGlujbkzGhJIQHmtvbCpnAmY2RMYAnFCGosDARHR9WTt08fn50cbvflTdf69vkXn0xTvt31Jnj8ypEBKZUSszP24ePzv/7F57WpUy6umC5l58yyqU6OF1qGk/mqdagpyhSnMuWDbDo5bR9m37SzBkv81d98+sM//uhUfF1XRLyszdR3wXrBzKCEDKDbboJgFLOrCaRIUiPeIL118viLqzvr/N0onbH1vLnuDhd1qPximnYrf+LoKOU7JJqYyXlGvXh0dLN/9eB08Y13H5a4C8aU2u+jXQ/VZt0piKNCWAhxjPlu3z29OJIirkIh48mBtzdDrC0GLlkmFR5L8YTB2xmZpqkS624YCYo1NsbiEL21CApFau9EqctFRA2AihJa4yok08yX681NislC4Up/8fFHm/WGgls+ePDTn/9wvYFhe+dGPrZ8HIbSv+lTbGbzo7q9OxwOQ46QL47CFPMumJte+sazw//mP/k1O91UR7Ulv+8S2ABQxxjHnq7vps8vu5+9gcuJizU6Fee8M+gRE0tVBV+HnKNzuL2+YStDKTfbgxlyXPdQudd3++Xp2cnZAwzmkMYXn76p+Ojh8Vv/8ovLUdhMY/DGWBpiOVrWILGbplD5o+OF9c2aZX52dHj5MqZifNvp8Ae/+qtf/uxHuzdRnAOkfiqvnt8FA+sp+8YNxVtbI/SH/eScZ+P/+Ec//Y3/+r/88MGT9WEaynS6bCMOKd8ecl4ch5Oj5eXr7bTemfpN1bTz6ujJk29stn2iftm6abu/3UlBEHQArg0zHxPk9Q++91fbOK2W1cX5xVFncOhrD4C4WtZ1VfWbrq7qwxiXi6aqjNqClblfBd60MNI//96fHjRmYc2FUWLmkxDq4BOjo4IAdzevVlWIXf9odfK6f54zUGUzqhHUAvO2rb3jaSxj5Mih8sPEdeVCME3ru673ZNCppDxJMidHE/Pt7aYguobGPAn4MbJx1FT2uutDW2mRi+Pz9WbjQxNLt+u7lPSoah9fHI/cI0m9aMj6/SHvpwTF+GCAZZz6lZ8B2ukwGvSDga4bi6Smbed1s8vD2ar1zlzebBOodTbnfLXevv/oZN5gbRthRutvh7xNaWHcew8e6jBasqva9SklzpGh45QUR6vEFBNZMt5SnsaB8hi10qqZ4KgJRXlqbDdFRS1Zn5w+Llm2KXeMatBh87/5T//+j18//+LNXTTY51Q37nLMf/X65bcen3V3nZExp0x1tIZY037op5l3R83jty52m6vlcdj0UzfK8enFf/Trv/tP/9k/efrtJ32UfjQ5yWFz9+rm9ujhuaCE0IxDZ40Z+z6Nw0DhoNy2iy8PXXc1jil/sU0+l0Xjz9tqVlsY1KIzAo2v9jztsk4cZw6PQ2BOAFQKsxhj0BofYzlyIQiyD8M0onBT2U3fJdbaV2nSVTufDuP1MFwcuROnV+s3AyAafOvB07tX187jYhnQ+2Vdx5SZIWUlr0nz6nilvi2aS84T0Fh0108jgJ3kyfnJg6q+Xe/3qaRQ42L185fbE+vmKrBLp+fzUeT1rj9MmmMpZJJoZrBk9gLekjFEFuI4pqKLZW1+WTj4alRUV00bmrlmNUgzr6ou4MvdNIEYi42LXz+v3OLory7jIcfq2M9t6Kc4u1gdLS9igc3zq2BPLh6/a9uVqLHI9qS+vP7yoz/6Xrq5IaC+i19+cb0vOXgLoCXn+XxuHVYeifDuzQ5ycMagqrAdMo5J6lp17JvGtk3YbaKUuO0SGeqGZIMJDRzPwhQji2oRm6JzM9/mw3DgRE6xbl0/jteba2+xmc/R+amoGVSKt75um5NdzGPifhqrfvOrH7y/3u+//PyzMe4tSkR7NWRWOr04KWNxoGj49W5HCRd1rWRN2/bQgQ2rk3ffPvqV2+12GRaqrsu5jDuJvXAmkFJKAB23d1piKpDWrHpz+vbbSZgyO4NggECQ0Bibc5eBhIWlTLlzpr2XNwZXIYohiWlaby+XfnkyP17NF3eb2+poFupmKLLZjuvNIXgbvF6u918+u/ZmlhIUGDRP1T61jZ4f2cXqqD9sd8NEllrr+64gw1k9f7xYHDYbZ1yU6ZDSqzsu0fBUbD
CN0920dwrLuYvRnc7qdRzJYD+luq2+8eTRo5P6ky/vXt5ymvT8eBF8wyGrsBRxs+bzq/3ZeXU0dzRCYQsMpMICPJSE5vV29I5mjrppsgCshpykBD/74vLB6ek7b5/P2uYwRUtYhGezJovr+yiCueiY+1BXAnC739jg+1RAdZ/yw9XKeGeApq6LmA9Dd7w4aXJOIGpNFOiGnJQe+WoapuPZcj/Gi0en8MmnAMB8P10RJVQAC3AfFxHVVFQLlAKsIApw7yRHFVS0gCiIQARIKARGkVCNMIKgskMG1SIEaAwg3M/ZLbBoEc1qSgHOKAwCqijOAKApTKpQRAEEUYnQEBQAJhT9qhAHoERgAQkRAIooKBCi+SXFmFWQEPF+3gXu/lWDGkAEzCydqFYGEMs0EZKyIkMqsE6FSBxgBRBAUi7WOGRtAFpLRQRVCwDj/eEygEgGI4rM4IzWHgNCFMgKKhBAPaEqsDACGCQBFUMd6wCAigL3cSdKgiA4MYtAEbHWoCoXYTRTzAF97sHMSNQoOTSWGaAUYJQJeQ/akcuWrCKBKBgCdAaaYldoZyANacPkASw4BCGwHjQDOQgNHGrlqXDRGKVMymJR6P4ylShnJQBDyIql6K7P3uFJ42/3hYnunwxRQVCjIloljxZBcyGwwMgAkAEtTSqWwFhKCaakwYIaFRYEYtUiAEiqRUENAIoqYimshGTucdU4ZgQAawwJAUBBVLSFRbUkVLR4XzG7d1sziAKKEggKEah+RekEBSJj7uuEoEUAv9oejDEB2QHLiSMqlHIpoCLZGnsYowdRheVs9qYfBoAo4AljylwYydiiSkUEthMnHc9bf75obtaTFAbBEBpP5jB2qOqDF5FwPzq9fwVsQAGdr9GMiYF0l0YuBQCQnHdWGWPmuq2ctd2+I2NVgQgW3hui/ZTAmEPMjTH7NCTVUiT4EGM2xtXBzWurivWiTX3Ok0TkalE9fvqWMeXy6g4NerW7mELtEdRl/frZjPP66O2zf/39Z11NHz49fmdpP/rRxzeH2DMoy4PTo1N7+Oajt+8OvZkhzMKr6+uNjMWZ3/3md/70e//uxPu6Mb72zrrV/Nz7WnRbCA4pemuPT+bT9mCxvHl52HFualoATgQQZh//4iM7RTS0n4ZuHLylqvLLUN/t75iFyEnRMTJnFebKUzurS4y2cqmoJcxJiqirqzhq4Wmdrm4ub+tleO/rH7y/+vDNq9dx28duz5mnlN3cO4S2Pfrse7/4/IcfPf/iuijfKzUJURWcpcQAxl/fbY8un/8vf/v99TT8yV99acgR+UOKDyv/rYdvf//z5/MW3bK+5aG1/mhZdR0+u1yDlAz45Mm5oP/566tcSjeJGlvb8NbR05PFgx892768uVo29fHF0W5MbGmUPHA6PnvowKgx3/i9d69/dPPRpxvipp3P87hXBAJMRQeFwaB1NUiZWXdyNluPw8iJCAHUKszn8yll8q6eh/Xd7l/++V/8/d//9qdf/PTZmoM210DdOJGqqkxxqipbyleroOSMSMHJ2apiY9dd52bhUMTN58NhmJA6Jq+wqqnxfgIYh8KpiKCxwVphLkhIZHIRS5ILs5CITiLY2ALFoiHQLArBAguhQwUR5cJaxCBmziBshBbHi3HEvs8pFeusd4mA1RAaZGPBuFSyMJp7v6W1xlVL1xQQTj3kohxZ5G4YnUzTBGNKAxRgUEGHcta2XAozO0fOGgNYLPpgGcCA2e7H2vlfeed0mmLMEmMmwJQVQEUUUUFpigmQuJQqUBJ1BqzRIpIyEKt0Q9NUi/kM0r1NNMcppWEcUJAsEE7TSGi8t5HLxGBjyVKquq3r1jg/DQdvzDT0qDBOqTtIIRi7KdQ+GDMzBJy4FDI0a4/O6vmX5VJGdgicSj7Eq2HLKXu0FokBldBptFbqipD8pDnmjKBFWaKCw4HiOm9mwa7O2pRYSfw8eEOZ0FgzTlPhYkEUMYto7drV8fXlm1K4BqNql/OTbd8hm/62t+w9eM9utxs4SizialMAssfKu7mb+0njugNRJOIY62V19mDxxW0apnHmKmcdZNFkjK9S2ZALDlvRyGPpQRttik7j1EkRQMsZlUFYMhSzcM0qgKeUShlFORN5N/PFmClldHjy5OTD9x7bFPfrw5e/eEGDrGbzqfT97WTbyvuQJT+4OP7o5RYAjppjCA4SxjSAKiLGGKf1qASmPV7OTm4/u/YNhWbe7abdTvJ+evroaGG1OQ1nR0c3t9P17f5I9MH8wdsXK2fQqZvGgsBqKUrZjp1vqhAceZyFMHJJyv2gC4JgKk9+5MNm2liTPLrpLh/41j5+Uld+Qk3TaJQrzjmvjREwzgYbhx4kOWs//J0Pqnd+pU39q89uj+fN1evdq+tplBqmZEGNkz5P3pg+5YQycZ43fhhzlmIcLqxT5xuPVmnfT2hM7aohJkNYVIdSUJUAamM92axKCKD3J3P4lTgXNHjLiFVVGWeRrLF10zjkT/OUTG3rxpNwLumHf/3T/vt//eCiauuqGoevLardyy83u23spiJmyMaHTMHuNjtX+2bRPl6txk+veynXJL//radPZ7xb74q0RtJ236t65m6M481N//Iqvhp0l52SOAPNzJBIpeAJlqvKBDeO+e33H3ETXvb7IfVvPzoqWPru8GDur7vUR3p5s33y3ltH1TRTuL3s8/Dwn//Jl1cpshbIMIm4JoxZfSycSxz43aeP3ONHv/Ht79zeXh4vQjk5ubq6fbYfL+ZP/4v//O/9w9ev+8N2tK4nWJzOUFLqosXqkNK0SXcyBjHBUiwAJGHV/OTlxxfnx262nTifzOhyPTx7dnO6nDtN+4NUzqcir19cutlsefbw8VvfFn1e9KrEniOPQ5xYowAhqRoC+eGnP19P6ZChT3a7vuv23byuV8eLMU8hIDkv1haIdRXQ2+Dk7OER2K/uBR++d3bXX96NYy+lHyY0Toy5G6KAnLmaiwKacZyO60aYhj7PKH/r4fkvLq/Got4hxNI4WxmHQG9u78DZoiCpgIXmqF3Mqv3+UGJGshOWGVaP5qsvb/be2lHxwMqcl3VVQMgTI+6H2E/ZOkcmvN5OSO7F1Q6xtG316984bZz99MurfcrA+v57VT+ODGQXdaQ0KIycXOXHMZOShXrVnK+nu2ydRVc3zfX6cBjG9x588NnVy6lAHHPThArgracX20MvjVVHY2ZkNhYU6HXfX0zxg+NVGTpXhdrbmowyG1FBKcoWHQs6coexD/MWwJYiS6JHizkLP9/uXuSOvHIv25u77bCvQ7uqjypb3mzGv/c3/q6plz+5/vzRk+Xdbhw6jqIn89mnl+tu6OZN+9bTh+ub7a7v1iMOu26kVNjdHuK8Pvr0849dwH47aTVL2f3ks58//uDt4rCq9PHpo3lbvbgalm273+wJVVMxgmQMJ27a+Zd3/WLuQ2OnaagauzhqLfnp0LWrmR0ljTw3XpCAARlcQc14KJQMbXbpvKmWplVI4NAgVpUTGs/Ajph+8vxNO2uIaOzH3XbbzqrdvvfWGMTIOBVxrIfY1dVM0
WWBzTCyM5yk8cF6m8ecpsIqX756/d77j3xDy4twyOObu/Vv/Np7f/JvP34j2/OLJy/v1tbZz652WmRu4RvvHV92ZbsrjTO1hebcP7lYfvxq/3o9Hbro66Za1vv1moIFhSKaDqPT8o0H58plkmSRFbKx1X8wKlpU/uHxPEzd1X796MHZ3XaIyqiaVfoE4zAcE751Ik+Oze0E12PKPR9Vs0W9eHR6cnO9cW65WC6tAR47ZQytefX8849++uPDbmgsgVJd2d2mny1qX0PXTxLFOb+s5iUeRhZHGHN0oTFK5Gjf91VtKw/ztlkfhu4wOkegPE0RlMrEcRzDyTyNCgJ+1vg4nczCfv+iEHGWojn3yRqbc7Y5N6E9qWdY2DmsyLngrIPHjx4l7Q+H/XJhd5vrV8/UWHo0M8d1uz6sn5wv+oiHMRWIUfPXHl90d7s8Kme0J/XtbXfWLE9mTewhXve36U1BH+rAhVBjGgfJUwg1+XoY9odugiGGEGzQlFTT0O9ugnfAxYA0xsq9kgqk8p4UnfEA0FRHYNBYVQYRJgM5FUDz4PyRBbc48Wi0kmVR2N6sp53wkN9+62J32F3fbATweHU0TooQJTMY7FM59DmlMN8fZmSpqSculSUuyXpatXXJTNYDpso2255zIUCsG2JJhy5WQU5OqgWBKxWDWe+FSW+2Q+r1ZLGM69wNWCacRhVWRQaHd+v9vG2qYGcNvrVoVirDvodCY067kmtPWjAqTiqVtSfLpnW0OlnerMd1P8Zc6rb69MtXj48vgrjb3fbxycno6oGTFFWBKWYh49rqan2zWizAhlGKMwaK9IfuVcpARlOZ1Y2xeLpcDdPImZUYja2bygE7Ma8vb1DUoFGAu83+l6kiRaIiSsFCYhQAEWQAQSkorACA9wwPAkAlB9aBeLWoqIIIIqhihEEBUQiBDKEgsEPyRhTuGTSiwAyZoWQtrJpBMyICohqS4NQC5qKZkZEIgBAQFETRYFZVJFJEUEtg8SsKDQsW0XsoD8G9gl2JfoksAjCg5r5LJSqoKIIKI/IcTW2pKDECIvKUEM2oYj0Ea0ELQHHGqYInxKKGAAicARYRBSBgUWZlhTGLQ/CkNSGIpgxRUQE8Aqrck4GIkACLQAZg1CJgAECUQKakoyAhMiiAWjRalAgUccpsiiqwFHKj+FKsIBoEUU45pjxseFobHIwoGDBACqJkgKyGpS3zYmeolUBtENUiGgUWQVFjAD0ggakgZjUJOBZwgGviWzbF8D38gxDv+1ugZEhFNvtkCKySADCCiui9SxoV2SAIGZACqoqEyCqgSBgsSRZVQLgnlSsZEBIUAgVCuseRJ9By/9QpUpjJAmoxgJxBUIlIWEhRDbIiAYLqzFpG2BdhBEsIqKhgAYuIESAlEOByP54GpHvW1FeorK+eb+/P0DwBUATdTFMFPk8pBBOscdbllGIsjswoZT0OoBgsOiMqqMY4Y+4DbtZQSZwJbw5x298yq79/17UIMBEWZmEAFkIQKTkWFaDjUM99isAM1oCoci7M7J0rhQlIQWaNN0aHHIFAGRQQcn774YNpzC8OQ1HxpCSsHA1JHQyiOIRK7OPTi5gO05S03MeyTNO0QA5t66s8teN6uy+sR009lGxRyZs//fmnTgtYNJbeOV/91gfvpaHrnq8PUbCivsCLdbfF29qmo7Aq3cgYx8P++etnC3/8g5/85TQOPevt9e1xc1bNV/1uUzuYV8RFHLrWVm9eb+dkFo+PFyernz17db1fDzl7yqamX//Gt794+YmWOO1j1ThF6FLOWsCAByCjrq2mfeJcPNrUFeQpGLUBcylJNBdRI5U1CiYz0KTUZU948/EzOTqcf/2dk6dmZs1HHz9PN7c2Dxz5r/78R1P/A84CpEXumfQUcw7WSmQgcsaeNvPXr97kfv0rXz/63W8dPXsxXd+N5OxNPx0/Xn7trcdXl9dsSrT+56/jZmfA0PsPLzZvrg3Y3WHstJf/P1f/9TPblmV3YnPO5bYL+9njzz3XZebNzMosw6oiu4pWVAPsbgIS0BAEChD0Jv01etOL0BIkARL0olZLIpvdTbKKySpWZqXP6/Ka48/5fNjtlptTD98tClQ8RyAisNfaa6+xxvgNyD6lwtgA0sz0Zbr85z/9cu3z4ngyceX55Xo6nYr3juTe4YxzJoDTk9Ozq/DLL88KPFAKrdZJZMyhrJxhBZEzwb5tj+eTPMRUAkjWAhRzBep4MT28e3S92Zqq9hc3k1L99PNP/9E/+P6f/O3f3f/rX9yct6ap4xAqS6iQAYDMfxBM910oR8UmuULPazM1rm4cCwXhMafd4Esk8TkqZM1NWUQPSXNKIiKYEUWZwsUcgNhDbiqlBOl262JUWRkk2O1aa7Uxph9GRRoEOd62ChAqUAycGQJvV60igzAYAgFJMXGMSitlVfKBDKcQcpZEyJ6zhTuHM0MaQYUwcu9JULJoS1HS4MfeR49QWpOTCMpu37nbm8s39/BMoMYxlqVBgdI6Tpx8qDRVhiKxsICjzPk2EqsVcGEICdBIzlabeT0NOWZEa4thyIkjJ+W7pBiHsEqSUSNhzjlqkK71DKKd2o5jN8bIWJZ6tpgujg5zlOBD341xaLebbUGFH+O6j+hUtpS7sUasFroiqwutNSqOu9WlA1jObb9tQZnbvgBUMeeILBlJULOkdvBDANTGI6ArnFF5yMDpYN7M6knIMIRoNILiXbeZTZvCFPshVKCQNQNq1CIMpELO7XrtRIzQsNpZXVV22m77mFNZOZFMSKSqDJsEKWulHEKKu+1meXioAWaTyW4XDUhSVJbWWWXGdFqVamZX1y1aPYaRAPuxFwWC2ac2cbYGlDIadCettkp20aBWLCyQAhca8qqzzhV2HlPOAMWs3oXsgbd5fPxw8eBgUrl69+LN2dsrYSq0LVEVmk4XB7PKfP7ylaj4+Mnx3/R8QEW6YwKRcgpUga20sQSGSZTfDdd2IKp3bU8jc9+fniyfvHN3Pi048aodLYvD7GWYHC2MhW89vrverCApn5lDNKWBFEoSLMzBcUMxQAROuTK664c+y470pIRJWeTMrizIwMRjpXOSqyzYdemgLEGKPozG2uAHjSCuKkqb2HcKXVlbG3PXd/twHfn1+VWEwgeujZpo0+33SqFWIJR7Tm927TKYyqnalcYoRMo52Syc2WoliDHnxJlAptOq7ROhGmMgQVRIBoc4WtSTstDWbPqemREweO+ModwXNBUGyEyMhKS0SiD9GCtDnL2kjnOu5ydxy41Tmzcvoe1ViVqkVBTDuIoopMaMNZAPaTMMp7PJfohPr1bfOZmq4doWLqvK+7HdbPZ7n6FMFttEl23fZhKJJYBmVIiIMHYDVe503mhToKGDidlenn3yxdNJXXgfsxIQXE5MRpXBTYw90NokXLfDYTO9vNh9fHFWzCqtBHMi4JTJOJVzBAFnDSD03fDqy0+r1K7f7nnw3/vw/fLcX69vXv/mv/v+owdhld60482wr6bueKpeXw4Q/HxeDBkca4M4nxQ9+8lhUyysIW+O3aOje2dX
W7/ZVY3c17Wz5c0uNEXRZQgqhyGkYdNnyrGwiScKUmY3cfZa5xiAWFm95fjzF88vxrELacw49OPNfm+yhAGwJQKYNOXBfDLE0F3srNUcfGn1YmEnlbmdBW/eXCDjfNlU5POldFm0ppSg93yd+0Lr0pXHzcw5J4SGUYk8XDQ3VzfnIQArIFU4ox3kmIQp9AmS2AJdYzVkHkeDeHhn+ep6O7VFPeQJynnIRivXWNvlmHja1Jvtqu0H7Yp5UwXEYUzapk0XppUusyxmBWlaXe1f9b7rWJVWUFTm7eWVrWpm1kkCp7sHs931ngUC55kt+rDtQ48GnShKPCV9NF/mfl9V5dD5yaRwwnHfPXx0eDmEN293J8eLpi6ub1YlCCPuOL/t+yfHxzYlq6lwShjDEDgn0kobNKCJc2ZfFCrltB29QJ5PG05+Nw7P19urMhuSSpvKEuShUNVhpQbOiOW3ju7/6x//yzFn9oPL6f5B023bOPbHVQlhfDvG4HhBrgEVfTZOM1I/CsYU+mHV+qvYWVdOyS1c0VjchvT667Oj6fzeXePHzo9jHnPOGR2JADAYZR4cPfjs/PlhZVESjXzQ1MvGfvnsvM1w93g+n0+nihcajMaYYAhpG1qOcWIrskLWrHTohPMYFIJRRNr03udMUJoX59cBdWOL4MeytgIcYgSAdgiF0UVhMKiU8qxwypBzllBWu10JcDqfHU1nr95enm/WXhg0ZpDdfpjWxdsvr41SLhX9WbskWWilMcwbPSnsxbrbBwmiw9nekuZ+0FWjXTlK+OJyt48eSEtlB873prPYjV32QqCR9n1QpX57s5uVCkGcNSmmHMb/2FVkIbetcXTvzmHbd0MKN6OPIWqFI9IQ9ccX49t89eTe0bsLXek8BOWkKotT5tmsNnpaurpox3FaN5O6/qu//rPPXn0eh2i0I0CO7PcpMlqDjIAsSlKhtUJpI0gAzlwak30OQLbEzHIwb04P569evmr3Y7/3RVVsfNCoTGGdApQS0i2ogm9udkopxVCUpvOxmU76XR6GxDlAlsI4DXrYDXWh67oqJzOV2RhtxNRmso1r7+Po5bObs2ZSNBNTae1md9LIJvHElKvrloPZF8PxwXyW7drS05fP70+Prt9cHi+tK45dsVDGSEbOQEqjJGW8jPvovW+7opnNDg6zBbq8zikgwr7fPXCnmPWwH4WZGYggxtFnKrFgoBiTcVZrBZgRBJRm5gyYMpBWaAwC9sP+YDkdvI5drGB6cfXWlup832Xm5mTS3+yXs/ochrb1CpQxlEQC25sBfZZ789rmYcygrEtqmNR2PilSDpzy+fW+bsqcpDEqIJRWQxydxo/eO8ytn1pnpPn6vL11VTCaq3Xu+x2PfrLQyilHiAo5hnHfQiYjerPztam+e/+h9CkPUtiqdMg5HTYH51fbcUxNbRvC2uSTw8nyaBGHYOr67XWrCK43u5/9+umTk2mZ4uF02RK+Pt9NmpnLMKS4jkyWaiy1wn2bREHwvimNK9R+6G1dZg27tlelqTQLJVsSJFGi/N6zj64qx8zHB8vtboss3fYbcJfSyFkKTZ4ZhREUCEpWOYOwfCP0gKC6dRwJOVAKNDGCKAYWIJGUQbIWJhIgQCASYkYiFC2sOKFCRkpAUSSwypkosSQhjYqyMaQJFGMGTLe6AyFqQPgmT6YRNbMWVogAkBGYVGRMAAmBAEUBZb6tgWUAECFCFlGEiILIpPCWfcSJSWHnx4kpWGTvQ6nV8cT1t0z9BLs01igRQYswS4hsUMcgDDjeVmoJEClE0SQEiMhGfRN4SwBZkBgyQCs4INjbZBoTCSQBn3O81bUQDNHtkZ0AsDAQakUsAreGOhAS0UTBZ2QZep6mRjOJAEKWlMYu+y3pVoHXpFkpQYMgKkvOiMygADlkjaApGySLGpKKCMACmlGlgjIBkAawgA4KA76JQ6a0JkQlCBkzEQMiMgAqzjBGIsKcmREEmRxYpfZDJlAkt8FFyCpLZn0b+hLkDB5ACQGgoASBFGJt1ZgyQgIxwASMDJwRmMAIpZSFdBJARiIE5CycgW2pKIslURkAwGmqLaWAnmNGtCgpc2S+hREBgEbMpG7HNClSCCxARDlGEVBEkv8G6JsZgJll14WIsGgmYQjgaBd774MF6mLqh55RKyNDzlkwZVakBSAxG6ONJlSKyYzMMcZpWQpKSOxDUgk4J0WEhNqYMHhXlAjsnLna9A1xAtz7VFtVaNN7IS3WqJAlJeEsCZQFlWLKLD4xIE6sYmJD4pSKQJqkssUYRgAg1N6HaVX+3vsffPrqTeB8Z7KMOe/6XVFUgfWyXF7drDhxHlLptEGcNrUbeswp5LxJQaHKQ2xDun52ubq4qazcO15859Hx07PzyxQtqco2XQ86DLt+i+2WpPzW4skf/cE/+N//i/+rVroq3dDuJQ55SBgVos1JUFlnVPShvje/MyvX2xuri5N5PSAg45077+77/uL6TZf59OTo4FQSydXNjmMcwtjoElFGzylFLkQVSrKQswkBGaRLGcA6Y8rsU44p3UZIiHA2LSZl3d7sh1X79OIi5PzBo4dPDk7m73749qvPdrsupTXsBlI6i6SQUCnQYrXS2kROSquYYxg5le6Lq9jSel7qo6bsdjFk6Uf+8a9+8wffefLwo3c++fzrRqsh2FdXfZvTxXb34WT2+PT+L1493eVxTHE2qTNi2ntdYKh4NebNkLj0AdRHH753dfF61e5XO386m12cny+m0ymWf/3Tl8Sla2y3a6EPgKn1sW5UjSaETCLJJytYNsXF1Y0utUGSlAprF9NDye7+oydjzNL7VVxf77t//s9/9j/5Ox8umxcqPK9VczCvxhD6zJVPlIcw/IcHozJmVEaljmPsJrW1hcScrHPTog44WkhRYPCYssyVqmrtfczDGEbOCbU2KXLilEUcqGXpUs5O2TEzkV23bbBZQEGWED0xgKQsOWYvAAotpywoKWQFasijQCBEBmEWQlDKiHDOAgjsAwmBAGQCSAWZyhlKTMCQs1EqM/mUupzGFObOWCIfc6mMGNq1g88YKU9L64zu+p6QlFIKiDOMvQebOYNXxCwpJhJBIJKECFaRsTbFSEolER8kReQcd8NOExGRoSxKF9beqmD9MPhhtIXmnGNKzCwQMmDnk+9z8DEDNPPJCHBc10nEKBYGzqkdBkZkkkEyWBxTyqCm06LWSpAzsABZY3KU/b7vyNyE0QDuW296n7VWmGtnTg5LKIvrXRxGzmjFGqMUpyghYsIZKVvqZVF4pVfRq8IQqSg4r+eYqV0HhabbDBmgqG3j6oABDcyqwu96D5RQhLNAqus5X10DwDBkzRy577P3kCMNuiiPT+p+k+dmYUfp9/vZcXX46N7mzaXBKEkc2M157+aNaEgJQoQUlEKIKZuJA22BKEWPTmUehki6Ucv5YkuXYcgqKY7CgbUyCnNhVMqZmNbrtbfAzj14fPC3nxyV2936q6uvPj7rB58VgRJGPr47BY5FNT6/2e4kJKd2IDPzN/uCuaNr0geFPeTmc6RGVRMNRvWb3hg
zq9LQA4sxmUBoUZccebv1kuDyaovz4mh2kJhUoVa79vhggX58e7YRVytXBpHohRNOa31QY862uwmTqj5YGMs3SeBZ68UHQKOtHcbR1ZZmh4njmHXwedEcjn5DmKx2RrErTAbpho55Zxe1nhzcffhwt766eHNxsQkXex+iEiPGklUkyROJtXroe+9zzrDtc026bohzLJyJKUEMwhpBURJSoABHRmv04CFmnb23mr45/shZk9HKgHI+DO0QsTSBkymL0lltCgZFWpmiUDEIpJxDztxpO3gwMX7nzvJO6e5ZkKZWNzcTpUaAdsgxYYW8LLW24BEef+futJr8+NdfHd47mRxUn56dIxa//+Rbu1/9qxfnvakx+HY+m6tSPn+7Xu3lusM9oU9sjEo+pijkTGRWiA5BAmWQurYa1HorYy6cGW92rZ7oTdc/XhzPp+6myznH9598+PTzz+eT43nhvn77G0EVY0bhkLJBTD5Ya0IQQ3R4NNMh3Lx6ATfrf/yHf/zJJ7/eD/EkdqlMf/C97x7ghq5Xp6r/z/70wz3ye/eW0wr+23/16189vSRbvtoEDKly6uQuFZPqgw8ef/X1SxPcNvVvLNYPHr3c/eadk+Pu5aWPw8MHx8FTHuS4Tn7MEGGEePnq89PZghTce3L88ovXxiJ6qNHcXx4icbvtEbFpimPrhpCv92mfYhvigPlkVk1nzbSAfddbjSWpZHF+MrfWDv03NeGRAYaUtRDx/cX8+XpLWjPyYjZJXdi0vSBZVyoUEJWT7HxKN91RXe9CCpmnpTtsihDT1eVuSAKEZVMUGo8PF/uuTz5mhqt1SxnuVcUPPzx+9uzm0f1l9OnVzaayJgOcXa4rQ7UuhiHveAgxcczEfHdZvffwIOy89+P55VYATWWziEYoG1U4yQjTqpGcX+/aMSPuR1SUUiyMvnM4+/ry4vbosCxMSrkucWG5PijaFjoFjybl5vXmnXfuZUc/e3ppwc6GiIBaK8xiFe4gvF5df6bd7949wRwsGUYNZEEn0oSYClUOfkRLWudNux1CfDSdUYoictn1UOlZYTmH9W5/b7bctddDOC9NubvYfv8Hf2fFm1zR/cXd10/P9723hgq0+8v1yV1992Dx1dX+bNtd9qtvzWfLw/mzy+vdatQNPjqeTQ6LHWfQphujZn80acK+u9m3FkvyOvXj2e4KUTBLVVlTaQLFeSSCi/0ZqRQH35gJJbjcrTYrdzhdnlgQI59/+vJ3T46XB7NdNxTOhJRIWTJm0w8a6cn8zjhcM6icoCwrZwQpa6OV0+epvwa6CTne7MKQnNKI0neDRXM8my2bZtVuc05NVd05OHp7vS6UWKsvb3qtrUeJjWpVZKvGIcU+zA5mT6/W75jFCFgACZm2S99/53Hcxt0Qx6wowmI2HXajsbj3/bygJ6fTPhA4t17tR+61K9a7ISDWVQEwWswpScxZjF5U5by26/Xm7r3Hn75+A4pTyqmP/7FUNK0Uq/Pr7vpiNE6vNn1R6Sf3TjchDNvBlLVWdtPCEI6V6rr1elocnR4+dnbid3u0GjX1vdem2bf5l5//5MXrZ4hmMXWUpdB2l4Y+QgZqN30ISEY0QiLYbNqQk0FTTRdhnRAk+Eg6L2fucGLX691q3ZG1urAhc9v6+byp6ma3HQpyV9ebUCvlqM987+6hjn075HbbTw4KRuw4kWBTFymBKmqtrbEGmcQP07JgEJ/TyXJxdXO56feqst3AY06rTXSKF5PJ4Hc+DScnd46wvun85dv9zVU3ndf3v/0wzTtJvnJ0eX7z4XsfKNIp55zz6HvOues3SH3XbS5XmxD9fVMvj9/tAZr68u3b16YotC0CqMI4UtpoDMw5DNaUWjtlLACyhshCzOMQJk1RTFzfBa1s4YoQsgLISVxR9SN7z2VRa6xPDyfapIvLi+XRfAhDqUy72XLMViOxMGBMjAiEagxx1Q7jMDRNId5PpnY6taMf9+0w9KF2TUlV169JictpZuzJ0aQojfSq3dLZkF+fn5naRZEx5KIwiSMBiMGoKGYeQiyaQ6WRgBpXLG1RZyPWxS6NfR6l/Gp1VpyYw3LynbvvHNgVXp93Kd/41G6SBp/Or5u6qqtqOi2H3TAKfHVz/kd/+N22u1YqP7g3a5y+2nXzk9luCMOqDV2Y1c57n33QhSMkztD5GJl9NzhnjTGt9+MYOaemqWw0kAGVyojMXDrXjWNOUpUG/iaNyQLGGkoRQBCAAFGQRRiFEEDd6jYiSrQF0MIkCHjr5CGGnABZiVDKpIGAExEhZ0RAFo6MIqJFAITg1saRWG5ZNIgkOQOyBTSEGlVWgnJL7WGFAMBZAG6f1SUBQAYQQC8YBRSCSCKliEEyCAsAigARMoCA6FtdByDLLf8KUEAj5syDx0tOgDgmkMSmIlTKAfYhAaHHrBRGkcAAqCJDzOgZhLRCBGYWQERUSAoKrZRGYBHBPqYAkIQZEIDkVqcSiIQGBEUis0FUIAqQBL4h6wAjIjArIiNCghE5IxIiiyBgZjBZSRAIwggp4bDD7gb8ml3IBoQEmG9dXwCgIsAYRcWMBgkly20+ImsFSBQlobBCJgXEgAgJARSoGoQAliwDUiDQXDRspywKMNLQZQmQByURFCCRKEdArBVPRPmRJQGLgCYCZAIWQARmueVaZUGjiFBYbpM03xi/RJhREkMKYBg4soAQCrFkoSwYEZVWMUWtiBRqEs1iiRDBc04ZDaIhFQWyZEIkEhEghaABBEEysoCAJoQszJhu2Xf61kUCAzAAZGZSqAx4iQjQpw45+SGJJiUQRG5SSkCzyWSzXpMmHzNnEGGt0VlDgClBAuCcnVGWzcy6y30bWLS1lEWEraIYBQisM0hQWJ1yGrwsGhP7qEkTIhIpIkFIIqgkhVw6QyKKkBRFz0IEIApAY17MC9wIoqqtEgYOjIoSAIuzxeTzN2/WfYvKYLOoOcznbtOOqoQo+6rEsqBcu92Qep8fHd79Ww9Ozp6//Plvv9oJRUSFGhz6IZ7vWFCu2R92w7unp1nOIwcBXPmWkMVFZxN5/733Pwo4dFZTFt337xVQz4vMeHj3brWYw8XblHKXcjbmnLjQ8OB3vl2O9Orty74bi/lhynl1cbnfruqZA3IBhmY5vXcwvby+ubj008cnD949yahB8XZ3s77YXz1fyYg4ZjtKVSgnXBV6TJkRWx8FFQCZSs0OqunkwO98vxtohKkzz3/x1c5/6VHIwJ/8vb8/jF/JxT7JmIVuJXFBTJkZgrVaIEMWIckca2vaTrp9cryTDJhAo8qEX79++8H947uHx+thGFJ2VbVt21XPP9+tvrzpLREHqevac4qIY5Z+6ztmtM65nELsJbRDN7K/9rv3v/fdp198Uc6q73/v4V/86K8WdKe+u4jSdfuIWEYwdaW5Q28kRc4IBHq/G7kCa50AoJLJfPrO6XtkrFgN2IT9BTAICCj186fP/4t//Pu/870Pr9Y3L7ph0kz7FGZNeVI3V1c36psHItBKhcDtNihFg2cRLoroXOkzt+1gFWROQIioclI3ey/WN7UplB
TG/Obg/2amsNQxx2UbQlpSf0tZ7CGkBRyBxZOLEislprIgVKFDGzU4SEkhkB8a7LTCDliIggrAgLRYVVzhCJZBARYEKlSBGBCCFJ5MiCSKRQGaI7YY+ARspwFyCNgMQid6YuEQQUUJiZSSGgACEZNBYRwWhEQYlZ3V20gAyK0YAOMN01a2DO2IxRKRU5hyROK0cGIRPIXambCFtEUopApxQAICXfjGEICYlAIH5duIYgYBQgYE4SEBhBoQEQQSBQkSGBiEDKd3FD8Gh6cN3dZuQYxBAW5m43JcYqQdr5kJF0beq6OqzrdriJkj1LYnHakEZNxDkCsIAAolEqSR6iIFLy7cGRftYnzPL/+O//+f/yP//La9yul2G+MAkxcHAWyAIpMUp5nyVByhjupGFatB3LE3YzIAtAqIUkAlFmhmZgTE45QgMRgtUqS7ZaJUxaYRZBAbrbwZWwOOabgTMDZgDSyScQUAhGadJaWEIX53UpRkwhMkG9AD/JuwsfblP2aFhnRkGxRpyh5KMWADRJCFXQShSqQptmGEipsqhG5m7wSAAKmxBKEVUgCZo7N2FmRaCJUmZOkEU4JxbUpCwZQuVZEnOhHUIiLaZEXZKbUlGCK83pzH0BPQA8erQ3Ns1/8fufiivfuX9Qm/Tm2dn//G9fJgWuHH/vBweffntmld/ctJNjs93xr191nC3E5IkU89C2zdBH5CQgmZEBtFZKjSFLEFuqb3z/A6ohiY/XV+8cTeZjsRuHk8lkd+2vblZjzxFsH9O04k+OZ+++f+/Nsv+Hz28e7E//2x9/K5j073766yfNtqbpGGKxKL7/n33807/6tcpOlVXWOQI3EgTTOydT3w4CwWjotq1nOD5eLNs1p6CVgeCnVgLyZrPTugpdFkQ3p4eH9Xo1ttdrIuuc6WJKIhNT1pVthrDpm6KepJxQGUUoKQGI9+Hg9NQA/NOf/Odx40c/3rw+W19egsgQvZpPX/fjdvSkiy6oeTkpsLZKjFFjQqvvCuAFEcWPxmkRyQkFAJQ5nFYOeFKaMSgk2Q1jCkyAxBI4TwqNGshpCZj6ZI1ByMKiSKWYEIUQhHFM/PzFm48fv2uU241BK7XYK4vK3azWnEhpwykLIzAgoSGVRYxxjERKoZLSUGHJVFUIeVrZpoO6nj2+9853v/XR3/3i7/yLRif47//4vzxw/OTF+f/27/801tNU6r9+fvHDH37/9//oD/9oNv3ss9/+9ZfPOabSQhauZnXTtnEcSm22y1ZS1FgZs4i8Mk4lif2QQkyOMOVUc6FJCgWApKxNWXZN7J+f9bk++ca+cmZ1vqsn84Oje+O6vXlxSwlLZxFUDFzMJvcevaeM9hxDnwSStaXRqm97iQEFhQVQFa5UBvdPFrp25XQy9P3X+tIs3mfKohDYC5EmEhEdeoosWbEtbRqz+EyC3SYMrXSeF3tysihjzmPicnaQREkeDVprKdkUGJXQth8M2sm8iD77MZKk4HmzHPb3ysmkNC6OPrSNxFaYoR9GElakJLMympliyilnpzUqrUmAQZMiRWkMTluOoRuDczZj6HOWLPvTudKw9j4HGcZgFKWSRXg2KXKC292ABEgARLa0ELNW5Eldt0Nh3BSArMqAQzPSGDTiKsi286V1Tuu9uRGn27ZJjAKsNbECQkwsPgtxms6nyiqFWJhiN4SLy+2sLDT46WSsFWjEuqxyYF0UL88v+sGPzHGI+xN7erh4enZ+c7tKgY+OFu2w0yo1zcB3jmurfQz1olScUoEr39XWusImxRni7fJ2fdvtxt3xflWWJodxuWrGPihdFqQPDvXscHKg3fK2Wa+62+Wtwtqg9CmboppOJ0jQj55BF3Xd+b4q1cyqFJK1Bgs3jrkBOzbN7qrT1J6enPzgW9//8tXn5Kgs8Oj0UNXF+dnldOK6GHabfr8uIfO0UjpoP3YhFhGAjFZKVXWdcjaKlqslBtZAkFNptSJa3zTQ5USinc6lmR5Mb26vjS3HLJvt4GM42J+XlmJKmJSrbMBhfuB8yIHj4vCwvd1VUhSRHCKBHnaDRgJMCEpy0vpuO5RAXMqJtAKlMwettLGEOQtL6L9OafngfrlajYc6LtfjgbUFmaNY96vOZXjAdfyry08T/PHi5OyyO0q1sbo9ez2v6tx7pbH3cVxxOZ2pbszKLjL5dTNXyBFDMxiNNWcjaj3404cHieNtN267vlC468eyLLddxJhdXW1VHHbb3eh3CoLk5fXlMM7/6Pe+9/TzXz86vJfD5gc/PBXxyQ9EWis0Tg+si1nxN9amKGZaVkZXxg4+ONTZkdM4qapdM7a3y+Jweryod8MQuwhtmk/K7/zRj77//ifT6UnzydWXT5/33I0yOhEkMJNqHELfe1cZRTTsui9/8/T5Z1/9P3/8ey+Xtz9/ff3F7XoxnRLL1FVWUfTB1jWR7oYmiGigg2lJVXVxtctjPthbfPK7H336wb316ub51c3Zbvgffvjfrf7h5uX1atvxg8ePe9NP7bRtB+5gv5isdptdY072qpwikEEBjmKQ9vYmzRiXmzZ2MYEWNta4OPSVq8YgkiEPIbLXIO8/ejR//O6Xf/L/R+vqGdTTylha7FdhSKEfJ3W58Xm17jZdu/GsJ4u/vl6/7wbfhDZLNZ0eF9V7BwfddvzJj3/nctu9Obslid9+/HC7uWHGJGyqUs/3nr15OanLq9+8bq52nXOeMSfUWrU+LOYzXSuuJ32bqqKYzyrv/fV6gyhDHwvNABiy5GEwCrWzN9uhKCwDE0HyIQvZcjL2OxGOQ1hMSsqyacN8bh1mOdsp9ZoIBHVV2SHkN7stxDSd2uOjg9Vm56Ge71fdxcYV1ih7fLQ/25sKfHG12UxLdbu+PZ1OpuS+fHrGca/rOYSvA305R6V033pD+PTFWg7qxeFeQ+OyCYeLOYmvrF7U0xc3N4/ffe/1+no7jpc33cGkrA+ribGVrVfr1hVQT6pN27TjmEKsSnu1Xt/bhspN1tvN6GFR1iA4DsP+Yn7T7JyzqnAszN7PwP/hD795c7M8Ojyu9ya//MXzVdtbpcuq8Eh+SMBZ+3T/YPb4/rFftcrnYTdIBlNYI7Qbs8YEoq3monIM6Mc4VWrfuJe7bvSx91AE1A4BiIzdq8qinrxcXfG8fLbcbLZD7n1KAZTOolio74dvfOs9yf3V7Zvp7Gg3RqXtbDJbD5e2xJx8TGbo4/p2vLzeTgv37sOT1+e3r27W7x4frprIMe4tdyf7+8+2QzWZPzyejVkHH1xhztouEK4y5km9XrazYfjr3/yy2e2S6P2T6WG9uLi+PDre2yquJzPYdfcOi7Oz2/OXZ2dnl4+ODj/95vv7+xUW5osnz2e6uL+Yex9nZYWFM8bNKrO8uS3r6e3o//Tzr/6b7//gqydPLnfsCgWojdGvV+t/9R//w3fvH/3kDz5tfnbToP3Fq5tvHu1/9NE789ubej7VKd+cX0z353oMIBAZ45h9isYoZUxVl6sY0NpxvZ0R/Oh3vvMXn/2mLqrN2mOBGKMi6X1ofTB75uRgevb6ZYnKOXCSqOv
3dZGQc8IkyMKzojqcVtvR9xwGQ1VtQ4xbP+qEjhQaAkBjnB9yVVSMuGra0rj1Te/fTX3M46C6Xa6Me355MZ0sZtP6+fU6R20yPj651253P/3tk8D56HDejfLy7ForNSCthuH0aDrRth9yx/ry/DY243+yKtps1/vzUxCVxlgC3p6/vHz6m7HrPzzdR9Zx188rN5lWm3YEElM5Fxxas911IgAkoEDUnToBun5EEGABAu8TKkrCP/3FX39w78iiAZRu1x8t9vb37jXbhlECJShM53PhrCZtgEgV7TYdHkzrw1o43d5sS4XRJ8xgnQatju7t315cKJ2Q2+uz17/7/Y9etVuVwe/yq/OlTpxYnCMKKJ2c7h14HydlNd0/WOzZl09eK0OL44MP33vn5z/9zCeGEO8/PO3W2zEnPwxKQ/Rj08jpyQKFx87frrtx7JXSXT+UVQUxHjowTg/bdm9RI6CrZLnebbb96b3DyV7x5vpN+1erdjl+8zsf/fL1m6KwtbNMqu189CmJgMDpw5PZweTq4jZJwihG66p2fszAFHwCRrCgSHHMiOgKaws3jH46m2ldhqGRzD6KLW01qbVWkoCZdVUKk4gYTcT8e3/4w5vV2c9+/duBATVohX4IhigrBgFjFRJoxuTTe59+uOk2BUEcuhyjD5IEDNq9/XLYdKRNizyflcttO62cDGmvqk+Ojp9cnje7QRnl23ZUatkGKi2IkRArNZ1ODkN6hcN4eFi8c3yQWn5zfnsT+lnm+/cO1meramrvnyzWmw6kV2jbrWfliqIuZ3TTtpyhPJhtQ+5vm8PpxGF2jk5PZx/u/N8+u4CcU/KzqkiO0dE6xz54RkGP65yexvbh/ZMPDuZfPX0zJHaTYlYaVDCOAUi1IWKXRLFRZAsz+DFIupsChULIzGyUEkFFoO/sXjkToSIlWb7e+YCICAsDEhAqRaUCi1hqVCSWBAmIISPdRQJpRSiQE2uFGSALIAhnMYpIERARASlAQQHMAAAo//gZABDFOaU1kgYiEEGyCjKMiSlDyoImQwIR1kBGEVXK9xlAAGlILCLEkACzoAGVGAg1IKcM2iCKzgIhCWMeYuzDGFlECEgBA6IIolJ0Z3vLIlmAAUmAQzJOC0CMSWtlteLMkcVqw8xIcjM0Y8x3yiltKDMQgAbMkatCVbVLmTfbETzPq1lV1lu/g5y0opAiwd2KgaymmNNdyrdGHcdYkPnG+8fX1+cvz5vQRWriP/uv/qt//Wf/po+bcYgJ0DhQAEruTFyMBiUIpKwIBLItZbInRQ1IYDRkEaCsGBGAARzIkEY0QgKKIHEmB70kVMAi1gABQAKlQRPQBI4e4+pcQgc+JGdBCDhS9IApK0HpYFiPeFLkzGRUUUMxlfrULJ/H1RuRlhUpLWIs9j7FBIjgwyiC2jqQzImjZK01IrTRi1KgiQl8ztaQCNekNFEYPWhKiT0DCXxteCQFpAvSCkSYQ0rktEXFkpSI74ObOI2xtO5w7gRxz9V3U7B/WPzkf/w0hO3lZTep4uvz1y/Pr3Xi90/sP/+nJ0O6bG9unCl3N+3JycN3D44uJ822SaZQMUnMHBiHLEJEKJLYFi7EBMy2sCD44PFB9mHYdilz6GCp9Dg6YDfff+jpfLVZAonEXDv6xncWP/79+//6X/+sHd1AxW9fn0XZvP/x5P/1Lz7J88n/9r//rS2PQOLnz75sdyOBGda78qjeXyx0CGWhPUsQiaPff7D/5uImZ7XdtNO9uukkBN4u/d7+4nW3S16hLgrN1US3u91ZTM02kNZd632Csp7qnIIfg8cY+Hix2C0HiGIhlxb+8Eef3lys9urTcnq8f3R8+fLl2LT1zNxcXM8mVlCvztZep9F7jXR8ckhEVT0DlhiCYG+0NkqnFEGYCI1SwnfzjmPIhdWVtRCiZCCBzFJVRhckwIZAgI6OyqYPm2EA0MZayKJQZc6ZMyIiIrMQoLFGGF6fnx3Npwut+y7EAJ3f+ZEZtIgQKhBAhTFnRQQInDkLSIbCaoNUaytRsM86xYeLw+livwD54lc/twnM4l6bgMt7/7+//JMXl9eLjx5dbIfb5fLweP/6+vWfvXl2cm/xxz/43V3TP7u4YMzX62ZWGMhSF9XQ9hhzTpxzLB2gVoMfQSttNBFyiGVdaq2jHzmzZEZgBCShkgrFbuy1b3IxOTLTvc2quz47W252CMQpFcYoa6dVLYm7sEWi6WQ2Kuh2rQ/9GJMoQ0Qa7Nj7cRgPajpcTNEPQ9/m8PXZwIdQWieJUHhalZBCHHG9bWFMSghRkrCA+JyVUX5MiTORamNTpqiL4mA29yFvVzv0nkhHHzFC9HmMHhE8hBACsDAjMSCZCdrc+k0aVMlGmYePPjx/fr1dXTCwq3VVFkM/aqVjioo0pyyZS2uVguiTcEYCyTKGngS4hjGnIUafkjFFCjnF7FB5yEIKCW6XN4WxWUFGLIwKIeYEtjRO6+jTEEZgRk7TvRoztK13hZ1MKsiJhLxWmfO66WalsXVZKfXByd5t2292XkSISGkFMWmiKqHkqCLymOe26G0qSMhSl1IYhv3j+d58FsfkQ0zjsO3a7ehD4sW9+7O9gy/Pvlrn8brvnNW8W2bhyhptnFMICvPoj48nppB2F12JXLqhDd1mMEalIL/522d9k4xxu24dc7SlDV0GNhJ10Pjw3tH1+cXVro8BUVFdHTCWyPFgoVNKzhoicBq952a1GdOoynK2mJGRpuutJBRzvRun070JVt329tnFF69W//Dj3/1OOa1Wm42I2JynSl9eXaMuQHTM0vUDrJdhjM5VKYs2pu/DYlLk6FPIopEAN13Q2riiiCEIyd7h3KJE77vRu8E2vZ/v7a822yQqMqDSYRzLUi/KYrPtvEqadLgZkcmR6vrOsTXG7dlpu90AsmUtIgjEnEtD0QfSCgzlnCUxhigUjFYxBjDagBBS+seij6a5+ejePd+4x0tDWdIu3lPDo9m81lhRqVRO0XeXt7gSfbOrJ/q/ffBQMuIsD37AA+05cvCtxJxxUs+9ysKpvndU2OJydatn9XI3AhBuAnR+CnoIHkgKY0JKtrLVtHh6tsxKPnr3Hi+XQwh1ZcZMDz949MXzr/IQX3727L39ukqUkv/wZP/12c2u6dKsPl/G2z4+rGy0xpGEbtA2KlFlado+bJqxHUYlarpXhOQ5eRlDaZ0ra9aTd2cPNmfnL5Z/e28Kx5988ObyxeLx+9fnl7vdzpS2ETmqTIhx7IMzVkgw589+/stpaf/43uFp6V7crjlxbrrEXFuXU/QSBYSTTOfToR+jT5PKfPTg/uZ611wvf9Ounz892zs5PK3m4fpWg9IaSyU3Z6+1KavagaLo8wePH6avOoCkCLKIQZxUDkVyTsOuJcGTvRIABDjEkIhOTvfGvpuWeyH6dtDtdjOZKNtcbl/OduvQt9EZ3JtNRZGwmkxqU1UKoSjRZIbFZNn2Q+SE8OFH77/56jxnuxlHlvT06jIPYfWbFWhXuioK/PzJ0zz2xweTSV3dXq9fra4YTUI8nL33O5/+wb//2X84VJ
Cy3D/dZx8TQ0jw5PWtJGkhXne3oLjpx9R2Y2JEU5TF/nQyO6mJuI/D6fFJu2lEGIDttEwZu7aNmgttZvNJqZEFj09VYtaUYmCJEoXdtL5arlxpFWVT03XbnX/+HEk3Y0jn14gUcuh91JXLORqUorSe84Nv7O3vTcGa+cFxbOvLsxfV5OurgffeGq2dhpQlhmag4/leycpxLDgVh/PYdMvtdjqpzm4uUwpWGztVIYbVDvYntcrROtWMXQbgFFh0PSs9Y9Nylet3Tu4tr5dW6TEkB7Rfl4/2J07D9fW2j3la2vfuzx7ONQ79j7/7sdKFz/jxO6eN5M9eXqy7FoU4Ziv80f2jAqhgYGtGzqYuN1e7ssIEaBHSruGcCwu20MKgAR8cHqxD/upmnaKa1mVVqEfHi83gQasc882232Terbqui5xYgIvSCYMfM2mlnd103QMsHj98+OVlU+/V3cq34/D48XHTLqFNdYbKTY2iYTfYjDmkjx4dN6NnVrFP7P30xD58dPjzV2fD0MPi6MnyRlXON+vt7raYTc5ub7XGw/n+/uLePzz/LTMT0c12+8fvP/71Lz7b6jgavev5YDaNPi9mswR5zOn68ma7bd+5f3K6OPr0g4/mSBDS1WqJLDrH4+MZaqdXbvQsiECqntdJaHu9NnXRbLvFgwMNRUrp6eVF/Uu6vzjYBLmI3eDTi7OLaWHnClabAcDsdiN73g19vzc/ONrfrqOzehhDoxSQXm66wjjKTYr04uwGYDItXaE15KhRzU7qJg40Yv9mM6HZsOoMGLOoJ5PZK9wBMwoBCkdxVcUKOUVTaWfUarczigwpDnlvPgGp+27smmFvWoHk68vrojKAanE4JVA5qcmsvt5sPrq3+L3vvPvrz18lgyfTcrseY5RffvbUETpnlcDR3j63u64ZY8qZ6ZOPHqs03m7X480ymzpkQMT/ZFWEbMykKOYKtP/q75/tVks1bN959/jg/mx10+zt1W07ROF6WgRk3w+YmFG00cnnnEVyDjk7p7XWGqHvw3TqQuaMmYiZYoacynz6/snTr147pVnnJoyjT8bpyczGJPN9u386v77YWcB6VsUorjAhJwScLibTeXn15iaGcHx/sd525+dvDCNm6ZsumfzF8+dd05wezo/eOT2MaVzuvE+GVH2v/Oi7722v29X5el67XbsdupRiVIWWCC9e3tT1DNJAPI4pBIXDGAyaMSatSU90J3loRhLuuiApE8lmNxilUFFhbBgCoADIth3ztiGkWVU23TCqYADbQRLRcnP7rQ8fP/jg/k//9G+Wl7eLaTGG4aNvfXB1vUIfIfLepCJIfdelxKkUBtREZeWs08aq0EdjTD2vfOS+88LY7PpqSsFnQmWc5Rjn85qQ0pAwZeIsALPpLPQDatFOFYsjSiIx3PmCmDmjIFAUgSwGlTJqOnNDd5Ga4eKs7dsxhwjKKaQYRh+0KU1W0LRhGEOlrR8yJYkC66Gd1JYwHOxNG/S3QwMkaRjJ5f2H1b0/fGQPD9rfDocOr24bZWdDSJnswOCX3jkAq3dRFBRtGLs+Vs4FH6iG08fHb37xQqXISZTVfhiDD7OCPnjniPZKVua9eu/dz85XXQeAKUfnXMip770ITKauGYZR0hWkvN6pTTNmDgIppeW6Nc4AZ1QqMPftMJ3VYvXovRZo/dfXg0IZIWJUY8hGKSRkYoUowgIREtxtEwBFUDgxAokwImv6OkOusATMmlAAWEQhAyCSIAApyMycGe+cZYikkJEFGEiAWDHz164zZADhO9ObIKBRpI0yFkmDABNSEg7CKWZgSRnRg7IshGAoM5NSugTfJoAkyFFI4l3bGmUGQp1ZFAIihhw5S8gcUwo5Z4Fwl5EkgIR4F7UtLJlRRClKICzMWQgxYw4xK00AklIixZyYEH1KChVpZErMgIJaEylUBChiDVWlG3ddXTrtKI65G8c+paKqgABAck4ZRBJkJSiAgMZivqv8VaxLGPKwSnTwzvEHg/3sF79alt3sdz76vz36yb/8X/6N98Mo4DIU+6AMJAZUEjqACAbFVXlxaKZHFikYGzMIAGQGo4G1pABNAz5DcowOsAQgUAUkAGNBFyACkgAVWAvCQARKYDah8j6g2O3tEDqICTKwMAtDStBtoQtpasPewuYsIlQYe3yyOJnS2aQb3wzbm5EBzMSBorH1maEoQVAQkEARIYAICyrimEAYBBSSYqYotqRxGMQoAeDEgAIkRBRZBAFBlCbkyASoxVTUB4+E1irUMJsbNcmzRTE9soak0tPh6msbZuD48vK86brV0n/1Yvn6zY3iOCnp/VPeP4jLnpotzw7mRw/ff/b56uJZX4noqWpD9OOYGRF0lgQZtVYpS/jH5QVp1kaj5tXVVb8bFdrD0/3lauy3XeH0F/6F+DGGaI32vlHWvvOdb//y7GbX5+VNk0wSlJ+fb5745nXTfOf7j//5f/2Hk8XeX/67vztbrhWBIdFTsCWEHJSwAhaC+rBMkjZN23WjAGnLrKH346Le78ewebn2AxKhSGROu21PyJzCbheKwprCmtq0YYidtwBKKWYotKunta74028/+p/+3//iV3/12fvvgR3D3/7NF9dnz0LXkaZlk4YQelQhQpP7sQ2qspz9VIcHp6U9QFtp65mNEAkZkcQSExmFBByFFAEkrbLVqJ2r50UYImfRVhcEtcLpxPhxUFpliaQEYiYGpxQT5AxEFGISEYVEGpgzcOaUEuK26+69u5iqkgFfveiysDIgIiAASHd/OVmS0RoACkPWmLI0RqCclZWzdsFHe+WH3/5Q2/ri1WV7vakKUz+ann3x4v/zb37qtzuPNg0xSvzdf/KN7W2T+vHFxWq53b18fs7KOFds1q1jGbpekbGVMWUJqdWaQJPnHQuT1gySUq4KkwwBQuKcALTVyuphNxqgBIlJJofz3bh+8OC+csWQ+cmvPvfrNvkRSRmj1UQfnB6UZc2pT12DQE0KfdvlmFLyYRzuqgOYMwJbQ2RU1jCvZtvbse/912cD0Z69tThEv41DMdnf7Nbr1ZaySkiA2DUBEJGIfBZGRPKdT332nKcnarNrum6Zm8hDKOvaxxRCyvnrRgKRbK0OMQNAWbkUYozJOtf2vrvt9vZn2/Q89l4RIoiA9HFk5ODHFJiJlVJKQUopZ1AIoLAfhpzEoAKrBsqKRRRWdZEJTZaUWBj2FtNd5yVlU5L3iYFSzkVtlCGOqapN3/WzukDUEtgUetUNs6owk5IQGeLByR44E6+2Q58yxzAkqYxAdmSOprVFHbOEmJklisw03tuf7U1UZWnb9u+8f2z7us3no8QcPEAeJdFUFfNifTG8fnXR9t2QODNcbC68C9fb3qcoSSIHzuHxNx+eHp88+/I8r1pO2VoiyRzFaQUhZmCtEATdxCSVdushdoIFiEBM2fiYRp7V5ThGdPjV69ddO8QuTopybP18z833y6GX1XKjSXUwZM45pboqFZERUgqWmy0i5MQx5XHcGWO1rQebskER56bl0+Xt+sn25HgReh9jJGUKV6csRishaocxQeyaY
J0jlUGyLakdWzRF4rxe9lrZybwyzrY7r0kXNU0P9Pa2mS9cN3hrUheHNnAG0SWllRfJXBd9iD6Obdu7olJKde0IGVChcykDAQ2RGkg+h1w6c/dESpy/NsAnYAAijZE5pcy5rKwQscoMkAOHrwvQACbzdd8eT+y9ubl3snhw/+DqyauFyrEbF8fFxO31y+bN682kgGoytRaKqUz291jb4IFjnB4ejLs1imTO1mhG8qOPPqvCffOHn17cdu+xI6u2XVpuezObdQBPn78aUl61w8VqWzB/8P59Evzkm+8WL3V+cSsBHOTXf//brCx6rwWUDm+6m1ltNqHdNePQpbQZlm3aMitCksyhzYkbzwhq223vrP0hYgqZZ4VBUILTgoLvbOmqSn315X9MwSc/OqMyU5T86vUVZ6gms8EHSkkgT0unQEREELsubcbUAh+krcRxj1AKssRaQ+AIciflM2NIPStTud6PAryNzYj+5atmvqhUUXZd2Fy/Wl5d708LrdXQ9in7w/tl4hzGWJZljjJVNRsQyYYUEta1DSnLmG1phz4YhcMYtdFWM6dx6DEOYVa5yZ51Rh4c1ENoJjN89vTL2CWnFUtmyMroyXQybIeqrBcHs74PcRQyWGqz2m5OJ+WTV6so6ujBwfDyAqy6G9N+l0AnbcdNEwQoxdQM2dg2C/sgzHDwge3i5pfP+2IyVYY2m/71q6XVmoztQyhriyhtNyiD3RBcbZyyBUjOYBU2u02/2wEIg2x3raQEwJI5xBBjdtYUlev7FnMws6ofw141DV2MKPcenNR1OXTD7apRSnVt32zbyLmwhgH6MWQB0jqlxChB2ETimFiJqYxWzGP75S8vim99lKwd4u5wVkTz9dlAsg7grdI5BWetILy8vT0s7e16i5W2k5qgVIkTYrvrgVFi5pg0agJo+6Hxqa4Ko1Tf9RyDBkbmJDir6Xz5fESmRGQhxzjmrIBevBmKWfXw/jwGP6nw+7//MKyDRj2IWV5tQ8pSOUUAOcmQtFGlkr2qePj4GJPcXK+6JnR+DMx1QTmHcUzAhkLo2x3q40RQ25IBHaCxVREp5SCK6+kk6KSNGf3YbHepsDe7Xfw6ewH6JEkJEwiknPjevfrbv/ewBDzkw5sR/OgJBklpf2/vw289lmHoOwxZcoSpdnHXJ0FTFcvz26Iqp062u3EZti4Xj987XezvGdBPv3peOSdGzWa1rnXbhu1NZ/f3znYXqIuDWm+GcX7vqCnj7P1jCl2/63bbXXa46xJnKGeuUDpy6Hfdr1dfPX/18vho/4//s+9VRJ+9ONPIHLkfR3FZa9qflG7squrgp3/3mwTmvXff8b6pSBTEOPQoVNTms6dvHpweDjHWBcY4vtmuMMnNwTT4XJTO1E4v14bUwOGy2wUETTqBJMlaI2buU/7gux/fTnLUzg9RawCDOUXW/MmP38dKbm9ueNN3Y7z/6P44jrFX20zMokiyZIZczxy61PtECrIG33mIUURpVKxUDDFnUUpVtQPNRAr64EcinU8f7DVx+MFPvvuzv/mH9cVw4+xxVVpjrdUQxAAk4iR+vfR783pxOIdDd/lmk0IqncqtDzBGzctmACDfNdLzdP6PG9O3vOUtb3nLW97ylre85S1vectb3vKWt7zlLW95y1ve8pa3vOUtb3nLW97ylre85S1v+U/4vwAXh2gItVZatQAAAABJRU5ErkJggg==\\n\"\n     },\n     \"metadata\": {},\n     \"execution_count\": 6\n    }\n   ]\n  },\n  {\n   \"cell_type\": \"code\",\n   \"source\": [\n    \"\"\n   ],\n   \"metadata\": {\n    \"id\": \"92QkRfm0e6K0\"\n   },\n   \"execution_count\": null,\n   \"outputs\": []\n  }\n ]\n}"
  },
  {
    "path": "ldm_exp/scripts/sample_diffusion.py",
    "content": "import argparse, os, sys, glob, datetime, yaml\nimport torch\nimport time\nimport numpy as np\nfrom tqdm import trange\n\nfrom omegaconf import OmegaConf\nfrom PIL import Image\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\nfrom ldm.util import instantiate_from_config\n\nrescale = lambda x: (x + 1.) / 2.\n\ndef custom_to_pil(x):\n    x = x.detach().cpu()\n    x = torch.clamp(x, -1., 1.)\n    x = (x + 1.) / 2.\n    x = x.permute(1, 2, 0).numpy()\n    x = (255 * x).astype(np.uint8)\n    x = Image.fromarray(x)\n    if not x.mode == \"RGB\":\n        x = x.convert(\"RGB\")\n    return x\n\n\ndef custom_to_np(x):\n    # saves the batch in adm style as in https://github.com/openai/guided-diffusion/blob/main/scripts/image_sample.py\n    sample = x.detach().cpu()\n    sample = ((sample + 1) * 127.5).clamp(0, 255).to(torch.uint8)\n    sample = sample.permute(0, 2, 3, 1)\n    sample = sample.contiguous()\n    return sample\n\n\ndef logs2pil(logs, keys=[\"sample\"]):\n    imgs = dict()\n    for k in logs:\n        try:\n            if len(logs[k].shape) == 4:\n                img = custom_to_pil(logs[k][0, ...])\n            elif len(logs[k].shape) == 3:\n                img = custom_to_pil(logs[k])\n            else:\n                print(f\"Unknown format for key {k}. \")\n                img = None\n        except:\n            img = None\n        imgs[k] = img\n    return imgs\n\n\n@torch.no_grad()\ndef convsample(model, shape, return_intermediates=True,\n               verbose=True,\n               make_prog_row=False):\n\n\n    if not make_prog_row:\n        return model.p_sample_loop(None, shape,\n                                   return_intermediates=return_intermediates, verbose=verbose)\n    else:\n        return model.progressive_denoising(\n            None, shape, verbose=True\n        )\n\n\n@torch.no_grad()\ndef convsample_ddim(model, steps, shape, eta=1.0\n                    ):\n    ddim = DDIMSampler(model)\n    bs = shape[0]\n    shape = shape[1:]\n    samples, intermediates = ddim.sample(steps, batch_size=bs, shape=shape, eta=eta, verbose=False,)\n    return samples, intermediates\n\n\n@torch.no_grad()\ndef make_convolutional_sample(model, batch_size, vanilla=False, custom_steps=None, eta=1.0,):\n\n\n    log = dict()\n\n    shape = [batch_size,\n             model.model.diffusion_model.in_channels,\n             model.model.diffusion_model.image_size,\n             model.model.diffusion_model.image_size]\n\n    with model.ema_scope(\"Plotting\"):\n        t0 = time.time()\n        if vanilla:\n            sample, progrow = convsample(model, shape,\n                                         make_prog_row=True)\n        else:\n            sample, intermediates = convsample_ddim(model,  steps=custom_steps, shape=shape,\n                                                    eta=eta)\n\n        t1 = time.time()\n\n    x_sample = model.decode_first_stage(sample)\n\n    log[\"sample\"] = x_sample\n    log[\"time\"] = t1 - t0\n    log['throughput'] = sample.shape[0] / (t1 - t0)\n    print(f'Throughput for this batch: {log[\"throughput\"]}')\n    return log\n\ndef run(model, logdir, batch_size=50, vanilla=False, custom_steps=None, eta=None, n_samples=50000, nplog=None):\n    if vanilla:\n        print(f'Using Vanilla DDPM sampling with {model.num_timesteps} sampling steps.')\n    else:\n        print(f'Using DDIM sampling with {custom_steps} sampling steps and eta={eta}')\n\n\n    tstart = time.time()\n    n_saved = len(glob.glob(os.path.join(logdir,'*.png')))-1\n    
# path = logdir\n    if model.cond_stage_model is None:\n        all_images = []\n\n        print(f\"Running unconditional sampling for {n_samples} samples\")\n        for _ in trange(n_samples // batch_size, desc=\"Sampling Batches (unconditional)\"):\n            logs = make_convolutional_sample(model, batch_size=batch_size,\n                                             vanilla=vanilla, custom_steps=custom_steps,\n                                             eta=eta)\n            n_saved = save_logs(logs, logdir, n_saved=n_saved, key=\"sample\")\n            all_images.extend([custom_to_np(logs[\"sample\"])])\n            if n_saved >= n_samples:\n                print(f'Finish after generating {n_saved} samples')\n                break\n        all_img = np.concatenate(all_images, axis=0)\n        all_img = all_img[:n_samples]\n        shape_str = \"x\".join([str(x) for x in all_img.shape])\n        nppath = os.path.join(nplog, f\"{shape_str}-samples.npz\")\n        np.savez(nppath, all_img)\n\n    else:\n       raise NotImplementedError('Currently only sampling for unconditional models supported.')\n\n    print(f\"sampling of {n_saved} images finished in {(time.time() - tstart) / 60.:.2f} minutes.\")\n\n\ndef save_logs(logs, path, n_saved=0, key=\"sample\", np_path=None):\n    for k in logs:\n        if k == key:\n            batch = logs[key]\n            if np_path is None:\n                for x in batch:\n                    img = custom_to_pil(x)\n                    imgpath = os.path.join(path, f\"{key}_{n_saved:06}.png\")\n                    img.save(imgpath)\n                    n_saved += 1\n            else:\n                npbatch = custom_to_np(batch)\n                shape_str = \"x\".join([str(x) for x in npbatch.shape])\n                nppath = os.path.join(np_path, f\"{n_saved}-{shape_str}-samples.npz\")\n                np.savez(nppath, npbatch)\n                n_saved += npbatch.shape[0]\n    return n_saved\n\n\ndef get_parser():\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\n        \"-r\",\n        \"--resume\",\n        type=str,\n        nargs=\"?\",\n        help=\"load from logdir or checkpoint in logdir\",\n    )\n    parser.add_argument(\n        \"-n\",\n        \"--n_samples\",\n        type=int,\n        nargs=\"?\",\n        help=\"number of samples to draw\",\n        default=50000\n    )\n    parser.add_argument(\n        \"-e\",\n        \"--eta\",\n        type=float,\n        nargs=\"?\",\n        help=\"eta for ddim sampling (0.0 yields deterministic sampling)\",\n        default=1.0\n    )\n    parser.add_argument(\n        \"-v\",\n        \"--vanilla_sample\",\n        default=False,\n        action='store_true',\n        help=\"vanilla sampling (default option is DDIM sampling)?\",\n    )\n    parser.add_argument(\n        \"-l\",\n        \"--logdir\",\n        type=str,\n        nargs=\"?\",\n        help=\"extra logdir\",\n        default=\"none\"\n    )\n    parser.add_argument(\n        \"-c\",\n        \"--custom_steps\",\n        type=int,\n        nargs=\"?\",\n        help=\"number of steps for ddim and fastdpm sampling\",\n        default=50\n    )\n    parser.add_argument(\n        \"--batch_size\",\n        type=int,\n        nargs=\"?\",\n        help=\"the bs\",\n        default=10\n    )\n    return parser\n\n\ndef load_model_from_config(config, sd):\n    model = instantiate_from_config(config)\n    model.load_state_dict(sd,strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef 
load_model(config, ckpt, gpu, eval_mode):\n    if ckpt:\n        print(f\"Loading model from {ckpt}\")\n        pl_sd = torch.load(ckpt, map_location=\"cpu\")\n        global_step = pl_sd[\"global_step\"]\n    else:\n        pl_sd = {\"state_dict\": None}\n        global_step = None\n    model = load_model_from_config(config.model,\n                                   pl_sd[\"state_dict\"])\n\n    return model, global_step\n\n\nif __name__ == \"__main__\":\n    now = datetime.datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")\n    sys.path.append(os.getcwd())\n    command = \" \".join(sys.argv)\n\n    parser = get_parser()\n    opt, unknown = parser.parse_known_args()\n    ckpt = None\n\n    if not os.path.exists(opt.resume):\n        raise ValueError(\"Cannot find {}\".format(opt.resume))\n    if os.path.isfile(opt.resume):\n        # paths = opt.resume.split(\"/\")\n        try:\n            logdir = '/'.join(opt.resume.split('/')[:-1])\n            # idx = len(paths)-paths[::-1].index(\"logs\")+1\n            print(f'Logdir is {logdir}')\n        except ValueError:\n            paths = opt.resume.split(\"/\")\n            idx = -2  # take a guess: path/to/logdir/checkpoints/model.ckpt\n            logdir = \"/\".join(paths[:idx])\n        ckpt = opt.resume\n    else:\n        assert os.path.isdir(opt.resume), f\"{opt.resume} is not a directory\"\n        logdir = opt.resume.rstrip(\"/\")\n        ckpt = os.path.join(logdir, \"model.ckpt\")\n\n    base_configs = sorted(glob.glob(os.path.join(logdir, \"config.yaml\")))\n    opt.base = base_configs\n\n    configs = [OmegaConf.load(cfg) for cfg in opt.base]\n    cli = OmegaConf.from_dotlist(unknown)\n    config = OmegaConf.merge(*configs, cli)\n\n    gpu = True\n    eval_mode = True\n\n    if opt.logdir != \"none\":\n        locallog = logdir.split(os.sep)[-1]\n        if locallog == \"\": locallog = logdir.split(os.sep)[-2]\n        print(f\"Switching logdir from '{logdir}' to '{os.path.join(opt.logdir, locallog)}'\")\n        logdir = os.path.join(opt.logdir, locallog)\n\n    print(config)\n\n    model, global_step = load_model(config, ckpt, gpu, eval_mode)\n    print(f\"global step: {global_step}\")\n    print(75 * \"=\")\n    print(\"logging to:\")\n    logdir = os.path.join(logdir, \"samples\", f\"{global_step:08}\", now)\n    imglogdir = os.path.join(logdir, \"img\")\n    numpylogdir = os.path.join(logdir, \"numpy\")\n\n    os.makedirs(imglogdir)\n    os.makedirs(numpylogdir)\n    print(logdir)\n    print(75 * \"=\")\n\n    # write config out\n    sampling_file = os.path.join(logdir, \"sampling_config.yaml\")\n    sampling_conf = vars(opt)\n\n    with open(sampling_file, 'w') as f:\n        yaml.dump(sampling_conf, f, default_flow_style=False)\n    print(sampling_conf)\n\n\n    run(model, imglogdir, eta=opt.eta,\n        vanilla=opt.vanilla_sample,  n_samples=opt.n_samples, custom_steps=opt.custom_steps,\n        batch_size=opt.batch_size, nplog=numpylogdir)\n\n    print(\"done.\")\n"
  },
  {
    "path": "ldm_exp/scripts/train_searcher.py",
    "content": "import os, sys\nimport numpy as np\nimport scann\nimport argparse\nimport glob\nfrom multiprocessing import cpu_count\nfrom tqdm import tqdm\n\nfrom ldm.util import parallel_data_prefetch\n\n\ndef search_bruteforce(searcher):\n    return searcher.score_brute_force().build()\n\n\ndef search_partioned_ah(searcher, dims_per_block, aiq_threshold, reorder_k,\n                        partioning_trainsize, num_leaves, num_leaves_to_search):\n    return searcher.tree(num_leaves=num_leaves,\n                         num_leaves_to_search=num_leaves_to_search,\n                         training_sample_size=partioning_trainsize). \\\n        score_ah(dims_per_block, anisotropic_quantization_threshold=aiq_threshold).reorder(reorder_k).build()\n\n\ndef search_ah(searcher, dims_per_block, aiq_threshold, reorder_k):\n    return searcher.score_ah(dims_per_block, anisotropic_quantization_threshold=aiq_threshold).reorder(\n        reorder_k).build()\n\ndef load_datapool(dpath):\n\n\n    def load_single_file(saved_embeddings):\n        compressed = np.load(saved_embeddings)\n        database = {key: compressed[key] for key in compressed.files}\n        return database\n\n    def load_multi_files(data_archive):\n        database = {key: [] for key in data_archive[0].files}\n        for d in tqdm(data_archive, desc=f'Loading datapool from {len(data_archive)} individual files.'):\n            for key in d.files:\n                database[key].append(d[key])\n\n        return database\n\n    print(f'Load saved patch embedding from \"{dpath}\"')\n    file_content = glob.glob(os.path.join(dpath, '*.npz'))\n\n    if len(file_content) == 1:\n        data_pool = load_single_file(file_content[0])\n    elif len(file_content) > 1:\n        data = [np.load(f) for f in file_content]\n        prefetched_data = parallel_data_prefetch(load_multi_files, data,\n                                                 n_proc=min(len(data), cpu_count()), target_data_type='dict')\n\n        data_pool = {key: np.concatenate([od[key] for od in prefetched_data], axis=1)[0] for key in prefetched_data[0].keys()}\n    else:\n        raise ValueError(f'No npz-files in specified path \"{dpath}\" is this directory existing?')\n\n    print(f'Finished loading of retrieval database of length {data_pool[\"embedding\"].shape[0]}.')\n    return data_pool\n\n\ndef train_searcher(opt,\n                   metric='dot_product',\n                   partioning_trainsize=None,\n                   reorder_k=None,\n                   # todo tune\n                   aiq_thld=0.2,\n                   dims_per_block=2,\n                   num_leaves=None,\n                   num_leaves_to_search=None,):\n\n    data_pool = load_datapool(opt.database)\n    k = opt.knn\n\n    if not reorder_k:\n        reorder_k = 2 * k\n\n    # normalize\n    # embeddings =\n    searcher = scann.scann_ops_pybind.builder(data_pool['embedding'] / np.linalg.norm(data_pool['embedding'], axis=1)[:, np.newaxis], k, metric)\n    pool_size = data_pool['embedding'].shape[0]\n\n    print(*(['#'] * 100))\n    print('Initializing scaNN searcher with the following values:')\n    print(f'k: {k}')\n    print(f'metric: {metric}')\n    print(f'reorder_k: {reorder_k}')\n    print(f'anisotropic_quantization_threshold: {aiq_thld}')\n    print(f'dims_per_block: {dims_per_block}')\n    print(*(['#'] * 100))\n    print('Start training searcher....')\n    print(f'N samples in pool is {pool_size}')\n\n    # this reflects the recommended design choices proposed at\n    # 
https://github.com/google-research/google-research/blob/aca5f2e44e301af172590bb8e65711f0c9ee0cfd/scann/docs/algorithms.md\n    if pool_size < 2e4:\n        print('Using brute force search.')\n        searcher = search_bruteforce(searcher)\n    elif 2e4 <= pool_size and pool_size < 1e5:\n        print('Using asymmetric hashing search and reordering.')\n        searcher = search_ah(searcher, dims_per_block, aiq_thld, reorder_k)\n    else:\n        print('Using partitioning, asymmetric hashing search and reordering.')\n\n        if not partioning_trainsize:\n            partioning_trainsize = data_pool['embedding'].shape[0] // 10\n        if not num_leaves:\n            num_leaves = int(np.sqrt(pool_size))\n\n        if not num_leaves_to_search:\n            num_leaves_to_search = max(num_leaves // 20, 1)\n\n        print('Partitioning params:')\n        print(f'num_leaves: {num_leaves}')\n        print(f'num_leaves_to_search: {num_leaves_to_search}')\n        # self.searcher = self.search_ah(searcher, dims_per_block, aiq_thld, reorder_k)\n        searcher = search_partioned_ah(searcher, dims_per_block, aiq_thld, reorder_k,\n                                                 partioning_trainsize, num_leaves, num_leaves_to_search)\n\n    print('Finished training searcher')\n    searcher_savedir = opt.target_path\n    os.makedirs(searcher_savedir, exist_ok=True)\n    searcher.serialize(searcher_savedir)\n    print(f'Saved trained searcher under \"{searcher_savedir}\"')\n\nif __name__ == '__main__':\n    sys.path.append(os.getcwd())\n    parser = argparse.ArgumentParser()\n    parser.add_argument('--database',\n                        '-d',\n                        default='data/rdm/retrieval_databases/openimages',\n                        type=str,\n                        help='path to folder containing the clip feature of the database')\n    parser.add_argument('--target_path',\n                        '-t',\n                        default='data/rdm/searchers/openimages',\n                        type=str,\n                        help='path to the target folder where the searcher shall be stored.')\n    parser.add_argument('--knn',\n                        '-k',\n                        default=20,\n                        type=int,\n                        help='number of nearest neighbors, for which the searcher shall be optimized')\n\n    opt, _  = parser.parse_known_args()\n\n    train_searcher(opt,)"
  },
  {
    "path": "ldm_exp/scripts/txt2img.py",
    "content": "import argparse, os, sys, glob\nimport torch\nimport numpy as np\nfrom omegaconf import OmegaConf\nfrom PIL import Image\nfrom tqdm import tqdm, trange\nfrom einops import rearrange\nfrom torchvision.utils import make_grid\n\nfrom ldm.util import instantiate_from_config\nfrom ldm.models.diffusion.ddim import DDIMSampler\nfrom ldm.models.diffusion.plms import PLMSSampler\n\n\ndef load_model_from_config(config, ckpt, verbose=False):\n    print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    if len(m) > 0 and verbose:\n        print(\"missing keys:\")\n        print(m)\n    if len(u) > 0 and verbose:\n        print(\"unexpected keys:\")\n        print(u)\n\n    model.cuda()\n    model.eval()\n    return model\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n\n    parser.add_argument(\n        \"--prompt\",\n        type=str,\n        nargs=\"?\",\n        default=\"a painting of a virus monster playing guitar\",\n        help=\"the prompt to render\"\n    )\n\n    parser.add_argument(\n        \"--outdir\",\n        type=str,\n        nargs=\"?\",\n        help=\"dir to write results to\",\n        default=\"outputs/txt2img-samples\"\n    )\n    parser.add_argument(\n        \"--ddim_steps\",\n        type=int,\n        default=200,\n        help=\"number of ddim sampling steps\",\n    )\n\n    parser.add_argument(\n        \"--plms\",\n        action='store_true',\n        help=\"use plms sampling\",\n    )\n\n    parser.add_argument(\n        \"--ddim_eta\",\n        type=float,\n        default=0.0,\n        help=\"ddim eta (eta=0.0 corresponds to deterministic sampling\",\n    )\n    parser.add_argument(\n        \"--n_iter\",\n        type=int,\n        default=1,\n        help=\"sample this often\",\n    )\n\n    parser.add_argument(\n        \"--H\",\n        type=int,\n        default=256,\n        help=\"image height, in pixel space\",\n    )\n\n    parser.add_argument(\n        \"--W\",\n        type=int,\n        default=256,\n        help=\"image width, in pixel space\",\n    )\n\n    parser.add_argument(\n        \"--n_samples\",\n        type=int,\n        default=4,\n        help=\"how many samples to produce for the given prompt\",\n    )\n\n    parser.add_argument(\n        \"--scale\",\n        type=float,\n        default=5.0,\n        help=\"unconditional guidance scale: eps = eps(x, empty) + scale * (eps(x, cond) - eps(x, empty))\",\n    )\n    opt = parser.parse_args()\n\n\n    config = OmegaConf.load(\"configs/latent-diffusion/txt2img-1p4B-eval.yaml\")  # TODO: Optionally download from same location as ckpt and chnage this logic\n    model = load_model_from_config(config, \"models/ldm/text2img-large/model.ckpt\")  # TODO: check path\n\n    device = torch.device(\"cuda\") if torch.cuda.is_available() else torch.device(\"cpu\")\n    model = model.to(device)\n\n    if opt.plms:\n        sampler = PLMSSampler(model)\n    else:\n        sampler = DDIMSampler(model)\n\n    os.makedirs(opt.outdir, exist_ok=True)\n    outpath = opt.outdir\n\n    prompt = opt.prompt\n\n\n    sample_path = os.path.join(outpath, \"samples\")\n    os.makedirs(sample_path, exist_ok=True)\n    base_count = len(os.listdir(sample_path))\n\n    all_samples=list()\n    with torch.no_grad():\n        with model.ema_scope():\n            uc = None\n            if opt.scale != 1.0:\n           
     uc = model.get_learned_conditioning(opt.n_samples * [\"\"])\n            for n in trange(opt.n_iter, desc=\"Sampling\"):\n                c = model.get_learned_conditioning(opt.n_samples * [prompt])\n                shape = [4, opt.H//8, opt.W//8]\n                samples_ddim, _ = sampler.sample(S=opt.ddim_steps,\n                                                 conditioning=c,\n                                                 batch_size=opt.n_samples,\n                                                 shape=shape,\n                                                 verbose=False,\n                                                 unconditional_guidance_scale=opt.scale,\n                                                 unconditional_conditioning=uc,\n                                                 eta=opt.ddim_eta)\n\n                x_samples_ddim = model.decode_first_stage(samples_ddim)\n                x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, min=0.0, max=1.0)\n\n                for x_sample in x_samples_ddim:\n                    x_sample = 255. * rearrange(x_sample.cpu().numpy(), 'c h w -> h w c')\n                    Image.fromarray(x_sample.astype(np.uint8)).save(os.path.join(sample_path, f\"{base_count:04}.png\"))\n                    base_count += 1\n                all_samples.append(x_samples_ddim)\n\n\n    # additionally, save as grid\n    grid = torch.stack(all_samples, 0)\n    grid = rearrange(grid, 'n b c h w -> (n b) c h w')\n    grid = make_grid(grid, nrow=opt.n_samples)\n\n    # to image\n    grid = 255. * rearrange(grid, 'c h w -> h w c').cpu().numpy()\n    Image.fromarray(grid.astype(np.uint8)).save(os.path.join(outpath, f'{prompt.replace(\" \", \"-\")}.png'))\n\n    print(f\"Your samples are ready and waiting four you here: \\n{outpath} \\nEnjoy.\")\n"
  },
  {
    "path": "ldm_exp/setup.py",
    "content": "from setuptools import setup, find_packages\n\nsetup(\n    name='latent-diffusion',\n    version='0.0.1',\n    description='',\n    packages=find_packages(),\n    install_requires=[\n        'torch',\n        'numpy',\n        'tqdm',\n    ],\n)"
  },
  {
    "path": "ldm_exp/test_criterion.py",
    "content": "import sys\nsys.path.append(\".\")\nsys.path.append('./taming-transformers')\nfrom taming.models import vqgan \nimport argparse\nfrom ldm.modules.attention import CrossAttention\nimport numpy as np\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--pruner\", type=str, choices=[\"magnitude\", \"random\", \"taylor\", \"diff-pruning\", \"reinit\", \"diff0\"], default=\"magnitude\")\nargs = parser.parse_args()\n\nimport torch\n\ntorch.manual_seed(0)\ntorch.cuda.manual_seed(0)\ntorch.backends.cudnn.deterministic = True\ntorch.backends.cudnn.benchmark = False\n\n#@title loading utils\n\nfrom omegaconf import OmegaConf\n\nfrom ldm.util import instantiate_from_config\n\nimport torch_pruning as tp\n\ndef load_model_from_config(config, ckpt):\n    #print(f\"Loading model from {ckpt}\")\n    pl_sd = torch.load(ckpt, map_location=\"cpu\")\n    sd = pl_sd[\"state_dict\"]\n    model = instantiate_from_config(config.model)\n    m, u = model.load_state_dict(sd, strict=False)\n    model.cuda()\n    model.eval()\n    return model\n\n\ndef get_model():\n    config = OmegaConf.load(\"configs/latent-diffusion/cin256-v2.yaml\")  \n    model = load_model_from_config(config, \"models/ldm/cin256-v2/model.ckpt\")\n    return model\n\nfrom ldm.models.diffusion.ddim import DDIMSampler\n\nclasses = [25, 187, 448, 992]   # define classes to be sampled here\nn_samples_per_class = 6\nddim_steps = 20\nddim_eta = 0.0\nscale = 3.0   # for unconditional guidance\nx_T = torch.randn(n_samples_per_class, 3, 64, 64).cuda()\n\nfor _sparsity in range(0,10,1):\n    sparsity = _sparsity / 100.0\n    print(\"sparsity: \", sparsity)\n    model = get_model()\n    sampler = DDIMSampler(model)\n\n    import numpy as np \n    from PIL import Image\n    from einops import rearrange\n    from torchvision.utils import make_grid\n    model.eval()\n\n    if args.pruner == \"magnitude\":\n        imp = tp.importance.MagnitudeImportance()\n    elif args.pruner == \"random\":\n        imp = tp.importance.RandomImportance()\n    elif args.pruner == 'taylor':\n        imp = tp.importance.TaylorImportance(multivariable=True) # standard first-order taylor expansion\n    elif args.pruner == 'diff-pruning' or args.pruner == 'diff0':\n        imp = tp.importance.TaylorImportance(multivariable=False) # a modified version, estimating the accumulated error of weight removal\n    else:\n        raise ValueError(f\"Unknown pruner '{args.pruner}'\")\n\n    ignored_layers = [model.model.diffusion_model.out]\n    channel_groups = {}\n    iterative_steps = 1\n    uc = model.get_learned_conditioning(\n                {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n                )\n\n\n    for m in model.model.diffusion_model.modules():\n        if isinstance(m, CrossAttention):\n            channel_groups[m.to_q] = m.heads\n            channel_groups[m.to_k] = m.heads\n            channel_groups[m.to_v] = m.heads\n\n\n    xc = torch.tensor(n_samples_per_class*[classes[0]])\n    c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n    example_inputs = {\"x\": torch.randn(n_samples_per_class, 3, 64, 64).to(model.device), \"timesteps\": torch.full((n_samples_per_class,), 1, device=model.device, dtype=torch.long), \"context\": c}\n    base_macs, base_params = tp.utils.count_ops_and_params(model.model.diffusion_model, example_inputs)\n    pruner = tp.pruner.MagnitudePruner(\n        model.model.diffusion_model,\n        example_inputs,\n        importance=imp,\n        
iterative_steps=1,\n        channel_groups =channel_groups,\n        ch_sparsity=sparsity, # remove 50% channels, ResNet18 = {64, 128, 256, 512} => ResNet18_Half = {32, 64, 128, 256}\n        ignored_layers=ignored_layers,\n        root_module_types=[torch.nn.Conv2d, torch.nn.Linear],\n        round_to=2\n    )\n    model.zero_grad()\n\n    import random\n    max_loss = -1\n\n    for t in range(1000):\n        if args.pruner not in ['diff-pruning', 'taylor', 'diff0']:\n            break\n        xc = torch.tensor(random.sample(range(1000), n_samples_per_class))\n        c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n        samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                        conditioning=c,\n                                        batch_size=n_samples_per_class,\n                                        shape=[3, 64, 64],\n                                        verbose=False,\n                                        unconditional_guidance_scale=scale,\n                                        unconditional_conditioning=uc, \n                                        eta=ddim_eta)\n\n        encoded = model.encode_first_stage(samples_ddim)\n        example_inputs = {\"x\": encoded.to(model.device), \"timesteps\": torch.full((n_samples_per_class,), t, device=model.device, dtype=torch.long), \"context\": c}\n        loss = model.get_loss_at_t(example_inputs['x'], {model.cond_stage_key: xc.to(model.device)}, example_inputs['timesteps'])\n        loss = loss[0]\n        if loss > max_loss:\n            max_loss = loss\n        thres = 0.1 if args.pruner == 'diff-pruning' else 0.0\n        if args.pruner == 'diff-pruning' or args.pruner == 'diff0':\n            if loss / max_loss<thres:\n                break\n        #print(t, (loss / max_loss).item(), loss.item(), max_loss.item())\n        loss.backward()\n\n    pruner.step()\n    all_samples = list()\n\n    with torch.no_grad():\n        with model.ema_scope():\n            uc = model.get_learned_conditioning(\n                {model.cond_stage_key: torch.tensor(n_samples_per_class*[1000]).to(model.device)}\n                )\n            \n            for class_label in classes:\n                #print(f\"rendering {n_samples_per_class} examples of class '{class_label}' in {ddim_steps} steps and using s={scale:.2f}.\")\n                xc = torch.tensor(n_samples_per_class*[class_label])\n                c = model.get_learned_conditioning({model.cond_stage_key: xc.to(model.device)})\n                \n                samples_ddim, _ = sampler.sample(S=ddim_steps,\n                                                conditioning=c,\n                                                batch_size=n_samples_per_class,\n                                                shape=[3, 64, 64],\n                                                verbose=False,\n                                                unconditional_guidance_scale=scale,\n                                                unconditional_conditioning=uc, \n                                                eta=ddim_eta,\n                                                x_T=x_T)\n                x_samples_ddim = model.decode_first_stage(samples_ddim)\n                x_samples_ddim = torch.clamp((x_samples_ddim+1.0)/2.0, \n                                            min=0.0, max=1.0)\n                all_samples.append(x_samples_ddim)\n\n    # display as grid\n    grid = torch.stack(all_samples, 0)\n    grid = rearrange(grid, 'n b c h w 
-> (n b) c h w')\n    grid = make_grid(grid, nrow=n_samples_per_class)\n\n    # to image\n    grid = 255. * rearrange(grid, 'c h w -> h w c').cpu().numpy()\n    img = Image.fromarray(grid.astype(np.uint8))\n    import os\n    os.makedirs(\"run/criteria/{}\".format(args.pruner), exist_ok=True)\n    img.save(\"run/criteria/{}/sparsity-{}.png\".format(args.pruner, sparsity))\n"
  },
  {
    "path": "ldm_exp/test_diffusion.py",
    "content": "import torch\nimport torch_fidelity\n\nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument('--input1', type=str)\nparser.add_argument('--input2', type=str)\n\nargs = parser.parse_args()\n\nmetrics_dict = torch_fidelity.calculate_metrics(\n    input1=args.input1, \n    input2=args.input2, \n    cuda=True, \n    isc=True, \n    fid=True, \n    kid=True, \n    prc=True, \n    verbose=False,\n    samples_find_deep=True\n)\nprint(metrics_dict)"
  },
  {
    "path": "ldm_prune.py",
    "content": "from diffusers import LDMPipeline, DDPMPipeline, DDIMPipeline, DDIMScheduler, DDPMScheduler, VQModel\nfrom diffusers.models import UNet2DModel\nimport torch_pruning as tp\nimport torch\nimport torchvision\nfrom torchvision import transforms\nimport torchvision\nfrom tqdm import tqdm\nimport os\nfrom glob import glob\nfrom PIL import Image\nimport accelerate\nimport utils\n\nimport argparse\nparser = argparse.ArgumentParser()\n#parser.add_argument(\"--dataset\", type=str, required=True)\nparser.add_argument(\"--model_path\", type=str, required=True)\nparser.add_argument(\"--save_path\", type=str, required=True)\nparser.add_argument(\"--pruning_ratio\", type=float, default=0.3)\nparser.add_argument(\"--batch_size\", type=int, default=128)\nparser.add_argument(\"--device\", type=str, default='cpu')\n#parser.add_argument(\"--pruner\", type=str, default='taylor', choices=['taylor', 'random', 'magnitude', 'reinit', 'diff-pruning'])\nparser.add_argument(\"--pruner\", type=str, default='random', choices=['random', 'magnitude', 'reinit'])\n\n#parser.add_argument(\"--thr\", type=float, default=0.05, help=\"threshold for diff-pruning\")\n\nargs = parser.parse_args()\n\nbatch_size = args.batch_size\n\nif __name__=='__main__':\n    #dataset = utils.get_dataset(args.dataset)\n    #print(f\"Dataset size: {len(dataset)}\")\n    #train_dataloader = torch.utils.data.DataLoader(\n    #    dataset, batch_size=args.batch_size, shuffle=True, num_workers=4, drop_last=True\n    #)\n    #import torch_pruning as tp\n\n    # loading images for gradient-based pruning\n    #clean_images = iter(train_dataloader).next()\n    #if isinstance(clean_images, (list, tuple)):\n    #    clean_images = clean_images[0]\n    #clean_images = clean_images.to(args.device)\n    #noise = torch.randn(clean_images.shape).to(clean_images.device)\n\n    # Loading pretrained model\n    print(\"Loading pretrained model from {}\".format(args.model_path))\n    # load all models\n    unet = UNet2DModel.from_pretrained(\"CompVis/ldm-celebahq-256\", subfolder=\"unet\")\n    vqvae = VQModel.from_pretrained(\"CompVis/ldm-celebahq-256\", subfolder=\"vqvae\")\n    scheduler = DDIMScheduler.from_config(\"CompVis/ldm-celebahq-256\", subfolder=\"scheduler\")\n\n    # set to cuda\n    torch_device = torch.device(args.device) if torch.cuda.is_available() else \"cpu\"\n\n    unet.to(torch_device)\n    vqvae.to(torch_device)\n    example_inputs = {'sample': torch.randn(1, unet.in_channels, unet.sample_size, unet.sample_size).to(args.device), 'timestep': torch.ones((1,)).long().to(args.device)}\n\n    if args.pruning_ratio>0:\n        if args.pruner == 'taylor':\n            imp = tp.importance.TaylorImportance() \n        elif args.pruner == 'random' or args.pruner=='reinit':\n            imp = tp.importance.RandomImportance()\n        elif args.pruner == 'magnitude':\n            imp = tp.importance.MagnitudeImportance()\n        elif args.pruner == 'diff-pruning':\n            imp = tp.importance.TaylorImportance(multivariable=False) \n        else:\n            raise NotImplementedError\n\n        ignored_layers = [unet.conv_out]\n        ignored_layers = [unet.conv_out]\n        from diffusers.models.attention import Attention\n        channel_groups = {}\n        for m in unet.modules():\n            if isinstance(m, Attention):\n                channel_groups[m.to_q] = m.heads\n                channel_groups[m.to_k] = m.heads\n                channel_groups[m.to_v] = m.heads\n        \n        pruner = tp.pruner.MagnitudePruner(\n      
      unet,\n            example_inputs,\n            importance=imp,\n            iterative_steps=1,\n            channel_groups=channel_groups,\n            ch_sparsity=args.pruning_ratio,\n            ignored_layers=ignored_layers,\n        )\n\n        base_macs, base_params = tp.utils.count_ops_and_params(unet, example_inputs)\n        unet.zero_grad()\n        unet.eval()\n        import random\n\n        for g in pruner.step(interactive=True):\n            g.prune()\n\n        # Update static attributes\n        from diffusers.models.resnet import Upsample2D, Downsample2D\n        for m in unet.modules():\n            if isinstance(m, (Upsample2D, Downsample2D)):\n                m.channels = m.conv.in_channels\n                m.out_channels = m.conv.out_channels  # fixed: was '==', a no-op comparison that left out_channels stale after pruning\n\n        macs, params = tp.utils.count_ops_and_params(unet, example_inputs)\n        print(unet)\n        print(\"#Params: {:.4f} M => {:.4f} M\".format(base_params/1e6, params/1e6))\n        print(\"#MACS: {:.4f} G => {:.4f} G\".format(base_macs/1e9, macs/1e9))\n        unet.zero_grad()\n        del pruner\n\n        if args.pruner=='reinit':\n            def reset_parameters(model):\n                for m in model.modules():\n                    if hasattr(m, 'reset_parameters'):\n                        m.reset_parameters()\n            reset_parameters(unet)\n\n    \n    pipeline = LDMPipeline(\n        unet=unet,\n        vqvae=vqvae,\n        scheduler=scheduler,\n    ).to(torch_device)\n    pipeline.save_pretrained(args.save_path)\n    if args.pruning_ratio>0:\n        os.makedirs(os.path.join(args.save_path, \"pruned\"), exist_ok=True)\n        torch.save(unet, os.path.join(args.save_path, \"pruned\", \"unet_pruned.pth\"))\n    with torch.no_grad():\n        generator = torch.Generator(device=torch_device).manual_seed(0)\n        images = pipeline(num_inference_steps=100, batch_size=args.batch_size, output_type=\"numpy\").images\n        os.makedirs(os.path.join(args.save_path, 'vis'), exist_ok=True)\n        torchvision.utils.save_image(torch.from_numpy(images).permute([0, 3, 1, 2]), \"{}/vis/after_pruning.png\".format(args.save_path))\n        "
  },
  {
    "path": "requirements.txt",
    "content": "accelerate\ntorchvision>=0.13.1\nscipy\ntorch>=1.12.1\ntqdm\nnumpy\ntorch_pruning\nhuggingface_hub\n"
  },
  {
    "path": "scripts/finetune_ddpm_cifar10.sh",
    "content": "python ddpm_train.py \\\n  --dataset=\"cifar10\" \\\n  --model_path=\"run/pruned/ddpm_cifar10_pruned\" \\\n  --pruned_model_ckpt=\"run/pruned/ddpm_cifar10_pruned/pruned/unet_pruned.pth\" \\\n  --resolution=32 \\\n  --output_dir=\"run/finetuned/ddpm_cifar10_pruned_post_training\" \\\n  --train_batch_size=128 \\\n  --num_iters=100000 \\\n  --gradient_accumulation_steps=1 \\\n  --learning_rate=2e-4 \\\n  --lr_warmup_steps=0 \\\n  --save_model_steps 1000 \\\n  --dataloader_num_workers 8 \\\n  --adam_weight_decay 0.00 \\\n  --ema_max_decay 0.9999 \\\n  --dropout 0.1 \\\n  --use_ema \\"
  },
  {
    "path": "scripts/prune_ddpm_cifar10.sh",
    "content": "python ddpm_prune.py \\\n--dataset cifar10 \\\n--model_path pretrained/ddpm_ema_cifar10 \\\n--save_path run/pruned/ddpm_cifar10_pruned \\\n--pruning_ratio $1 \\\n--batch_size 128 \\\n--pruner diff-pruning \\\n--thr 0.05 \\\n--device cuda:0 \\"
  },
  {
    "path": "scripts/prune_ddpm_ema_bedroom_random.sh",
    "content": "python ddpm_prune.py \\\n--dataset data/lsun/bedroom \\\n--model_path google/ddpm-ema-bedroom-256 \\\n--save_path run/pruned/ddpm_ema_bedroom_256_pruned \\\n--pruning_ratio $1 \\\n--batch_size 4 \\\n--pruner random \\\n--thr 0.05 \\\n--device cuda:0 \\"
  },
  {
    "path": "scripts/prune_ddpm_ema_church_random.sh",
    "content": "python ddpm_prune.py \\\n--dataset data/lsun/church \\\n--model_path google/ddpm-ema-church-256 \\\n--save_path run/pruned/ddpm_ema_church_256_pruned \\\n--pruning_ratio $1 \\\n--batch_size 4 \\\n--pruner random \\\n--thr 0.05 \\\n--device cuda:0 \\"
  },
  {
    "path": "scripts/prune_ldm.sh",
    "content": "python ldm_prune.py \\\n--model_path CompVis/ldm-celebahq-256 \\\n--save_path run/pruned/ldm_celeba_pruned \\\n--pruning_ratio 0.5 \\\n--pruner magnitude \\\n--device cuda:0 \\\n--batch_size 4 \\"
  },
  {
    "path": "scripts/sample_ddpm_cifar10_pretrained.sh",
    "content": "python ddpm_sample.py \\\n--output_dir run/sample/ddpm_cifar10_pretrained \\\n--batch_size 128 \\\n--model_path pretrained/ddpm_ema_cifar10 \\"
  },
  {
    "path": "scripts/sample_ddpm_cifar10_pretrained_distributed.sh",
    "content": "python -m torch.distributed.launch --nproc_per_node=8 --master_port 22222 --use_env ddpm_sample.py \\\n--output_dir run/sample/ddpm_cifar10_pretrained \\\n--model_path pretrained/ddpm_ema_cifar10 \\\n--batch_size 128 \\"
  },
  {
    "path": "scripts/sample_ddpm_cifar10_pruned.sh",
    "content": "python ddpm_sample.py \\\n--output_dir run/sample/ddpm_cifar10_pruned \\\n--batch_size 128 \\\n--pruned_model_ckpt run/finetuned/ddpm_cifar10_pruned_post_training/pruned/unet_ema_pruned.pth \\\n--model_path run/finetuned/ddpm_cifar10_pruned_post_training \\\n--skip_type uniform \\"
  },
  {
    "path": "tools/convert_cifar10_ddpm_ema.sh",
    "content": "mkdir -p pretrained\nwget https://heibox.uni-heidelberg.de/f/2e4f01e2d9ee49bab1d5/?dl=1 -O pretrained/cifar10-ema-model-790000.ckpt\npython tools/convert_ddpm_original_checkpoint_to_diffusers_cifar10.py --checkpoint_path pretrained/cifar10-ema-model-790000.ckpt --config_file tools/ddpm_cifar10_config.json --dump_path pretrained/ddpm_ema_cifar10\n"
  },
  {
    "path": "tools/convert_ddpm_original_checkpoint_to_diffusers_cifar10.py",
    "content": "import argparse\nimport json\n\nimport torch\n\nfrom diffusers import AutoencoderKL, DDPMPipeline, DDPMScheduler, UNet2DModel, VQModel\n\n\ndef shave_segments(path, n_shave_prefix_segments=1):\n    \"\"\"\n    Removes segments. Positive values shave the first segments, negative shave the last segments.\n    \"\"\"\n    if n_shave_prefix_segments >= 0:\n        return \".\".join(path.split(\".\")[n_shave_prefix_segments:])\n    else:\n        return \".\".join(path.split(\".\")[:n_shave_prefix_segments])\n\n\ndef renew_resnet_paths(old_list, n_shave_prefix_segments=0):\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item\n        new_item = new_item.replace(\"block.\", \"resnets.\")\n        new_item = new_item.replace(\"conv_shorcut\", \"conv1\")\n        new_item = new_item.replace(\"in_shortcut\", \"conv_shortcut\")\n        new_item = new_item.replace(\"temb_proj\", \"time_emb_proj\")\n\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef renew_attention_paths(old_list, n_shave_prefix_segments=0, in_mid=False):\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item\n\n        # In `model.mid`, the layer is called `attn`.\n        if not in_mid:\n            new_item = new_item.replace(\"attn\", \"attentions\")\n        new_item = new_item.replace(\".k.\", \".to_k.\")\n        new_item = new_item.replace(\".v.\", \".to_v.\")\n        new_item = new_item.replace(\".q.\", \".to_q.\")\n\n        new_item = new_item.replace(\"proj_out\", \"to_out.0\")\n        new_item = new_item.replace(\"norm\", \"group_norm\")\n\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef assign_to_checkpoint(\n    paths, checkpoint, old_checkpoint, attention_paths_to_split=None, additional_replacements=None, config=None\n):\n    assert isinstance(paths, list), \"Paths should be a list of dicts containing 'old' and 'new' keys.\"\n\n    if attention_paths_to_split is not None:\n        if config is None:\n            raise ValueError(\"Please specify the config if setting 'attention_paths_to_split' to 'True'.\")\n\n        for path, path_map in attention_paths_to_split.items():\n            old_tensor = old_checkpoint[path]\n            channels = old_tensor.shape[0] // 3\n\n            target_shape = (-1, channels) if len(old_tensor.shape) == 3 else (-1)\n\n            num_heads = old_tensor.shape[0] // config.get(\"num_head_channels\", 1) // 3\n\n            old_tensor = old_tensor.reshape((num_heads, 3 * channels // num_heads) + old_tensor.shape[1:])\n            query, key, value = old_tensor.split(channels // num_heads, dim=1)\n\n            checkpoint[path_map[\"query\"]] = query.reshape(target_shape).squeeze()\n            checkpoint[path_map[\"key\"]] = key.reshape(target_shape).squeeze()\n            checkpoint[path_map[\"value\"]] = value.reshape(target_shape).squeeze()\n\n    for path in paths:\n        new_path = path[\"new\"]\n\n        if attention_paths_to_split is not None and new_path in attention_paths_to_split:\n            continue\n\n        new_path = new_path.replace(\"down.\", \"down_blocks.\")\n        new_path = new_path.replace(\"up.\", \"up_blocks.\")\n\n        if additional_replacements is not None:\n            for replacement in 
additional_replacements:\n                new_path = new_path.replace(replacement[\"old\"], replacement[\"new\"])\n\n        if \"attentions\" in new_path:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]].squeeze()\n        else:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]]\n\n\ndef convert_ddpm_checkpoint(checkpoint, config):\n    \"\"\"\n    Takes a state dict and a config, and returns a converted checkpoint.\n    \"\"\"\n    new_checkpoint = {}\n\n    new_checkpoint[\"time_embedding.linear_1.weight\"] = checkpoint[\"temb.dense.0.weight\"]\n    new_checkpoint[\"time_embedding.linear_1.bias\"] = checkpoint[\"temb.dense.0.bias\"]\n    new_checkpoint[\"time_embedding.linear_2.weight\"] = checkpoint[\"temb.dense.1.weight\"]\n    new_checkpoint[\"time_embedding.linear_2.bias\"] = checkpoint[\"temb.dense.1.bias\"]\n\n    new_checkpoint[\"conv_norm_out.weight\"] = checkpoint[\"norm_out.weight\"]\n    new_checkpoint[\"conv_norm_out.bias\"] = checkpoint[\"norm_out.bias\"]\n\n    new_checkpoint[\"conv_in.weight\"] = checkpoint[\"conv_in.weight\"]\n    new_checkpoint[\"conv_in.bias\"] = checkpoint[\"conv_in.bias\"]\n    new_checkpoint[\"conv_out.weight\"] = checkpoint[\"conv_out.weight\"]\n    new_checkpoint[\"conv_out.bias\"] = checkpoint[\"conv_out.bias\"]\n\n    num_down_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in checkpoint if \"down\" in layer})\n    down_blocks = {\n        layer_id: [key for key in checkpoint if f\"down.{layer_id}\" in key] for layer_id in range(num_down_blocks)\n    }\n\n    num_up_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in checkpoint if \"up\" in layer})\n    up_blocks = {layer_id: [key for key in checkpoint if f\"up.{layer_id}\" in key] for layer_id in range(num_up_blocks)}\n\n    for i in range(num_down_blocks):\n        block_id = (i - 1) // (config[\"layers_per_block\"] + 1)\n\n        if any(\"downsample\" in layer for layer in down_blocks[i]):\n            new_checkpoint[f\"down_blocks.{i}.downsamplers.0.conv.weight\"] = checkpoint[\n                f\"down.{i}.downsample.conv.weight\"\n            ]\n            new_checkpoint[f\"down_blocks.{i}.downsamplers.0.conv.bias\"] = checkpoint[f\"down.{i}.downsample.conv.bias\"]\n        #            new_checkpoint[f'down_blocks.{i}.downsamplers.0.op.weight'] = checkpoint[f'down.{i}.downsample.conv.weight']\n        #            new_checkpoint[f'down_blocks.{i}.downsamplers.0.op.bias'] = checkpoint[f'down.{i}.downsample.conv.bias']\n\n        if any(\"block\" in layer for layer in down_blocks[i]):\n            num_blocks = len(\n                {\".\".join(shave_segments(layer, 2).split(\".\")[:2]) for layer in down_blocks[i] if \"block\" in layer}\n            )\n            blocks = {\n                layer_id: [key for key in down_blocks[i] if f\"block.{layer_id}\" in key]\n                for layer_id in range(num_blocks)\n            }\n\n            if num_blocks > 0:\n                for j in range(config[\"layers_per_block\"]):\n                    paths = renew_resnet_paths(blocks[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint)\n\n        if any(\"attn\" in layer for layer in down_blocks[i]):\n            num_attn = len(\n                {\".\".join(shave_segments(layer, 2).split(\".\")[:2]) for layer in down_blocks[i] if \"attn\" in layer}\n            )\n            attns = {\n                layer_id: [key for key in down_blocks[i] if f\"attn.{layer_id}\" in key]\n                for layer_id in 
range(num_blocks)\n            }\n\n            if num_attn > 0:\n                for j in range(config[\"layers_per_block\"]):\n                    paths = renew_attention_paths(attns[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint, config=config)\n\n    mid_block_1_layers = [key for key in checkpoint if \"mid.block_1\" in key]\n    mid_block_2_layers = [key for key in checkpoint if \"mid.block_2\" in key]\n    mid_attn_1_layers = [key for key in checkpoint if \"mid.attn_1\" in key]\n\n    # Mid new 2\n    paths = renew_resnet_paths(mid_block_1_layers)\n    assign_to_checkpoint(\n        paths,\n        new_checkpoint,\n        checkpoint,\n        additional_replacements=[{\"old\": \"mid.\", \"new\": \"mid_new_2.\"}, {\"old\": \"block_1\", \"new\": \"resnets.0\"}],\n    )\n\n    paths = renew_resnet_paths(mid_block_2_layers)\n    assign_to_checkpoint(\n        paths,\n        new_checkpoint,\n        checkpoint,\n        additional_replacements=[{\"old\": \"mid.\", \"new\": \"mid_new_2.\"}, {\"old\": \"block_2\", \"new\": \"resnets.1\"}],\n    )\n\n    paths = renew_attention_paths(mid_attn_1_layers, in_mid=True)\n    assign_to_checkpoint(\n        paths,\n        new_checkpoint,\n        checkpoint,\n        additional_replacements=[{\"old\": \"mid.\", \"new\": \"mid_new_2.\"}, {\"old\": \"attn_1\", \"new\": \"attentions.0\"}],\n    )\n\n    for i in range(num_up_blocks):\n        block_id = num_up_blocks - 1 - i\n\n        if any(\"upsample\" in layer for layer in up_blocks[i]):\n            new_checkpoint[f\"up_blocks.{block_id}.upsamplers.0.conv.weight\"] = checkpoint[\n                f\"up.{i}.upsample.conv.weight\"\n            ]\n            new_checkpoint[f\"up_blocks.{block_id}.upsamplers.0.conv.bias\"] = checkpoint[f\"up.{i}.upsample.conv.bias\"]\n\n        if any(\"block\" in layer for layer in up_blocks[i]):\n            num_blocks = len(\n                {\".\".join(shave_segments(layer, 2).split(\".\")[:2]) for layer in up_blocks[i] if \"block\" in layer}\n            )\n            blocks = {\n                layer_id: [key for key in up_blocks[i] if f\"block.{layer_id}\" in key] for layer_id in range(num_blocks)\n            }\n\n            if num_blocks > 0:\n                for j in range(config[\"layers_per_block\"] + 1):\n                    replace_indices = {\"old\": f\"up_blocks.{i}\", \"new\": f\"up_blocks.{block_id}\"}\n                    paths = renew_resnet_paths(blocks[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint, additional_replacements=[replace_indices])\n\n        if any(\"attn\" in layer for layer in up_blocks[i]):\n            num_attn = len(\n                {\".\".join(shave_segments(layer, 2).split(\".\")[:2]) for layer in up_blocks[i] if \"attn\" in layer}\n            )\n            attns = {\n                layer_id: [key for key in up_blocks[i] if f\"attn.{layer_id}\" in key] for layer_id in range(num_blocks)\n            }\n\n            if num_attn > 0:\n                for j in range(config[\"layers_per_block\"] + 1):\n                    replace_indices = {\"old\": f\"up_blocks.{i}\", \"new\": f\"up_blocks.{block_id}\"}\n                    paths = renew_attention_paths(attns[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint, additional_replacements=[replace_indices])\n\n    new_checkpoint = {k.replace(\"mid_new_2\", \"mid_block\"): v for k, v in new_checkpoint.items()}\n    new_checkpoint = {k.replace(\"nconv_shortcut\", \"conv_shortcut\"): v 
for k, v in new_checkpoint.items()}\n    return new_checkpoint\n\n\ndef convert_vq_autoenc_checkpoint(checkpoint, config):\n    \"\"\"\n    Takes a state dict and a config, and returns a converted checkpoint.\n    \"\"\"\n    new_checkpoint = {}\n\n    new_checkpoint[\"encoder.conv_norm_out.weight\"] = checkpoint[\"encoder.norm_out.weight\"]\n    new_checkpoint[\"encoder.conv_norm_out.bias\"] = checkpoint[\"encoder.norm_out.bias\"]\n\n    new_checkpoint[\"encoder.conv_in.weight\"] = checkpoint[\"encoder.conv_in.weight\"]\n    new_checkpoint[\"encoder.conv_in.bias\"] = checkpoint[\"encoder.conv_in.bias\"]\n    new_checkpoint[\"encoder.conv_out.weight\"] = checkpoint[\"encoder.conv_out.weight\"]\n    new_checkpoint[\"encoder.conv_out.bias\"] = checkpoint[\"encoder.conv_out.bias\"]\n\n    new_checkpoint[\"decoder.conv_norm_out.weight\"] = checkpoint[\"decoder.norm_out.weight\"]\n    new_checkpoint[\"decoder.conv_norm_out.bias\"] = checkpoint[\"decoder.norm_out.bias\"]\n\n    new_checkpoint[\"decoder.conv_in.weight\"] = checkpoint[\"decoder.conv_in.weight\"]\n    new_checkpoint[\"decoder.conv_in.bias\"] = checkpoint[\"decoder.conv_in.bias\"]\n    new_checkpoint[\"decoder.conv_out.weight\"] = checkpoint[\"decoder.conv_out.weight\"]\n    new_checkpoint[\"decoder.conv_out.bias\"] = checkpoint[\"decoder.conv_out.bias\"]\n\n    num_down_blocks = len({\".\".join(layer.split(\".\")[:3]) for layer in checkpoint if \"down\" in layer})\n    down_blocks = {\n        layer_id: [key for key in checkpoint if f\"down.{layer_id}\" in key] for layer_id in range(num_down_blocks)\n    }\n\n    num_up_blocks = len({\".\".join(layer.split(\".\")[:3]) for layer in checkpoint if \"up\" in layer})\n    up_blocks = {layer_id: [key for key in checkpoint if f\"up.{layer_id}\" in key] for layer_id in range(num_up_blocks)}\n\n    for i in range(num_down_blocks):\n        block_id = (i - 1) // (config[\"layers_per_block\"] + 1)\n\n        if any(\"downsample\" in layer for layer in down_blocks[i]):\n            new_checkpoint[f\"encoder.down_blocks.{i}.downsamplers.0.conv.weight\"] = checkpoint[\n                f\"encoder.down.{i}.downsample.conv.weight\"\n            ]\n            new_checkpoint[f\"encoder.down_blocks.{i}.downsamplers.0.conv.bias\"] = checkpoint[\n                f\"encoder.down.{i}.downsample.conv.bias\"\n            ]\n\n        if any(\"block\" in layer for layer in down_blocks[i]):\n            num_blocks = len(\n                {\".\".join(shave_segments(layer, 3).split(\".\")[:3]) for layer in down_blocks[i] if \"block\" in layer}\n            )\n            blocks = {\n                layer_id: [key for key in down_blocks[i] if f\"block.{layer_id}\" in key]\n                for layer_id in range(num_blocks)\n            }\n\n            if num_blocks > 0:\n                for j in range(config[\"layers_per_block\"]):\n                    paths = renew_resnet_paths(blocks[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint)\n\n        if any(\"attn\" in layer for layer in down_blocks[i]):\n            num_attn = len(\n                {\".\".join(shave_segments(layer, 3).split(\".\")[:3]) for layer in down_blocks[i] if \"attn\" in layer}\n            )\n            attns = {\n                layer_id: [key for key in down_blocks[i] if f\"attn.{layer_id}\" in key]\n                for layer_id in range(num_blocks)\n            }\n\n            if num_attn > 0:\n                for j in range(config[\"layers_per_block\"]):\n                    paths = 
renew_attention_paths(attns[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint, config=config)\n\n    mid_block_1_layers = [key for key in checkpoint if \"mid.block_1\" in key]\n    mid_block_2_layers = [key for key in checkpoint if \"mid.block_2\" in key]\n    mid_attn_1_layers = [key for key in checkpoint if \"mid.attn_1\" in key]\n\n    # Mid new 2\n    paths = renew_resnet_paths(mid_block_1_layers)\n    assign_to_checkpoint(\n        paths,\n        new_checkpoint,\n        checkpoint,\n        additional_replacements=[{\"old\": \"mid.\", \"new\": \"mid_new_2.\"}, {\"old\": \"block_1\", \"new\": \"resnets.0\"}],\n    )\n\n    paths = renew_resnet_paths(mid_block_2_layers)\n    assign_to_checkpoint(\n        paths,\n        new_checkpoint,\n        checkpoint,\n        additional_replacements=[{\"old\": \"mid.\", \"new\": \"mid_new_2.\"}, {\"old\": \"block_2\", \"new\": \"resnets.1\"}],\n    )\n\n    paths = renew_attention_paths(mid_attn_1_layers, in_mid=True)\n    assign_to_checkpoint(\n        paths,\n        new_checkpoint,\n        checkpoint,\n        additional_replacements=[{\"old\": \"mid.\", \"new\": \"mid_new_2.\"}, {\"old\": \"attn_1\", \"new\": \"attentions.0\"}],\n    )\n\n    for i in range(num_up_blocks):\n        block_id = num_up_blocks - 1 - i\n\n        if any(\"upsample\" in layer for layer in up_blocks[i]):\n            new_checkpoint[f\"decoder.up_blocks.{block_id}.upsamplers.0.conv.weight\"] = checkpoint[\n                f\"decoder.up.{i}.upsample.conv.weight\"\n            ]\n            new_checkpoint[f\"decoder.up_blocks.{block_id}.upsamplers.0.conv.bias\"] = checkpoint[\n                f\"decoder.up.{i}.upsample.conv.bias\"\n            ]\n\n        if any(\"block\" in layer for layer in up_blocks[i]):\n            num_blocks = len(\n                {\".\".join(shave_segments(layer, 3).split(\".\")[:3]) for layer in up_blocks[i] if \"block\" in layer}\n            )\n            blocks = {\n                layer_id: [key for key in up_blocks[i] if f\"block.{layer_id}\" in key] for layer_id in range(num_blocks)\n            }\n\n            if num_blocks > 0:\n                for j in range(config[\"layers_per_block\"] + 1):\n                    replace_indices = {\"old\": f\"up_blocks.{i}\", \"new\": f\"up_blocks.{block_id}\"}\n                    paths = renew_resnet_paths(blocks[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint, additional_replacements=[replace_indices])\n\n        if any(\"attn\" in layer for layer in up_blocks[i]):\n            num_attn = len(\n                {\".\".join(shave_segments(layer, 3).split(\".\")[:3]) for layer in up_blocks[i] if \"attn\" in layer}\n            )\n            attns = {\n                layer_id: [key for key in up_blocks[i] if f\"attn.{layer_id}\" in key] for layer_id in range(num_blocks)\n            }\n\n            if num_attn > 0:\n                for j in range(config[\"layers_per_block\"] + 1):\n                    replace_indices = {\"old\": f\"up_blocks.{i}\", \"new\": f\"up_blocks.{block_id}\"}\n                    paths = renew_attention_paths(attns[j])\n                    assign_to_checkpoint(paths, new_checkpoint, checkpoint, additional_replacements=[replace_indices])\n\n    new_checkpoint = {k.replace(\"mid_new_2\", \"mid_block\"): v for k, v in new_checkpoint.items()}\n    new_checkpoint[\"quant_conv.weight\"] = checkpoint[\"quant_conv.weight\"]\n    new_checkpoint[\"quant_conv.bias\"] = checkpoint[\"quant_conv.bias\"]\n    if 
\"quantize.embedding.weight\" in checkpoint:\n        new_checkpoint[\"quantize.embedding.weight\"] = checkpoint[\"quantize.embedding.weight\"]\n    new_checkpoint[\"post_quant_conv.weight\"] = checkpoint[\"post_quant_conv.weight\"]\n    new_checkpoint[\"post_quant_conv.bias\"] = checkpoint[\"post_quant_conv.bias\"]\n\n    return new_checkpoint\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n\n    parser.add_argument(\n        \"--checkpoint_path\", default=None, type=str, required=True, help=\"Path to the checkpoint to convert.\"\n    )\n\n    parser.add_argument(\n        \"--config_file\",\n        default=None,\n        type=str,\n        required=True,\n        help=\"The config json file corresponding to the architecture.\",\n    )\n\n    parser.add_argument(\"--dump_path\", default=None, type=str, required=True, help=\"Path to the output model.\")\n\n    args = parser.parse_args()\n    checkpoint = torch.load(args.checkpoint_path)\n    print(checkpoint.keys())\n    with open(args.config_file) as f:\n        config = json.loads(f.read())\n\n    # unet case\n    key_prefix_set = {key.split(\".\")[0] for key in checkpoint.keys()}\n    if \"encoder\" in key_prefix_set and \"decoder\" in key_prefix_set:\n        converted_checkpoint = convert_vq_autoenc_checkpoint(checkpoint, config)\n    else:\n        converted_checkpoint = convert_ddpm_checkpoint(checkpoint, config)\n\n    if \"ddpm\" in config:\n        del config[\"ddpm\"]\n\n    if config[\"_class_name\"] == \"VQModel\":\n        model = VQModel(**config)\n        model.load_state_dict(converted_checkpoint)\n        model.save_pretrained(args.dump_path)\n    elif config[\"_class_name\"] == \"AutoencoderKL\":\n        model = AutoencoderKL(**config)\n        model.load_state_dict(converted_checkpoint)\n        model.save_pretrained(args.dump_path)\n    else:\n        model = UNet2DModel(**config)\n        model.load_state_dict(converted_checkpoint)\n\n        scheduler = DDPMScheduler.from_config('google/ddpm-cifar10-32')\n\n        pipe = DDPMPipeline(unet=model, scheduler=scheduler)\n        pipe.save_pretrained(args.dump_path)\n"
  },
  {
    "path": "tools/convert_ldm_original_checkpoint_to_diffusers.py",
    "content": "# coding=utf-8\n# Copyright 2023 The HuggingFace Inc. team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\" Conversion script for the LDM checkpoints. \"\"\"\n\nimport argparse\nimport json\n\nimport torch\n\nfrom diffusers import DDPMScheduler, LDMPipeline, UNet2DModel, VQModel\n\n\ndef shave_segments(path, n_shave_prefix_segments=1):\n    \"\"\"\n    Removes segments. Positive values shave the first segments, negative shave the last segments.\n    \"\"\"\n    if n_shave_prefix_segments >= 0:\n        return \".\".join(path.split(\".\")[n_shave_prefix_segments:])\n    else:\n        return \".\".join(path.split(\".\")[:n_shave_prefix_segments])\n\n\ndef renew_resnet_paths(old_list, n_shave_prefix_segments=0):\n    \"\"\"\n    Updates paths inside resnets to the new naming scheme (local renaming)\n    \"\"\"\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item.replace(\"in_layers.0\", \"norm1\")\n        new_item = new_item.replace(\"in_layers.2\", \"conv1\")\n\n        new_item = new_item.replace(\"out_layers.0\", \"norm2\")\n        new_item = new_item.replace(\"out_layers.3\", \"conv2\")\n\n        new_item = new_item.replace(\"emb_layers.1\", \"time_emb_proj\")\n        new_item = new_item.replace(\"skip_connection\", \"conv_shortcut\")\n\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef renew_attention_paths(old_list, n_shave_prefix_segments=0):\n    \"\"\"\n    Updates paths inside attentions to the new naming scheme (local renaming)\n    \"\"\"\n    mapping = []\n    for old_item in old_list:\n        new_item = old_item\n\n        new_item = new_item.replace(\"norm.weight\", \"group_norm.weight\")\n        new_item = new_item.replace(\"norm.bias\", \"group_norm.bias\")\n\n        new_item = new_item.replace(\"proj_out.weight\", \"proj_attn.weight\")\n        new_item = new_item.replace(\"proj_out.bias\", \"proj_attn.bias\")\n\n        new_item = shave_segments(new_item, n_shave_prefix_segments=n_shave_prefix_segments)\n\n        mapping.append({\"old\": old_item, \"new\": new_item})\n\n    return mapping\n\n\ndef assign_to_checkpoint(\n    paths, checkpoint, old_checkpoint, attention_paths_to_split=None, additional_replacements=None, config=None\n):\n    \"\"\"\n    This does the final conversion step: take locally converted weights and apply a global renaming\n    to them. 
It splits attention layers, and takes into account additional replacements\n    that may arise.\n\n    Assigns the weights to the new checkpoint.\n    \"\"\"\n    assert isinstance(paths, list), \"Paths should be a list of dicts containing 'old' and 'new' keys.\"\n\n    # Splits the attention layers into three variables.\n    if attention_paths_to_split is not None:\n        for path, path_map in attention_paths_to_split.items():\n            old_tensor = old_checkpoint[path]\n            channels = old_tensor.shape[0] // 3\n\n            target_shape = (-1, channels) if len(old_tensor.shape) == 3 else (-1)\n\n            num_heads = old_tensor.shape[0] // config[\"num_head_channels\"] // 3\n\n            old_tensor = old_tensor.reshape((num_heads, 3 * channels // num_heads) + old_tensor.shape[1:])\n            query, key, value = old_tensor.split(channels // num_heads, dim=1)\n\n            checkpoint[path_map[\"query\"]] = query.reshape(target_shape)\n            checkpoint[path_map[\"key\"]] = key.reshape(target_shape)\n            checkpoint[path_map[\"value\"]] = value.reshape(target_shape)\n\n    for path in paths:\n        new_path = path[\"new\"]\n\n        # These have already been assigned\n        if attention_paths_to_split is not None and new_path in attention_paths_to_split:\n            continue\n\n        # Global renaming happens here\n        new_path = new_path.replace(\"middle_block.0\", \"mid_block.resnets.0\")\n        new_path = new_path.replace(\"middle_block.1\", \"mid_block.attentions.0\")\n        new_path = new_path.replace(\"middle_block.2\", \"mid_block.resnets.1\")\n\n        if additional_replacements is not None:\n            for replacement in additional_replacements:\n                new_path = new_path.replace(replacement[\"old\"], replacement[\"new\"])\n\n        # proj_attn.weight has to be converted from conv 1D to linear\n        if \"proj_attn.weight\" in new_path:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]][:, :, 0]\n        else:\n            checkpoint[new_path] = old_checkpoint[path[\"old\"]]\n\n\ndef convert_ldm_checkpoint(checkpoint, config):\n    \"\"\"\n    Takes a state dict and a config, and returns a converted checkpoint.\n    \"\"\"\n    new_checkpoint = {}\n\n    filtered_checkpoint = {}\n    for k, v in checkpoint.items():\n        if 'model.diffusion_model.' 
in k:\n            filtered_checkpoint[k.replace('model.diffusion_model.', '')] = v\n    checkpoint = filtered_checkpoint\n    print(checkpoint.keys())\n\n    new_checkpoint[\"time_embedding.linear_1.weight\"] = checkpoint[\"time_embed.0.weight\"]\n    new_checkpoint[\"time_embedding.linear_1.bias\"] = checkpoint[\"time_embed.0.bias\"]\n    new_checkpoint[\"time_embedding.linear_2.weight\"] = checkpoint[\"time_embed.2.weight\"]\n    new_checkpoint[\"time_embedding.linear_2.bias\"] = checkpoint[\"time_embed.2.bias\"]\n\n    new_checkpoint[\"conv_in.weight\"] = checkpoint[\"input_blocks.0.0.weight\"]\n    new_checkpoint[\"conv_in.bias\"] = checkpoint[\"input_blocks.0.0.bias\"]\n\n    new_checkpoint[\"conv_norm_out.weight\"] = checkpoint[\"out.0.weight\"]\n    new_checkpoint[\"conv_norm_out.bias\"] = checkpoint[\"out.0.bias\"]\n    new_checkpoint[\"conv_out.weight\"] = checkpoint[\"out.2.weight\"]\n    new_checkpoint[\"conv_out.bias\"] = checkpoint[\"out.2.bias\"]\n\n    # Retrieves the keys for the input blocks only\n    num_input_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in checkpoint if \"input_blocks\" in layer})\n    input_blocks = {\n        layer_id: [key for key in checkpoint if f\"input_blocks.{layer_id}\" in key]\n        for layer_id in range(num_input_blocks)\n    }\n\n    # Retrieves the keys for the middle blocks only\n    num_middle_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in checkpoint if \"middle_block\" in layer})\n    middle_blocks = {\n        layer_id: [key for key in checkpoint if f\"middle_block.{layer_id}\" in key]\n        for layer_id in range(num_middle_blocks)\n    }\n\n    # Retrieves the keys for the output blocks only\n    num_output_blocks = len({\".\".join(layer.split(\".\")[:2]) for layer in checkpoint if \"output_blocks\" in layer})\n    output_blocks = {\n        layer_id: [key for key in checkpoint if f\"output_blocks.{layer_id}\" in key]\n        for layer_id in range(num_output_blocks)\n    }\n\n    for i in range(1, num_input_blocks):\n        block_id = (i - 1) // (config[\"num_res_blocks\"] + 1)\n        layer_in_block_id = (i - 1) % (config[\"num_res_blocks\"] + 1)\n\n        resnets = [key for key in input_blocks[i] if f\"input_blocks.{i}.0\" in key]\n        attentions = [key for key in input_blocks[i] if f\"input_blocks.{i}.1\" in key]\n\n        if f\"input_blocks.{i}.0.op.weight\" in checkpoint:\n            new_checkpoint[f\"down_blocks.{block_id}.downsamplers.0.conv.weight\"] = checkpoint[\n                f\"input_blocks.{i}.0.op.weight\"\n            ]\n            new_checkpoint[f\"down_blocks.{block_id}.downsamplers.0.conv.bias\"] = checkpoint[\n                f\"input_blocks.{i}.0.op.bias\"\n            ]\n            continue\n\n        paths = renew_resnet_paths(resnets)\n        meta_path = {\"old\": f\"input_blocks.{i}.0\", \"new\": f\"down_blocks.{block_id}.resnets.{layer_in_block_id}\"}\n        resnet_op = {\"old\": \"resnets.2.op\", \"new\": \"downsamplers.0.op\"}\n        assign_to_checkpoint(\n            paths, new_checkpoint, checkpoint, additional_replacements=[meta_path, resnet_op], config=config\n        )\n\n        if len(attentions):\n            paths = renew_attention_paths(attentions)\n            meta_path = {\n                \"old\": f\"input_blocks.{i}.1\",\n                \"new\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}\",\n            }\n            to_split = {\n                f\"input_blocks.{i}.1.qkv.bias\": {\n                    \"key\": 
f\"down_blocks.{block_id}.attentions.{layer_in_block_id}.key.bias\",\n                    \"query\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}.query.bias\",\n                    \"value\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}.value.bias\",\n                },\n                f\"input_blocks.{i}.1.qkv.weight\": {\n                    \"key\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}.key.weight\",\n                    \"query\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}.query.weight\",\n                    \"value\": f\"down_blocks.{block_id}.attentions.{layer_in_block_id}.value.weight\",\n                },\n            }\n            assign_to_checkpoint(\n                paths,\n                new_checkpoint,\n                checkpoint,\n                additional_replacements=[meta_path],\n                attention_paths_to_split=to_split,\n                config=config,\n            )\n\n    resnet_0 = middle_blocks[0]\n    attentions = middle_blocks[1]\n    resnet_1 = middle_blocks[2]\n\n    resnet_0_paths = renew_resnet_paths(resnet_0)\n    assign_to_checkpoint(resnet_0_paths, new_checkpoint, checkpoint, config=config)\n\n    resnet_1_paths = renew_resnet_paths(resnet_1)\n    assign_to_checkpoint(resnet_1_paths, new_checkpoint, checkpoint, config=config)\n\n    attentions_paths = renew_attention_paths(attentions)\n    to_split = {\n        \"middle_block.1.qkv.bias\": {\n            \"key\": \"mid_block.attentions.0.key.bias\",\n            \"query\": \"mid_block.attentions.0.query.bias\",\n            \"value\": \"mid_block.attentions.0.value.bias\",\n        },\n        \"middle_block.1.qkv.weight\": {\n            \"key\": \"mid_block.attentions.0.key.weight\",\n            \"query\": \"mid_block.attentions.0.query.weight\",\n            \"value\": \"mid_block.attentions.0.value.weight\",\n        },\n    }\n    assign_to_checkpoint(\n        attentions_paths, new_checkpoint, checkpoint, attention_paths_to_split=to_split, config=config\n    )\n\n    for i in range(num_output_blocks):\n        block_id = i // (config[\"num_res_blocks\"] + 1)\n        layer_in_block_id = i % (config[\"num_res_blocks\"] + 1)\n        output_block_layers = [shave_segments(name, 2) for name in output_blocks[i]]\n        output_block_list = {}\n\n        for layer in output_block_layers:\n            layer_id, layer_name = layer.split(\".\")[0], shave_segments(layer, 1)\n            if layer_id in output_block_list:\n                output_block_list[layer_id].append(layer_name)\n            else:\n                output_block_list[layer_id] = [layer_name]\n\n        if len(output_block_list) > 1:\n            resnets = [key for key in output_blocks[i] if f\"output_blocks.{i}.0\" in key]\n            attentions = [key for key in output_blocks[i] if f\"output_blocks.{i}.1\" in key]\n\n            resnet_0_paths = renew_resnet_paths(resnets)\n            paths = renew_resnet_paths(resnets)\n\n            meta_path = {\"old\": f\"output_blocks.{i}.0\", \"new\": f\"up_blocks.{block_id}.resnets.{layer_in_block_id}\"}\n            assign_to_checkpoint(paths, new_checkpoint, checkpoint, additional_replacements=[meta_path], config=config)\n\n            if [\"conv.weight\", \"conv.bias\"] in output_block_list.values():\n                index = list(output_block_list.values()).index([\"conv.weight\", \"conv.bias\"])\n                new_checkpoint[f\"up_blocks.{block_id}.upsamplers.0.conv.weight\"] = checkpoint[\n                    
f\"output_blocks.{i}.{index}.conv.weight\"\n                ]\n                new_checkpoint[f\"up_blocks.{block_id}.upsamplers.0.conv.bias\"] = checkpoint[\n                    f\"output_blocks.{i}.{index}.conv.bias\"\n                ]\n\n                # Clear attentions as they have been attributed above.\n                if len(attentions) == 2:\n                    attentions = []\n\n            if len(attentions):\n                paths = renew_attention_paths(attentions)\n                meta_path = {\n                    \"old\": f\"output_blocks.{i}.1\",\n                    \"new\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}\",\n                }\n                to_split = {\n                    f\"output_blocks.{i}.1.qkv.bias\": {\n                        \"key\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}.key.bias\",\n                        \"query\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}.query.bias\",\n                        \"value\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}.value.bias\",\n                    },\n                    f\"output_blocks.{i}.1.qkv.weight\": {\n                        \"key\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}.key.weight\",\n                        \"query\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}.query.weight\",\n                        \"value\": f\"up_blocks.{block_id}.attentions.{layer_in_block_id}.value.weight\",\n                    },\n                }\n                assign_to_checkpoint(\n                    paths,\n                    new_checkpoint,\n                    checkpoint,\n                    additional_replacements=[meta_path],\n                    attention_paths_to_split=to_split if any(\"qkv\" in key for key in attentions) else None,\n                    config=config,\n                )\n        else:\n            resnet_0_paths = renew_resnet_paths(output_block_layers, n_shave_prefix_segments=1)\n            for path in resnet_0_paths:\n                old_path = \".\".join([\"output_blocks\", str(i), path[\"old\"]])\n                new_path = \".\".join([\"up_blocks\", str(block_id), \"resnets\", str(layer_in_block_id), path[\"new\"]])\n\n                new_checkpoint[new_path] = checkpoint[old_path]\n\n    return new_checkpoint\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser()\n\n    parser.add_argument(\n        \"--checkpoint_path\", default=None, type=str, required=True, help=\"Path to the checkpoint to convert.\"\n    )\n\n    parser.add_argument(\n        \"--config_file\",\n        default=None,\n        type=str,\n        required=True,\n        help=\"The config json file corresponding to the architecture.\",\n    )\n\n    parser.add_argument(\"--dump_path\", default=None, type=str, required=True, help=\"Path to the output model.\")\n\n    args = parser.parse_args()\n\n    checkpoint = torch.load(args.checkpoint_path)['state_dict']\n    \n    with open(args.config_file) as f:\n        config = json.loads(f.read())\n    config[\"num_res_blocks\"] = 2\n    config[\"num_head_channels\"] = 32\n    converted_checkpoint = convert_ldm_checkpoint(checkpoint, config)\n\n    if \"ldm\" in config:\n        del config[\"ldm\"]\n    if \"num_res_blocks\" in config:\n        del config[\"num_res_blocks\"]\n    if \"num_head_channels\" in config:\n        del config[\"num_head_channels\"]\n\n    model = UNet2DModel(**config)\n    model.load_state_dict(converted_checkpoint)\n\n    try:\n        scheduler 
= DDPMScheduler.from_config(\"/\".join(args.checkpoint_path.split(\"/\")[:-1]))\n        vqvae = VQModel.from_pretrained(\"/\".join(args.checkpoint_path.split(\"/\")[:-1]))\n\n        # The unconditional LDMPipeline takes the first-stage autoencoder via the `vqvae` argument.\n        pipe = LDMPipeline(unet=model, scheduler=scheduler, vqvae=vqvae)\n        pipe.save_pretrained(args.dump_path)\n    except Exception:\n        # No scheduler/VQ-VAE configs found next to the checkpoint; save the bare UNet instead.\n        model.save_pretrained(args.dump_path)\n"
  },
  {
    "path": "tools/ddpm_cifar10_config.json",
    "content": "{\n    \"_class_name\": \"UNet2DModel\",\n    \"_diffusers_version\": \"0.0.4\",\n    \"act_fn\": \"silu\",\n    \"attention_head_dim\": null,\n    \"block_out_channels\": [\n      128,\n      256,\n      256,\n      256\n    ],\n    \"center_input_sample\": false,\n    \"down_block_types\": [\n      \"DownBlock2D\",\n      \"AttnDownBlock2D\",\n      \"DownBlock2D\",\n      \"DownBlock2D\"\n    ],\n    \"downsample_padding\": 0,\n    \"flip_sin_to_cos\": false,\n    \"freq_shift\": 1,\n    \"in_channels\": 3,\n    \"layers_per_block\": 2,\n    \"mid_block_scale_factor\": 1,\n    \"norm_eps\": 1e-06,\n    \"norm_num_groups\": 32,\n    \"out_channels\": 3,\n    \"sample_size\": 32,\n    \"time_embedding_type\": \"positional\",\n    \"up_block_types\": [\n      \"UpBlock2D\",\n      \"UpBlock2D\",\n      \"AttnUpBlock2D\",\n      \"UpBlock2D\"\n    ]\n  }\n  "
  },
  {
    "path": "tools/extract_cifar10.py",
    "content": "import os\nimport torchvision\nfrom torchvision.datasets import CIFAR10\nfrom tqdm import tqdm\nimport argparse\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--output\", type=str, required=True)\nargs = parser.parse_args()\nos.makedirs(args.output, exist_ok=True)\n\n# Define the path to the folder where the images will be saved\nsave_path = os.path.join(args.output, 'cifar10_images')\n\n# Create the folder if it doesn't exist\nif not os.path.exists(save_path):\n    os.makedirs(save_path)\n\n# Load the CIFAR10 dataset\ndataset = CIFAR10(root=args.output, train=True, download=True)\n\n# Loop through the dataset and save each image to the folder\nfor i in tqdm(range(len(dataset))):\n    image, label = dataset[i]\n    image_name = f'{i}.png'\n    image_path = os.path.join(save_path, image_name)\n    image.save(image_path)"
  },
  {
    "path": "tools/ldm_unet_config.json",
    "content": "{\n    \"_class_name\": \"UNet2DModel\",\n    \"_diffusers_version\": \"0.0.4\",\n    \"act_fn\": \"silu\",\n    \"attention_head_dim\": 32,\n    \"block_out_channels\": [\n      224,\n      448,\n      672,\n      896\n    ],\n    \"center_input_sample\": false,\n    \"down_block_types\": [\n      \"DownBlock2D\",\n      \"AttnDownBlock2D\",\n      \"AttnDownBlock2D\",\n      \"AttnDownBlock2D\"\n    ],\n    \"downsample_padding\": 1,\n    \"flip_sin_to_cos\": true,\n    \"freq_shift\": 0,\n    \"in_channels\": 3,\n    \"layers_per_block\": 2,\n    \"mid_block_scale_factor\": 1,\n    \"norm_eps\": 1e-05,\n    \"norm_num_groups\": 32,\n    \"out_channels\": 3,\n    \"sample_size\": 64,\n    \"time_embedding_type\": \"positional\",\n    \"up_block_types\": [\n      \"AttnUpBlock2D\",\n      \"AttnUpBlock2D\",\n      \"AttnUpBlock2D\",\n      \"UpBlock2D\"\n    ]\n  }"
  },
  {
    "path": "utils.py",
    "content": "import torch\nfrom glob import glob\nfrom PIL import Image\nimport os\nfrom torchvision import transforms as T\nfrom torchvision.datasets import CIFAR10, CIFAR100\n\nclass UnlabeledImageFolder(torch.utils.data.Dataset):\n    def __init__(self, root, transform=None, exts=[\"*.jpg\", \"*.png\", \"*.jpeg\", \"*.webp\"]):\n        self.root = root\n        self.files = []\n        self.transform = transform\n        for ext in exts:\n            self.files.extend(glob(os.path.join(root, '**/*.{}'.format(ext)), recursive=True))\n\n    def __len__(self):\n        return len(self.files)\n\n    def __getitem__(self, idx):\n        path = self.files[idx]\n        img = Image.open(path).convert(\"RGB\")\n        if self.transform is not None:\n            img = self.transform(img)\n        return img\n\ndef set_dropout(model, p):\n    for m in model.modules():\n        if isinstance(m, torch.nn.Dropout):\n            m.p = p\n\ndef get_dataset(name_or_path, transform=None):\n    if name_or_path.lower()=='cifar10':\n        if transform is None:\n            transform = T.Compose([\n                T.RandomHorizontalFlip(),\n                T.ToTensor(),\n                T.Normalize(mean=0.5, std=0.5),\n            ])\n        dataset = CIFAR10(root='./data', train=True, download=True, transform=transform)\n    elif name_or_path.lower()=='cifar100':\n        if transform is None:\n            transform = T.Compose([\n                T.RandomHorizontalFlip(),\n                T.ToTensor(),\n                T.Normalize(mean=0.5, std=0.5),\n            ])\n        dataset = CIFAR100(root='./data', train=True, download=True, transform=transform)\n    elif os.path.isdir(name_or_path):\n        if transform is None:\n            transform = T.Compose([\n                T.Resize(256),\n                T.RandomCrop(256),\n                T.RandomHorizontalFlip(),\n                T.ToTensor(),\n                T.Normalize(mean=0.5, std=0.5),\n            ])\n        dataset = UnlabeledImageFolder(name_or_path, transform=transform)\n    return dataset\n\n"
  }
]